repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
gem/oq-engine
openquake/calculators/views.py
view_extreme_groups
def view_extreme_groups(token, dstore): """ Show the source groups contributing the most to the highest IML """ data = dstore['disagg_by_grp'].value data.sort(order='extreme_poe') return rst_table(data[::-1])
python
def view_extreme_groups(token, dstore):
    """
    Show the source groups contributing the most to the highest IML
    """
    grp_data = dstore['disagg_by_grp'].value
    # sort ascending by extreme_poe, then reverse so the biggest
    # contributors come first in the table
    grp_data.sort(order='extreme_poe')
    return rst_table(grp_data[::-1])
[ "def", "view_extreme_groups", "(", "token", ",", "dstore", ")", ":", "data", "=", "dstore", "[", "'disagg_by_grp'", "]", ".", "value", "data", ".", "sort", "(", "order", "=", "'extreme_poe'", ")", "return", "rst_table", "(", "data", "[", ":", ":", "-", ...
Show the source groups contributing the most to the highest IML
[ "Show", "the", "source", "groups", "contributing", "the", "most", "to", "the", "highest", "IML" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L868-L874
train
gem/oq-engine
openquake/commonlib/oqzip.py
zip_all
def zip_all(directory): """ Zip source models and exposures recursively """ zips = [] for cwd, dirs, files in os.walk(directory): if 'ssmLT.xml' in files: zips.append(zip_source_model(os.path.join(cwd, 'ssmLT.xml'))) for f in files: if f.endswith('.xml') and 'exposure' in f.lower(): zips.append(zip_exposure(os.path.join(cwd, f))) total = sum(os.path.getsize(z) for z in zips) logging.info('Generated %s of zipped data', general.humansize(total))
python
def zip_all(directory):
    """
    Zip source models and exposures recursively
    """
    archives = []
    for cwd, dirs, files in os.walk(directory):
        # a directory containing ssmLT.xml is a source model root
        if 'ssmLT.xml' in files:
            archives.append(zip_source_model(os.path.join(cwd, 'ssmLT.xml')))
        archives.extend(
            zip_exposure(os.path.join(cwd, f))
            for f in files
            if f.endswith('.xml') and 'exposure' in f.lower())
    total = sum(os.path.getsize(a) for a in archives)
    logging.info('Generated %s of zipped data', general.humansize(total))
[ "def", "zip_all", "(", "directory", ")", ":", "zips", "=", "[", "]", "for", "cwd", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "directory", ")", ":", "if", "'ssmLT.xml'", "in", "files", ":", "zips", ".", "append", "(", "zip_source_model"...
Zip source models and exposures recursively
[ "Zip", "source", "models", "and", "exposures", "recursively" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqzip.py#L27-L39
train
gem/oq-engine
openquake/commonlib/oqzip.py
zip_source_model
def zip_source_model(ssmLT, archive_zip='', log=logging.info): """ Zip the source model files starting from the smmLT.xml file """ basedir = os.path.dirname(ssmLT) if os.path.basename(ssmLT) != 'ssmLT.xml': orig = ssmLT ssmLT = os.path.join(basedir, 'ssmLT.xml') with open(ssmLT, 'wb') as f: f.write(open(orig, 'rb').read()) archive_zip = archive_zip or os.path.join(basedir, 'ssmLT.zip') if os.path.exists(archive_zip): sys.exit('%s exists already' % archive_zip) oq = mock.Mock(inputs={'source_model_logic_tree': ssmLT}) checksum = readinput.get_checksum32(oq) checkfile = os.path.join(os.path.dirname(ssmLT), 'CHECKSUM.txt') with open(checkfile, 'w') as f: f.write(str(checksum)) files = logictree.collect_info(ssmLT).smpaths + [ os.path.abspath(ssmLT), os.path.abspath(checkfile)] general.zipfiles(files, archive_zip, log=log, cleanup=True) return archive_zip
python
def zip_source_model(ssmLT, archive_zip='', log=logging.info):
    """
    Zip the source model files starting from the ssmLT.xml file

    :param ssmLT: path to the source model logic tree file
    :param archive_zip: output zip name (default ssmLT.zip next to ssmLT)
    :param log: logging function used while zipping
    :returns: the pathname of the generated archive
    """
    basedir = os.path.dirname(ssmLT)
    if os.path.basename(ssmLT) != 'ssmLT.xml':
        # normalize the logic tree file name by copying it to ssmLT.xml;
        # the original code leaked the source file handle
        # (open(orig, 'rb').read() was never closed)
        orig = ssmLT
        ssmLT = os.path.join(basedir, 'ssmLT.xml')
        with open(orig, 'rb') as src, open(ssmLT, 'wb') as dst:
            dst.write(src.read())
    archive_zip = archive_zip or os.path.join(basedir, 'ssmLT.zip')
    if os.path.exists(archive_zip):
        sys.exit('%s exists already' % archive_zip)
    # readinput.get_checksum32 only needs an object with an `inputs` dict
    oq = mock.Mock(inputs={'source_model_logic_tree': ssmLT})
    checksum = readinput.get_checksum32(oq)
    checkfile = os.path.join(os.path.dirname(ssmLT), 'CHECKSUM.txt')
    with open(checkfile, 'w') as f:
        f.write(str(checksum))
    # zip the logic tree, its referenced source files and the checksum
    files = logictree.collect_info(ssmLT).smpaths + [
        os.path.abspath(ssmLT), os.path.abspath(checkfile)]
    general.zipfiles(files, archive_zip, log=log, cleanup=True)
    return archive_zip
[ "def", "zip_source_model", "(", "ssmLT", ",", "archive_zip", "=", "''", ",", "log", "=", "logging", ".", "info", ")", ":", "basedir", "=", "os", ".", "path", ".", "dirname", "(", "ssmLT", ")", "if", "os", ".", "path", ".", "basename", "(", "ssmLT", ...
Zip the source model files starting from the smmLT.xml file
[ "Zip", "the", "source", "model", "files", "starting", "from", "the", "smmLT", ".", "xml", "file" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqzip.py#L42-L64
train
gem/oq-engine
openquake/commonlib/oqzip.py
zip_job
def zip_job(job_ini, archive_zip='', risk_ini='', oq=None, log=logging.info): """ Zip the given job.ini file into the given archive, together with all related files. """ if not os.path.exists(job_ini): sys.exit('%s does not exist' % job_ini) archive_zip = archive_zip or 'job.zip' if isinstance(archive_zip, str): # actually it should be path-like if not archive_zip.endswith('.zip'): sys.exit('%s does not end with .zip' % archive_zip) if os.path.exists(archive_zip): sys.exit('%s exists already' % archive_zip) # do not validate to avoid permissions error on the export_dir oq = oq or readinput.get_oqparam(job_ini, validate=False) if risk_ini: risk_ini = os.path.normpath(os.path.abspath(risk_ini)) risk_inputs = readinput.get_params([risk_ini])['inputs'] del risk_inputs['job_ini'] oq.inputs.update(risk_inputs) files = readinput.get_input_files(oq) if risk_ini: files = [risk_ini] + files return general.zipfiles(files, archive_zip, log=log)
python
def zip_job(job_ini, archive_zip='', risk_ini='', oq=None, log=logging.info):
    """
    Zip the given job.ini file into the given archive, together with all
    related files.
    """
    if not os.path.exists(job_ini):
        sys.exit('%s does not exist' % job_ini)
    archive_zip = archive_zip or 'job.zip'
    if isinstance(archive_zip, str):  # actually it should be path-like
        if not archive_zip.endswith('.zip'):
            sys.exit('%s does not end with .zip' % archive_zip)
        if os.path.exists(archive_zip):
            sys.exit('%s exists already' % archive_zip)
    # do not validate to avoid permissions error on the export_dir
    oq = oq or readinput.get_oqparam(job_ini, validate=False)
    if risk_ini:
        # merge the risk inputs into the hazard parameters
        risk_ini = os.path.normpath(os.path.abspath(risk_ini))
        extra_inputs = readinput.get_params([risk_ini])['inputs']
        del extra_inputs['job_ini']
        oq.inputs.update(extra_inputs)
    input_files = readinput.get_input_files(oq)
    if risk_ini:
        input_files = [risk_ini] + input_files
    return general.zipfiles(input_files, archive_zip, log=log)
[ "def", "zip_job", "(", "job_ini", ",", "archive_zip", "=", "''", ",", "risk_ini", "=", "''", ",", "oq", "=", "None", ",", "log", "=", "logging", ".", "info", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "job_ini", ")", ":", "sys"...
Zip the given job.ini file into the given archive, together with all related files.
[ "Zip", "the", "given", "job", ".", "ini", "file", "into", "the", "given", "archive", "together", "with", "all", "related", "files", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqzip.py#L80-L103
train
gem/oq-engine
openquake/calculators/reportwriter.py
build_report
def build_report(job_ini, output_dir=None): """ Write a `report.csv` file with information about the calculation without running it :param job_ini: full pathname of the job.ini file :param output_dir: the directory where the report is written (default the input directory) """ calc_id = logs.init() oq = readinput.get_oqparam(job_ini) if oq.calculation_mode == 'classical': oq.calculation_mode = 'preclassical' oq.ground_motion_fields = False output_dir = output_dir or os.path.dirname(job_ini) from openquake.calculators import base # ugly calc = base.calculators(oq, calc_id) calc.save_params() # needed to save oqparam # some taken is care so that the real calculation is not run: # the goal is to extract information about the source management only calc.pre_execute() if oq.calculation_mode == 'preclassical': calc.execute() rw = ReportWriter(calc.datastore) rw.make_report() report = (os.path.join(output_dir, 'report.rst') if output_dir else calc.datastore.export_path('report.rst')) try: rw.save(report) except IOError as exc: # permission error sys.stderr.write(str(exc) + '\n') readinput.exposure = None # ugly hack return report
python
def build_report(job_ini, output_dir=None):
    """
    Write a `report.rst` file with information about the calculation
    without running it.

    :param job_ini:
        full pathname of the job.ini file
    :param output_dir:
        the directory where the report is written (default the input
        directory)
    :returns: the pathname of the generated report
    """
    calc_id = logs.init()
    oq = readinput.get_oqparam(job_ini)
    if oq.calculation_mode == 'classical':
        # downgrade to the cheaper preclassical mode: we only need the
        # source management information, not the full hazard curves
        oq.calculation_mode = 'preclassical'
    oq.ground_motion_fields = False
    output_dir = output_dir or os.path.dirname(job_ini)
    from openquake.calculators import base  # ugly
    calc = base.calculators(oq, calc_id)
    calc.save_params()  # needed to save oqparam
    # some care is taken so that the real calculation is not run:
    # the goal is to extract information about the source management only
    calc.pre_execute()
    if oq.calculation_mode == 'preclassical':
        calc.execute()
    rw = ReportWriter(calc.datastore)
    rw.make_report()
    report = (os.path.join(output_dir, 'report.rst') if output_dir
              else calc.datastore.export_path('report.rst'))
    try:
        rw.save(report)
    except IOError as exc:  # permission error
        sys.stderr.write(str(exc) + '\n')
    readinput.exposure = None  # ugly hack
    return report
[ "def", "build_report", "(", "job_ini", ",", "output_dir", "=", "None", ")", ":", "calc_id", "=", "logs", ".", "init", "(", ")", "oq", "=", "readinput", ".", "get_oqparam", "(", "job_ini", ")", "if", "oq", ".", "calculation_mode", "==", "'classical'", ":"...
Write a `report.csv` file with information about the calculation without running it :param job_ini: full pathname of the job.ini file :param output_dir: the directory where the report is written (default the input directory)
[ "Write", "a", "report", ".", "csv", "file", "with", "information", "about", "the", "calculation", "without", "running", "it" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/reportwriter.py#L125-L159
train
gem/oq-engine
openquake/calculators/reportwriter.py
ReportWriter.add
def add(self, name, obj=None): """Add the view named `name` to the report text""" if obj: text = '\n::\n\n' + indent(str(obj)) else: text = views.view(name, self.dstore) if text: title = self.title[name] line = '-' * len(title) self.text += '\n'.join(['\n\n' + title, line, text])
python
def add(self, name, obj=None):
    """Add the view named `name` to the report text"""
    if not obj:
        # no object given: render the named view from the datastore
        text = views.view(name, self.dstore)
    else:
        # literal object: show it as an indented RST literal block
        text = '\n::\n\n' + indent(str(obj))
    if text:
        title = self.title[name]
        underline = '-' * len(title)
        self.text += '\n'.join(['\n\n' + title, underline, text])
[ "def", "add", "(", "self", ",", "name", ",", "obj", "=", "None", ")", ":", "if", "obj", ":", "text", "=", "'\\n::\\n\\n'", "+", "indent", "(", "str", "(", "obj", ")", ")", "else", ":", "text", "=", "views", ".", "view", "(", "name", ",", "self"...
Add the view named `name` to the report text
[ "Add", "the", "view", "named", "name", "to", "the", "report", "text" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/reportwriter.py#L74-L83
train
gem/oq-engine
openquake/calculators/reportwriter.py
ReportWriter.make_report
def make_report(self): """Build the report and return a restructed text string""" oq, ds = self.oq, self.dstore for name in ('params', 'inputs'): self.add(name) if 'csm_info' in ds: self.add('csm_info') if ds['csm_info'].source_models[0].name != 'scenario': # required_params_per_trt makes no sense for GMFs from file self.add('required_params_per_trt') self.add('rlzs_assoc', ds['csm_info'].get_rlzs_assoc()) if 'csm_info' in ds: self.add('ruptures_per_trt') if 'rup_data' in ds: self.add('ruptures_events') if oq.calculation_mode in ('event_based_risk',): self.add('avglosses_data_transfer') if 'exposure' in oq.inputs: self.add('exposure_info') if 'source_info' in ds: self.add('slow_sources') self.add('times_by_source_class') self.add('dupl_sources') if 'task_info' in ds: self.add('task_info') tasks = set(ds['task_info']) if 'classical' in tasks: self.add('task_hazard:0') self.add('task_hazard:-1') self.add('job_info') if 'performance_data' in ds: self.add('performance') return self.text
python
def make_report(self):
    """Build the report and return a restructured text string"""
    oq, ds = self.oq, self.dstore
    # always present: calculation parameters and input files
    for name in ('params', 'inputs'):
        self.add(name)
    # views depending on the composite source model information
    if 'csm_info' in ds:
        self.add('csm_info')
        if ds['csm_info'].source_models[0].name != 'scenario':
            # required_params_per_trt makes no sense for GMFs from file
            self.add('required_params_per_trt')
        self.add('rlzs_assoc', ds['csm_info'].get_rlzs_assoc())
    if 'csm_info' in ds:
        self.add('ruptures_per_trt')
    if 'rup_data' in ds:
        self.add('ruptures_events')
    if oq.calculation_mode in ('event_based_risk',):
        self.add('avglosses_data_transfer')
    if 'exposure' in oq.inputs:
        self.add('exposure_info')
    # per-source information, available after the source management phase
    if 'source_info' in ds:
        self.add('slow_sources')
        self.add('times_by_source_class')
        self.add('dupl_sources')
    if 'task_info' in ds:
        self.add('task_info')
        tasks = set(ds['task_info'])
        if 'classical' in tasks:
            # fastest and slowest classical tasks
            self.add('task_hazard:0')
            self.add('task_hazard:-1')
    self.add('job_info')
    if 'performance_data' in ds:
        self.add('performance')
    return self.text
[ "def", "make_report", "(", "self", ")", ":", "oq", ",", "ds", "=", "self", ".", "oq", ",", "self", ".", "dstore", "for", "name", "in", "(", "'params'", ",", "'inputs'", ")", ":", "self", ".", "add", "(", "name", ")", "if", "'csm_info'", "in", "ds...
Build the report and return a restructed text string
[ "Build", "the", "report", "and", "return", "a", "restructed", "text", "string" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/reportwriter.py#L85-L117
train
gem/oq-engine
openquake/calculators/reportwriter.py
ReportWriter.save
def save(self, fname): """Save the report""" with open(fname, 'wb') as f: f.write(encode(self.text))
python
def save(self, fname):
    """Save the report text into the file `fname`"""
    # encode to bytes and write in binary mode
    data = encode(self.text)
    with open(fname, 'wb') as out:
        out.write(data)
[ "def", "save", "(", "self", ",", "fname", ")", ":", "with", "open", "(", "fname", ",", "'wb'", ")", "as", "f", ":", "f", ".", "write", "(", "encode", "(", "self", ".", "text", ")", ")" ]
Save the report
[ "Save", "the", "report" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/reportwriter.py#L119-L122
train
gem/oq-engine
openquake/hazardlib/gsim/frankel_1996.py
FrankelEtAl1996MblgAB1987NSHMP2008._compute_mean
def _compute_mean(self, imt, mag, rhypo): """ Compute mean value from lookup table. Lookup table defines log10(IMT) (in g) for combinations of Mw and log10(rhypo) values. ``mag`` is therefore converted from Mblg to Mw using Atkinson and Boore 1987 conversion equation. Mean value is finally converted from base 10 to base e. """ mag = np.zeros_like(rhypo) + self._convert_magnitude(mag) # to avoid run time warning in case rhypo is zero set minimum distance # to 10, which is anyhow the minimum distance allowed by the tables rhypo[rhypo < 10] = 10 rhypo = np.log10(rhypo) # create lookup table and interpolate it at magnitude/distance values table = RectBivariateSpline( self.MAGS, self.DISTS, self.IMTS_TABLES[imt].T ) mean = table.ev(mag, rhypo) # convert mean from base 10 to base e return mean * np.log(10)
python
def _compute_mean(self, imt, mag, rhypo): """ Compute mean value from lookup table. Lookup table defines log10(IMT) (in g) for combinations of Mw and log10(rhypo) values. ``mag`` is therefore converted from Mblg to Mw using Atkinson and Boore 1987 conversion equation. Mean value is finally converted from base 10 to base e. """ mag = np.zeros_like(rhypo) + self._convert_magnitude(mag) # to avoid run time warning in case rhypo is zero set minimum distance # to 10, which is anyhow the minimum distance allowed by the tables rhypo[rhypo < 10] = 10 rhypo = np.log10(rhypo) # create lookup table and interpolate it at magnitude/distance values table = RectBivariateSpline( self.MAGS, self.DISTS, self.IMTS_TABLES[imt].T ) mean = table.ev(mag, rhypo) # convert mean from base 10 to base e return mean * np.log(10)
[ "def", "_compute_mean", "(", "self", ",", "imt", ",", "mag", ",", "rhypo", ")", ":", "mag", "=", "np", ".", "zeros_like", "(", "rhypo", ")", "+", "self", ".", "_convert_magnitude", "(", "mag", ")", "rhypo", "[", "rhypo", "<", "10", "]", "=", "10", ...
Compute mean value from lookup table. Lookup table defines log10(IMT) (in g) for combinations of Mw and log10(rhypo) values. ``mag`` is therefore converted from Mblg to Mw using Atkinson and Boore 1987 conversion equation. Mean value is finally converted from base 10 to base e.
[ "Compute", "mean", "value", "from", "lookup", "table", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/frankel_1996.py#L129-L152
train
gem/oq-engine
openquake/hmtk/plotting/seismicity/occurrence/recurrence_plot.py
_get_recurrence_model
def _get_recurrence_model(input_model): """ Returns the annual and cumulative recurrence rates predicted by the recurrence model """ if not isinstance(input_model, (TruncatedGRMFD, EvenlyDiscretizedMFD, YoungsCoppersmith1985MFD)): raise ValueError('Recurrence model not recognised') # Get model annual occurrence rates annual_rates = input_model.get_annual_occurrence_rates() annual_rates = np.array([[val[0], val[1]] for val in annual_rates]) # Get cumulative rates cumulative_rates = np.array([np.sum(annual_rates[iloc:, 1]) for iloc in range(0, len(annual_rates), 1)]) return annual_rates, cumulative_rates
python
def _get_recurrence_model(input_model):
    """
    Returns the annual and cumulative recurrence rates predicted by the
    recurrence model

    :param input_model:
        a TruncatedGRMFD, EvenlyDiscretizedMFD or
        YoungsCoppersmith1985MFD instance
    :returns:
        a pair (annual_rates, cumulative_rates): annual_rates is an
        (N, 2) array of (magnitude, rate) rows; cumulative_rates[i] is
        the total rate for magnitudes >= annual_rates[i, 0]
    :raises ValueError: if the model type is not recognised
    """
    if not isinstance(input_model, (TruncatedGRMFD, EvenlyDiscretizedMFD,
                                    YoungsCoppersmith1985MFD)):
        raise ValueError('Recurrence model not recognised')
    # Get model annual occurrence rates
    annual_rates = np.array(
        [[val[0], val[1]]
         for val in input_model.get_annual_occurrence_rates()])
    # Cumulative rate above each magnitude: a reversed cumulative sum,
    # O(N) instead of the original quadratic sum-per-index loop
    cumulative_rates = np.cumsum(annual_rates[::-1, 1])[::-1]
    return annual_rates, cumulative_rates
[ "def", "_get_recurrence_model", "(", "input_model", ")", ":", "if", "not", "isinstance", "(", "input_model", ",", "(", "TruncatedGRMFD", ",", "EvenlyDiscretizedMFD", ",", "YoungsCoppersmith1985MFD", ")", ")", ":", "raise", "ValueError", "(", "'Recurrence model not rec...
Returns the annual and cumulative recurrence rates predicted by the recurrence model
[ "Returns", "the", "annual", "and", "cumulative", "recurrence", "rates", "predicted", "by", "the", "recurrence", "model" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/occurrence/recurrence_plot.py#L62-L77
train
gem/oq-engine
openquake/hmtk/plotting/seismicity/occurrence/recurrence_plot.py
_check_completeness_table
def _check_completeness_table(completeness, catalogue): """ Generates the completeness table according to different instances """ if isinstance(completeness, np.ndarray) and np.shape(completeness)[1] == 2: return completeness elif isinstance(completeness, float): return np.array([[float(np.min(catalogue.data['year'])), completeness]]) elif completeness is None: return np.array([[float(np.min(catalogue.data['year'])), np.min(catalogue.data['magnitude'])]]) else: raise ValueError('Completeness representation not recognised')
python
def _check_completeness_table(completeness, catalogue): """ Generates the completeness table according to different instances """ if isinstance(completeness, np.ndarray) and np.shape(completeness)[1] == 2: return completeness elif isinstance(completeness, float): return np.array([[float(np.min(catalogue.data['year'])), completeness]]) elif completeness is None: return np.array([[float(np.min(catalogue.data['year'])), np.min(catalogue.data['magnitude'])]]) else: raise ValueError('Completeness representation not recognised')
[ "def", "_check_completeness_table", "(", "completeness", ",", "catalogue", ")", ":", "if", "isinstance", "(", "completeness", ",", "np", ".", "ndarray", ")", "and", "np", ".", "shape", "(", "completeness", ")", "[", "1", "]", "==", "2", ":", "return", "c...
Generates the completeness table according to different instances
[ "Generates", "the", "completeness", "table", "according", "to", "different", "instances" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/occurrence/recurrence_plot.py#L80-L93
train
gem/oq-engine
openquake/hmtk/plotting/seismicity/occurrence/recurrence_plot.py
plot_recurrence_model
def plot_recurrence_model( input_model, catalogue, completeness, dmag=0.1, filename=None, figure_size=(8, 6), filetype='png', dpi=300, ax=None): """ Plot a calculated recurrence model over an observed catalogue, adjusted for time-varying completeness """ annual_rates, cumulative_rates = _get_recurrence_model(input_model) # Get observed annual recurrence if not catalogue.end_year: catalogue.update_end_year() cent_mag, t_per, n_obs = get_completeness_counts(catalogue, completeness, dmag) obs_rates = n_obs / t_per cum_obs_rates = np.array([np.sum(obs_rates[i:]) for i in range(len(obs_rates))]) if ax is None: fig, ax = plt.subplots(figsize=figure_size) else: fig = ax.get_figure() ax.semilogy(cent_mag, obs_rates, 'bo') ax.semilogy(annual_rates[:, 0], annual_rates[:, 1], 'b-') ax.semilogy(cent_mag, cum_obs_rates, 'rs') ax.semilogy(annual_rates[:, 0], cumulative_rates, 'r-') ax.grid(which='both') ax.set_xlabel('Magnitude') ax.set_ylabel('Annual Rate') ax.legend(['Observed Incremental Rate', 'Model Incremental Rate', 'Observed Cumulative Rate', 'Model Cumulative Rate']) ax.tick_params(labelsize=12) _save_image(fig, filename, filetype, dpi)
python
def plot_recurrence_model(
        input_model, catalogue, completeness, dmag=0.1, filename=None,
        figure_size=(8, 6), filetype='png', dpi=300, ax=None):
    """
    Plot a calculated recurrence model over an observed catalogue,
    adjusted for time-varying completeness
    """
    annual_rates, cumulative_rates = _get_recurrence_model(input_model)
    # Get observed annual recurrence
    if not catalogue.end_year:
        catalogue.update_end_year()
    cent_mag, t_per, n_obs = get_completeness_counts(catalogue,
                                                     completeness,
                                                     dmag)
    obs_rates = n_obs / t_per
    # observed cumulative rate above each magnitude bin
    cum_obs_rates = np.array([np.sum(obs_rates[i:])
                              for i in range(len(obs_rates))])

    # reuse the given axes if any, otherwise open a new figure
    if ax is None:
        fig, ax = plt.subplots(figsize=figure_size)
    else:
        fig = ax.get_figure()

    # observed rates as markers, modelled rates as lines (log y-axis)
    ax.semilogy(cent_mag, obs_rates, 'bo')
    ax.semilogy(annual_rates[:, 0], annual_rates[:, 1], 'b-')
    ax.semilogy(cent_mag, cum_obs_rates, 'rs')
    ax.semilogy(annual_rates[:, 0], cumulative_rates, 'r-')
    ax.grid(which='both')
    ax.set_xlabel('Magnitude')
    ax.set_ylabel('Annual Rate')
    ax.legend(['Observed Incremental Rate',
               'Model Incremental Rate',
               'Observed Cumulative Rate',
               'Model Cumulative Rate'])
    ax.tick_params(labelsize=12)
    _save_image(fig, filename, filetype, dpi)
[ "def", "plot_recurrence_model", "(", "input_model", ",", "catalogue", ",", "completeness", ",", "dmag", "=", "0.1", ",", "filename", "=", "None", ",", "figure_size", "=", "(", "8", ",", "6", ")", ",", "filetype", "=", "'png'", ",", "dpi", "=", "300", "...
Plot a calculated recurrence model over an observed catalogue, adjusted for time-varying completeness
[ "Plot", "a", "calculated", "recurrence", "model", "over", "an", "observed", "catalogue", "adjusted", "for", "time", "-", "varying", "completeness" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/occurrence/recurrence_plot.py#L96-L132
train
gem/oq-engine
openquake/hmtk/plotting/seismicity/occurrence/recurrence_plot.py
plot_trunc_gr_model
def plot_trunc_gr_model( aval, bval, min_mag, max_mag, dmag, catalogue=None, completeness=None, filename=None, figure_size=(8, 6), filetype='png', dpi=300, ax=None): """ Plots a Gutenberg-Richter model """ input_model = TruncatedGRMFD(min_mag, max_mag, dmag, aval, bval) if not catalogue: # Plot only the modelled recurrence annual_rates, cumulative_rates = _get_recurrence_model(input_model) if ax is None: fig, ax = plt.subplots(figsize=figure_size) else: fig = ax.get_figure() ax.semilogy(annual_rates[:, 0], annual_rates[:, 1], 'b-') ax.semilogy(annual_rates[:, 0], cumulative_rates, 'r-') ax.xlabel('Magnitude') ax.set_ylabel('Annual Rate') ax.set_legend(['Incremental Rate', 'Cumulative Rate']) _save_image(fig, filename, filetype, dpi) else: completeness = _check_completeness_table(completeness, catalogue) plot_recurrence_model( input_model, catalogue, completeness, dmag, filename=filename, figure_size=figure_size, filetype=filetype, dpi=dpi, ax=ax)
python
def plot_trunc_gr_model(
        aval, bval, min_mag, max_mag, dmag, catalogue=None,
        completeness=None, filename=None, figure_size=(8, 6),
        filetype='png', dpi=300, ax=None):
    """
    Plots a Gutenberg-Richter model

    :param aval: Gutenberg-Richter a-value
    :param bval: Gutenberg-Richter b-value
    :param min_mag: minimum magnitude of the model
    :param max_mag: maximum magnitude of the model
    :param dmag: magnitude bin width
    :param catalogue: optional observed catalogue to plot against
    :param completeness: optional completeness table (see
        _check_completeness_table for the accepted representations)
    """
    input_model = TruncatedGRMFD(min_mag, max_mag, dmag, aval, bval)
    if not catalogue:
        # Plot only the modelled recurrence
        annual_rates, cumulative_rates = _get_recurrence_model(input_model)
        if ax is None:
            fig, ax = plt.subplots(figsize=figure_size)
        else:
            fig = ax.get_figure()
        ax.semilogy(annual_rates[:, 0], annual_rates[:, 1], 'b-')
        ax.semilogy(annual_rates[:, 0], cumulative_rates, 'r-')
        # BUG FIX: matplotlib Axes has no `xlabel` or `set_legend`
        # methods - the original `ax.xlabel(...)` and
        # `ax.set_legend(...)` calls raised AttributeError at runtime;
        # the correct API is set_xlabel/legend
        ax.set_xlabel('Magnitude')
        ax.set_ylabel('Annual Rate')
        ax.legend(['Incremental Rate', 'Cumulative Rate'])
        _save_image(fig, filename, filetype, dpi)
    else:
        # overlay the model on the observed catalogue
        completeness = _check_completeness_table(completeness, catalogue)
        plot_recurrence_model(
            input_model, catalogue, completeness, dmag, filename=filename,
            figure_size=figure_size, filetype=filetype, dpi=dpi, ax=ax)
[ "def", "plot_trunc_gr_model", "(", "aval", ",", "bval", ",", "min_mag", ",", "max_mag", ",", "dmag", ",", "catalogue", "=", "None", ",", "completeness", "=", "None", ",", "filename", "=", "None", ",", "figure_size", "=", "(", "8", ",", "6", ")", ",", ...
Plots a Gutenberg-Richter model
[ "Plots", "a", "Gutenberg", "-", "Richter", "model" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/occurrence/recurrence_plot.py#L135-L163
train
gem/oq-engine
openquake/hazardlib/nrml.py
read
def read(source, chatty=True, stop=None): """ Convert a NRML file into a validated Node object. Keeps the entire tree in memory. :param source: a file name or file object open for reading """ vparser = ValidatingXmlParser(validators, stop) nrml = vparser.parse_file(source) if striptag(nrml.tag) != 'nrml': raise ValueError('%s: expected a node of kind nrml, got %s' % (source, nrml.tag)) # extract the XML namespace URL ('http://openquake.org/xmlns/nrml/0.5') xmlns = nrml.tag.split('}')[0][1:] if xmlns != NRML05 and chatty: # for the moment NRML04 is still supported, so we hide the warning logging.debug('%s is at an outdated version: %s', source, xmlns) nrml['xmlns'] = xmlns nrml['xmlns:gml'] = GML_NAMESPACE return nrml
python
def read(source, chatty=True, stop=None):
    """
    Convert a NRML file into a validated Node object. Keeps the
    entire tree in memory.

    :param source:
        a file name or file object open for reading
    """
    parser = ValidatingXmlParser(validators, stop)
    root = parser.parse_file(source)
    if striptag(root.tag) != 'nrml':
        raise ValueError('%s: expected a node of kind nrml, got %s' %
                         (source, root.tag))
    # extract the XML namespace URL ('http://openquake.org/xmlns/nrml/0.5')
    xmlns = root.tag.split('}')[0][1:]
    if chatty and xmlns != NRML05:
        # for the moment NRML04 is still supported, so we hide the warning
        logging.debug('%s is at an outdated version: %s', source, xmlns)
    root['xmlns'] = xmlns
    root['xmlns:gml'] = GML_NAMESPACE
    return root
[ "def", "read", "(", "source", ",", "chatty", "=", "True", ",", "stop", "=", "None", ")", ":", "vparser", "=", "ValidatingXmlParser", "(", "validators", ",", "stop", ")", "nrml", "=", "vparser", ".", "parse_file", "(", "source", ")", "if", "striptag", "...
Convert a NRML file into a validated Node object. Keeps the entire tree in memory. :param source: a file name or file object open for reading
[ "Convert", "a", "NRML", "file", "into", "a", "validated", "Node", "object", ".", "Keeps", "the", "entire", "tree", "in", "memory", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/nrml.py#L329-L349
train
gem/oq-engine
openquake/hazardlib/nrml.py
write
def write(nodes, output=sys.stdout, fmt='%.7E', gml=True, xmlns=None): """ Convert nodes into a NRML file. output must be a file object open in write mode. If you want to perform a consistency check, open it in read-write mode, then it will be read after creation and validated. :params nodes: an iterable over Node objects :params output: a file-like object in write or read-write mode :param fmt: format used for writing the floats (default '%.7E') :param gml: add the http://www.opengis.net/gml namespace :param xmlns: NRML namespace like http://openquake.org/xmlns/nrml/0.4 """ root = Node('nrml', nodes=nodes) namespaces = {xmlns or NRML05: ''} if gml: namespaces[GML_NAMESPACE] = 'gml:' with floatformat(fmt): node_to_xml(root, output, namespaces) if hasattr(output, 'mode') and '+' in output.mode: # read-write mode output.seek(0) read(output)
python
def write(nodes, output=sys.stdout, fmt='%.7E', gml=True, xmlns=None): """ Convert nodes into a NRML file. output must be a file object open in write mode. If you want to perform a consistency check, open it in read-write mode, then it will be read after creation and validated. :params nodes: an iterable over Node objects :params output: a file-like object in write or read-write mode :param fmt: format used for writing the floats (default '%.7E') :param gml: add the http://www.opengis.net/gml namespace :param xmlns: NRML namespace like http://openquake.org/xmlns/nrml/0.4 """ root = Node('nrml', nodes=nodes) namespaces = {xmlns or NRML05: ''} if gml: namespaces[GML_NAMESPACE] = 'gml:' with floatformat(fmt): node_to_xml(root, output, namespaces) if hasattr(output, 'mode') and '+' in output.mode: # read-write mode output.seek(0) read(output)
[ "def", "write", "(", "nodes", ",", "output", "=", "sys", ".", "stdout", ",", "fmt", "=", "'%.7E'", ",", "gml", "=", "True", ",", "xmlns", "=", "None", ")", ":", "root", "=", "Node", "(", "'nrml'", ",", "nodes", "=", "nodes", ")", "namespaces", "=...
Convert nodes into a NRML file. output must be a file object open in write mode. If you want to perform a consistency check, open it in read-write mode, then it will be read after creation and validated. :params nodes: an iterable over Node objects :params output: a file-like object in write or read-write mode :param fmt: format used for writing the floats (default '%.7E') :param gml: add the http://www.opengis.net/gml namespace :param xmlns: NRML namespace like http://openquake.org/xmlns/nrml/0.4
[ "Convert", "nodes", "into", "a", "NRML", "file", ".", "output", "must", "be", "a", "file", "object", "open", "in", "write", "mode", ".", "If", "you", "want", "to", "perform", "a", "consistency", "check", "open", "it", "in", "read", "-", "write", "mode"...
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/nrml.py#L352-L373
train
gem/oq-engine
openquake/hazardlib/nrml.py
to_string
def to_string(node): """ Convert a node into a string in NRML format """ with io.BytesIO() as f: write([node], f) return f.getvalue().decode('utf-8')
python
def to_string(node): """ Convert a node into a string in NRML format """ with io.BytesIO() as f: write([node], f) return f.getvalue().decode('utf-8')
[ "def", "to_string", "(", "node", ")", ":", "with", "io", ".", "BytesIO", "(", ")", "as", "f", ":", "write", "(", "[", "node", "]", ",", "f", ")", "return", "f", ".", "getvalue", "(", ")", ".", "decode", "(", "'utf-8'", ")" ]
Convert a node into a string in NRML format
[ "Convert", "a", "node", "into", "a", "string", "in", "NRML", "format" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/nrml.py#L376-L382
train
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014.get_mean_values
def get_mean_values(self, C, sites, rup, dists, a1100): """ Returns the mean values for a specific IMT """ if isinstance(a1100, np.ndarray): # Site model defined temp_vs30 = sites.vs30 temp_z2pt5 = sites.z2pt5 else: # Default site and basin model temp_vs30 = 1100.0 * np.ones(len(sites.vs30)) temp_z2pt5 = self._select_basin_model(1100.0) *\ np.ones_like(temp_vs30) return (self._get_magnitude_term(C, rup.mag) + self._get_geometric_attenuation_term(C, rup.mag, dists.rrup) + self._get_style_of_faulting_term(C, rup) + self._get_hanging_wall_term(C, rup, dists) + self._get_shallow_site_response_term(C, temp_vs30, a1100) + self._get_basin_response_term(C, temp_z2pt5) + self._get_hypocentral_depth_term(C, rup) + self._get_fault_dip_term(C, rup) + self._get_anelastic_attenuation_term(C, dists.rrup))
python
def get_mean_values(self, C, sites, rup, dists, a1100): """ Returns the mean values for a specific IMT """ if isinstance(a1100, np.ndarray): # Site model defined temp_vs30 = sites.vs30 temp_z2pt5 = sites.z2pt5 else: # Default site and basin model temp_vs30 = 1100.0 * np.ones(len(sites.vs30)) temp_z2pt5 = self._select_basin_model(1100.0) *\ np.ones_like(temp_vs30) return (self._get_magnitude_term(C, rup.mag) + self._get_geometric_attenuation_term(C, rup.mag, dists.rrup) + self._get_style_of_faulting_term(C, rup) + self._get_hanging_wall_term(C, rup, dists) + self._get_shallow_site_response_term(C, temp_vs30, a1100) + self._get_basin_response_term(C, temp_z2pt5) + self._get_hypocentral_depth_term(C, rup) + self._get_fault_dip_term(C, rup) + self._get_anelastic_attenuation_term(C, dists.rrup))
[ "def", "get_mean_values", "(", "self", ",", "C", ",", "sites", ",", "rup", ",", "dists", ",", "a1100", ")", ":", "if", "isinstance", "(", "a1100", ",", "np", ".", "ndarray", ")", ":", "temp_vs30", "=", "sites", ".", "vs30", "temp_z2pt5", "=", "sites"...
Returns the mean values for a specific IMT
[ "Returns", "the", "mean", "values", "for", "a", "specific", "IMT" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L122-L144
train
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_magnitude_term
def _get_magnitude_term(self, C, mag): """ Returns the magnitude scaling term defined in equation 2 """ f_mag = C["c0"] + C["c1"] * mag if (mag > 4.5) and (mag <= 5.5): return f_mag + (C["c2"] * (mag - 4.5)) elif (mag > 5.5) and (mag <= 6.5): return f_mag + (C["c2"] * (mag - 4.5)) + (C["c3"] * (mag - 5.5)) elif mag > 6.5: return f_mag + (C["c2"] * (mag - 4.5)) + (C["c3"] * (mag - 5.5)) +\ (C["c4"] * (mag - 6.5)) else: return f_mag
python
def _get_magnitude_term(self, C, mag): """ Returns the magnitude scaling term defined in equation 2 """ f_mag = C["c0"] + C["c1"] * mag if (mag > 4.5) and (mag <= 5.5): return f_mag + (C["c2"] * (mag - 4.5)) elif (mag > 5.5) and (mag <= 6.5): return f_mag + (C["c2"] * (mag - 4.5)) + (C["c3"] * (mag - 5.5)) elif mag > 6.5: return f_mag + (C["c2"] * (mag - 4.5)) + (C["c3"] * (mag - 5.5)) +\ (C["c4"] * (mag - 6.5)) else: return f_mag
[ "def", "_get_magnitude_term", "(", "self", ",", "C", ",", "mag", ")", ":", "f_mag", "=", "C", "[", "\"c0\"", "]", "+", "C", "[", "\"c1\"", "]", "*", "mag", "if", "(", "mag", ">", "4.5", ")", "and", "(", "mag", "<=", "5.5", ")", ":", "return", ...
Returns the magnitude scaling term defined in equation 2
[ "Returns", "the", "magnitude", "scaling", "term", "defined", "in", "equation", "2" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L146-L159
train
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_geometric_attenuation_term
def _get_geometric_attenuation_term(self, C, mag, rrup): """ Returns the geometric attenuation term defined in equation 3 """ return (C["c5"] + C["c6"] * mag) * np.log(np.sqrt((rrup ** 2.) + (C["c7"] ** 2.)))
python
def _get_geometric_attenuation_term(self, C, mag, rrup): """ Returns the geometric attenuation term defined in equation 3 """ return (C["c5"] + C["c6"] * mag) * np.log(np.sqrt((rrup ** 2.) + (C["c7"] ** 2.)))
[ "def", "_get_geometric_attenuation_term", "(", "self", ",", "C", ",", "mag", ",", "rrup", ")", ":", "return", "(", "C", "[", "\"c5\"", "]", "+", "C", "[", "\"c6\"", "]", "*", "mag", ")", "*", "np", ".", "log", "(", "np", ".", "sqrt", "(", "(", ...
Returns the geometric attenuation term defined in equation 3
[ "Returns", "the", "geometric", "attenuation", "term", "defined", "in", "equation", "3" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L161-L166
train
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_style_of_faulting_term
def _get_style_of_faulting_term(self, C, rup): """ Returns the style-of-faulting scaling term defined in equations 4 to 6 """ if (rup.rake > 30.0) and (rup.rake < 150.): frv = 1.0 fnm = 0.0 elif (rup.rake > -150.0) and (rup.rake < -30.0): fnm = 1.0 frv = 0.0 else: fnm = 0.0 frv = 0.0 fflt_f = (self.CONSTS["c8"] * frv) + (C["c9"] * fnm) if rup.mag <= 4.5: fflt_m = 0.0 elif rup.mag > 5.5: fflt_m = 1.0 else: fflt_m = rup.mag - 4.5 return fflt_f * fflt_m
python
def _get_style_of_faulting_term(self, C, rup): """ Returns the style-of-faulting scaling term defined in equations 4 to 6 """ if (rup.rake > 30.0) and (rup.rake < 150.): frv = 1.0 fnm = 0.0 elif (rup.rake > -150.0) and (rup.rake < -30.0): fnm = 1.0 frv = 0.0 else: fnm = 0.0 frv = 0.0 fflt_f = (self.CONSTS["c8"] * frv) + (C["c9"] * fnm) if rup.mag <= 4.5: fflt_m = 0.0 elif rup.mag > 5.5: fflt_m = 1.0 else: fflt_m = rup.mag - 4.5 return fflt_f * fflt_m
[ "def", "_get_style_of_faulting_term", "(", "self", ",", "C", ",", "rup", ")", ":", "if", "(", "rup", ".", "rake", ">", "30.0", ")", "and", "(", "rup", ".", "rake", "<", "150.", ")", ":", "frv", "=", "1.0", "fnm", "=", "0.0", "elif", "(", "rup", ...
Returns the style-of-faulting scaling term defined in equations 4 to 6
[ "Returns", "the", "style", "-", "of", "-", "faulting", "scaling", "term", "defined", "in", "equations", "4", "to", "6" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L168-L189
train
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_hanging_wall_term
def _get_hanging_wall_term(self, C, rup, dists): """ Returns the hanging wall scaling term defined in equations 7 to 16 """ return (C["c10"] * self._get_hanging_wall_coeffs_rx(C, rup, dists.rx) * self._get_hanging_wall_coeffs_rrup(dists) * self._get_hanging_wall_coeffs_mag(C, rup.mag) * self._get_hanging_wall_coeffs_ztor(rup.ztor) * self._get_hanging_wall_coeffs_dip(rup.dip))
python
def _get_hanging_wall_term(self, C, rup, dists): """ Returns the hanging wall scaling term defined in equations 7 to 16 """ return (C["c10"] * self._get_hanging_wall_coeffs_rx(C, rup, dists.rx) * self._get_hanging_wall_coeffs_rrup(dists) * self._get_hanging_wall_coeffs_mag(C, rup.mag) * self._get_hanging_wall_coeffs_ztor(rup.ztor) * self._get_hanging_wall_coeffs_dip(rup.dip))
[ "def", "_get_hanging_wall_term", "(", "self", ",", "C", ",", "rup", ",", "dists", ")", ":", "return", "(", "C", "[", "\"c10\"", "]", "*", "self", ".", "_get_hanging_wall_coeffs_rx", "(", "C", ",", "rup", ",", "dists", ".", "rx", ")", "*", "self", "."...
Returns the hanging wall scaling term defined in equations 7 to 16
[ "Returns", "the", "hanging", "wall", "scaling", "term", "defined", "in", "equations", "7", "to", "16" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L191-L200
train
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_hanging_wall_coeffs_rx
def _get_hanging_wall_coeffs_rx(self, C, rup, r_x): """ Returns the hanging wall r-x caling term defined in equation 7 to 12 """ # Define coefficients R1 and R2 r_1 = rup.width * cos(radians(rup.dip)) r_2 = 62.0 * rup.mag - 350.0 fhngrx = np.zeros(len(r_x)) # Case when 0 <= Rx <= R1 idx = np.logical_and(r_x >= 0., r_x < r_1) fhngrx[idx] = self._get_f1rx(C, r_x[idx], r_1) # Case when Rx > R1 idx = r_x >= r_1 f2rx = self._get_f2rx(C, r_x[idx], r_1, r_2) f2rx[f2rx < 0.0] = 0.0 fhngrx[idx] = f2rx return fhngrx
python
def _get_hanging_wall_coeffs_rx(self, C, rup, r_x): """ Returns the hanging wall r-x caling term defined in equation 7 to 12 """ # Define coefficients R1 and R2 r_1 = rup.width * cos(radians(rup.dip)) r_2 = 62.0 * rup.mag - 350.0 fhngrx = np.zeros(len(r_x)) # Case when 0 <= Rx <= R1 idx = np.logical_and(r_x >= 0., r_x < r_1) fhngrx[idx] = self._get_f1rx(C, r_x[idx], r_1) # Case when Rx > R1 idx = r_x >= r_1 f2rx = self._get_f2rx(C, r_x[idx], r_1, r_2) f2rx[f2rx < 0.0] = 0.0 fhngrx[idx] = f2rx return fhngrx
[ "def", "_get_hanging_wall_coeffs_rx", "(", "self", ",", "C", ",", "rup", ",", "r_x", ")", ":", "r_1", "=", "rup", ".", "width", "*", "cos", "(", "radians", "(", "rup", ".", "dip", ")", ")", "r_2", "=", "62.0", "*", "rup", ".", "mag", "-", "350.0"...
Returns the hanging wall r-x caling term defined in equation 7 to 12
[ "Returns", "the", "hanging", "wall", "r", "-", "x", "caling", "term", "defined", "in", "equation", "7", "to", "12" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L202-L218
train
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_f1rx
def _get_f1rx(self, C, r_x, r_1): """ Defines the f1 scaling coefficient defined in equation 9 """ rxr1 = r_x / r_1 return C["h1"] + (C["h2"] * rxr1) + (C["h3"] * (rxr1 ** 2.))
python
def _get_f1rx(self, C, r_x, r_1): """ Defines the f1 scaling coefficient defined in equation 9 """ rxr1 = r_x / r_1 return C["h1"] + (C["h2"] * rxr1) + (C["h3"] * (rxr1 ** 2.))
[ "def", "_get_f1rx", "(", "self", ",", "C", ",", "r_x", ",", "r_1", ")", ":", "rxr1", "=", "r_x", "/", "r_1", "return", "C", "[", "\"h1\"", "]", "+", "(", "C", "[", "\"h2\"", "]", "*", "rxr1", ")", "+", "(", "C", "[", "\"h3\"", "]", "*", "("...
Defines the f1 scaling coefficient defined in equation 9
[ "Defines", "the", "f1", "scaling", "coefficient", "defined", "in", "equation", "9" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L220-L225
train
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_f2rx
def _get_f2rx(self, C, r_x, r_1, r_2): """ Defines the f2 scaling coefficient defined in equation 10 """ drx = (r_x - r_1) / (r_2 - r_1) return self.CONSTS["h4"] + (C["h5"] * drx) + (C["h6"] * (drx ** 2.))
python
def _get_f2rx(self, C, r_x, r_1, r_2): """ Defines the f2 scaling coefficient defined in equation 10 """ drx = (r_x - r_1) / (r_2 - r_1) return self.CONSTS["h4"] + (C["h5"] * drx) + (C["h6"] * (drx ** 2.))
[ "def", "_get_f2rx", "(", "self", ",", "C", ",", "r_x", ",", "r_1", ",", "r_2", ")", ":", "drx", "=", "(", "r_x", "-", "r_1", ")", "/", "(", "r_2", "-", "r_1", ")", "return", "self", ".", "CONSTS", "[", "\"h4\"", "]", "+", "(", "C", "[", "\"...
Defines the f2 scaling coefficient defined in equation 10
[ "Defines", "the", "f2", "scaling", "coefficient", "defined", "in", "equation", "10" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L227-L232
train
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_hanging_wall_coeffs_rrup
def _get_hanging_wall_coeffs_rrup(self, dists): """ Returns the hanging wall rrup term defined in equation 13 """ fhngrrup = np.ones(len(dists.rrup)) idx = dists.rrup > 0.0 fhngrrup[idx] = (dists.rrup[idx] - dists.rjb[idx]) / dists.rrup[idx] return fhngrrup
python
def _get_hanging_wall_coeffs_rrup(self, dists): """ Returns the hanging wall rrup term defined in equation 13 """ fhngrrup = np.ones(len(dists.rrup)) idx = dists.rrup > 0.0 fhngrrup[idx] = (dists.rrup[idx] - dists.rjb[idx]) / dists.rrup[idx] return fhngrrup
[ "def", "_get_hanging_wall_coeffs_rrup", "(", "self", ",", "dists", ")", ":", "fhngrrup", "=", "np", ".", "ones", "(", "len", "(", "dists", ".", "rrup", ")", ")", "idx", "=", "dists", ".", "rrup", ">", "0.0", "fhngrrup", "[", "idx", "]", "=", "(", "...
Returns the hanging wall rrup term defined in equation 13
[ "Returns", "the", "hanging", "wall", "rrup", "term", "defined", "in", "equation", "13" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L234-L241
train
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_hanging_wall_coeffs_mag
def _get_hanging_wall_coeffs_mag(self, C, mag): """ Returns the hanging wall magnitude term defined in equation 14 """ if mag < 5.5: return 0.0 elif mag > 6.5: return 1.0 + C["a2"] * (mag - 6.5) else: return (mag - 5.5) * (1.0 + C["a2"] * (mag - 6.5))
python
def _get_hanging_wall_coeffs_mag(self, C, mag): """ Returns the hanging wall magnitude term defined in equation 14 """ if mag < 5.5: return 0.0 elif mag > 6.5: return 1.0 + C["a2"] * (mag - 6.5) else: return (mag - 5.5) * (1.0 + C["a2"] * (mag - 6.5))
[ "def", "_get_hanging_wall_coeffs_mag", "(", "self", ",", "C", ",", "mag", ")", ":", "if", "mag", "<", "5.5", ":", "return", "0.0", "elif", "mag", ">", "6.5", ":", "return", "1.0", "+", "C", "[", "\"a2\"", "]", "*", "(", "mag", "-", "6.5", ")", "e...
Returns the hanging wall magnitude term defined in equation 14
[ "Returns", "the", "hanging", "wall", "magnitude", "term", "defined", "in", "equation", "14" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L243-L252
train
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_hypocentral_depth_term
def _get_hypocentral_depth_term(self, C, rup): """ Returns the hypocentral depth scaling term defined in equations 21 - 23 """ if rup.hypo_depth <= 7.0: fhyp_h = 0.0 elif rup.hypo_depth > 20.0: fhyp_h = 13.0 else: fhyp_h = rup.hypo_depth - 7.0 if rup.mag <= 5.5: fhyp_m = C["c17"] elif rup.mag > 6.5: fhyp_m = C["c18"] else: fhyp_m = C["c17"] + ((C["c18"] - C["c17"]) * (rup.mag - 5.5)) return fhyp_h * fhyp_m
python
def _get_hypocentral_depth_term(self, C, rup): """ Returns the hypocentral depth scaling term defined in equations 21 - 23 """ if rup.hypo_depth <= 7.0: fhyp_h = 0.0 elif rup.hypo_depth > 20.0: fhyp_h = 13.0 else: fhyp_h = rup.hypo_depth - 7.0 if rup.mag <= 5.5: fhyp_m = C["c17"] elif rup.mag > 6.5: fhyp_m = C["c18"] else: fhyp_m = C["c17"] + ((C["c18"] - C["c17"]) * (rup.mag - 5.5)) return fhyp_h * fhyp_m
[ "def", "_get_hypocentral_depth_term", "(", "self", ",", "C", ",", "rup", ")", ":", "if", "rup", ".", "hypo_depth", "<=", "7.0", ":", "fhyp_h", "=", "0.0", "elif", "rup", ".", "hypo_depth", ">", "20.0", ":", "fhyp_h", "=", "13.0", "else", ":", "fhyp_h",...
Returns the hypocentral depth scaling term defined in equations 21 - 23
[ "Returns", "the", "hypocentral", "depth", "scaling", "term", "defined", "in", "equations", "21", "-", "23" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L269-L286
train
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_fault_dip_term
def _get_fault_dip_term(self, C, rup): """ Returns the fault dip term, defined in equation 24 """ if rup.mag < 4.5: return C["c19"] * rup.dip elif rup.mag > 5.5: return 0.0 else: return C["c19"] * (5.5 - rup.mag) * rup.dip
python
def _get_fault_dip_term(self, C, rup): """ Returns the fault dip term, defined in equation 24 """ if rup.mag < 4.5: return C["c19"] * rup.dip elif rup.mag > 5.5: return 0.0 else: return C["c19"] * (5.5 - rup.mag) * rup.dip
[ "def", "_get_fault_dip_term", "(", "self", ",", "C", ",", "rup", ")", ":", "if", "rup", ".", "mag", "<", "4.5", ":", "return", "C", "[", "\"c19\"", "]", "*", "rup", ".", "dip", "elif", "rup", ".", "mag", ">", "5.5", ":", "return", "0.0", "else", ...
Returns the fault dip term, defined in equation 24
[ "Returns", "the", "fault", "dip", "term", "defined", "in", "equation", "24" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L288-L297
train
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_anelastic_attenuation_term
def _get_anelastic_attenuation_term(self, C, rrup): """ Returns the anelastic attenuation term defined in equation 25 """ f_atn = np.zeros(len(rrup)) idx = rrup >= 80.0 f_atn[idx] = (C["c20"] + C["Dc20"]) * (rrup[idx] - 80.0) return f_atn
python
def _get_anelastic_attenuation_term(self, C, rrup): """ Returns the anelastic attenuation term defined in equation 25 """ f_atn = np.zeros(len(rrup)) idx = rrup >= 80.0 f_atn[idx] = (C["c20"] + C["Dc20"]) * (rrup[idx] - 80.0) return f_atn
[ "def", "_get_anelastic_attenuation_term", "(", "self", ",", "C", ",", "rrup", ")", ":", "f_atn", "=", "np", ".", "zeros", "(", "len", "(", "rrup", ")", ")", "idx", "=", "rrup", ">=", "80.0", "f_atn", "[", "idx", "]", "=", "(", "C", "[", "\"c20\"", ...
Returns the anelastic attenuation term defined in equation 25
[ "Returns", "the", "anelastic", "attenuation", "term", "defined", "in", "equation", "25" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L299-L306
train
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_basin_response_term
def _get_basin_response_term(self, C, z2pt5): """ Returns the basin response term defined in equation 20 """ f_sed = np.zeros(len(z2pt5)) idx = z2pt5 < 1.0 f_sed[idx] = (C["c14"] + C["c15"] * float(self.CONSTS["SJ"])) *\ (z2pt5[idx] - 1.0) idx = z2pt5 > 3.0 f_sed[idx] = C["c16"] * C["k3"] * exp(-0.75) *\ (1.0 - np.exp(-0.25 * (z2pt5[idx] - 3.0))) return f_sed
python
def _get_basin_response_term(self, C, z2pt5): """ Returns the basin response term defined in equation 20 """ f_sed = np.zeros(len(z2pt5)) idx = z2pt5 < 1.0 f_sed[idx] = (C["c14"] + C["c15"] * float(self.CONSTS["SJ"])) *\ (z2pt5[idx] - 1.0) idx = z2pt5 > 3.0 f_sed[idx] = C["c16"] * C["k3"] * exp(-0.75) *\ (1.0 - np.exp(-0.25 * (z2pt5[idx] - 3.0))) return f_sed
[ "def", "_get_basin_response_term", "(", "self", ",", "C", ",", "z2pt5", ")", ":", "f_sed", "=", "np", ".", "zeros", "(", "len", "(", "z2pt5", ")", ")", "idx", "=", "z2pt5", "<", "1.0", "f_sed", "[", "idx", "]", "=", "(", "C", "[", "\"c14\"", "]",...
Returns the basin response term defined in equation 20
[ "Returns", "the", "basin", "response", "term", "defined", "in", "equation", "20" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L321-L332
train
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_shallow_site_response_term
def _get_shallow_site_response_term(self, C, vs30, pga_rock): """ Returns the shallow site response term defined in equations 17, 18 and 19 """ vs_mod = vs30 / C["k1"] # Get linear global site response term f_site_g = C["c11"] * np.log(vs_mod) idx = vs30 > C["k1"] f_site_g[idx] = f_site_g[idx] + (C["k2"] * self.CONSTS["n"] * np.log(vs_mod[idx])) # Get nonlinear site response term idx = np.logical_not(idx) if np.any(idx): f_site_g[idx] = f_site_g[idx] + C["k2"] * ( np.log(pga_rock[idx] + self.CONSTS["c"] * (vs_mod[idx] ** self.CONSTS["n"])) - np.log(pga_rock[idx] + self.CONSTS["c"]) ) # For Japan sites (SJ = 1) further scaling is needed (equation 19) if self.CONSTS["SJ"]: fsite_j = np.log(vs_mod) idx = vs30 > 200.0 if np.any(idx): fsite_j[idx] = (C["c13"] + C["k2"] * self.CONSTS["n"]) *\ fsite_j[idx] idx = np.logical_not(idx) if np.any(idx): fsite_j[idx] = (C["c12"] + C["k2"] * self.CONSTS["n"]) *\ (fsite_j[idx] - np.log(200.0 / C["k1"])) return f_site_g + fsite_j else: return f_site_g
python
def _get_shallow_site_response_term(self, C, vs30, pga_rock): """ Returns the shallow site response term defined in equations 17, 18 and 19 """ vs_mod = vs30 / C["k1"] # Get linear global site response term f_site_g = C["c11"] * np.log(vs_mod) idx = vs30 > C["k1"] f_site_g[idx] = f_site_g[idx] + (C["k2"] * self.CONSTS["n"] * np.log(vs_mod[idx])) # Get nonlinear site response term idx = np.logical_not(idx) if np.any(idx): f_site_g[idx] = f_site_g[idx] + C["k2"] * ( np.log(pga_rock[idx] + self.CONSTS["c"] * (vs_mod[idx] ** self.CONSTS["n"])) - np.log(pga_rock[idx] + self.CONSTS["c"]) ) # For Japan sites (SJ = 1) further scaling is needed (equation 19) if self.CONSTS["SJ"]: fsite_j = np.log(vs_mod) idx = vs30 > 200.0 if np.any(idx): fsite_j[idx] = (C["c13"] + C["k2"] * self.CONSTS["n"]) *\ fsite_j[idx] idx = np.logical_not(idx) if np.any(idx): fsite_j[idx] = (C["c12"] + C["k2"] * self.CONSTS["n"]) *\ (fsite_j[idx] - np.log(200.0 / C["k1"])) return f_site_g + fsite_j else: return f_site_g
[ "def", "_get_shallow_site_response_term", "(", "self", ",", "C", ",", "vs30", ",", "pga_rock", ")", ":", "vs_mod", "=", "vs30", "/", "C", "[", "\"k1\"", "]", "f_site_g", "=", "C", "[", "\"c11\"", "]", "*", "np", ".", "log", "(", "vs_mod", ")", "idx",...
Returns the shallow site response term defined in equations 17, 18 and 19
[ "Returns", "the", "shallow", "site", "response", "term", "defined", "in", "equations", "17", "18", "and", "19" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L334-L369
train
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_stddevs
def _get_stddevs(self, C, C_PGA, rup, sites, pga1100, stddev_types): """ Returns the inter- and intra-event and total standard deviations """ # Get stddevs for PGA on basement rock tau_lnpga_b, phi_lnpga_b = self._get_stddevs_pga(C_PGA, rup) num_sites = len(sites.vs30) # Get tau_lny on the basement rock tau_lnyb = self._get_taulny(C, rup.mag) # Get phi_lny on the basement rock phi_lnyb = np.sqrt(self._get_philny(C, rup.mag) ** 2. - self.CONSTS["philnAF"] ** 2.) # Get site scaling term alpha = self._get_alpha(C, sites.vs30, pga1100) # Evaluate tau according to equation 29 tau = np.sqrt( (tau_lnyb ** 2.) + ((alpha ** 2.) * (tau_lnpga_b ** 2.)) + (2.0 * alpha * C["rholny"] * tau_lnyb * tau_lnpga_b)) # Evaluate phi according to equation 30 phi = np.sqrt( (phi_lnyb ** 2.) + (self.CONSTS["philnAF"] ** 2.) + ((alpha ** 2.) * (phi_lnpga_b ** 2.)) + (2.0 * alpha * C["rholny"] * phi_lnyb * phi_lnpga_b)) stddevs = [] for stddev_type in stddev_types: assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const.StdDev.TOTAL: stddevs.append(np.sqrt((tau ** 2.) + (phi ** 2.)) + np.zeros(num_sites)) elif stddev_type == const.StdDev.INTRA_EVENT: stddevs.append(phi + np.zeros(num_sites)) elif stddev_type == const.StdDev.INTER_EVENT: stddevs.append(tau + np.zeros(num_sites)) return stddevs
python
def _get_stddevs(self, C, C_PGA, rup, sites, pga1100, stddev_types): """ Returns the inter- and intra-event and total standard deviations """ # Get stddevs for PGA on basement rock tau_lnpga_b, phi_lnpga_b = self._get_stddevs_pga(C_PGA, rup) num_sites = len(sites.vs30) # Get tau_lny on the basement rock tau_lnyb = self._get_taulny(C, rup.mag) # Get phi_lny on the basement rock phi_lnyb = np.sqrt(self._get_philny(C, rup.mag) ** 2. - self.CONSTS["philnAF"] ** 2.) # Get site scaling term alpha = self._get_alpha(C, sites.vs30, pga1100) # Evaluate tau according to equation 29 tau = np.sqrt( (tau_lnyb ** 2.) + ((alpha ** 2.) * (tau_lnpga_b ** 2.)) + (2.0 * alpha * C["rholny"] * tau_lnyb * tau_lnpga_b)) # Evaluate phi according to equation 30 phi = np.sqrt( (phi_lnyb ** 2.) + (self.CONSTS["philnAF"] ** 2.) + ((alpha ** 2.) * (phi_lnpga_b ** 2.)) + (2.0 * alpha * C["rholny"] * phi_lnyb * phi_lnpga_b)) stddevs = [] for stddev_type in stddev_types: assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const.StdDev.TOTAL: stddevs.append(np.sqrt((tau ** 2.) + (phi ** 2.)) + np.zeros(num_sites)) elif stddev_type == const.StdDev.INTRA_EVENT: stddevs.append(phi + np.zeros(num_sites)) elif stddev_type == const.StdDev.INTER_EVENT: stddevs.append(tau + np.zeros(num_sites)) return stddevs
[ "def", "_get_stddevs", "(", "self", ",", "C", ",", "C_PGA", ",", "rup", ",", "sites", ",", "pga1100", ",", "stddev_types", ")", ":", "tau_lnpga_b", ",", "phi_lnpga_b", "=", "self", ".", "_get_stddevs_pga", "(", "C_PGA", ",", "rup", ")", "num_sites", "=",...
Returns the inter- and intra-event and total standard deviations
[ "Returns", "the", "inter", "-", "and", "intra", "-", "event", "and", "total", "standard", "deviations" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L371-L407
train
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_stddevs_pga
def _get_stddevs_pga(self, C, rup): """ Returns the inter- and intra-event coefficients for PGA """ tau_lnpga_b = self._get_taulny(C, rup.mag) phi_lnpga_b = np.sqrt(self._get_philny(C, rup.mag) ** 2. - self.CONSTS["philnAF"] ** 2.) return tau_lnpga_b, phi_lnpga_b
python
def _get_stddevs_pga(self, C, rup): """ Returns the inter- and intra-event coefficients for PGA """ tau_lnpga_b = self._get_taulny(C, rup.mag) phi_lnpga_b = np.sqrt(self._get_philny(C, rup.mag) ** 2. - self.CONSTS["philnAF"] ** 2.) return tau_lnpga_b, phi_lnpga_b
[ "def", "_get_stddevs_pga", "(", "self", ",", "C", ",", "rup", ")", ":", "tau_lnpga_b", "=", "self", ".", "_get_taulny", "(", "C", ",", "rup", ".", "mag", ")", "phi_lnpga_b", "=", "np", ".", "sqrt", "(", "self", ".", "_get_philny", "(", "C", ",", "r...
Returns the inter- and intra-event coefficients for PGA
[ "Returns", "the", "inter", "-", "and", "intra", "-", "event", "coefficients", "for", "PGA" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L409-L416
train
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_alpha
def _get_alpha(self, C, vs30, pga_rock): """ Returns the alpha, the linearised functional relationship between the site amplification and the PGA on rock. Equation 31. """ alpha = np.zeros(len(pga_rock)) idx = vs30 < C["k1"] if np.any(idx): af1 = pga_rock[idx] +\ self.CONSTS["c"] * ((vs30[idx] / C["k1"]) ** self.CONSTS["n"]) af2 = pga_rock[idx] + self.CONSTS["c"] alpha[idx] = C["k2"] * pga_rock[idx] * ((1.0 / af1) - (1.0 / af2)) return alpha
python
def _get_alpha(self, C, vs30, pga_rock): """ Returns the alpha, the linearised functional relationship between the site amplification and the PGA on rock. Equation 31. """ alpha = np.zeros(len(pga_rock)) idx = vs30 < C["k1"] if np.any(idx): af1 = pga_rock[idx] +\ self.CONSTS["c"] * ((vs30[idx] / C["k1"]) ** self.CONSTS["n"]) af2 = pga_rock[idx] + self.CONSTS["c"] alpha[idx] = C["k2"] * pga_rock[idx] * ((1.0 / af1) - (1.0 / af2)) return alpha
[ "def", "_get_alpha", "(", "self", ",", "C", ",", "vs30", ",", "pga_rock", ")", ":", "alpha", "=", "np", ".", "zeros", "(", "len", "(", "pga_rock", ")", ")", "idx", "=", "vs30", "<", "C", "[", "\"k1\"", "]", "if", "np", ".", "any", "(", "idx", ...
Returns the alpha, the linearised functional relationship between the site amplification and the PGA on rock. Equation 31.
[ "Returns", "the", "alpha", "the", "linearised", "functional", "relationship", "between", "the", "site", "amplification", "and", "the", "PGA", "on", "rock", ".", "Equation", "31", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L442-L454
train
gem/oq-engine
openquake/hmtk/seismicity/utils.py
decimal_time
def decimal_time(year, month, day, hour, minute, second): """ Returns the full time as a decimal value :param year: Year of events (integer numpy.ndarray) :param month: Month of events (integer numpy.ndarray) :param day: Days of event (integer numpy.ndarray) :param hour: Hour of event (integer numpy.ndarray) :param minute: Minute of event (integer numpy.ndarray) :param second: Second of event (float numpy.ndarray) :returns decimal_time: Decimal representation of the time (as numpy.ndarray) """ tmo = np.ones_like(year, dtype=int) tda = np.ones_like(year, dtype=int) tho = np.zeros_like(year, dtype=int) tmi = np.zeros_like(year, dtype=int) tse = np.zeros_like(year, dtype=float) # # Checking inputs if any(month < 1) or any(month > 12): raise ValueError('Month must be in [1, 12]') if any(day < 1) or any(day > 31): raise ValueError('Day must be in [1, 31]') if any(hour < 0) or any(hour > 24): raise ValueError('Hour must be in [0, 24]') if any(minute < 0) or any(minute > 60): raise ValueError('Minute must be in [0, 60]') if any(second < 0) or any(second > 60): raise ValueError('Second must be in [0, 60]') # # Initialising values if any(month): tmo = month if any(day): tda = day if any(hour): tho = hour if any(minute): tmi = minute if any(second): tse = second # # Computing decimal tmonth = tmo - 1 day_count = MARKER_NORMAL[tmonth] + tda - 1 id_leap = leap_check(year) leap_loc = np.where(id_leap)[0] day_count[leap_loc] = MARKER_LEAP[tmonth[leap_loc]] + tda[leap_loc] - 1 year_secs = ((day_count.astype(float) * SECONDS_PER_DAY) + tse + (60. * tmi.astype(float)) + (3600. * tho.astype(float))) dtime = year.astype(float) + (year_secs / (365. * 24. * 3600.)) dtime[leap_loc] = year[leap_loc].astype(float) + \ (year_secs[leap_loc] / (366. * 24. * 3600.)) return dtime
python
def decimal_time(year, month, day, hour, minute, second): """ Returns the full time as a decimal value :param year: Year of events (integer numpy.ndarray) :param month: Month of events (integer numpy.ndarray) :param day: Days of event (integer numpy.ndarray) :param hour: Hour of event (integer numpy.ndarray) :param minute: Minute of event (integer numpy.ndarray) :param second: Second of event (float numpy.ndarray) :returns decimal_time: Decimal representation of the time (as numpy.ndarray) """ tmo = np.ones_like(year, dtype=int) tda = np.ones_like(year, dtype=int) tho = np.zeros_like(year, dtype=int) tmi = np.zeros_like(year, dtype=int) tse = np.zeros_like(year, dtype=float) # # Checking inputs if any(month < 1) or any(month > 12): raise ValueError('Month must be in [1, 12]') if any(day < 1) or any(day > 31): raise ValueError('Day must be in [1, 31]') if any(hour < 0) or any(hour > 24): raise ValueError('Hour must be in [0, 24]') if any(minute < 0) or any(minute > 60): raise ValueError('Minute must be in [0, 60]') if any(second < 0) or any(second > 60): raise ValueError('Second must be in [0, 60]') # # Initialising values if any(month): tmo = month if any(day): tda = day if any(hour): tho = hour if any(minute): tmi = minute if any(second): tse = second # # Computing decimal tmonth = tmo - 1 day_count = MARKER_NORMAL[tmonth] + tda - 1 id_leap = leap_check(year) leap_loc = np.where(id_leap)[0] day_count[leap_loc] = MARKER_LEAP[tmonth[leap_loc]] + tda[leap_loc] - 1 year_secs = ((day_count.astype(float) * SECONDS_PER_DAY) + tse + (60. * tmi.astype(float)) + (3600. * tho.astype(float))) dtime = year.astype(float) + (year_secs / (365. * 24. * 3600.)) dtime[leap_loc] = year[leap_loc].astype(float) + \ (year_secs[leap_loc] / (366. * 24. * 3600.)) return dtime
[ "def", "decimal_time", "(", "year", ",", "month", ",", "day", ",", "hour", ",", "minute", ",", "second", ")", ":", "tmo", "=", "np", ".", "ones_like", "(", "year", ",", "dtype", "=", "int", ")", "tda", "=", "np", ".", "ones_like", "(", "year", ",...
Returns the full time as a decimal value :param year: Year of events (integer numpy.ndarray) :param month: Month of events (integer numpy.ndarray) :param day: Days of event (integer numpy.ndarray) :param hour: Hour of event (integer numpy.ndarray) :param minute: Minute of event (integer numpy.ndarray) :param second: Second of event (float numpy.ndarray) :returns decimal_time: Decimal representation of the time (as numpy.ndarray)
[ "Returns", "the", "full", "time", "as", "a", "decimal", "value" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/utils.py#L137-L197
train
gem/oq-engine
openquake/hmtk/seismicity/utils.py
haversine
def haversine(lon1, lat1, lon2, lat2, radians=False, earth_rad=6371.227): """ Allows to calculate geographical distance using the haversine formula. :param lon1: longitude of the first set of locations :type lon1: numpy.ndarray :param lat1: latitude of the frist set of locations :type lat1: numpy.ndarray :param lon2: longitude of the second set of locations :type lon2: numpy.float64 :param lat2: latitude of the second set of locations :type lat2: numpy.float64 :keyword radians: states if locations are given in terms of radians :type radians: bool :keyword earth_rad: radius of the earth in km :type earth_rad: float :returns: geographical distance in km :rtype: numpy.ndarray """ if not radians: cfact = np.pi / 180. lon1 = cfact * lon1 lat1 = cfact * lat1 lon2 = cfact * lon2 lat2 = cfact * lat2 # Number of locations in each set of points if not np.shape(lon1): nlocs1 = 1 lon1 = np.array([lon1]) lat1 = np.array([lat1]) else: nlocs1 = np.max(np.shape(lon1)) if not np.shape(lon2): nlocs2 = 1 lon2 = np.array([lon2]) lat2 = np.array([lat2]) else: nlocs2 = np.max(np.shape(lon2)) # Pre-allocate array distance = np.zeros((nlocs1, nlocs2)) i = 0 while i < nlocs2: # Perform distance calculation dlat = lat1 - lat2[i] dlon = lon1 - lon2[i] aval = (np.sin(dlat / 2.) ** 2.) + (np.cos(lat1) * np.cos(lat2[i]) * (np.sin(dlon / 2.) ** 2.)) distance[:, i] = (2. * earth_rad * np.arctan2(np.sqrt(aval), np.sqrt(1 - aval))).T i += 1 return distance
python
def haversine(lon1, lat1, lon2, lat2, radians=False, earth_rad=6371.227): """ Allows to calculate geographical distance using the haversine formula. :param lon1: longitude of the first set of locations :type lon1: numpy.ndarray :param lat1: latitude of the frist set of locations :type lat1: numpy.ndarray :param lon2: longitude of the second set of locations :type lon2: numpy.float64 :param lat2: latitude of the second set of locations :type lat2: numpy.float64 :keyword radians: states if locations are given in terms of radians :type radians: bool :keyword earth_rad: radius of the earth in km :type earth_rad: float :returns: geographical distance in km :rtype: numpy.ndarray """ if not radians: cfact = np.pi / 180. lon1 = cfact * lon1 lat1 = cfact * lat1 lon2 = cfact * lon2 lat2 = cfact * lat2 # Number of locations in each set of points if not np.shape(lon1): nlocs1 = 1 lon1 = np.array([lon1]) lat1 = np.array([lat1]) else: nlocs1 = np.max(np.shape(lon1)) if not np.shape(lon2): nlocs2 = 1 lon2 = np.array([lon2]) lat2 = np.array([lat2]) else: nlocs2 = np.max(np.shape(lon2)) # Pre-allocate array distance = np.zeros((nlocs1, nlocs2)) i = 0 while i < nlocs2: # Perform distance calculation dlat = lat1 - lat2[i] dlon = lon1 - lon2[i] aval = (np.sin(dlat / 2.) ** 2.) + (np.cos(lat1) * np.cos(lat2[i]) * (np.sin(dlon / 2.) ** 2.)) distance[:, i] = (2. * earth_rad * np.arctan2(np.sqrt(aval), np.sqrt(1 - aval))).T i += 1 return distance
[ "def", "haversine", "(", "lon1", ",", "lat1", ",", "lon2", ",", "lat2", ",", "radians", "=", "False", ",", "earth_rad", "=", "6371.227", ")", ":", "if", "not", "radians", ":", "cfact", "=", "np", ".", "pi", "/", "180.", "lon1", "=", "cfact", "*", ...
Allows to calculate geographical distance using the haversine formula. :param lon1: longitude of the first set of locations :type lon1: numpy.ndarray :param lat1: latitude of the frist set of locations :type lat1: numpy.ndarray :param lon2: longitude of the second set of locations :type lon2: numpy.float64 :param lat2: latitude of the second set of locations :type lat2: numpy.float64 :keyword radians: states if locations are given in terms of radians :type radians: bool :keyword earth_rad: radius of the earth in km :type earth_rad: float :returns: geographical distance in km :rtype: numpy.ndarray
[ "Allows", "to", "calculate", "geographical", "distance", "using", "the", "haversine", "formula", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/utils.py#L200-L252
train
gem/oq-engine
openquake/hmtk/seismicity/utils.py
greg2julian
def greg2julian(year, month, day, hour, minute, second): """ Function to convert a date from Gregorian to Julian format :param year: Year of events (integer numpy.ndarray) :param month: Month of events (integer numpy.ndarray) :param day: Days of event (integer numpy.ndarray) :param hour: Hour of event (integer numpy.ndarray) :param minute: Minute of event (integer numpy.ndarray) :param second: Second of event (float numpy.ndarray) :returns julian_time: Julian representation of the time (as float numpy.ndarray) """ year = year.astype(float) month = month.astype(float) day = day.astype(float) timeut = hour.astype(float) + (minute.astype(float) / 60.0) + \ (second / 3600.0) julian_time = ((367.0 * year) - np.floor( 7.0 * (year + np.floor((month + 9.0) / 12.0)) / 4.0) - np.floor(3.0 * (np.floor((year + (month - 9.0) / 7.0) / 100.0) + 1.0) / 4.0) + np.floor((275.0 * month) / 9.0) + day + 1721028.5 + (timeut / 24.0)) return julian_time
python
def greg2julian(year, month, day, hour, minute, second): """ Function to convert a date from Gregorian to Julian format :param year: Year of events (integer numpy.ndarray) :param month: Month of events (integer numpy.ndarray) :param day: Days of event (integer numpy.ndarray) :param hour: Hour of event (integer numpy.ndarray) :param minute: Minute of event (integer numpy.ndarray) :param second: Second of event (float numpy.ndarray) :returns julian_time: Julian representation of the time (as float numpy.ndarray) """ year = year.astype(float) month = month.astype(float) day = day.astype(float) timeut = hour.astype(float) + (minute.astype(float) / 60.0) + \ (second / 3600.0) julian_time = ((367.0 * year) - np.floor( 7.0 * (year + np.floor((month + 9.0) / 12.0)) / 4.0) - np.floor(3.0 * (np.floor((year + (month - 9.0) / 7.0) / 100.0) + 1.0) / 4.0) + np.floor((275.0 * month) / 9.0) + day + 1721028.5 + (timeut / 24.0)) return julian_time
[ "def", "greg2julian", "(", "year", ",", "month", ",", "day", ",", "hour", ",", "minute", ",", "second", ")", ":", "year", "=", "year", ".", "astype", "(", "float", ")", "month", "=", "month", ".", "astype", "(", "float", ")", "day", "=", "day", "...
Function to convert a date from Gregorian to Julian format :param year: Year of events (integer numpy.ndarray) :param month: Month of events (integer numpy.ndarray) :param day: Days of event (integer numpy.ndarray) :param hour: Hour of event (integer numpy.ndarray) :param minute: Minute of event (integer numpy.ndarray) :param second: Second of event (float numpy.ndarray) :returns julian_time: Julian representation of the time (as float numpy.ndarray)
[ "Function", "to", "convert", "a", "date", "from", "Gregorian", "to", "Julian", "format" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/utils.py#L255-L289
train
gem/oq-engine
openquake/hmtk/seismicity/utils.py
sample_truncated_gaussian_vector
def sample_truncated_gaussian_vector(data, uncertainties, bounds=None): ''' Samples a Gaussian distribution subject to boundaries on the data :param numpy.ndarray data: Vector of N data values :param numpy.ndarray uncertainties: Vector of N data uncertainties :param int number_bootstraps: Number of bootstrap samples :param tuple bounds: (Lower, Upper) bound of data space ''' nvals = len(data) if bounds: # if bounds[0] or (fabs(bounds[0]) < PRECISION): if bounds[0] is not None: lower_bound = (bounds[0] - data) / uncertainties else: lower_bound = -np.inf * np.ones_like(data) # if bounds[1] or (fabs(bounds[1]) < PRECISION): if bounds[1] is not None: upper_bound = (bounds[1] - data) / uncertainties else: upper_bound = np.inf * np.ones_like(data) sample = hmtk_truncnorm.rvs(lower_bound, upper_bound, size=nvals) else: sample = np.random.normal(0., 1., nvals) return data + uncertainties * sample
python
def sample_truncated_gaussian_vector(data, uncertainties, bounds=None): ''' Samples a Gaussian distribution subject to boundaries on the data :param numpy.ndarray data: Vector of N data values :param numpy.ndarray uncertainties: Vector of N data uncertainties :param int number_bootstraps: Number of bootstrap samples :param tuple bounds: (Lower, Upper) bound of data space ''' nvals = len(data) if bounds: # if bounds[0] or (fabs(bounds[0]) < PRECISION): if bounds[0] is not None: lower_bound = (bounds[0] - data) / uncertainties else: lower_bound = -np.inf * np.ones_like(data) # if bounds[1] or (fabs(bounds[1]) < PRECISION): if bounds[1] is not None: upper_bound = (bounds[1] - data) / uncertainties else: upper_bound = np.inf * np.ones_like(data) sample = hmtk_truncnorm.rvs(lower_bound, upper_bound, size=nvals) else: sample = np.random.normal(0., 1., nvals) return data + uncertainties * sample
[ "def", "sample_truncated_gaussian_vector", "(", "data", ",", "uncertainties", ",", "bounds", "=", "None", ")", ":", "nvals", "=", "len", "(", "data", ")", "if", "bounds", ":", "if", "bounds", "[", "0", "]", "is", "not", "None", ":", "lower_bound", "=", ...
Samples a Gaussian distribution subject to boundaries on the data :param numpy.ndarray data: Vector of N data values :param numpy.ndarray uncertainties: Vector of N data uncertainties :param int number_bootstraps: Number of bootstrap samples :param tuple bounds: (Lower, Upper) bound of data space
[ "Samples", "a", "Gaussian", "distribution", "subject", "to", "boundaries", "on", "the", "data" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/utils.py#L333-L363
train
gem/oq-engine
openquake/hmtk/seismicity/utils.py
hmtk_histogram_2D
def hmtk_histogram_2D(xvalues, yvalues, bins, x_offset=1.0E-10, y_offset=1.0E-10): """ See the explanation for the 1D case - now applied to 2D. :param numpy.ndarray xvalues: Values of x-data :param numpy.ndarray yvalues: Values of y-data :param tuple bins: Tuple containing bin intervals for x-data and y-data (as numpy arrays) :param float x_offset: Small amount to offset the x-bins for floating point precision :param float y_offset: Small amount to offset the y-bins for floating point precision :returns: Count in each bin (as float) """ xbins, ybins = (bins[0] - x_offset, bins[1] - y_offset) n_x = len(xbins) - 1 n_y = len(ybins) - 1 counter = np.zeros([n_y, n_x], dtype=float) for j in range(n_y): y_idx = np.logical_and(yvalues >= ybins[j], yvalues < ybins[j + 1]) x_vals = xvalues[y_idx] for i in range(n_x): idx = np.logical_and(x_vals >= xbins[i], x_vals < xbins[i + 1]) counter[j, i] += float(np.sum(idx)) return counter.T
python
def hmtk_histogram_2D(xvalues, yvalues, bins, x_offset=1.0E-10, y_offset=1.0E-10): """ See the explanation for the 1D case - now applied to 2D. :param numpy.ndarray xvalues: Values of x-data :param numpy.ndarray yvalues: Values of y-data :param tuple bins: Tuple containing bin intervals for x-data and y-data (as numpy arrays) :param float x_offset: Small amount to offset the x-bins for floating point precision :param float y_offset: Small amount to offset the y-bins for floating point precision :returns: Count in each bin (as float) """ xbins, ybins = (bins[0] - x_offset, bins[1] - y_offset) n_x = len(xbins) - 1 n_y = len(ybins) - 1 counter = np.zeros([n_y, n_x], dtype=float) for j in range(n_y): y_idx = np.logical_and(yvalues >= ybins[j], yvalues < ybins[j + 1]) x_vals = xvalues[y_idx] for i in range(n_x): idx = np.logical_and(x_vals >= xbins[i], x_vals < xbins[i + 1]) counter[j, i] += float(np.sum(idx)) return counter.T
[ "def", "hmtk_histogram_2D", "(", "xvalues", ",", "yvalues", ",", "bins", ",", "x_offset", "=", "1.0E-10", ",", "y_offset", "=", "1.0E-10", ")", ":", "xbins", ",", "ybins", "=", "(", "bins", "[", "0", "]", "-", "x_offset", ",", "bins", "[", "1", "]", ...
See the explanation for the 1D case - now applied to 2D. :param numpy.ndarray xvalues: Values of x-data :param numpy.ndarray yvalues: Values of y-data :param tuple bins: Tuple containing bin intervals for x-data and y-data (as numpy arrays) :param float x_offset: Small amount to offset the x-bins for floating point precision :param float y_offset: Small amount to offset the y-bins for floating point precision :returns: Count in each bin (as float)
[ "See", "the", "explanation", "for", "the", "1D", "case", "-", "now", "applied", "to", "2D", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/utils.py#L404-L432
train
gem/oq-engine
openquake/hmtk/seismicity/utils.py
bootstrap_histogram_1D
def bootstrap_histogram_1D( values, intervals, uncertainties=None, normalisation=False, number_bootstraps=None, boundaries=None): ''' Bootstrap samples a set of vectors :param numpy.ndarray values: The data values :param numpy.ndarray intervals: The bin edges :param numpy.ndarray uncertainties: The standard deviations of each observation :param bool normalisation: If True then returns the histogram as a density function :param int number_bootstraps: Number of bootstraps :param tuple boundaries: (Lower, Upper) bounds on the data :param returns: 1-D histogram of data ''' if not number_bootstraps or np.all(np.fabs(uncertainties < PRECISION)): # No bootstraps or all uncertaintes are zero - return ordinary # histogram #output = np.histogram(values, intervals)[0] output = hmtk_histogram_1D(values, intervals) if normalisation: output = output / float(np.sum(output)) else: output = output return output else: temp_hist = np.zeros([len(intervals) - 1, number_bootstraps], dtype=float) for iloc in range(0, number_bootstraps): sample = sample_truncated_gaussian_vector(values, uncertainties, boundaries) #output = np.histogram(sample, intervals)[0] output = hmtk_histogram_1D(sample, intervals) temp_hist[:, iloc] = output output = np.sum(temp_hist, axis=1) if normalisation: output = output / float(np.sum(output)) else: output = output / float(number_bootstraps) return output
python
def bootstrap_histogram_1D( values, intervals, uncertainties=None, normalisation=False, number_bootstraps=None, boundaries=None): ''' Bootstrap samples a set of vectors :param numpy.ndarray values: The data values :param numpy.ndarray intervals: The bin edges :param numpy.ndarray uncertainties: The standard deviations of each observation :param bool normalisation: If True then returns the histogram as a density function :param int number_bootstraps: Number of bootstraps :param tuple boundaries: (Lower, Upper) bounds on the data :param returns: 1-D histogram of data ''' if not number_bootstraps or np.all(np.fabs(uncertainties < PRECISION)): # No bootstraps or all uncertaintes are zero - return ordinary # histogram #output = np.histogram(values, intervals)[0] output = hmtk_histogram_1D(values, intervals) if normalisation: output = output / float(np.sum(output)) else: output = output return output else: temp_hist = np.zeros([len(intervals) - 1, number_bootstraps], dtype=float) for iloc in range(0, number_bootstraps): sample = sample_truncated_gaussian_vector(values, uncertainties, boundaries) #output = np.histogram(sample, intervals)[0] output = hmtk_histogram_1D(sample, intervals) temp_hist[:, iloc] = output output = np.sum(temp_hist, axis=1) if normalisation: output = output / float(np.sum(output)) else: output = output / float(number_bootstraps) return output
[ "def", "bootstrap_histogram_1D", "(", "values", ",", "intervals", ",", "uncertainties", "=", "None", ",", "normalisation", "=", "False", ",", "number_bootstraps", "=", "None", ",", "boundaries", "=", "None", ")", ":", "if", "not", "number_bootstraps", "or", "n...
Bootstrap samples a set of vectors :param numpy.ndarray values: The data values :param numpy.ndarray intervals: The bin edges :param numpy.ndarray uncertainties: The standard deviations of each observation :param bool normalisation: If True then returns the histogram as a density function :param int number_bootstraps: Number of bootstraps :param tuple boundaries: (Lower, Upper) bounds on the data :param returns: 1-D histogram of data
[ "Bootstrap", "samples", "a", "set", "of", "vectors" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/utils.py#L435-L483
train
gem/oq-engine
openquake/hmtk/seismicity/utils.py
bootstrap_histogram_2D
def bootstrap_histogram_2D( xvalues, yvalues, xbins, ybins, boundaries=[None, None], xsigma=None, ysigma=None, normalisation=False, number_bootstraps=None): ''' Calculates a 2D histogram of data, allowing for normalisation and bootstrap sampling :param numpy.ndarray xvalues: Data values of the first variable :param numpy.ndarray yvalues: Data values of the second variable :param numpy.ndarray xbins: Bin edges for the first variable :param numpy.ndarray ybins: Bin edges for the second variable :param list boundaries: List of (Lower, Upper) tuples corresponding to the bounds of the two data sets :param numpy.ndarray xsigma: Error values (standard deviatons) on first variable :param numpy.ndarray ysigma: Error values (standard deviatons) on second variable :param bool normalisation: If True then returns the histogram as a density function :param int number_bootstraps: Number of bootstraps :param returns: 2-D histogram of data ''' if (xsigma is None and ysigma is None) or not number_bootstraps: # No sampling - return simple 2-D histrogram #output = np.histogram2d(xvalues, yvalues, bins=[xbins, ybins])[0] output = hmtk_histogram_2D(xvalues, yvalues, bins=(xbins, ybins)) if normalisation: output = output / float(np.sum(output)) return output else: if xsigma is None: xsigma = np.zeros(len(xvalues), dtype=float) if ysigma is None: ysigma = np.zeros(len(yvalues), dtype=float) temp_hist = np.zeros( [len(xbins) - 1, len(ybins) - 1, number_bootstraps], dtype=float) for iloc in range(0, number_bootstraps): xsample = sample_truncated_gaussian_vector(xvalues, xsigma, boundaries[0]) ysample = sample_truncated_gaussian_vector(yvalues, ysigma, boundaries[0]) # temp_hist[:, :, iloc] = np.histogram2d(xsample, # ysample, # bins=[xbins, ybins])[0] temp_hist[:, :, iloc] = hmtk_histogram_2D(xsample, ysample, bins=(xbins, ybins)) if normalisation: output = np.sum(temp_hist, axis=2) output = output / np.sum(output) else: output = np.sum(temp_hist, axis=2) / float(number_bootstraps) return 
output
python
def bootstrap_histogram_2D( xvalues, yvalues, xbins, ybins, boundaries=[None, None], xsigma=None, ysigma=None, normalisation=False, number_bootstraps=None): ''' Calculates a 2D histogram of data, allowing for normalisation and bootstrap sampling :param numpy.ndarray xvalues: Data values of the first variable :param numpy.ndarray yvalues: Data values of the second variable :param numpy.ndarray xbins: Bin edges for the first variable :param numpy.ndarray ybins: Bin edges for the second variable :param list boundaries: List of (Lower, Upper) tuples corresponding to the bounds of the two data sets :param numpy.ndarray xsigma: Error values (standard deviatons) on first variable :param numpy.ndarray ysigma: Error values (standard deviatons) on second variable :param bool normalisation: If True then returns the histogram as a density function :param int number_bootstraps: Number of bootstraps :param returns: 2-D histogram of data ''' if (xsigma is None and ysigma is None) or not number_bootstraps: # No sampling - return simple 2-D histrogram #output = np.histogram2d(xvalues, yvalues, bins=[xbins, ybins])[0] output = hmtk_histogram_2D(xvalues, yvalues, bins=(xbins, ybins)) if normalisation: output = output / float(np.sum(output)) return output else: if xsigma is None: xsigma = np.zeros(len(xvalues), dtype=float) if ysigma is None: ysigma = np.zeros(len(yvalues), dtype=float) temp_hist = np.zeros( [len(xbins) - 1, len(ybins) - 1, number_bootstraps], dtype=float) for iloc in range(0, number_bootstraps): xsample = sample_truncated_gaussian_vector(xvalues, xsigma, boundaries[0]) ysample = sample_truncated_gaussian_vector(yvalues, ysigma, boundaries[0]) # temp_hist[:, :, iloc] = np.histogram2d(xsample, # ysample, # bins=[xbins, ybins])[0] temp_hist[:, :, iloc] = hmtk_histogram_2D(xsample, ysample, bins=(xbins, ybins)) if normalisation: output = np.sum(temp_hist, axis=2) output = output / np.sum(output) else: output = np.sum(temp_hist, axis=2) / float(number_bootstraps) return 
output
[ "def", "bootstrap_histogram_2D", "(", "xvalues", ",", "yvalues", ",", "xbins", ",", "ybins", ",", "boundaries", "=", "[", "None", ",", "None", "]", ",", "xsigma", "=", "None", ",", "ysigma", "=", "None", ",", "normalisation", "=", "False", ",", "number_b...
Calculates a 2D histogram of data, allowing for normalisation and bootstrap sampling :param numpy.ndarray xvalues: Data values of the first variable :param numpy.ndarray yvalues: Data values of the second variable :param numpy.ndarray xbins: Bin edges for the first variable :param numpy.ndarray ybins: Bin edges for the second variable :param list boundaries: List of (Lower, Upper) tuples corresponding to the bounds of the two data sets :param numpy.ndarray xsigma: Error values (standard deviatons) on first variable :param numpy.ndarray ysigma: Error values (standard deviatons) on second variable :param bool normalisation: If True then returns the histogram as a density function :param int number_bootstraps: Number of bootstraps :param returns: 2-D histogram of data
[ "Calculates", "a", "2D", "histogram", "of", "data", "allowing", "for", "normalisation", "and", "bootstrap", "sampling" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/utils.py#L486-L558
train
gem/oq-engine
openquake/hmtk/seismicity/utils.py
area_of_polygon
def area_of_polygon(polygon): """ Returns the area of an OpenQuake polygon in square kilometres """ lon0 = np.mean(polygon.lons) lat0 = np.mean(polygon.lats) # Transform to lamber equal area projection x, y = lonlat_to_laea(polygon.lons, polygon.lats, lon0, lat0) # Build shapely polygons poly = geometry.Polygon(zip(x, y)) return poly.area
python
def area_of_polygon(polygon): """ Returns the area of an OpenQuake polygon in square kilometres """ lon0 = np.mean(polygon.lons) lat0 = np.mean(polygon.lats) # Transform to lamber equal area projection x, y = lonlat_to_laea(polygon.lons, polygon.lats, lon0, lat0) # Build shapely polygons poly = geometry.Polygon(zip(x, y)) return poly.area
[ "def", "area_of_polygon", "(", "polygon", ")", ":", "lon0", "=", "np", ".", "mean", "(", "polygon", ".", "lons", ")", "lat0", "=", "np", ".", "mean", "(", "polygon", ".", "lats", ")", "x", ",", "y", "=", "lonlat_to_laea", "(", "polygon", ".", "lons...
Returns the area of an OpenQuake polygon in square kilometres
[ "Returns", "the", "area", "of", "an", "OpenQuake", "polygon", "in", "square", "kilometres" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/utils.py#L628-L638
train
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.lti
def lti(self): """ Dictionary extended_loss_type -> extended_loss_type index """ return {lt: i for i, (lt, dt) in enumerate(self.loss_dt_list())}
python
def lti(self): """ Dictionary extended_loss_type -> extended_loss_type index """ return {lt: i for i, (lt, dt) in enumerate(self.loss_dt_list())}
[ "def", "lti", "(", "self", ")", ":", "return", "{", "lt", ":", "i", "for", "i", ",", "(", "lt", ",", "dt", ")", "in", "enumerate", "(", "self", ".", "loss_dt_list", "(", ")", ")", "}" ]
Dictionary extended_loss_type -> extended_loss_type index
[ "Dictionary", "extended_loss_type", "-", ">", "extended_loss_type", "index" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L468-L472
train
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.loss_maps_dt
def loss_maps_dt(self, dtype=F32): """ Return a composite data type for loss maps """ ltypes = self.loss_dt(dtype).names lst = [('poe-%s' % poe, dtype) for poe in self.conditional_loss_poes] return numpy.dtype([(lt, lst) for lt in ltypes])
python
def loss_maps_dt(self, dtype=F32): """ Return a composite data type for loss maps """ ltypes = self.loss_dt(dtype).names lst = [('poe-%s' % poe, dtype) for poe in self.conditional_loss_poes] return numpy.dtype([(lt, lst) for lt in ltypes])
[ "def", "loss_maps_dt", "(", "self", ",", "dtype", "=", "F32", ")", ":", "ltypes", "=", "self", ".", "loss_dt", "(", "dtype", ")", ".", "names", "lst", "=", "[", "(", "'poe-%s'", "%", "poe", ",", "dtype", ")", "for", "poe", "in", "self", ".", "con...
Return a composite data type for loss maps
[ "Return", "a", "composite", "data", "type", "for", "loss", "maps" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L488-L494
train
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.gmf_data_dt
def gmf_data_dt(self): """ Return a composite data type for the GMFs """ return numpy.dtype( [('rlzi', U16), ('sid', U32), ('eid', U64), ('gmv', (F32, (len(self.imtls),)))])
python
def gmf_data_dt(self): """ Return a composite data type for the GMFs """ return numpy.dtype( [('rlzi', U16), ('sid', U32), ('eid', U64), ('gmv', (F32, (len(self.imtls),)))])
[ "def", "gmf_data_dt", "(", "self", ")", ":", "return", "numpy", ".", "dtype", "(", "[", "(", "'rlzi'", ",", "U16", ")", ",", "(", "'sid'", ",", "U32", ")", ",", "(", "'eid'", ",", "U64", ")", ",", "(", "'gmv'", ",", "(", "F32", ",", "(", "len...
Return a composite data type for the GMFs
[ "Return", "a", "composite", "data", "type", "for", "the", "GMFs" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L496-L502
train
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.no_imls
def no_imls(self): """ Return True if there are no intensity measure levels """ return all(numpy.isnan(ls).any() for ls in self.imtls.values())
python
def no_imls(self): """ Return True if there are no intensity measure levels """ return all(numpy.isnan(ls).any() for ls in self.imtls.values())
[ "def", "no_imls", "(", "self", ")", ":", "return", "all", "(", "numpy", ".", "isnan", "(", "ls", ")", ".", "any", "(", ")", "for", "ls", "in", "self", ".", "imtls", ".", "values", "(", ")", ")" ]
Return True if there are no intensity measure levels
[ "Return", "True", "if", "there", "are", "no", "intensity", "measure", "levels" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L504-L508
train
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.get_kinds
def get_kinds(self, kind, R): """ Yield 'rlz-000', 'rlz-001', ...', 'mean', 'quantile-0.1', ... """ stats = self.hazard_stats() if kind == 'stats': yield from stats return elif kind == 'rlzs': for r in range(R): yield 'rlz-%d' % r return elif kind: yield kind return # default: yield stats (and realizations if required) if R > 1 and self.individual_curves or not stats: for r in range(R): yield 'rlz-%03d' % r yield from stats
python
def get_kinds(self, kind, R): """ Yield 'rlz-000', 'rlz-001', ...', 'mean', 'quantile-0.1', ... """ stats = self.hazard_stats() if kind == 'stats': yield from stats return elif kind == 'rlzs': for r in range(R): yield 'rlz-%d' % r return elif kind: yield kind return # default: yield stats (and realizations if required) if R > 1 and self.individual_curves or not stats: for r in range(R): yield 'rlz-%03d' % r yield from stats
[ "def", "get_kinds", "(", "self", ",", "kind", ",", "R", ")", ":", "stats", "=", "self", ".", "hazard_stats", "(", ")", "if", "kind", "==", "'stats'", ":", "yield", "from", "stats", "return", "elif", "kind", "==", "'rlzs'", ":", "for", "r", "in", "r...
Yield 'rlz-000', 'rlz-001', ...', 'mean', 'quantile-0.1', ...
[ "Yield", "rlz", "-", "000", "rlz", "-", "001", "...", "mean", "quantile", "-", "0", ".", "1", "..." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L523-L542
train
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.hazard_stats
def hazard_stats(self): """ Return a list of item with the statistical functions defined for the hazard calculation """ names = [] # name of statistical functions funcs = [] # statistical functions of kind func(values, weights) if self.mean_hazard_curves: names.append('mean') funcs.append(stats.mean_curve) if self.std_hazard_curves: names.append('std') funcs.append(stats.std_curve) for q in self.quantiles: names.append('quantile-%s' % q) funcs.append(functools.partial(stats.quantile_curve, q)) if self.max_hazard_curves: names.append('max') funcs.append(stats.max_curve) return dict(zip(names, funcs))
python
def hazard_stats(self): """ Return a list of item with the statistical functions defined for the hazard calculation """ names = [] # name of statistical functions funcs = [] # statistical functions of kind func(values, weights) if self.mean_hazard_curves: names.append('mean') funcs.append(stats.mean_curve) if self.std_hazard_curves: names.append('std') funcs.append(stats.std_curve) for q in self.quantiles: names.append('quantile-%s' % q) funcs.append(functools.partial(stats.quantile_curve, q)) if self.max_hazard_curves: names.append('max') funcs.append(stats.max_curve) return dict(zip(names, funcs))
[ "def", "hazard_stats", "(", "self", ")", ":", "names", "=", "[", "]", "funcs", "=", "[", "]", "if", "self", ".", "mean_hazard_curves", ":", "names", ".", "append", "(", "'mean'", ")", "funcs", ".", "append", "(", "stats", ".", "mean_curve", ")", "if"...
Return a list of item with the statistical functions defined for the hazard calculation
[ "Return", "a", "list", "of", "item", "with", "the", "statistical", "functions", "defined", "for", "the", "hazard", "calculation" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L544-L563
train
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.is_valid_geometry
def is_valid_geometry(self): """ It is possible to infer the geometry only if exactly one of sites, sites_csv, hazard_curves_csv, gmfs_csv, region is set. You did set more than one, or nothing. """ has_sites = (self.sites is not None or 'sites' in self.inputs or 'site_model' in self.inputs) if not has_sites and not self.ground_motion_fields: # when generating only the ruptures you do not need the sites return True if ('gmfs' in self.inputs and not has_sites and not self.inputs['gmfs'].endswith('.xml')): raise ValueError('Missing sites or sites_csv in the .ini file') elif ('risk' in self.calculation_mode or 'damage' in self.calculation_mode or 'bcr' in self.calculation_mode): return True # no check on the sites for risk flags = dict( sites=bool(self.sites), sites_csv=self.inputs.get('sites', 0), hazard_curves_csv=self.inputs.get('hazard_curves', 0), gmfs_csv=self.inputs.get('gmfs', 0), region=bool(self.region and self.region_grid_spacing)) # NB: below we check that all the flags # are mutually exclusive return sum(bool(v) for v in flags.values()) == 1 or self.inputs.get( 'exposure') or self.inputs.get('site_model')
python
def is_valid_geometry(self): """ It is possible to infer the geometry only if exactly one of sites, sites_csv, hazard_curves_csv, gmfs_csv, region is set. You did set more than one, or nothing. """ has_sites = (self.sites is not None or 'sites' in self.inputs or 'site_model' in self.inputs) if not has_sites and not self.ground_motion_fields: # when generating only the ruptures you do not need the sites return True if ('gmfs' in self.inputs and not has_sites and not self.inputs['gmfs'].endswith('.xml')): raise ValueError('Missing sites or sites_csv in the .ini file') elif ('risk' in self.calculation_mode or 'damage' in self.calculation_mode or 'bcr' in self.calculation_mode): return True # no check on the sites for risk flags = dict( sites=bool(self.sites), sites_csv=self.inputs.get('sites', 0), hazard_curves_csv=self.inputs.get('hazard_curves', 0), gmfs_csv=self.inputs.get('gmfs', 0), region=bool(self.region and self.region_grid_spacing)) # NB: below we check that all the flags # are mutually exclusive return sum(bool(v) for v in flags.values()) == 1 or self.inputs.get( 'exposure') or self.inputs.get('site_model')
[ "def", "is_valid_geometry", "(", "self", ")", ":", "has_sites", "=", "(", "self", ".", "sites", "is", "not", "None", "or", "'sites'", "in", "self", ".", "inputs", "or", "'site_model'", "in", "self", ".", "inputs", ")", "if", "not", "has_sites", "and", ...
It is possible to infer the geometry only if exactly one of sites, sites_csv, hazard_curves_csv, gmfs_csv, region is set. You did set more than one, or nothing.
[ "It", "is", "possible", "to", "infer", "the", "geometry", "only", "if", "exactly", "one", "of", "sites", "sites_csv", "hazard_curves_csv", "gmfs_csv", "region", "is", "set", ".", "You", "did", "set", "more", "than", "one", "or", "nothing", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L595-L622
train
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.is_valid_intensity_measure_types
def is_valid_intensity_measure_types(self): """ If the IMTs and levels are extracted from the risk models, they must not be set directly. Moreover, if `intensity_measure_types_and_levels` is set directly, `intensity_measure_types` must not be set. """ if self.ground_motion_correlation_model: for imt in self.imtls: if not (imt.startswith('SA') or imt == 'PGA'): raise ValueError( 'Correlation model %s does not accept IMT=%s' % ( self.ground_motion_correlation_model, imt)) if self.risk_files: # IMTLs extracted from the risk files return (self.intensity_measure_types is None and self.intensity_measure_types_and_levels is None) elif not hasattr(self, 'hazard_imtls') and not hasattr( self, 'risk_imtls'): return False return True
python
def is_valid_intensity_measure_types(self): """ If the IMTs and levels are extracted from the risk models, they must not be set directly. Moreover, if `intensity_measure_types_and_levels` is set directly, `intensity_measure_types` must not be set. """ if self.ground_motion_correlation_model: for imt in self.imtls: if not (imt.startswith('SA') or imt == 'PGA'): raise ValueError( 'Correlation model %s does not accept IMT=%s' % ( self.ground_motion_correlation_model, imt)) if self.risk_files: # IMTLs extracted from the risk files return (self.intensity_measure_types is None and self.intensity_measure_types_and_levels is None) elif not hasattr(self, 'hazard_imtls') and not hasattr( self, 'risk_imtls'): return False return True
[ "def", "is_valid_intensity_measure_types", "(", "self", ")", ":", "if", "self", ".", "ground_motion_correlation_model", ":", "for", "imt", "in", "self", ".", "imtls", ":", "if", "not", "(", "imt", ".", "startswith", "(", "'SA'", ")", "or", "imt", "==", "'P...
If the IMTs and levels are extracted from the risk models, they must not be set directly. Moreover, if `intensity_measure_types_and_levels` is set directly, `intensity_measure_types` must not be set.
[ "If", "the", "IMTs", "and", "levels", "are", "extracted", "from", "the", "risk", "models", "they", "must", "not", "be", "set", "directly", ".", "Moreover", "if", "intensity_measure_types_and_levels", "is", "set", "directly", "intensity_measure_types", "must", "not...
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L660-L679
train
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.is_valid_intensity_measure_levels
def is_valid_intensity_measure_levels(self): """ In order to compute hazard curves, `intensity_measure_types_and_levels` must be set or extracted from the risk models. """ invalid = self.no_imls() and not self.risk_files and ( self.hazard_curves_from_gmfs or self.calculation_mode in ('classical', 'disaggregation')) return not invalid
python
def is_valid_intensity_measure_levels(self): """ In order to compute hazard curves, `intensity_measure_types_and_levels` must be set or extracted from the risk models. """ invalid = self.no_imls() and not self.risk_files and ( self.hazard_curves_from_gmfs or self.calculation_mode in ('classical', 'disaggregation')) return not invalid
[ "def", "is_valid_intensity_measure_levels", "(", "self", ")", ":", "invalid", "=", "self", ".", "no_imls", "(", ")", "and", "not", "self", ".", "risk_files", "and", "(", "self", ".", "hazard_curves_from_gmfs", "or", "self", ".", "calculation_mode", "in", "(", ...
In order to compute hazard curves, `intensity_measure_types_and_levels` must be set or extracted from the risk models.
[ "In", "order", "to", "compute", "hazard", "curves", "intensity_measure_types_and_levels", "must", "be", "set", "or", "extracted", "from", "the", "risk", "models", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L681-L689
train
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.is_valid_sites
def is_valid_sites(self): """ The sites are overdetermined """ if 'site_model' in self.inputs and 'sites' in self.inputs: return False elif 'site_model' in self.inputs and self.sites: return False elif 'sites' in self.inputs and self.sites: return False elif self.sites and self.region and self.region_grid_spacing: return False else: return True
python
def is_valid_sites(self): """ The sites are overdetermined """ if 'site_model' in self.inputs and 'sites' in self.inputs: return False elif 'site_model' in self.inputs and self.sites: return False elif 'sites' in self.inputs and self.sites: return False elif self.sites and self.region and self.region_grid_spacing: return False else: return True
[ "def", "is_valid_sites", "(", "self", ")", ":", "if", "'site_model'", "in", "self", ".", "inputs", "and", "'sites'", "in", "self", ".", "inputs", ":", "return", "False", "elif", "'site_model'", "in", "self", ".", "inputs", "and", "self", ".", "sites", ":...
The sites are overdetermined
[ "The", "sites", "are", "overdetermined" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L733-L746
train
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.is_valid_complex_fault_mesh_spacing
def is_valid_complex_fault_mesh_spacing(self): """ The `complex_fault_mesh_spacing` parameter can be None only if `rupture_mesh_spacing` is set. In that case it is identified with it. """ rms = getattr(self, 'rupture_mesh_spacing', None) if rms and not getattr(self, 'complex_fault_mesh_spacing', None): self.complex_fault_mesh_spacing = self.rupture_mesh_spacing return True
python
def is_valid_complex_fault_mesh_spacing(self): """ The `complex_fault_mesh_spacing` parameter can be None only if `rupture_mesh_spacing` is set. In that case it is identified with it. """ rms = getattr(self, 'rupture_mesh_spacing', None) if rms and not getattr(self, 'complex_fault_mesh_spacing', None): self.complex_fault_mesh_spacing = self.rupture_mesh_spacing return True
[ "def", "is_valid_complex_fault_mesh_spacing", "(", "self", ")", ":", "rms", "=", "getattr", "(", "self", ",", "'rupture_mesh_spacing'", ",", "None", ")", "if", "rms", "and", "not", "getattr", "(", "self", ",", "'complex_fault_mesh_spacing'", ",", "None", ")", ...
The `complex_fault_mesh_spacing` parameter can be None only if `rupture_mesh_spacing` is set. In that case it is identified with it.
[ "The", "complex_fault_mesh_spacing", "parameter", "can", "be", "None", "only", "if", "rupture_mesh_spacing", "is", "set", ".", "In", "that", "case", "it", "is", "identified", "with", "it", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L748-L756
train
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.is_valid_optimize_same_id_sources
def is_valid_optimize_same_id_sources(self): """ The `optimize_same_id_sources` can be true only in the classical calculators. """ if (self.optimize_same_id_sources and 'classical' in self.calculation_mode or 'disagg' in self.calculation_mode): return True elif self.optimize_same_id_sources: return False else: return True
python
def is_valid_optimize_same_id_sources(self): """ The `optimize_same_id_sources` can be true only in the classical calculators. """ if (self.optimize_same_id_sources and 'classical' in self.calculation_mode or 'disagg' in self.calculation_mode): return True elif self.optimize_same_id_sources: return False else: return True
[ "def", "is_valid_optimize_same_id_sources", "(", "self", ")", ":", "if", "(", "self", ".", "optimize_same_id_sources", "and", "'classical'", "in", "self", ".", "calculation_mode", "or", "'disagg'", "in", "self", ".", "calculation_mode", ")", ":", "return", "True",...
The `optimize_same_id_sources` can be true only in the classical calculators.
[ "The", "optimize_same_id_sources", "can", "be", "true", "only", "in", "the", "classical", "calculators", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L758-L770
train
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.check_missing
def check_missing(self, param, action): """ Make sure the given parameter is missing in the job.ini file """ assert action in ('debug', 'info', 'warn', 'error'), action if self.inputs.get(param): msg = '%s_file in %s is ignored in %s' % ( param, self.inputs['job_ini'], self.calculation_mode) if action == 'error': raise InvalidFile(msg) else: getattr(logging, action)(msg)
python
def check_missing(self, param, action): """ Make sure the given parameter is missing in the job.ini file """ assert action in ('debug', 'info', 'warn', 'error'), action if self.inputs.get(param): msg = '%s_file in %s is ignored in %s' % ( param, self.inputs['job_ini'], self.calculation_mode) if action == 'error': raise InvalidFile(msg) else: getattr(logging, action)(msg)
[ "def", "check_missing", "(", "self", ",", "param", ",", "action", ")", ":", "assert", "action", "in", "(", "'debug'", ",", "'info'", ",", "'warn'", ",", "'error'", ")", ",", "action", "if", "self", ".", "inputs", ".", "get", "(", "param", ")", ":", ...
Make sure the given parameter is missing in the job.ini file
[ "Make", "sure", "the", "given", "parameter", "is", "missing", "in", "the", "job", ".", "ini", "file" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L795-L806
train
gem/oq-engine
openquake/hazardlib/sourceconverter.py
get_set_num_ruptures
def get_set_num_ruptures(src): """ Extract the number of ruptures and set it """ if not src.num_ruptures: t0 = time.time() src.num_ruptures = src.count_ruptures() dt = time.time() - t0 clsname = src.__class__.__name__ if dt > 10: if 'Area' in clsname: logging.warning( '%s.count_ruptures took %d seconds, perhaps the ' 'area discretization is too small', src, dt) elif 'ComplexFault' in clsname: logging.warning( '%s.count_ruptures took %d seconds, perhaps the ' 'complex_fault_mesh_spacing is too small', src, dt) elif 'SimpleFault' in clsname: logging.warning( '%s.count_ruptures took %d seconds, perhaps the ' 'rupture_mesh_spacing is too small', src, dt) else: # multiPointSource logging.warning('count_ruptures %s took %d seconds', src, dt) return src.num_ruptures
python
def get_set_num_ruptures(src): """ Extract the number of ruptures and set it """ if not src.num_ruptures: t0 = time.time() src.num_ruptures = src.count_ruptures() dt = time.time() - t0 clsname = src.__class__.__name__ if dt > 10: if 'Area' in clsname: logging.warning( '%s.count_ruptures took %d seconds, perhaps the ' 'area discretization is too small', src, dt) elif 'ComplexFault' in clsname: logging.warning( '%s.count_ruptures took %d seconds, perhaps the ' 'complex_fault_mesh_spacing is too small', src, dt) elif 'SimpleFault' in clsname: logging.warning( '%s.count_ruptures took %d seconds, perhaps the ' 'rupture_mesh_spacing is too small', src, dt) else: # multiPointSource logging.warning('count_ruptures %s took %d seconds', src, dt) return src.num_ruptures
[ "def", "get_set_num_ruptures", "(", "src", ")", ":", "if", "not", "src", ".", "num_ruptures", ":", "t0", "=", "time", ".", "time", "(", ")", "src", ".", "num_ruptures", "=", "src", ".", "count_ruptures", "(", ")", "dt", "=", "time", ".", "time", "(",...
Extract the number of ruptures and set it
[ "Extract", "the", "number", "of", "ruptures", "and", "set", "it" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L238-L263
train
gem/oq-engine
openquake/hazardlib/sourceconverter.py
mfds2multimfd
def mfds2multimfd(mfds): """ Convert a list of MFD nodes into a single MultiMFD node """ _, kind = mfds[0].tag.split('}') node = Node('multiMFD', dict(kind=kind, size=len(mfds))) lengths = None for field in mfd.multi_mfd.ASSOC[kind][1:]: alias = mfd.multi_mfd.ALIAS.get(field, field) if field in ('magnitudes', 'occurRates'): data = [~getattr(m, field) for m in mfds] lengths = [len(d) for d in data] data = sum(data, []) # list of lists else: try: data = [m[alias] for m in mfds] except KeyError: if alias == 'binWidth': # missing bindWidth in GR MDFs is ok continue else: raise node.append(Node(field, text=collapse(data))) if lengths: # this is the last field if present node.append(Node('lengths', text=collapse(lengths))) return node
python
def mfds2multimfd(mfds): """ Convert a list of MFD nodes into a single MultiMFD node """ _, kind = mfds[0].tag.split('}') node = Node('multiMFD', dict(kind=kind, size=len(mfds))) lengths = None for field in mfd.multi_mfd.ASSOC[kind][1:]: alias = mfd.multi_mfd.ALIAS.get(field, field) if field in ('magnitudes', 'occurRates'): data = [~getattr(m, field) for m in mfds] lengths = [len(d) for d in data] data = sum(data, []) # list of lists else: try: data = [m[alias] for m in mfds] except KeyError: if alias == 'binWidth': # missing bindWidth in GR MDFs is ok continue else: raise node.append(Node(field, text=collapse(data))) if lengths: # this is the last field if present node.append(Node('lengths', text=collapse(lengths))) return node
[ "def", "mfds2multimfd", "(", "mfds", ")", ":", "_", ",", "kind", "=", "mfds", "[", "0", "]", ".", "tag", ".", "split", "(", "'}'", ")", "node", "=", "Node", "(", "'multiMFD'", ",", "dict", "(", "kind", "=", "kind", ",", "size", "=", "len", "(",...
Convert a list of MFD nodes into a single MultiMFD node
[ "Convert", "a", "list", "of", "MFD", "nodes", "into", "a", "single", "MultiMFD", "node" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L918-L943
train
gem/oq-engine
openquake/hazardlib/sourceconverter.py
SourceGroup.update
def update(self, src): """ Update the attributes sources, min_mag, max_mag according to the given source. :param src: an instance of :class: `openquake.hazardlib.source.base.BaseSeismicSource` """ assert src.tectonic_region_type == self.trt, ( src.tectonic_region_type, self.trt) if not src.min_mag: # if not set already src.min_mag = self.min_mag.get(self.trt) or self.min_mag['default'] # checking mutex ruptures if (not isinstance(src, NonParametricSeismicSource) and self.rup_interdep == 'mutex'): msg = "Mutually exclusive ruptures can only be " msg += "modelled using non-parametric sources" raise ValueError(msg) nr = get_set_num_ruptures(src) if nr == 0: # the minimum_magnitude filters all ruptures return self.tot_ruptures += nr self.sources.append(src) _, max_mag = src.get_min_max_mag() prev_max_mag = self.max_mag if prev_max_mag is None or max_mag > prev_max_mag: self.max_mag = max_mag
python
def update(self, src): """ Update the attributes sources, min_mag, max_mag according to the given source. :param src: an instance of :class: `openquake.hazardlib.source.base.BaseSeismicSource` """ assert src.tectonic_region_type == self.trt, ( src.tectonic_region_type, self.trt) if not src.min_mag: # if not set already src.min_mag = self.min_mag.get(self.trt) or self.min_mag['default'] # checking mutex ruptures if (not isinstance(src, NonParametricSeismicSource) and self.rup_interdep == 'mutex'): msg = "Mutually exclusive ruptures can only be " msg += "modelled using non-parametric sources" raise ValueError(msg) nr = get_set_num_ruptures(src) if nr == 0: # the minimum_magnitude filters all ruptures return self.tot_ruptures += nr self.sources.append(src) _, max_mag = src.get_min_max_mag() prev_max_mag = self.max_mag if prev_max_mag is None or max_mag > prev_max_mag: self.max_mag = max_mag
[ "def", "update", "(", "self", ",", "src", ")", ":", "assert", "src", ".", "tectonic_region_type", "==", "self", ".", "trt", ",", "(", "src", ".", "tectonic_region_type", ",", "self", ".", "trt", ")", "if", "not", "src", ".", "min_mag", ":", "src", "....
Update the attributes sources, min_mag, max_mag according to the given source. :param src: an instance of :class: `openquake.hazardlib.source.base.BaseSeismicSource`
[ "Update", "the", "attributes", "sources", "min_mag", "max_mag", "according", "to", "the", "given", "source", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L159-L187
train
gem/oq-engine
openquake/hazardlib/sourceconverter.py
RuptureConverter.convert_node
def convert_node(self, node): """ Convert the given rupture node into a hazardlib rupture, depending on the node tag. :param node: a node representing a rupture """ convert = getattr(self, 'convert_' + striptag(node.tag)) return convert(node)
python
def convert_node(self, node): """ Convert the given rupture node into a hazardlib rupture, depending on the node tag. :param node: a node representing a rupture """ convert = getattr(self, 'convert_' + striptag(node.tag)) return convert(node)
[ "def", "convert_node", "(", "self", ",", "node", ")", ":", "convert", "=", "getattr", "(", "self", ",", "'convert_'", "+", "striptag", "(", "node", ".", "tag", ")", ")", "return", "convert", "(", "node", ")" ]
Convert the given rupture node into a hazardlib rupture, depending on the node tag. :param node: a node representing a rupture
[ "Convert", "the", "given", "rupture", "node", "into", "a", "hazardlib", "rupture", "depending", "on", "the", "node", "tag", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L321-L329
train
gem/oq-engine
openquake/hazardlib/sourceconverter.py
RuptureConverter.convert_simpleFaultRupture
def convert_simpleFaultRupture(self, node): """ Convert a simpleFaultRupture node. :param node: the rupture node """ mag, rake, hypocenter = self.get_mag_rake_hypo(node) with context(self.fname, node): surfaces = [node.simpleFaultGeometry] rupt = source.rupture.BaseRupture( mag=mag, rake=rake, tectonic_region_type=None, hypocenter=hypocenter, surface=self.convert_surfaces(surfaces)) return rupt
python
def convert_simpleFaultRupture(self, node): """ Convert a simpleFaultRupture node. :param node: the rupture node """ mag, rake, hypocenter = self.get_mag_rake_hypo(node) with context(self.fname, node): surfaces = [node.simpleFaultGeometry] rupt = source.rupture.BaseRupture( mag=mag, rake=rake, tectonic_region_type=None, hypocenter=hypocenter, surface=self.convert_surfaces(surfaces)) return rupt
[ "def", "convert_simpleFaultRupture", "(", "self", ",", "node", ")", ":", "mag", ",", "rake", ",", "hypocenter", "=", "self", ".", "get_mag_rake_hypo", "(", "node", ")", "with", "context", "(", "self", ".", "fname", ",", "node", ")", ":", "surfaces", "=",...
Convert a simpleFaultRupture node. :param node: the rupture node
[ "Convert", "a", "simpleFaultRupture", "node", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L414-L427
train
gem/oq-engine
openquake/hazardlib/sourceconverter.py
RuptureConverter.convert_multiPlanesRupture
def convert_multiPlanesRupture(self, node): """ Convert a multiPlanesRupture node. :param node: the rupture node """ mag, rake, hypocenter = self.get_mag_rake_hypo(node) with context(self.fname, node): surfaces = list(node.getnodes('planarSurface')) rupt = source.rupture.BaseRupture( mag=mag, rake=rake, tectonic_region_type=None, hypocenter=hypocenter, surface=self.convert_surfaces(surfaces)) return rupt
python
def convert_multiPlanesRupture(self, node): """ Convert a multiPlanesRupture node. :param node: the rupture node """ mag, rake, hypocenter = self.get_mag_rake_hypo(node) with context(self.fname, node): surfaces = list(node.getnodes('planarSurface')) rupt = source.rupture.BaseRupture( mag=mag, rake=rake, tectonic_region_type=None, hypocenter=hypocenter, surface=self.convert_surfaces(surfaces)) return rupt
[ "def", "convert_multiPlanesRupture", "(", "self", ",", "node", ")", ":", "mag", ",", "rake", ",", "hypocenter", "=", "self", ".", "get_mag_rake_hypo", "(", "node", ")", "with", "context", "(", "self", ".", "fname", ",", "node", ")", ":", "surfaces", "=",...
Convert a multiPlanesRupture node. :param node: the rupture node
[ "Convert", "a", "multiPlanesRupture", "node", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L460-L474
train
gem/oq-engine
openquake/hazardlib/sourceconverter.py
SourceConverter.get_tom
def get_tom(self, node): """ Convert the given node into a Temporal Occurrence Model object. :param node: a node of kind poissonTOM or brownianTOM :returns: a :class:`openquake.hazardlib.mfd.EvenlyDiscretizedMFD.` or :class:`openquake.hazardlib.mfd.TruncatedGRMFD` instance """ if 'tom' in node.attrib: tom_cls = tom.registry[node['tom']] else: tom_cls = tom.registry['PoissonTOM'] return tom_cls(time_span=self.investigation_time, occurrence_rate=node.get('occurrence_rate'))
python
def get_tom(self, node): """ Convert the given node into a Temporal Occurrence Model object. :param node: a node of kind poissonTOM or brownianTOM :returns: a :class:`openquake.hazardlib.mfd.EvenlyDiscretizedMFD.` or :class:`openquake.hazardlib.mfd.TruncatedGRMFD` instance """ if 'tom' in node.attrib: tom_cls = tom.registry[node['tom']] else: tom_cls = tom.registry['PoissonTOM'] return tom_cls(time_span=self.investigation_time, occurrence_rate=node.get('occurrence_rate'))
[ "def", "get_tom", "(", "self", ",", "node", ")", ":", "if", "'tom'", "in", "node", ".", "attrib", ":", "tom_cls", "=", "tom", ".", "registry", "[", "node", "[", "'tom'", "]", "]", "else", ":", "tom_cls", "=", "tom", ".", "registry", "[", "'PoissonT...
Convert the given node into a Temporal Occurrence Model object. :param node: a node of kind poissonTOM or brownianTOM :returns: a :class:`openquake.hazardlib.mfd.EvenlyDiscretizedMFD.` or :class:`openquake.hazardlib.mfd.TruncatedGRMFD` instance
[ "Convert", "the", "given", "node", "into", "a", "Temporal", "Occurrence", "Model", "object", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L533-L546
train
gem/oq-engine
openquake/hazardlib/sourceconverter.py
SourceConverter.convert_mfdist
def convert_mfdist(self, node): """ Convert the given node into a Magnitude-Frequency Distribution object. :param node: a node of kind incrementalMFD or truncGutenbergRichterMFD :returns: a :class:`openquake.hazardlib.mfd.EvenlyDiscretizedMFD.` or :class:`openquake.hazardlib.mfd.TruncatedGRMFD` instance """ with context(self.fname, node): [mfd_node] = [subnode for subnode in node if subnode.tag.endswith( ('incrementalMFD', 'truncGutenbergRichterMFD', 'arbitraryMFD', 'YoungsCoppersmithMFD', 'multiMFD'))] if mfd_node.tag.endswith('incrementalMFD'): return mfd.EvenlyDiscretizedMFD( min_mag=mfd_node['minMag'], bin_width=mfd_node['binWidth'], occurrence_rates=~mfd_node.occurRates) elif mfd_node.tag.endswith('truncGutenbergRichterMFD'): return mfd.TruncatedGRMFD( a_val=mfd_node['aValue'], b_val=mfd_node['bValue'], min_mag=mfd_node['minMag'], max_mag=mfd_node['maxMag'], bin_width=self.width_of_mfd_bin) elif mfd_node.tag.endswith('arbitraryMFD'): return mfd.ArbitraryMFD( magnitudes=~mfd_node.magnitudes, occurrence_rates=~mfd_node.occurRates) elif mfd_node.tag.endswith('YoungsCoppersmithMFD'): if "totalMomentRate" in mfd_node.attrib.keys(): # Return Youngs & Coppersmith from the total moment rate return mfd.YoungsCoppersmith1985MFD.from_total_moment_rate( min_mag=mfd_node["minMag"], b_val=mfd_node["bValue"], char_mag=mfd_node["characteristicMag"], total_moment_rate=mfd_node["totalMomentRate"], bin_width=mfd_node["binWidth"]) elif "characteristicRate" in mfd_node.attrib.keys(): # Return Youngs & Coppersmith from the total moment rate return mfd.YoungsCoppersmith1985MFD.\ from_characteristic_rate( min_mag=mfd_node["minMag"], b_val=mfd_node["bValue"], char_mag=mfd_node["characteristicMag"], char_rate=mfd_node["characteristicRate"], bin_width=mfd_node["binWidth"]) elif mfd_node.tag.endswith('multiMFD'): return mfd.multi_mfd.MultiMFD.from_node( mfd_node, self.width_of_mfd_bin)
python
def convert_mfdist(self, node): """ Convert the given node into a Magnitude-Frequency Distribution object. :param node: a node of kind incrementalMFD or truncGutenbergRichterMFD :returns: a :class:`openquake.hazardlib.mfd.EvenlyDiscretizedMFD.` or :class:`openquake.hazardlib.mfd.TruncatedGRMFD` instance """ with context(self.fname, node): [mfd_node] = [subnode for subnode in node if subnode.tag.endswith( ('incrementalMFD', 'truncGutenbergRichterMFD', 'arbitraryMFD', 'YoungsCoppersmithMFD', 'multiMFD'))] if mfd_node.tag.endswith('incrementalMFD'): return mfd.EvenlyDiscretizedMFD( min_mag=mfd_node['minMag'], bin_width=mfd_node['binWidth'], occurrence_rates=~mfd_node.occurRates) elif mfd_node.tag.endswith('truncGutenbergRichterMFD'): return mfd.TruncatedGRMFD( a_val=mfd_node['aValue'], b_val=mfd_node['bValue'], min_mag=mfd_node['minMag'], max_mag=mfd_node['maxMag'], bin_width=self.width_of_mfd_bin) elif mfd_node.tag.endswith('arbitraryMFD'): return mfd.ArbitraryMFD( magnitudes=~mfd_node.magnitudes, occurrence_rates=~mfd_node.occurRates) elif mfd_node.tag.endswith('YoungsCoppersmithMFD'): if "totalMomentRate" in mfd_node.attrib.keys(): # Return Youngs & Coppersmith from the total moment rate return mfd.YoungsCoppersmith1985MFD.from_total_moment_rate( min_mag=mfd_node["minMag"], b_val=mfd_node["bValue"], char_mag=mfd_node["characteristicMag"], total_moment_rate=mfd_node["totalMomentRate"], bin_width=mfd_node["binWidth"]) elif "characteristicRate" in mfd_node.attrib.keys(): # Return Youngs & Coppersmith from the total moment rate return mfd.YoungsCoppersmith1985MFD.\ from_characteristic_rate( min_mag=mfd_node["minMag"], b_val=mfd_node["bValue"], char_mag=mfd_node["characteristicMag"], char_rate=mfd_node["characteristicRate"], bin_width=mfd_node["binWidth"]) elif mfd_node.tag.endswith('multiMFD'): return mfd.multi_mfd.MultiMFD.from_node( mfd_node, self.width_of_mfd_bin)
[ "def", "convert_mfdist", "(", "self", ",", "node", ")", ":", "with", "context", "(", "self", ".", "fname", ",", "node", ")", ":", "[", "mfd_node", "]", "=", "[", "subnode", "for", "subnode", "in", "node", "if", "subnode", ".", "tag", ".", "endswith",...
Convert the given node into a Magnitude-Frequency Distribution object. :param node: a node of kind incrementalMFD or truncGutenbergRichterMFD :returns: a :class:`openquake.hazardlib.mfd.EvenlyDiscretizedMFD.` or :class:`openquake.hazardlib.mfd.TruncatedGRMFD` instance
[ "Convert", "the", "given", "node", "into", "a", "Magnitude", "-", "Frequency", "Distribution", "object", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L548-L595
train
gem/oq-engine
openquake/hazardlib/sourceconverter.py
SourceConverter.convert_npdist
def convert_npdist(self, node): """ Convert the given node into a Nodal Plane Distribution. :param node: a nodalPlaneDist node :returns: a :class:`openquake.hazardlib.geo.NodalPlane` instance """ with context(self.fname, node): npdist = [] for np in node.nodalPlaneDist: prob, strike, dip, rake = ( np['probability'], np['strike'], np['dip'], np['rake']) npdist.append((prob, geo.NodalPlane(strike, dip, rake))) if not self.spinning_floating: npdist = [(1, npdist[0][1])] # consider the first nodal plane return pmf.PMF(npdist)
python
def convert_npdist(self, node): """ Convert the given node into a Nodal Plane Distribution. :param node: a nodalPlaneDist node :returns: a :class:`openquake.hazardlib.geo.NodalPlane` instance """ with context(self.fname, node): npdist = [] for np in node.nodalPlaneDist: prob, strike, dip, rake = ( np['probability'], np['strike'], np['dip'], np['rake']) npdist.append((prob, geo.NodalPlane(strike, dip, rake))) if not self.spinning_floating: npdist = [(1, npdist[0][1])] # consider the first nodal plane return pmf.PMF(npdist)
[ "def", "convert_npdist", "(", "self", ",", "node", ")", ":", "with", "context", "(", "self", ".", "fname", ",", "node", ")", ":", "npdist", "=", "[", "]", "for", "np", "in", "node", ".", "nodalPlaneDist", ":", "prob", ",", "strike", ",", "dip", ","...
Convert the given node into a Nodal Plane Distribution. :param node: a nodalPlaneDist node :returns: a :class:`openquake.hazardlib.geo.NodalPlane` instance
[ "Convert", "the", "given", "node", "into", "a", "Nodal", "Plane", "Distribution", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L597-L612
train
gem/oq-engine
openquake/hazardlib/sourceconverter.py
SourceConverter.convert_hpdist
def convert_hpdist(self, node): """ Convert the given node into a probability mass function for the hypo depth distribution. :param node: a hypoDepthDist node :returns: a :class:`openquake.hazardlib.pmf.PMF` instance """ with context(self.fname, node): hcdist = [(hd['probability'], hd['depth']) for hd in node.hypoDepthDist] if not self.spinning_floating: # consider the first hypocenter hcdist = [(1, hcdist[0][1])] return pmf.PMF(hcdist)
python
def convert_hpdist(self, node): """ Convert the given node into a probability mass function for the hypo depth distribution. :param node: a hypoDepthDist node :returns: a :class:`openquake.hazardlib.pmf.PMF` instance """ with context(self.fname, node): hcdist = [(hd['probability'], hd['depth']) for hd in node.hypoDepthDist] if not self.spinning_floating: # consider the first hypocenter hcdist = [(1, hcdist[0][1])] return pmf.PMF(hcdist)
[ "def", "convert_hpdist", "(", "self", ",", "node", ")", ":", "with", "context", "(", "self", ".", "fname", ",", "node", ")", ":", "hcdist", "=", "[", "(", "hd", "[", "'probability'", "]", ",", "hd", "[", "'depth'", "]", ")", "for", "hd", "in", "n...
Convert the given node into a probability mass function for the hypo depth distribution. :param node: a hypoDepthDist node :returns: a :class:`openquake.hazardlib.pmf.PMF` instance
[ "Convert", "the", "given", "node", "into", "a", "probability", "mass", "function", "for", "the", "hypo", "depth", "distribution", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L614-L627
train
gem/oq-engine
openquake/hazardlib/sourceconverter.py
SourceConverter.convert_areaSource
def convert_areaSource(self, node): """ Convert the given node into an area source object. :param node: a node with tag areaGeometry :returns: a :class:`openquake.hazardlib.source.AreaSource` instance """ geom = node.areaGeometry coords = split_coords_2d(~geom.Polygon.exterior.LinearRing.posList) polygon = geo.Polygon([geo.Point(*xy) for xy in coords]) msr = valid.SCALEREL[~node.magScaleRel]() area_discretization = geom.attrib.get( 'discretization', self.area_source_discretization) if area_discretization is None: raise ValueError( 'The source %r has no `discretization` parameter and the job.' 'ini file has no `area_source_discretization` parameter either' % node['id']) return source.AreaSource( source_id=node['id'], name=node['name'], tectonic_region_type=node.attrib.get('tectonicRegion'), mfd=self.convert_mfdist(node), rupture_mesh_spacing=self.rupture_mesh_spacing, magnitude_scaling_relationship=msr, rupture_aspect_ratio=~node.ruptAspectRatio, upper_seismogenic_depth=~geom.upperSeismoDepth, lower_seismogenic_depth=~geom.lowerSeismoDepth, nodal_plane_distribution=self.convert_npdist(node), hypocenter_distribution=self.convert_hpdist(node), polygon=polygon, area_discretization=area_discretization, temporal_occurrence_model=self.get_tom(node))
python
def convert_areaSource(self, node): """ Convert the given node into an area source object. :param node: a node with tag areaGeometry :returns: a :class:`openquake.hazardlib.source.AreaSource` instance """ geom = node.areaGeometry coords = split_coords_2d(~geom.Polygon.exterior.LinearRing.posList) polygon = geo.Polygon([geo.Point(*xy) for xy in coords]) msr = valid.SCALEREL[~node.magScaleRel]() area_discretization = geom.attrib.get( 'discretization', self.area_source_discretization) if area_discretization is None: raise ValueError( 'The source %r has no `discretization` parameter and the job.' 'ini file has no `area_source_discretization` parameter either' % node['id']) return source.AreaSource( source_id=node['id'], name=node['name'], tectonic_region_type=node.attrib.get('tectonicRegion'), mfd=self.convert_mfdist(node), rupture_mesh_spacing=self.rupture_mesh_spacing, magnitude_scaling_relationship=msr, rupture_aspect_ratio=~node.ruptAspectRatio, upper_seismogenic_depth=~geom.upperSeismoDepth, lower_seismogenic_depth=~geom.lowerSeismoDepth, nodal_plane_distribution=self.convert_npdist(node), hypocenter_distribution=self.convert_hpdist(node), polygon=polygon, area_discretization=area_discretization, temporal_occurrence_model=self.get_tom(node))
[ "def", "convert_areaSource", "(", "self", ",", "node", ")", ":", "geom", "=", "node", ".", "areaGeometry", "coords", "=", "split_coords_2d", "(", "~", "geom", ".", "Polygon", ".", "exterior", ".", "LinearRing", ".", "posList", ")", "polygon", "=", "geo", ...
Convert the given node into an area source object. :param node: a node with tag areaGeometry :returns: a :class:`openquake.hazardlib.source.AreaSource` instance
[ "Convert", "the", "given", "node", "into", "an", "area", "source", "object", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L629-L661
train
gem/oq-engine
openquake/hazardlib/sourceconverter.py
SourceConverter.convert_pointSource
def convert_pointSource(self, node): """ Convert the given node into a point source object. :param node: a node with tag pointGeometry :returns: a :class:`openquake.hazardlib.source.PointSource` instance """ geom = node.pointGeometry lon_lat = ~geom.Point.pos msr = valid.SCALEREL[~node.magScaleRel]() return source.PointSource( source_id=node['id'], name=node['name'], tectonic_region_type=node.attrib.get('tectonicRegion'), mfd=self.convert_mfdist(node), rupture_mesh_spacing=self.rupture_mesh_spacing, magnitude_scaling_relationship=msr, rupture_aspect_ratio=~node.ruptAspectRatio, upper_seismogenic_depth=~geom.upperSeismoDepth, lower_seismogenic_depth=~geom.lowerSeismoDepth, location=geo.Point(*lon_lat), nodal_plane_distribution=self.convert_npdist(node), hypocenter_distribution=self.convert_hpdist(node), temporal_occurrence_model=self.get_tom(node))
python
def convert_pointSource(self, node): """ Convert the given node into a point source object. :param node: a node with tag pointGeometry :returns: a :class:`openquake.hazardlib.source.PointSource` instance """ geom = node.pointGeometry lon_lat = ~geom.Point.pos msr = valid.SCALEREL[~node.magScaleRel]() return source.PointSource( source_id=node['id'], name=node['name'], tectonic_region_type=node.attrib.get('tectonicRegion'), mfd=self.convert_mfdist(node), rupture_mesh_spacing=self.rupture_mesh_spacing, magnitude_scaling_relationship=msr, rupture_aspect_ratio=~node.ruptAspectRatio, upper_seismogenic_depth=~geom.upperSeismoDepth, lower_seismogenic_depth=~geom.lowerSeismoDepth, location=geo.Point(*lon_lat), nodal_plane_distribution=self.convert_npdist(node), hypocenter_distribution=self.convert_hpdist(node), temporal_occurrence_model=self.get_tom(node))
[ "def", "convert_pointSource", "(", "self", ",", "node", ")", ":", "geom", "=", "node", ".", "pointGeometry", "lon_lat", "=", "~", "geom", ".", "Point", ".", "pos", "msr", "=", "valid", ".", "SCALEREL", "[", "~", "node", ".", "magScaleRel", "]", "(", ...
Convert the given node into a point source object. :param node: a node with tag pointGeometry :returns: a :class:`openquake.hazardlib.source.PointSource` instance
[ "Convert", "the", "given", "node", "into", "a", "point", "source", "object", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L663-L686
train
gem/oq-engine
openquake/hazardlib/sourceconverter.py
SourceConverter.convert_multiPointSource
def convert_multiPointSource(self, node): """ Convert the given node into a MultiPointSource object. :param node: a node with tag multiPointGeometry :returns: a :class:`openquake.hazardlib.source.MultiPointSource` """ geom = node.multiPointGeometry lons, lats = zip(*split_coords_2d(~geom.posList)) msr = valid.SCALEREL[~node.magScaleRel]() return source.MultiPointSource( source_id=node['id'], name=node['name'], tectonic_region_type=node.attrib.get('tectonicRegion'), mfd=self.convert_mfdist(node), magnitude_scaling_relationship=msr, rupture_aspect_ratio=~node.ruptAspectRatio, upper_seismogenic_depth=~geom.upperSeismoDepth, lower_seismogenic_depth=~geom.lowerSeismoDepth, nodal_plane_distribution=self.convert_npdist(node), hypocenter_distribution=self.convert_hpdist(node), mesh=geo.Mesh(F32(lons), F32(lats)), temporal_occurrence_model=self.get_tom(node))
python
def convert_multiPointSource(self, node): """ Convert the given node into a MultiPointSource object. :param node: a node with tag multiPointGeometry :returns: a :class:`openquake.hazardlib.source.MultiPointSource` """ geom = node.multiPointGeometry lons, lats = zip(*split_coords_2d(~geom.posList)) msr = valid.SCALEREL[~node.magScaleRel]() return source.MultiPointSource( source_id=node['id'], name=node['name'], tectonic_region_type=node.attrib.get('tectonicRegion'), mfd=self.convert_mfdist(node), magnitude_scaling_relationship=msr, rupture_aspect_ratio=~node.ruptAspectRatio, upper_seismogenic_depth=~geom.upperSeismoDepth, lower_seismogenic_depth=~geom.lowerSeismoDepth, nodal_plane_distribution=self.convert_npdist(node), hypocenter_distribution=self.convert_hpdist(node), mesh=geo.Mesh(F32(lons), F32(lats)), temporal_occurrence_model=self.get_tom(node))
[ "def", "convert_multiPointSource", "(", "self", ",", "node", ")", ":", "geom", "=", "node", ".", "multiPointGeometry", "lons", ",", "lats", "=", "zip", "(", "*", "split_coords_2d", "(", "~", "geom", ".", "posList", ")", ")", "msr", "=", "valid", ".", "...
Convert the given node into a MultiPointSource object. :param node: a node with tag multiPointGeometry :returns: a :class:`openquake.hazardlib.source.MultiPointSource`
[ "Convert", "the", "given", "node", "into", "a", "MultiPointSource", "object", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L688-L710
train
gem/oq-engine
openquake/hazardlib/sourceconverter.py
SourceConverter.convert_simpleFaultSource
def convert_simpleFaultSource(self, node): """ Convert the given node into a simple fault object. :param node: a node with tag areaGeometry :returns: a :class:`openquake.hazardlib.source.SimpleFaultSource` instance """ geom = node.simpleFaultGeometry msr = valid.SCALEREL[~node.magScaleRel]() fault_trace = self.geo_line(geom) mfd = self.convert_mfdist(node) with context(self.fname, node): try: hypo_list = valid.hypo_list(node.hypoList) except AttributeError: hypo_list = () try: slip_list = valid.slip_list(node.slipList) except AttributeError: slip_list = () simple = source.SimpleFaultSource( source_id=node['id'], name=node['name'], tectonic_region_type=node.attrib.get('tectonicRegion'), mfd=mfd, rupture_mesh_spacing=self.rupture_mesh_spacing, magnitude_scaling_relationship=msr, rupture_aspect_ratio=~node.ruptAspectRatio, upper_seismogenic_depth=~geom.upperSeismoDepth, lower_seismogenic_depth=~geom.lowerSeismoDepth, fault_trace=fault_trace, dip=~geom.dip, rake=~node.rake, temporal_occurrence_model=self.get_tom(node), hypo_list=hypo_list, slip_list=slip_list) return simple
python
def convert_simpleFaultSource(self, node): """ Convert the given node into a simple fault object. :param node: a node with tag areaGeometry :returns: a :class:`openquake.hazardlib.source.SimpleFaultSource` instance """ geom = node.simpleFaultGeometry msr = valid.SCALEREL[~node.magScaleRel]() fault_trace = self.geo_line(geom) mfd = self.convert_mfdist(node) with context(self.fname, node): try: hypo_list = valid.hypo_list(node.hypoList) except AttributeError: hypo_list = () try: slip_list = valid.slip_list(node.slipList) except AttributeError: slip_list = () simple = source.SimpleFaultSource( source_id=node['id'], name=node['name'], tectonic_region_type=node.attrib.get('tectonicRegion'), mfd=mfd, rupture_mesh_spacing=self.rupture_mesh_spacing, magnitude_scaling_relationship=msr, rupture_aspect_ratio=~node.ruptAspectRatio, upper_seismogenic_depth=~geom.upperSeismoDepth, lower_seismogenic_depth=~geom.lowerSeismoDepth, fault_trace=fault_trace, dip=~geom.dip, rake=~node.rake, temporal_occurrence_model=self.get_tom(node), hypo_list=hypo_list, slip_list=slip_list) return simple
[ "def", "convert_simpleFaultSource", "(", "self", ",", "node", ")", ":", "geom", "=", "node", ".", "simpleFaultGeometry", "msr", "=", "valid", ".", "SCALEREL", "[", "~", "node", ".", "magScaleRel", "]", "(", ")", "fault_trace", "=", "self", ".", "geo_line",...
Convert the given node into a simple fault object. :param node: a node with tag areaGeometry :returns: a :class:`openquake.hazardlib.source.SimpleFaultSource` instance
[ "Convert", "the", "given", "node", "into", "a", "simple", "fault", "object", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L712-L749
train
gem/oq-engine
openquake/hazardlib/sourceconverter.py
SourceConverter.convert_complexFaultSource
def convert_complexFaultSource(self, node): """ Convert the given node into a complex fault object. :param node: a node with tag areaGeometry :returns: a :class:`openquake.hazardlib.source.ComplexFaultSource` instance """ geom = node.complexFaultGeometry edges = self.geo_lines(geom) mfd = self.convert_mfdist(node) msr = valid.SCALEREL[~node.magScaleRel]() with context(self.fname, node): cmplx = source.ComplexFaultSource( source_id=node['id'], name=node['name'], tectonic_region_type=node.attrib.get('tectonicRegion'), mfd=mfd, rupture_mesh_spacing=self.complex_fault_mesh_spacing, magnitude_scaling_relationship=msr, rupture_aspect_ratio=~node.ruptAspectRatio, edges=edges, rake=~node.rake, temporal_occurrence_model=self.get_tom(node)) return cmplx
python
def convert_complexFaultSource(self, node): """ Convert the given node into a complex fault object. :param node: a node with tag areaGeometry :returns: a :class:`openquake.hazardlib.source.ComplexFaultSource` instance """ geom = node.complexFaultGeometry edges = self.geo_lines(geom) mfd = self.convert_mfdist(node) msr = valid.SCALEREL[~node.magScaleRel]() with context(self.fname, node): cmplx = source.ComplexFaultSource( source_id=node['id'], name=node['name'], tectonic_region_type=node.attrib.get('tectonicRegion'), mfd=mfd, rupture_mesh_spacing=self.complex_fault_mesh_spacing, magnitude_scaling_relationship=msr, rupture_aspect_ratio=~node.ruptAspectRatio, edges=edges, rake=~node.rake, temporal_occurrence_model=self.get_tom(node)) return cmplx
[ "def", "convert_complexFaultSource", "(", "self", ",", "node", ")", ":", "geom", "=", "node", ".", "complexFaultGeometry", "edges", "=", "self", ".", "geo_lines", "(", "geom", ")", "mfd", "=", "self", ".", "convert_mfdist", "(", "node", ")", "msr", "=", ...
Convert the given node into a complex fault object. :param node: a node with tag areaGeometry :returns: a :class:`openquake.hazardlib.source.ComplexFaultSource` instance
[ "Convert", "the", "given", "node", "into", "a", "complex", "fault", "object", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L751-L775
train
gem/oq-engine
openquake/hazardlib/sourceconverter.py
SourceConverter.convert_characteristicFaultSource
def convert_characteristicFaultSource(self, node): """ Convert the given node into a characteristic fault object. :param node: a node with tag areaGeometry :returns: a :class:`openquake.hazardlib.source.CharacteristicFaultSource` instance """ char = source.CharacteristicFaultSource( source_id=node['id'], name=node['name'], tectonic_region_type=node.attrib.get('tectonicRegion'), mfd=self.convert_mfdist(node), surface=self.convert_surfaces(node.surface), rake=~node.rake, temporal_occurrence_model=self.get_tom(node)) return char
python
def convert_characteristicFaultSource(self, node): """ Convert the given node into a characteristic fault object. :param node: a node with tag areaGeometry :returns: a :class:`openquake.hazardlib.source.CharacteristicFaultSource` instance """ char = source.CharacteristicFaultSource( source_id=node['id'], name=node['name'], tectonic_region_type=node.attrib.get('tectonicRegion'), mfd=self.convert_mfdist(node), surface=self.convert_surfaces(node.surface), rake=~node.rake, temporal_occurrence_model=self.get_tom(node)) return char
[ "def", "convert_characteristicFaultSource", "(", "self", ",", "node", ")", ":", "char", "=", "source", ".", "CharacteristicFaultSource", "(", "source_id", "=", "node", "[", "'id'", "]", ",", "name", "=", "node", "[", "'name'", "]", ",", "tectonic_region_type",...
Convert the given node into a characteristic fault object. :param node: a node with tag areaGeometry :returns: a :class:`openquake.hazardlib.source.CharacteristicFaultSource` instance
[ "Convert", "the", "given", "node", "into", "a", "characteristic", "fault", "object", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L777-L795
train
gem/oq-engine
openquake/hazardlib/sourceconverter.py
SourceConverter.convert_nonParametricSeismicSource
def convert_nonParametricSeismicSource(self, node): """ Convert the given node into a non parametric source object. :param node: a node with tag areaGeometry :returns: a :class:`openquake.hazardlib.source.NonParametricSeismicSource` instance """ trt = node.attrib.get('tectonicRegion') rup_pmf_data = [] rups_weights = None if 'rup_weights' in node.attrib: tmp = node.attrib.get('rup_weights') rups_weights = numpy.array([float(s) for s in tmp.split()]) for i, rupnode in enumerate(node): probs = pmf.PMF(valid.pmf(rupnode['probs_occur'])) rup = RuptureConverter.convert_node(self, rupnode) rup.tectonic_region_type = trt rup.weight = None if rups_weights is None else rups_weights[i] rup_pmf_data.append((rup, probs)) nps = source.NonParametricSeismicSource( node['id'], node['name'], trt, rup_pmf_data) nps.splittable = 'rup_weights' not in node.attrib return nps
python
def convert_nonParametricSeismicSource(self, node): """ Convert the given node into a non parametric source object. :param node: a node with tag areaGeometry :returns: a :class:`openquake.hazardlib.source.NonParametricSeismicSource` instance """ trt = node.attrib.get('tectonicRegion') rup_pmf_data = [] rups_weights = None if 'rup_weights' in node.attrib: tmp = node.attrib.get('rup_weights') rups_weights = numpy.array([float(s) for s in tmp.split()]) for i, rupnode in enumerate(node): probs = pmf.PMF(valid.pmf(rupnode['probs_occur'])) rup = RuptureConverter.convert_node(self, rupnode) rup.tectonic_region_type = trt rup.weight = None if rups_weights is None else rups_weights[i] rup_pmf_data.append((rup, probs)) nps = source.NonParametricSeismicSource( node['id'], node['name'], trt, rup_pmf_data) nps.splittable = 'rup_weights' not in node.attrib return nps
[ "def", "convert_nonParametricSeismicSource", "(", "self", ",", "node", ")", ":", "trt", "=", "node", ".", "attrib", ".", "get", "(", "'tectonicRegion'", ")", "rup_pmf_data", "=", "[", "]", "rups_weights", "=", "None", "if", "'rup_weights'", "in", "node", "."...
Convert the given node into a non parametric source object. :param node: a node with tag areaGeometry :returns: a :class:`openquake.hazardlib.source.NonParametricSeismicSource` instance
[ "Convert", "the", "given", "node", "into", "a", "non", "parametric", "source", "object", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L797-L822
train
gem/oq-engine
openquake/hazardlib/sourceconverter.py
SourceConverter.convert_sourceGroup
def convert_sourceGroup(self, node): """ Convert the given node into a SourceGroup object. :param node: a node with tag sourceGroup :returns: a :class:`SourceGroup` instance """ trt = node['tectonicRegion'] srcs_weights = node.attrib.get('srcs_weights') grp_attrs = {k: v for k, v in node.attrib.items() if k not in ('name', 'src_interdep', 'rup_interdep', 'srcs_weights')} sg = SourceGroup(trt, min_mag=self.minimum_magnitude) sg.temporal_occurrence_model = self.get_tom(node) sg.name = node.attrib.get('name') # Set attributes related to occurrence sg.src_interdep = node.attrib.get('src_interdep', 'indep') sg.rup_interdep = node.attrib.get('rup_interdep', 'indep') sg.grp_probability = node.attrib.get('grp_probability') # Set the cluster attribute sg.cluster = node.attrib.get('cluster') == 'true' # Filter admitted cases # 1. The source group is a cluster. In this case the cluster must have # the attributes required to define its occurrence in time. if sg.cluster: msg = 'A cluster group requires the definition of a temporal' msg += ' occurrence model' assert 'tom' in node.attrib, msg if isinstance(tom, PoissonTOM): assert hasattr(sg, 'occurrence_rate') # for src_node in node: if self.source_id and self.source_id != src_node['id']: continue # filter by source_id src = self.convert_node(src_node) # transmit the group attributes to the underlying source for attr, value in grp_attrs.items(): if attr == 'tectonicRegion': src_trt = src_node.get('tectonicRegion') if src_trt and src_trt != trt: with context(self.fname, src_node): raise ValueError('Found %s, expected %s' % (src_node['tectonicRegion'], trt)) src.tectonic_region_type = trt elif attr == 'grp_probability': pass # do not transmit else: # transmit as it is setattr(src, attr, node[attr]) sg.update(src) if srcs_weights is not None: if len(node) and len(srcs_weights) != len(node): raise ValueError( 'There are %d srcs_weights but %d source(s) in %s' % (len(srcs_weights), len(node), self.fname)) for src, sw in zip(sg, 
srcs_weights): src.mutex_weight = sw # check that, when the cluster option is set, the group has a temporal # occurrence model properly defined if sg.cluster and not hasattr(sg, 'temporal_occurrence_model'): msg = 'The Source Group is a cluster but does not have a ' msg += 'temporal occurrence model' raise ValueError(msg) return sg
python
def convert_sourceGroup(self, node): """ Convert the given node into a SourceGroup object. :param node: a node with tag sourceGroup :returns: a :class:`SourceGroup` instance """ trt = node['tectonicRegion'] srcs_weights = node.attrib.get('srcs_weights') grp_attrs = {k: v for k, v in node.attrib.items() if k not in ('name', 'src_interdep', 'rup_interdep', 'srcs_weights')} sg = SourceGroup(trt, min_mag=self.minimum_magnitude) sg.temporal_occurrence_model = self.get_tom(node) sg.name = node.attrib.get('name') # Set attributes related to occurrence sg.src_interdep = node.attrib.get('src_interdep', 'indep') sg.rup_interdep = node.attrib.get('rup_interdep', 'indep') sg.grp_probability = node.attrib.get('grp_probability') # Set the cluster attribute sg.cluster = node.attrib.get('cluster') == 'true' # Filter admitted cases # 1. The source group is a cluster. In this case the cluster must have # the attributes required to define its occurrence in time. if sg.cluster: msg = 'A cluster group requires the definition of a temporal' msg += ' occurrence model' assert 'tom' in node.attrib, msg if isinstance(tom, PoissonTOM): assert hasattr(sg, 'occurrence_rate') # for src_node in node: if self.source_id and self.source_id != src_node['id']: continue # filter by source_id src = self.convert_node(src_node) # transmit the group attributes to the underlying source for attr, value in grp_attrs.items(): if attr == 'tectonicRegion': src_trt = src_node.get('tectonicRegion') if src_trt and src_trt != trt: with context(self.fname, src_node): raise ValueError('Found %s, expected %s' % (src_node['tectonicRegion'], trt)) src.tectonic_region_type = trt elif attr == 'grp_probability': pass # do not transmit else: # transmit as it is setattr(src, attr, node[attr]) sg.update(src) if srcs_weights is not None: if len(node) and len(srcs_weights) != len(node): raise ValueError( 'There are %d srcs_weights but %d source(s) in %s' % (len(srcs_weights), len(node), self.fname)) for src, sw in zip(sg, 
srcs_weights): src.mutex_weight = sw # check that, when the cluster option is set, the group has a temporal # occurrence model properly defined if sg.cluster and not hasattr(sg, 'temporal_occurrence_model'): msg = 'The Source Group is a cluster but does not have a ' msg += 'temporal occurrence model' raise ValueError(msg) return sg
[ "def", "convert_sourceGroup", "(", "self", ",", "node", ")", ":", "trt", "=", "node", "[", "'tectonicRegion'", "]", "srcs_weights", "=", "node", ".", "attrib", ".", "get", "(", "'srcs_weights'", ")", "grp_attrs", "=", "{", "k", ":", "v", "for", "k", ",...
Convert the given node into a SourceGroup object. :param node: a node with tag sourceGroup :returns: a :class:`SourceGroup` instance
[ "Convert", "the", "given", "node", "into", "a", "SourceGroup", "object", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L827-L891
train
gem/oq-engine
openquake/hmtk/faults/tectonic_regionalisation.py
_check_list_weights
def _check_list_weights(parameter, name): ''' Checks that the weights in a list of tuples sums to 1.0 ''' if not isinstance(parameter, list): raise ValueError('%s must be formatted with a list of tuples' % name) weight = np.sum([val[1] for val in parameter]) if fabs(weight - 1.) > 1E-8: raise ValueError('%s weights do not sum to 1.0!' % name) return parameter
python
def _check_list_weights(parameter, name): ''' Checks that the weights in a list of tuples sums to 1.0 ''' if not isinstance(parameter, list): raise ValueError('%s must be formatted with a list of tuples' % name) weight = np.sum([val[1] for val in parameter]) if fabs(weight - 1.) > 1E-8: raise ValueError('%s weights do not sum to 1.0!' % name) return parameter
[ "def", "_check_list_weights", "(", "parameter", ",", "name", ")", ":", "if", "not", "isinstance", "(", "parameter", ",", "list", ")", ":", "raise", "ValueError", "(", "'%s must be formatted with a list of tuples'", "%", "name", ")", "weight", "=", "np", ".", "...
Checks that the weights in a list of tuples sums to 1.0
[ "Checks", "that", "the", "weights", "in", "a", "list", "of", "tuples", "sums", "to", "1", ".", "0" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/tectonic_regionalisation.py#L63-L72
train
gem/oq-engine
openquake/hmtk/faults/active_fault_model.py
mtkActiveFaultModel.build_fault_model
def build_fault_model(self, collapse=False, rendered_msr=WC1994(), mfd_config=None): ''' Constructs a full fault model with epistemic uncertainty by enumerating all the possible recurrence models of each fault as separate faults, with the recurrence rates multiplied by the corresponding weights. :param bool collapse: Determines whether or not to collapse the branches :param rendered_msr: If the option is taken to collapse the branches then a recurrence model for rendering must be defined :param list/dict mfd_config: Universal list or dictionay of configuration parameters for the magnitude frequency distribution - will overwrite whatever is previously defined for the fault! ''' self.source_model = mtkSourceModel(self.id, self.name) for fault in self.faults: fault.generate_recurrence_models(collapse, config=mfd_config, rendered_msr=rendered_msr) src_model, src_weight = fault.generate_fault_source_model() for iloc, model in enumerate(src_model): new_model = deepcopy(model) new_model.id = str(model.id) + '_%g' % (iloc + 1) new_model.mfd.occurrence_rates = \ (np.array(new_model.mfd.occurrence_rates) * src_weight[iloc]).tolist() self.source_model.sources.append(new_model)
python
def build_fault_model(self, collapse=False, rendered_msr=WC1994(), mfd_config=None): ''' Constructs a full fault model with epistemic uncertainty by enumerating all the possible recurrence models of each fault as separate faults, with the recurrence rates multiplied by the corresponding weights. :param bool collapse: Determines whether or not to collapse the branches :param rendered_msr: If the option is taken to collapse the branches then a recurrence model for rendering must be defined :param list/dict mfd_config: Universal list or dictionay of configuration parameters for the magnitude frequency distribution - will overwrite whatever is previously defined for the fault! ''' self.source_model = mtkSourceModel(self.id, self.name) for fault in self.faults: fault.generate_recurrence_models(collapse, config=mfd_config, rendered_msr=rendered_msr) src_model, src_weight = fault.generate_fault_source_model() for iloc, model in enumerate(src_model): new_model = deepcopy(model) new_model.id = str(model.id) + '_%g' % (iloc + 1) new_model.mfd.occurrence_rates = \ (np.array(new_model.mfd.occurrence_rates) * src_weight[iloc]).tolist() self.source_model.sources.append(new_model)
[ "def", "build_fault_model", "(", "self", ",", "collapse", "=", "False", ",", "rendered_msr", "=", "WC1994", "(", ")", ",", "mfd_config", "=", "None", ")", ":", "self", ".", "source_model", "=", "mtkSourceModel", "(", "self", ".", "id", ",", "self", ".", ...
Constructs a full fault model with epistemic uncertainty by enumerating all the possible recurrence models of each fault as separate faults, with the recurrence rates multiplied by the corresponding weights. :param bool collapse: Determines whether or not to collapse the branches :param rendered_msr: If the option is taken to collapse the branches then a recurrence model for rendering must be defined :param list/dict mfd_config: Universal list or dictionay of configuration parameters for the magnitude frequency distribution - will overwrite whatever is previously defined for the fault!
[ "Constructs", "a", "full", "fault", "model", "with", "epistemic", "uncertainty", "by", "enumerating", "all", "the", "possible", "recurrence", "models", "of", "each", "fault", "as", "separate", "faults", "with", "the", "recurrence", "rates", "multiplied", "by", "...
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/active_fault_model.py#L94-L125
train
gem/oq-engine
openquake/hmtk/parsers/catalogue/gcmt_ndk_parser.py
ParseNDKtoGCMT.read_file
def read_file(self, start_year=None, end_year=None, use_centroid=None): """ Reads the file """ raw_data = getlines(self.filename) num_lines = len(raw_data) if ((float(num_lines) / 5.) - float(num_lines / 5)) > 1E-9: raise IOError('GCMT represented by 5 lines - number in file not' ' a multiple of 5!') self.catalogue.number_gcmts = num_lines // 5 self.catalogue.gcmts = [None] * self.catalogue.number_gcmts # Pre-allocates list id0 = 0 print('Parsing catalogue ...') for iloc in range(0, self.catalogue.number_gcmts): self.catalogue.gcmts[iloc] = self.read_ndk_event(raw_data, id0) id0 += 5 print('complete. Contains %s moment tensors' % self.catalogue.get_number_tensors()) if not start_year: min_years = [] min_years = [cent.centroid.date.year for cent in self.catalogue.gcmts] self.catalogue.start_year = np.min(min_years) if not end_year: max_years = [] max_years = [cent.centroid.date.year for cent in self.catalogue.gcmts] self.catalogue.end_year = np.max(max_years) self.to_hmtk(use_centroid) return self.catalogue
python
def read_file(self, start_year=None, end_year=None, use_centroid=None): """ Reads the file """ raw_data = getlines(self.filename) num_lines = len(raw_data) if ((float(num_lines) / 5.) - float(num_lines / 5)) > 1E-9: raise IOError('GCMT represented by 5 lines - number in file not' ' a multiple of 5!') self.catalogue.number_gcmts = num_lines // 5 self.catalogue.gcmts = [None] * self.catalogue.number_gcmts # Pre-allocates list id0 = 0 print('Parsing catalogue ...') for iloc in range(0, self.catalogue.number_gcmts): self.catalogue.gcmts[iloc] = self.read_ndk_event(raw_data, id0) id0 += 5 print('complete. Contains %s moment tensors' % self.catalogue.get_number_tensors()) if not start_year: min_years = [] min_years = [cent.centroid.date.year for cent in self.catalogue.gcmts] self.catalogue.start_year = np.min(min_years) if not end_year: max_years = [] max_years = [cent.centroid.date.year for cent in self.catalogue.gcmts] self.catalogue.end_year = np.max(max_years) self.to_hmtk(use_centroid) return self.catalogue
[ "def", "read_file", "(", "self", ",", "start_year", "=", "None", ",", "end_year", "=", "None", ",", "use_centroid", "=", "None", ")", ":", "raw_data", "=", "getlines", "(", "self", ".", "filename", ")", "num_lines", "=", "len", "(", "raw_data", ")", "i...
Reads the file
[ "Reads", "the", "file" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/parsers/catalogue/gcmt_ndk_parser.py#L145-L176
train
gem/oq-engine
openquake/hmtk/parsers/catalogue/gcmt_ndk_parser.py
ParseNDKtoGCMT.read_ndk_event
def read_ndk_event(self, raw_data, id0): """ Reads a 5-line batch of data into a set of GCMTs """ gcmt = GCMTEvent() # Get hypocentre ndkstring = raw_data[id0].rstrip('\n') gcmt.hypocentre = self._read_hypocentre_from_ndk_string(ndkstring) # GCMT metadata ndkstring = raw_data[id0 + 1].rstrip('\n') gcmt = self._get_metadata_from_ndk_string(gcmt, ndkstring) # Get Centroid ndkstring = raw_data[id0 + 2].rstrip('\n') gcmt.centroid = self._read_centroid_from_ndk_string(ndkstring, gcmt.hypocentre) # Get Moment Tensor ndkstring = raw_data[id0 + 3].rstrip('\n') gcmt.moment_tensor = self._get_moment_tensor_from_ndk_string(ndkstring) # Get principal axes ndkstring = raw_data[id0 + 4].rstrip('\n') gcmt.principal_axes = self._get_principal_axes_from_ndk_string( ndkstring[3:48], exponent=gcmt.moment_tensor.exponent) # Get Nodal Planes gcmt.nodal_planes = self._get_nodal_planes_from_ndk_string( ndkstring[57:]) # Get Moment and Magnitude gcmt.moment, gcmt.version, gcmt.magnitude = \ self._get_moment_from_ndk_string( ndkstring, gcmt.moment_tensor.exponent) return gcmt
python
def read_ndk_event(self, raw_data, id0): """ Reads a 5-line batch of data into a set of GCMTs """ gcmt = GCMTEvent() # Get hypocentre ndkstring = raw_data[id0].rstrip('\n') gcmt.hypocentre = self._read_hypocentre_from_ndk_string(ndkstring) # GCMT metadata ndkstring = raw_data[id0 + 1].rstrip('\n') gcmt = self._get_metadata_from_ndk_string(gcmt, ndkstring) # Get Centroid ndkstring = raw_data[id0 + 2].rstrip('\n') gcmt.centroid = self._read_centroid_from_ndk_string(ndkstring, gcmt.hypocentre) # Get Moment Tensor ndkstring = raw_data[id0 + 3].rstrip('\n') gcmt.moment_tensor = self._get_moment_tensor_from_ndk_string(ndkstring) # Get principal axes ndkstring = raw_data[id0 + 4].rstrip('\n') gcmt.principal_axes = self._get_principal_axes_from_ndk_string( ndkstring[3:48], exponent=gcmt.moment_tensor.exponent) # Get Nodal Planes gcmt.nodal_planes = self._get_nodal_planes_from_ndk_string( ndkstring[57:]) # Get Moment and Magnitude gcmt.moment, gcmt.version, gcmt.magnitude = \ self._get_moment_from_ndk_string( ndkstring, gcmt.moment_tensor.exponent) return gcmt
[ "def", "read_ndk_event", "(", "self", ",", "raw_data", ",", "id0", ")", ":", "gcmt", "=", "GCMTEvent", "(", ")", "ndkstring", "=", "raw_data", "[", "id0", "]", ".", "rstrip", "(", "'\\n'", ")", "gcmt", ".", "hypocentre", "=", "self", ".", "_read_hypoce...
Reads a 5-line batch of data into a set of GCMTs
[ "Reads", "a", "5", "-", "line", "batch", "of", "data", "into", "a", "set", "of", "GCMTs" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/parsers/catalogue/gcmt_ndk_parser.py#L178-L214
train
gem/oq-engine
openquake/hmtk/parsers/catalogue/gcmt_ndk_parser.py
ParseNDKtoGCMT._read_hypocentre_from_ndk_string
def _read_hypocentre_from_ndk_string(self, linestring): """ Reads the hypocentre data from the ndk string to return an instance of the GCMTHypocentre class """ hypo = GCMTHypocentre() hypo.source = linestring[0:4] hypo.date = _read_date_from_string(linestring[5:15]) hypo.time = _read_time_from_string(linestring[16:26]) hypo.latitude = float(linestring[27:33]) hypo.longitude = float(linestring[34:41]) hypo.depth = float(linestring[42:47]) magnitudes = [float(x) for x in linestring[48:55].split(' ')] if magnitudes[0] > 0.: hypo.m_b = magnitudes[0] if magnitudes[1] > 0.: hypo.m_s = magnitudes[1] hypo.location = linestring[56:] return hypo
python
def _read_hypocentre_from_ndk_string(self, linestring): """ Reads the hypocentre data from the ndk string to return an instance of the GCMTHypocentre class """ hypo = GCMTHypocentre() hypo.source = linestring[0:4] hypo.date = _read_date_from_string(linestring[5:15]) hypo.time = _read_time_from_string(linestring[16:26]) hypo.latitude = float(linestring[27:33]) hypo.longitude = float(linestring[34:41]) hypo.depth = float(linestring[42:47]) magnitudes = [float(x) for x in linestring[48:55].split(' ')] if magnitudes[0] > 0.: hypo.m_b = magnitudes[0] if magnitudes[1] > 0.: hypo.m_s = magnitudes[1] hypo.location = linestring[56:] return hypo
[ "def", "_read_hypocentre_from_ndk_string", "(", "self", ",", "linestring", ")", ":", "hypo", "=", "GCMTHypocentre", "(", ")", "hypo", ".", "source", "=", "linestring", "[", "0", ":", "4", "]", "hypo", ".", "date", "=", "_read_date_from_string", "(", "linestr...
Reads the hypocentre data from the ndk string to return an instance of the GCMTHypocentre class
[ "Reads", "the", "hypocentre", "data", "from", "the", "ndk", "string", "to", "return", "an", "instance", "of", "the", "GCMTHypocentre", "class" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/parsers/catalogue/gcmt_ndk_parser.py#L315-L333
train
gem/oq-engine
openquake/hmtk/parsers/catalogue/gcmt_ndk_parser.py
ParseNDKtoGCMT._get_metadata_from_ndk_string
def _get_metadata_from_ndk_string(self, gcmt, ndk_string): """ Reads the GCMT metadata from line 2 of the ndk batch """ gcmt.identifier = ndk_string[:16] inversion_data = re.split('[A-Z:]+', ndk_string[17:61]) gcmt.metadata['BODY'] = [float(x) for x in inversion_data[1].split()] gcmt.metadata['SURFACE'] = [ float(x) for x in inversion_data[2].split()] gcmt.metadata['MANTLE'] = [float(x) for x in inversion_data[3].split()] further_meta = re.split('[: ]+', ndk_string[62:]) gcmt.metadata['CMT'] = int(further_meta[1]) gcmt.metadata['FUNCTION'] = {'TYPE': further_meta[2], 'DURATION': float(further_meta[3])} return gcmt
python
def _get_metadata_from_ndk_string(self, gcmt, ndk_string): """ Reads the GCMT metadata from line 2 of the ndk batch """ gcmt.identifier = ndk_string[:16] inversion_data = re.split('[A-Z:]+', ndk_string[17:61]) gcmt.metadata['BODY'] = [float(x) for x in inversion_data[1].split()] gcmt.metadata['SURFACE'] = [ float(x) for x in inversion_data[2].split()] gcmt.metadata['MANTLE'] = [float(x) for x in inversion_data[3].split()] further_meta = re.split('[: ]+', ndk_string[62:]) gcmt.metadata['CMT'] = int(further_meta[1]) gcmt.metadata['FUNCTION'] = {'TYPE': further_meta[2], 'DURATION': float(further_meta[3])} return gcmt
[ "def", "_get_metadata_from_ndk_string", "(", "self", ",", "gcmt", ",", "ndk_string", ")", ":", "gcmt", ".", "identifier", "=", "ndk_string", "[", ":", "16", "]", "inversion_data", "=", "re", ".", "split", "(", "'[A-Z:]+'", ",", "ndk_string", "[", "17", ":"...
Reads the GCMT metadata from line 2 of the ndk batch
[ "Reads", "the", "GCMT", "metadata", "from", "line", "2", "of", "the", "ndk", "batch" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/parsers/catalogue/gcmt_ndk_parser.py#L335-L349
train
gem/oq-engine
openquake/hmtk/parsers/catalogue/gcmt_ndk_parser.py
ParseNDKtoGCMT._get_principal_axes_from_ndk_string
def _get_principal_axes_from_ndk_string(self, ndk_string, exponent): """ Gets the principal axes from the ndk string and returns an instance of the GCMTPrincipalAxes class """ axes = GCMTPrincipalAxes() # The principal axes is defined in characters 3:48 of the 5th line exponent = 10. ** exponent axes.t_axis = {'eigenvalue': exponent * float(ndk_string[0:8]), 'plunge': float(ndk_string[8:11]), 'azimuth': float(ndk_string[11:15])} axes.b_axis = {'eigenvalue': exponent * float(ndk_string[15:23]), 'plunge': float(ndk_string[23:26]), 'azimuth': float(ndk_string[26:30])} axes.p_axis = {'eigenvalue': exponent * float(ndk_string[30:38]), 'plunge': float(ndk_string[38:41]), 'azimuth': float(ndk_string[41:])} return axes
python
def _get_principal_axes_from_ndk_string(self, ndk_string, exponent): """ Gets the principal axes from the ndk string and returns an instance of the GCMTPrincipalAxes class """ axes = GCMTPrincipalAxes() # The principal axes is defined in characters 3:48 of the 5th line exponent = 10. ** exponent axes.t_axis = {'eigenvalue': exponent * float(ndk_string[0:8]), 'plunge': float(ndk_string[8:11]), 'azimuth': float(ndk_string[11:15])} axes.b_axis = {'eigenvalue': exponent * float(ndk_string[15:23]), 'plunge': float(ndk_string[23:26]), 'azimuth': float(ndk_string[26:30])} axes.p_axis = {'eigenvalue': exponent * float(ndk_string[30:38]), 'plunge': float(ndk_string[38:41]), 'azimuth': float(ndk_string[41:])} return axes
[ "def", "_get_principal_axes_from_ndk_string", "(", "self", ",", "ndk_string", ",", "exponent", ")", ":", "axes", "=", "GCMTPrincipalAxes", "(", ")", "exponent", "=", "10.", "**", "exponent", "axes", ".", "t_axis", "=", "{", "'eigenvalue'", ":", "exponent", "*"...
Gets the principal axes from the ndk string and returns an instance of the GCMTPrincipalAxes class
[ "Gets", "the", "principal", "axes", "from", "the", "ndk", "string", "and", "returns", "an", "instance", "of", "the", "GCMTPrincipalAxes", "class" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/parsers/catalogue/gcmt_ndk_parser.py#L394-L413
train
gem/oq-engine
openquake/hmtk/parsers/catalogue/gcmt_ndk_parser.py
ParseNDKtoGCMT._get_moment_from_ndk_string
def _get_moment_from_ndk_string(self, ndk_string, exponent): """ Returns the moment and the moment magnitude """ moment = float(ndk_string[49:56]) * (10. ** exponent) version = ndk_string[:3] magnitude = utils.moment_magnitude_scalar(moment) return moment, version, magnitude
python
def _get_moment_from_ndk_string(self, ndk_string, exponent): """ Returns the moment and the moment magnitude """ moment = float(ndk_string[49:56]) * (10. ** exponent) version = ndk_string[:3] magnitude = utils.moment_magnitude_scalar(moment) return moment, version, magnitude
[ "def", "_get_moment_from_ndk_string", "(", "self", ",", "ndk_string", ",", "exponent", ")", ":", "moment", "=", "float", "(", "ndk_string", "[", "49", ":", "56", "]", ")", "*", "(", "10.", "**", "exponent", ")", "version", "=", "ndk_string", "[", ":", ...
Returns the moment and the moment magnitude
[ "Returns", "the", "moment", "and", "the", "moment", "magnitude" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/parsers/catalogue/gcmt_ndk_parser.py#L430-L437
train
gem/oq-engine
openquake/hmtk/sources/source_model.py
mtkSourceModel.serialise_to_nrml
def serialise_to_nrml(self, filename, use_defaults=False): ''' Writes the source model to a nrml source model file given by the filename :param str filename: Path to output file :param bool use_defaults: Boolean to indicate whether to use default values (True) or not. If set to False, ValueErrors will be raised when an essential attribute is missing. ''' source_model = self.convert_to_oqhazardlib( PoissonTOM(1.0), 2.0, 2.0, 10.0, use_defaults=use_defaults) write_source_model(filename, source_model, name=self.name)
python
def serialise_to_nrml(self, filename, use_defaults=False): ''' Writes the source model to a nrml source model file given by the filename :param str filename: Path to output file :param bool use_defaults: Boolean to indicate whether to use default values (True) or not. If set to False, ValueErrors will be raised when an essential attribute is missing. ''' source_model = self.convert_to_oqhazardlib( PoissonTOM(1.0), 2.0, 2.0, 10.0, use_defaults=use_defaults) write_source_model(filename, source_model, name=self.name)
[ "def", "serialise_to_nrml", "(", "self", ",", "filename", ",", "use_defaults", "=", "False", ")", ":", "source_model", "=", "self", ".", "convert_to_oqhazardlib", "(", "PoissonTOM", "(", "1.0", ")", ",", "2.0", ",", "2.0", ",", "10.0", ",", "use_defaults", ...
Writes the source model to a nrml source model file given by the filename :param str filename: Path to output file :param bool use_defaults: Boolean to indicate whether to use default values (True) or not. If set to False, ValueErrors will be raised when an essential attribute is missing.
[ "Writes", "the", "source", "model", "to", "a", "nrml", "source", "model", "file", "given", "by", "the", "filename" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/sources/source_model.py#L95-L110
train
gem/oq-engine
openquake/hmtk/seismicity/occurrence/utils.py
input_checks
def input_checks(catalogue, config, completeness): """ Performs a basic set of input checks on the data """ if isinstance(completeness, np.ndarray): # completeness table is a numpy array (i.e. [year, magnitude]) if np.shape(completeness)[1] != 2: raise ValueError('Completeness Table incorrectly configured') else: cmag = completeness[:, 1] ctime = completeness[:, 0] elif isinstance(completeness, float): # Completeness corresponds to a single magnitude (i.e. applies to # the entire catalogue) cmag = np.array(completeness) ctime = np.array(np.min(catalogue.data['year'])) else: # Everything is valid - i.e. no completeness magnitude cmag = np.array(np.min(catalogue.data['magnitude'])) ctime = np.array(np.min(catalogue.data['year'])) # Set reference magnitude - if not in config then default to M = 0. if not config: # use default reference magnitude of 0.0 and magnitude interval of 0.1 ref_mag = 0.0 dmag = 0.1 config = {'reference_magnitude': None, 'magnitude_interval': 0.1} else: if (not 'reference_magnitude' in config.keys()) or\ (config['reference_magnitude'] is None): ref_mag = 0. config['reference_magnitude'] = None else: ref_mag = config['reference_magnitude'] if (not 'magnitude_interval' in config.keys()) or \ not config['magnitude_interval']: dmag = 0.1 else: dmag = config['magnitude_interval'] return cmag, ctime, ref_mag, dmag, config
python
def input_checks(catalogue, config, completeness): """ Performs a basic set of input checks on the data """ if isinstance(completeness, np.ndarray): # completeness table is a numpy array (i.e. [year, magnitude]) if np.shape(completeness)[1] != 2: raise ValueError('Completeness Table incorrectly configured') else: cmag = completeness[:, 1] ctime = completeness[:, 0] elif isinstance(completeness, float): # Completeness corresponds to a single magnitude (i.e. applies to # the entire catalogue) cmag = np.array(completeness) ctime = np.array(np.min(catalogue.data['year'])) else: # Everything is valid - i.e. no completeness magnitude cmag = np.array(np.min(catalogue.data['magnitude'])) ctime = np.array(np.min(catalogue.data['year'])) # Set reference magnitude - if not in config then default to M = 0. if not config: # use default reference magnitude of 0.0 and magnitude interval of 0.1 ref_mag = 0.0 dmag = 0.1 config = {'reference_magnitude': None, 'magnitude_interval': 0.1} else: if (not 'reference_magnitude' in config.keys()) or\ (config['reference_magnitude'] is None): ref_mag = 0. config['reference_magnitude'] = None else: ref_mag = config['reference_magnitude'] if (not 'magnitude_interval' in config.keys()) or \ not config['magnitude_interval']: dmag = 0.1 else: dmag = config['magnitude_interval'] return cmag, ctime, ref_mag, dmag, config
[ "def", "input_checks", "(", "catalogue", ",", "config", ",", "completeness", ")", ":", "if", "isinstance", "(", "completeness", ",", "np", ".", "ndarray", ")", ":", "if", "np", ".", "shape", "(", "completeness", ")", "[", "1", "]", "!=", "2", ":", "r...
Performs a basic set of input checks on the data
[ "Performs", "a", "basic", "set", "of", "input", "checks", "on", "the", "data" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/occurrence/utils.py#L100-L142
train
gem/oq-engine
openquake/hmtk/seismicity/occurrence/utils.py
generate_trunc_gr_magnitudes
def generate_trunc_gr_magnitudes(bval, mmin, mmax, nsamples): ''' Generate a random list of magnitudes distributed according to a truncated Gutenberg-Richter model :param float bval: b-value :param float mmin: Minimum Magnitude :param float mmax: Maximum Magnitude :param int nsamples: Number of samples :returns: Vector of generated magnitudes ''' sampler = np.random.uniform(0., 1., nsamples) beta = bval * np.log(10.) return (-1. / beta) * ( np.log(1. - sampler * (1 - np.exp(-beta * (mmax - mmin))))) + mmin
python
def generate_trunc_gr_magnitudes(bval, mmin, mmax, nsamples): ''' Generate a random list of magnitudes distributed according to a truncated Gutenberg-Richter model :param float bval: b-value :param float mmin: Minimum Magnitude :param float mmax: Maximum Magnitude :param int nsamples: Number of samples :returns: Vector of generated magnitudes ''' sampler = np.random.uniform(0., 1., nsamples) beta = bval * np.log(10.) return (-1. / beta) * ( np.log(1. - sampler * (1 - np.exp(-beta * (mmax - mmin))))) + mmin
[ "def", "generate_trunc_gr_magnitudes", "(", "bval", ",", "mmin", ",", "mmax", ",", "nsamples", ")", ":", "sampler", "=", "np", ".", "random", ".", "uniform", "(", "0.", ",", "1.", ",", "nsamples", ")", "beta", "=", "bval", "*", "np", ".", "log", "(",...
Generate a random list of magnitudes distributed according to a truncated Gutenberg-Richter model :param float bval: b-value :param float mmin: Minimum Magnitude :param float mmax: Maximum Magnitude :param int nsamples: Number of samples :returns: Vector of generated magnitudes
[ "Generate", "a", "random", "list", "of", "magnitudes", "distributed", "according", "to", "a", "truncated", "Gutenberg", "-", "Richter", "model" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/occurrence/utils.py#L145-L165
train
gem/oq-engine
openquake/hmtk/seismicity/occurrence/utils.py
generate_synthetic_magnitudes
def generate_synthetic_magnitudes(aval, bval, mmin, mmax, nyears): ''' Generates a synthetic catalogue for a specified number of years, with magnitudes distributed according to a truncated Gutenberg-Richter distribution :param float aval: a-value :param float bval: b-value :param float mmin: Minimum Magnitude :param float mmax: Maximum Magnitude :param int nyears: Number of years :returns: Synthetic catalogue (dict) with year and magnitude attributes ''' nsamples = int(np.round(nyears * (10. ** (aval - bval * mmin)), 0)) year = np.random.randint(0, nyears, nsamples) # Get magnitudes mags = generate_trunc_gr_magnitudes(bval, mmin, mmax, nsamples) return {'magnitude': mags, 'year': np.sort(year)}
python
def generate_synthetic_magnitudes(aval, bval, mmin, mmax, nyears): ''' Generates a synthetic catalogue for a specified number of years, with magnitudes distributed according to a truncated Gutenberg-Richter distribution :param float aval: a-value :param float bval: b-value :param float mmin: Minimum Magnitude :param float mmax: Maximum Magnitude :param int nyears: Number of years :returns: Synthetic catalogue (dict) with year and magnitude attributes ''' nsamples = int(np.round(nyears * (10. ** (aval - bval * mmin)), 0)) year = np.random.randint(0, nyears, nsamples) # Get magnitudes mags = generate_trunc_gr_magnitudes(bval, mmin, mmax, nsamples) return {'magnitude': mags, 'year': np.sort(year)}
[ "def", "generate_synthetic_magnitudes", "(", "aval", ",", "bval", ",", "mmin", ",", "mmax", ",", "nyears", ")", ":", "nsamples", "=", "int", "(", "np", ".", "round", "(", "nyears", "*", "(", "10.", "**", "(", "aval", "-", "bval", "*", "mmin", ")", ...
Generates a synthetic catalogue for a specified number of years, with magnitudes distributed according to a truncated Gutenberg-Richter distribution :param float aval: a-value :param float bval: b-value :param float mmin: Minimum Magnitude :param float mmax: Maximum Magnitude :param int nyears: Number of years :returns: Synthetic catalogue (dict) with year and magnitude attributes
[ "Generates", "a", "synthetic", "catalogue", "for", "a", "specified", "number", "of", "years", "with", "magnitudes", "distributed", "according", "to", "a", "truncated", "Gutenberg", "-", "Richter", "distribution" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/occurrence/utils.py#L168-L191
train
gem/oq-engine
openquake/hmtk/seismicity/occurrence/utils.py
downsample_completeness_table
def downsample_completeness_table(comp_table, sample_width=0.1, mmax=None): """ Re-sample the completeness table to a specified sample_width """ new_comp_table = [] for i in range(comp_table.shape[0] - 1): mvals = np.arange(comp_table[i, 1], comp_table[i + 1, 1], d_m) # FIXME: d_m is undefined! new_comp_table.extend([[comp_table[i, 0], mval] for mval in mvals]) # If mmax > last magnitude in completeness table if mmax and (mmax > comp_table[-1, 1]): new_comp_table.extend( [[comp_table[-1, 0], mval] for mval in np.arange(comp_table[-1, 1], mmax + d_m, d_m)]) return np.array(new_comp_table)
python
def downsample_completeness_table(comp_table, sample_width=0.1, mmax=None): """ Re-sample the completeness table to a specified sample_width """ new_comp_table = [] for i in range(comp_table.shape[0] - 1): mvals = np.arange(comp_table[i, 1], comp_table[i + 1, 1], d_m) # FIXME: d_m is undefined! new_comp_table.extend([[comp_table[i, 0], mval] for mval in mvals]) # If mmax > last magnitude in completeness table if mmax and (mmax > comp_table[-1, 1]): new_comp_table.extend( [[comp_table[-1, 0], mval] for mval in np.arange(comp_table[-1, 1], mmax + d_m, d_m)]) return np.array(new_comp_table)
[ "def", "downsample_completeness_table", "(", "comp_table", ",", "sample_width", "=", "0.1", ",", "mmax", "=", "None", ")", ":", "new_comp_table", "=", "[", "]", "for", "i", "in", "range", "(", "comp_table", ".", "shape", "[", "0", "]", "-", "1", ")", "...
Re-sample the completeness table to a specified sample_width
[ "Re", "-", "sample", "the", "completeness", "table", "to", "a", "specified", "sample_width" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/occurrence/utils.py#L194-L208
train
gem/oq-engine
openquake/commands/reset.py
reset
def reset(yes): """ Remove all the datastores and the database of the current user """ ok = yes or confirm('Do you really want to destroy all your data? (y/n) ') if not ok: return dbpath = os.path.realpath(os.path.expanduser(config.dbserver.file)) # user must be able to access and write the databse file to remove it if os.path.isfile(dbpath) and os.access(dbpath, os.W_OK): if dbserver.get_status() == 'running': if config.dbserver.multi_user: sys.exit('The oq dbserver must be stopped ' 'before proceeding') else: pid = logs.dbcmd('getpid') os.kill(pid, signal.SIGTERM) time.sleep(.5) # give time to stop assert dbserver.get_status() == 'not-running' print('dbserver stopped') try: os.remove(dbpath) print('Removed %s' % dbpath) except OSError as exc: print(exc, file=sys.stderr) # fast way of removing everything purge_all(fast=True)
python
def reset(yes): """ Remove all the datastores and the database of the current user """ ok = yes or confirm('Do you really want to destroy all your data? (y/n) ') if not ok: return dbpath = os.path.realpath(os.path.expanduser(config.dbserver.file)) # user must be able to access and write the databse file to remove it if os.path.isfile(dbpath) and os.access(dbpath, os.W_OK): if dbserver.get_status() == 'running': if config.dbserver.multi_user: sys.exit('The oq dbserver must be stopped ' 'before proceeding') else: pid = logs.dbcmd('getpid') os.kill(pid, signal.SIGTERM) time.sleep(.5) # give time to stop assert dbserver.get_status() == 'not-running' print('dbserver stopped') try: os.remove(dbpath) print('Removed %s' % dbpath) except OSError as exc: print(exc, file=sys.stderr) # fast way of removing everything purge_all(fast=True)
[ "def", "reset", "(", "yes", ")", ":", "ok", "=", "yes", "or", "confirm", "(", "'Do you really want to destroy all your data? (y/n) '", ")", "if", "not", "ok", ":", "return", "dbpath", "=", "os", ".", "path", ".", "realpath", "(", "os", ".", "path", ".", ...
Remove all the datastores and the database of the current user
[ "Remove", "all", "the", "datastores", "and", "the", "database", "of", "the", "current", "user" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/reset.py#L30-L59
train
gem/oq-engine
openquake/server/db/actions.py
set_status
def set_status(db, job_id, status): """ Set the status 'created', 'executing', 'complete', 'failed', 'aborted' consistently with `is_running`. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: ID of the current job :param status: status string """ assert status in ( 'created', 'submitted', 'executing', 'complete', 'aborted', 'failed' ), status if status in ('created', 'complete', 'failed', 'aborted'): is_running = 0 else: # 'executing' is_running = 1 if job_id < 0: rows = db('SELECT id FROM job ORDER BY id DESC LIMIT ?x', -job_id) if not rows: return 0 job_id = rows[-1].id cursor = db('UPDATE job SET status=?x, is_running=?x WHERE id=?x', status, is_running, job_id) return cursor.rowcount
python
def set_status(db, job_id, status): """ Set the status 'created', 'executing', 'complete', 'failed', 'aborted' consistently with `is_running`. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: ID of the current job :param status: status string """ assert status in ( 'created', 'submitted', 'executing', 'complete', 'aborted', 'failed' ), status if status in ('created', 'complete', 'failed', 'aborted'): is_running = 0 else: # 'executing' is_running = 1 if job_id < 0: rows = db('SELECT id FROM job ORDER BY id DESC LIMIT ?x', -job_id) if not rows: return 0 job_id = rows[-1].id cursor = db('UPDATE job SET status=?x, is_running=?x WHERE id=?x', status, is_running, job_id) return cursor.rowcount
[ "def", "set_status", "(", "db", ",", "job_id", ",", "status", ")", ":", "assert", "status", "in", "(", "'created'", ",", "'submitted'", ",", "'executing'", ",", "'complete'", ",", "'aborted'", ",", "'failed'", ")", ",", "status", "if", "status", "in", "(...
Set the status 'created', 'executing', 'complete', 'failed', 'aborted' consistently with `is_running`. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: ID of the current job :param status: status string
[ "Set", "the", "status", "created", "executing", "complete", "failed", "aborted", "consistently", "with", "is_running", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/actions.py#L62-L85
train
gem/oq-engine
openquake/server/db/actions.py
create_job
def create_job(db, datadir): """ Create job for the given user, return it. :param db: a :class:`openquake.server.dbapi.Db` instance :param datadir: Data directory of the user who owns/started this job. :returns: the job ID """ calc_id = get_calc_id(db, datadir) + 1 job = dict(id=calc_id, is_running=1, description='just created', user_name='openquake', calculation_mode='to be set', ds_calc_dir=os.path.join('%s/calc_%s' % (datadir, calc_id))) return db('INSERT INTO job (?S) VALUES (?X)', job.keys(), job.values()).lastrowid
python
def create_job(db, datadir): """ Create job for the given user, return it. :param db: a :class:`openquake.server.dbapi.Db` instance :param datadir: Data directory of the user who owns/started this job. :returns: the job ID """ calc_id = get_calc_id(db, datadir) + 1 job = dict(id=calc_id, is_running=1, description='just created', user_name='openquake', calculation_mode='to be set', ds_calc_dir=os.path.join('%s/calc_%s' % (datadir, calc_id))) return db('INSERT INTO job (?S) VALUES (?X)', job.keys(), job.values()).lastrowid
[ "def", "create_job", "(", "db", ",", "datadir", ")", ":", "calc_id", "=", "get_calc_id", "(", "db", ",", "datadir", ")", "+", "1", "job", "=", "dict", "(", "id", "=", "calc_id", ",", "is_running", "=", "1", ",", "description", "=", "'just created'", ...
Create job for the given user, return it. :param db: a :class:`openquake.server.dbapi.Db` instance :param datadir: Data directory of the user who owns/started this job. :returns: the job ID
[ "Create", "job", "for", "the", "given", "user", "return", "it", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/actions.py#L88-L104
train
gem/oq-engine
openquake/server/db/actions.py
import_job
def import_job(db, calc_id, calc_mode, description, user_name, status, hc_id, datadir): """ Insert a calculation inside the database, if calc_id is not taken """ job = dict(id=calc_id, calculation_mode=calc_mode, description=description, user_name=user_name, hazard_calculation_id=hc_id, is_running=0, status=status, ds_calc_dir=os.path.join('%s/calc_%s' % (datadir, calc_id))) db('INSERT INTO job (?S) VALUES (?X)', job.keys(), job.values())
python
def import_job(db, calc_id, calc_mode, description, user_name, status, hc_id, datadir): """ Insert a calculation inside the database, if calc_id is not taken """ job = dict(id=calc_id, calculation_mode=calc_mode, description=description, user_name=user_name, hazard_calculation_id=hc_id, is_running=0, status=status, ds_calc_dir=os.path.join('%s/calc_%s' % (datadir, calc_id))) db('INSERT INTO job (?S) VALUES (?X)', job.keys(), job.values())
[ "def", "import_job", "(", "db", ",", "calc_id", ",", "calc_mode", ",", "description", ",", "user_name", ",", "status", ",", "hc_id", ",", "datadir", ")", ":", "job", "=", "dict", "(", "id", "=", "calc_id", ",", "calculation_mode", "=", "calc_mode", ",", ...
Insert a calculation inside the database, if calc_id is not taken
[ "Insert", "a", "calculation", "inside", "the", "database", "if", "calc_id", "is", "not", "taken" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/actions.py#L107-L120
train
gem/oq-engine
openquake/server/db/actions.py
get_job
def get_job(db, job_id, username=None): """ If job_id is negative, return the last calculation of the current user, otherwise returns the job_id unchanged. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID (can be negative and can be nonexisting) :param username: an user name (if None, ignore it) :returns: a valid job or None if the original job ID was invalid """ job_id = int(job_id) if job_id > 0: dic = dict(id=job_id) if username: dic['user_name'] = username try: return db('SELECT * FROM job WHERE ?A', dic, one=True) except NotFound: return # else negative job_id if username: joblist = db('SELECT * FROM job WHERE user_name=?x ' 'ORDER BY id DESC LIMIT ?x', username, -job_id) else: joblist = db('SELECT * FROM job ORDER BY id DESC LIMIT ?x', -job_id) if not joblist: # no jobs return else: return joblist[-1]
python
def get_job(db, job_id, username=None): """ If job_id is negative, return the last calculation of the current user, otherwise returns the job_id unchanged. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID (can be negative and can be nonexisting) :param username: an user name (if None, ignore it) :returns: a valid job or None if the original job ID was invalid """ job_id = int(job_id) if job_id > 0: dic = dict(id=job_id) if username: dic['user_name'] = username try: return db('SELECT * FROM job WHERE ?A', dic, one=True) except NotFound: return # else negative job_id if username: joblist = db('SELECT * FROM job WHERE user_name=?x ' 'ORDER BY id DESC LIMIT ?x', username, -job_id) else: joblist = db('SELECT * FROM job ORDER BY id DESC LIMIT ?x', -job_id) if not joblist: # no jobs return else: return joblist[-1]
[ "def", "get_job", "(", "db", ",", "job_id", ",", "username", "=", "None", ")", ":", "job_id", "=", "int", "(", "job_id", ")", "if", "job_id", ">", "0", ":", "dic", "=", "dict", "(", "id", "=", "job_id", ")", "if", "username", ":", "dic", "[", "...
If job_id is negative, return the last calculation of the current user, otherwise returns the job_id unchanged. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID (can be negative and can be nonexisting) :param username: an user name (if None, ignore it) :returns: a valid job or None if the original job ID was invalid
[ "If", "job_id", "is", "negative", "return", "the", "last", "calculation", "of", "the", "current", "user", "otherwise", "returns", "the", "job_id", "unchanged", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/actions.py#L133-L163
train
gem/oq-engine
openquake/server/db/actions.py
get_calc_id
def get_calc_id(db, datadir, job_id=None): """ Return the latest calc_id by looking both at the datastore and the database. :param db: a :class:`openquake.server.dbapi.Db` instance :param datadir: the directory containing the datastores :param job_id: a job ID; if None, returns the latest job ID """ calcs = datastore.get_calc_ids(datadir) calc_id = 0 if not calcs else calcs[-1] if job_id is None: try: job_id = db('SELECT seq FROM sqlite_sequence WHERE name="job"', scalar=True) except NotFound: job_id = 0 return max(calc_id, job_id)
python
def get_calc_id(db, datadir, job_id=None): """ Return the latest calc_id by looking both at the datastore and the database. :param db: a :class:`openquake.server.dbapi.Db` instance :param datadir: the directory containing the datastores :param job_id: a job ID; if None, returns the latest job ID """ calcs = datastore.get_calc_ids(datadir) calc_id = 0 if not calcs else calcs[-1] if job_id is None: try: job_id = db('SELECT seq FROM sqlite_sequence WHERE name="job"', scalar=True) except NotFound: job_id = 0 return max(calc_id, job_id)
[ "def", "get_calc_id", "(", "db", ",", "datadir", ",", "job_id", "=", "None", ")", ":", "calcs", "=", "datastore", ".", "get_calc_ids", "(", "datadir", ")", "calc_id", "=", "0", "if", "not", "calcs", "else", "calcs", "[", "-", "1", "]", "if", "job_id"...
Return the latest calc_id by looking both at the datastore and the database. :param db: a :class:`openquake.server.dbapi.Db` instance :param datadir: the directory containing the datastores :param job_id: a job ID; if None, returns the latest job ID
[ "Return", "the", "latest", "calc_id", "by", "looking", "both", "at", "the", "datastore", "and", "the", "database", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/actions.py#L166-L183
train
gem/oq-engine
openquake/server/db/actions.py
list_calculations
def list_calculations(db, job_type, user_name): """ Yield a summary of past calculations. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_type: 'hazard' or 'risk' :param user_name: an user name """ jobs = db('SELECT *, %s FROM job WHERE user_name=?x ' 'AND job_type=?x ORDER BY start_time' % JOB_TYPE, user_name, job_type) out = [] if len(jobs) == 0: out.append('None') else: out.append('job_id | status | start_time | ' ' description') for job in jobs: descr = job.description start_time = job.start_time out.append('%6d | %10s | %s | %s' % ( job.id, job.status, start_time, descr)) return out
python
def list_calculations(db, job_type, user_name): """ Yield a summary of past calculations. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_type: 'hazard' or 'risk' :param user_name: an user name """ jobs = db('SELECT *, %s FROM job WHERE user_name=?x ' 'AND job_type=?x ORDER BY start_time' % JOB_TYPE, user_name, job_type) out = [] if len(jobs) == 0: out.append('None') else: out.append('job_id | status | start_time | ' ' description') for job in jobs: descr = job.description start_time = job.start_time out.append('%6d | %10s | %s | %s' % ( job.id, job.status, start_time, descr)) return out
[ "def", "list_calculations", "(", "db", ",", "job_type", ",", "user_name", ")", ":", "jobs", "=", "db", "(", "'SELECT *, %s FROM job WHERE user_name=?x '", "'AND job_type=?x ORDER BY start_time'", "%", "JOB_TYPE", ",", "user_name", ",", "job_type", ")", "out", "=", "...
Yield a summary of past calculations. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_type: 'hazard' or 'risk' :param user_name: an user name
[ "Yield", "a", "summary", "of", "past", "calculations", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/actions.py#L186-L208
train
gem/oq-engine
openquake/server/db/actions.py
create_outputs
def create_outputs(db, job_id, keysize, ds_size): """ Build a correspondence between the outputs in the datastore and the ones in the database. Also, update the datastore size in the job table. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: ID of the current job :param keysize: a list of pairs (key, size_mb) :param ds_size: total datastore size in MB """ rows = [(job_id, DISPLAY_NAME.get(key, key), key, size) for key, size in keysize] db('UPDATE job SET size_mb=?x WHERE id=?x', ds_size, job_id) db.insert('output', 'oq_job_id display_name ds_key size_mb'.split(), rows)
python
def create_outputs(db, job_id, keysize, ds_size): """ Build a correspondence between the outputs in the datastore and the ones in the database. Also, update the datastore size in the job table. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: ID of the current job :param keysize: a list of pairs (key, size_mb) :param ds_size: total datastore size in MB """ rows = [(job_id, DISPLAY_NAME.get(key, key), key, size) for key, size in keysize] db('UPDATE job SET size_mb=?x WHERE id=?x', ds_size, job_id) db.insert('output', 'oq_job_id display_name ds_key size_mb'.split(), rows)
[ "def", "create_outputs", "(", "db", ",", "job_id", ",", "keysize", ",", "ds_size", ")", ":", "rows", "=", "[", "(", "job_id", ",", "DISPLAY_NAME", ".", "get", "(", "key", ",", "key", ")", ",", "key", ",", "size", ")", "for", "key", ",", "size", "...
Build a correspondence between the outputs in the datastore and the ones in the database. Also, update the datastore size in the job table. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: ID of the current job :param keysize: a list of pairs (key, size_mb) :param ds_size: total datastore size in MB
[ "Build", "a", "correspondence", "between", "the", "outputs", "in", "the", "datastore", "and", "the", "ones", "in", "the", "database", ".", "Also", "update", "the", "datastore", "size", "in", "the", "job", "table", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/actions.py#L299-L312
train
gem/oq-engine
openquake/server/db/actions.py
finish
def finish(db, job_id, status): """ Set the job columns `is_running`, `status`, and `stop_time`. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: ID of the current job :param status: a string such as 'successful' or 'failed' """ db('UPDATE job SET ?D WHERE id=?x', dict(is_running=False, status=status, stop_time=datetime.utcnow()), job_id)
python
def finish(db, job_id, status): """ Set the job columns `is_running`, `status`, and `stop_time`. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: ID of the current job :param status: a string such as 'successful' or 'failed' """ db('UPDATE job SET ?D WHERE id=?x', dict(is_running=False, status=status, stop_time=datetime.utcnow()), job_id)
[ "def", "finish", "(", "db", ",", "job_id", ",", "status", ")", ":", "db", "(", "'UPDATE job SET ?D WHERE id=?x'", ",", "dict", "(", "is_running", "=", "False", ",", "status", "=", "status", ",", "stop_time", "=", "datetime", ".", "utcnow", "(", ")", ")",...
Set the job columns `is_running`, `status`, and `stop_time`. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: ID of the current job :param status: a string such as 'successful' or 'failed'
[ "Set", "the", "job", "columns", "is_running", "status", "and", "stop_time", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/actions.py#L315-L328
train
gem/oq-engine
openquake/server/db/actions.py
del_calc
def del_calc(db, job_id, user): """ Delete a calculation and all associated outputs, if possible. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: job ID, can be an integer or a string :param user: username :returns: None if everything went fine or an error message """ job_id = int(job_id) dependent = db( 'SELECT id FROM job WHERE hazard_calculation_id=?x', job_id) if dependent: return {"error": 'Cannot delete calculation %d: there ' 'are calculations ' 'dependent from it: %s' % (job_id, [j.id for j in dependent])} try: owner, path = db('SELECT user_name, ds_calc_dir FROM job WHERE id=?x', job_id, one=True) except NotFound: return {"error": 'Cannot delete calculation %d:' ' ID does not exist' % job_id} deleted = db('DELETE FROM job WHERE id=?x AND user_name=?x', job_id, user).rowcount if not deleted: return {"error": 'Cannot delete calculation %d: it belongs to ' '%s and you are %s' % (job_id, owner, user)} # try to delete datastore and associated file # path has typically the form /home/user/oqdata/calc_XXX fname = path + ".hdf5" try: os.remove(fname) except OSError as exc: # permission error return {"error": 'Could not remove %s: %s' % (fname, exc)} return {"success": fname}
python
def del_calc(db, job_id, user): """ Delete a calculation and all associated outputs, if possible. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: job ID, can be an integer or a string :param user: username :returns: None if everything went fine or an error message """ job_id = int(job_id) dependent = db( 'SELECT id FROM job WHERE hazard_calculation_id=?x', job_id) if dependent: return {"error": 'Cannot delete calculation %d: there ' 'are calculations ' 'dependent from it: %s' % (job_id, [j.id for j in dependent])} try: owner, path = db('SELECT user_name, ds_calc_dir FROM job WHERE id=?x', job_id, one=True) except NotFound: return {"error": 'Cannot delete calculation %d:' ' ID does not exist' % job_id} deleted = db('DELETE FROM job WHERE id=?x AND user_name=?x', job_id, user).rowcount if not deleted: return {"error": 'Cannot delete calculation %d: it belongs to ' '%s and you are %s' % (job_id, owner, user)} # try to delete datastore and associated file # path has typically the form /home/user/oqdata/calc_XXX fname = path + ".hdf5" try: os.remove(fname) except OSError as exc: # permission error return {"error": 'Could not remove %s: %s' % (fname, exc)} return {"success": fname}
[ "def", "del_calc", "(", "db", ",", "job_id", ",", "user", ")", ":", "job_id", "=", "int", "(", "job_id", ")", "dependent", "=", "db", "(", "'SELECT id FROM job WHERE hazard_calculation_id=?x'", ",", "job_id", ")", "if", "dependent", ":", "return", "{", "\"er...
Delete a calculation and all associated outputs, if possible. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: job ID, can be an integer or a string :param user: username :returns: None if everything went fine or an error message
[ "Delete", "a", "calculation", "and", "all", "associated", "outputs", "if", "possible", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/actions.py#L331-L367
train
gem/oq-engine
openquake/server/db/actions.py
log
def log(db, job_id, timestamp, level, process, message): """ Write a log record in the database. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID :param timestamp: timestamp to store in the log record :param level: logging level to store in the log record :param process: process ID to store in the log record :param message: message to store in the log record """ db('INSERT INTO log (job_id, timestamp, level, process, message) ' 'VALUES (?X)', (job_id, timestamp, level, process, message))
python
def log(db, job_id, timestamp, level, process, message): """ Write a log record in the database. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID :param timestamp: timestamp to store in the log record :param level: logging level to store in the log record :param process: process ID to store in the log record :param message: message to store in the log record """ db('INSERT INTO log (job_id, timestamp, level, process, message) ' 'VALUES (?X)', (job_id, timestamp, level, process, message))
[ "def", "log", "(", "db", ",", "job_id", ",", "timestamp", ",", "level", ",", "process", ",", "message", ")", ":", "db", "(", "'INSERT INTO log (job_id, timestamp, level, process, message) '", "'VALUES (?X)'", ",", "(", "job_id", ",", "timestamp", ",", "level", "...
Write a log record in the database. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID :param timestamp: timestamp to store in the log record :param level: logging level to store in the log record :param process: process ID to store in the log record :param message: message to store in the log record
[ "Write", "a", "log", "record", "in", "the", "database", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/actions.py#L370-L388
train
gem/oq-engine
openquake/server/db/actions.py
get_log
def get_log(db, job_id): """ Extract the logs as a big string :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID """ logs = db('SELECT * FROM log WHERE job_id=?x ORDER BY id', job_id) out = [] for log in logs: time = str(log.timestamp)[:-4] # strip decimals out.append('[%s #%d %s] %s' % (time, job_id, log.level, log.message)) return out
python
def get_log(db, job_id): """ Extract the logs as a big string :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID """ logs = db('SELECT * FROM log WHERE job_id=?x ORDER BY id', job_id) out = [] for log in logs: time = str(log.timestamp)[:-4] # strip decimals out.append('[%s #%d %s] %s' % (time, job_id, log.level, log.message)) return out
[ "def", "get_log", "(", "db", ",", "job_id", ")", ":", "logs", "=", "db", "(", "'SELECT * FROM log WHERE job_id=?x ORDER BY id'", ",", "job_id", ")", "out", "=", "[", "]", "for", "log", "in", "logs", ":", "time", "=", "str", "(", "log", ".", "timestamp", ...
Extract the logs as a big string :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID
[ "Extract", "the", "logs", "as", "a", "big", "string" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/actions.py#L391-L403
train
gem/oq-engine
openquake/server/db/actions.py
save_performance
def save_performance(db, job_id, records): """ Save in the database the performance information about the given job. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID :param records: a list of performance records """ # NB: rec['counts'] is a numpy.uint64 which is not automatically converted # into an int in Ubuntu 12.04, so we convert it manually below rows = [(job_id, rec['operation'], rec['time_sec'], rec['memory_mb'], int(rec['counts'])) for rec in records] db.insert('performance', 'job_id operation time_sec memory_mb counts'.split(), rows)
python
def save_performance(db, job_id, records): """ Save in the database the performance information about the given job. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID :param records: a list of performance records """ # NB: rec['counts'] is a numpy.uint64 which is not automatically converted # into an int in Ubuntu 12.04, so we convert it manually below rows = [(job_id, rec['operation'], rec['time_sec'], rec['memory_mb'], int(rec['counts'])) for rec in records] db.insert('performance', 'job_id operation time_sec memory_mb counts'.split(), rows)
[ "def", "save_performance", "(", "db", ",", "job_id", ",", "records", ")", ":", "rows", "=", "[", "(", "job_id", ",", "rec", "[", "'operation'", "]", ",", "rec", "[", "'time_sec'", "]", ",", "rec", "[", "'memory_mb'", "]", ",", "int", "(", "rec", "[...
Save in the database the performance information about the given job. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID :param records: a list of performance records
[ "Save", "in", "the", "database", "the", "performance", "information", "about", "the", "given", "job", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/actions.py#L417-L430
train
gem/oq-engine
openquake/server/db/actions.py
get_traceback
def get_traceback(db, job_id): """ Return the traceback of the given calculation as a list of lines. The list is empty if the calculation was successful. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID """ # strange: understand why the filter returns two lines or zero lines log = db("SELECT * FROM log WHERE job_id=?x AND level='CRITICAL'", job_id) if not log: return [] response_data = log[-1].message.splitlines() return response_data
python
def get_traceback(db, job_id): """ Return the traceback of the given calculation as a list of lines. The list is empty if the calculation was successful. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID """ # strange: understand why the filter returns two lines or zero lines log = db("SELECT * FROM log WHERE job_id=?x AND level='CRITICAL'", job_id) if not log: return [] response_data = log[-1].message.splitlines() return response_data
[ "def", "get_traceback", "(", "db", ",", "job_id", ")", ":", "log", "=", "db", "(", "\"SELECT * FROM log WHERE job_id=?x AND level='CRITICAL'\"", ",", "job_id", ")", "if", "not", "log", ":", "return", "[", "]", "response_data", "=", "log", "[", "-", "1", "]",...
Return the traceback of the given calculation as a list of lines. The list is empty if the calculation was successful. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID
[ "Return", "the", "traceback", "of", "the", "given", "calculation", "as", "a", "list", "of", "lines", ".", "The", "list", "is", "empty", "if", "the", "calculation", "was", "successful", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/actions.py#L626-L642
train