sentence1 stringlengths 52 3.87M | sentence2 stringlengths 1 47.2k | label stringclasses 1 value |
|---|---|---|
def check_storage_controllers():
"""
Check the status of the storage controllers
Skip this check, if --noController is set
"""
if ctrl_flag:
ctrl = walk_data(sess, oid_ctrl, helper)[0]
for x, data in enumerate(ctrl, 1):
ctrl_summary_output, ctrl_long_output = state_summary(data, 'Controller %d' % x, normal_state, helper)
add_output(ctrl_summary_output, ctrl_long_output, helper) | Check the status of the storage controllers
Skip this check, if --noController is set | entailment |
def check_temperature_sensors():
"""
Check all temperature sensors of the server
All sensors with the value or threshold is -99 or 0 are ignored
"""
# walk all temperature sensor values and thresholds
env_temp = walk_data(sess, oid_env_temp, helper)[0]
env_temp_thresh = walk_data(sess, oid_env_temp_thres, helper)[0]
env_temp_zipped = zip(env_temp, env_temp_thresh)
for x, data in enumerate(env_temp_zipped, 1):
# skip the check if -99 or 0 is in the value or threshold, because these data we can not use
if '-99' not in data and '0' not in data:
#check if the value is over the treshold
if int(data[0]) > int(data[1]):
helper.add_summary('Temperature at sensor %d above threshold (%s / %s)' % (x, data[0], data[1]))
helper.status(critical)
# always add the sensor to the output
helper.add_long_output('Temperature %d: %s Celsius (threshold: %s Celsius)' % (x, data[0], data[1]))
# for the first sensor (envirnoment temperature, we add performance data)
if x == 1:
helper.add_metric("Environment Temperature", data[0], '', ":" + data[1], "", "", "Celsius") | Check all temperature sensors of the server
All sensors with the value or threshold is -99 or 0 are ignored | entailment |
def check_ps():
"""
Check if the power supplies are ok, and we have the configured amount
The check is skipped if --ps=0
"""
if int(input_pwr_sply) != 0:
ps_data = walk_data(sess, oid_ps, helper)[0]
ps_ok_count = 0
for x, state in enumerate(ps_data, 1):
# human readable status
hr_status = normal_state[int(state)]
if hr_status != "ok":
# if the power supply is ok, we will set a critical status and add it to the summary
helper.add_summary('Power supply status %s: %s' % (x, hr_status))
helper.status(critical)
else:
# if everything is ok, we increase the ps_ok_count
ps_ok_count += 1
# we always want to see the status in the long output
helper.add_long_output('Power supply status %s: %s' % (x, hr_status))
helper.add_long_output('')
if int(input_pwr_sply) != ps_ok_count:
# if the confiugred power supplies and power supplies in ok state are different
helper.add_summary('%s power supplies expected - %s power supplies ok ' % (input_pwr_sply, ps_ok_count))
helper.status(critical) | Check if the power supplies are ok, and we have the configured amount
The check is skipped if --ps=0 | entailment |
def check_power_redundancy():
"""
Check if the power supplies are redundant
The check is skipped if --noPowerRedundancy is set
"""
# skip the check if --noPowerRedundancy is set
if power_redundancy_flag:
# walk the data
ps_redundant_data = walk_data(sess, oid_ps_redundant, helper)[0]
for x, state in enumerate(ps_redundant_data, 1):
# human readable status
hr_status = ps_redundant_state[int(state)]
if hr_status != "redundant":
# if the power supply is not redundant, we will set a critical status and add it to the summary
helper.add_summary('Power supply %s: %s' % (x, hr_status))
helper.status(critical)
# we always want to see the redundancy status in the long output
helper.add_long_output('Power supply %s: %s' % (x, hr_status))
helper.add_long_output('') | Check if the power supplies are redundant
The check is skipped if --noPowerRedundancy is set | entailment |
def check_fan(input_fan):
"""
check the fans
"""
# get a list of all fans
fan_data = walk_data(sess, oid_fan, helper)[0]
fan_count = 0
summary_output = ''
long_output = ''
for x, fan in enumerate(fan_data, 1):
fan = int(fan)
if normal_state[fan] == 'ok':
# if the fan is ok, we increase the fan_count varaible
fan_count += 1
# we always want to the the status in the long output
long_output += 'Fan %d: %s.\n' % (x, normal_state[fan])
# check we have the correct amount ok fans in OK state, otherwise set status to critical and print the fan in the summary
if int(fan_count) != int(input_fan):
summary_output += '%s fan(s) expected - %s fan(s) ok. ' % (input_fan, fan_count)
helper.status(critical)
return (summary_output, long_output) | check the fans | entailment |
def get_snmp_from_host1(self):
"""
Get SNMP values from 1st host.
"""
response = self.snmp1.get_oids(ps1_oid, ps2_oid, fan1_oid, fan2_oid, bat_oid, temp_oid, activity_oid, logfill_oid)
self.ps1_value = states[int(response[0])]
self.ps2_value = states[int(response[1])]
self.fan1_value = states[int(response[2])]
self.fan2_value = states[int(response[3])]
self.bat_value = states[int(response[4])]
self.temp_value = states[int(response[5])]
self.activity_value1 = activity[int(response[6])]
self.logfill_value = str(response[7]) | Get SNMP values from 1st host. | entailment |
def get_snmp_from_host2(self):
"""
Get SNMP values from 2nd host.
"""
if not self.snmp2:
self.activity_value2 = None
else:
response = self.snmp2.get_oids(activity_oid)
self.activity_value2 = activity[int(response[0])] | Get SNMP values from 2nd host. | entailment |
def check(self):
"""
Evaluate health status from device parameters.
"""
try:
self.get_snmp_from_host1()
self.get_snmp_from_host2()
except (health_monitoring_plugins.SnmpException, TypeError, KeyError):
self.helper.status(unknown)
self.helper.add_summary("SNMP response incomplete or invalid")
return
self.helper.add_summary("Filter Status")
self.helper.add_long_output("Power Supply 1: %s" % self.ps1_value)
if self.ps1_value != "ok":
self.helper.status(critical)
self.helper.add_summary("Power Supply 1: %s" % self.ps1_value)
self.helper.add_long_output("Power Supply 2: %s" % self.ps2_value)
if self.ps2_value != "ok":
self.helper.status(critical)
self.helper.add_summary("Power Supply 2: %s" % self.ps2_value)
self.helper.add_long_output("Fan 1: %s" % self.fan1_value)
if self.fan1_value != "ok":
self.helper.status(critical)
self.helper.add_summary("Fan 1: %s" % self.fan1_value)
self.helper.add_long_output("Fan 2: %s" % self.fan2_value)
if self.fan2_value != "ok":
self.helper.status(critical)
self.helper.add_summary("Fan 2: %s" % self.fan2_value)
self.helper.add_long_output("Battery: %s" % self.bat_value)
if self.bat_value != "ok":
self.helper.status(critical)
self.helper.add_summary("Battery: %s" % self.bat_value)
self.helper.add_long_output("Temperature: %s" % self.temp_value)
if self.temp_value != "ok":
self.helper.status(critical)
self.helper.add_summary("Temperature: %s" % self.temp_value)
self.helper.add_metric(label='logfill',value=self.logfill_value, uom="%%")
self.helper.add_long_output("Fill Level internal log: %s%%" % self.logfill_value)
self.helper.add_long_output("Activity State: %s" % self.activity_value1)
if self.activity_value1 == "error":
self.helper.status(critical)
self.helper.add_summary("Activity State: %s" % self.activity_value1)
if self.activity_value2:
self.helper.add_long_output("Activity State 2: %s" % self.activity_value2)
if self.activity_value1 == "active" and self.activity_value2 == "active":
self.helper.status(critical)
self.helper.add_summary("Filter 1 and Filter 2 active!")
if self.activity_value1 == "standby" and self.activity_value2 == "standby":
self.helper.status(critical)
self.helper.add_summary("Filter 1 and Filter 2 standby!")
self.helper.check_all_metrics() | Evaluate health status from device parameters. | entailment |
def update_status(self, helper, status):
""" update the helper """
if status:
self.status(status[0])
# if the status is ok, add it to the long output
if status[0] == 0:
self.add_long_output(status[1])
# if the status is not ok, add it to the summary
else:
self.add_summary(status[1]) | update the helper | entailment |
def get_snmp_value(sess, helper, oid):
""" return a snmp value or exits the plugin with unknown"""
snmp_result = sess.get_oids(oid)[0]
if snmp_result is None:
helper.exit(summary="No response from device for oid " + oid, exit_code=unknown, perfdata='')
else:
return snmp_result | return a snmp value or exits the plugin with unknown | entailment |
def walk_snmp_values(sess, helper, oid, check):
""" return a snmp value or exits the plugin with unknown"""
try:
snmp_walk = sess.walk_oid(oid)
result_list = []
for x in range(len(snmp_walk)):
result_list.append(snmp_walk[x].val)
if result_list != []:
return result_list
else:
raise SnmpException("No content")
except SnmpException:
helper.exit(summary="No response from device for {} ({})".format(check, oid),
exit_code=unknown, perfdata='') | return a snmp value or exits the plugin with unknown | entailment |
def walk_oid(self, oid):
"""Get a list of SNMP varbinds in response to a walk for oid.
Each varbind in response list has a tag, iid, val and type attribute."""
var = netsnmp.Varbind(oid)
varlist = netsnmp.VarList(var)
data = self.walk(varlist)
if len(data) == 0:
raise SnmpException("SNMP walk response incomplete")
return varlist | Get a list of SNMP varbinds in response to a walk for oid.
Each varbind in response list has a tag, iid, val and type attribute. | entailment |
def run_scan():
"""
show all available partitions
"""
all_disks = walk_data(sess, oid_hrStorageDescr, helper)[0]
print "All available disks at: " + host
for disk in all_disks:
print "Disk: \t'" + disk + "'"
quit() | show all available partitions | entailment |
def partition_found(partition, description):
"""
returns True, if the partition (--partition) is in the description we received from the host
"""
# if we want to have a linux partition (/) we use the full path (startswith "/" would result in / /var /dev etc).
# if we start with something else, we use the startswith function
if "/" in partition:
use_fullcompare = True
else:
use_fullcompare = False
if use_fullcompare and (partition == description):
return True
elif not use_fullcompare and description.startswith(partition):
return True
else:
return False | returns True, if the partition (--partition) is in the description we received from the host | entailment |
def check_partition():
"""
check the defined partition
"""
all_index = walk_data(sess, oid_hrStorageIndex, helper)[0]
all_descriptions = walk_data(sess, oid_hrStorageDescr, helper)[0]
# we need the sucess flag for the error handling (partition found or not found)
sucess = False
# here we zip all index and descriptions to have a list like
# [('Physical memory', '1'), ('Virtual memory', '3'), ('/', '32'), ('/proc/xen', '33')]
zipped = zip(all_index, all_descriptions)
for partition in zipped:
index = partition[0]
description = partition[1]
if partition_found(disk, description):
# we found the partition
sucess = True
# receive all values we need
unit = float(get_data(sess, oid_hrStorageAllocationUnits + "." + index, helper))
size = float(get_data(sess, oid_hrStorageSize + "." + index, helper))
used = float(get_data(sess, oid_hrStorageUsed + "." + index, helper))
if size == 0 or used == 0:
# if the host return "0" as used or size, then we have a problem with the calculation (devision by zero)
helper.exit(summary="Received value 0 as StorageSize or StorageUsed: calculation error", exit_code=unknown, perfdata='')
# calculate the real size (size*unit) and convert the results to the target unit the user wants to see
used_result = convert_to_XX(calculate_real_size(used), unit, targetunit)
size_result = convert_to_XX(calculate_real_size(size), unit, targetunit)
# calculation of the used percentage
percent_used = used_result / size_result * 100
# we need a string and want only two decimals
used_string = str(float("{0:.2f}".format(used_result)))
size_string = str(float("{0:.2f}".format(size_result)))
percent_string = str(float("{0:.2f}".format(percent_used)))
if percent_used < 0 or percent_used > 100:
# just a validation that percent_used is not smaller then 0% or lager then 100%
helper.exit(summary="Calculation error - second counter overrun?", exit_code=unknown, perfdata='')
# show the summary
helper.add_summary("%s%% used (%s%s of %s%s) at '%s'" % (percent_string, used_string, targetunit, size_string, targetunit, description))
# add the metric in percent.
helper.add_metric(label='percent used',value=percent_string, min="0", max="100", uom="%")
else:
if not sucess:
# if the partition was not found in the data output, we return an error
helper.exit(summary="Partition '%s' not found" % disk, exit_code=unknown, perfdata='') | check the defined partition | entailment |
def check_sensors():
"""
collect and check all available sensors
"""
all_sensors = walk_data(sess, oid_description, helper)[0]
all_status = walk_data(sess, oid_status, helper)[0]
# here we zip all index and descriptions to have a list like
# [('Fan Sensor', '2'), ('Power Supply Sensor', '4')]
# we are doomed if the lists do not have the same length ... but that should never happen ... hopefully
zipped = zip(all_sensors, all_status)
for sensor in zipped:
description = sensor[0]
status = sensor[1]
# translate the value to human readable
try:
status_string = senor_status_table[status]
except KeyError:
# if we receive an invalid value, we don't want to crash...
helper.exit(summary="received an undefined value from device: " + status, exit_code=unknown, perfdata='')
# for each sensor the summary is added like: Fan Sensor: good
helper.add_summary("%s: %s" % (description, status_string))
# set the status
if status == "2":
helper.status(critical)
if status == "3":
helper.status(warning) | collect and check all available sensors | entailment |
def check_basic_battery_status(the_session, the_helper, the_snmp_value):
"""
OID .1.3.6.1.4.1.318.1.1.1.2.1.1.0
MIB Excerpt
The status of the UPS batteries. A batteryLow(3)
value indicates the UPS will be unable to sustain the
current load, and its services will be lost if power is
not restored. The amount of run time in reserve at the
time of low battery can be configured by the
upsAdvConfigLowBatteryRunTime.
Value List
unknown (1)
batteryNormal (2)
batteryLow (3)
batteryInFaultCondition (4)
"""
apc_battery_states = {
'1' : 'unknown',
'2' : 'batteryNormal',
'3' : 'batteryLow',
'4' : 'batteryInFaultCondition'
}
a_state = apc_battery_states.get(the_snmp_value, 'unknown')
if the_snmp_value == '2':
the_helper.add_status(pynag.Plugins.ok)
elif the_snmp_value == '3':
the_helper.add_status(pynag.Plugins.warning)
else:
the_helper.add_status(pynag.Plugins.critical)
the_helper.set_summary("UPS batteries state is {}".format(a_state)) | OID .1.3.6.1.4.1.318.1.1.1.2.1.1.0
MIB Excerpt
The status of the UPS batteries. A batteryLow(3)
value indicates the UPS will be unable to sustain the
current load, and its services will be lost if power is
not restored. The amount of run time in reserve at the
time of low battery can be configured by the
upsAdvConfigLowBatteryRunTime.
Value List
unknown (1)
batteryNormal (2)
batteryLow (3)
batteryInFaultCondition (4) | entailment |
def check_runtime_remaining(the_session, the_helper, the_snmp_value):
"""
OID .1.3.6.1.4.1.318.1.1.1.2.2.3.0
MIB excerpt
The UPS battery run time remaining before battery
exhaustion.
SNMP value is in TimeTicks aka hundredths of a second
"""
a_minute_value = calc_minutes_from_ticks(the_snmp_value)
the_helper.add_metric(
label=the_helper.options.type,
value=a_minute_value,
warn=the_helper.options.warning,
crit=the_helper.options.critical,
uom="Minutes")
the_helper.check_all_metrics()
the_helper.set_summary("Remaining runtime on battery is {} minutes".format(a_minute_value)) | OID .1.3.6.1.4.1.318.1.1.1.2.2.3.0
MIB excerpt
The UPS battery run time remaining before battery
exhaustion.
SNMP value is in TimeTicks aka hundredths of a second | entailment |
def check_battery_replace_indicator(the_session, the_helper, the_snmp_value):
"""
OID .1.3.6.1.4.1.318.1.1.1.2.2.4.0
MIB Excerpt
Indicates whether the UPS batteries need replacing.
Value List
noBatteryNeedsReplacing (1)
batteryNeedsReplacing (2)
"""
apc_states = {
'1' : 'Battery does not need to be replaced',
'2' : 'Battery needs to be replaced!'}
a_state = apc_states.get(the_snmp_value, "Unknown battery replacement state!")
if the_snmp_value == '1':
the_helper.add_status(pynag.Plugins.ok)
else:
the_helper.add_status(pynag.Plugins.critical)
the_helper.set_summary(a_state) | OID .1.3.6.1.4.1.318.1.1.1.2.2.4.0
MIB Excerpt
Indicates whether the UPS batteries need replacing.
Value List
noBatteryNeedsReplacing (1)
batteryNeedsReplacing (2) | entailment |
def check_environment_temperature(the_session, the_helper, the_snmp_value, the_unit=1):
"""
OID .1.3.6.1.4.1.318.1.1.10.2.3.2.1.4.1
MIB Excerpt
The current temperature reading from the probe displayed
in the units shown in the 'iemStatusProbeTempUnits' OID
(Celsius or Fahrenheit).
Description of unit OID
OID .1.3.6.1.4.1.318.1.1.10.2.3.2.1.5
The temperature scale used to display the temperature
thresholds of the probe, Celsius(1) or Fahrenheit(2).
This setting is based on the system preferences
configuration in the agent.
"""
a_snmp_unit = snmpSessionBaseClass.get_data(
the_session,
apc_oid_environment_temperature_unit,
the_helper)
snmp_units = {
'1' : 'C',
'2' : 'F'
}
a_unit = snmp_units.get(a_snmp_unit, 'UNKNOWN_UNIT')
the_helper.add_metric(
label=the_helper.options.type,
value=the_snmp_value,
warn=the_helper.options.warning,
crit=the_helper.options.critical,
uom=a_unit)
the_helper.check_all_metrics()
the_helper.set_summary("Current environmental temperature is {}{}".format(the_snmp_value, a_unit)) | OID .1.3.6.1.4.1.318.1.1.10.2.3.2.1.4.1
MIB Excerpt
The current temperature reading from the probe displayed
in the units shown in the 'iemStatusProbeTempUnits' OID
(Celsius or Fahrenheit).
Description of unit OID
OID .1.3.6.1.4.1.318.1.1.10.2.3.2.1.5
The temperature scale used to display the temperature
thresholds of the probe, Celsius(1) or Fahrenheit(2).
This setting is based on the system preferences
configuration in the agent. | entailment |
def check_typ(helper, typ):
"""
check if typ parameter is TCP or UDP
"""
if typ != "tcp" and typ != "udp":
helper.exit(summary="Type (-t) must be udp or tcp.", exit_code=unknown, perfdata='') | check if typ parameter is TCP or UDP | entailment |
def check_port(helper, port):
"""
check if the port parameter is really a port or "scan"
"""
try:
int(port)
except ValueError:
helper.exit(summary="Port (-p) must be a integer value.", exit_code=unknown, perfdata='') | check if the port parameter is really a port or "scan" | entailment |
def check_udp(helper, host, port, session):
"""
the check logic for UDP ports
"""
open_ports = walk_data(session, '.1.3.6.1.2.1.7.5.1.2', helper)[0] # the udpLocaLPort from UDP-MIB.mib (deprecated)
# here we show all open UDP ports
if scan:
print "All open UDP ports at host " + host
for port in open_ports:
print "UDP: \t" + port
quit()
if port in open_ports:
udp_status = "OPEN"
else:
udp_status = "CLOSED"
helper.status(critical)
return ("Current status for UDP port " + port + " is: " + udp_status) | the check logic for UDP ports | entailment |
def check_tcp(helper, host, port, warning_param, critical_param, session):
"""
the check logic for check TCP ports
"""
# from tcpConnState from TCP-MIB
tcp_translate = {
"1" : "closed",
"2" : "listen",
"3" : "synSent",
"4" : "synReceived",
"5" : "established",
"6" : "finWait1",
"7" : "finWait2",
"8" : "closeWait",
"9" : "lastAck",
"10": "closing",
"11": "timeWait",
"12": "deleteTCB"
}
# collect all open local ports
open_ports = walk_data(session, '.1.3.6.1.2.1.6.13.1.3', helper)[0] #tcpConnLocalPort from TCP-MIB (deprecated)
# collect all status information about the open ports
port_status = walk_data(session, '.1.3.6.1.2.1.6.13.1.1', helper)[0] #tcpConnState from TCP-MIB (deprecated)
# make a dict out of the two lists
port_and_status = dict(zip(open_ports, port_status))
# here we show all open TCP ports and it's status
if scan:
print "All open TCP ports: " + host
for port in open_ports:
tcp_status = port_and_status[port]
tcp_status = tcp_translate[tcp_status]
print "TCP: \t" + port + "\t Status: \t" + tcp_status
quit()
#here we have the real check logic for TCP ports
if port in open_ports:
# if the port is available in the list of open_ports, then extract the status
tcp_status = port_and_status[port]
# translate the status from the integer value to a human readable string
tcp_status = tcp_translate[tcp_status]
# now let's set the status according to the warning / critical "threshold" parameter
if tcp_status in warning_param:
helper.status(warning)
elif tcp_status in critical_param:
helper.status(critical)
else:
helper.status(ok)
else:
# if there is no value in the list => the port is closed for sure
tcp_status = "CLOSED"
helper.status(critical)
return ("Current status for TCP port " + port + " is: " + tcp_status) | the check logic for check TCP ports | entailment |
def to_json(obj):
"""Return a json string representing the python object obj."""
i = StringIO.StringIO()
w = Writer(i, encoding='UTF-8')
w.write_value(obj)
return i.getvalue() | Return a json string representing the python object obj. | entailment |
def get_data(self):
"Get SNMP values from host"
alarm_oids = [netsnmp.Varbind(alarms[alarm_id]['oid']) for alarm_id in self.models[self.modem_type]['alarms']]
metric_oids = [netsnmp.Varbind(metrics[metric_id]['oid']) for metric_id in self.models[self.modem_type]['metrics']]
response = self.snmp_session.get(netsnmp.VarList(*alarm_oids + metric_oids))
return (
response[0:len(alarm_oids)],
response[len(alarm_oids):]
) | Get SNMP values from host | entailment |
def process_alarms(self, snmp_data):
"Build list with active alarms"
self.active_alarms = []
for i in range(0, len(self.models[self.modem_type]['alarms'])):
if bool(int(snmp_data[i])) == True:
self.active_alarms.append(self.models[self.modem_type]['alarms'][i]) | Build list with active alarms | entailment |
def process_metrics(self, snmp_data):
"Build list with metrics"
self.metrics = {}
for i in range(0, len(snmp_data)):
metric_id = self.models[self.modem_type]['metrics'][i]
value = int(snmp_data[i])
self.metrics[metric_id] = value | Build list with metrics | entailment |
def convert_in_oid(service_name):
"""
calculate the correct OID for the service name
"""
s = service_name
# convert the service_name to ascci
service_ascii = [ord(c) for c in s]
# we need the length of the service name
length = str(len(s))
# make the oid
oid = base_oid + "." + length + "." + ".".join(str(x) for x in service_ascii)
return oid | calculate the correct OID for the service name | entailment |
def get_data(self):
"Return one SNMP response list for all status OIDs, and one list for all metric OIDs."
alarm_oids = [netsnmp.Varbind(status_mib[alarm_id]['oid']) for alarm_id in self.models[self.modem_type]['alarms']]
metric_oids = [netsnmp.Varbind(metric_mib[metric_id]['oid']) for metric_id in self.models[self.modem_type]['metrics']]
response = self.snmp_session.get(netsnmp.VarList(*alarm_oids + metric_oids))
return (
response[0:len(alarm_oids)],
response[len(alarm_oids):]
) | Return one SNMP response list for all status OIDs, and one list for all metric OIDs. | entailment |
def process_alarms(self, snmp_data):
"Build list with active alarms"
self.active_alarms = {}
for i in range(0, len(self.models[self.modem_type]['alarms'])):
mib_name = self.models[self.modem_type]['alarms'][i]
conv = status_mib[mib_name]['conv']
self.active_alarms[mib_name] = conv(snmp_data[i]) | Build list with active alarms | entailment |
def process_metrics(self, snmp_data):
"Build list with metrics"
self.metrics = {}
for i in range(0, len(snmp_data)):
mib_name = self.models[self.modem_type]['metrics'][i]
conv = metric_mib[mib_name]['conv']
self.metrics[mib_name] = conv(snmp_data[i]) | Build list with metrics | entailment |
def real_value(value, digit):
"""
function to calculate the real value
we need to devide the value by the digit
e.g.
value = 100
digit = 2
return: "1.0"
"""
return str(float(value) / math.pow(10, float(digit))) | function to calculate the real value
we need to devide the value by the digit
e.g.
value = 100
digit = 2
return: "1.0" | entailment |
def check_inlet(self, helper):
"""
check the Inlets of Raritan PDUs
"""
# walk the data
try:
inlet_values = self.sess.walk_oid(self.oids['oid_inlet_value'])
inlet_units = self.sess.walk_oid(self.oids['oid_inlet_unit'])
inlet_digits = self.sess.walk_oid(self.oids['oid_inlet_digits'])
inlet_states = self.sess.walk_oid(self.oids['oid_inlet_state'])
inlet_warning_uppers = self.sess.walk_oid(self.oids['oid_inlet_warning_upper'])
inlet_critical_uppers = self.sess.walk_oid(self.oids['oid_inlet_critical_upper'])
inlet_critical_lowers = self.sess.walk_oid(self.oids['oid_inlet_critical_lower'])
inlet_warning_lowers = self.sess.walk_oid(self.oids['oid_inlet_warning_lower'])
except health_monitoring_plugins.SnmpException as e:
helper.exit(summary=str(e), exit_code=unknown, perfdata='')
# just print the summary, that the inlet sensors are checked
helper.add_summary("Inlet")
# all list must have the same length, if not something went wrong. that makes it easier and we need less loops
# translate the data in human readable units with help of the dicts
for x in range(len(inlet_values)):
inlet_unit = units[int(inlet_units[x].val)]
inlet_digit = inlet_digits[x].val
inlet_state = states[int(inlet_states[x].val)]
inlet_value = real_value(inlet_values[x].val, inlet_digit)
inlet_warning_upper = real_value(inlet_warning_uppers[x].val, inlet_digit)
inlet_critical_upper = real_value(inlet_critical_uppers[x].val, inlet_digit)
inlet_warning_lower = real_value(inlet_warning_lowers[x].val, inlet_digit)
inlet_critical_lower = real_value(inlet_critical_lowers[x].val, inlet_digit)
if inlet_state != "normal":
# we don't want to use the thresholds. we rely on the state value of the device
helper.add_summary("%s %s is %s" % (inlet_value, inlet_unit, inlet_state))
helper.status(critical)
# we always want to see the values in the long output and in the perf data
helper.add_summary("%s %s" % (inlet_value, inlet_unit))
helper.add_long_output("%s %s: %s" % (inlet_value, inlet_unit, inlet_state))
helper.add_metric("Sensor " + str(x) + " -%s-" % inlet_unit, inlet_value,
inlet_warning_lower +\
":" + inlet_warning_upper, inlet_critical_lower + ":" +\
inlet_critical_upper, "", "", "") | check the Inlets of Raritan PDUs | entailment |
def check_outlet(self, helper):
"""
check the status of the specified outlet
"""
try:
outlet_name, outlet_state = self.sess.get_oids(self.oids['oid_outlet_name'], self.oids['oid_outlet_state'])
except health_monitoring_plugins.SnmpException as e:
helper.exit(summary=str(e), exit_code=unknown, perfdata='')
outlet_real_state = states[int(outlet_state)]
# here we check if the outlet is powered on
if outlet_real_state != "on":
helper.status(critical)
# print the status
helper.add_summary("Outlet %s - '%s' is: %s" % (self.number, outlet_name, outlet_real_state.upper())) | check the status of the specified outlet | entailment |
def check_sensor(self, helper):
"""
check the status of the specified sensor
"""
try:
sensor_name, sensor_state, sensor_type = self.sess.get_oids(
self.oids['oid_sensor_name'], self.oids['oid_sensor_state'], self.oids['oid_sensor_type'])
except health_monitoring_plugins.SnmpException as e:
helper.exit(summary=str(e), exit_code=unknown, perfdata='')
try:
sensor_state_string = states[int(sensor_state)]
except KeyError as e:
helper.exit(summary="Invalid sensor response " + sensor_state, exit_code=unknown, perfdata='')
sensor_unit = "" # if it's a onOff Sensor or something like that, we need an empty string for the summary
sensor_unit_string = ""
sensor_value = ""
sensor_digit = ""
real_sensor_value = ""
sensor_warning_upper = ""
sensor_critical_upper = ""
sensor_warning_lower = ""
sensor_critical_lower = ""
if int(sensor_type) not in [14, 16, 17, 18, 19, 20]:
# for all sensors except these, we want to calculate the real value and show the metric.
# 14: onOff
# 16: vibration
# 17: waterDetection
# 18: smokeDetection
# 19: binary
# 20: contact
try:
sensor_unit, sensor_digit, sensor_warning_upper, sensor_critical_upper, sensor_warning_lower, sensor_critical_lower, sensor_value = self.sess.get_oids(
self.oids['oid_sensor_unit'], self.oids['oid_sensor_digit'],
self.oids['oid_sensor_warning_upper'], self.oids['oid_sensor_critical_upper'],
self.oids['oid_sensor_warning_lower'], self.oids['oid_sensor_critical_lower'],
self.oids['oid_sensor_value'])
except health_monitoring_plugins.SnmpException as e:
helper.exit(summary=str(e), exit_code=unknown, perfdata='')
sensor_unit_string = units[int(sensor_unit)]
real_sensor_value = real_value(int(sensor_value), sensor_digit)
real_sensor_warning_upper = real_value(sensor_warning_upper, sensor_digit)
real_sensor_critical_upper = real_value(sensor_critical_upper, sensor_digit)
real_sensor_warning_lower = real_value(sensor_warning_lower, sensor_digit)
real_sensor_critical_lower = real_value(sensor_critical_lower, sensor_digit)
# metrics are only possible for these sensors
helper.add_metric(sensor_name + " -%s- " % sensor_unit_string, real_sensor_value,
real_sensor_warning_lower +\
":" + real_sensor_warning_upper, real_sensor_critical_lower +\
":" + real_sensor_critical_upper, "", "", "")
# "OK" state
if sensor_state_string in ["closed", "normal", "on", "notDetected", "ok", "yes", "one", "two", "inSync"]:
helper.status(ok)
# "WARNING" state
elif sensor_state_string in ["open", "belowLowerWarning", "aboveUpperWarning", "marginal", "standby"]:
helper.status(warning)
# "CRITICAL" state
elif sensor_state_string in ["belowLowerCritical", "aboveUpperCritical", "off", "detected", "alarmed", "fail", "no", "outOfSync"]:
helper.status(critical)
# "UNKOWN" state
elif sensor_state_string in ["unavailable"]:
helper.status(unknown)
# received an undefined state
else:
helper.exit(summary="Something went wrong - received undefined state", exit_code=unknown, perfdata='')
# summary is shown for all sensors
helper.add_summary("Sensor %s - '%s' %s%s is: %s" % (self.number, sensor_name, real_sensor_value, sensor_unit_string, sensor_state_string)) | check the status of the specified sensor | entailment |
def dev_null_wrapper(func, *a, **kwargs):
"""
Temporarily swap stdout with /dev/null, and execute given function while stdout goes to /dev/null.
This is useful because netsnmp writes to stdout and disturbes Icinga result in some cases.
"""
os.dup2(dev_null, sys.stdout.fileno())
return_object = func(*a, **kwargs)
sys.stdout.flush()
os.dup2(tmp_stdout, sys.stdout.fileno())
return return_object | Temporarily swap stdout with /dev/null, and execute given function while stdout goes to /dev/null.
This is useful because netsnmp writes to stdout and disturbes Icinga result in some cases. | entailment |
def state_summary(value, name, state_list, helper, ok_value = 'ok', info = None):
"""
Always add the status to the long output, and if the status is not ok (or ok_value),
we show it in the summary and set the status to critical
"""
# translate the value (integer) we receive to a human readable value (e.g. ok, critical etc.) with the given state_list
state_value = state_list[int(value)]
summary_output = ''
long_output = ''
if not info:
info = ''
if state_value != ok_value:
summary_output += ('%s status: %s %s ' % (name, state_value, info))
helper.status(pynag.Plugins.critical)
long_output += ('%s status: %s %s\n' % (name, state_value, info))
return (summary_output, long_output) | Always add the status to the long output, and if the status is not ok (or ok_value),
we show it in the summary and set the status to critical | entailment |
def add_output(summary_output, long_output, helper):
"""
if the summary output is empty, we don't add it as summary, otherwise we would have empty spaces (e.g.: '. . . . .') in our summary report
"""
if summary_output != '':
helper.add_summary(summary_output)
helper.add_long_output(long_output) | if the summary output is empty, we don't add it as summary, otherwise we would have empty spaces (e.g.: '. . . . .') in our summary report | entailment |
def process_gps_position(self, helper, sess):
"""
just print the current GPS position
"""
gps_position = helper.get_snmp_value(sess, helper, self.oids['oid_gps_position'])
if gps_position:
helper.add_summary(gps_position)
else:
helper.add_summary("Could not retrieve GPS position")
helper.status(unknown) | just print the current GPS position | entailment |
def process_status(self, helper, sess, check):
""" get the snmp value, check the status and update the helper"""
if check == 'ntp_current_state':
ntp_status_int = helper.get_snmp_value(sess, helper, self.oids['oid_ntp_current_state_int'])
result = self.check_ntp_status(ntp_status_int)
elif check == 'gps_mode':
gps_status_int = helper.get_snmp_value(sess, helper, self.oids['oid_gps_mode_int'])
result = self.check_gps_status(gps_status_int)
else:
return
helper.update_status(helper, result) | get the snmp value, check the status and update the helper | entailment |
def check_ntp_status(self, ntp_status_int):
"""
check the NTP status
"""
# convert the ntp_status integer value in a human readable value
ntp_status_string = self.ntp_status.get(ntp_status_int, "unknown")
if ntp_status_string == "unknown":
return unknown, ("NTP status: " + ntp_status_string)
# the ntp status should be synchronized (new MIB) or normalOperation (old mib)
elif ntp_status_string != "synchronized" and ntp_status_string != "normalOperationPPS":
# that is a critical condition, because the time reference will not be reliable anymore
return critical, ("NTP status: " + ntp_status_string)
return None | check the NTP status | entailment |
def check_gps_status(self, gps_status_int):
"""
check the GPS status
"""
gps_mode_string = self.gps_mode.get(gps_status_int, "unknown")
if gps_mode_string == "unknown":
return unknown, ("GPS status: " + gps_mode_string)
elif gps_mode_string != "normalOperation" \
and gps_mode_string != "gpsSync":
# that is a warning condition, NTP could still work without the GPS antenna
return warning, ("GPS status: " + gps_mode_string)
return None | check the GPS status | entailment |
def process_satellites(self, helper, sess):
"""
check and show the good satellites
"""
good_satellites = helper.get_snmp_value(sess, helper, self.oids['oid_gps_satellites_good'])
# Show the summary and add the metric and afterwards check the metric
helper.add_summary("Good satellites: {}".format(good_satellites))
helper.add_metric(label='satellites', value=good_satellites) | check and show the good satellites | entailment |
def login_password(self, value):
"""Set the value of the login password field."""
password = self.selenium.find_element(*self._password_input_locator)
password.clear()
password.send_keys(value) | Set the value of the login password field. | entailment |
def email(self, value):
"""Set the value of the email field."""
email = self.wait.until(expected.visibility_of_element_located(
self._email_input_locator))
email.clear()
email.send_keys(value) | Set the value of the email field. | entailment |
def sign_in(self, email, password):
"""Signs in using the specified email address and password."""
self.email = email
self.login_password = password
if self.is_element_present(*self._next_button_locator):
self.wait.until(expected.visibility_of_element_located(
self._next_button_locator))
self.click_next()
self.click_sign_in() | Signs in using the specified email address and password. | entailment |
def sign_in(self, email=None, password=None):
"""Signs in a user, either with the specified email address and password, or a returning user."""
from .pages.sign_in import SignIn
sign_in = SignIn(self.selenium, self.timeout)
sign_in.sign_in(email, password) | Signs in a user, either with the specified email address and password, or a returning user. | entailment |
def write_csv(fileobj, rows, encoding=ENCODING, dialect=DIALECT):
"""Dump rows to ``fileobj`` with the given ``encoding`` and CSV ``dialect``."""
csvwriter = csv.writer(fileobj, dialect=dialect)
csv_writerows(csvwriter, rows, encoding) | Dump rows to ``fileobj`` with the given ``encoding`` and CSV ``dialect``. | entailment |
def write_dataframe(rows, encoding=ENCODING, dialect=DIALECT, **kwargs):
"""Dump ``rows`` to string buffer and load with ``pandas.read_csv()`` using ``kwargs``."""
global pandas
if pandas is None: # pragma: no cover
import pandas
with contextlib.closing(CsvBuffer()) as fd:
write_csv(fd, rows, encoding, dialect)
fd.seek(0)
df = read_csv(pandas, fd, encoding, dialect, kwargs)
return df | Dump ``rows`` to string buffer and load with ``pandas.read_csv()`` using ``kwargs``. | entailment |
def from_string(cls, link):
"""Return a new SheetUrl instance from parsed URL string.
>>> SheetUrl.from_string('https://docs.google.com/spreadsheets/d/spam')
<SheetUrl id='spam' gid=0>
"""
ma = cls._pattern.search(link)
if ma is None:
raise ValueError(link)
id = ma.group('id')
return cls(id) | Return a new SheetUrl instance from parsed URL string.
>>> SheetUrl.from_string('https://docs.google.com/spreadsheets/d/spam')
<SheetUrl id='spam' gid=0> | entailment |
def doctemplate(*args):
"""Return a decorator putting ``args`` into the docstring of the decorated ``func``.
>>> @doctemplate('spam', 'spam')
... def spam():
... '''Returns %s, lovely %s.'''
... return 'Spam'
>>> spam.__doc__
'Returns spam, lovely spam.'
"""
def decorator(func):
func.__doc__ = func.__doc__ % tuple(args)
return func
return decorator | Return a decorator putting ``args`` into the docstring of the decorated ``func``.
>>> @doctemplate('spam', 'spam')
... def spam():
... '''Returns %s, lovely %s.'''
... return 'Spam'
>>> spam.__doc__
'Returns spam, lovely spam.' | entailment |
def group_dict(items, keyfunc):
"""Return a list defaultdict with ``items`` grouped by ``keyfunc``.
>>> sorted(group_dict('eggs', lambda x: x).items())
[('e', ['e']), ('g', ['g', 'g']), ('s', ['s'])]
"""
result = collections.defaultdict(list)
for i in items:
key = keyfunc(i)
result[key].append(i)
return result | Return a list defaultdict with ``items`` grouped by ``keyfunc``.
>>> sorted(group_dict('eggs', lambda x: x).items())
[('e', ['e']), ('g', ['g', 'g']), ('s', ['s'])] | entailment |
def uniqued(iterable):
"""Return unique list of ``iterable`` items preserving order.
>>> uniqued('spameggs')
['s', 'p', 'a', 'm', 'e', 'g']
"""
seen = set()
return [item for item in iterable if item not in seen and not seen.add(item)] | Return unique list of ``iterable`` items preserving order.
>>> uniqued('spameggs')
['s', 'p', 'a', 'm', 'e', 'g'] | entailment |
def build_service(name=None, **kwargs):
"""Return a service endpoint for interacting with a Google API."""
if name is not None:
for kw, value in iteritems(SERVICES[name]):
kwargs.setdefault(kw, value)
return apiclient.discovery.build(**kwargs) | Return a service endpoint for interacting with a Google API. | entailment |
def iterfiles(service, name=None, mimeType=SHEET, order=FILEORDER):
"""Fetch and yield ``(id, name)`` pairs for Google drive files."""
params = {'orderBy': order, 'pageToken': None}
q = []
if name is not None:
q.append("name='%s'" % name)
if mimeType is not None:
q.append("mimeType='%s'" % mimeType)
if q:
params['q'] = ' and '.join(q)
while True:
response = service.files().list(**params).execute()
for f in response['files']:
yield f['id'], f['name']
try:
params['pageToken'] = response['nextPageToken']
except KeyError:
return | Fetch and yield ``(id, name)`` pairs for Google drive files. | entailment |
def spreadsheet(service, id):
"""Fetch and return spreadsheet meta data with Google sheets API."""
request = service.spreadsheets().get(spreadsheetId=id)
try:
response = request.execute()
except apiclient.errors.HttpError as e:
if e.resp.status == 404:
raise KeyError(id)
else: # pragma: no cover
raise
return response | Fetch and return spreadsheet meta data with Google sheets API. | entailment |
def values(service, id, ranges):
"""Fetch and return spreadsheet cell values with Google sheets API."""
params = {'majorDimension': 'ROWS', 'valueRenderOption': 'UNFORMATTED_VALUE',
'dateTimeRenderOption': 'FORMATTED_STRING'}
params.update(spreadsheetId=id, ranges=ranges)
response = service.spreadsheets().values().batchGet(**params).execute()
return response['valueRanges'] | Fetch and return spreadsheet cell values with Google sheets API. | entailment |
def get_credentials(scopes=None, secrets=None, storage=None, no_webserver=False):
"""Make OAuth 2.0 credentials for scopes from ``secrets`` and ``storage`` files.
Args:
scopes: scope URL(s) or ``'read'``, ``'write'`` (default: ``%r``)
secrets: location of secrets file (default: ``%r``)
storage: location of storage file (default: ``%r``)
no_webserver: url/code prompt instead of webbrowser based auth
see https://developers.google.com/sheets/quickstart/python
see https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
"""
scopes = Scopes.get(scopes)
if secrets is None:
secrets = SECRETS
if storage is None:
storage = STORAGE
secrets, storage = map(os.path.expanduser, (secrets, storage))
store = file.Storage(storage)
creds = store.get()
if creds is None or creds.invalid:
flow = client.flow_from_clientsecrets(secrets, scopes)
args = ['--noauth_local_webserver'] if no_webserver else []
flags = tools.argparser.parse_args(args)
creds = tools.run_flow(flow, store, flags)
return creds | Make OAuth 2.0 credentials for scopes from ``secrets`` and ``storage`` files.
Args:
scopes: scope URL(s) or ``'read'``, ``'write'`` (default: ``%r``)
secrets: location of secrets file (default: ``%r``)
storage: location of storage file (default: ``%r``)
no_webserver: url/code prompt instead of webbrowser based auth
see https://developers.google.com/sheets/quickstart/python
see https://developers.google.com/api-client-library/python/guide/aaa_client_secrets | entailment |
def get(cls, scope=None):
"""Return default or predefined URLs from keyword, pass through ``scope``."""
if scope is None:
scope = cls.default
if isinstance(scope, string_types) and scope in cls._keywords:
return getattr(cls, scope)
return scope | Return default or predefined URLs from keyword, pass through ``scope``. | entailment |
def search_all(self, limit=50, format='json'):
''' Returns a single list containing up to 'limit' Result objects'''
desired_limit = limit
results = self._search(limit, format)
limit = limit - len(results)
while len(results) < desired_limit:
more_results = self._search(limit, format)
if not more_results:
break
results += more_results
limit = limit - len(more_results)
time.sleep(1)
return results | Returns a single list containing up to 'limit' Result objects | entailment |
def _search(self, limit, format):
'''
Returns a list of result objects, with the url for the next page bing search url.
'''
url = self.QUERY_URL.format(requests.utils.quote("'{}'".format(self.query)), min(50, limit), self.current_offset, format)
r = requests.get(url, auth=("", self.api_key))
try:
json_results = r.json()
except ValueError as vE:
if not self.safe:
raise PyBingVideoException("Request returned with code %s, error msg: %s" % (r.status_code, r.text))
else:
print ("[ERROR] Request returned with code %s, error msg: %s. \nContinuing in 5 seconds." % (r.status_code, r.text))
time.sleep(5)
packaged_results = [VideoResult(single_result_json) for single_result_json in json_results['d']['results']]
self.current_offset += min(50, limit, len(packaged_results))
return packaged_results | Returns a list of result objects, with the url for the next page bing search url. | entailment |
def base26int(s, _start=1 - ord('A')):
"""Return string ``s`` as ``int`` in bijective base26 notation.
>>> base26int('SPAM')
344799
"""
return sum((_start + ord(c)) * 26**i for i, c in enumerate(reversed(s))) | Return string ``s`` as ``int`` in bijective base26 notation.
>>> base26int('SPAM')
344799 | entailment |
def base26(x, _alphabet=string.ascii_uppercase):
"""Return positive ``int`` ``x`` as string in bijective base26 notation.
>>> [base26(i) for i in [0, 1, 2, 26, 27, 28, 702, 703, 704]]
['', 'A', 'B', 'Z', 'AA', 'AB', 'ZZ', 'AAA', 'AAB']
>>> base26(344799) # 19 * 26**3 + 16 * 26**2 + 1 * 26**1 + 13 * 26**0
'SPAM'
>>> base26(256)
'IV'
"""
result = []
while x:
x, digit = divmod(x, 26)
if not digit:
x -= 1
digit = 26
result.append(_alphabet[digit - 1])
return ''.join(result[::-1]) | Return positive ``int`` ``x`` as string in bijective base26 notation.
>>> [base26(i) for i in [0, 1, 2, 26, 27, 28, 702, 703, 704]]
['', 'A', 'B', 'Z', 'AA', 'AB', 'ZZ', 'AAA', 'AAB']
>>> base26(344799) # 19 * 26**3 + 16 * 26**2 + 1 * 26**1 + 13 * 26**0
'SPAM'
>>> base26(256)
'IV' | entailment |
def _parse(coord, _match=_regex.match):
"""Return match groups from single sheet coordinate.
>>> Coordinates._parse('A1')
('A', '1', None, None)
>>> Coordinates._parse('A'), Coordinates._parse('1')
((None, None, 'A', None), (None, None, None, '1'))
>>> Coordinates._parse('spam')
Traceback (most recent call last):
...
ValueError: spam
"""
try:
return _match(coord).groups()
except AttributeError:
raise ValueError(coord) | Return match groups from single sheet coordinate.
>>> Coordinates._parse('A1')
('A', '1', None, None)
>>> Coordinates._parse('A'), Coordinates._parse('1')
((None, None, 'A', None), (None, None, None, '1'))
>>> Coordinates._parse('spam')
Traceback (most recent call last):
...
ValueError: spam | entailment |
def _cint(col, _map={base26(i): i - 1 for i in range(1, 257)}):
"""Return zero-based column index from bijective base26 string.
>>> Coordinates._cint('Ab')
27
>>> Coordinates._cint('spam')
Traceback (most recent call last):
...
ValueError: spam
"""
try:
return _map[col.upper()]
except KeyError:
raise ValueError(col) | Return zero-based column index from bijective base26 string.
>>> Coordinates._cint('Ab')
27
>>> Coordinates._cint('spam')
Traceback (most recent call last):
...
ValueError: spam | entailment |
def from_slice(cls, coord):
"""Return a value fetching callable given a slice of coordinate strings."""
if coord.step is not None:
raise NotImplementedError('no slice step support')
elif coord.start is not None and coord.stop is not None:
return DoubleSlice.from_slice(coord)
elif coord.start is not None:
xcol, xrow, col, row = cls._parse(coord.start)
if xcol is not None:
return StartCell(cls._cint(xcol), cls._rint(xrow))
elif col is not None:
return StartCol(cls._cint(col))
return StartRow(cls._rint(row))
elif coord.stop is not None:
xcol, xrow, col, row = cls._parse(coord.stop)
if xcol is not None:
return StopCell(cls._cint(xcol) + 1, cls._rint(xrow) + 1)
elif col is not None:
return StopCol(cls._cint(col) + 1)
return StopRow(cls._rint(row) + 1)
return cls() | Return a value fetching callable given a slice of coordinate strings. | entailment |
def find(self, title):
"""Return the first worksheet with the given title.
Args:
title(str): title/name of the worksheet to return
Returns:
WorkSheet: contained worksheet object
Raises:
KeyError: if the spreadsheet has no no worksheet with the given ``title``
"""
if title not in self._titles:
raise KeyError(title)
return self._titles[title][0] | Return the first worksheet with the given title.
Args:
title(str): title/name of the worksheet to return
Returns:
WorkSheet: contained worksheet object
Raises:
KeyError: if the spreadsheet has no no worksheet with the given ``title`` | entailment |
def findall(self, title=None):
"""Return a list of worksheets with the given title.
Args:
title(str): title/name of the worksheets to return, or ``None`` for all
Returns:
list: list of contained worksheet instances (possibly empty)
"""
if title is None:
return list(self._sheets)
if title not in self._titles:
return []
return list(self._titles[title]) | Return a list of worksheets with the given title.
Args:
title(str): title/name of the worksheets to return, or ``None`` for all
Returns:
list: list of contained worksheet instances (possibly empty) | entailment |
def to_csv(self, encoding=export.ENCODING, dialect=export.DIALECT,
make_filename=export.MAKE_FILENAME):
"""Dump all worksheets of the spreadsheet to individual CSV files.
Args:
encoding (str): result string encoding
dialect (str): :mod:`csv` dialect name or object to use
make_filename: template or one-argument callable returning the filename
If ``make_filename`` is a string, it is string-interpolated with an
infos-dictionary with the fields ``id`` (spreadhseet id), ``title``
(spreadsheet title), ``sheet`` (worksheet title), ``gid`` (worksheet
id), ``index`` (worksheet index), and ``dialect`` CSV dialect to
generate the filename: ``filename = make_filename % infos``.
If ``make_filename`` is a callable, it will be called with the
infos-dictionary to generate the filename:
``filename = make_filename(infos)``.
"""
for s in self._sheets:
s.to_csv(None, encoding, dialect, make_filename) | Dump all worksheets of the spreadsheet to individual CSV files.
Args:
encoding (str): result string encoding
dialect (str): :mod:`csv` dialect name or object to use
make_filename: template or one-argument callable returning the filename
If ``make_filename`` is a string, it is string-interpolated with an
infos-dictionary with the fields ``id`` (spreadhseet id), ``title``
(spreadsheet title), ``sheet`` (worksheet title), ``gid`` (worksheet
id), ``index`` (worksheet index), and ``dialect`` CSV dialect to
generate the filename: ``filename = make_filename % infos``.
If ``make_filename`` is a callable, it will be called with the
infos-dictionary to generate the filename:
``filename = make_filename(infos)``. | entailment |
def titles(self, unique=False):
"""Return a list of contained worksheet titles.
Args:
unique (bool): drop duplicates
Returns:
list: list of titles/name strings
"""
if unique:
return tools.uniqued(s.title for s in self._items)
return [s.title for s in self._items] | Return a list of contained worksheet titles.
Args:
unique (bool): drop duplicates
Returns:
list: list of titles/name strings | entailment |
def at(self, row, col):
"""Return the value at the given cell position.
Args:
row (int): zero-based row number
col (int): zero-based column number
Returns:
cell value
Raises:
TypeError: if ``row`` or ``col`` is not an ``int``
IndexError: if the position is out of range
"""
if not (isinstance(row, int) and isinstance(col, int)):
raise TypeError(row, col)
return self._values[row][col] | Return the value at the given cell position.
Args:
row (int): zero-based row number
col (int): zero-based column number
Returns:
cell value
Raises:
TypeError: if ``row`` or ``col`` is not an ``int``
IndexError: if the position is out of range | entailment |
def values(self, column_major=False):
"""Return a nested list with the worksheet values.
Args:
column_major (bool): as list of columns (default list of rows)
Returns:
list: list of lists with values
"""
if column_major:
return list(map(list, zip(*self._values)))
return [row[:] for row in self._values] | Return a nested list with the worksheet values.
Args:
column_major (bool): as list of columns (default list of rows)
Returns:
list: list of lists with values | entailment |
def to_csv(self, filename=None,
encoding=export.ENCODING, dialect=export.DIALECT,
make_filename=export.MAKE_FILENAME):
"""Dump the worksheet to a CSV file.
Args:
filename (str): result filename (if ``None`` use ``make_filename``)
encoding (str): result string encoding
dialect (str): :mod:`csv` dialect name or object to use
make_filename: template or one-argument callable returning the filename
If ``make_filename`` is a string, it is string-interpolated with an
infos-dictionary with the fields ``id`` (spreadhseet id), ``title``
(spreadsheet title), ``sheet`` (worksheet title), ``gid`` (worksheet
id), ``index`` (worksheet index), and ``dialect`` CSV dialect to
generate the filename: ``filename = make_filename % infos``.
If ``make_filename`` is a callable, it will be called with the
infos-dictionary to generate the filename:
``filename = make_filename(infos)``.
"""
if filename is None:
if make_filename is None:
make_filename = export.MAKE_FILENAME
infos = {
'id': self._spreadsheet._id,
'title': self._spreadsheet._title,
'sheet': self._title,
'gid': self._id,
'index': self._index,
'dialect': dialect,
}
if isinstance(make_filename, string_types):
filename = make_filename % infos
else:
filename = make_filename(infos)
with export.open_csv(filename, 'w', encoding=encoding) as fd:
export.write_csv(fd, self._values, encoding, dialect) | Dump the worksheet to a CSV file.
Args:
filename (str): result filename (if ``None`` use ``make_filename``)
encoding (str): result string encoding
dialect (str): :mod:`csv` dialect name or object to use
make_filename: template or one-argument callable returning the filename
If ``make_filename`` is a string, it is string-interpolated with an
infos-dictionary with the fields ``id`` (spreadhseet id), ``title``
(spreadsheet title), ``sheet`` (worksheet title), ``gid`` (worksheet
id), ``index`` (worksheet index), and ``dialect`` CSV dialect to
generate the filename: ``filename = make_filename % infos``.
If ``make_filename`` is a callable, it will be called with the
infos-dictionary to generate the filename:
``filename = make_filename(infos)``. | entailment |
def to_frame(self, **kwargs):
r"""Return a pandas DataFrame loaded from the worksheet data.
Args:
\**kwargs: passed to ``pandas.read_csv()`` (e.g. ``header``, ``index_col``)
Returns:
pandas.DataFrame: new ``DataFrame`` instance
"""
df = export.write_dataframe(self._values, **kwargs)
df.name = self.title
return df | r"""Return a pandas DataFrame loaded from the worksheet data.
Args:
\**kwargs: passed to ``pandas.read_csv()`` (e.g. ``header``, ``index_col``)
Returns:
pandas.DataFrame: new ``DataFrame`` instance | entailment |
def from_files(cls, secrets=None, storage=None, scopes=None, no_webserver=False):
"""Return a spreadsheet collection making OAauth 2.0 credentials.
Args:
secrets (str): location of secrets file (default: ``%r``)
storage (str): location of storage file (default: ``%r``)
scopes: scope URL(s) or ``'read'`` or ``'write'`` (default: ``%r``)
no_webserver (bool): URL/code prompt instead of webbrowser auth
Returns:
Sheets: new Sheets instance with OAauth 2.0 credentials
"""
creds = oauth2.get_credentials(scopes, secrets, storage, no_webserver)
return cls(creds) | Return a spreadsheet collection making OAauth 2.0 credentials.
Args:
secrets (str): location of secrets file (default: ``%r``)
storage (str): location of storage file (default: ``%r``)
scopes: scope URL(s) or ``'read'`` or ``'write'`` (default: ``%r``)
no_webserver (bool): URL/code prompt instead of webbrowser auth
Returns:
Sheets: new Sheets instance with OAauth 2.0 credentials | entailment |
def get(self, id_or_url, default=None):
"""Fetch and return the spreadsheet with the given id or url.
Args:
id_or_url (str): unique alphanumeric id or URL of the spreadsheet
Returns:
New SpreadSheet instance or given default if none is found
Raises:
ValueError: if an URL is given from which no id could be extracted
"""
if '/' in id_or_url:
id = urls.SheetUrl.from_string(id_or_url).id
else:
id = id_or_url
try:
return self[id]
except KeyError:
return default | Fetch and return the spreadsheet with the given id or url.
Args:
id_or_url (str): unique alphanumeric id or URL of the spreadsheet
Returns:
New SpreadSheet instance or given default if none is found
Raises:
ValueError: if an URL is given from which no id could be extracted | entailment |
def find(self, title):
"""Fetch and return the first spreadsheet with the given title.
Args:
title(str): title/name of the spreadsheet to return
Returns:
SpreadSheet: new SpreadSheet instance
Raises:
KeyError: if no spreadsheet with the given ``title`` is found
"""
files = backend.iterfiles(self._drive, name=title)
try:
return next(self[id] for id, _ in files)
except StopIteration:
raise KeyError(title) | Fetch and return the first spreadsheet with the given title.
Args:
title(str): title/name of the spreadsheet to return
Returns:
SpreadSheet: new SpreadSheet instance
Raises:
KeyError: if no spreadsheet with the given ``title`` is found | entailment |
def findall(self, title=None):
"""Fetch and return a list of spreadsheets with the given title.
Args:
title(str): title/name of the spreadsheets to return, or ``None`` for all
Returns:
list: list of new SpreadSheet instances (possibly empty)
"""
if title is None:
return list(self)
files = backend.iterfiles(self._drive, name=title)
return [self[id] for id, _ in files] | Fetch and return a list of spreadsheets with the given title.
Args:
title(str): title/name of the spreadsheets to return, or ``None`` for all
Returns:
list: list of new SpreadSheet instances (possibly empty) | entailment |
def titles(self, unique=False):
"""Return a list of all available spreadsheet titles.
Args:
unique (bool): drop duplicates
Returns:
list: list of title/name strings
"""
if unique:
return tools.uniqued(title for _, title in self.iterfiles())
return [title for _, title in self.iterfiles()] | Return a list of all available spreadsheet titles.
Args:
unique (bool): drop duplicates
Returns:
list: list of title/name strings | entailment |
def create(self, name, description, data_source_type,
url, credential_user=None, credential_pass=None,
is_public=None, is_protected=None, s3_credentials=None):
"""Create a Data Source."""
data = {
'name': name,
'description': description,
'type': data_source_type,
'url': url,
}
credentials = {}
self._copy_if_defined(credentials,
user=credential_user,
password=credential_pass)
credentials = credentials or s3_credentials
self._copy_if_defined(data, is_public=is_public,
is_protected=is_protected,
credentials=credentials)
return self._create('/data-sources', data, 'data_source') | Create a Data Source. | entailment |
def update(self, data_source_id, update_data):
"""Update a Data Source.
:param dict update_data: dict that contains fields that should be
updated with new values.
Fields that can be updated:
* name
* description
* type
* url
* is_public
* is_protected
* credentials - dict with the keys `user` and `password` for data
source in Swift, or with the keys `accesskey`, `secretkey`,
`endpoint`, `ssl`, and `bucket_in_path` for data source in S3
"""
if self.version >= 2:
UPDATE_FUNC = self._patch
else:
UPDATE_FUNC = self._update
return UPDATE_FUNC('/data-sources/%s' % data_source_id,
update_data) | Update a Data Source.
:param dict update_data: dict that contains fields that should be
updated with new values.
Fields that can be updated:
* name
* description
* type
* url
* is_public
* is_protected
* credentials - dict with the keys `user` and `password` for data
source in Swift, or with the keys `accesskey`, `secretkey`,
`endpoint`, `ssl`, and `bucket_in_path` for data source in S3 | entailment |
def getitem_by_path(d, path):
"""Access item in d using path.
a = { 0: { 1: 'item' } }
getitem_by_path(a, [0, 1]) == 'item'
"""
return reduce(
lambda d, k: d[k],
path,
d
) | Access item in d using path.
a = { 0: { 1: 'item' } }
getitem_by_path(a, [0, 1]) == 'item' | entailment |
def clean_empty(self, d=DEFAULT):
"""Returns a copy of d without empty leaves.
https://stackoverflow.com/questions/27973988/python-how-to-remove-all-empty-fields-in-a-nested-dict/35263074
"""
if d is DEFAULT:
d = self
if isinstance(d, list):
return [v for v in (self.clean_empty(v) for v in d) if v or v == 0]
elif isinstance(d, type(self)):
return type(self)({k: v for k, v in ((k, self.clean_empty(v)) for k, v in d.items()) if v or v == 0})
elif isinstance(d, dict):
return {k: v for k, v in ((k, self.clean_empty(v)) for k, v in d.items()) if v or v == 0}
return d | Returns a copy of d without empty leaves.
https://stackoverflow.com/questions/27973988/python-how-to-remove-all-empty-fields-in-a-nested-dict/35263074 | entailment |
def compress(self, d=DEFAULT):
"""Returns a copy of d with compressed leaves."""
if d is DEFAULT:
d = self
if isinstance(d, list):
l = [v for v in (self.compress(v) for v in d)]
try:
return list(set(l))
except TypeError:
# list contains not hashables
ret = []
for i in l:
if i not in ret:
ret.append(i)
return ret
elif isinstance(d, type(self)):
return type(self)({k: v for k, v in ((k, self.compress(v)) for k, v in d.items())})
elif isinstance(d, dict):
return {k: v for k, v in ((k, self.compress(v)) for k, v in d.items())}
return d | Returns a copy of d with compressed leaves. | entailment |
def cast_dicts(self, to=DEFAULT, d=DEFAULT):
"""Returns a copy of d with all dicts casted to the type 'to'."""
if to is DEFAULT:
to = type(self)
if d is DEFAULT:
d = self
if isinstance(d, list):
return [v for v in (self.cast_dicts(to, v) for v in d)]
elif isinstance(d, dict):
return to({k: v for k, v in ((k, self.cast_dicts(to, v)) for k, v in d.items())})
return d | Returns a copy of d with all dicts casted to the type 'to'. | entailment |
def merge(self, b, a=DEFAULT):
"""Merges b into a recursively, if a is not given: merges into self.
also merges lists and:
* merge({a:a},{a:b}) = {a:[a,b]}
* merge({a:[a]},{a:b}) = {a:[a,b]}
* merge({a:a},{a:[b]}) = {a:[a,b]}
* merge({a:[a]},{a:[b]}) = {a:[a,b]}
"""
if a is DEFAULT:
a = self
for key in b:
if key in a:
if isinstance(a[key], dict) and isinstance(b[key], dict):
self.merge(b[key], a[key])
else:
if type(a[key]) is list and type(b[key]) is list:
a[key] += b[key]
elif type(a[key]) is list and type(b[key]) is not list:
a[key] += [b[key]]
elif type(a[key]) is not list and type(b[key]) is list:
a[key] = [a[key]] + b[key]
elif type(a[key]) is not list and type(b[key]) is not list:
a[key] = [a[key]] + [b[key]]
else:
a[key] = b[key]
return a | Merges b into a recursively, if a is not given: merges into self.
also merges lists and:
* merge({a:a},{a:b}) = {a:[a,b]}
* merge({a:[a]},{a:b}) = {a:[a,b]}
* merge({a:a},{a:[b]}) = {a:[a,b]}
* merge({a:[a]},{a:[b]}) = {a:[a,b]} | entailment |
def create(self, name, url, description=None, extra=None, is_public=None,
is_protected=None):
"""Create a Job Binary.
:param dict extra: authentication info needed for some job binaries,
containing the keys `user` and `password` for job binary in Swift
or the keys `accesskey`, `secretkey`, and `endpoint` for job
binary in S3
"""
data = {
"name": name,
"url": url
}
self._copy_if_defined(data, description=description, extra=extra,
is_public=is_public, is_protected=is_protected)
return self._create('/job-binaries', data, 'job_binary') | Create a Job Binary.
:param dict extra: authentication info needed for some job binaries,
containing the keys `user` and `password` for job binary in Swift
or the keys `accesskey`, `secretkey`, and `endpoint` for job
binary in S3 | entailment |
def get_file(self, job_binary_id):
"""Download a Job Binary."""
resp = self.api.get('/job-binaries/%s/data' % job_binary_id)
if resp.status_code != 200:
self._raise_api_exception(resp)
return resp.content | Download a Job Binary. | entailment |
def update(self, job_binary_id, data):
"""Update Job Binary.
:param dict data: dict that contains fields that should be updated
with new values.
Fields that can be updated:
* name
* description
* url
* is_public
* is_protected
* extra - dict with the keys `user` and `password` for job binary
in Swift, or with the keys `accesskey`, `secretkey`, and `endpoint`
for job binary in S3
"""
if self.version >= 2:
UPDATE_FUNC = self._patch
else:
UPDATE_FUNC = self._update
return UPDATE_FUNC(
'/job-binaries/%s' % job_binary_id, data, 'job_binary') | Update Job Binary.
:param dict data: dict that contains fields that should be updated
with new values.
Fields that can be updated:
* name
* description
* url
* is_public
* is_protected
* extra - dict with the keys `user` and `password` for job binary
in Swift, or with the keys `accesskey`, `secretkey`, and `endpoint`
for job binary in S3 | entailment |
def set(conf):
"""Applies a configuration to the global config object"""
for name, value in conf.items():
if value is not None:
setattr(Conf, name.upper(), value) | Applies a configuration to the global config object | entailment |
def get():
"""Gets the configuration as a dict"""
return {
attr: getattr(Conf, attr)
for attr in dir(Conf()) if not callable(getattr(Conf, attr)) and not attr.startswith("__")
} | Gets the configuration as a dict | entailment |
def load(description, add_arguments_cb = lambda x: None, postprocess_conf_cb = lambda x: None):
"""Loads the global Conf object from command line arguments.
Encode the next argument after +plugin to ensure that
it does not start with a prefix_char
"""
argparser = ArgumentParser(
description = description,
prefix_chars = '-+'
)
argparser.add_argument(
'--version',
dest = 'PRINT_VERSION',
action = 'store_true',
help = 'Print version and exit'
)
add_arguments_cb(argparser)
# set up plugin argument argparser
plugin_argparser = argparser.add_argument_group('Plugins')
plugins = {}
def load_plugin_group(group):
"""Load all plugins from the given plugin_group."""
for entry_point in iter_entry_points(group = group):
name = str(entry_point).split(' =',1)[0]
plugin = entry_point.load()
if isclass(plugin) \
and not plugin in Conf.SUPPORTED_PLUGIN_INTERFACES \
and any([
issubclass(plugin, supported_plugin_interface)
for supported_plugin_interface in Conf.SUPPORTED_PLUGIN_INTERFACES
]):
plugin_argparser.add_argument(
'+{}'.format(name),
dest = 'PLUGIN_{}'.format(name),
type = str,
nargs = '?',
default = DEFAULT,
metavar = 'args'.format(name),
help = make_argparse_help_safe(
call_plugin(
plugin,
'help'
)
)
)
# register plugin
plugins[name] = plugin
else:
warning('Plugin not supported: {}'.format(name))
load_plugin_group(Conf.PLUGIN_GROUP_BASE)
if Conf.LOAD_PLUGINS:
load_plugin_group(Conf.PLUGIN_GROUP)
conf = vars(
argparser.parse_args([
v if i == 0 or v[0] == '+' or Conf.ARGS[i-1][0] != '+'
else b32encode(v.encode()).decode()
for i, v in enumerate(Conf.ARGS)
])
)
postprocess_conf_cb(conf)
# apply configuration
Conf.set(conf)
if Conf.PRINT_VERSION:
print(
'pdml2flow version {}'.format(
Conf.VERSION
),
file = Conf.OUT
)
sys.exit(0)
# initialize plugins
Conf.PLUGINS = []
for conf_name, args in conf.items():
if conf_name.startswith('PLUGIN_') and args != DEFAULT:
plugin_name = conf_name[7:]
Conf.PLUGINS.append(
# instantiate plugin
plugins[plugin_name](
*split(
b32decode(args.encode()).decode() if args is not None else ''
)
)
) | Loads the global Conf object from command line arguments.
Encode the next argument after +plugin to ensure that
it does not start with a prefix_char | entailment |
def _format_usage_without_prefix(parser):
"""
Use private argparse APIs to get the usage string without
the 'usage: ' prefix.
"""
fmt = parser._get_formatter()
fmt.add_usage(parser.usage, parser._actions,
parser._mutually_exclusive_groups, prefix='')
return fmt.format_help().strip() | Use private argparse APIs to get the usage string without
the 'usage: ' prefix. | entailment |
def update(self, ng_template_id, name=NotUpdated, plugin_name=NotUpdated,
hadoop_version=NotUpdated, flavor_id=NotUpdated,
description=NotUpdated, volumes_per_node=NotUpdated,
volumes_size=NotUpdated, node_processes=NotUpdated,
node_configs=NotUpdated, floating_ip_pool=NotUpdated,
security_groups=NotUpdated, auto_security_group=NotUpdated,
availability_zone=NotUpdated,
volumes_availability_zone=NotUpdated, volume_type=NotUpdated,
image_id=NotUpdated, is_proxy_gateway=NotUpdated,
volume_local_to_instance=NotUpdated, use_autoconfig=NotUpdated,
shares=NotUpdated, is_public=NotUpdated,
is_protected=NotUpdated, volume_mount_prefix=NotUpdated):
"""Update a Node Group Template."""
data = {}
self._copy_if_updated(
data, name=name, plugin_name=plugin_name,
hadoop_version=hadoop_version, flavor_id=flavor_id,
description=description, volumes_per_node=volumes_per_node,
volumes_size=volumes_size, node_processes=node_processes,
node_configs=node_configs, floating_ip_pool=floating_ip_pool,
security_groups=security_groups,
auto_security_group=auto_security_group,
availability_zone=availability_zone,
volumes_availability_zone=volumes_availability_zone,
volume_type=volume_type, image_id=image_id,
is_proxy_gateway=is_proxy_gateway,
volume_local_to_instance=volume_local_to_instance,
use_autoconfig=use_autoconfig, shares=shares,
is_public=is_public, is_protected=is_protected,
volume_mount_prefix=volume_mount_prefix
)
return self._update('/node-group-templates/%s' % ng_template_id, data,
'node_group_template') | Update a Node Group Template. | entailment |
def create(self, name, plugin_name, plugin_version, flavor_id,
description=None, volumes_per_node=None, volumes_size=None,
node_processes=None, node_configs=None, floating_ip_pool=None,
security_groups=None, auto_security_group=None,
availability_zone=None, volumes_availability_zone=None,
volume_type=None, image_id=None, is_proxy_gateway=None,
volume_local_to_instance=None, use_autoconfig=None,
shares=None, is_public=None, is_protected=None,
volume_mount_prefix=None, boot_from_volume=None,
boot_volume_type=None, boot_volume_availability_zone=None,
boot_volume_local_to_instance=None):
"""Create a Node Group Template."""
data = {
'name': name,
'plugin_name': plugin_name,
'plugin_version': plugin_version,
'flavor_id': flavor_id,
'node_processes': node_processes
}
return self._do_create(data, description, volumes_per_node,
volumes_size, node_configs, floating_ip_pool,
security_groups, auto_security_group,
availability_zone, volumes_availability_zone,
volume_type, image_id, is_proxy_gateway,
volume_local_to_instance, use_autoconfig,
shares, is_public, is_protected,
volume_mount_prefix, boot_from_volume,
boot_volume_type,
boot_volume_availability_zone,
boot_volume_local_to_instance) | Create a Node Group Template. | entailment |
def update(self, ng_template_id, name=NotUpdated, plugin_name=NotUpdated,
plugin_version=NotUpdated, flavor_id=NotUpdated,
description=NotUpdated, volumes_per_node=NotUpdated,
volumes_size=NotUpdated, node_processes=NotUpdated,
node_configs=NotUpdated, floating_ip_pool=NotUpdated,
security_groups=NotUpdated, auto_security_group=NotUpdated,
availability_zone=NotUpdated,
volumes_availability_zone=NotUpdated, volume_type=NotUpdated,
image_id=NotUpdated, is_proxy_gateway=NotUpdated,
volume_local_to_instance=NotUpdated, use_autoconfig=NotUpdated,
shares=NotUpdated, is_public=NotUpdated,
is_protected=NotUpdated, volume_mount_prefix=NotUpdated,
boot_from_volume=NotUpdated,
boot_volume_type=NotUpdated,
boot_volume_availability_zone=NotUpdated,
boot_volume_local_to_instance=NotUpdated):
"""Update a Node Group Template."""
data = {}
self._copy_if_updated(
data, name=name, plugin_name=plugin_name,
plugin_version=plugin_version, flavor_id=flavor_id,
description=description, volumes_per_node=volumes_per_node,
volumes_size=volumes_size, node_processes=node_processes,
node_configs=node_configs, floating_ip_pool=floating_ip_pool,
security_groups=security_groups,
auto_security_group=auto_security_group,
availability_zone=availability_zone,
volumes_availability_zone=volumes_availability_zone,
volume_type=volume_type, image_id=image_id,
is_proxy_gateway=is_proxy_gateway,
volume_local_to_instance=volume_local_to_instance,
use_autoconfig=use_autoconfig, shares=shares,
is_public=is_public, is_protected=is_protected,
volume_mount_prefix=volume_mount_prefix,
boot_from_volume=boot_from_volume,
boot_volume_type=boot_volume_type,
boot_volume_availability_zone=boot_volume_availability_zone,
boot_volume_local_to_instance=boot_volume_local_to_instance
)
return self._patch('/node-group-templates/%s' % ng_template_id, data,
'node_group_template') | Update a Node Group Template. | entailment |
def update_image(self, image_id, user_name, desc=None):
"""Create or update an Image in Image Registry."""
desc = desc if desc else ''
data = {"username": user_name,
"description": desc}
return self._post('/images/%s' % image_id, data) | Create or update an Image in Image Registry. | entailment |
def update_tags(self, image_id, new_tags):
"""Update an Image tags.
:param new_tags: list of tags that will replace currently
assigned tags
"""
# Do not add :param list in the docstring above until this is solved:
# https://github.com/sphinx-doc/sphinx/issues/2549
old_image = self.get(image_id)
old_tags = frozenset(old_image.tags)
new_tags = frozenset(new_tags)
to_add = list(new_tags - old_tags)
to_remove = list(old_tags - new_tags)
add_response, remove_response = None, None
if to_add:
add_response = self._post('/images/%s/tag' % image_id,
{'tags': to_add}, 'image')
if to_remove:
remove_response = self._post('/images/%s/untag' % image_id,
{'tags': to_remove}, 'image')
return remove_response or add_response or self.get(image_id) | Update an Image tags.
:param new_tags: list of tags that will replace currently
assigned tags | entailment |
def build_option_parser(parser):
"""Hook to add global options."""
parser.add_argument(
"--os-data-processing-api-version",
metavar="<data-processing-api-version>",
default=utils.env(
'OS_DATA_PROCESSING_API_VERSION',
default=DEFAULT_DATA_PROCESSING_API_VERSION),
help=("Data processing API version, default=" +
DEFAULT_DATA_PROCESSING_API_VERSION +
' (Env: OS_DATA_PROCESSING_API_VERSION)'))
parser.add_argument(
"--os-data-processing-url",
default=utils.env(
"OS_DATA_PROCESSING_URL"),
help=("Data processing API URL, "
"(Env: OS_DATA_PROCESSING_API_URL)"))
return parser | Hook to add global options. | entailment |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.