sentence1 stringlengths 52 3.87M | sentence2 stringlengths 1 47.2k | label stringclasses 1 value |
|---|---|---|
def _get_ilo_firmware_version(self, data):
"""Gets the ilo firmware version for server capabilities
Parse the get_host_health_data() to retreive the firmware
details.
:param data: the output returned by get_host_health_data()
:returns: a dictionary of iLO firmware version.
"""
firmware_details = self._get_firmware_embedded_health(data)
if firmware_details:
try:
return {'ilo_firmware_version': firmware_details['iLO']}
except KeyError:
return None | Gets the ilo firmware version for server capabilities
Parse the get_host_health_data() to retreive the firmware
details.
:param data: the output returned by get_host_health_data()
:returns: a dictionary of iLO firmware version. | entailment |
def get_ilo_firmware_version_as_major_minor(self):
    """Return the iLO firmware version as "<major>.<minor>".

    Fetches the host health data and parses it to pick out the
    embedded iLO firmware details.

    :returns: string of the form "<major>.<minor>", or None when the
        firmware details cannot be determined.
    """
    health_data = self.get_host_health_data()
    fw_info = self._get_firmware_embedded_health(health_data)
    if not fw_info:
        # Mirror the missing-details case with an implicit None.
        return
    return common.get_major_minor(fw_info.get('iLO', None))
def _get_number_of_gpu_devices_connected(self, data):
"""Gets the number of GPU devices connected to the server
Parse the get_host_health_data() and get the count of
number of GPU devices connected to the server.
:param data: the output returned by get_host_health_data()
:returns: a dictionary of rom firmware version.
"""
temp = self.get_value_as_list((data['GET_EMBEDDED_HEALTH_DATA']
['TEMPERATURE']), 'TEMP')
count = 0
if temp is None:
return {'pci_gpu_devices': count}
for key in temp:
for name, value in key.items():
if name == 'LABEL' and 'GPU' in value['VALUE']:
count = count + 1
return {'pci_gpu_devices': count} | Gets the number of GPU devices connected to the server
Parse the get_host_health_data() and get the count of
number of GPU devices connected to the server.
:param data: the output returned by get_host_health_data()
:returns: a dictionary of rom firmware version. | entailment |
def activate_license(self, key):
    """Activates iLO license.

    :param key: iLO license key.
    :raises: IloError, on an error from iLO.
    """
    xml_root = self._create_dynamic_xml('LICENSE', 'RIB_INFO', 'write')
    license_element = xml_root.find('LOGIN/RIB_INFO/LICENSE')
    etree.SubElement(license_element, 'ACTIVATE', KEY=key)
    response = self._request_ilo(xml_root)
    self._parse_output(response)
def update_firmware(self, filename, component_type):
    """Updates the given firmware on the server for the given component.

    :param filename: location of the raw firmware file. Extraction of the
                     firmware file (if in compact format) is expected to
                     happen prior to this invocation.
    :param component_type: Type of component to be applied to.
    :raises: InvalidInputError, if the validation of the input fails
    :raises: IloError, on an error from iLO
    :raises: IloConnectionError, if not able to reach iLO.
    :raises: IloCommandNotSupportedError, if the command is
             not supported on the server
    """
    # Push the firmware image to the iLO first; the returned cookie
    # authenticates the subsequent flash request.
    uploader = firmware_controller.FirmwareImageUploader(filename)
    LOG.debug(self._('Uploading firmware file: %s ...'), filename)
    cookie = uploader.upload_file_to((self.host, self.port),
                                     self.timeout)
    LOG.debug(self._('Uploading firmware file: %s ... done'), filename)
    flash_xml = self._get_firmware_update_xml_for_file_and_component(
        filename, component_type)
    rib_info = flash_xml.find('LOGIN/RIB_INFO')
    etree.SubElement(rib_info, 'TPM_ENABLED', VALUE='Yes')
    LOG.debug(self._('Flashing firmware file: %s ...'), filename)
    response = self._request_ilo(flash_xml,
                                 extra_headers={'Cookie': cookie})
    # wait till the firmware update completes before parsing the output.
    common.wait_for_ribcl_firmware_update_to_complete(self)
    self._parse_output(response)
    LOG.info(self._('Flashing firmware file: %s ... done'), filename)
def _get_firmware_update_xml_for_file_and_component(
self, filename, component):
"""Creates the dynamic xml for flashing the device firmware via iLO.
This method creates the dynamic xml for flashing the firmware, based
on the component type so passed.
:param filename: location of the raw firmware file.
:param component_type: Type of component to be applied to.
:returns: the etree.Element for the root of the RIBCL XML
for flashing the device (component) firmware.
"""
if component == 'ilo':
cmd_name = 'UPDATE_RIB_FIRMWARE'
else:
# Note(deray): Not explicitly checking for all other supported
# devices (components), as those checks have already happened
# in the invoking methods and may seem redundant here.
cmd_name = 'UPDATE_FIRMWARE'
fwlen = os.path.getsize(filename)
root = self._create_dynamic_xml(cmd_name,
'RIB_INFO',
'write',
subelements={
'IMAGE_LOCATION': filename,
'IMAGE_LENGTH': str(fwlen)
})
return root | Creates the dynamic xml for flashing the device firmware via iLO.
This method creates the dynamic xml for flashing the firmware, based
on the component type so passed.
:param filename: location of the raw firmware file.
:param component_type: Type of component to be applied to.
:returns: the etree.Element for the root of the RIBCL XML
for flashing the device (component) firmware. | entailment |
def _update_nic_data_from_nic_info_based_on_model(self, nic_dict, item,
port, mac):
"""This method updates with port number and corresponding mac
:param nic_dict: dictionary contains port number and corresponding mac
:param item: dictionary containing nic details
:param port: Port number
:param mac: mac-address
"""
if 'G7' in self.model:
nic_dict[port] = mac
else:
location = item['LOCATION']['VALUE']
if location == 'Embedded':
nic_dict[port] = mac | This method updates with port number and corresponding mac
:param nic_dict: dictionary contains port number and corresponding mac
:param item: dictionary containing nic details
:param port: Port number
:param mac: mac-address | entailment |
def _get_response_body_from_gzipped_content(self, url, response):
    """Get the response body from gzipped content

    Try to decode as gzip (we should check the headers for
    Content-Encoding=gzip)
    if response.headers['content-encoding'] == "gzip":
        ...

    :param url: the url for which response was sent
    :type url: str
    :param response: response content object, probably gzipped
    :type response: object
    :returns: returns response body
    :raises IloError: if the content is **not** gzipped
    """
    try:
        # GzipFile needs a *bytes* stream. ``response.content`` holds
        # the raw (undecoded) payload of the ``requests`` response;
        # ``response.text`` is a decoded str and would fail here on
        # python 3.
        gzipper = gzip.GzipFile(fileobj=six.BytesIO(response.content))
        LOG.debug(self._("Received compressed response for "
                         "url %(url)s."), {'url': url})
        uncompressed_string = (gzipper.read().decode('UTF-8'))
        response_body = json.loads(uncompressed_string)
    except Exception as e:
        # Anything that goes wrong (not gzipped, bad JSON, ...) is
        # surfaced as an IloError carrying the original cause.
        LOG.debug(
            self._("Error occurred while decompressing body. "
                   "Got invalid response '%(response)s' for "
                   "url %(url)s: %(error)s"),
            {'url': url, 'response': response.text, 'error': e})
        raise exception.IloError(e)
    return response_body
def _rest_op(self, operation, suburi, request_headers, request_body):
    """Generic REST Operation handler.

    Builds the full ``https://<host><suburi>`` URL, attaches HTTP
    Basic auth from ``self.login``/``self.password``, dispatches the
    request via ``requests`` and follows up to REDIRECTION_ATTEMPTS
    HTTP 301 redirects.

    :param operation: HTTP verb, e.g. 'GET', 'PATCH', 'PUT', 'POST'.
    :param suburi: path component appended to ``https://<host>``.
    :param request_headers: dict of extra headers, or None.
    :param request_body: dict/list (sent as JSON) or other payload
        (sent form-urlencoded).
    :returns: tuple of (status code, response headers, parsed
        response body).
    :raises IloConnectionError: if iLO is unreachable, or it
        redirects more than REDIRECTION_ATTEMPTS times.
    """
    url = urlparse.urlparse('https://' + self.host + suburi)
    # Used for logging on redirection error.
    start_url = url.geturl()
    LOG.debug(self._("%(operation)s %(url)s"),
              {'operation': operation, 'url': start_url})
    if request_headers is None or not isinstance(request_headers, dict):
        request_headers = {}
    # Use self.login/self.password and Basic Auth
    if self.login is not None and self.password is not None:
        auth_data = self.login + ":" + self.password
        hr = "BASIC " + base64.b64encode(
            auth_data.encode('ascii')).decode("utf-8")
        request_headers['Authorization'] = hr
    # Pick the Content-Type from the body's shape: dict/list payloads
    # go out as JSON, anything else as a form post.
    if request_body is not None:
        if (isinstance(request_body, dict)
                or isinstance(request_body, list)):
            request_headers['Content-Type'] = 'application/json'
        else:
            request_headers['Content-Type'] = ('application/'
                                               'x-www-form-urlencoded')

    """Helper methods to retry and keep retrying on redirection - START"""
    def retry_if_response_asks_for_redirection(response):
        # NOTE:Do not assume every HTTP operation will return a JSON
        # request_body. For example, ExtendedError structures are only
        # required for HTTP 400 errors and are optional elsewhere as they
        # are mostly redundant for many of the other HTTP status code.
        # In particular, 200 OK responses should not have to return any
        # request_body.
        # NOTE: this makes sure the headers names are all lower cases
        # because HTTP says they are case insensitive
        # Follow HTTP redirect
        if response.status_code == 301 and 'location' in response.headers:
            # Stash the new target on the function object so the next
            # _fetch_response attempt picks it up.
            retry_if_response_asks_for_redirection.url = (
                urlparse.urlparse(response.headers['location']))
            LOG.debug(self._("Request redirected to %s."),
                      retry_if_response_asks_for_redirection.url.geturl())
            return True
        return False

    @retrying.retry(
        # Note(deray): Return True if we should retry, False otherwise.
        # In our case, when the url response we receive asks for
        # redirection then we retry.
        retry_on_result=retry_if_response_asks_for_redirection,
        # Note(deray): Return True if we should retry, False otherwise.
        # In our case, when it's an IloConnectionError we don't retry.
        # ``requests`` already takes care of issuing max number of
        # retries if the URL service is unavailable.
        retry_on_exception=(
            lambda e: not isinstance(e, exception.IloConnectionError)),
        stop_max_attempt_number=REDIRECTION_ATTEMPTS)
    def _fetch_response():
        # Always read the (possibly redirected) target from the
        # function attribute, not the enclosing-scope ``url``.
        url = retry_if_response_asks_for_redirection.url
        kwargs = {'headers': request_headers,
                  'data': json.dumps(request_body)}
        # Verify TLS against the configured CA bundle if one was
        # given, otherwise skip certificate verification.
        if self.cacert is not None:
            kwargs['verify'] = self.cacert
        else:
            kwargs['verify'] = False
        LOG.debug(self._('\n\tHTTP REQUEST: %(restreq_method)s'
                         '\n\tPATH: %(restreq_path)s'
                         '\n\tBODY: %(restreq_body)s'
                         '\n'),
                  {'restreq_method': operation,
                   'restreq_path': url.geturl(),
                   'restreq_body': request_body})
        # Dispatch via requests.get/post/patch/... per the verb.
        request_method = getattr(requests, operation.lower())
        try:
            response = request_method(url.geturl(), **kwargs)
        except Exception as e:
            LOG.debug(self._("Unable to connect to iLO. %s"), e)
            raise exception.IloConnectionError(e)
        return response
    """Helper methods to retry and keep retrying on redirection - END"""

    try:
        # Note(deray): This is a trick to use the function attributes
        # to overwrite variable/s (in our case ``url``) and use the
        # modified one in nested functions, i.e. :func:`_fetch_response`
        # and :func:`retry_if_response_asks_for_redirection`
        retry_if_response_asks_for_redirection.url = url
        response = _fetch_response()
    except retrying.RetryError as e:
        # Redirected for REDIRECTION_ATTEMPTS - th time. Throw error
        msg = (self._("URL Redirected %(times)s times continuously. "
                      "URL used: %(start_url)s More info: %(error)s") %
               {'start_url': start_url, 'times': REDIRECTION_ATTEMPTS,
                'error': str(e)})
        LOG.debug(msg)
        raise exception.IloConnectionError(msg)
    response_body = {}
    if response.text:
        try:
            response_body = json.loads(response.text)
        except (TypeError, ValueError):
            # Note(deray): If it doesn't decode as json, then
            # resources may return gzipped content.
            # ``json.loads`` on python3 raises TypeError when
            # ``response.text`` is gzipped one.
            response_body = (
                self._get_response_body_from_gzipped_content(url,
                                                             response))
    LOG.debug(self._('\n\tHTTP RESPONSE for %(restreq_path)s:'
                     '\n\tCode: %(status_code)s'
                     '\n\tResponse Body: %(response_body)s'
                     '\n'),
              {'restreq_path': url.geturl(),
               'status_code': response.status_code,
               'response_body': response_body})
    return response.status_code, response.headers, response_body
def _rest_patch(self, suburi, request_headers, request_body):
"""REST PATCH operation.
HTTP response codes could be 500, 404, 202 etc.
"""
return self._rest_op('PATCH', suburi, request_headers, request_body) | REST PATCH operation.
HTTP response codes could be 500, 404, 202 etc. | entailment |
def _rest_put(self, suburi, request_headers, request_body):
"""REST PUT operation.
HTTP response codes could be 500, 404, 202 etc.
"""
return self._rest_op('PUT', suburi, request_headers, request_body) | REST PUT operation.
HTTP response codes could be 500, 404, 202 etc. | entailment |
def _rest_post(self, suburi, request_headers, request_body):
"""REST POST operation.
The response body after the operation could be the new resource, or
ExtendedError, or it could be empty.
"""
return self._rest_op('POST', suburi, request_headers, request_body) | REST POST operation.
The response body after the operation could be the new resource, or
ExtendedError, or it could be empty. | entailment |
def findCycle(self, cycNum):
    '''
    Return the entry of self.cycles nearest to the requested cycle.

    Ties between the two neighbouring cycles round down to the
    earlier one.

    Parameters
    ----------
    cycNum : int
        The desired cycle number.
    '''
    target = int(cycNum)
    values = [int(c) for c in self.cycles]
    # Index of the first stored cycle strictly greater than target.
    idx = len(values)
    for j, v in enumerate(values):
        if target < v:
            idx = j
            break
    if idx == 0:
        # Target precedes every stored cycle.
        return self.cycles[0]
    if idx == len(values):
        # Target is at or beyond the last stored cycle.
        return self.cycles[-1]
    below, above = values[idx - 1], values[idx]
    if above - target >= target - below:
        return self.cycles[idx - 1]
    return self.cycles[idx]
def get(self, cycle_list, dataitem=None, isotope=None, sparse=1):
    '''
    Get Data from HDF5 files.

    There are three ways to call this function

    1. get(dataitem)
       Fetches the dataitem for all cycles. If dataitem is a header
       attribute or list of attributes then the data is returned.
       If dataitem is an individual or list of column attributes,
       data columns or isotopes/elements the data is returned for
       all cycles.
    2. get(cycle_list, dataitem)
       Fetches the dataitem or list of dataitems for the cycle
       or list of cycles. The variable dataitems can contain column
       attributes, data columns, and isotopes/elements.
    3. get(cycle_list, dataitem, isotope)
       Fetches the dataitems like the second method except that
       one of the dataitems must be either "iso_massf" or "yps",
       and in the data returned "iso_massf" and "yps" are replaced
       with the data from the isotopes. The isotopes must be in
       the form given by se.isotopes or se.elements.

    Parameters
    ----------
    cycle_list : list, integer or string
        If cycle_list is a list or string and all of the entries
        are header attributes then the attributes are returned.
        If cycle_list is a list or string of dataitems then the
        dataitems are fetched for all cycles.
        If cycle_list is a list, integer or string of cycle numbers
        then data is returned for those cycles.
    dataitem: list or string, optional
        If dataitem is not None then the data for each item is
        returned for the cycle or list of cycles. dataitem may be an
        individual or a mixed list of column attributes, column
        data or isotopes/elements. If dataitem is None then
        cycle_list must be a string. The default is None.
    isotope: list or string, optional
        If one of the dataitems is "iso_massf" or "yps" then it is
        replaced with the data from the individual isotopes/elements
        listed in isotope. The default is None.
    sparse : int
        Implements a sparsity factor on the fetched data i.e. only
        the i th cycle in cycle_list data is returned,
        where i = sparse.
    '''
    # Check out the inputs
    t1=time.time()
    isotopes_of_interest = []
    nested_list = False
    # if one of cycle_list, dataitem or isotope is given as a string convert it to a list
    if isinstance(cycle_list, basestring):
        cycle_list = [cycle_list]
    else:
        try:
            if len(cycle_list) == 1:
                nested_list = True
        except TypeError:
            pass #leave nested_list as false
    if isinstance(dataitem, basestring):
        dataitem = [dataitem]
    if isinstance(isotope, basestring):
        isotope = [isotope]
    if dataitem==None and isotope==None:
        # Call signature 1: get(dataitem) -- cycle_list actually
        # holds the requested dataitem(s).
        option_ind = 1
        dataitem = cycle_list
        if not any([item in self.hattrs for item in dataitem]):
            cycle_list = self.cycles
        else:
            first_file = mrT.File(self.h5s[0].filename,'r')
            dat = []
            # get all dataitems from header attributes
            for item in dataitem:
                tmp = first_file.attrs.get(item, None)
                try:
                    if len(tmp) == 1:
                        tmp = tmp[0]
                except TypeError: #if a scaler is returned do nothing
                    pass
                dat.append(tmp)
            # if only one header attribute is required dont return as a list
            if (len(dat) == 1) and (not nested_list):
                dat = dat[0]
            first_file.close()
            return dat
        # Isotopes/elements requested: re-enter via signature 2.
        if any([item.split('-')[0] in self.isos for item in dataitem]):
            return self.get(cycle_list,dataitem,sparse=sparse)
    elif isotope==None:
        # Call signature 2: get(cycle_list, dataitem). Split the
        # requested items into plain columns and isotopes/elements.
        option_ind = 2
        cycle_list = cycle_list
        dataitem = dataitem
        # if one dataitem is given as a string convert it to a list
        if isinstance(dataitem, basestring):
            dataitem = [dataitem]
        new_dataitem = []
        new_isotopes = []
        for item in dataitem:
            if item.split('-')[0] in self.isos:
                new_isotopes.append(item)
            else:
                new_dataitem.append(item)
        if len(new_isotopes) != 0:
            # Delegate to signature 3, then reshape the result to
            # match the original dataitem ordering.
            tmp = []
            try:
                tmp = self.get(cycle_list,new_dataitem + ['iso_massf'],new_isotopes,sparse=sparse)
            except: # in some old se files there maybe still yps as the name for the abundance arrays
                tmp = self.get(cycle_list,new_dataitem + ['yps'],new_isotopes,sparse=sparse)
            # modify the dat list so dat is structured like dataitems
            dat = []
            #make sure tmp containes the data as a list of cycles
            if isinstance(cycle_list, basestring):
                tmp = [tmp]
            else:
                try:
                    if len(cycle_list) == 1:
                        tmp = [tmp]
                except TypeError:
                    tmp = [tmp]
            for cyc in tmp:
                temp_dataitem = []
                for item in dataitem:
                    if item in new_dataitem:
                        temp_dataitem.append(cyc[new_dataitem.index(item)])
                    else:
                        if len(new_dataitem) == 0:
                            temp_dataitem = cyc
                        else:
                            if len(new_isotopes) == 1:
                                temp_dataitem.append(cyc[-1])
                            else:
                                temp_dataitem.append(cyc[-1][new_isotopes.index(item)])
                dat.append(temp_dataitem)
            if (len(dat) == 1) and (not nested_list):
                dat = dat[0]
            return dat
    else:
        # Call signature 3: get(cycle_list, dataitem, isotope).
        # there is an implicite rule here that if you want 2D arrays you have
        # to give 3 args, or, in other words you have to give a cycle or cycle
        # array; there is no good reason for that, except the programmers
        # laziness
        option_ind = 3
        cycle_list = cycle_list
        dataitem = dataitem
        isotopes_of_interest = isotope
        # we need to find out the shellnb to know if any yps array may just be
        # a one row array, as - for example- in the surf.h5 files
        # SJONES: I think here we only need to look at the first shellnb(!)
        #shellnb=self.get(cycle_list,'shellnb')
        try: #check if cycle_list is not a list
            cycle_list[0]
        except (TypeError,IndexError):
            cycle_list = [cycle_list]
        shellnb=self.get(cycle_list[0],'shellnb')
    if sparse <1:
        sparse=1
    # Just in case the user inputs integers
    try:
        for x in range(len(cycle_list)):
            cycle_list[x] = str(cycle_list[x])
    except TypeError:
        cycle_list = [str(cycle_list)]
    if option_ind != 1:
        # Zero-pad requested cycle numbers to the stored width.
        try: #if it is a single cycle make sure its formatted correctly
            if cycle_list.isdigit():
                cycle_list = [cycle_list]
            # NOTE(review): this try-branch is effectively dead -- by
            # this point cycle_list is always a list (see the str()
            # loop above), so .isdigit() raises AttributeError and
            # control always goes to the except-branch below. If it
            # ever did run, the while loop below never decrements
            # diff (compare the except-branch) and the padded value
            # is assigned only to the loop variable.
            for cycle in cycle_list:
                if len(cycle) != len(self.cycles[0]):
                    #print "a"
                    diff = len(self.cycles[0])-len(cycle)
                    OO = ''
                    while diff >=1:
                        OO+='0'
                    cycle = OO+cycle
        except AttributeError: ##if it is a list of cycles make sure its formatted correctly
            if cycle_list[0].isdigit():
                for x in range(len(cycle_list)):
                    if len(str(cycle_list[x])) != len(str(self.cycles[0])):
                        #print "b"
                        diff = len(str(self.cycles[0]))-len(str(cycle_list[x]))
                        OO = ''
                        while diff >=1:
                            OO+='0'
                            diff-=1
                        try:
                            cycle_list[x] = OO+cycle_list[x]
                        except TypeError:
                            cycle_list[0] = OO+cycle_list[0]
    dat = []
    cycle_list.sort()
    cyclelist=np.array(list(map(int, cycle_list)))
    # cycles_requested is a list of indices from cyclelist
    # The index of the larges and smallest indices should be stored
    # in sorted order. As new requests are made if the requests
    # border or over lap then only keep the index of the larges and
    # smallest indices.
    cycles_requested = []
    # Sometimes bad data or last restart.h5 files contain no cycles,
    # causing the code to crash. Do a simple try/except here:
    file_min=[]
    file_max=[]
    try:
        for h5 in self.h5s:
            file_min.append(int(h5.cycle[0]))
            file_max.append(int(h5.cycle[-1]))
    except IndexError:
        print('File '+h5.filename+' contains no data, please remove or rename it')
        print('Once the file has been removed or renamed, the preprocessor file must be re-written. Do this by either removing the file h5Preproc.txt from the data directory or by invoking the se instance with rewrite=True')
        print('At present, h5T cannot check for empty files since the overhead using the mounted VOSpace would be too great.')
        raise IOError('Cycle-less file encountered')
    file_min.sort()
    file_max.sort()
    for h5 in self.h5s:
        #initalize file metadata
        min_file = int(h5.cycle[0])
        max_file = int(h5.cycle[-1])
        min_list = int(cyclelist[0])
        max_list = int(cyclelist[-1])
        index_min = None #if None start at begining
        index_max = None #if None finish at end
        # SJONES Now we need to add the case that the set only contains one file:
        if len(file_min) == 1:
            min_file = min_list - 1
            max_file = max_list + 1
        else:
            # Widen this file's cycle range to the midpoints between
            # neighbouring files, so every requested cycle maps to
            # exactly one file.
            file_index = file_min.index(min_file)
            if file_index == 0:
                if min_list - 1 < min_file:
                    min_file = min_list - 1
                max_file = (file_min[file_index + 1] + max_file)//2
            elif file_index == len(file_min) - 1:
                min_file = (file_max[file_index - 1] + min_file)//2 + 1
                if max_list + 1 > max_file:
                    max_file = max_list + 1
            else:
                min_file = (file_max[file_index - 1] + min_file)//2 + 1
                max_file = (file_min[file_index + 1] + max_file)//2
        # calculate the left and right limits of the intersection
        # of the lists h5.cycle and cyclelist
        if (max_list < min_file) or (max_file < min_list):
            # the lists do not intersect
            continue
        elif (min_list <= min_file) and (max_file <= max_list):
            # all of h5.cycle is within cyclelist
            index_min = bisect.bisect_left(cyclelist, min_file)
            index_max = bisect.bisect_right(cyclelist, max_file)
        elif (min_file <= min_list) and (max_list <= max_file):
            # all of cyclelist is within h5.cycle
            index_min = None
            index_max = None
        else:
            if min_list > min_file:
                # cyclelist overlaps the right edge of h5.cycle
                index_min = None
                index_max = bisect.bisect_right(cyclelist, max_file)
            else:
                # cyclelist overlaps the left edge of h5.cylce
                index_min = bisect.bisect_left(cyclelist, min_file)
                index_max = None
        # maintin list of all requested cycles by keeping trak of
        # the maximum and minimum indices
        imin = index_min
        if index_min == None:
            imin = 0
        imax = index_max
        if index_max == None:
            imax = len(cyclelist)
        request_min = bisect.bisect_left(cycles_requested, imin)
        request_max = bisect.bisect_right(cycles_requested, imax)
        # if the new request overlabs older request remove them
        del cycles_requested[request_min:request_max]
        if ((request_max-request_min) % 2) ==1:
            # new and old request overlaped on one edge only
            if request_min % 2 == 0:
                # add new starting index
                cycles_requested.insert(request_min, imin)
            else:
                # add new ending index
                cycles_requested.insert(request_min, imax)
        else:
            # new and old requests overlaped on two edges
            if request_min % 2 == 0:
                # old request was contained with in new request
                cycles_requested.insert(request_min, imin)
                cycles_requested.insert(request_min + 1, imax)
            else:
                # new request wat contained within old request
                pass
        # Lazily start the reader thread for this file once, then
        # fetch the slice of cycles this file is responsible for.
        if not self.h5sStarted[self.h5s.index(h5)]:
            h5.start()
            h5.join()
            temp = h5.fetch_data_sam(dataitem,cycle_list[index_min:index_max],len(cycle_list),len(dat))
            self.h5sStarted[self.h5s.index(h5)]=True
        else:
            temp = h5.fetch_data_sam(dataitem,cycle_list[index_min:index_max],len(cycle_list),len(dat))
        temp_dat = []
        for temp_num, temp_cycle in enumerate(temp):
            temp_dataforcycle = []
            for dataitem_num, temp_dataitem in enumerate(temp_cycle):
                # identify what cycle the temp data was collected from
                temp_dataitem=self.red_dim(temp_dataitem)
                # if option_ind == 3 and isotopes_of_interest != []:
                if (dataitem[dataitem_num] == 'iso_massf' or dataitem[dataitem_num] == 'yps') and isotopes_of_interest != []:
                    # Figure out the index
                    index = []
                    iso_tmp = []
                    if 'iso' in dataitem[dataitem_num]: #if we are looking at an isotope
                        iso_tmp = self.isotopes
                    else:
                        iso_tmp = self.elements
                    for iso in isotopes_of_interest: #finds the location of the isotope
                        x = iso_tmp.index(iso)
                        index.append(x)
                    if index == []:
                        # if none of the isotopes of interest are found
                        # then the index defaults to [0], so that the loop
                        # will still try to acess the data in t.
                        index = [0]
                    # NOTE(review): islist is computed but no longer
                    # used; its consumer is the commented-out
                    # shellnb_index logic below.
                    islist=True
                    if len(cycle_list)==1:
                        islist=False
                    # shellnb_index = 0
                    # if index_min == None:
                    #     shellnb_index = temp_num
                    # else:
                    #     shellnb_index = index_min + temp_num
                    temp_multicyc = []
                    for i in index:
                        # if islist:
                        #     if shellnb[shellnb_index] == 1: # again take care of 1-row 2D arrays
                        if shellnb == 1: # again take care of 1-row 2D arrays
                            temp_multicyc.append(temp_dataitem[i])
                        else:
                            temp_multicyc.append(temp_dataitem[:,i])
                    # else:
                    #     if shellnb == 1: # again take care of 1-row 2D arrays
                    #         temp_multicyc.append(temp_dataitem[i])
                    #     else:
                    #         temp_multicyc.append(temp_dataitem[:,i])
                    if len(temp_multicyc) == 1: # agian take care of 1-row arrays
                        temp_multicyc = temp_multicyc[0]
                    temp_dataitem = temp_multicyc
                temp_dataforcycle.append(temp_dataitem)
            if len(temp_dataforcycle) == 1: # agian take care of 1-row arrays
                temp_dataforcycle = temp_dataforcycle[0]
            # Now add the information to the list we pass back
            temp_dat.append(temp_dataforcycle)
        # calculate the proper insertion point for the data colected from
        # the file h5 in self.h5s
        insert_pnt = 0
        if index_min is not None: #alex: in py2: x < None == False
            for i in range(len(cycles_requested)):
                if i % 2 == 1:
                    if cycles_requested[i] < index_min:
                        insert_pnt += cycles_requested[i] - cycles_requested[i-1]
                    elif cycles_requested[i - 1] < index_min:
                        insert_pnt += index_min - cycles_requested[i - 1]
        # insert the cycle data from the current file into the apropiat place
        # in the output data.
        dat[insert_pnt:insert_pnt] = temp_dat
    #check if cycles were not requested from the file
    # SJONES comment
    # missing_cycles = np.array([])
    # if len(cycles_requested) != 2:
    #     if len(cycles_requested) == 0:
    #         missing_cycles = np.array([cycle_list])
    #     else:
    #         cycles_requested = [None] + cycles_requested + [None]
    #         for i in xrange(0, len(cycles_requested), 2):
    #             min = cycles_requested[i]
    #             max = cycles_requested[i + 1]
    #             missing_cycles = np.append(missing_cycles, cycle_list[min:max])
    #     print "The requested cycles: " + str(missing_cycles) + " are not available in this data set"
    # elif (cycles_requested[0] != 0) or (cycles_requested[1] != len(cyclelist)):
    #     min = cycles_requested[0]
    #     max = cycles_requested[1]
    #     missing_cycles = np.append(missing_cycles, cycle_list[0:min])
    #     missing_cycles = np.append(missing_cycles, cycle_list[max:])
    #     print "The requested cycles: " + str(missing_cycles) + " are not available in this data set"
    # Unwrap single-cycle results so callers get the bare item back.
    if len(dat) < 2 and option_ind != 3 and (not nested_list):
        try:
            dat = dat[0]
        except IndexError:
            None
        except TypeError:
            None
    try:
        if len(dat) < 2 and isotopes_of_interest != []:
            dat = dat[0]
    except TypeError:
        None
    except IndexError:
        None
    t2=time.time()
    return dat
def red_dim(self, array):
    """
    Reduce the dimensions of an array until it is no longer of
    length 1.

    Repeatedly unwraps single-element lists/numpy arrays, e.g.
    [[[5]]] -> 5 and [[1, 2]] -> [1, 2]. Non-sequence inputs are
    returned unchanged.

    :param array: a (possibly nested) list or numpy array.
    :returns: the innermost object that is not a length-1 sequence.
    """
    while isinstance(array, (list, np.ndarray)):
        try:
            if len(array) != 1:
                break
        except TypeError:
            # Object has no len() (e.g. a 0-d numpy array): stop.
            break
        array = array[0]
    return array
def _perform_request(self, request, parser=None, parser_args=None, operation_context=None):
'''
Sends the request and return response. Catches HTTPError and hands it
to error handler
'''
operation_context = operation_context or _OperationContext()
retry_context = RetryContext()
retry_context.is_emulated = self.is_emulated
# Apply the appropriate host based on the location mode
self._apply_host(request, operation_context, retry_context)
# Apply common settings to the request
_update_request(request, self._X_MS_VERSION, self._USER_AGENT_STRING)
client_request_id_prefix = str.format("Client-Request-ID={0}", request.headers['x-ms-client-request-id'])
while True:
try:
try:
# Execute the request callback
if self.request_callback:
self.request_callback(request)
# Add date and auth after the callback so date doesn't get too old and
# authentication is still correct if signed headers are added in the request
# callback. This also ensures retry policies with long back offs
# will work as it resets the time sensitive headers.
_add_date_header(request)
self.authentication.sign_request(request)
# Set the request context
retry_context.request = request
# Log the request before it goes out
logger.info("%s Outgoing request: Method=%s, Path=%s, Query=%s, Headers=%s.",
client_request_id_prefix,
request.method,
request.path,
request.query,
str(request.headers).replace('\n', ''))
# Perform the request
response = self._httpclient.perform_request(request)
# Execute the response callback
if self.response_callback:
self.response_callback(response)
# Set the response context
retry_context.response = response
# Log the response when it comes back
logger.info("%s Receiving Response: "
"%s, HTTP Status Code=%s, Message=%s, Headers=%s.",
client_request_id_prefix,
self.extract_date_and_request_id(retry_context),
response.status,
response.message,
str(request.headers).replace('\n', ''))
# Parse and wrap HTTP errors in AzureHttpError which inherits from AzureException
if response.status >= 300:
# This exception will be caught by the general error handler
# and raised as an azure http exception
_http_error_handler(
HTTPError(response.status, response.message, response.headers, response.body))
# Parse the response
if parser:
if parser_args:
args = [response]
args.extend(parser_args)
return parser(*args)
else:
return parser(response)
else:
return
except AzureException as ex:
retry_context.exception = ex
raise ex
except Exception as ex:
retry_context.exception = ex
if sys.version_info >= (3,):
# Automatic chaining in Python 3 means we keep the trace
raise AzureException(ex.args[0])
else:
# There isn't a good solution in 2 for keeping the stack trace
# in general, or that will not result in an error in 3
# However, we can keep the previous error type and message
# TODO: In the future we will log the trace
msg = ""
if len(ex.args) > 0:
msg = ex.args[0]
raise AzureException('{}: {}'.format(ex.__class__.__name__, msg))
except AzureException as ex:
# only parse the strings used for logging if logging is at least enabled for CRITICAL
if logger.isEnabledFor(logging.CRITICAL):
exception_str_in_one_line = str(ex).replace('\n', '')
status_code = retry_context.response.status if retry_context.response is not None else 'Unknown'
timestamp_and_request_id = self.extract_date_and_request_id(retry_context)
logger.info("%s Operation failed: checking if the operation should be retried. "
"Current retry count=%s, %s, HTTP status code=%s, Exception=%s.",
client_request_id_prefix,
retry_context.count if hasattr(retry_context, 'count') else 0,
timestamp_and_request_id,
status_code,
exception_str_in_one_line)
# Decryption failures (invalid objects, invalid algorithms, data unencrypted in strict mode, etc)
# will not be resolved with retries.
if str(ex) == _ERROR_DECRYPTION_FAILURE:
logger.error("%s Encountered decryption failure: this cannot be retried. "
"%s, HTTP status code=%s, Exception=%s.",
client_request_id_prefix,
timestamp_and_request_id,
status_code,
exception_str_in_one_line)
raise ex
# Determine whether a retry should be performed and if so, how
# long to wait before performing retry.
retry_interval = self.retry(retry_context)
if retry_interval is not None:
# Execute the callback
if self.retry_callback:
self.retry_callback(retry_context)
logger.info(
"%s Retry policy is allowing a retry: Retry count=%s, Interval=%s.",
client_request_id_prefix,
retry_context.count,
retry_interval)
# Sleep for the desired retry interval
sleep(retry_interval)
else:
logger.error("%s Retry policy did not allow for a retry: "
"%s, HTTP status code=%s, Exception=%s.",
client_request_id_prefix,
timestamp_and_request_id,
status_code,
exception_str_in_one_line)
raise ex
finally:
# If this is a location locked operation and the location is not set,
# this is the first request of that operation. Set the location to
# be used for subsequent requests in the operation.
if operation_context.location_lock and not operation_context.host_location:
# note: to cover the emulator scenario, the host_location is grabbed
# from request.host_locations(which includes the dev account name)
# instead of request.host(which at this point no longer includes the dev account name)
operation_context.host_location = {retry_context.location_mode: request.host_locations[retry_context.location_mode]} | Sends the request and return response. Catches HTTPError and hands it
to error handler | entailment |
def _padding_model_number(number, max_num):
'''
This method returns a zero-front padded string
It makes out of str(45) -> '0045' if 999 < max_num < 10000. This is
meant to work for reasonable integers (maybe less than 10^6).
Parameters
----------
number : integer
number that the string should represent.
max_num : integer
max number of cycle list, implies how many 0s have be padded
'''
cnum = str(number)
clen = len(cnum)
cmax = int(log10(max_num)) + 1
return (cmax - clen)*'0' + cnum | This method returns a zero-front padded string
It makes out of str(45) -> '0045' if 999 < max_num < 10000. This is
meant to work for reasonable integers (maybe less than 10^6).
Parameters
----------
number : integer
number that the string should represent.
max_num : integer
max number of cycle list, implies how many 0s have be padded | entailment |
def flux_chart(file_name, plotaxis, plot_type, which_flux=None,
I_am_the_target=None, prange=None):
'''
Plots a chart with fluxes
Parameters
----------
file_name : string
Name of the file of fluxes we are looking at.
plotaxis : list
[xmin, xmax, ymin, ymax], where on x axis there is neutron
number and on y axis there is Z.
plot_types : integer
Set to 0 for standard flux plot. Set to 1 if fluxes focused
on one specie.
which_flux : integer, optional
Set to 0 for nucleosynthesis flux plot. Set to 1 is for energy
flux plot. Seting to None is the same a 0. The default is
None.
I_am_the_target : list, optional
A 2xArray used only if plot_type=1, and is given by [neutron
number, proton number]. The default is None.
prange : integer, optional
The range of fluxes to be considered. If prange is None, then
8 fluxes are consdered. The default is None.
Notes
-----
This script is terribly slow and needs to be improved. For now I
put here in data_plot:
[1]: import data_plot
[2]: data_plot.flux_chart('file_name', [xmin, xmax, ymin, ymax],
int, which_flux, I_am_the_target, prange)
The pdf is created, but an error bumped up and the gui is empty.
To avoid this, I had to set 'text.usetex': False. See below. Also,
for the same reason no label in x axys is written using
'text.usetex': True.
Note also that the GUI works really slow with this plot. So, we
need to optimize from the graphic point of view. This need to be
included in ppn.py I think, and set in multi option too, in case
we want to read more flux files at the same time.
Finally, you need to have stable.dat to read in to make it work ...
'''
import numpy as np
import matplotlib.pyplot as plt
#from matplotlib.mpl import colors,cm # deppreciated in mpl ver 1.3
# use line below instead
from matplotlib import colors,cm
from matplotlib.patches import Rectangle, Arrow
from matplotlib.collections import PatchCollection
from matplotlib.offsetbox import AnchoredOffsetbox, TextArea
import sys
print_max_flux_in_plot = True
f = open(file_name)
lines = f.readline()
lines = f.readlines()
f.close()
# starting point of arrow
coord_x_1 = []
coord_y_1 = []
# ending point of arrow (option 1)
coord_x_2 = []
coord_y_2 = []
# ending point of arrow (option 2)
coord_x_3 = []
coord_y_3 = []
# fluxes
flux_read = []
flux_log10 = []
if which_flux == None or which_flux == 0:
print('chart for nucleosynthesis fluxes [dYi/dt]')
line_to_read = 9
elif which_flux == 1:
print('chart for energy fluxes')
line_to_read = 10
elif which_flux == 2:
print('chart for timescales')
line_to_read = 11
elif which_flux > 2:
print("you have only option 0, 1 or 2, not larger than 2")
single_line = []
for i in range(len(lines)):
single_line.append(lines[i].split())
coord_y_1.append(float(single_line[i][1]))
coord_x_1.append(float(single_line[i][2])-coord_y_1[i])
coord_y_2.append(float(single_line[i][5]))
coord_x_2.append(float(single_line[i][6])-coord_y_2[i])
coord_y_3.append(float(single_line[i][7]))
coord_x_3.append(float(single_line[i][8])-coord_y_3[i])
try:
flux_read.append(float(single_line[i][line_to_read]))
except ValueError: # this is done to avoid format issues like 3.13725-181...
flux_read.append(1.0E-99)
flux_log10.append(np.log10(flux_read[i]+1.0e-99))
print('file read!')
# I need to select smaller sample, with only fluxes inside plotaxis.
coord_y_1_small=[]
coord_x_1_small=[]
coord_y_2_small=[]
coord_x_2_small=[]
coord_y_3_small=[]
coord_x_3_small=[]
flux_log10_small = []
for i in range(len(flux_log10)):
I_am_in = 0
if coord_y_1[i] > plotaxis[2] and coord_y_1[i] < plotaxis[3] and coord_x_1[i] > plotaxis[0] and coord_x_1[i] < plotaxis[1]:
I_am_in = 1
coord_y_1_small.append(coord_y_1[i])
coord_x_1_small.append(coord_x_1[i])
coord_y_2_small.append(coord_y_2[i])
coord_x_2_small.append(coord_x_2[i])
coord_y_3_small.append(coord_y_3[i])
coord_x_3_small.append(coord_x_3[i])
flux_log10_small.append(flux_log10[i])
if coord_y_3[i] > plotaxis[2] and coord_y_3[i] < plotaxis[3] and coord_x_3[i] > plotaxis[0] and coord_x_3[i] < plotaxis[1] and I_am_in == 0:
I_am_in = 1
coord_y_1_small.append(coord_y_1[i])
coord_x_1_small.append(coord_x_1[i])
coord_y_2_small.append(coord_y_2[i])
coord_x_2_small.append(coord_x_2[i])
coord_y_3_small.append(coord_y_3[i])
coord_x_3_small.append(coord_x_3[i])
flux_log10_small.append(flux_log10[i])
if plot_type == 1:
print('I_am_the_target=',I_am_the_target)
#I_am_the_target = [56.-26.,26.]
# here below need for plotting
# plotaxis = [xmin,xmax,ymin,ymax]
#plotaxis=[1,20,1,20]
#plotaxis=[0,0,0,0]
# elemental labels off/on [0/1]
ilabel = 1
# label for isotopic masses off/on [0/1]
imlabel = 1
# turn lines for magic numbers off/on [0/1]
imagic = 0
# flow is plotted over "prange" dex. If flow < maxflow-prange it is not plotted
if prange == None:
print('plot range given by default')
prange = 8.
#############################################
# we should scale prange on plot_axis range, not on max_flux!
max_flux = max(flux_log10)
ind_max_flux = flux_log10.index(max_flux)
max_flux_small = max(flux_log10_small)
min_flux = min(flux_log10)
ind_min_flux = flux_log10.index(min_flux)
min_flux_small = min(flux_log10_small)
#nzmax = int(max(max(coord_y_1),max(coord_y_2),max(coord_y_3)))+1
#nnmax = int(max(max(coord_x_1),max(coord_x_2),max(coord_x_3)))+1
nzmax = int(max(max(coord_y_1_small),max(coord_y_2_small),max(coord_y_3_small)))+1
nnmax = int(max(max(coord_x_1_small),max(coord_x_2_small),max(coord_x_3_small)))+1
nzycheck = np.zeros([nnmax,nzmax,3])
#coord_x_out = np.zeros(len(coord_x_2))
#coord_y_out = np.zeros(len(coord_y_2))
#for i in range(len(flux_log10)):
# nzycheck[coord_x_1[i],coord_y_1[i],0] = 1
# nzycheck[coord_x_1[i],coord_y_1[i],1] = flux_log10[i]
# if coord_x_2[i] >= coord_x_3[i]:
# coord_x_out[i] = coord_x_2[i]
# coord_y_out[i] = coord_y_2[i]
# nzycheck[coord_x_out[i],coord_y_out[i],0] = 1
# nzycheck[coord_x_out[i],coord_y_out[i],1] = flux_log10[i]
# elif coord_x_2[i] < coord_x_3[i]:
# coord_x_out[i] = coord_x_3[i]
# coord_y_out[i] = coord_y_3[i]
# nzycheck[coord_x_out[i],coord_y_out[i],0] = 1
# nzycheck[coord_x_out[i],coord_y_out[i],1] = flux_log10[i]
# if flux_log10[i]>max_flux-prange:
# nzycheck[coord_x_1[i],coord_y_1[i],2] = 1
# nzycheck[coord_x_out[i],coord_y_out[i],2] = 1
coord_x_out = np.zeros(len(coord_x_2_small))
coord_y_out = np.zeros(len(coord_y_2_small))
for i in range(len(flux_log10_small)):
nzycheck[coord_x_1_small[i],coord_y_1_small[i],0] = 1
nzycheck[coord_x_1_small[i],coord_y_1_small[i],1] = flux_log10_small[i]
if coord_x_2_small[i] >= coord_x_3_small[i]:
coord_x_out[i] = coord_x_2_small[i]
coord_y_out[i] = coord_y_2_small[i]
nzycheck[coord_x_out[i],coord_y_out[i],0] = 1
nzycheck[coord_x_out[i],coord_y_out[i],1] = flux_log10_small[i]
elif coord_x_2_small[i] < coord_x_3_small[i]:
coord_x_out[i] = coord_x_3_small[i]
coord_y_out[i] = coord_y_3_small[i]
nzycheck[coord_x_out[i],coord_y_out[i],0] = 1
nzycheck[coord_x_out[i],coord_y_out[i],1] = flux_log10_small[i]
if which_flux == None or which_flux < 2 and flux_log10_small[i]>max_flux_small-prange:
nzycheck[coord_x_1_small[i],coord_y_1_small[i],2] = 1
nzycheck[coord_x_out[i],coord_y_out[i],2] = 1
elif which_flux == 2 and flux_log10_small[i]<min_flux_small+prange:
nzycheck[coord_x_1_small[i],coord_y_1_small[i],2] = 1
nzycheck[coord_x_out[i],coord_y_out[i],2] = 1
#######################################################################
# elemental names: elname(i) is the name of element with Z=i
elname= ('none','H','He','Li','Be','B','C','N','O','F','Ne','Na','Mg','Al','Si','P','S','Cl','Ar','K','Ca','Sc','Ti','V','Cr','Mn','Fe',
'Co','Ni','Cu','Zn','Ga','Ge','As','Se','Br','Kr','Rb','Sr','Y','Zr','Nb','Mo','Tc','Ru','Rh','Pd','Ag','Cd','In','Sn','Sb',
'Te', 'I','Xe','Cs','Ba','La','Ce','Pr','Nd','Pm','Sm','Eu','Gd','Tb','Dy','Ho','Er','Tm','Yb','Lu','Hf','Ta','W','Re','Os',
'Ir','Pt','Au','Hg','Tl','Pb','Bi','Po','At','Rn','Fr','Ra','Ac','Th','Pa','U','Np','Pu')
#### create plot
## define axis and plot style (colormap, size, fontsize etc.)
if plotaxis==[0,0,0,0]:
xdim=10
ydim=6
else:
dx = plotaxis[1]-plotaxis[0]
dy = plotaxis[3]-plotaxis[2]
ydim = 6
xdim = ydim*dx/dy
format = 'pdf'
# note that I had to set 'text.usetex': False, to avoid Exception in Tkinter callback.
# and to make the GUI work properly. Why? some missing package?
params = {'axes.labelsize': 15,
'text.fontsize': 15,
'legend.fontsize': 15,
'xtick.labelsize': 15,
'ytick.labelsize': 15,
'text.usetex': False}
plt.rcParams.update(params)
fig=plt.figure(figsize=(xdim,ydim),dpi=100)
axx = 0.10
axy = 0.10
axw = 0.85
axh = 0.8
ax=plt.axes([axx,axy,axw,axh])
# color map choice for abundances
cmapa = cm.jet
# color map choice for arrows
if which_flux == None or which_flux < 2:
cmapr = cm.autumn
elif which_flux == 2:
cmapr = cm.autumn_r
# if a value is below the lower limit its set to white
cmapa.set_under(color='w')
cmapr.set_under(color='w')
# set value range for abundance colors (log10(Y))
norma = colors.Normalize(vmin=-20,vmax=0)
# set x- and y-axis scale aspect ratio to 1
ax.set_aspect('equal')
#print time,temp and density on top
#temp = '%8.3e' %ff['temp']
#time = '%8.3e' %ff['time']
#dens = '%8.3e' %ff['dens']
#box1 = TextArea("t : " + time + " s~~/~~T$_{9}$ : " + temp + "~~/~~$\\rho_{b}$ : " \
# + dens + ' g/cm$^{3}$', textprops=dict(color="k"))
#anchored_box = AnchoredOffsetbox(loc=3,
# child=box1, pad=0.,
# frameon=False,
# bbox_to_anchor=(0., 1.02),
# bbox_transform=ax.transAxes,
# borderpad=0.,
# )
#ax.add_artist(anchored_box)
# Add black frames for stable isotopes
f = open('stable.dat')
head = f.readline()
stable = []
for line in f.readlines():
tmp = line.split()
zz = int(tmp[2])
nn = int(tmp[3])
xy = nn-0.5,zz-0.5
rect = Rectangle(xy,1,1,ec='k',fc='None',fill='False',lw=3.)
rect.set_zorder(2)
ax.add_patch(rect)
apatches = []
acolor = []
m = old_div(0.8,prange)#0.8/prange
if which_flux == None or which_flux < 2:
vmax=np.ceil(max(flux_log10_small))
vmin=max(flux_log10_small)-prange
b=-vmin*m+0.1
elif which_flux == 2:
vmin=min(flux_log10_small)
vmax=np.ceil(min(flux_log10_small)+prange)
b=vmax*m+0.1
if which_flux == None or which_flux < 3:
normr = colors.Normalize(vmin=vmin,vmax=vmax)
print('vmin and vmax =',vmin,vmax)
ymax=0.
xmax=0.
for i in range(len(flux_log10_small)):
x = coord_x_1_small[i]
y = coord_y_1_small[i]
dx = coord_x_out[i]-coord_x_1_small[i]
dy = coord_y_out[i]-coord_y_1_small[i]
if plot_type == 0:
if which_flux == None or which_flux < 2:
if flux_log10_small[i]>=vmin:
arrowwidth = flux_log10_small[i]*m+b
arrow = Arrow(x,y,dx,dy, width=arrowwidth)
if xmax<x:
xmax=x
if ymax<y:
ymax=y
acol = flux_log10_small[i]
apatches.append(arrow)
acolor.append(acol)
elif which_flux == 2:
if flux_log10_small[i]<=vmax:
arrowwidth = -flux_log10_small[i]*m+b
arrow = Arrow(x,y,dx,dy, width=arrowwidth)
if xmax<x:
xmax=x
if ymax<y:
ymax=y
acol = flux_log10_small[i]
apatches.append(arrow)
acolor.append(acol)
elif plot_type == 1 and which_flux != 2:
if x==I_am_the_target[0] and y==I_am_the_target[1] and flux_log10_small[i]>=vmin:
arrowwidth = flux_log10_small[i]*m+b
arrow = Arrow(x,y,dx,dy, width=arrowwidth)
if xmax<x:
xmax=x
if ymax<y:
ymax=y
acol = flux_log10_small[i]
apatches.append(arrow)
acolor.append(acol)
if x+dx==I_am_the_target[0] and y+dy==I_am_the_target[1] and flux_log10_small[i]>=vmin:
arrowwidth = flux_log10_small[i]*m+b
arrow = Arrow(x,y,dx,dy, width=arrowwidth)
if xmax<x:
xmax=x
if ymax<y:
ymax=y
acol = flux_log10_small[i]
apatches.append(arrow)
acolor.append(acol)
elif plot_type == 1 and which_flux == 2:
if x==I_am_the_target[0] and y==I_am_the_target[1] and flux_log10_small[i]<=vmax:
arrowwidth = -flux_log10_small[i]*m+b
arrow = Arrow(x,y,dx,dy, width=arrowwidth)
if xmax<x:
xmax=x
if ymax<y:
ymax=y
acol = flux_log10_small[i]
apatches.append(arrow)
acolor.append(acol)
if x+dx==I_am_the_target[0] and y+dy==I_am_the_target[1] and flux_log10_small[i]<=vmax:
arrowwidth = -flux_log10_small[i]*m+b
arrow = Arrow(x,y,dx,dy, width=arrowwidth)
if xmax<x:
xmax=x
if ymax<y:
ymax=y
acol = flux_log10_small[i]
apatches.append(arrow)
acolor.append(acol)
#apatches = []
#acolor = []
#m = 0.8/prange
#vmax=np.ceil(max(flux_log10))
#vmin=max(flux_log10)-prange
#b=-vmin*m+0.1
#normr = colors.Normalize(vmin=vmin,vmax=vmax)
#ymax=0.
#xmax=0.
#for i in range(len(flux_log10)):
# x = coord_x_1[i]
# y = coord_y_1[i]
# dx = coord_x_out[i]-coord_x_1[i]
# dy = coord_y_out[i]-coord_y_1[i]
# if plot_type == 0:
# if flux_log10[i]>=vmin:
# arrowwidth = flux_log10[i]*m+b
# arrow = Arrow(x,y,dx,dy, width=arrowwidth)
# if xmax<x:
# xmax=x
# if ymax<y:
# ymax=y
# acol = flux_log10[i]
# apatches.append(arrow)
# acolor.append(acol)
# elif plot_type == 1:
# if x==I_am_the_target[0] and y==I_am_the_target[1] and flux_log10[i]>=vmin:
# arrowwidth = flux_log10[i]*m+b
# arrow = Arrow(x,y,dx,dy, width=arrowwidth)
# if xmax<x:
# xmax=x
# if ymax<y:
# ymax=y
# acol = flux_log10[i]
# apatches.append(arrow)
# acolor.append(acol)
# if x+dx==I_am_the_target[0] and y+dy==I_am_the_target[1] and flux_log10[i]>=vmin:
# arrowwidth = flux_log10[i]*m+b
# arrow = Arrow(x,y,dx,dy, width=arrowwidth)
# if xmax<x:
# xmax=x
# if ymax<y:
# ymax=y
# acol = flux_log10[i]
# apatches.append(arrow)
# acolor.append(acol)
#
xy = x-0.5,y-0.5
rect = Rectangle(xy,1,1,ec='k',fc='None',fill='False',lw=1.)
rect.set_zorder(2)
ax.add_patch(rect)
xy = x+dx-0.5,y+dy-0.5
rect = Rectangle(xy,1,1,ec='k',fc='None',fill='False',lw=1.)
rect.set_zorder(2)
ax.add_patch(rect)
a = PatchCollection(apatches, cmap=cmapr, norm=normr)
a.set_array(np.array(acolor))
a.set_zorder(3)
ax.add_collection(a)
cb = plt.colorbar(a)
# colorbar label
if which_flux == None or which_flux == 0:
cb.set_label('log$_{10}$(f)')
elif which_flux ==1:
cb.set_label('log$_{10}$(E)')
elif which_flux ==2:
cb.set_label('log$_{10}$(timescale)')
# plot file name
graphname = 'flow-chart.'+format
# decide which array to take for label positions
iarr = 2
# plot element labels
for z in range(nzmax):
try:
nmin = min(np.argwhere(nzycheck[:,z,iarr-2]))[0]-1
ax.text(nmin,z,elname[z],horizontalalignment='center',verticalalignment='center',fontsize='medium',clip_on=True)
except ValueError:
continue
# plot mass numbers
if imlabel==1:
for z in range(nzmax):
for n in range(nnmax):
a = z+n
if nzycheck[n,z,iarr-2]==1:
ax.text(n,z,a,horizontalalignment='center',verticalalignment='center',fontsize='small',clip_on=True)
# plot lines at magic numbers
if imagic==1:
ixymagic=[2, 8, 20, 28, 50, 82, 126]
nmagic = len(ixymagic)
for magic in ixymagic:
if magic<=nzmax:
try:
xnmin = min(np.argwhere(nzycheck[:,magic,iarr-2]))[0]
xnmax = max(np.argwhere(nzycheck[:,magic,iarr-2]))[0]
line = ax.plot([xnmin,xnmax],[magic,magic],lw=3.,color='r',ls='-')
except ValueError:
dummy=0
if magic<=nnmax:
try:
yzmin = min(np.argwhere(nzycheck[magic,:,iarr-2]))[0]
yzmax = max(np.argwhere(nzycheck[magic,:,iarr-2]))[0]
line = ax.plot([magic,magic],[yzmin,yzmax],lw=3.,color='r',ls='-')
except ValueError:
dummy=0
# set axis limits
if plotaxis==[0,0,0,0]:
ax.axis([-0.5,xmax+0.5,-0.5,ymax+0.5])
else:
ax.axis(plotaxis)
# set x- and y-axis label
ax.set_xlabel('neutron number')
ax.set_ylabel('proton number')
if which_flux == None or which_flux == 0:
max_flux_label="max flux = "+str('{0:.4f}'.format(max_flux))
elif which_flux == 1:
max_flux_label="max energy flux = "+str('{0:.4f}'.format(max_flux))
elif which_flux == 2:
min_flux_label="min timescale [s] = "+str('{0:.4f}'.format(min_flux))
if print_max_flux_in_plot:
if which_flux == None or which_flux < 2:
ax.text(plotaxis[1]-1.8,plotaxis[2]+0.1,max_flux_label,fontsize=10.)
elif which_flux == 2:
ax.text(plotaxis[1]-1.8,plotaxis[2]+0.1,min_flux_label,fontsize=10.)
fig.savefig(graphname)
print(graphname,'is done')
if which_flux == None or which_flux < 2:
print(max_flux_label,'for reaction =',ind_max_flux+1)
elif which_flux == 2:
print(min_flux_label,'for reaction =',ind_min_flux+1)
plt.show() | Plots a chart with fluxes
Parameters
----------
file_name : string
Name of the file of fluxes we are looking at.
plotaxis : list
[xmin, xmax, ymin, ymax], where on x axis there is neutron
number and on y axis there is Z.
plot_types : integer
Set to 0 for standard flux plot. Set to 1 if fluxes focused
on one specie.
which_flux : integer, optional
Set to 0 for nucleosynthesis flux plot. Set to 1 is for energy
flux plot. Seting to None is the same a 0. The default is
None.
I_am_the_target : list, optional
A 2xArray used only if plot_type=1, and is given by [neutron
number, proton number]. The default is None.
prange : integer, optional
The range of fluxes to be considered. If prange is None, then
8 fluxes are consdered. The default is None.
Notes
-----
This script is terribly slow and needs to be improved. For now I
put here in data_plot:
[1]: import data_plot
[2]: data_plot.flux_chart('file_name', [xmin, xmax, ymin, ymax],
int, which_flux, I_am_the_target, prange)
The pdf is created, but an error bumped up and the gui is empty.
To avoid this, I had to set 'text.usetex': False. See below. Also,
for the same reason no label in x axys is written using
'text.usetex': True.
Note also that the GUI works really slow with this plot. So, we
need to optimize from the graphic point of view. This need to be
included in ppn.py I think, and set in multi option too, in case
we want to read more flux files at the same time.
Finally, you need to have stable.dat to read in to make it work ... | entailment |
def _sparse(self, x, y, sparse):
"""
Method that removes every non sparse th element.
For example:
if this argument was 5, This method would plot the 0th, 5th,
10th ... elements.
Parameters
----------
x : list
list of x values, of length j.
y : list
list of y values, of length j.
sparse : integer
Argument that skips every so many data points.
"""
tmpX=[]
tmpY=[]
for i in range(len(x)):
if sparse == 1:
return x,y
if (i%sparse)==0:
tmpX.append(x[i])
tmpY.append(y[i])
return tmpX, tmpY | Method that removes every non sparse th element.
For example:
if this argument was 5, This method would plot the 0th, 5th,
10th ... elements.
Parameters
----------
x : list
list of x values, of length j.
y : list
list of y values, of length j.
sparse : integer
Argument that skips every so many data points. | entailment |
def plotMulti(self, atrix, atriy, cyclist, title, path='/',
legend=None, labelx=None, labely=None, logx=False,
logy=False, base=10, sparse=1, pdf=False,
limits=None):
'''
Method for plotting multiple plots and saving it to multiple
pngs or PDFs.
Parameters
----------
atrix : string
The name of the attribute you want on the x axis.
atriy : string
The name of the attribute you want on the Y axis.
cyclist : list
List of cycles that you would like plotted.
title : string
The title of the graph and the name of the file.
path : string, optional
The file path. The default is '/'
Legend : list or intager, optional
A list of legends for each of your cycles, or one legend for
all of the cycles. The default is None.
labelx : string, optional
The label on the X axis. The default is None.
labely : string, optional
The label on the Y axis. The default is None.
logx : boolean, optional
A boolean of whether the user wants the x axis
logarithmically. The default is False.
logy : boolean, optional
A boolean of whether the user wants the Y axis
logarithmically. The default is False.
base : integer, optional
The base of the logarithm. The default is 10.
sparse : integer, optional
Argument that skips every so many data points. For example
if this argument was 5, This method would plot the 0th,
5th, 10th ... elements. The default is 1.
pdf : boolean, optional
A boolean of if the image should be saved to a pdf file.
xMin, xMax, yMin, YMax: plot coordinates. The default is
False.
limits : list, optional
The length four list of the x and y limits. The order of
the list is xmin, xmax, ymin, ymax. The default is None.
'''
if str(legend.__class__)!="<type 'list'>":# Determines the legend is a list
legendList=False
else:
legendList=True
if legendList and len(cyclist) !=len(legend): #if it is a list, make sure there is an entry for each cycle
print('Please input a proper legend, with correct length, aborting plot')
return None
for i in range(len(cyclist)):
if legendList:
self.plot(atrix,atriy,cyclist[i],'ndump',legend[i],labelx,labely,base=base,sparse=sparse, \
logx=logx,logy=logy,show=False,limits=limits)
else:
self.plot(atrix,atriy,cyclist[i],'ndump',legend,labelx,labely,base=base,sparse=sparse, \
logx=logx,logy=logy,show=False,limits=limits)
pl.title(title)
if not pdf:
currentDir = os.getcwd()
os.chdir(path)
pl.savefig(title+str(cyclist[i])+'.png', dpi=400)
os.chdir(currentDir)
else:
currentDir = os.getcwd()
os.chdir(path)
pl.savefig(title+str(cyclist[i])+'.pdf', dpi=400)
os.chdir(currentDir)
pl.clf()
return None | Method for plotting multiple plots and saving it to multiple
pngs or PDFs.
Parameters
----------
atrix : string
The name of the attribute you want on the x axis.
atriy : string
The name of the attribute you want on the Y axis.
cyclist : list
List of cycles that you would like plotted.
title : string
The title of the graph and the name of the file.
path : string, optional
The file path. The default is '/'
Legend : list or intager, optional
A list of legends for each of your cycles, or one legend for
all of the cycles. The default is None.
labelx : string, optional
The label on the X axis. The default is None.
labely : string, optional
The label on the Y axis. The default is None.
logx : boolean, optional
A boolean of whether the user wants the x axis
logarithmically. The default is False.
logy : boolean, optional
A boolean of whether the user wants the Y axis
logarithmically. The default is False.
base : integer, optional
The base of the logarithm. The default is 10.
sparse : integer, optional
Argument that skips every so many data points. For example
if this argument was 5, This method would plot the 0th,
5th, 10th ... elements. The default is 1.
pdf : boolean, optional
A boolean of if the image should be saved to a pdf file.
xMin, xMax, yMin, YMax: plot coordinates. The default is
False.
limits : list, optional
The length four list of the x and y limits. The order of
the list is xmin, xmax, ymin, ymax. The default is None. | entailment |
def plot(self, atrix, atriy, fname=None, numtype='ndump',
legend=None, labelx=None, labely=None, indexx=None,
indexy=None, title=None, shape='.', logx=False,
logy=False, path='/', base=10, sparse=1, show=True, pdf=False,limits=None,
markevery=None, linewidth=1):
"""
Simple function that plots atriy as a function of atrix
This method will automatically find and plot the requested data.
Parameters
----------
atrix : string
The name of the attribute you want on the x axis.
atriy : string
The name of the attribute you want on the Y axis.
fname : optional
Be the filename, Ndump or time, or cycle, If fname is a
list, this method will then save a png for each cycle in the
list. Warning, this must be a list of cycles and not a
list of filenames. The default is None.
numtype : string, optional
designates how this function acts and how it interprets
fname. if numtype is 'file', this function will get the
desird attribute from that file. if numtype is 'NDump'
function will look at the cycle with that nDump. if numtype
is 't' or 'time' function will find the _cycle with the
closest time stamp. The default is 'ndump'.
legend : list or intager, optional
A list of legends for each of your cycles, or one legend for
all of the cycles. The default is None.
labelx : string, optional
The label on the X axis. The default is None.
labely : string, optional
The label on the Y axis. The default is None.
indexx : optional
Depreciated: If the get method returns a list of lists,
indexx would be the list at the index indexx in the list.
The default is None.
indexy : optional
Depreciated: If the get method returns a list of lists,
indexy would be the list at the index indexx in the list.
The default is None.
title : string, optional
The Title of the Graph. The default is None.
shape : string, optional
What shape and colour the user would like their plot in.
Please see
http://matplotlib.sourceforge.net/api/pyplot_api.html#matplotlib.pyplot.plot
for all possible choices. The default is '.'.
logx : boolean, optional
A boolean of weather the user wants the x axi
logarithmically. The default is False.
logy : boolean, optional
A boolean of weather the user wants the Y axis
logarithmically. The default is False.
path : string, optional
Usef for PlotMulti, give the path where to save the Figures
base : integer, optional
The base of the logarithm. The Default is 10.
sparse : integer, optional
Argument that skips every so many data points. For example
if this argument was 5, This method would plot the 0th, 5th,
10th ... elements. The default is 1.
show : boolean, optional
A boolean of if the plot should be displayed useful with the
multiPlot method. The default is True.
pdf : boolean, optional
PDF for PlotMulti? Default: False
limits : list, optional
The length four list of the x and y limits. The order of the
list is xmin, xmax, ymin, ymax. The defautl is .
markevery : integer or tupler, optional
Set the markevery property to subsample the plot when
using markers. markevery can be None, very point will be
plotted. It can be an integer N, Every N-th marker will be
plotted starting with marker 0. It can be a tuple,
markevery=(start, N) will start at point start and plot
every N-th marker. The default is None.
linewidth : integer, optional
Set linewidth. The default is 1.
Notes
-----
WARNING: Unstable if get returns a list with only one element (x=[0]).
parameters: indexx and indexy have been deprecated.
"""
t1=time.time()
#Setting the axis labels
if labelx== None :
labelx=atrix
if labely== None :
labely=atriy
if title!=None:
title=title
else:
title=labely+' vs '+labelx
if str(fname.__class__)=="<type 'list'>":
self.plotMulti(atrix,atriy,fname,title,path,legend,labelx,labely,logx, logy, 10,1,pdf,limits)
return
tmpX=[]
tmpY=[]
singleX=False
singleY=False
#Getting data
plotType=self._classTest()
if plotType=='YProfile':
if fname==None:
fname=self.cycles[-1]
listY=self.get(atriy,fname, numtype,resolution='a')
listX=self.get(atrix,fname, numtype,resolution='a')
elif plotType=='se':
if fname==None:
listY=self.get( atriy,sparse=sparse)
listX=self.get(atrix,sparse=sparse)
else:
listY=self.get(fname, atriy,sparse=sparse)
listX=self.get(fname, atrix,sparse=sparse)
t2= time.time()
print(t2 -t1)
elif plotType=='PPN' :
if fname==None and atrix not in self.cattrs and atriy not in self.cattrs:
fname=len(self.files)-1
if numtype=='ndump':
numtype='cycNum'
listY=self.get(atriy,fname,numtype)
listX=self.get(atrix,fname,numtype)
elif plotType=='xtime' or plotType=='mesa_profile' or plotType=='AsciiTable' or plotType=='mesa.star_log' or plotType=='starobs':
listY=self.get(atriy)
listX=self.get(atrix)
else:
listY=self.get(atriy)
listX=self.get(atrix)
tmpX=[]
tmpY=[]
if isinstance(listX[0], basestring) or isinstance(listY[0], basestring):
for i in range(len(listX)):
if '*****' == listX[i] or '*****' == listY[i]:
print('There seems to be a string of * in the lists')
print('Cutting out elements in both the lists that have an index equal to or greater than the index of the location of the string of *')
break
tmpX.append(float(listX[i]))
tmpY.append(float(listY[i]))
listX=tmpX
listY=tmpY
#Determining if listX is a list or a list of lists
try:
j=listX[0][0]
except:
singleX = True
if len(listX) == 1: # If it is a list of lists with one element.
tmpX=listX[0]
elif singleX == True:# If it is a plain list of values.
tmpX=listX
elif indexx==None and len(listX)>1: # If it is a list of lists of values.
# take the largest
tmpX=listX[0]
for i in range(len(listX)):
if len(tmpX)<len(listX[i]):
tmpX=listX[i]
elif indexx<len(listX): # If an index is specified, use that index
tmpX=listX[indexx]
else:
print('Sorry that indexx does not exist, returning None')
return None
#Determining if listY is a list or a list of lists
try:
j=listY[0][0]
except:
singleY = True
if len(listY) == 1: # If it is a list of lists with one element.
#print 'hello'
tmpY=listY[0]
elif singleY == True: # If it is a plain list of values.
#print 'world'
tmpY=listY
elif indexy==None and len(listY)>1:# If it is a list of lists of values.
# take the largest
#print 'fourth'
tmpY=listY[0]
for i in range(len(listY)):
if len(tmpY)<len(listY[i]):
tmpY=listY[i]
elif indexy<len(listY): # If an index is specified, use that index
#print 'sixth'
tmpY=listY[indexy]
else:
print('Sorry that indexy does not exist, returning None')
return None
'''
elif indexy==None and len(listY)==1:
#print 'fifth'
tmpY=listY
'''
#Here, if we end up with different sized lists to plot, it
#searches for a list that is of an equal length
if len(tmpY)!=len(tmpX):
found=False
print("It seems like the lists are not of equal length")
print("Now attempting to find a compatible list for ListX")
for i in range(len(listY)):
if not singleY and len(tmpX)==len(listY[i]):
tmpY=listY[i]
found=True
if not found:
print("Now attempting to find a compatible list for ListY")
for i in range(len(listX)):
if not singleX and len(tmpY)==len(listX[i]):
tmpX=listX[i]
found=True
if found:
print("Suitable list found")
else:
print("There is no suitalble list, returning None")
return None
if len(tmpY)!=len(tmpX) and single == True:
print('It seems that the selected lists are of different\nsize, now returning none')
return None
# Sparse stuff
if plotType!='se':
tmpX,tmpY=self._sparse(tmpX,tmpY, sparse)
# Logarithm stuff
if logy or logx:
tmpX,tmpY=self._logarithm(tmpX,tmpY,logx,logy,base)
# Here it ensures that if we are plotting ncycle no values of '*' will be plotted
tmX=[]
tmY=[]
for i in range(len(tmpX)):
tmX.append(str(tmpX[i]))
tmY.append(str(tmpY[i]))
tmpX=[]
tmpY=[]
for i in range(len(tmX)):
if '*' in tmX[i] or '*' in tmY[i]:
print('There seems to be a string of * in the lists')
print('Cutting out elements in both the lists that have an index equal to or greater than the index of the location of the string of *')
break
tmpX.append(float(tmX[i]))
tmpY.append(float(tmY[i]))
listX=tmpX
listY=tmpY
#Setting the axis labels
if logx:
labelx='log '+labelx
if logy:
labely='log '+labely
if legend!=None:
legend=legend
else:
legend=labely+' vs '+labelx
pl.plot(listX,listY,shape,label=legend,markevery=markevery,linewidth=linewidth)
pl.legend()
pl.title(title)
pl.xlabel(labelx)
pl.ylabel(labely)
if show:
pl.show()
if limits != None and len(limits)==4:
pl.xlim(limits[0],limits[1])
pl.ylim(limits[2],limits[3]) | Simple function that plots atriy as a function of atrix
This method will automatically find and plot the requested data.
Parameters
----------
atrix : string
The name of the attribute you want on the x axis.
atriy : string
The name of the attribute you want on the Y axis.
fname : optional
Be the filename, Ndump or time, or cycle, If fname is a
list, this method will then save a png for each cycle in the
list. Warning, this must be a list of cycles and not a
list of filenames. The default is None.
numtype : string, optional
Designates how this function acts and how it interprets
fname. If numtype is 'file', this function will get the
desired attribute from that file. If numtype is 'NDump', the
function will look at the cycle with that nDump. If numtype
is 't' or 'time', the function will find the cycle with the
closest time stamp. The default is 'ndump'.
legend : list or integer, optional
A list of legends for each of your cycles, or one legend for
all of the cycles. The default is None.
labelx : string, optional
The label on the X axis. The default is None.
labely : string, optional
The label on the Y axis. The default is None.
indexx : optional
Deprecated: If the get method returns a list of lists,
indexx would be the list at the index indexx in the list.
The default is None.
indexy : optional
Deprecated: If the get method returns a list of lists,
indexy would be the list at the index indexy in the list.
The default is None.
title : string, optional
The Title of the Graph. The default is None.
shape : string, optional
What shape and colour the user would like their plot in.
Please see
http://matplotlib.sourceforge.net/api/pyplot_api.html#matplotlib.pyplot.plot
for all possible choices. The default is '.'.
logx : boolean, optional
A boolean of whether the user wants the x axis scaled
logarithmically. The default is False.
logy : boolean, optional
A boolean of whether the user wants the Y axis scaled
logarithmically. The default is False.
path : string, optional
Used for plotMulti, give the path where to save the Figures
base : integer, optional
The base of the logarithm. The Default is 10.
sparse : integer, optional
Argument that skips every so many data points. For example
if this argument was 5, This method would plot the 0th, 5th,
10th ... elements. The default is 1.
show : boolean, optional
A boolean of if the plot should be displayed useful with the
multiPlot method. The default is True.
pdf : boolean, optional
PDF for PlotMulti? Default: False
limits : list, optional
The length four list of the x and y limits. The order of the
list is xmin, xmax, ymin, ymax. The default is None.
markevery : integer or tuple, optional
Set the markevery property to subsample the plot when
using markers. markevery can be None, in which case every
point will be plotted. It can be an integer N, in which case
every N-th marker will be plotted starting with marker 0.
It can be a tuple, markevery=(start, N), which will start at
point start and plot every N-th marker. The default is None.
linewidth : integer, optional
Set linewidth. The default is 1.
Notes
-----
WARNING: Unstable if get returns a list with only one element (x=[0]).
parameters: indexx and indexy have been deprecated. | entailment |
def plot_isoratios(self, xiso, yiso, fign=1, spec=None, deltax=True, deltay=True, logx=False, logy=False,
                   title=None, legend=None, legloc='lower right', errbar=True, dcycle=500, addiso=None,
                   co_toggle='c', cust_toggle=None, shift=0, weighting=None, zoneselect=None,
                   iniabufile='iniab2.0E-02GN93.ppn', plt_sparse=1, plt_symb='o', plt_col='k', plt_lt='-',
                   plt_lw=1., alpha_dum=1., plt_massrange=False, plt_show=True, figsave=False):
    '''
    This is the new routine to plot isotopic ratios for ALL input. rt, June 2014

    Parameters
    ----------
    xiso : np.array
        x data to plot. This can be an array or a list of arrays,
        depending on who calls the routine.
    yiso : np.array
        y data to plot. This can be an array or a list of arrays,
        depending on who calls the routine.
    fign : integer, optional
        Figure number.
    spec : string, optional
        What specifications do you want to do when coming from nugridse
        models. Choose 'surf' for surface models or 'exp' for explosions
        (out files).
    deltax : boolean, optional
        X axis in delta values?
    deltay : boolean, optional
        Y axis in delta values?
    logx : boolean, optional
        Logarithmic x axis?
    logy : boolean, optional
        Logarithmic y axis?
    title : string, optional
        Title for your plot.
    legend : string, optional
        Legend for your model / grains. For grains the legend is
        automatically taken from the grain class.
    legloc : string / integer, optional
        Location of the legend, use matplotlib standard. Use None to not
        plot the legend if plotted by default, e.g., from grain class routine.
    errbar : boolean, optional
        Error bars on grain data?
    dcycle : integer, optional
        Difference between cycles to take for thermal pulse searching; if
        searching is deactivated, dcycle describes how often cycles are
        sampled. The default is 500.
    addiso : list, optional
        For explosive models. Add an isotope. Format ['C-12', 0.5, 'N-12']
        to add N12 to C12 and multiply it with a factor of 0.5. Multiple
        isotopes can be added, the factor is optional and does not have to
        be given. Isotopes can be added to other isotopes as well, i.e.,
        [['C-12', 'N-12'], ['C-13', 'N-13']]. The default is None.
        Notice that while addiso = [['N-14','O-14'],['N-14',fractionation,'C-14']]
        works, other options like addiso = [['N-14','O-14',fractionation,'C-14']]
        or addiso = [['N-14',1,'O-14',fractionation,'C-14']] are not working
        and give TypeError. CAREFUL, for the option
        addiso = [['N-14',fractionation,'C-14','O-14']] there is no error
        message, but the fractionation is applied to both O14 and C14!
    co_toggle : string, optional
        For explosive models, choose what shells you want to look for!
        Select 'c' for selecting zones with C/O >= 1. Select 'o' for
        C/O <= 1. If 'a', takes the whole star. The default is 'c'. See
        cust_toggle (below) for an alternative!
    cust_toggle : list, optional
        This option is like co_toggle (and overwrites it when chosen) but
        lets you choose your own comparison. For example, to find zones
        with a 100 fold overabundance of Ti-46 and Ti-47 over O-16 and
        Zr-96, choose [['Ti-46','Ti-47'],['O-16','Zr-96'],100.]. Assuming
        the first list is x, the second list y, and the comparator number
        f, only shells in which the condition x/y > f is fulfilled are
        plotted. x and y are number sums of the chosen isotopes, f has to
        be given as a float. This is only for explosive shells. Please
        note, if this toggle is NOT None, then co_toggle is overwritten!
    shift : integer, optional
        For explosive models, how much do you want to shift the models
        back from the last cycle? By default (0) the last cycle is taken.
    weighting : string, optional
        For explosive models. If None, plot every profile separately. If
        'zone', average each zone. If 'all', average all selected zones.
        The default is None.
    zoneselect : string, optional
        For explosive models. Select if you want to plot 'all' zones or
        only the outermost zone ('top'). Default is None, then the user is
        asked to provide this information during the routine as input.
    iniabufile : string, optional
        Initial abundance file. Use an absolute path for your file or a
        filename to choose a given file in USEEPP. Attention: You need a
        standard tree checked out from SVN.
    plt_sparse : integer, optional
        Every how many datapoints is the plot done? Not used for some
        routines!
    plt_symb : string, optional
        Symbol for the plot. In case of grains, this is handled
        automatically.
    plt_col : string / float, optional
        Color for plotted curve. In case of grains, this is handled
        automatically.
    plt_lt : string, optional
        Line type for plot.
    plt_lw : float, optional
        Line width for plot.
    alpha_dum : float, optional
        Transparency to apply to grain data, in case many data are
        plotted. This may be applied also to theoretical curves.
    plt_massrange : boolean or list, optional
        For explosive models. Plot mass of shell with first and last
        datapoint of each zone. If a list is given, label those zones.
        The default is False.
    plt_show : boolean, optional
        Do you want to show the plot or not?
    figsave : string, optional
        Give path and filename here, if you want to save the figure.
    '''
    from . import utils as u
    ### WORK ON PATH ###
    # define svn path from the path where the script runs, depending on standard input or not
    if len(iniabufile.split('/')) == 1:  # means not an absolute path!
        scriptpathtmp = __file__
        if len(scriptpathtmp.split('/')) == 1:  # in folder where nugridse is
            scriptpathtmp = os.path.abspath('.') + '/nugridse.py'  # to get the current dir
        svnpathtmp = '/'
        for i in range(len(scriptpathtmp.split('/')) - 3):  # -3 to go three folders up!
            if scriptpathtmp.split('/')[i] != '':
                svnpathtmp += scriptpathtmp.split('/')[i] + '/'
        iniabufile = svnpathtmp + 'frames/mppnp/USEEPP/' + iniabufile  # make absolute path for iniabufile
    ### get solar system ratios for the isotopes that are specified in the input file ###
    inut = u.iniabu(iniabufile)
    try:
        xrat_solsys = inut.isoratio_init(xiso)
    except KeyError:  # if isotope not available, e.g., if plotting Ti-44 / Ti-48 ratio
        xrat_solsys = 0.
    try:
        yrat_solsys = inut.isoratio_init(yiso)
    except KeyError:
        yrat_solsys = 0.
    # number ratio for solar system ratio
    xrat_solsys *= (old_div(float(xiso[1].split('-')[1]), float(xiso[0].split('-')[1])))
    yrat_solsys *= (old_div(float(yiso[1].split('-')[1]), float(yiso[0].split('-')[1])))
    # initialize xdataerr and ydataerr as None (only the grain branch fills them)
    xdataerr = None
    ydataerr = None
    ### DO PLOTS FROM NUGRIDSE CLASS ###
    if self._classTest() == 'se':
        if spec is None:
            # FIX: the original wrapped input() in eval(), which raises NameError
            # for a plain answer like surf; read the string directly instead.
            spec = str(input('Please specify \'surf\' for surface models (AGB stars) or \'exp\' for explosive'
                             'models and zone finding, etc., and press enter: '))
        ### SURFACE MODELS - PLOT AGB STAR STUFF ###
        if spec == 'surf':
            print('Plotting AGB star stuff')
            # read in thermal pulse position and co ratio; prepend the first cycle
            tp_pos, co_return = self._tp_finder(dcycle)
            tp_pos_tmp = [1]
            co_return_tmp = [co_return[0]]
            for i in range(len(tp_pos)):
                tp_pos_tmp.append(tp_pos[i])
                co_return_tmp.append(co_return[i])
            tp_pos = tp_pos_tmp
            co_return = co_return_tmp
            # read in data
            iso_alldata = self.get(tp_pos, [xiso[0], xiso[1], yiso[0], yiso[1]])
            xrat = np.zeros(len(iso_alldata))
            yrat = np.zeros(len(iso_alldata))
            for i in range(len(iso_alldata)):
                xrat[i] = old_div(iso_alldata[i][0], iso_alldata[i][1])
                yrat[i] = old_div(iso_alldata[i][2], iso_alldata[i][3])
            # make number ratios
            for i in range(len(xrat)):
                xrat[i] *= old_div(float(xiso[1].split('-')[1]), float(xiso[0].split('-')[1]))
                yrat[i] *= old_div(float(yiso[1].split('-')[1]), float(yiso[0].split('-')[1]))
            # if delta values are requested, need to calculate those now
            if deltax:
                xrat = (old_div(xrat, xrat_solsys) - 1.) * 1000.
            if deltay:
                yrat = (old_div(yrat, yrat_solsys) - 1.) * 1000.
            # now we might have o and c rich zones. prepare stuff for plotting
            xdata_o = []
            ydata_o = []
            xdata_c = []
            ydata_c = []
            for i in range(len(co_return)):
                if co_return[i] <= 1.:
                    xdata_o.append(xrat[i])
                    ydata_o.append(yrat[i])
                else:
                    xdata_c.append(xrat[i])
                    ydata_c.append(yrat[i])
            # connect the o-rich track to the first c-rich point
            if xdata_o != [] and xdata_c != []:
                xdata_o.append(xdata_c[0])
                ydata_o.append(ydata_c[0])
            # now make the styles
            style_o = [plt_symb + '--', plt_col, '1.', '4', '2', None]
            style_c = [plt_symb + plt_lt, plt_col, plt_col, '7.', '1', legend]
            # now make data for plotting
            xdata = []
            ydata = []
            style = []
            if xdata_o != []:
                xdata.append(xdata_o)
                ydata.append(ydata_o)
                style.append(style_o)
            if xdata_c != []:
                xdata.append(xdata_c)
                ydata.append(ydata_c)
                style.append(style_c)
        ### EXPLOSIVE MODELS ###
        elif spec == 'exp':
            print('explosive models')
            # compatibility
            co_toggle = co_toggle.lower()
            isotope_list = [xiso[0], xiso[1], yiso[0], yiso[1]]
            # cycle
            cyc_no = self.se.cycles[len(self.se.cycles) - 1 - shift]
            mco_data = self.get(cyc_no, ['mass', 'C-12', 'C-13', 'O-16', 'O-17', 'O-18',
                                         xiso[0], xiso[1], yiso[0], yiso[1]])
            mass = mco_data[0]
            # if no custom toggle for enrichment
            if cust_toggle is None:
                c_elem = mco_data[1] + mco_data[2]
                o_elem = mco_data[3] + mco_data[4] + mco_data[5]
                co_ratio = c_elem / o_elem * (old_div(16., 12.))
                co_comp_val = 1.
            else:
                co_data1 = self.get(cyc_no, cust_toggle[0])
                co_data2 = self.get(cyc_no, cust_toggle[1])
                for i in range(len(co_data1)):
                    if i == 0:
                        c_elem = co_data1[i]
                    else:
                        c_elem += co_data1[i]
                for i in range(len(co_data2)):
                    if i == 0:
                        o_elem = co_data2[i]
                    else:
                        o_elem += co_data2[i]
                # now we need the mass number of everything in here to make number ratios
                massn1 = 0.
                for i in range(len(co_data1)):
                    massn1 += sum(co_data1[i]) * float(cust_toggle[0][i].split('-')[1])
                massn1 /= sum(c_elem)
                massn2 = 0.
                for i in range(len(co_data2)):
                    massn2 += sum(co_data2[i]) * float(cust_toggle[1][i].split('-')[1])
                massn2 /= sum(o_elem)
                # this has nothing to do with a C/O ratio anymore! but keep name
                co_ratio = c_elem / o_elem * (old_div(massn2, massn1))
                # comparator value
                co_comp_val = float(cust_toggle[2])
            # get the data now
            isotope_profile = []
            for i in range(6, 10):  # positions of xiso/yiso in mco_data
                isotope_profile.append(mco_data[i])
            # add radioactive isotopes (if given)
            if addiso is not None:
                # FIX: the original tested `type(addiso[0] == list)`, which is always
                # truthy (the type of a bool), so the flat-list branch below was
                # unreachable. Use isinstance to distinguish list-of-lists input.
                if isinstance(addiso[0], list):  # then list of lists
                    for i in range(len(addiso)):
                        for j in range(4):
                            if isotope_list[j] == addiso[i][0]:
                                multiplicator_addiso = 1.
                                try:
                                    multiplicator_addiso = float(addiso[i][1])
                                    starter = 2
                                except ValueError:
                                    starter = 1
                                for k in range(starter, len(addiso[i])):
                                    isotope_profile[j] += array(self.get(cyc_no, addiso[i][k])) * multiplicator_addiso
                else:
                    for j in range(4):
                        if isotope_list[j] == addiso[0]:
                            multiplicator_addiso = 1.
                            try:
                                multiplicator_addiso = float(addiso[1])
                                starter = 2
                            except ValueError:
                                starter = 1
                            for k in range(starter, len(addiso)):
                                isotope_profile[j] += array(self.get(cyc_no, addiso[k])) * multiplicator_addiso
            # search for carbon / oxygen rich layers
            crich = []  # alternating start stop values. if odd number, then surface is c-rich, but add stop number
            dumb = True
            if cust_toggle is not None:
                for i in range(len(co_ratio)):
                    if dumb:
                        if co_ratio[i] >= co_comp_val:
                            crich.append(i)
                            dumb = False
                            continue
                    else:
                        if co_ratio[i] < co_comp_val:
                            crich.append(i)
                            dumb = True
            elif co_toggle != 'a':
                for i in range(len(co_ratio)):
                    if co_toggle == 'c':  # carbon rich
                        if dumb:
                            if co_ratio[i] >= 1:
                                crich.append(i)
                                dumb = False
                                continue
                        else:
                            if co_ratio[i] < 1:
                                crich.append(i)
                                dumb = True
                    elif co_toggle == 'o':  # oxygen rich
                        if dumb:
                            if co_ratio[i] <= 1:
                                crich.append(i)
                                dumb = False
                                continue
                        else:
                            if co_ratio[i] > 1:
                                crich.append(i)
                                dumb = True
                    else:
                        print('Select your enrichment!')
                        return None
            else:  # take whole star
                print('Using all profiles to mix')
                crich.append(0)
                # FIX: the original appended len(co_toggle) -- the length of the
                # string 'a', i.e. 1 -- restricting "whole star" to the innermost
                # shell. Use the last valid profile index instead.
                crich.append(len(co_ratio) - 1)
            if len(crich) % 2 == 1:
                crich.append(len(co_ratio) - 1)
            if len(crich) == 0:
                print('Star did not get rich in C or O, depending on what you specified')
                return None
            # make isotope_profile into array and transpose
            isotope_profile = array(isotope_profile).transpose()
            # Ask user which zones to use
            if co_toggle != 'a':
                if cust_toggle is not None:
                    print('\n\nI have found the following zones:\n')
                elif co_toggle == 'c':
                    print('\n\nI have found the following carbon rich zones:\n')
                elif co_toggle == 'o':
                    print('\n\nI have found the following oxygen rich zones:\n')
                mass_tmp = zeros((len(crich)))
                for i in range(len(crich)):
                    mass_tmp[i] = mass[crich[i]]
                j = 1
                for i in range(old_div(len(crich), 2)):
                    print('Mass range (' + str(j) + '):\t' + str(mass_tmp[2 * i]) + ' - ' + str(mass_tmp[2 * i + 1]))
                    j += 1
                print('\n')
                if zoneselect == 'all':
                    usr_zones = 0
                elif zoneselect == 'top':
                    usr_zones = [j - 1]
                else:
                    # NOTE(review): eval is kept here because the prompt accepts a
                    # comma-separated list such as "1, 2, 4"; input comes from the
                    # interactive user, not from untrusted external data.
                    usr_zones = eval(input('Please select which mass range you want to use. Select 0 for all zones. Otherwise give one zone or a list of zones separated by comma (e.g.: 1, 2, 4): '))
                crich_dumb = crich
                if usr_zones == 0:
                    print('I continue w/ all zones then')
                elif isinstance(usr_zones, int):  # only one zone selected
                    tmp = int(usr_zones) - 1
                    crich = crich_dumb[2 * tmp:2 * tmp + 2]
                else:
                    crich = []
                    for i in range(len(usr_zones)):
                        tmp = int(usr_zones[i]) - 1
                        crich.append(crich_dumb[2 * tmp])
                        crich.append(crich_dumb[2 * tmp + 1])
            # weight profiles according to weighting factor using the selected crich
            # define isos_to_use variable for later
            if weighting is None:
                isos_to_use = []
                for i in range(old_div(len(crich), 2)):
                    isos_dumb = []
                    n = crich[2 * i]
                    while n <= crich[2 * i + 1]:
                        isos_dumb.append(isotope_profile[n])
                        n += 1
                    isos_to_use.append(array(isos_dumb))
            elif weighting.lower() == 'zone' or weighting.lower() == 'zones':
                # make array w/ mass weighted isotope ratio (4) for all mass zones
                isotope_profile_cweight = zeros((old_div(len(crich), 2), 4))
                mass_tot = []
                for i in range(len(isotope_profile_cweight)):  # 2*i is start, 2*i+1 is stop value
                    if crich[2 * i] == 0:
                        print('C- / O-rich in first shell (core).')
                    else:
                        dumb = crich[2 * i + 1]
                        j = crich[2 * i]
                        mass_tmp = 0
                        while j <= dumb:
                            mass_shell = mass[j] - mass[j - 1]
                            mass_tmp += mass_shell
                            for k in range(4):
                                isotope_profile_cweight[i][k] += isotope_profile[j][k] * mass_shell
                            j += 1
                        mass_tot.append(mass_tmp)
                for i in range(len(isotope_profile_cweight)):
                    for j in range(4):
                        isotope_profile_cweight[i][j] /= mass_tot[i]
                isos_to_use = [array(isotope_profile_cweight)]
            elif weighting.lower() == 'all':  # average all zones by mass
                isos_tmp = zeros((1, len(isotope_profile[0])))
                for i in range(len(isotope_profile) - 1):  # neglect surface effects
                    for j in range(len(isos_tmp[0])):
                        mass_shell = mass[i + 1] - mass[i]
                        isos_tmp[0][j] += isotope_profile[i][j] * mass_shell
                # weight all
                isos_tmp /= sum(mass)
                isos_to_use = [isos_tmp]
            # change to isotope numbers from mass!
            for i in range(len(isos_to_use)):
                for j in range(len(isos_to_use[i])):
                    for k in range(len(isos_to_use[i][j])):
                        # divide the 'iso_massf' output by the mass number so that
                        # the isotope ratios taken from here on are number ratios
                        isos_to_use[i][j][k] /= float(isotope_list[k].split('-')[1])
            # do the ratios and stuff
            ratiox = []
            ratioy = []
            for i in range(len(isos_to_use)):
                ratiox_dumb = []
                ratioy_dumb = []
                for j in range(len(isos_to_use[i])):
                    ratiox_dumb.append(old_div(isos_to_use[i][j][0], isos_to_use[i][j][1]))
                    ratioy_dumb.append(old_div(isos_to_use[i][j][2], isos_to_use[i][j][3]))
                ratiox.append(array(ratiox_dumb))
                ratioy.append(array(ratioy_dumb))
            # make arrays for ratiox and ratioy
            ratiox = array(ratiox)
            ratioy = array(ratioy)
            # make number ratio out of everything
            for i in range(len(ratiox)):
                for j in range(len(ratiox[i])):
                    ratiox[i][j] *= (old_div(float(xiso[1].split('-')[1]), float(xiso[0].split('-')[1])))
                    ratioy[i][j] *= (old_div(float(yiso[1].split('-')[1]), float(yiso[0].split('-')[1])))
            if deltax:
                ratiox_tmp = []
                for i in range(len(ratiox)):
                    ratiox_tmp_tmp = []
                    for j in range(len(ratiox[i])):
                        ratiox_tmp_tmp.append((old_div(ratiox[i][j], xrat_solsys) - 1.) * 1000.)
                    ratiox_tmp.append(ratiox_tmp_tmp)
                ratiox = array(ratiox_tmp)
            if deltay:
                ratioy_tmp = []
                for i in range(len(ratioy)):
                    ratioy_tmp_tmp = []
                    for j in range(len(ratioy[i])):
                        ratioy_tmp_tmp.append((old_div(ratioy[i][j], yrat_solsys) - 1.) * 1000.)
                    ratioy_tmp.append(ratioy_tmp_tmp)
                ratioy = array(ratioy_tmp)
            # create massrange array if necessary
            plt_massrange_lst = []
            if plt_massrange == True:  # deliberate `== True`: plt_massrange may also be a list
                for i in range(len(ratiox)):
                    plt_massrange_lst.append([ratiox[i][0], ratioy[i][0], mass[crich[2 * i]]])
                    # stop entry: x-ratio, y-ratio, mass label
                    plt_massrange_lst.append([ratiox[i][len(ratiox[i]) - 1], ratioy[i][len(ratioy[i]) - 1], mass[crich[2 * i + 1]]])
            elif plt_massrange != False:
                for plt_mr_val in plt_massrange:
                    mrng_i = 0
                    # FIX: check the bound *before* indexing mass (the original
                    # tested `mass[mrng_i]` first and could raise IndexError).
                    while mrng_i < len(mass) and plt_mr_val > mass[mrng_i]:
                        mrng_i += 1
                    if mrng_i > 0:
                        mrng_i -= 1
                    mratx = old_div(isotope_profile[mrng_i][0], isotope_profile[mrng_i][1])
                    mraty = old_div(isotope_profile[mrng_i][2], isotope_profile[mrng_i][3])
                    if deltax:
                        # FIX: the original referenced undefined names
                        # ratiox_solsys / ratioy_solsys (NameError); the solar
                        # system ratios are xrat_solsys / yrat_solsys.
                        mratx = (old_div(mratx, xrat_solsys) - 1.) * 1000.
                    if deltay:
                        mraty = (old_div(mraty, yrat_solsys) - 1.) * 1000.
                    plt_massrange_lst.append([mratx, mraty, mass[mrng_i]])
            # make style and prepare for plotting here
            xdata = ratiox
            ydata = ratioy
            style_tmp0 = [plt_symb + plt_lt, plt_col, plt_col, '13.', '1', legend]
            style_tmp = [plt_symb + plt_lt, plt_col, plt_col, '13.', '1', None]
            style = []
            for i in range(len(xdata)):
                if i == 0:
                    style.append(style_tmp0)
                else:
                    style.append(style_tmp)
        else:
            print('You did not specify a useful spec argument -> abort.')
            return None
    ### PLOTS FROM GRAIN CLASS ###
    if self._classTest() == 'grain':
        print('Presolar grains are cool!')
        xdata, xdataerr, ydata, ydataerr, style = self.plot_ratio_return(xiso, yiso, deltax, deltay)
        legend = True
        plt_sparse = 1  # to avoid monkey input
        plt_lw = 0.
    ### PLOT ###
    # data is prepared now, make the plots. data must be in format
    # [[data1],[data2],[data3],...]
    # three arrays like this, for xdata, ydata, and style.
    # style format: symbol, edge color, face color, symbol size, edge width, label
    # this is then compatible with grain.py style definitions
    # Size of font etc.
    # NOTE(review): 'text.fontsize' was removed in modern matplotlib
    # ('font.size' replaces it); kept as-is for the matplotlib version this
    # codebase targets -- confirm before upgrading.
    params = {'axes.labelsize': 20,
              'text.fontsize': 14,
              'legend.fontsize': 14,
              'xtick.labelsize': 14,
              'ytick.labelsize': 14}
    pl.rcParams.update(params)
    pl.figure(fign)
    for i in range(len(xdata)):
        if errbar:
            # FIX: guard each error array individually; the original indexed
            # xdataerr[i] / ydataerr[i] whenever *either* was set, which raised
            # TypeError when only one of them was available.
            if xdataerr is not None or ydataerr is not None:
                pl.errorbar(xdata[i], ydata[i],
                            xerr=xdataerr[i] if xdataerr is not None else None,
                            yerr=ydataerr[i] if ydataerr is not None else None,
                            marker=style[i][0], color=style[i][1], linestyle='', lw=2,
                            markevery=plt_sparse, alpha=alpha_dum)
        pl.plot(xdata[i], ydata[i], style[i][0], c=style[i][1], mfc=style[i][2], ms=float(style[i][3]),
                mew=float(style[i][4]), label=style[i][5], markevery=plt_sparse, linewidth=plt_lw, alpha=alpha_dum)
    # plot text labels if necessary
    if plt_massrange != False:
        for mrng_ind in range(len(plt_massrange_lst)):
            pl.text(plt_massrange_lst[mrng_ind][0], plt_massrange_lst[mrng_ind][1],
                    str(round(plt_massrange_lst[mrng_ind][2], 2)), ha='right', va='bottom', color=plt_col, fontsize=15.)
    # log?
    if logx and not logy:
        pl.semilogx()
    elif not logx and logy:
        pl.semilogy()
    elif logx and logy:
        pl.loglog()
    # legend
    if legend is not None and legloc is not None:
        pl.legend(loc=legloc)
    # title and labels
    if title is not None:
        pl.title(title)
    if deltax:
        pl.xlabel('$\delta$($^{' + xiso[0].split('-')[1] + '}$' + xiso[0].split('-')[0] + '/$^{' + xiso[1].split('-')[1] + '}$' + xiso[1].split('-')[0] + ')')
    else:
        pl.xlabel('$^{' + xiso[0].split('-')[1] + '}$' + xiso[0].split('-')[0] + '/$^{' + xiso[1].split('-')[1] + '}$' + xiso[1].split('-')[0])
    if deltay:
        pl.ylabel('$\delta$($^{' + yiso[0].split('-')[1] + '}$' + yiso[0].split('-')[0] + '/$^{' + yiso[1].split('-')[1] + '}$' + yiso[1].split('-')[0] + ')')
    else:
        pl.ylabel('$^{' + yiso[0].split('-')[1] + '}$' + yiso[0].split('-')[0] + '/$^{' + yiso[1].split('-')[1] + '}$' + yiso[1].split('-')[0])
    # plot horizontal and vertical lines at the solar-system (or delta=0) values
    print(xrat_solsys, yrat_solsys)
    if deltay:
        pl.axhline(0, color='k')
    else:
        pl.axhline(yrat_solsys, color='k')
    if deltax:
        pl.axvline(0, color='k')
    else:
        pl.axvline(xrat_solsys, color='k')
    # borders of plot
    pl.gcf().subplots_adjust(bottom=0.15)
    pl.gcf().subplots_adjust(left=0.15)
    # save and show
    if figsave != False:
        pl.savefig(figsave)
    if plt_show:
        pl.show()
Parameters:
-----------
xiso : np.array
x data to plot. This can be an array or a list of arrays, depending on who calls the routine
yiso : np.array
y data to plot. This can be an array or a list of arrays, depending on who calls the routine
fign : integer, optional
Figure number
spec : string, optional
What specifications do you want to do when coming from nugridse models. Choose 'surf' for
surface models or 'exp' for explosions (out files)
deltax : boolean, optional
X axis in delta values?
deltay : boolean, optional
Y axis in delta values?
logx : boolean, optional
Logarithmic x axis?
logy : boolean, optional
Logarithmic y axis?
title : string, optional
Title for your plot
legend : string, optional
Legend for your model / grains. For grains the legend is automatically taken from the
grain class
legloc : string / integer, optional
Location of the legend, use matplotlib standard. Use None to not plot legend if plotted
by default, e.g., from grain class routine.
errbar : boolean, optional
Error bars on grain data?
dcycle : integer, optional
Difference between cycles to take for thermal pulse searching, if searching is
deactivated, dcycle describes how often cycles are sampled. The default is 500.
addiso : list, optional
For explosive models. Add an isotope. Format ['C-12', 0.5 ,'N-12'] to add N12
to C12 and multiply it with a factor of 0.5. Multiple isotopes can be added, the
factor is optional and does not have to be given. Isotopes can be added to other
isotopes as well, i.e., [['C-12', 'N-12'], ['C-13', 'N-13']]. The default None.
Notice that while addiso = [['N-14','O-14'],['N-14',fractionation,'C-14']]
works, other options like addiso = [['N-14','O-14',fractionation,'C-14']] or
addiso = [['N-14',1,'O-14',fractionation,'C-14']] are not working and give TypeError.
CAREFUL, that for the option addiso = [['N-14',fractionation,'C-14','O-14']] there is
no error message, but the fractionation is applied to both O14 and C14!
co_toggle : string, optional
For explosive models, choose what shells you want to look for! Select 'c' for
selecting zones with C/O >= 1. Select 'o' for C/O <= 1. If 'a' takes the
whole star. The default is 'c'. See cust_toggle (below) for an alternative!
cust_toggle : list, optional
This option is like co_toggle (and overwrites it when chosen) but lets you choose
your own comparison. For example you want to find zones that have a 10 fold
overabundance of Ti-46 and Ti-47 over O-16 and Zr-96, you can choose here
[['Ti-46','Ti-47'],['O-16','Zr-96'],100.] Assuming the first list is is x, the
second list y, and the comparator number is f, the statement only plots shells
in which the condition x/y>f is fulfilled. x and y are number sums of the chosen
isotopes, f has to be given as a float. This is only for explosive shells. Please
note, if this toggle is NOT None, then co_toggle is overwritten!
shift : integer, optional
For explosive models, how much do you want to shift the models back from the
last cycle? By default (0) the last cycle is taken.
weighting : string, optional
For explosive models. If None then, plot every profile separately. If 'zone'
then, average each zone. If 'all' then average all selected zones. The
default is None.
zoneselect: string, optional
For explosive models. Select if you want to plot 'all' zones or outer most zone.
Arguments are 'all' and 'top', respectively. Default is None, then the user is
asked to provide this information during the routine as input.
iniabufile : string, optional
Initial abundance file. Use absolute path for your file or filename to choose a
given file in USEEPP. Attention: You need a standard tree checked out from SVN
plt_sparse : integer, optional
Every how many datapoints is the plot done? Not used for some routines!
plt_symb : string, optional
Symbol for the plot. In case of grains, this is handled automatically.
plt_col : string / float, optional
Color for plotted curve. In case of grains, this is handled automatically.
plt_lt : string, optional
line type for plot.
plt_lw : float, optional
Line width for plot.
alpha_dum : transparency to apply to grains data, in case many data are plotted.
This may be applied also for theoretical curves.
plt_massrange : boolean, optional
For explosive models. Plot mass of shell with first and last datapoint of
each zone. If list given, label those zones. The default is False.
plt_show : boolean, optional
Do you want to show the plot or not?
figsave : string, optional
Give path and filename here, if you want to save the figure. | entailment |
def plot_isopattern(self,isos,normiso,spec=None,tpulse='all',dcycle=500,fign=1,deltay=False,logy=False,
iniabufile='iniab2.0E-02GN93.ppn',legend=None,plt_symb='o',plt_col='k',plt_lt='-',
plt_lw=1.,plt_show=True):
'''
This routine plots isotopic pattern plots for different input along with grain data.
Parameters
----------
isos : list / string
Enter the list of isotopes that you want to consider here. The list should
be given in the standard format, e.g., ['Fe-54','Fe-56','Fe-57','Fe-58'] or
give the element as a string if you want to consider all stable isotopes,
e.g., 'Fe'
normiso : string
Give the isotope all the ratios should be normalized to here, e.g., 'Fe-56'
spec : string, optional
What specifications do you want to do when coming from nugridse models. Choose 'surf' for
surface models or 'exp' for explosions (out files)
tpulse : string, optional
In case you have an AGB star, here you decide which thermal pulse to plot. You can choose
'all' (default) to plot all TPs, 'c' or 'o' for all Carbon (C/O > 1) or all Oxygen
(C/O < 1) rich, respectively, or 'last' for the last thermal pulse only
dcycle : integer, optional
Difference between cycles to take for thermal pulse searching, if searching is
deactivated, dcycle describes how often cycles are sampled. The default is 500.
fign : integer, optional
Number of the figure
deltay : boolean, optional
Do you want to do delta values on y axis or regular ratios?
logy : boolean, optional
Y axis logarithmic?
iniabufile : string, optional
Initial abundance file. Use absolute path for your file or filename to choose a
given file in USEEPP. Attention: You need a standard tree checked out from SVN
legend : string, optional
Legend for your model / grains. For grains the legend is automatically taken from the
grain class
plt_symb : string, optional
Symbol for the plot. In case of grains, this is handled automatically.
plt_col : string / float, optional
Color for plotted curve. In case of grains, this is handled automatically.
plt_lt : string, optional
line type for plot.
plt_lw : float, optional
Line width for plot.
plt_show : boolean, optional
Show plot?
'''
from . import utils as u
### WORK ON PATH ###
# define svn path form path where script runs, depending on standard input or not
if len(iniabufile.split('/')) == 1 : # means not an absolute path!
scriptpathtmp = __file__
if len(scriptpathtmp.split('/')) == 1: # in folder where nugridse is
scriptpathtmp = os.path.abspath('.') + '/nugridse.py' # to get the current dir
svnpathtmp = '/'
for i in range(len(scriptpathtmp.split('/'))-3): # -3 to go to folders up!
if scriptpathtmp.split('/')[i] != '':
svnpathtmp += scriptpathtmp.split('/')[i] + '/'
iniabufile = svnpathtmp + 'frames/mppnp/USEEPP/' + iniabufile # make absolute path for iniabufile
### make a list of isotopes in case an element is specified and not a list of isotopes ###
if type(isos) == str:
tmp = u.iniabu.stable_el
tmp2 = -1
for i in range(len(tmp)):
if isos == tmp[i][0]:
tmp2 = tmp[i]
break
if tmp2==-1:
print('No valid element selected. Abort.')
return None
isos_tmp = []
for i in range(1,len(tmp2)):
isos_tmp.append([isos + '-' + str(int(tmp2[i])),normiso])
isos = isos_tmp
# isos is now a list in the sense of [['Fe-54','Fe-56'],['Fe-56','Fe-56'],...]
elif type(isos) == list:
isos_tmp = []
for i in range(len(isos)):
isos_tmp.append([isos[i], normiso])
isos = isos_tmp
else:
print('Specify a valid isotope, see docstring.')
return None
# make a list with all isotopes just as a list (not the fraction as isos)
isoslist = []
for i in range(len(isos)):
isoslist.append(isos[i][0])
# find out where normisotope sits
for i in range(len(isos)):
if isos[i][0] == isos[i][1]:
posnorm = i
break
### get solar system ratios for the isotopes that are specified in the input file ###
inut = u.iniabu(iniabufile)
ss_rat = []
for i in range(len(isos)):
ss_rat.append(inut.isoratio_init(isos[i]))
# make number ratios
for i in range(len(isos)):
ss_rat[i] *= (old_div(float(isos[i][1].split('-')[1]), float(isos[i][0].split('-')[1])))
### DO PLOTS FROM NUGRIDSE CLASS ###
if self._classTest() == 'se':
if spec==None:
spec = str(eval(input('Please specify \'surf\' for surface models (AGB stars) or \'exp\' for explosive'
'models and zone finding, etc., and press enter: ')))
### SURFACE MODELS - PLOT AGB STAR STUFF ###
if spec == 'surf':
print('Plotting AGB star stuff')
# read in thermal pulse position and co ratio
tp_pos, co_return = self._tp_finder(dcycle)
tp_pos_tmp = []
co_return_tmp = []
tp_pos_tmp.append(1)
co_return_tmp.append(co_return[0])
for i in range(len(tp_pos)):
tp_pos_tmp.append(tp_pos[i])
co_return_tmp.append(co_return[i])
tp_pos = tp_pos_tmp
co_return = co_return_tmp
# read in data
iso_alldata = self.get(tp_pos,isoslist)
norm_isotope = np.zeros(len(iso_alldata))
for i in range(len(iso_alldata)):
norm_isotope[i] = iso_alldata[i][posnorm]
# now make ratios iso_ratios out of iso_alldata
iso_ratios = np.zeros((len(iso_alldata),len(iso_alldata[0])))
for i in range(len(iso_ratios)):
for j in range(len(iso_ratios[i])):
iso_ratios[i][j] = old_div(iso_alldata[i][j], norm_isotope[i])
# make number ratios
for i in range(len(iso_ratios)):
for j in range(len(iso_ratios[i])):
iso_ratios[i][j] *= old_div(float(isos[j][1].split('-')[1]), float(isos[j][0].split('-')[1]))
# if delta values are requested, need to calculate those now
if deltay:
for i in range(len(iso_ratios)):
for j in range(len(iso_ratios[i])):
iso_ratios[i][j] = (old_div(iso_ratios[i][j], ss_rat[j]) - 1.) * 1000.
### now prepare data to plot ###
# make style list
style = []
if tpulse == 'last':
ydata = [iso_ratios[len(iso_ratios)-1]]
style = [[plt_symb + plt_lt, plt_col, plt_col, '7.', '1', legend]]
elif tpulse == 'o':
ydata = []
style = []
for i in range(len(co_return)):
if co_return[i] < 1:
ydata.append(iso_ratios[i])
style.append([plt_symb + plt_lt, plt_col, plt_col, '7.', '1', legend])
if ydata == []:
print('No O rich thermal pulses found.')
return None
else: # this means carbon rich, either only or all, but make marker size first
msizelst = [] # marker size list for all subsequent ones
crich_list = []
for i in range(len(co_return)):
if co_return[i] >= 1:
crich_list.append(co_return[i])
crich_max = np.max(crich_list)
crich_min = np.min(crich_list)
slope_tmp = old_div(9., (crich_max-crich_min))
b_tmp = 3. - slope_tmp * crich_min
for i in range(len(crich_list)):
msizelst.append(crich_list[i] * slope_tmp + b_tmp)
# make ydata and style
ydata = []
style = []
j=0
for i in range(len(co_return)):
if co_return[i] < 1.:
j += 1
if tpulse == 'all':
ydata.append(iso_ratios[i])
style.append([plt_symb + '--', plt_col, plt_col, 1., 1., None])
else:
ydata.append(iso_ratios[i])
style.append([plt_symb + plt_lt, plt_col, plt_col, msizelst[i-j], 1., legend])
### EXPLOSIVE MODELS ###
elif spec=='exp':
print('Explosive models not yet implemented... sorry')
return None
else:
print('You did not specify a useful spec argument -> abort.')
return None
###### PLOT ######
### make ratios for data to plot, first find where the normiso sits ###
# make x axis vector
xdata = []
for i in range(len(isos)):
xdata.append(int(isos[i][0].split('-')[1]))
# Size of font etc.
params = {'axes.labelsize': 20,
'text.fontsize': 14,
'legend.fontsize': 14,
'xtick.labelsize': 14,
'ytick.labelsize': 14}
pl.rcParams.update(params)
# plot
pl.figure(fign)
for i in range(len(ydata)):
pl.plot(xdata,ydata[i],style[i][0],c=style[i][1],mfc=style[i][2],ms=float(style[i][3]),
label=style[i][5],linewidth=plt_lw,mew=float(style[i][4]))
# limits and x axis scale and handling
pl.xlim([xdata[0]-0.5,xdata[len(xdata)-1]+0.5])
# labels and axis scaling
pl.xlabel('Mass number')
if deltay:
pl.ylabel('$\delta (^i$' + normiso.split('-')[0] + ' / solar)')
else:
pl.ylabel('$^{i}$'+ normiso.split('-')[0] + ' / solar')
# borders of plot
pl.gcf().subplots_adjust(bottom=0.15)
pl.gcf().subplots_adjust(left=0.15)
# log
if logy:
pl.semilogy()
if plt_show:
pl.show() | This routine plots isotopic pattern plots for different input along with grain data.
Parameters
----------
isos : list / string
Enter the list of isotopes that you want to consider here. The list should
be given in the standard format, e.g., ['Fe-54','Fe-56','Fe-57','Fe-58'] or
give the element as a string if you want to consider all stable isotopes,
e.g., 'Fe'
normiso : string
Give the isotope all the ratios should be normalized to here, e.g., 'Fe-56'
spec : string, optional
What specifications do you want to do when coming from nugridse models. Choose 'surf' for
surface models or 'exp' for explosions (out files)
tpulse : string, optional
In case you have an AGB star, here you decide which thermal pulse to plot. You can choose
'all' (default) to plot all TPs, 'c' or 'o' for all Carbon (C/O > 1) or all Oxygen
(C/O < 1) rich, respectively, or 'last' for the last thermal pulse only
dcycle : integer, optional
Difference between cycles to take for thermal pulse searching, if searching is
deactivated, dcycle describes how often cycles are sampled. The default is 500.
fign : integer, optional
Number of the figure
deltay : boolean, optional
Do you want to do delta values on y axis or regular ratios?
logy : boolean, optional
Y axis logarithmic?
iniabufile : string, optional
Initial abundance file. Use absolute path for your file or filename to choose a
given file in USEEPP. Attention: You need a standard tree checked out from SVN
legend : string, optional
Legend for your model / grains. For grains the legend is automatically taken from the
grain class
plt_symb : string, optional
Symbol for the plot. In case of grains, this is handled automatically.
plt_col : string / float, optional
Color for plotted curve. In case of grains, this is handled automatically.
plt_lt : string, optional
line type for plot.
plt_lw : float, optional
Line width for plot.
plt_show : boolean, optional
Show plot? | entailment |
def _clear(self, title=True, xlabel=True, ylabel=True):
'''
Method for removing the title and/or xlabel and/or Ylabel.
Parameters
----------
Title : boolean, optional
Boolean of if title will be cleared. The default is True.
xlabel : boolean, optional
Boolean of if xlabel will be cleared. The default is True.
ylabel : boolean, optional
Boolean of if ylabel will be cleared. The default is True.
'''
if title:
pyl.title('')
if xlabel:
pyl.xlabel('')
if ylabel:
pyl.ylabel('') | Method for removing the title and/or xlabel and/or Ylabel.
Parameters
----------
Title : boolean, optional
Boolean of if title will be cleared. The default is True.
xlabel : boolean, optional
Boolean of if xlabel will be cleared. The default is True.
ylabel : boolean, optional
Boolean of if ylabel will be cleared. The default is True. | entailment |
def _xlimrev(self):
''' reverse xrange'''
xmax,xmin=pyl.xlim()
pyl.xlim(xmin,xmax) | reverse xrange | entailment |
def abu_chartMulti(self, cyclist, mass_range=None, ilabel=True,
imlabel=True, imlabel_fontsize=8, imagic=False,
boxstable=True, lbound=20, plotaxis=[0,0,0,0],
color_map='jet', pdf=False, title=None, path=None):
'''
Method that plots abundance chart and saves those figures to a
.png file (by default). Plots a figure for each cycle in the
argument cycle.
Parameters
----------
cyclist : list
The list of cycles we are plotting.
mass_range : list, optional
A 1x2 array containing the lower and upper mass range. If
this is an instance of abu_vector this will only plot
isotopes that have an atomic mass within this range. This
will throw an error if this range does not make sense, e.g.
[45,2]. If None, it will plot over the entire range. The
default is None.
ilabel : boolean, optional
Elemental labels off/on. The default is True.
imlabel : boolean, optional
Label for isotopic masses off/on. The default is True.
imlabel_fontsize : integer, optional
Fontsize for isotopic mass labels. The default is 8.
imagic : boolean, optional
Turn lines for magic numbers off/on. The default is False.
boxstable : boolean, optional
Plot the black boxes around the stable elements. The
default is True.
lbound : tuple, optional
Boundaries for colour spectrum plotted. The default is 20.
plotaxis : list, optional
Set axis limit: If [0,0,0,0] the complete range in (N,Z)
will be plotted. The default is [0,0,0,0].
color_map : string, optional
Color map according to choices in matplotlib
(e.g. www.scipy.org/Cookbook/Matplotlib/Show_colormaps).
The default is 'jet'.
pdf : boolean, optional
What format will this be saved in, pdf/png. The default is
False.
title : string, optional
The title of the plots and the saved images. The default is
None.
'''
if self._which('dvipng')==None:
print("This method may need the third party program dvipng to operate")
print('It is located at http://sourceforge.net/projects/dvipng/')
max_num = max(cyclist)
for i in range(len(cyclist)):
self.abu_chart( cyclist[i], mass_range ,ilabel,imlabel,imlabel_fontsize,imagic,\
boxstable,lbound,plotaxis,False,color_map)
if title !=None:
pl.title(title)
else:
name='AbuChart'
if path is not None:
name = os.path.join(path, name)
number_str=_padding_model_number(cyclist[i],max_num)
if not pdf:
pl.savefig(name+number_str+'.png', dpi=100)
else:
pl.savefig(name+number_str+'.pdf', dpi=200)
pl.close()
return None | Method that plots abundance chart and saves those figures to a
.png file (by default). Plots a figure for each cycle in the
argument cycle.
Parameters
----------
cyclist : list
The list of cycles we are plotting.
mass_range : list, optional
A 1x2 array containing the lower and upper mass range. If
this is an instance of abu_vector this will only plot
isotopes that have an atomic mass within this range. This
will throw an error if this range does not make sense, e.g.
[45,2]. If None, it will plot over the entire range. The
default is None.
ilabel : boolean, optional
Elemental labels off/on. The default is True.
imlabel : boolean, optional
Label for isotopic masses off/on. The default is True.
imlabel_fontsize : integer, optional
Fontsize for isotopic mass labels. The default is 8.
imagic : boolean, optional
Turn lines for magic numbers off/on. The default is False.
boxstable : boolean, optional
Plot the black boxes around the stable elements. The
default is True.
lbound : tuple, optional
Boundaries for colour spectrum plotted. The default is 20.
plotaxis : list, optional
Set axis limit: If [0,0,0,0] the complete range in (N,Z)
will be plotted. The default is [0,0,0,0].
color_map : string, optional
Color map according to choices in matplotlib
(e.g. www.scipy.org/Cookbook/Matplotlib/Show_colormaps).
The default is 'jet'.
pdf : boolean, optional
What format will this be saved in, pdf/png. The default is
False.
title : string, optional
The title of the plots and the saved images. The default is
None. | entailment |
def abu_chart(self, cycle, mass_range=None ,ilabel=True,
imlabel=True, imlabel_fontsize=8, imagic=False,
boxstable=True, lbound=(-12, 0),
plotaxis=[0, 0, 0, 0], show=True, color_map='jet',
ifig=None,data_provided=False,thedata=None,
savefig=False,drawfig=None,drawax=None,mov=False,
path=None):
'''
Plots an abundance chart
Parameters
----------
cycle : string, integer or list
The cycle we are looking in. If it is a list of cycles,
this method will then do a plot for each of these cycles
and save them all to a file.
mass_range : list, optional
A 1x2 array containing the lower and upper mass range. If
this is an instance of abu_vector this will only plot
isotopes that have an atomic mass within this range. This
will throw an error if this range does not make sense, e.g.
[45,2]. If None, it will plot over the entire range. The
default is None.
ilabel : boolean, optional
Elemental labels off/on. The default is True.
imlabel : boolean, optional
Label for isotopic masses off/on. The default is True.
imlabel_fontsize : integer, optional
Fontsize for isotopic mass labels. The default is 8.
imagic : boolean, optional
Turn lines for magic numbers off/on. The default is False.
boxstable : boolean, optional
Plot the black boxes around the stable elements. The
default is True.
lbound : tuple, optional
Boundaries for colour spectrum plotted. The default is
(-12,0).
plotaxis : list, optional
Set axis limit. If [0, 0, 0, 0] the complete range in (N,Z)
will be plotted. It equates to [xMin, xMax, Ymin, Ymax].
The default is [0, 0, 0, 0].
show : boolean, optional
Boolean of if the plot should be displayed. Useful with
saving multiple plots using abu_chartMulti. The default is
True.
color_map : string, optional
Color map according to choices in matplotlib
(e.g. www.scipy.org/Cookbook/Matplotlib/Show_colormaps).
The default is 'jet'.
ifig : integer, optional
Figure number, if ifig is None it will be set to the cycle
number. The default is None.
savefig : boolean, optional
Whether or not to save the figure.
The default is False
drawfig, drawax, mov : optional, not necessary for user to set these variables
The figure and axes containers to be drawn on, and whether or not a movie is
being made (only True when se.movie is called, which sets mov to True
automatically
path: path where to save figure
'''
if ifig == None and not mov:
ifig=cycle
if type(cycle)==type([]):
self.abu_chartMulti(cycle, mass_range,ilabel,imlabel,imlabel_fontsize,imagic,boxstable,\
lbound,plotaxis,color_map, path=path)
return
plotType=self._classTest()
if mass_range!=None and mass_range[0]>mass_range[1]:
raise IOError("Please input a proper mass range")
if plotType=='se':
if not data_provided:
cycle=self.se.findCycle(cycle)
# nin=zeros(len(self.se.A))
# zin=zeros(len(self.se.Z))
yin=self.get(cycle, 'iso_massf')
isom=self.se.isomeric_states
masses = self.se.get(cycle,'mass')
else:
cycle=cycle # why so serious?
yin=thedata[0]
isom=self.se.isomeric_states
masses = thedata[1]
# for i in xrange(len(nin)):
# zin[i]=self.se.Z[i]
# nin[i]=self.se.A[i]-zin[i]
# SJONES implicit loop instead:
zin=array([el for el in self.se.Z])
nin=array([el for el in self.se.A])-zin
#Test if the mass cell order is inverted
#and hence mass[-1] the center.
if masses[0]>masses[-1]:
#invert
print('Inverted order of mass cells will be taken into account.')
yin=yin[::-1]
masses=masses[::-1]
if mass_range != None:
# trim out only the zones needed:
tmpyps=[]
masses.sort() # SJ: not sure why this sort if necessary
# for i in xrange(len(masses)):
# if (masses[i] >mass_range[0] and masses[i]<mass_range[1]) or\
# (masses[i]==mass_range[0] or masses[i]==mass_range[1]):
# tmpyps.append(yin[i])
# yin=tmpyps
# find lower and upper indices and slice instead:
idxl=np.abs(masses-mass_range[0]).argmin()
if masses[idxl] < mass_range[0]: idxl+=1
idxu=np.abs(masses-mass_range[1]).argmin()
if masses[idxu] > mass_range[1]: idxu-=1
yin=yin[idxl:idxu+1]
#tmp=zeros(len(yin[0]))
#for i in xrange(len(yin)):
# for j in xrange(len(yin[i])):
# tmp[j]+=yin[i][j]
tmp2=sum(yin,axis=0) # SJONES sum along axis instead of nested loop
tmp=old_div(tmp2,len(yin))
yin=tmp
elif plotType=='PPN':
ain=self.get('A',cycle)
zin=self.get('Z',cycle)
nin=ain-zin
yin=self.get('ABUNDANCE_MF',cycle)
isom=self.get('ISOM',cycle)
if mass_range != None:
tmpA=[]
tmpZ=[]
tmpIsom=[]
tmpyps=[]
for i in range(len(nin)):
if (ain[i] >mass_range[0] and ain[i]<mass_range[1])\
or (ain[i]==mass_range[0] or ain[i]==mass_range[1]):
tmpA.append(nin[i])
tmpZ.append(zin[i])
tmpIsom.append(isom[i])
tmpyps.append(yin[i])
zin=tmpZ
nin=tmpA
yin=tmpyps
isom=tmpIsom
else:
raise IOError("This method, abu_chart, is not supported by this class")
# in case we call from ipython -pylab, turn interactive on at end again
turnoff=False
if not show:
try:
ioff()
turnoff=True
except NameError:
turnoff=False
nnmax = int(max(nin))+1
nzmax = int(max(zin))+1
nzycheck = zeros([nnmax,nzmax,3])
for i in range(len(nin)):
if isom[i]==1:
ni = int(nin[i])
zi = int(zin[i])
nzycheck[ni,zi,0] = 1
nzycheck[ni,zi,1] = yin[i]
#######################################################################
# elemental names: elname(i) is the name of element with Z=i
elname=self.elements_names
#### create plot
## define axis and plot style (colormap, size, fontsize etc.)
if plotaxis==[0,0,0,0]:
xdim=10
ydim=6
else:
dx = plotaxis[1]-plotaxis[0]
dy = plotaxis[3]-plotaxis[2]
ydim = 6
xdim = ydim*dx/dy
params = {'axes.labelsize': 12,
'text.fontsize': 12,
'legend.fontsize': 12,
'xtick.labelsize': 12,
'ytick.labelsize': 12,
'text.usetex': True}
#pl.rcParams.update(params) #May cause Error, someting to do with tex
if mov:
fig=drawfig
fig.set_size_inches(xdim,ydim)
artists=[]
else:
fig=pl.figure(ifig,figsize=(xdim,ydim),dpi=100)
axx = 0.10
axy = 0.10
axw = 0.85
axh = 0.8
if mov:
ax=drawax
else:
ax=pl.axes([axx,axy,axw,axh])
# Tick marks
xminorlocator = MultipleLocator(1)
xmajorlocator = MultipleLocator(5)
ax.xaxis.set_major_locator(xmajorlocator)
ax.xaxis.set_minor_locator(xminorlocator)
yminorlocator = MultipleLocator(1)
ymajorlocator = MultipleLocator(5)
ax.yaxis.set_major_locator(ymajorlocator)
ax.yaxis.set_minor_locator(yminorlocator)
# color map choice for abundances
cmapa = cm.get_cmap(name=color_map)
# color map choice for arrows
cmapr = cm.autumn
# if a value is below the lower limit its set to white
cmapa.set_under(color='w')
cmapr.set_under(color='w')
# set value range for abundance colors (log10(Y))
norma = colors.Normalize(vmin=lbound[0],vmax=lbound[1])
# set x- and y-axis scale aspect ratio to 1
ax.set_aspect('equal')
#print time,temp and density on top
temp = ' '#'%8.3e' %ff['temp']
time = ' '#'%8.3e' %ff['time']
dens = ' '#'%8.3e' %ff['dens']
#May cause Error, someting to do with tex
'''
#box1 = TextArea("t : " + time + " s~~/~~T$_{9}$ : " + temp + "~~/~~$\\rho_{b}$ : " \
# + dens + ' g/cm$^{3}$', textprops=dict(color="k"))
anchored_box = AnchoredOffsetbox(loc=3,
child=box1, pad=0.,
frameon=False,
bbox_to_anchor=(0., 1.02),
bbox_transform=ax.transAxes,
borderpad=0.,
)
ax.add_artist(anchored_box)
'''
## Colour bar plotted
patches = []
color = []
for i in range(nzmax):
for j in range(nnmax):
if nzycheck[j,i,0]==1:
xy = j-0.5,i-0.5
rect = Rectangle(xy,1,1,)
# abundance
yab = nzycheck[j,i,1]
if yab == 0:
yab=1e-99
col =log10(yab)
patches.append(rect)
color.append(col)
p = PatchCollection(patches, cmap=cmapa, norm=norma)
p.set_array(array(color))
p.set_zorder(1)
if mov:
artist1=ax.add_collection(p)
artists.append(artist1)
else:
ax.add_collection(p)
if not mov:
cb = pl.colorbar(p)
# colorbar label
cb.set_label('log$_{10}$(X)')
# plot file name
graphname = 'abundance-chart'+str(cycle)
# Add black frames for stable isotopes
if boxstable:
for i in range(len(self.stable_el)):
if i == 0:
continue
tmp = self.stable_el[i]
try:
zz= self.elements_names.index(tmp[0]) #charge
except:
continue
for j in range(len(tmp)):
if j == 0:
continue
nn = int(tmp[j]) #atomic mass
nn=nn-zz
xy = nn-0.5,zz-0.5
rect = Rectangle(xy,1,1,ec='k',fc='None',fill='False',lw=3.)
rect.set_zorder(2)
ax.add_patch(rect)
# decide which array to take for label positions
iarr = 0
# plot element labels
if ilabel:
for z in range(nzmax):
try:
nmin = min(argwhere(nzycheck[:,z,iarr]))[0]-1
ax.text(nmin,z,elname[z],horizontalalignment='center',verticalalignment='center',\
fontsize='x-small',clip_on=True)
except ValueError:
continue
# plot mass numbers
if imlabel:
for z in range(nzmax):
for n in range(nnmax):
a = z+n
if nzycheck[n,z,iarr]==1:
ax.text(n,z,a,horizontalalignment='center',verticalalignment='center',\
fontsize=imlabel_fontsize,clip_on=True)
# plot lines at magic numbers
if imagic:
ixymagic=[2, 8, 20, 28, 50, 82, 126]
nmagic = len(ixymagic)
for magic in ixymagic:
if magic<=nzmax:
try:
xnmin = min(argwhere(nzycheck[:,magic,iarr]))[0]
xnmax = max(argwhere(nzycheck[:,magic,iarr]))[0]
line = ax.plot([xnmin,xnmax],[magic,magic],lw=3.,color='r',ls='-')
except ValueError:
dummy=0
if magic<=nnmax:
try:
yzmin = min(argwhere(nzycheck[magic,:,iarr]))[0]
yzmax = max(argwhere(nzycheck[magic,:,iarr]))[0]
line = ax.plot([magic,magic],[yzmin,yzmax],lw=3.,color='r',ls='-')
except ValueError:
dummy=0
# set axis limits
if plotaxis==[0,0,0,0]:
xmax=max(nin)
ymax=max(zin)
ax.axis([-0.5,xmax+0.5,-0.5,ymax+0.5])
else:
ax.axis(plotaxis)
# set x- and y-axis label
ax.set_xlabel('neutron number (A-Z)')
ax.set_ylabel('proton number Z')
if not mov:
pl.title('Isotopic Chart for cycle '+str(int(cycle)))
if savefig:
if path is not None:
graphname = os.path.join(path, graphname)
fig.savefig(graphname)
print(graphname,'is done')
if show:
pl.show()
if turnoff:
ion()
if mov:
return p,artists
else:
return | Plots an abundance chart
Parameters
----------
cycle : string, integer or list
The cycle we are looking in. If it is a list of cycles,
this method will then do a plot for each of these cycles
and save them all to a file.
mass_range : list, optional
A 1x2 array containing the lower and upper mass range. If
this is an instance of abu_vector this will only plot
isotopes that have an atomic mass within this range. This
will throw an error if this range does not make sense, e.g.
[45,2]. If None, it will plot over the entire range. The
default is None.
ilabel : boolean, optional
Elemental labels off/on. The default is True.
imlabel : boolean, optional
Label for isotopic masses off/on. The default is True.
imlabel_fontsize : integer, optional
Fontsize for isotopic mass labels. The default is 8.
imagic : boolean, optional
Turn lines for magic numbers off/on. The default is False.
boxstable : boolean, optional
Plot the black boxes around the stable elements. The
default is True.
lbound : tuple, optional
Boundaries for colour spectrum plotted. The default is
(-12,0).
plotaxis : list, optional
Set axis limit. If [0, 0, 0, 0] the complete range in (N,Z)
will be plotted. It equates to [xMin, xMax, Ymin, Ymax].
The default is [0, 0, 0, 0].
show : boolean, optional
Boolean of if the plot should be displayed. Useful with
saving multiple plots using abu_chartMulti. The default is
True.
color_map : string, optional
Color map according to choices in matplotlib
(e.g. www.scipy.org/Cookbook/Matplotlib/Show_colormaps).
The default is 'jet'.
ifig : integer, optional
Figure number, if ifig is None it will be set to the cycle
number. The default is None.
savefig : boolean, optional
Whether or not to save the figure.
The default is False
drawfig, drawax, mov : optional, not necessary for user to set these variables
The figure and axes containers to be drawn on, and whether or not a movie is
being made (only True when se.movie is called, which sets mov to True
automatically
path: path where to save figure | entailment |
def abu_flux_chart(self, cycle, ilabel=True, imlabel=True,
imagic=False, boxstable=True, lbound=(-12,0),
plotaxis=[0,0,0,0], which_flux=None, prange=None,
profile='charged', show=True):
'''
Plots an abundance and flux chart
Parameters
----------
cycle : string, integer or list
The cycle we are looking in. If it is a list of cycles,
this method will then do a plot for each of these cycles
and save them all to a file.
ilabel : boolean, optional
Elemental labels off/on. The default is True.
imlabel : boolean, optional
Label for isotopic masses off/on. The default is True.
imagic : boolean, optional
Turn lines for magic numbers off/on. The default is False.
boxstable : boolean, optional
Plot the black boxes around the stable elements. The
default is True.
lbound : tuple, optional
Boundaries for colour spectrum plotted. The default is
(-12,0).
plotaxis : list, optional
Set axis limit. If [0, 0, 0, 0] the complete range in (N,Z)
will be plotted. It equates to [xMin, xMax, Ymin, Ymax].
The default is [0, 0, 0, 0].
which_flux : integer, optional
Set to 0 for nucleosynthesis flux plot. Set to 1 for
energy flux plot. Setting which_flux to None is equivalent to
setting it to 0. The default is None.
prange : integer, optional
Range of fluxes to be considered, if prange is None then
the plot range is set to 8. The default is None.
profile : string, optional
'charged' is ideal setting to show charged particle
reactions flow. 'neutron' is ideal setting for neutron
captures flows. The default is 'charged'.
show : boolean, optional
Boolean of if the plot should be displayed. Useful with
saving multiple plots using abu_chartMulti. The default is
True.
'''
#######################################################################
#### plot options
# Set axis limit: If default [0,0,0,0] the complete range in (N,Z) will
# be plotted, i.e. all isotopes, else specify the limits in
# plotaxis = [xmin,xmax,ymin,ymax]
#######################################################################
# read data file
#inpfile = cycle
#ff = fdic.ff(inpfile)
# with the flux implementation I am not using mass range for now.
# It may be introduced eventually.
mass_range = None
if str(cycle.__class__)=="<type 'list'>":
self.abu_chartMulti(cycle, mass_range,ilabel,imlabel,imlabel_fontsize,imagic,boxstable,\
lbound,plotaxis)
return
plotType=self._classTest()
#if mass_range!=None and mass_range[0]>mass_range[1]:
#print 'Please input a proper mass range'
#print 'Returning None'
#return None
if plotType=='se':
cycle=self.se.findCycle(cycle)
nin=zeros(len(self.se.A))
zin=zeros(len(self.se.Z))
for i in range(len(nin)):
nin[i]=self.se.A[i]
zin[i]=self.se.Z[i]
for i in range(len(nin)):
nin[i]=nin[i]-zin[i]
yin=self.get(cycle, 'iso_massf')
isom=self.se.isomeric_states
masses = self.se.get(cycle,'mass')
if mass_range != None:
masses = self.se.get(cycle,'mass')
masses.sort()
if mass_range != None:
tmpyps=[]
masses = self.se.get(cycle,'mass')
masses = self.se.get(cycle,'mass')
masses.sort()
for i in range(len(masses)):
if (masses[i] >mass_range[0] and masses[i]<mass_range[1]) or\
(masses[i]==mass_range[0] or masses[i]==mass_range[1]):
tmpyps.append(yin[i])
yin=tmpyps
tmp=zeros(len(yin[0]))
for i in range(len(yin)):
for j in range(len(yin[i])):
tmp[j]+=yin[i][j]
tmp=old_div(tmp,len(yin))
yin=tmp
elif plotType=='PPN':
ain=self.get('A',cycle)
zin=self.get('Z',cycle)
nin=ain-zin
yin=self.get('ABUNDANCE_MF',cycle)
isom=self.get('ISOM',cycle)
if mass_range != None:
tmpA=[]
tmpZ=[]
tmpIsom=[]
tmpyps=[]
for i in range(len(nin)):
if (ain[i] >mass_range[0] and ain[i]<mass_range[1])\
or (ain[i]==mass_range[0] or ain[i]==mass_range[1]):
tmpA.append(nin[i])
tmpZ.append(zin[i])
tmpIsom.append(isom[i])
tmpyps.append(yin[i])
zin=tmpZ
nin=tmpA
yin=tmpyps
isom=tmpIsom
else:
print('This method, abu_chart, is not supported by this class')
print('Returning None')
return None
# in case we call from ipython -pylab, turn interactive on at end again
turnoff=False
if not show:
try:
ioff()
turnoff=True
except NameError:
turnoff=False
nnmax = int(max(nin))+1
nzmax = int(max(zin))+1
nnmax_plot = nnmax
nzmax_plot = nzmax
nzycheck = zeros([nnmax,nzmax,3])
nzycheck_plot = zeros([nnmax,nzmax,3])
for i in range(len(nin)):
if isom[i]==1:
ni = int(nin[i])
zi = int(zin[i])
nzycheck[ni,zi,0] = 1
nzycheck[ni,zi,1] = yin[i]
nzycheck_plot[ni,zi,0] = 1
#######################################################################
# elemental names: elname(i) is the name of element with Z=i
elname=self.elements_names
#### create plot
## define axis and plot style (colormap, size, fontsize etc.)
if plotaxis==[0,0,0,0]:
xdim=10
ydim=6
else:
dx = plotaxis[1]-plotaxis[0]
dy = plotaxis[3]-plotaxis[2]
ydim = 6
xdim = ydim*dx/dy
params = {'axes.labelsize': 15,
'text.fontsize': 12,
'legend.fontsize': 15,
'xtick.labelsize': 15,
'ytick.labelsize': 15,
'text.usetex': True}
#pl.rcParams.update(params) #May cause Error, someting to do with tex
#fig=pl.figure(figsize=(xdim,ydim),dpi=100)
fig=pl.figure()
if profile == 'charged':
ax1 = fig.add_subplot(1, 2, 1)
elif profile == 'neutron':
ax1 = fig.add_subplot(2, 1, 1)
#axx = 0.10
#axy = 0.10
#axw = 0.85
#axh = 0.8
#ax1=pl.axes([axx,axy,axw,axh])
# Tick marks
xminorlocator = MultipleLocator(1)
xmajorlocator = MultipleLocator(5)
ax1.xaxis.set_major_locator(xmajorlocator)
ax1.xaxis.set_minor_locator(xminorlocator)
yminorlocator = MultipleLocator(1)
ymajorlocator = MultipleLocator(5)
ax1.yaxis.set_major_locator(ymajorlocator)
ax1.yaxis.set_minor_locator(yminorlocator)
# color map choice for abundances
#cmapa = cm.jet
cmapa = cm.summer
# color map choice for arrows
cmapr = cm.summer
# if a value is below the lower limit its set to white
cmapa.set_under(color='w')
cmapr.set_under(color='w')
# set value range for abundance colors (log10(Y))
norma = colors.Normalize(vmin=lbound[0],vmax=lbound[1])
# set x- and y-axis scale aspect ratio to 1
#ax1.set_aspect('equal')
#print time,temp and density on top
temp = ' '#'%8.3e' %ff['temp']
time = ' '#'%8.3e' %ff['time']
dens = ' '#'%8.3e' %ff['dens']
#May cause Error, someting to do with tex
'''
#box1 = TextArea("t : " + time + " s~~/~~T$_{9}$ : " + temp + "~~/~~$\\rho_{b}$ : " \
# + dens + ' g/cm$^{3}$', textprops=dict(color="k"))
anchored_box = AnchoredOffsetbox(loc=3,
child=box1, pad=0.,
frameon=False,
bbox_to_anchor=(0., 1.02),
bbox_transform=ax.transAxes,
borderpad=0.,
)
ax.add_artist(anchored_box)
'''
## Colour bar plotted
patches = []
color = []
for i in range(nzmax):
for j in range(nnmax):
if nzycheck[j,i,0]==1:
xy = j-0.5,i-0.5
rect = Rectangle(xy,1,1,)
# abundance
yab = nzycheck[j,i,1]
if yab == 0:
yab=1e-99
col =log10(yab)
patches.append(rect)
color.append(col)
p = PatchCollection(patches, cmap=cmapa, norm=norma)
p.set_array(array(color))
p.set_zorder(1)
ax1.add_collection(p)
cb = pl.colorbar(p)
# colorbar label
if profile == 'neutron':
cb.set_label('log$_{10}$(X)')
# plot file name
graphname = 'abundance-flux-chart'+str(cycle)
# Add black frames for stable isotopes
if boxstable:
for i in range(len(self.stable_el)):
if i == 0:
continue
tmp = self.stable_el[i]
try:
zz= self.elements_names.index(tmp[0]) #charge
except:
continue
for j in range(len(tmp)):
if j == 0:
continue
nn = int(tmp[j]) #atomic mass
nn=nn-zz
xy = nn-0.5,zz-0.5
rect = Rectangle(xy,1,1,ec='k',fc='None',fill='False',lw=4.)
rect.set_zorder(2)
ax1.add_patch(rect)
# decide which array to take for label positions
iarr = 0
# plot element labels
if ilabel:
for z in range(nzmax):
try:
nmin = min(argwhere(nzycheck[:,z,iarr]))[0]-1
nmax = max(argwhere(nzycheck[:,z,iarr]))[0]+1
ax1.text(nmin,z,elname[z],horizontalalignment='center',verticalalignment='center',\
fontsize='small',clip_on=True)
ax1.text(nmax,z,elname[z],horizontalalignment='center',verticalalignment='center',\
fontsize='small',clip_on=True)
except ValueError:
continue
# plot mass numbers
if imlabel:
for z in range(nzmax):
for n in range(nnmax):
a = z+n
if nzycheck[n,z,iarr]==1:
ax1.text(n,z,a,horizontalalignment='center',verticalalignment='center',\
fontsize='x-small',clip_on=True)
# plot lines at magic numbers
if imagic:
ixymagic=[2, 8, 20, 28, 50, 82, 126]
nmagic = len(ixymagic)
for magic in ixymagic:
if magic<=nzmax:
try:
xnmin = min(argwhere(nzycheck[:,magic,iarr]))[0]
xnmax = max(argwhere(nzycheck[:,magic,iarr]))[0]
line = ax1.plot([xnmin,xnmax],[magic,magic],lw=3.,color='r',ls='-')
except ValueError:
dummy=0
if magic<=nnmax:
try:
yzmin = min(argwhere(nzycheck[magic,:,iarr]))[0]
yzmax = max(argwhere(nzycheck[magic,:,iarr]))[0]
line = ax1.plot([magic,magic],[yzmin,yzmax],lw=3.,color='r',ls='-')
except ValueError:
dummy=0
# set axis limits
if plotaxis==[0,0,0,0]:
xmax=max(nin)
ymax=max(zin)
ax1.axis([-0.5,xmax+0.5,-0.5,ymax+0.5])
else:
ax1.axis(plotaxis)
# set x- and y-axis label
ax1.set_ylabel('Proton number')
if profile == 'charged':
ax1.set_xlabel('Neutron number')
#pl.title('Isotopic Chart for cycle '+str(int(cycle)))
#
# here below I read data from the flux_*****.DAT file.
#
file_name = 'flux_'+str(cycle).zfill(5)+'.DAT'
print(file_name)
f = open(file_name)
lines = f.readline()
lines = f.readlines()
f.close()
print_max_flux_in_plot = False
# color map choice for fluxes
#cmapa = cm.jet
cmapa = cm.autumn
# color map choice for arrows
cmapr = cm.autumn
# starting point of arrow
coord_x_1 = []
coord_y_1 = []
# ending point of arrow (option 1)
coord_x_2 = []
coord_y_2 = []
# ending point of arrow (option 2)
coord_x_3 = []
coord_y_3 = []
# fluxes
flux_read = []
flux_log10 = []
if which_flux == None or which_flux == 0:
print('chart for nucleosynthesis fluxes [dYi/dt]')
line_to_read = 9
elif which_flux == 1:
print('chart for energy fluxes')
line_to_read = 10
elif which_flux > 1:
print("you have only option 0 or 1, not larger than 1")
single_line = []
for i in range(len(lines)):
single_line.append(lines[i].split())
coord_y_1.append(int(single_line[i][1]))
coord_x_1.append(int(single_line[i][2])-coord_y_1[i])
coord_y_2.append(int(single_line[i][5]))
coord_x_2.append(int(single_line[i][6])-coord_y_2[i])
coord_y_3.append(int(single_line[i][7]))
coord_x_3.append(int(single_line[i][8])-coord_y_3[i])
try:
flux_read.append(float(single_line[i][line_to_read]))
except ValueError: # this is done to avoid format issues like 3.13725-181...
flux_read.append(1.0E-99)
flux_log10.append(log10(flux_read[i]+1.0e-99))
print(file_name,' read!')
# I need to select smaller sample, with only fluxes inside plotaxis.
if plotaxis!=[0,0,0,0]:
coord_y_1_small=[]
coord_x_1_small=[]
coord_y_2_small=[]
coord_x_2_small=[]
coord_y_3_small=[]
coord_x_3_small=[]
flux_log10_small = []
for i in range(len(flux_log10)):
I_am_in = 0
if coord_y_1[i] > plotaxis[2] and coord_y_1[i] < plotaxis[3] and coord_x_1[i] > plotaxis[0] and coord_x_1[i] < plotaxis[1]:
I_am_in = 1
coord_y_1_small.append(int(coord_y_1[i]))
coord_x_1_small.append(int(coord_x_1[i]))
coord_y_2_small.append(int(coord_y_2[i]))
coord_x_2_small.append(int(coord_x_2[i]))
coord_y_3_small.append(int(coord_y_3[i]))
coord_x_3_small.append(int(coord_x_3[i]))
flux_log10_small.append(flux_log10[i])
if coord_y_3[i] > plotaxis[2] and coord_y_3[i] < plotaxis[3] and coord_x_3[i] > plotaxis[0] and coord_x_3[i] < plotaxis[1] and I_am_in == 0:
I_am_in = 1
coord_y_1_small.append(int(coord_y_1[i]))
coord_x_1_small.append(int(coord_x_1[i]))
coord_y_2_small.append(int(coord_y_2[i]))
coord_x_2_small.append(int(coord_x_2[i]))
coord_y_3_small.append(int(coord_y_3[i]))
coord_x_3_small.append(int(coord_x_3[i]))
flux_log10_small.append(flux_log10[i])
# elemental labels off/on [0/1]
ilabel = 1
# label for isotopic masses off/on [0/1]
imlabel = 1
# turn lines for magic numbers off/on [0/1]
imagic = 0
# flow is plotted over "prange" dex. If flow < maxflow-prange it is not plotted
if prange == None:
print('plot range given by default')
prange = 8.
#############################################
#print flux_log10_small
# we should scale prange on plot_axis range, not on max_flux!
max_flux = max(flux_log10)
ind_max_flux = flux_log10.index(max_flux)
if plotaxis!=[0,0,0,0]:
max_flux_small = max(flux_log10_small)
if plotaxis==[0,0,0,0]:
nzmax = int(max(max(coord_y_1),max(coord_y_2),max(coord_y_3)))+1
nnmax = int(max(max(coord_x_1),max(coord_x_2),max(coord_x_3)))+1
coord_x_1_small = coord_x_1
coord_x_2_small = coord_x_2
coord_x_3_small = coord_x_3
coord_y_1_small = coord_y_1
coord_y_2_small = coord_y_2
coord_y_3_small = coord_y_3
flux_log10_small= flux_log10
max_flux_small = max_flux
else:
nzmax = int(max(max(coord_y_1_small),max(coord_y_2_small),max(coord_y_3_small)))+1
nnmax = int(max(max(coord_x_1_small),max(coord_x_2_small),max(coord_x_3_small)))+1
for i in range(nzmax):
for j in range(nnmax):
if nzycheck[j,i,0]==1:
xy = j-0.5,i-0.5
rect = Rectangle(xy,1,1,)
patches.append(rect)
nzycheck = zeros([nnmax_plot,nzmax,3])
coord_x_out = zeros(len(coord_x_2_small), dtype='int')
coord_y_out = zeros(len(coord_y_2_small),dtype='int')
for i in range(len(flux_log10_small)):
nzycheck[coord_x_1_small[i],coord_y_1_small[i],0] = 1
nzycheck[coord_x_1_small[i],coord_y_1_small[i],1] = flux_log10_small[i]
if coord_x_2_small[i] >= coord_x_3_small[i]:
coord_x_out[i] = coord_x_2_small[i]
coord_y_out[i] = coord_y_2_small[i]
nzycheck[coord_x_out[i],coord_y_out[i],0] = 1
nzycheck[coord_x_out[i],coord_y_out[i],1] = flux_log10_small[i]
elif coord_x_2_small[i] < coord_x_3_small[i]:
coord_x_out[i] = coord_x_3_small[i]
coord_y_out[i] = coord_y_3_small[i]
nzycheck[coord_x_out[i],coord_y_out[i],0] = 1
nzycheck[coord_x_out[i],coord_y_out[i],1] = flux_log10_small[i]
if flux_log10_small[i]>max_flux_small-prange:
nzycheck[coord_x_1_small[i],coord_y_1_small[i],2] = 1
nzycheck[coord_x_out[i],coord_y_out[i],2] = 1
#### create plot
if profile == 'charged':
ax2 = fig.add_subplot(1, 2, 2)
elif profile == 'neutron':
ax2 = fig.add_subplot(2, 1, 2)
# Tick marks
xminorlocator = MultipleLocator(1)
xmajorlocator = MultipleLocator(5)
ax2.xaxis.set_major_locator(xmajorlocator)
ax2.xaxis.set_minor_locator(xminorlocator)
yminorlocator = MultipleLocator(1)
ymajorlocator = MultipleLocator(5)
ax2.yaxis.set_major_locator(ymajorlocator)
ax2.yaxis.set_minor_locator(yminorlocator)
## define axis and plot style (colormap, size, fontsize etc.)
if plotaxis==[0,0,0,0]:
xdim=10
ydim=6
else:
dx = plotaxis[1]-plotaxis[0]
dy = plotaxis[3]-plotaxis[2]
ydim = 6
xdim = ydim*dx/dy
format = 'pdf'
# set x- and y-axis scale aspect ratio to 1
#ax2.set_aspect('equal')
# Add black frames for stable isotopes
# Add black frames for stable isotopes
if boxstable:
for i in range(len(self.stable_el)):
if i == 0:
continue
tmp = self.stable_el[i]
try:
zz= self.elements_names.index(tmp[0]) #charge
except:
continue
for j in range(len(tmp)):
if j == 0:
continue
nn = int(tmp[j]) #atomic mass
nn=nn-zz
xy = nn-0.5,zz-0.5
rect = Rectangle(xy,1,1,ec='k',fc='None',fill='False',lw=4.)
rect.set_zorder(2)
ax2.add_patch(rect)
apatches = []
acolor = []
m = old_div(0.8,prange)
vmax=ceil(max(flux_log10_small))
vmin=max(flux_log10_small)-prange
b=-vmin*m+0.1
normr = colors.Normalize(vmin=vmin,vmax=vmax)
ymax=0.
xmax=0.
for i in range(len(flux_log10_small)):
x = coord_x_1_small[i]
y = coord_y_1_small[i]
dx = coord_x_out[i]-coord_x_1_small[i]
dy = coord_y_out[i]-coord_y_1_small[i]
if flux_log10_small[i]>=vmin:
arrowwidth = flux_log10_small[i]*m+b
arrow = Arrow(x,y,dx,dy, width=arrowwidth)
if xmax<x:
xmax=x
if ymax<y:
ymax=y
acol = flux_log10_small[i]
apatches.append(arrow)
acolor.append(acol)
xy = x-0.5,y-0.5
rect = Rectangle(xy,1,1,ec='k',fc='None',fill='False',lw=1.)
patches.append(rect)
xy = x+dx-0.5,y+dy-0.5
rect = Rectangle(xy,1,1,ec='k',fc='None',fill='False',lw=1.)
patches.append(rect)
p = PatchCollection(patches,norm=0,facecolor='w')
p.set_zorder(1)
ax2.add_collection(p)
a = PatchCollection(apatches, cmap=cmapr, norm=normr)
a.set_array(array(acolor))
a.set_zorder(3)
ax2.add_collection(a)
cb = pl.colorbar(a)
# colorbar label
cb.set_label('log$_{10}$($x$)')
if profile == 'neutron':
cb.set_label('log$_{10}$(f)')
# decide which array to take for label positions
iarr = 2
# plot element labels
for z in range(nzmax):
try:
nmin = min(argwhere(nzycheck_plot[:,z,iarr-2]))[0]-1
nmax = max(argwhere(nzycheck_plot[:,z,iarr-2]))[0]+1
ax2.text(nmin,z,elname[z],horizontalalignment='center',verticalalignment='center',fontsize='small',clip_on=True)
ax2.text(nmax,z,elname[z],horizontalalignment='center',verticalalignment='center',fontsize='small',clip_on=True)
except ValueError:
continue
# plot mass numbers
if imlabel:
for z in range(nzmax):
for n in range(nnmax_plot):
a = z+n
if nzycheck_plot[n,z,iarr-2]==1:
ax2.text(n,z,a,horizontalalignment='center',verticalalignment='center',fontsize='x-small',clip_on=True)
# plot lines at magic numbers
if imagic==1:
ixymagic=[2, 8, 20, 28, 50, 82, 126]
nmagic = len(ixymagic)
for magic in ixymagic:
if magic<=nzmax:
try:
xnmin = min(argwhere(nzycheck[:,magic,iarr-2]))[0]
xnmax = max(argwhere(nzycheck[:,magic,iarr-2]))[0]
line = ax2.plot([xnmin,xnmax],[magic,magic],lw=3.,color='r',ls='-')
except ValueError:
dummy=0
if magic<=nnmax:
try:
yzmin = min(argwhere(nzycheck[magic,:,iarr-2]))[0]
yzmax = max(argwhere(nzycheck[magic,:,iarr-2]))[0]
line = ax2.plot([magic,magic],[yzmin,yzmax],lw=3.,color='r',ls='-')
except ValueError:
dummy=0
# set axis limits
if plotaxis==[0,0,0,0]:
ax2.axis([-0.5,xmax+0.5,-0.5,ymax+0.5])
else:
ax2.axis(plotaxis)
# set x- and y-axis label
ax2.set_xlabel('Neutron number')
if profile == 'neutron':
ax2.set_ylabel('Proton number')
if which_flux == None or which_flux == 0:
max_flux_label="max flux = "+str('{0:.4f}'.format(max_flux))
elif which_flux == 1:
max_flux_label="max energy flux = "+str('{0:.4f}'.format(max_flux))
if print_max_flux_in_plot:
ax2.text(plotaxis[1]-1.8,plotaxis[2]+0.1,max_flux_label,fontsize=10.)
#fig.savefig(graphname)
print(graphname,'is done')
if show:
pl.show()
if turnoff:
ion()
return | Plots an abundance and flux chart
Parameters
----------
cycle : string, integer or list
The cycle we are looking in. If it is a list of cycles,
this method will then do a plot for each of these cycles
and save them all to a file.
ilabel : boolean, optional
Elemental labels off/on. The default is True.
imlabel : boolean, optional
Label for isotopic masses off/on. The default is True.
imagic : boolean, optional
Turn lines for magic numbers off/on. The default is False.
boxstable : boolean, optional
Plot the black boxes around the stable elements. The
defaults is True.
lbound : tuple, optional
Boundaries for colour spectrum ploted. The default is
(-12,0).
plotaxis : list, optional
Set axis limit. If [0, 0, 0, 0] the complete range in (N,Z)
will be plotted. It equates to [xMin, xMax, Ymin, Ymax].
The default is [0, 0, 0, 0].
which_flux : integer, optional
Set to 0 for nucleosynthesis flux plot. Set to 1 for
energy flux plot. Setting wich_flux to 0 is equivelent to
setting it to 0. The default is None.
prange : integer, optional
Range of fluxes to be considered, if prange is None then
the plot range is set to 8. The default is None.
profile : string, optional
'charged' is ideal setting to show charged particle
reactions flow. 'neutron' is ideal setting for neutron
captures flows. The default is 'charged'.
show : boolean, optional
Boolean of if the plot should be displayed. Useful with
saving multiple plots using abu_chartMulti. The default is
True. | entailment |
def iso_abundMulti(self, cyclist, stable=False, amass_range=None,
mass_range=None, ylim=[0,0], ref=-1,
decayed=False, include_title=False, title=None,
pdf=False, color_plot=True, grid=False,
point_set=1):
'''
Method that plots figures and saves those figures to a .png
file. Plots a figure for each cycle in the argument cycle.
Can be called via iso_abund method by passing a list to cycle.
Parameters
----------
cycllist : list
The cycles of interest. This method will do a plot for
each cycle and save them to a file.
stable : boolean, optional
A boolean of whether to filter out the unstables. The
defaults is False.
amass_range : list, optional
A 1x2 array containing the lower and upper atomic mass
range. If None plot entire available atomic mass range.
The default is None.
mass_range : list, optional
A 1x2 array containing the lower and upper mass range. If
this is an instance of abu_vector this will only plot
isotopes that have an atominc mass within this range. This
will throw an error if this range does not make sense ie
[45,2]. If None, it will plot over the entire range. The
defaults is None.
ylim : list, optional
A 1x2 array containing the lower and upper Y limits. If
it is [0,0], then ylim will be determined automatically.
The default is [0,0].
ref : integer or list, optional
reference cycle. If it is not -1, this method will plot
the abundences of cycle devided by the cycle of the same
instance given in the ref variable. If ref is a list it
will be interpreted to have two elements:
ref=['dir/of/ref/run',cycle] which uses a refernece cycle
from another run. If any abundence in the reference cycle
is zero, it will replace it with 1e-99. The default is -1.
decayed : boolean, optional
If True plot decayed distributions, else plot life
distribution. The default is False.
include_title : boolean, optional
Include a title with the plot. The default is False.
title : string, optional
A title to include with the plot. The default is None.
pdf : boolean, optional
Save image as a [pdf/png]. The default is False.
color_plot : boolean, optional
Color dots and lines [True/False]. The default is True.
grid : boolean, optional
print grid. The default is False.
point_set : integer, optional
Set to 0, 1 or 2 to select one of three point sets, useful
for multiple abundances or ratios in one plot. The defalult
is 1.
'''
max_num = max(cyclist)
for i in range(len(cyclist)):
self.iso_abund(cyclist[i],stable,amass_range,mass_range,ylim,ref,\
decayed=decayed,show=False,color_plot=color_plot,grid=False,\
point_set=1,include_title=include_title)
if title !=None:
pl.title(title)
else:
name='IsoAbund'
number_str=_padding_model_number(cyclist[i],max_num)
if not pdf:
pl.savefig(name+number_str+'.png', dpi=200)
else:
pl.savefig(name+number_str+'.pdf', dpi=200)
pl.clf()
return None | Method that plots figures and saves those figures to a .png
file. Plots a figure for each cycle in the argument cycle.
Can be called via iso_abund method by passing a list to cycle.
Parameters
----------
cycllist : list
The cycles of interest. This method will do a plot for
each cycle and save them to a file.
stable : boolean, optional
A boolean of whether to filter out the unstables. The
defaults is False.
amass_range : list, optional
A 1x2 array containing the lower and upper atomic mass
range. If None plot entire available atomic mass range.
The default is None.
mass_range : list, optional
A 1x2 array containing the lower and upper mass range. If
this is an instance of abu_vector this will only plot
isotopes that have an atominc mass within this range. This
will throw an error if this range does not make sense ie
[45,2]. If None, it will plot over the entire range. The
defaults is None.
ylim : list, optional
A 1x2 array containing the lower and upper Y limits. If
it is [0,0], then ylim will be determined automatically.
The default is [0,0].
ref : integer or list, optional
reference cycle. If it is not -1, this method will plot
the abundences of cycle devided by the cycle of the same
instance given in the ref variable. If ref is a list it
will be interpreted to have two elements:
ref=['dir/of/ref/run',cycle] which uses a refernece cycle
from another run. If any abundence in the reference cycle
is zero, it will replace it with 1e-99. The default is -1.
decayed : boolean, optional
If True plot decayed distributions, else plot life
distribution. The default is False.
include_title : boolean, optional
Include a title with the plot. The default is False.
title : string, optional
A title to include with the plot. The default is None.
pdf : boolean, optional
Save image as a [pdf/png]. The default is False.
color_plot : boolean, optional
Color dots and lines [True/False]. The default is True.
grid : boolean, optional
print grid. The default is False.
point_set : integer, optional
Set to 0, 1 or 2 to select one of three point sets, useful
for multiple abundances or ratios in one plot. The defalult
is 1. | entailment |
def iso_abund(self, cycle, stable=False, amass_range=None,
mass_range=None, ylim=[0,0], ref=-1, show=True,
log_logic=True, decayed=False, color_plot=True,
grid=False, point_set=1, include_title=False,
data_provided=False,thedata=None, verbose=True,
mov=False,drawfig=None,drawax=None,show_names=True,
label=None,colour=None,elemaburtn=False,mypoint=None,plot_type=['-','--','-.',':','-']):
'''
plot the abundance of all the chemical species
Parameters
----------
cycle : string, integer or list
The cycle of interest. If it is a list of cycles, this
method will do a plot for each cycle and save them to a
file.
stable : boolean, optional
A boolean of whether to filter out the unstables. The
defaults is False.
amass_range : list, optional
A 1x2 array containing the lower and upper atomic mass
range. If None plot entire available atomic mass range.
The default is None.
mass_range : list, optional
A 1x2 array containing the lower and upper mass range. If
this is an instance of abu_vector this will only plot
isotopes that have an atominc mass within this range. This
will throw an error if this range does not make sense ie
[45,2]. If None, it will plot over the entire range. The
defaults is None.
ylim : list, optional
A 1x2 array containing the lower and upper Y limits. If
it is [0,0], then ylim will be determined automatically.
The default is [0,0].
ref : integer or list, optional
reference cycle. If it is not -1, this method will plot
the abundences of cycle devided by the cycle of the same
instance given in the ref variable. If ref is a list it
will be interpreted to have two elements:
ref=['dir/of/ref/run',cycle] which uses a refernece cycle
from another run. If any abundence in the reference cycle
is zero, it will replace it with 1e-99. The default is -1.
show : boolean, optional
Boolean of if the plot should be displayed. The default is
True.
log_logic : boolean, optional
Plot abundances in log scale or linear. The default is
True.
decayed : boolean, optional
If True plot decayed distributions, else plot life
distribution. The default is False.
color_plot : boolean, optional
Color dots and lines [True/False]. The default is True.
grid : boolean, optional
print grid. The default is False.
point_set : integer, optional
Set to 0, 1 or 2 to select one of three point sets, useful
for multiple abundances or ratios in one plot. The defalult
is 1.
include_title : boolean, optional
Include a title with the plot. The default is False.
drawfig, drawax, mov : optional, not necessary for user to set these variables
The figure and axes containers to be drawn on, and whether or not a movie is
being made (only True when se.movie is called, which sets mov to True
automatically
elemaburtn : boolean, private
If true, iso_abund() returns after writing self.***_iso_to_plot for
use with other plotting routines.f
mypoint : string, optional
fix the marker style of all the points in this plot to one type, given
as a string. If None, multiple point styles are used as per point_set.
The default is None
'''
plotType=self._classTest()
if str(cycle.__class__)=="<type 'list'>":
self.iso_abundMulti(cycle, stable,amass_range,mass_range,ylim,ref,
decayed,include_title,color_plot=color_plot,grid=False,point_set=point_set)
return
if mass_range!=None and mass_range[0]>mass_range[1]:
print('Please input a proper mass range')
print('Returning None')
return None
if amass_range!=None and amass_range[0]>amass_range[1]:
print('Please input a proper Atomic mass range')
print('Returning None')
return None
if plotType=='se':
if decayed:
print('Decay option not yet implemented for mppnp - but it is easy do! Consider investing the time!')
return None
# get things as arrays
if not data_provided:
cycle=self.se.findCycle(cycle)
a_iso_to_plot = array(self.se.A)
abunds = self.get(cycle,'iso_massf')
isotope_to_plot = array(self.se.isotopes)
z_iso_to_plot = array(self.se.Z)
isomers_to_plot = array(self.se.isomeric_states)
if ref >-1:
ref=self.se.findCycle(ref)
abundsRef=self.se.get(ref,'iso_massf')
masses = self.se.get(cycle,'mass')
else:
cycle=cycle # why so serious?
a_iso_to_plot = array(self.se.A)
abunds = thedata[0]
isotope_to_plot = array(self.se.isotopes)
z_iso_to_plot = array(self.se.Z)
isomers_to_plot = array(self.se.isomeric_states)
if ref >-1:
raise IOError("No. It's not ready yet.")
#ref=self.se.findCycle(ref)
#abundsRef=self.se.get(ref,'iso_massf')
masses = thedata[1]
if mass_range == None:
if verbose:
print('Using default mass range')
mass_range = [min(masses),max(masses)]
masses.sort()
mass_range.sort()
if amass_range == None:
amass_range=[int(min(a_iso_to_plot)),int(max(a_iso_to_plot))]
# remove neutrons - this could move in the non- se/PPN specific part below
if 0 in z_iso_to_plot:
ind_neut = where(z_iso_to_plot==0)[0][0]
a_iso_to_plot = delete(a_iso_to_plot,ind_neut)
z_iso_to_plot = delete(z_iso_to_plot,ind_neut)
isomers_to_plot = delete(isomers_to_plot,ind_neut)
isotope_to_plot = delete(isotope_to_plot,ind_neut)
abunds = delete(abunds,ind_neut,1)
if ref >-1:
abundsRef = delete(abundsRef,ind_neut,1)
# extract amass_range
acon=(a_iso_to_plot>=amass_range[0]) & (a_iso_to_plot<=amass_range[1])
isomers_to_plot = isomers_to_plot[acon]
isotope_to_plot = isotope_to_plot[acon]
z_iso_to_plot = z_iso_to_plot[acon]
abunds = abunds.T[acon].T
if ref >-1:
abundsRef = abundsRef.T[acon].T
a_iso_to_plot = a_iso_to_plot[acon]
el_iso_to_plot = array([x.split('-')[0] for x in isotope_to_plot.tolist()])
# apply mass range
if mass_range == None:
if verbose:
print('Using default mass range')
mass_range = [min(masses),max(masses)]
mass_range.sort()
aabs = []
if ref >-1:
cyc = [cycle,ref]
abus = [abunds,abundsRef]
else:
cyc = [cycle]
abus = [abunds]
for cc,aa in zip(cyc,abus):
if not data_provided:
masses = self.se.get(cc,'mass')
else:
masses=masses # why so serious?
masses.sort()
dmass = masses[1:] - masses[:-1] # I should check the grid definition
dmass = append(dmass,0.)
mcon = (masses>=mass_range[0]) & (masses<=mass_range[1])
dmass = dmass[mcon]
aa = aa[mcon]
# average over mass range:
aa = (aa.T*dmass).T.sum(0)
aa = old_div(aa, (mass_range[1] - mass_range[0]))
# abunds has now length of isotope_to_plot
aabs.append(aa)
if ref >-1:
abunds = old_div(aabs[0],(aabs[1]+1.e-99))
else:
abunds = aabs[0]
self.a_iso_to_plot=a_iso_to_plot
self.isotope_to_plot=isotope_to_plot
self.z_iso_to_plot=z_iso_to_plot
self.el_iso_to_plot=el_iso_to_plot
self.abunds=abunds
self.isomers_to_plot=isomers_to_plot
if elemaburtn: return
# self.isotopes = self.se.isotopes
elif plotType=='PPN':
print("This method adds the following variables to the instance:")
print("a_iso_to_plot mass number of plotted range of species")
print("isotope_to_plot corresponding list of isotopes")
print("z_iso_to_plot corresponding charge numbers")
print("el_iso_to_plot corresponding element names")
print("abunds corresponding abundances")
print("isom isomers and their abundance")
self.get(cycle,decayed=decayed)
if ref is not -1:
if type(ref) is list: # reference cycle from other run
import ppn
pp=ppn.abu_vector(ref[0])
abunds_pp=pp.get(ref[1],decayed=decayed)
self.abunds=old_div(self.abunds,pp.abunds)
else:
abunds=self.abunds
self.get(ref,decayed=decayed)
self.abunds=old_div(abunds,(self.abunds+1.e-99))
if amass_range == None:
amass_range=[min(self.a_iso_to_plot),max(self.a_iso_to_plot)]
aa=ma.masked_outside(self.a_iso_to_plot,amass_range[0],amass_range[1])
isotope_to_plot=ma.array(self.isotope_to_plot,mask=aa.mask).compressed()
z_iso_to_plot=ma.array(self.z_iso_to_plot,mask=aa.mask).compressed()
el_iso_to_plot=ma.array(self.el_iso_to_plot,mask=aa.mask).compressed()
abunds=ma.array(self.abunds,mask=aa.mask).compressed()
a_iso_to_plot=aa.compressed()
isomers_to_plot=[]
for i in range(len(self.isom)):
if int(self.isom[i][0].split('-')[1])>100:
isomers_to_plot.append(self.isom[i])
self.a_iso_to_plot=a_iso_to_plot
self.isotope_to_plot=isotope_to_plot
self.z_iso_to_plot=z_iso_to_plot
self.el_iso_to_plot=el_iso_to_plot
self.abunds=abunds
self.isomers_to_plot=isomers_to_plot
else:
print('This method, iso_abund, is not supported by this class')
print('Returning None')
return None
if verbose:
print('Using the following conditions:')
if plotType=='se':
print('\tmass_range:', mass_range[0], mass_range[1])
print('\tAtomic mass_range:', amass_range[0], amass_range[1])
print('\tcycle: ',cycle)
print('\tplot only stable:',stable)
print('\tplot decayed: ',decayed)
if stable: # remove unstables:
# For the element that belongs to the isotope at index 5 in isotope_to_plot
# (C-12) the following gives the mass numbers of stable elements:
# self.stable_el[self.stable_names.index(el_iso_to_plot[5])][1:]
ind_delete=[]
for i in range(len(isotope_to_plot)):
if a_iso_to_plot[i] not in self.stable_el[self.stable_names.index(el_iso_to_plot[i])][1:]:
ind_delete.append(i)
a_iso_to_plot = delete(a_iso_to_plot, ind_delete)
z_iso_to_plot = delete(z_iso_to_plot, ind_delete)
isomers_to_plot = delete(isomers_to_plot,ind_delete)
isotope_to_plot = delete(isotope_to_plot,ind_delete)
el_iso_to_plot = delete(el_iso_to_plot, ind_delete)
abunds = delete(abunds, ind_delete)
# el_list=[] # list of elements in el_iso_to_plot
#
# for el in self.elements_names:
# if el in el_iso_to_plot:
# el_list.append(el)
# SJONES implicit loop:
el_list = [el for el in self.elements_names if el in el_iso_to_plot]
abund_plot = [] # extract for each element an abundance and associated
mass_num = [] # mass number array, sorted by mass number
for el in el_list:
numbers = a_iso_to_plot[(el_iso_to_plot==el)]
abund_plot.append(abunds[(el_iso_to_plot==el)][argsort(numbers)])
mass_num.append(sort(numbers))
# now plot:
#plot_type = ['-','--','-.',':','-'] ##now implemented as an arg
print(plot_type)
while len(plot_type)<=4:
plot_type.append('')
print(plot_type)
pl_index = 0
if mypoint is None:
points = [['o','^','p','h','*'],['x','+','D','>','s'],['H','v','<','*','3']]
else:
points = [ [mypoint]*5 , [mypoint]*5 , [mypoint]*5]
if color_plot:
colors = ['g','r','c','m','k']
elif colour is not None:
colors = [colour]*5
else:
colors = ['k','k','k','k','k']
ylim1 = 1.e99
ylim2 = -1.e99
# initialise movie-related things:
if mov:
artists=[]
ax=drawax
fig=drawfig
elif drawax is not None:
ax=drawax
else:
ax=pl.axes()
if drawfig is not None:
fig=drawfig
for j in range(len(abund_plot)): #Loop through the elements of interest
# for l in xrange(len(abund_plot[j])):
# if abund_plot[j][l] == 0:
# abund_plot[j][l] = 1e-99
abund_plot[j] = np.maximum(abund_plot[j],1.e-99) # SJONES instead of looping
# a_dum=zeros(len(abund_plot[j])) # this I (FH) have to do because for some
if log_logic == False: # reason log10(abu_abund[j]) does not work
a_dum = abund_plot[j] # although abu_abund[j] is a numpy array?!?
else:
# for ii in range(len(abund_plot[j])):
# a_dum[ii]=log10(abund_plot[j][ii])
a_dum=np.log10(abund_plot[j]) # SJONES this seems to work fine for me
if type(colors[0]) is str:
this_label=str(colors[pl_index]+points[point_set][pl_index]+\
plot_type[pl_index])
else:
this_label=None
if mov:
artist1,=ax.plot(mass_num[j],a_dum,this_label,markersize=6,
markeredgecolor='None')
else:
if this_label is not None:
if label is not None and j==0:
pl.plot(mass_num[j],a_dum,this_label,markersize=6,
label=label,markeredgecolor='None')
pl.legend(loc='best').draw_frame(False)
else:
pl.plot(mass_num[j],a_dum,this_label,markersize=6,
markeredgecolor='None')
else:
if label is not None and j==0:
pl.plot(mass_num[j],a_dum,
color=colors[pl_index],
marker=points[point_set][pl_index],
linestyle=plot_type[pl_index],
markersize=6,label=label,
markeredgecolor='None')
pl.legend(loc='best').draw_frame(False)
else:
pl.plot(mass_num[j],a_dum,
color=colors[pl_index],
marker=points[point_set][pl_index],
linestyle=plot_type[pl_index],
markersize=6,markeredgecolor='None')
abu_max = max(a_dum)
max_index=where(a_dum==abu_max)[0][0]
coordinates=[mass_num[j][max_index],abu_max]
if mov:
artist2=ax.text(coordinates[0]+0.1,1.05*coordinates[1],el_list[j],clip_on=True)
else:
if show_names:
# pl.text(coordinates[0]+0.1,1.05*coordinates[1],el_list[j],clip_on=True)
pl.text(coordinates[0],np.log10(2.2*10.**coordinates[1]),
el_list[j],clip_on=True,
horizontalalignment='center')
pl_index+=1
if pl_index > 4:
pl_index = 0
ylim1=min(ylim1,min(a_dum))
ylim2=max(ylim2,max(a_dum))
if mov:
artists.extend([artist1,artist2])
# now trimming the ylims
if log_logic:
dylim=0.05*(ylim2-ylim1)
ylim1 = ylim1 -dylim
ylim2 = ylim2 +dylim
if ref is not -1:
ylim2 = min(ylim2,4)
ylim1 = max(ylim1,-4)
else:
ylim2 = min(ylim2,0.2)
ylim1 = max(ylim1,-13)
else:
ylim1 = ylim1 *0.8
ylim2 = ylim2 *1.1
if include_title:
if plotType=='se':
if ref == -1:
title = str('Range %4.2f' %mass_range[0]) + str('-%4.2f' %mass_range[1]) +\
str(' for cycle %d' %int(cycle))
else:
title = str('Range %4.2f' %mass_range[0]) + \
str('-%4.2f' %mass_range[1]) + str(' for cycle %d' %int(cycle))+\
str(' relative to cycle %d' %int(ref))
else:
if ref == -1:
title = str('Cycle %d' %int(cycle))
else:
title = str('Cycle %d' %int(cycle))+\
str(' relative to cycle %d' %int(ref))
print("including title: ...")
if mov:
artist1,=ax.title(title)
artists.append(artist1)
else:
pl.title(title)
if ylim[0] == 0 and ylim[1] == 0:
pl.ylim(ylim1,ylim2)
else:
pl.ylim(ylim[0],ylim[1])
pl.xlim([amass_range[0]-.5,amass_range[1]+.5])
pl.xlabel('mass number (A)',fontsize=14)
if ref is not -1:
if log_logic:
pl.ylabel(r'log abundance ratio',fontsize=14)
else:
pl.ylabel(r'abundance ratio',fontsize=14)
else:
if log_logic:
pl.ylabel(r'log mass fraction ',fontsize=14)
else:
pl.ylabel(r'mass fraction',fontsize=14)
if amass_range != None:
minimum_mass = amass_range[0]
maximum_mass = amass_range[1]
elif mass_range != None:
minimum_mass = mass_range[0]
maximum_mass = mass_range[1]
else:
minimum_mass = 0
maximum_mass = 200
if log_logic == False:
if mov:
artist1,=ax.plot([amass_range[0]-.5,amass_range[1]+.5],[1,1],'k-')
artists.append(artist1)
else:
pl.plot([amass_range[0]-.5,amass_range[1]+.5],[1,1],'k-')
else:
if mov:
artist1,=ax.plot([amass_range[0]-.5,amass_range[1]+.5],[0,0],'k-')
artists.append(artist1)
else:
pl.plot([amass_range[0]-.5,amass_range[1]+.5],[0,0],'k-')
labelsx=[]
if (maximum_mass-minimum_mass) > 100:
delta_labelsx = 10
else:
delta_labelsx = 5
iii = amass_range[0]%delta_labelsx
if iii == 0:
labelsx.append(str(amass_range[0]))
else:
labelsx.append(' ')
iii = iii+1
kkk = 0
for label1 in range(amass_range[1]-amass_range[0]):
if iii == 5:
kkk = kkk+1
labelsx.append(str((iii*kkk)+amass_range[0]-(amass_range[0]%5)))
iii = 0
iii = iii+1
else:
labelsx.append(' ')
iii = iii+1
if delta_labelsx == 5:
xticks = arange(amass_range[0],amass_range[1],1)
pl.xticks(xticks,labelsx)
else:
pl.xticks()
# SJONES moved the pl.grid and pl.show to the very end
if grid:
pl.grid()
if show:
pl.show()
##!!FOR!!###### print 'LEN LABELS= ', len(labelsx)
##DEBUGGING####
####!!!######## for bbb in range (len(labelsx)):
############### print labelsx[bbb]
if mov:
return artists | plot the abundance of all the chemical species
Parameters
----------
cycle : string, integer or list
The cycle of interest. If it is a list of cycles, this
method will do a plot for each cycle and save them to a
file.
stable : boolean, optional
A boolean of whether to filter out the unstables. The
defaults is False.
amass_range : list, optional
A 1x2 array containing the lower and upper atomic mass
range. If None plot entire available atomic mass range.
The default is None.
mass_range : list, optional
A 1x2 array containing the lower and upper mass range. If
this is an instance of abu_vector this will only plot
isotopes that have an atominc mass within this range. This
will throw an error if this range does not make sense ie
[45,2]. If None, it will plot over the entire range. The
defaults is None.
ylim : list, optional
A 1x2 array containing the lower and upper Y limits. If
it is [0,0], then ylim will be determined automatically.
The default is [0,0].
ref : integer or list, optional
reference cycle. If it is not -1, this method will plot
the abundences of cycle devided by the cycle of the same
instance given in the ref variable. If ref is a list it
will be interpreted to have two elements:
ref=['dir/of/ref/run',cycle] which uses a refernece cycle
from another run. If any abundence in the reference cycle
is zero, it will replace it with 1e-99. The default is -1.
show : boolean, optional
Boolean of if the plot should be displayed. The default is
True.
log_logic : boolean, optional
Plot abundances in log scale or linear. The default is
True.
decayed : boolean, optional
If True plot decayed distributions, else plot life
distribution. The default is False.
color_plot : boolean, optional
Color dots and lines [True/False]. The default is True.
grid : boolean, optional
print grid. The default is False.
point_set : integer, optional
Set to 0, 1 or 2 to select one of three point sets, useful
for multiple abundances or ratios in one plot. The defalult
is 1.
include_title : boolean, optional
Include a title with the plot. The default is False.
drawfig, drawax, mov : optional, not necessary for user to set these variables
The figure and axes containers to be drawn on, and whether or not a movie is
being made (only True when se.movie is called, which sets mov to True
automatically
elemaburtn : boolean, private
If true, iso_abund() returns after writing self.***_iso_to_plot for
use with other plotting routines.f
mypoint : string, optional
fix the marker style of all the points in this plot to one type, given
as a string. If None, multiple point styles are used as per point_set.
The default is None | entailment |
def elemental_abund(self,cycle,zrange=[1,85],ylim=[0,0],title_items=None,
ref=-1,ref_filename=None,z_pin=None,pin=None,
pin_filename=None,zchi2=None,logeps=False,dilution=None,show_names=True,label='',
colour='black',plotlines=':',plotlabels=True,mark='x',**kwargs):
'''
Plot the decayed elemental abundance distribution (PPN).
Plot the elemental abundance distribution (nugridse).
(FH, 06/2014; SJ 07/2014)
Parameters
----------
cycle : string, integer or list
The cycle of interest. If it is a list of cycles, this
method will do a plot for each cycle and save them to a
file.
zrange : list, optional
A 1x2 array containing the lower and upper atomic number
limit
ylim : list, optional
A 1x2 array containing the lower and upper Y limits. If
it is [0,0], then ylim will be determined automatically.
The default is [0,0].
title_items : list, optional
A list of cycle attributes that will be added to the title.
For possible cycle attributes see self.cattrs.
ref : integer, optional
ref = N: plot abundaces relative to cycle N abundance, similar to the
'ref_filename' option.
Cannot be active at the same time as
the 'ref_filename' option.
ref_filename : string, optional
plot abundances relative to solar abundance. For this option,
a cycle number for the 'ref' option must not be provided
z_pin : int, optional
Charge number for an element to be 'pinned'. An offset will be
calculated from the difference between the cycle value and the
value from the pinned reference.
Can be used with the 'pin_filename' option to import an external
abundance file in the same format as solar abundances.
If no file is given, the reference can be either cycle N='ref'
or the value from the 'ref_filename'.
pin : float, optional
A manually provided [X/Fe] abundance to pin the element selected with 'z_pin'
pin_filename: string, optional
use provided file to provide reference to pin an element to. An offset is
calculated and used to shift the plot.
The file requires header columns marked by '#', column spacing of ' ', and at minimum two columns
containing:
'Z': charge number
'[X/Fe]': metallicity
zchi2 : list, optional
A 1x2 array containing atomic numbers of the elements
for which chi2 test is done when plotType == 'PPN' and pin_filename != None
logeps : boolean, optional
Plots log eps instead of [X/Fe] charts.
dilution : float, optional
Provides the dilution factor for mixing nucleosynthesis products to the surface
Cannot be active at the same time as the 'z_pin' option.
label : string, optional
The label for the abundance distribution
The default is '' (i.e. do not show a label)
show_names : boolean, optional
Whether or not to show the element names on the figure.
colour : string, optional
In case you want to dictate marker and line colours. Takes cymkrgb
single-character colours or any other colour string accepted by
matplotlib. The default is '' (automatic colour selection)
plotlines : string, optional
In case you want to dictate line style. Takes MatPlotLib linestyles.
mark : string, optional
In case you want to dictate marker style. Takes MatPlotLib markers.
Default is 'x'.
kwargs : additional keyword arguments
These arguments are equivalent to those of iso_abund, e.g.
mass_range. Routines from iso_abund are called, to perform
averages and get elemental abundances in the correct form.
Output
------
if plotType == 'PPN' and pin_filename != None
chi2 : float
chi-squared deviation of predicted abundances from observed ones
if plotType == 'se'
z_el : array
proton number of elements being returned
el_abu_plot : array
elemental abundances (as you asked for them, could be ref to something else)
This method adds the following data to the abu_vector instance:
self.el_abu_hash : elemental abundance, dictionary
self.el_name : element names, can be used as keys in el_abu_hash
self.el_abu_log : array of log10 of elemental abundance as plotted, including any ref scaling
'''
#from . import utils
from . import ascii_table as asci
plotType=self._classTest()
offset=0
if ref_filename!=None:
ref=-2
if logeps==True:
if zrange[0]!=1:
print("To use logeps, the z range must be [1,X], otherwise the program will exit.")
sys.exit()
z_pin=1
ref=-3
if plotType=='PPN':
self.get(cycle,decayed=True)
z_el=unique(self.z_iso_to_plot)
zmin_ind=min(where(z_el>=zrange[0])[0])
zmax_ind=max(where(z_el<=zrange[1])[0])
# extract some elemental quantities:
a_el=[]; el_name=[]; el_abu=[]; el_abu_hash={}
for z in z_el[zmin_ind:zmax_ind]:
el=self.el_iso_to_plot[where(self.z_iso_to_plot==z)[0].tolist()[0]]
X_el=self.abunds[where(self.el_iso_to_plot==el)[0].tolist()].sum() # take all iso abunds for one Z and sum
a_el.append(self.a_iso_to_plot[where(self.z_iso_to_plot==z)[0].tolist()[0]])
el_abu.append(X_el)
el_name.append(el)
el_abu_hash[el]=X_el
fe_abund=self.abunds[where(self.el_iso_to_plot=='Fe')[0].tolist()].sum() # Fe abund is always needed to find [X/Fe]
self.el_abu_hash = el_abu_hash
self.el_name = el_name
# if we have provided a solar abundance file
if ref==-2:
from . import utils
utils.solar(ref_filename,1)
el_abu_sun=np.array(utils.solar_elem_abund)
el_abu_plot=np.zeros(len(el_abu))
for zs in z_el[zmin_ind:zmax_ind]:
zelidx=where(z_el[zmin_ind:zmax_ind]==zs)[0]
zsolidx=int(zs-1)
if el_abu_sun[zsolidx] > 0. :
el_abu_plot[zelidx]=el_abu[zelidx[0]]/el_abu_sun[zsolidx]
else:
el_abu_plot[zelidx]=-1
# if we have provided a reference cycle number
elif ref>-1:
self.get(ref,decayed=True)
z_el_ref=unique(self.z_iso_to_plot)
zmin_ind=min(where(z_el_ref>=zrange[0])[0])
zmax_ind=max(where(z_el_ref<=zrange[1])[0])
# extract some elemental quantities:
a_el_ref=[]; el_name_ref=[]; el_abu_ref=[]; el_abu_hash_ref={}
el_abu_plot=np.zeros(len(el_abu))
for z_ref in z_el[zmin_ind:zmax_ind]:
el_ref=self.el_iso_to_plot[where(self.z_iso_to_plot==z_ref)[0].tolist()[0]]
X_el_ref=self.abunds[where(self.el_iso_to_plot==el_ref)[0].tolist()].sum()
a_el_ref.append(self.a_iso_to_plot[where(self.z_iso_to_plot==z_ref)[0].tolist()[0]])
el_abu_ref.append(X_el_ref)
el_name_ref.append(el_ref)
el_abu_hash_ref[el_ref]=X_el
for i in range(len(el_abu)):
el_abu_plot[i-1]=el_abu[i-1]/el_abu_ref[i-1]
# if we want to include observation data
if pin_filename!=None:
print('using the pin filename')
obs_file=asci.readTable(pin_filename,header_char='#')
xfe_sigma=[]
el_abu_obs_log=[]
z_ul=[]
for z_i in z_el[zmin_ind:zmax_ind]:
try:
obs_file.data['[X/H]']
x_over='[X/H]'
sigma='sig_[X/H]'
except:
x_over='[X/Fe]'
sigma='sig_[X/Fe]'
zelidx=where(z_el[zmin_ind:zmax_ind]==z_i)[0]
zpinidx=where(obs_file.data['Z']==z_i)[0] #str()
if len(zpinidx)==0:
el_abu_obs_log.append([None])
xfe_sigma.append([None])
z_ul.append([None])
elif len(zpinidx)>1:
'''if any(obs_file.data['ul'][zpinidx].astype(int))==1:
print('hi')
tmp=obs_file.data['[X/Fe]'][zpinidx].astype(float)
z_ul.append(tmp.tolist())
el_abu_obs_log.append([None]*len(zpinidx))
xfe_sigma.append([None]*len(zpinidx))
else:'''
tmp=obs_file.data[x_over][zpinidx]#.astype(float) # array stores multiple values for a
el_abu_obs_log.append(tmp.tolist()) # single element
tmp=obs_file.data[sigma][zpinidx]#.astype(float)
xfe_sigma.append(tmp.tolist())
z_ul.append([None])
else:
if obs_file.data['ul'][zpinidx]==1: #.astype(int)
tmp=obs_file.data[x_over][zpinidx]#.astype(float)
z_ul.append(tmp.tolist())
tmp=obs_file.data[x_over][zpinidx]#.astype(float)
el_abu_obs_log.append([None])
xfe_sigma.append([None])
else:
tmp=obs_file.data[x_over][zpinidx][0]#.astype(float)
el_abu_obs_log.append([tmp])
tmp=obs_file.data[sigma][zpinidx][0]#.astype(float)
xfe_sigma.append([tmp])
z_ul.append([None])
el_abu_obs=[]
# set a pinned element for offset calculation and adjustment
if z_pin!=None:
print("Pinned element: "+str(z_pin))
if pin_filename!=None:
# converting obervation data from log to standard form for compatibility
# with later code
for i in range(len(el_abu_obs_log)):
if all(el_abu_obs_log[i])==None:
el_abu_obs.append(None)
else:
el_abu_obs.append(np.power(10,el_abu_obs_log[i]))
el_abu_pin=el_abu_obs
elif pin!=None:
print('using manual pin')
pin=np.power(10,pin)
el_abu_pin=np.zeros(len(el_abu))
for i in range(len(el_abu)):
el_abu_pin[i-1]=pin
elif logeps==True:
print('finding log eps')
atomic_mass=[1.008, 4.003, 6.94, 9.012, 10.81, 12.011, 14.007, 15.999, 18.998, 20.18, 22.99, 24.305, 26.982, 28.085, 30.74, 32.06, 35.45, 39.948, 39.098, 40.078, 44.956, 47.867, 50.942, 51.996, 54.938, 55.845, 58.933, 58.693, 6.46, 65.38, 69.723, 72.63, 74.922, 78.971, 79.904, 83.798, 85.468, 87.62, 88.906, 91.224, 92.906, 95.95, 97., 01.07, 102.906, 106.42, 107.868, 112.414, 114.818, 118.71, 121.76, 127.6, 126.904, 131.293, 132.905, 137.27, 138.905, 140.116, 140.908, 144.242, 145. , 150.36, 151.964, 157.25, 158.925, 162.5, 164.93, 167.259, 18.934, 173.045, 174.967, 178.49, 180.948, 183.84, 186.207, 190.23, 192.217, 195.084, 196.967, 200.592, 24.38, 207.2, 208.98, 209., 210., 222., 223., 226., 227., 232.038, 231.036, 238.029, 237., 244., 243., 247., 247., 251., 252., 257., 258., 259., 262., 267., 270., 269., 270., 270., 278., 281., 281., 285., 286., 289., 289., 293., 293., 294.] # this belongs in utils! (FH)
el_abu_pin=atomic_mass
el_abu_plot=np.zeros(len(el_abu))
for i in range(len(el_abu)):
el_abu_plot[i-1]=el_abu[i-1]/el_abu_pin[i-1]
elif ref==-2:
print('using solar pin')
el_abu_pin=np.zeros(len(el_abu))
for i in range(len(el_abu)):
el_abu_pin[i-1]=el_abu[i-1]/el_abu_sun[i-1]
elif ref>=0:
print("Error: A reference file or manual pin is required - the plot will fail")
'''elif ref>=0:
print('using ref pin')
el_abu_pin=np.zeros(len(el_abu))
for i in range(len(el_abu)):nacon
el_abu_pin[i-1]=el_abu[i-1]/el_abu_ref[i-1]
print(el_abu)
print(el_abu_ref)
print(el_abu_pin)
print(el_abu_plot)'''
# calculating the offset value
zelidx=where(z_el[zmin_ind:zmax_ind]==z_pin)[0][0]
offset=np.log10(el_abu_pin[zelidx])-np.log10(el_abu_plot[zelidx])
if ref!=-1 and dilution==None:
el_abu=el_abu_plot
if dilution!=None:
self.get(0,decayed=True)
z_el_ini=unique(self.z_iso_to_plot)
zmin_ind=min(where(z_el>=zrange[0])[0])
zmax_ind=max(where(z_el<=zrange[1])[0])
# extract some elemental quantities:
a_el_ini=[]; el_name_ini=[]; el_abu_ini=[]; el_abu_hash_ini={}
for z in z_el_ini:
el_ini=self.el_iso_to_plot[where(self.z_iso_to_plot==z)[0].tolist()[0]]
X_el_ini=self.abunds[where(self.el_iso_to_plot==el_ini)[0].tolist()].sum() # take all iso abunds for one Z and sum
a_el_ini.append(self.a_iso_to_plot[where(self.z_iso_to_plot==z)[0].tolist()[0]])
el_abu_ini.append(X_el_ini)
el_name_ini.append(el_ini)
el_abu_hash_ini[el]=X_el_ini
el_abu_dilution=[]
for i in range(len(el_abu)):
el_adjusted=(dilution*el_abu[i])+((1-dilution)*el_abu_ini[zmin_ind+i])
fe_adjusted=(dilution*fe_abund)+((1-dilution)*el_abu_ini[24])
num=el_adjusted*el_abu_sun[25]
iadd = 1
if z_el[zmin_ind+i] > 43:
iadd = 2
if z_el[zmin_ind+i] > 61:
iadd = 3
denom=fe_adjusted*el_abu_sun[zmin_ind+iadd+i]
el_abu_dilution.append(num/denom)
#print(el_abu_dilution)
el_abu=el_abu_dilution
# plot an elemental abundance distribution with labels:
self.el_abu_log = np.log10(el_abu)
chi2 = 0.
if pin_filename!=None: # plotting the observation data
# using zip() to plot multiple values for a single element
# also calculate and return chi squared
for xi,yi,wi in zip(z_el[zmin_ind:zmax_ind],el_abu_obs_log,xfe_sigma):
#print(xi)
pl.scatter([xi]*len(yi),yi,marker='o',s=25,color='black')
if all(wi)!=None:
pl.errorbar([xi]*len(yi),yi,wi,color='black',capsize=5)
if zchi2 != None:
#if zchi2[0] <= xi and xi <= zchi2[1]:
if xi in zchi2:
zelidx=where(z_el[zmin_ind:zmax_ind]==xi)[0][0]
chi2 += (((sum(yi)/len(yi)) - (np.log10(el_abu[zelidx])+offset))/\
(sum(wi)/len(wi)))**2
#pl.scatter(z_el[zmin_ind:zmax_ind],z_ul,label='Upper limits',marker='v',color='black')
pl.scatter(z_el[zmin_ind:zmax_ind],z_ul,marker='v',color='black')
# plotting simulation data
pl.plot(z_el[zmin_ind:zmax_ind],np.log10(el_abu)+offset,label=label,\
linestyle=plotlines,color=colour,marker=mark)#,np.log10(el_abu))#,**kwargs)
j=0 # add labels
if plotlabels==True:
for z in z_el[zmin_ind:zmax_ind]:
pl.text(z+0.15,log10(el_abu[j])+offset+0.05,el_name[j])
j += 1
if title_items is not None:
pl.title(self._do_title_string(title_items,cycle))
if ylim[0]==0 and ylim[1]==0:
ylim[0]=max(-15.0,min(np.log10(el_abu)+offset))
ylim[1]=max(ylim[0]+1.0,max(np.log10(el_abu)+offset))
pl.ylim(ylim[0],ylim[1])
pl.xlabel('Z')
#pl.legend()
pl.grid(True)
ylab=['log X/X$_{'+str(ref)+'}$','log mass fraction','log X/X$_{ref}$','log$\epsilon$']
if ref==-2:
pl.ylabel(ylab[2])
elif ref>-1:
if plotlabels==True:
pl.annotate('Offset: '+str(offset),xy=(0.05,0.95),xycoords='axes fraction')
pl.ylabel(ylab[0])
elif logeps==True:
pl.ylabel(ylab[3])
else:
pl.ylabel(ylab[1])
return chi2
elif plotType=='se':
# get self.***_iso_to_plot by calling iso_abund function, which writes them
self.iso_abund(cycle,elemaburtn=True,**kwargs)
z_el=unique(self.se.Z)
zmin_ind=min(where(z_el>=zrange[0])[0])
zmax_ind=max(where(z_el<=zrange[1])[0])
# extract some elemental quantities:
a_el=[]; el_name=[]; el_abu=[]; el_abu_hash={}
for z in z_el[zmin_ind:zmax_ind]:
el=self.el_iso_to_plot[where(self.se.Z==z)[0].tolist()[0]]
X_el=self.abunds[where(self.el_iso_to_plot==el)[0].tolist()].sum()
a_el.append(self.a_iso_to_plot[where(self.z_iso_to_plot==z)[0].tolist()[0]])
el_abu.append(X_el)
el_name.append(el)
el_abu_hash[el]=X_el
# plot an elemental abundance distribution with labels:
if ref==0:
el_abu_plot=el_abu
ylab='log mass fraction'
elif ref==1:
from . import utils
if ref_filename=='':
raise IOError('You chose to plot relative to the solar abundance dist. However, you did not supply the solar abundance file!')
else:
nuutils.solar(ref_filename,1)
menow = where(unique(nuutils.z_sol)==44.)[0][0]
print(1, menow, nuutils.solar_elem_abund[menow])
el_abu_sun=np.array(nuutils.solar_elem_abund)
print(2, el_abu_sun)
print(3, el_abu_sun[42])
el_abu_plot=np.zeros(len(el_abu))
for zs in z_el[zmin_ind:zmax_ind]:
zelidx=where(z_el[zmin_ind:zmax_ind]==zs)[0]
zsolidx=zs-1
if el_abu_sun[zsolidx] > 0. :
el_abu_plot[zelidx]=old_div(el_abu[zelidx],el_abu_sun[zsolidx])
else:
el_abu_plot[zelidx]=-1
ylab='log X/X$_\odot$'
else:
raise IOError('Your choice of ref is not available yet. Please use another.')
if label != '':
if colour!='':
print("Plotting without color and label:")
pl.plot(z_el[zmin_ind:zmax_ind],np.log10(el_abu_plot),
'o-',label=label,color=colour,markeredgecolor='None')
else:
pl.plot(z_el[zmin_ind:zmax_ind],np.log10(el_abu_plot)
,'o-',label=label,markeredgecolor='None')
else:
if colour!='':
pl.plot(z_el[zmin_ind:zmax_ind],np.log10(el_abu_plot),
'o-',color=colour,markeredgecolor='None')
else:
pl.plot(z_el[zmin_ind:zmax_ind],np.log10(el_abu_plot),
'o-',markeredgecolor='None')
if show_names:
j=0 # add labels
for z in z_el[zmin_ind:zmax_ind]:
# pl.text(z+0.15,log10(el_abu_plot[j])+0.05,el_name[j])
if el_abu_plot[j] > 0.:
pl.text(z,log10(el_abu_plot[j])+0.5,el_name[j],
horizontalalignment='center')
j += 1
if title_items is not None:
pl.title(self._do_title_string(title_items,cycle))
if ylim[0]==0 and ylim[1]==0:
ylim[0]=max(-15.0,min(np.log10(el_abu_plot)))
ylim[1]=max(ylim[0]+1.0,max(np.log10(el_abu_plot)))
pl.ylim(ylim[0],ylim[1])
pl.xlabel('Z')
pl.ylabel(ylab)
if label != '':
pl.legend(loc='best').draw_frame(False)
return z_el[zmin_ind:zmax_ind],el_abu_plot
else:
print('This method is not supported for '+plotType)
return
self.el_abu_hash = el_abu_hash
self.el_name = el_name
self.el_abu_log = np.log10(el_abu) | Plot the decayed elemental abundance distribution (PPN).
Plot the elemental abundance distribution (nugridse).
(FH, 06/2014; SJ 07/2014)
Parameters
----------
cycle : string, integer or list
The cycle of interest. If it is a list of cycles, this
method will do a plot for each cycle and save them to a
file.
zrange : list, optional
A 1x2 array containing the lower and upper atomic number
limit
ylim : list, optional
A 1x2 array containing the lower and upper Y limits. If
it is [0,0], then ylim will be determined automatically.
The default is [0,0].
title_items : list, optional
A list of cycle attributes that will be added to the title.
For possible cycle attributes see self.cattrs.
ref : integer, optional
ref = N: plot abundaces relative to cycle N abundance, similar to the
'ref_filename' option.
Cannot be active at the same time as
the 'ref_filename' option.
ref_filename : string, optional
plot abundances relative to solar abundance. For this option,
a cycle number for the 'ref' option must not be provided
z_pin : int, optional
Charge number for an element to be 'pinned'. An offset will be
calculated from the difference between the cycle value and the
value from the pinned reference.
Can be used with the 'pin_filename' option to import an external
abundance file in the same format as solar abundances.
If no file is given, the reference can be either cycle N='ref'
or the value from the 'ref_filename'.
pin : float, optional
A manually provided [X/Fe] abundance to pin the element selected with 'z_pin'
pin_filename: string, optional
use provided file to provide reference to pin an element to. An offset is
calculated and used to shift the plot.
The file requires header columns marked by '#', column spacing of ' ', and at minimum two columns
containing:
'Z': charge number
'[X/Fe]': metallicity
zchi2 : list, optional
A 1x2 array containing atomic numbers of the elements
for which chi2 test is done when plotType == 'PPN' and pin_filename != None
logeps : boolean, optional
Plots log eps instead of [X/Fe] charts.
dilution : float, optional
Provides the dilution factor for mixing nucleosynthesis products to the surface
Cannot be active at the same time as the 'z_pin' option.
label : string, optional
The label for the abundance distribution
The default is '' (i.e. do not show a label)
show_names : boolean, optional
Whether or not to show the element names on the figure.
colour : string, optional
In case you want to dictate marker and line colours. Takes cymkrgb
single-character colours or any other colour string accepted by
matplotlib. The default is '' (automatic colour selection)
plotlines : string, optional
In case you want to dictate line style. Takes MatPlotLib linestyles.
mark : string, optional
In case you want to dictate marker style. Takes MatPlotLib markers.
Default is 'x'.
kwargs : additional keyword arguments
These arguments are equivalent to those of iso_abund, e.g.
mass_range. Routines from iso_abund are called, to perform
averages and get elemental abundances in the correct form.
Output
------
if plotType == 'PPN' and pin_filename != None
chi2 : float
chi-squared deviation of predicted abundances from observed ones
if plotType == 'se'
z_el : array
proton number of elements being returned
el_abu_plot : array
elemental abundances (as you asked for them, could be ref to something else)
This method adds the following data to the abu_vector instance:
self.el_abu_hash : elemental abundance, dictionary
self.el_name : element names, can be used as keys in el_abu_hash
self.el_abu_log : array of log10 of elemental abundance as plotted, including any ref scaling | entailment |
def _do_title_string(self,title_items,cycle):
'''
Create title string
Private method that creates a title string for a cycle plot
out of a list of title_items that are cycle attributes and can
be obtained with self.get
Parameters
----------
title_items : list
A list of cycle attributes.
cycle : scalar
The cycle for which the title string should be created.
Returns
-------
title_string: string
Title string that can be used to decorate plot.
'''
title_string=[]
form_str='%4.1F'
for item in title_items:
num=self.get(item,fname=cycle)
if num > 999 or num < 0.1:
num=log10(num)
prefix='log '
else:
prefix=''
title_string.append(prefix+item+'='+form_str%num)
tt=''
for thing in title_string:
tt = tt+thing+", "
return tt.rstrip(', ') | Create title string
Private method that creates a title string for a cycle plot
out of a list of title_items that are cycle attributes and can
be obtained with self.get
Parameters
----------
title_items : list
A list of cycle attributes.
cycle : scalar
The cycle for which the title string should be created.
Returns
-------
title_string: string
Title string that can be used to decorate plot. | entailment |
def plotprofMulti(self, ini, end, delta, what_specie, xlim1, xlim2,
ylim1, ylim2, symbol=None):
'''
create a movie with mass fractions vs mass coordinate between
xlim1 and xlim2, ylim1 and ylim2. Only works with instances of
se.
Parameters
----------
ini : integer
Initial model i.e. cycle.
end : integer
Final model i.e. cycle.
delta : integer
Sparsity factor of the frames.
what_specie : list
Array with species in the plot.
xlim1, xlim2 : integer or float
Mass coordinate range.
ylim1, ylim2 : integer or float
Mass fraction coordinate range.
symbol : list, optional
Array indicating which symbol you want to use. Must be of
the same len of what_specie array. The default is None.
'''
plotType=self._classTest()
if plotType=='se':
for i in range(ini,end+1,delta):
step = int(i)
#print step
if symbol==None:
symbol_dummy = '-'
for j in range(len(what_specie)):
self.plot_prof_1(step,what_specie[j],xlim1,xlim2,ylim1,ylim2,symbol_dummy)
else:
for j in range(len(what_specie)):
symbol_dummy = symbol[j]
self.plot_prof_1(step,what_specie[j],xlim1,xlim2,ylim1,ylim2,symbol_dummy)
#
filename = str('%03d' % step)+'_test.png'
pl.savefig(filename, dpi=400)
print('wrote file ', filename)
#
pl.clf()
else:
print('This method is not supported for '+str(self.__class__))
return | create a movie with mass fractions vs mass coordinate between
xlim1 and xlim2, ylim1 and ylim2. Only works with instances of
se.
Parameters
----------
ini : integer
Initial model i.e. cycle.
end : integer
Final model i.e. cycle.
delta : integer
Sparsity factor of the frames.
what_specie : list
Array with species in the plot.
xlim1, xlim2 : integer or float
Mass coordinate range.
ylim1, ylim2 : integer or float
Mass fraction coordinate range.
symbol : list, optional
Array indicating which symbol you want to use. Must be of
the same len of what_specie array. The default is None. | entailment |
def movie(self, cycles, plotstyle='',movname='',fps=12,**kwargs):
from matplotlib import animation
'''
Make an interactive movie in the matplotlib window for a number of
different plot types:
Plot types
----------
'iso_abund' : abundance distribution a la se.iso_abund()
'abu_chart' : abundance chart a la se.abu_chart()
'plot' : plot any number of y_items against an x_item
Parameters
----------
cycles : list
Which cycles do you want to plot as movie frames?
plotstyle : string
What type of plot should the movie show? Currently supported is
'iso_abund', 'abu_chart' and 'plot'
movname : string, optional
Name of movie (+ extension, e.g. .mp4 or .avi) if the movie is
to be saved
The default is ''
args : list
Arguments to should be passed to the plotting function. These are
the arguments of the respective methods that make the frames. See
the docstrings of those functions for details
'plot' Parameters
-----------------
'xlims' : tuple, optional
'ylims' : tuple, optional
'xlabel' : string, optional
'ylabel' : string, optional
'legend' : boolean, optional
The default is False
'loc' : string or integer, optional
Set the legend location if legend is True.
The default is 'best'
'interval' : frame interval in ms
FAQ:
----
If ffmpeg is not installed on OSX (and you don't want to wait for port to do it) check out
these binaries:
http://stackoverflow.com/questions/18833731/how-to-set-ffmpeg-for-matplotlib-in-mac-os-x
'''
modelself=self
supported_styles=['iso_abund','abu_chart','plot']
class mov(object):
def __init__(self,cyc,style,movname,fps,**kwargs):
self.fig = None
self.ax = None
self.ani = None
self.cyc = cyc
self.movname=movname
self.fps = fps
self.style=style
if self.style in supported_styles:
animateFunc=draw_frame
else:
raise IOError("this type of movie is not available yet! Sorry!")
if self.style=='plot':
self.y_ditems=kwargs['y_items']
self.data=kwargs['data']
self._init_animation(animateFunc)
def _init_animation(self, animateFunc):
if self.style=='plot':
fsize=14
params = {'axes.labelsize': fsize,
'text.fontsize': fsize,
'legend.fontsize': fsize*0.8,
'xtick.labelsize': fsize,
'ytick.labelsize': fsize,
'text.usetex': False,
'figure.facecolor': 'white',
'ytick.minor.pad': 8,
'ytick.major.pad': 8,
'xtick.minor.pad': 8,
'xtick.major.pad': 8,
'figure.subplot.bottom' : 0.15,
'lines.markersize': 8}
matplotlib.rcParams.update(params)
self.fig, self.ax = pl.subplots()
tmp=[]
for i in range(len(self.y_ditems)):
tmp.append(self.data[0][0])
tmp.append(self.data[0][i+1])
self.lines = self.ax.plot(*tmp)
if 'ylims' in kwargs:
pl.ylim(kwargs['ylims'])
if 'xlims' in kwargs:
pl.xlim(kwargs['xlims'])
if 'xlabel' in kwargs:
pl.xlabel(kwargs['xlabel'])
else:
pl.xlabel(kwargs['x_item'])
if 'ylabel' in kwargs:
pl.ylabel(kwargs['ylabel'])
else:
if type(y_items) is str:
lab=y_items
elif type(y_items) is list and len(y_items) == 1:
lab=y_items[0]
else:
lab=''
for el in kwargs['y_items']:
lab+=el+', '
lab=lab[:-2]
pl.ylabel(lab)
if 'legend' in kwargs and kwargs['legend']:
if 'loc' in kwargs:
pl.legend([line for line in self.lines], self.y_ditems,
loc=kwargs['loc']).draw_frame(False)
else:
pl.legend([line for line in self.lines], self.y_ditems,
loc='best').draw_frame(False)
self._animation(animateFunc)
def _animation(self, animateFunc):
if plotstyle=='plot' and 'interval' in kwargs:
self.ani = animation.FuncAnimation(self.fig, animateFunc, arange(0, len(self.cyc)), interval=kwargs['interval'], blit=False, fargs=[self])
elif plotstyle=='iso_abund':
self.fig, self.ax = pl.subplots()
ims=[]
for i in arange(0,len(self.cyc)):
im=draw_frame(i,self)
ims.append(im)
self.ani = animation.ArtistAnimation(self.fig,ims,interval=50,
blit=False)
self.fig.canvas.draw()
elif plotstyle=='abu_chart':
self.fig=pl.figure()
axx = 0.10
axy = 0.10
axw = 0.85
axh = 0.8
self.ax=pl.axes([axx,axy,axw,axh])
ims=[]
for i in arange(0,len(self.cyc)):
im=draw_frame(i,self)
# draw_frame here returns the patch for the abundance squares
# im[0] as well as the artists im[1], so that the colorbar
# can be plotted only once (on the first plot)
ims.append(im[1])
if i==0:
cb=pl.colorbar(im[0])
cb.set_label('log$_{10}$(X)',fontsize='x-large')
self.ani = animation.ArtistAnimation(self.fig,ims,interval=50,
blit=False)
self.fig.canvas.draw()
if self.movname is not '':
print('\n generating animation: '+self.movname)
self.ani.save(self.movname,fps=self.fps)
print('animation '+self.movname+' saved with '+str(self.fps)+' frames per second')
plotType=self._classTest()
if plotType=='se':
if plotstyle == 'iso_abund':
data = self.se.get(cycles,['iso_massf','mass'])
def draw_frame(i,self=None):
artists=modelself.iso_abund(self.cyc[i],stable=True,show=False,
data_provided=True,thedata=data[i],
verbose=False,drawfig=self.fig,drawax=self.ax,
mov=True,**kwargs)
return artists
if plotstyle == 'abu_chart':
data = self.se.get(cycles,['iso_massf','mass'])
def draw_frame(i,self=None):
artists=modelself.abu_chart(self.cyc[i],show=False,data_provided=True,
thedata=data[i],lbound=(-12, -6),drawfig=self.fig,
drawax=self.ax,mov=True,**kwargs)
return artists
if plotstyle=='plot':
if 'x_item' not in kwargs or 'y_items' not in kwargs:
raise IOError("Please specify both x_item and y_items")
x_item = kwargs['x_item']
y_items = kwargs['y_items']
tx, ty = type(x_item), type(y_items)
if tx is list and ty is list:
data=self.se.get(cycles,x_item+y_items)
elif tx is str and ty is list:
data=self.se.get(cycles,[x_item]+y_items)
elif tx is str and ty is str:
data=self.se.get(cycles,[x_item]+[y_items])
def draw_frame(i, self=None):
# pl.title("cycle: " + self.cyc[i])
for j in range(len(self.lines)):
if 'logy' in kwargs and kwargs['logy']:
self.lines[j].set_data(self.data[i][0],
np.log10(self.data[i][j+1]))
else:
self.lines[j].set_data(self.data[i][0],
self.data[i][j+1])
return self.lines
if plotstyle=='plot':
return mov(cycles,plotstyle,movname,fps,data=data,**kwargs).ani
else:
return mov(cycles,plotstyle,movname,fps).ani | Make an interactive movie in the matplotlib window for a number of
different plot types:
Plot types
----------
'iso_abund' : abundance distribution a la se.iso_abund()
'abu_chart' : abundance chart a la se.abu_chart()
'plot' : plot any number of y_items against an x_item
Parameters
----------
cycles : list
Which cycles do you want to plot as movie frames?
plotstyle : string
What type of plot should the movie show? Currently supported is
'iso_abund', 'abu_chart' and 'plot'
movname : string, optional
Name of movie (+ extension, e.g. .mp4 or .avi) if the movie is
to be saved
The default is ''
args : list
Arguments to should be passed to the plotting function. These are
the arguments of the respective methods that make the frames. See
the docstrings of those functions for details
'plot' Parameters
-----------------
'xlims' : tuple, optional
'ylims' : tuple, optional
'xlabel' : string, optional
'ylabel' : string, optional
'legend' : boolean, optional
The default is False
'loc' : string or integer, optional
Set the legend location if legend is True.
The default is 'best'
'interval' : frame interval in ms
FAQ:
----
If ffmpeg is not installed on OSX (and you don't want to wait for port to do it) check out
these binaries:
http://stackoverflow.com/questions/18833731/how-to-set-ffmpeg-for-matplotlib-in-mac-os-x | entailment |
def plot_prof_1(self, species, keystring, xlim1, xlim2, ylim1,
                ylim2, symbol=None, show=False):
    '''
    Plot one species for cycle between xlim1 and xlim2. Only works
    with instances of se and mesa _profile.

    Parameters
    ----------
    species : list
        Which species to plot.
    keystring : string or integer
        Label that appears in the plot or, in the case of se, a
        cycle.
    xlim1, xlim2 : integer or float
        Mass coordinate range.
    ylim1, ylim2 : integer or float
        Mass fraction coordinate range.
    symbol : string, optional
        Which symbol you want to use. If None symbol is set to '-'.
        The default is None.
    show : boolean, optional
        Show the plotted graph. The default is False.
    '''
    plotType = self._classTest()
    if plotType == 'se':
        tot_mass = self.se.get('mini')
        age = self.se.get(keystring, 'age')
        mass = self.se.get(keystring, 'mass')
        Xspecies = self.se.get(keystring, 'iso_massf', species)
        mod = keystring
    elif plotType == 'mesa_profile':
        tot_mass = self.header_attr['star_mass']
        age = self.header_attr['star_age']
        mass = self.get('mass')
        mod = self.header_attr['model_number']
        Xspecies = self.get(species)
    else:
        print('This method is not supported for '+str(self.__class__))
        return
    # PEP 8: compare against the None singleton with 'is', not '=='.
    if symbol is None:
        symbol = '-'
    # Log-scale the abundances only; the mass coordinate stays linear.
    x, y = self._logarithm(Xspecies, mass, True, False, 10)
    pl.plot(y, x, symbol, label=str(species))
    pl.xlim(xlim1, xlim2)
    pl.ylim(ylim1, ylim2)
    pl.legend()
    pl.xlabel('$Mass$ $coordinate$', fontsize=20)
    pl.ylabel('$X_{i}$', fontsize=20)
    pl.title('Mass='+str(tot_mass)+', cycle='+str(mod))
    if show:
        pl.show()
with instances of se and mesa _profile.
Parameters
----------
species : list
Which species to plot.
keystring : string or integer
Label that appears in the plot or in the case of se, a
cycle.
xlim1, xlim2 : integer or float
Mass coordinate range.
ylim1, ylim2 : integer or float
Mass fraction coordinate range.
symbol : string, optional
Which symbol you want to use. If None symbol is set to '-'.
The default is None.
show : boolean, optional
Show the ploted graph. The default is False. | entailment |
def density_profile(self, ixaxis='mass', ifig=None, colour=None, label=None, fname=None):
    '''
    Plot density as a function of either mass coordinate or radius.

    Parameters
    ----------
    ixaxis : string
        'mass' or 'radius'
        The default value is 'mass'
    ifig : integer or string
        The figure label
        The default value is None
    colour : string
        What colour the line should be
        The default value is None
    label : string
        Label for the line
        The default value is None
    fname : integer
        What cycle to plot from (if SE output)
        The default value is None

    Raises
    ------
    IOError
        If fname is missing for SE output, or the class is unsupported.
    '''
    pT = self._classTest()
    # NOTE: string comparison must use '==' -- the previous 'is' checks
    # relied on CPython string interning and emit SyntaxWarning on 3.8+.
    if pT == 'mesa_profile':
        x = self.get(ixaxis)
        if ixaxis == 'radius':
            # MESA radii are in solar units; convert to cm.
            x = x * ast.rsun_cm
        y = self.get('logRho')
    elif pT == 'se':
        if fname is None:
            raise IOError("Please provide the cycle number fname")
        x = self.se.get(fname, ixaxis)
        y = np.log10(self.se.get(fname, 'rho'))
    else:
        raise IOError("Sorry. the density_profile method is not available "
                      "for this class")
    # Plot-specific things:
    if ixaxis == 'radius':
        x = np.log10(x)
        xlab = r'$\log_{10}(r\,/\,{\rm cm})$'
    else:
        xlab = r'${\rm Mass}\,/\,M_\odot$'
    if ifig is not None:
        pl.figure(ifig)
    if label is not None:
        if colour is not None:
            pl.plot(x, y, color=colour, label=label)
        else:
            pl.plot(x, y, label=label)
        pl.legend(loc='best').draw_frame(False)
    else:
        if colour is not None:
            pl.plot(x, y, color=colour)
        else:
            pl.plot(x, y)
    pl.xlabel(xlab)
    pl.ylabel(r'$\log_{10}(\rho\,/\,{\rm g\,cm}^{-3})$')
Parameters
----------
ixaxis : string
'mass' or 'radius'
The default value is 'mass'
ifig : integer or string
The figure label
The default value is None
colour : string
What colour the line should be
The default value is None
label : string
Label for the line
The default value is None
fname : integer
What cycle to plot from (if SE output)
The default value is None | entailment |
def abu_profile(self, ixaxis='mass', isos=None, ifig=None, fname=None, logy=False,
                colourblind=False):
    '''
    Plot common abundances as a function of either mass coordinate or radius.

    Parameters
    ----------
    ixaxis : string, optional
        'mass', 'logradius' or 'radius'
        The default value is 'mass'
    isos : list, optional
        list of isos to plot, i.e. ['h1','he4','c12'] for MESA or
        ['H-1','He-4','C-12'] for SE output. If None, the code decides
        itself what to plot.
        The default is None.
    ifig : integer or string, optional
        The figure label
        The default value is None
    fname : integer, optional
        What cycle to plot from (if SE output)
        The default value is None
    logy : boolean, optional
        Should y-axis be logarithmic?
        The default value is False
    colourblind : boolean, optional
        do you want to use the colourblind colour palette from the NuGrid
        nuutils module?
    '''
    pT = self._classTest()
    # NOTE: use '==' for string comparison; 'is' is identity and only
    # worked by accident of CPython string interning.
    if pT == 'mesa_profile':
        x = self.get(ixaxis)
        if ixaxis == 'radius':
            x = x * ast.rsun_cm
        if isos is None:
            isos = ['h1', 'he4', 'c12', 'c13', 'n14', 'o16', 'ne20', 'ne22',
                    'mg24', 'mg25', 'al26', 'si28', 'si30', 's32', 's34',
                    'cl35', 'ar36', 'ar38', 'cr52', 'cr56', 'fe56', 'ni56']
        risos = [i for i in isos if i in self.cols]
        abunds = [self.get(riso) for riso in risos]
        names = risos
    elif pT == 'se':
        if fname is None:
            raise IOError("Please provide the cycle number fname")
        x = self.se.get(fname, ixaxis)
        if isos is None:
            # BUG FIX: 'Sl-26' is not an isotope; the MESA default list above
            # contains 'al26', so this was a typo for 'Al-26'.
            isos = ['H-1', 'He-4', 'C-12', 'C-13', 'N-14', 'O-16', 'Ne-20',
                    'Ne-22', 'Mg-24', 'Mg-25', 'Al-26', 'Si-28', 'Si-30',
                    'S-32', 'S-34', 'Cl-35', 'Ar-36', 'Ar-38', 'Cr-52',
                    'Cr-56', 'Fe-56', 'Ni-56']
        risos = [i for i in isos if i in self.se.isotopes]
        abunds = self.se.get(fname, 'iso_massf', risos)
        names = risos
    else:
        raise IOError("Sorry. the density_profile method is not available "
                      "for this class")
    # Plot-specific things:
    if ixaxis == 'logradius':
        x = np.log10(x)
        xlab = r'$\log_{10}(r\,/\,{\rm cm})$'
    elif ixaxis == 'radius':
        # Convert cm to Mm for a readable axis.
        x = old_div(x, 1.e8)
        xlab = 'r / Mm'
    else:
        xlab = r'${\rm Mass}\,/\,M_\odot$'
    if ifig is not None:
        pl.figure(ifig)
    from . import utils as u
    lscb = u.linestylecb  # colourblind linestyle function
    for i in range(len(risos)):
        if logy:
            y = np.log10(abunds if len(risos) < 2 else abunds[i])
        else:
            y = abunds if len(risos) < 2 else abunds[i]
        if colourblind:
            pl.plot(x, y, ls=lscb(i)[0], marker=lscb(i)[1],
                    color=lscb(i)[2], markevery=u.linestyle(i)[1]*20,
                    label=names[i], mec='None')
        else:
            pl.plot(x, y, u.linestyle(i)[0], markevery=u.linestyle(i)[1]*20,
                    label=names[i], mec='None')
    pl.legend(loc='best').draw_frame(False)
    pl.xlabel(xlab)
    pl.ylabel('$\log(X)$')
Parameters
----------
ixaxis : string, optional
'mass', 'logradius' or 'radius'
The default value is 'mass'
isos : list, optional
list of isos to plot, i.e. ['h1','he4','c12'] for MESA or
['H-1','He-4','C-12'] for SE output. If None, the code decides
itself what to plot.
The default is None.
ifig : integer or string, optional
The figure label
The default value is None
fname : integer, optional
What cycle to plot from (if SE output)
The default value is None
logy : boolean, optional
Should y-axis be logarithmic?
The default value is False
colourblind : boolean, optional
do you want to use the colourblind colour palette from the NuGrid
nuutils module? | entailment |
def get_system(self, identity):
    """Given the identity return a HPESystem object

    :param identity: The identity of the System resource
    :returns: The System object
    """
    sys_resource = system.HPESystem(
        self._conn, identity, redfish_version=self.redfish_version)
    return sys_resource
:param identity: The identity of the System resource
:returns: The System object | entailment |
def get_manager(self, identity):
    """Given the identity return a HPEManager object

    :param identity: The identity of the Manager resource
    :returns: The Manager object
    """
    mgr_resource = manager.HPEManager(
        self._conn, identity, redfish_version=self.redfish_version)
    return mgr_resource
:param identity: The identity of the Manager resource
:returns: The Manager object | entailment |
def get_update_service(self):
    """Return a HPEUpdateService object

    :returns: The UpdateService object
    """
    # Resolve the UpdateService subresource URL from the service root.
    url = utils.get_subresource_path_by(self, 'UpdateService')
    return update_service.HPEUpdateService(
        self._conn, url, redfish_version=self.redfish_version)
:returns: The UpdateService object | entailment |
def get_account_service(self):
    """Return a HPEAccountService object

    :returns: The AccountService object
    """
    # Resolve the AccountService subresource URL from the service root.
    url = utils.get_subresource_path_by(self, 'AccountService')
    return account_service.HPEAccountService(
        self._conn, url, redfish_version=self.redfish_version)
def _execute_sum(sum_file_path, mount_point, components=None):
    """Executes the SUM based firmware update command.

    This method executes the SUM based firmware update command to update the
    components specified, if not, it performs update on all the firmware
    components on the server.

    :param sum_file_path: A string with the path to the SUM binary to be
        executed
    :param components: A list of components to be updated. If it is None, all
        the firmware components are updated.
    :param mount_point: Location in which SPP iso is mounted.
    :returns: A string with the statistics of the updated/failed components.
    :raises: SUMOperationError, when the SUM based firmware update operation
        on the node fails.
    """
    # NOTE(review): all components are folded into one string which is then
    # passed as a single argv element to processutils.execute -- confirm
    # that SUM/launch_sum.sh splits this internally as intended.
    cmd = ' --c ' + ' --c '.join(components) if components else ''
    try:
        if SUM_LOCATION in sum_file_path:
            location = os.path.join(mount_point, 'packages')
            # NOTE: 'launch_sum.sh' binary is part of SPP ISO and it is
            # available in the SPP mount point (eg:'/mount/launch_sum.sh').
            # 'launch_sum.sh' binary calls the 'smartupdate' binary by passing
            # the arguments.
            processutils.execute('./launch_sum.sh', '--s', '--romonly',
                                 '--use_location', location, cmd,
                                 cwd=mount_point)
        else:
            processutils.execute(sum_file_path, '--s', '--romonly', cmd)
    except processutils.ProcessExecutionError as e:
        # SUM reports its outcome via the process exit code; a recognized
        # code yields a parsed summary instead of an error.
        result = _parse_sum_ouput(e.exit_code)
        if result:
            return result
        else:
            raise exception.SUMOperationError(reason=str(e))
This method executes the SUM based firmware update command to update the
components specified, if not, it performs update on all the firmware
components on th server.
:param sum_file_path: A string with the path to the SUM binary to be
executed
:param components: A list of components to be updated. If it is None, all
the firmware components are updated.
:param mount_point: Location in which SPP iso is mounted.
:returns: A string with the statistics of the updated/failed components.
:raises: SUMOperationError, when the SUM based firmware update operation
on the node fails. | entailment |
def _get_log_file_data_as_encoded_content():
    """Gzip and base64 encode files and BytesIO buffers.

    This method gets the log files created by SUM based
    firmware update and tar zips the files.

    :returns: A gzipped and base64 encoded string as text.
    """
    buffer = io.BytesIO()
    with tarfile.open(fileobj=buffer, mode='w:gz') as archive:
        # Only archive log files that actually exist on disk.
        for log_file in OUTPUT_FILES:
            if os.path.isfile(log_file):
                archive.add(log_file)
    buffer.seek(0)
    return base64.encode_as_bytes(buffer.getvalue())
This method gets the log files created by SUM based
firmware update and tar zip the files.
:returns: A gzipped and base64 encoded string as text. | entailment |
def _parse_sum_ouput(exit_code):
    """Parse the SUM output log file.

    This method parses through the SUM log file in the
    default location to return the SUM update status.

    :param exit_code: An integer returned by SUM after command execution.
    :returns: For exit code 3, a "Summary: ..." string; for exit codes
        0, 1 or 253 (when the log file exists), a dict with 'Summary'
        (statistics of updated/failed components) and 'Log Data' (the
        encoded log archive); otherwise the string
        "UPDATE STATUS: UNKNOWN".
    """
    # Exit code 3 has a dedicated message and needs no log parsing.
    if exit_code == 3:
        return "Summary: %s" % EXIT_CODE_TO_STRING.get(exit_code)
    if exit_code in (0, 1, 253):
        if os.path.exists(OUTPUT_FILES[0]):
            with open(OUTPUT_FILES[0], 'r') as f:
                output_data = f.read()
            # Slice out the text between the 'Deployed Components:' and
            # 'Exit status:' markers; each blank-line-separated chunk in
            # that region describes one component.
            ret_data = output_data[(output_data.find('Deployed Components:') +
                                    len('Deployed Components:')):
                                   output_data.find('Exit status:')]
            failed = 0
            success = 0
            for line in re.split('\n\n', ret_data):
                if line:
                    # A component chunk without the word 'Success' counts
                    # as a failure.
                    if 'Success' not in line:
                        failed += 1
                    else:
                        success += 1
            return {
                'Summary': (
                    "%(return_string)s Status of updated components: Total: "
                    "%(total)s Success: %(success)s Failed: %(failed)s." %
                    {'return_string': EXIT_CODE_TO_STRING.get(exit_code),
                     'total': (success + failed), 'success': success,
                     'failed': failed}),
                'Log Data': _get_log_file_data_as_encoded_content()
            }
    return "UPDATE STATUS: UNKNOWN"
This method parses through the SUM log file in the
default location to return the SUM update status. Sample return
string:
"Summary: The installation of the component failed. Status of updated
components: Total: 5 Success: 4 Failed: 1"
:param exit_code: A integer returned by the SUM after command execution.
:returns: A string with the statistics of the updated/failed
components and 'None' when the exit_code is not 0, 1, 3 or 253. | entailment |
def update_firmware(node):
    """Performs SUM based firmware update on the node.

    This method performs SUM firmware update by mounting the
    SPP ISO on the node. It performs firmware update on all or
    some of the firmware components.

    :param node: A node object of type dict.
    :returns: Operation Status string.
    :raises: SUMOperationError, when the vmedia device is not found or
        when the mount operation fails or when the image validation fails.
    :raises: IloConnectionError, when the iLO connection fails.
    :raises: IloError, when vmedia eject or insert operation fails.
    """
    sum_update_iso = node['clean_step']['args'].get('url')
    # Validates the http image reference for SUM update ISO.
    try:
        utils.validate_href(sum_update_iso)
    except exception.ImageRefValidationFailed as e:
        raise exception.SUMOperationError(reason=e)
    # Ejects the CDROM device in the iLO and inserts the SUM update ISO
    # to the CDROM device.
    info = node.get('driver_info')
    ilo_object = client.IloClient(info.get('ilo_address'),
                                  info.get('ilo_username'),
                                  info.get('ilo_password'))
    ilo_object.eject_virtual_media('CDROM')
    ilo_object.insert_virtual_media(sum_update_iso, 'CDROM')
    # Waits for the OS to detect the disk and update the label file. SPP ISO
    # is identified by matching its label.
    time.sleep(WAIT_TIME_DISK_LABEL_TO_BE_VISIBLE)
    vmedia_device_dir = "/dev/disk/by-label/"
    # BUG FIX: 'vmedia_device_file' was previously unbound (NameError)
    # when no 'SPP*' label was present; initialize it and check explicitly.
    # Also avoid shadowing the builtin name 'file'.
    vmedia_device_file = None
    for entry in os.listdir(vmedia_device_dir):
        if fnmatch.fnmatch(entry, 'SPP*'):
            vmedia_device_file = os.path.join(vmedia_device_dir, entry)
    if vmedia_device_file is None or not os.path.exists(vmedia_device_file):
        msg = "Unable to find the virtual media device for SUM"
        raise exception.SUMOperationError(reason=msg)
    # Validates the SPP ISO image for any file corruption using the checksum
    # of the ISO file.
    expected_checksum = node['clean_step']['args'].get('checksum')
    try:
        utils.verify_image_checksum(vmedia_device_file, expected_checksum)
    except exception.ImageRefValidationFailed as e:
        raise exception.SUMOperationError(reason=e)
    # Mounts SPP ISO on a temporary directory.
    vmedia_mount_point = tempfile.mkdtemp()
    try:
        try:
            processutils.execute("mount", vmedia_device_file,
                                 vmedia_mount_point)
        except processutils.ProcessExecutionError as e:
            msg = ("Unable to mount virtual media device %(device)s: "
                   "%(error)s" % {'device': vmedia_device_file, 'error': e})
            raise exception.SUMOperationError(reason=msg)
        # Executes the SUM based firmware update by passing the 'smartupdate'
        # executable path if exists else 'hpsum' executable path and the
        # components specified (if any).
        sum_file_path = os.path.join(vmedia_mount_point, SUM_LOCATION)
        if not os.path.exists(sum_file_path):
            sum_file_path = os.path.join(vmedia_mount_point, HPSUM_LOCATION)
        components = node['clean_step']['args'].get('components')
        result = _execute_sum(sum_file_path, vmedia_mount_point,
                              components=components)
        processutils.trycmd("umount", vmedia_mount_point)
    finally:
        shutil.rmtree(vmedia_mount_point, ignore_errors=True)
    return result
This method performs SUM firmware update by mounting the
SPP ISO on the node. It performs firmware update on all or
some of the firmware components.
:param node: A node object of type dict.
:returns: Operation Status string.
:raises: SUMOperationError, when the vmedia device is not found or
when the mount operation fails or when the image validation fails.
:raises: IloConnectionError, when the iLO connection fails.
:raises: IloError, when vmedia eject or insert operation fails. | entailment |
def parse(self, text):
    '''Return a string with markup tags converted to ansi-escape sequences.'''
    tags = []
    results = []
    # Substitute every recognized tag, tracking unmatched opening tags.
    converted = self.re_tag.sub(
        lambda match: self.sub_tag(match, tags, results), text)
    if self.strict and tags:
        markup = "%s%s%s" % (self.tag_sep[0], tags.pop(0), self.tag_sep[1])
        raise MismatchedTag(
            'opening tag "%s" has no corresponding closing tag' % markup)
    if self.always_reset and not converted.endswith(Style.RESET_ALL):
        converted += Style.RESET_ALL
    return converted
def ansiprint(self, *args, **kwargs):
    '''Wrapper around builtins.print() that runs parse() on all arguments first.'''
    parsed = (self.parse(str(arg)) for arg in args)
    builtins.print(*parsed, **kwargs)
def strip(self, text):
    '''Return string with markup tags removed.'''
    tags = []
    results = []

    def drop(match):
        # Delegate to clear_tag, which replaces the tag with nothing.
        return self.clear_tag(match, tags, results)

    return self.re_tag.sub(drop, text)
def list_shares(self, prefix=None, marker=None, num_results=None,
                include_metadata=False, timeout=None):
    '''
    Returns a generator to list the shares under the specified account.
    The generator will lazily follow the continuation tokens returned by
    the service and stop when all shares have been returned or num_results
    is reached.

    If num_results is specified and the account has more than that number of
    shares, the generator will have a populated next_marker field once it
    finishes. This marker can be used to create a new generator if more
    results are desired.

    :param str prefix:
        Filters the results to return only shares whose names
        begin with the specified prefix.
    :param int num_results:
        Specifies the maximum number of shares to return.
    :param bool include_metadata:
        Specifies that share metadata be returned in the response.
    :param str marker:
        An opaque continuation token. This value can be retrieved from the
        next_marker field of a previous generator object if num_results was
        specified and that generator has finished enumerating results. If
        specified, this generator will begin returning results from the point
        where the previous generator stopped.
    :param int timeout:
        The timeout parameter is expressed in seconds.
    '''
    operation_context = _OperationContext(location_lock=True)
    kwargs = {
        'prefix': prefix,
        'marker': marker,
        'max_results': num_results,
        'include': 'metadata' if include_metadata else None,
        'timeout': timeout,
        '_context': operation_context,
    }
    # First page is fetched eagerly; the generator follows continuations.
    first_page = self._list_shares(**kwargs)
    return ListGenerator(first_page, self._list_shares, (), kwargs)
The generator will lazily follow the continuation tokens returned by
the service and stop when all shares have been returned or num_results
is reached.
If num_results is specified and the account has more than that number of
shares, the generator will have a populated next_marker field once it
finishes. This marker can be used to create a new generator if more
results are desired.
:param str prefix:
Filters the results to return only shares whose names
begin with the specified prefix.
:param int num_results:
Specifies the maximum number of shares to return.
:param bool include_metadata:
Specifies that share metadata be returned in the response.
:param str marker:
An opaque continuation token. This value can be retrieved from the
next_marker field of a previous generator object if num_results was
specified and that generator has finished enumerating results. If
specified, this generator will begin returning results from the point
where the previous generator stopped.
:param int timeout:
The timeout parameter is expressed in seconds. | entailment |
def list_directories_and_files(self, share_name, directory_name=None,
                               num_results=None, marker=None, timeout=None,
                               prefix=None):
    '''
    Returns a generator to list the directories and files under the specified
    share. The generator will lazily follow the continuation tokens returned
    by the service and stop when all directories and files have been returned
    or num_results is reached.

    If num_results is specified and the share has more than that number of
    files and directories, the generator will have a populated next_marker
    field once it finishes. This marker can be used to create a new generator
    if more results are desired.

    :param str share_name:
        Name of existing share.
    :param str directory_name:
        The path to the directory.
    :param int num_results:
        Specifies the maximum number of files to return,
        including all directory elements. If the request does not specify
        num_results or specifies a value greater than 5,000, the server will
        return up to 5,000 items. Setting num_results to a value less than
        or equal to zero results in error response code 400 (Bad Request).
    :param str marker:
        An opaque continuation token. This value can be retrieved from the
        next_marker field of a previous generator object if num_results was
        specified and that generator has finished enumerating results. If
        specified, this generator will begin returning results from the point
        where the previous generator stopped.
    :param int timeout:
        The timeout parameter is expressed in seconds.
    :param str prefix:
        List only the files and/or directories with the given prefix.
    '''
    operation_context = _OperationContext(location_lock=True)
    args = (share_name, directory_name)
    kwargs = {
        'marker': marker,
        'max_results': num_results,
        'timeout': timeout,
        '_context': operation_context,
        'prefix': prefix,
    }
    # First page is fetched eagerly; the generator follows continuations.
    first_page = self._list_directories_and_files(*args, **kwargs)
    return ListGenerator(first_page, self._list_directories_and_files,
                         args, kwargs)
The generator will lazily follow the continuation tokens returned by
the service and stop when all directories and files have been returned or
num_results is reached.
If num_results is specified and the share has more than that number of
files and directories, the generator will have a populated next_marker
field once it finishes. This marker can be used to create a new generator
if more results are desired.
:param str share_name:
Name of existing share.
:param str directory_name:
The path to the directory.
:param int num_results:
Specifies the maximum number of files to return,
including all directory elements. If the request does not specify
num_results or specifies a value greater than 5,000, the server will
return up to 5,000 items. Setting num_results to a value less than
or equal to zero results in error response code 400 (Bad Request).
:param str marker:
An opaque continuation token. This value can be retrieved from the
next_marker field of a previous generator object if num_results was
specified and that generator has finished enumerating results. If
specified, this generator will begin returning results from the point
where the previous generator stopped.
:param int timeout:
The timeout parameter is expressed in seconds.
:param str prefix:
List only the files and/or directories with the given prefix. | entailment |
def _get_file(self, share_name, directory_name, file_name,
              start_range=None, end_range=None, validate_content=False,
              timeout=None, _context=None):
    '''
    Downloads a file's content, metadata, and properties. You can specify a
    range if you don't need to download the file in its entirety. If no range
    is specified, the full file will be downloaded.

    See get_file_to_* for high level functions that handle the download
    of large files with automatic chunking and progress notifications.

    :param str share_name:
        Name of existing share.
    :param str directory_name:
        The path to the directory.
    :param str file_name:
        Name of existing file.
    :param int start_range:
        Start of byte range to use for downloading a section of the file.
        If no end_range is given, all bytes after the start_range will be
        downloaded. The start_range and end_range params are inclusive.
        Ex: start_range=0, end_range=511 will download first 512 bytes of file.
    :param int end_range:
        End of byte range to use for downloading a section of the file.
        If end_range is given, start_range must be provided.
        The start_range and end_range params are inclusive.
        Ex: start_range=0, end_range=511 will download first 512 bytes of file.
    :param bool validate_content:
        When this is set to True and specified together with the Range header,
        the service returns the MD5 hash for the range, as long as the range
        is less than or equal to 4 MB in size.
    :param int timeout:
        The timeout parameter is expressed in seconds.
    :return: A File with content, properties, and metadata.
    :rtype: :class:`~azure.storage.file.models.File`
    '''
    _validate_not_none('share_name', share_name)
    _validate_not_none('file_name', file_name)

    # Assemble the GET request against the file's path.
    request = HTTPRequest()
    request.method = 'GET'
    request.host_locations = self._get_host_locations()
    request.path = _get_path(share_name, directory_name, file_name)
    request.query = {'timeout': _int_to_str(timeout)}

    # Neither end of the range is required; both may be omitted for a
    # full-file download.
    _validate_and_format_range_headers(
        request, start_range, end_range,
        start_range_required=False,
        end_range_required=False,
        check_content_md5=validate_content)

    return self._perform_request(
        request, _parse_file, [file_name, validate_content],
        operation_context=_context)
range if you don't need to download the file in its entirety. If no range
is specified, the full file will be downloaded.
See get_file_to_* for high level functions that handle the download
of large files with automatic chunking and progress notifications.
:param str share_name:
Name of existing share.
:param str directory_name:
The path to the directory.
:param str file_name:
Name of existing file.
:param int start_range:
Start of byte range to use for downloading a section of the file.
If no end_range is given, all bytes after the start_range will be downloaded.
The start_range and end_range params are inclusive.
Ex: start_range=0, end_range=511 will download first 512 bytes of file.
:param int end_range:
End of byte range to use for downloading a section of the file.
If end_range is given, start_range must be provided.
The start_range and end_range params are inclusive.
Ex: start_range=0, end_range=511 will download first 512 bytes of file.
:param bool validate_content:
When this is set to True and specified together with the Range header,
the service returns the MD5 hash for the range, as long as the range
is less than or equal to 4 MB in size.
:param int timeout:
The timeout parameter is expressed in seconds.
:return: A File with content, properties, and metadata.
:rtype: :class:`~azure.storage.file.models.File` | entailment |
def get_file_to_path(self, share_name, directory_name, file_name, file_path,
                     open_mode='wb', start_range=None, end_range=None,
                     validate_content=False, progress_callback=None,
                     max_connections=2, timeout=None):
    '''
    Downloads a file to a file path, with automatic chunking and progress
    notifications. Returns an instance of File with properties and metadata.

    :param str share_name:
        Name of existing share.
    :param str directory_name:
        The path to the directory.
    :param str file_name:
        Name of existing file.
    :param str file_path:
        Path of file to write to.
    :param str open_mode:
        Mode to use when opening the file. Note that specifying append only
        open_mode prevents parallel download. So, max_connections must be set
        to 1 if this open_mode is used.
    :param int start_range:
        Start of byte range to use for downloading a section of the file.
        If no end_range is given, all bytes after the start_range will be
        downloaded. The start_range and end_range params are inclusive.
        Ex: start_range=0, end_range=511 will download first 512 bytes of file.
    :param int end_range:
        End of byte range to use for downloading a section of the file.
        If end_range is given, start_range must be provided.
        The start_range and end_range params are inclusive.
        Ex: start_range=0, end_range=511 will download first 512 bytes of file.
    :param bool validate_content:
        If set to true, validates an MD5 hash for each retrieved portion of
        the file. This is primarily valuable for detecting bitflips on the
        wire if using http instead of https as https (the default) will
        already validate. Note that the service will only return
        transactional MD5s for chunks 4MB or less so the first get request
        will be of size self.MAX_CHUNK_GET_SIZE instead of
        self.MAX_SINGLE_GET_SIZE. If self.MAX_CHUNK_GET_SIZE was set to
        greater than 4MB an error will be thrown. As computing the MD5 takes
        processing time and more requests will need to be done due to the
        reduced chunk size there may be some increase in latency.
    :param progress_callback:
        Callback for progress with signature function(current, total)
        where current is the number of bytes transfered so far, and total is
        the size of the file if known.
    :type progress_callback: callback function in format of func(current, total)
    :param int max_connections:
        If set to 2 or greater, an initial get will be done for the first
        self.MAX_SINGLE_GET_SIZE bytes of the file. If this is the entire
        file, the method returns at this point. If it is not, it will
        download the remaining data parallel using the number of threads
        equal to max_connections. Each chunk will be of size
        self.MAX_CHUNK_GET_SIZE. If set to 1, a single large get request
        will be done. This is not generally recommended but available if
        very few threads should be used, network requests are very
        expensive, or a non-seekable stream prevents parallel download.
        This may also be valuable if the file is being concurrently
        modified to enforce atomicity or if many files are expected to be
        empty as an extra request is required for empty files if
        max_connections is greater than 1.
    :param int timeout:
        The timeout parameter is expressed in seconds. This method may make
        multiple calls to the Azure service and the timeout will apply to
        each call individually.
    :return: A File with properties and metadata.
    :rtype: :class:`~azure.storage.file.models.File`
    '''
    _validate_not_none('share_name', share_name)
    _validate_not_none('file_name', file_name)
    _validate_not_none('file_path', file_path)
    _validate_not_none('open_mode', open_mode)

    # Append modes cannot seek, so a parallel (chunked) download is
    # impossible with more than one connection.
    if max_connections > 1 and 'a' in open_mode:
        raise ValueError(_ERROR_PARALLEL_NOT_SEEKABLE)

    with open(file_path, open_mode) as handle:
        return self.get_file_to_stream(
            share_name, directory_name, file_name, handle,
            start_range, end_range, validate_content,
            progress_callback, max_connections, timeout)
notifications. Returns an instance of File with properties and metadata.
:param str share_name:
Name of existing share.
:param str directory_name:
The path to the directory.
:param str file_name:
Name of existing file.
:param str file_path:
Path of file to write to.
:param str open_mode:
Mode to use when opening the file. Note that specifying append only
open_mode prevents parallel download. So, max_connections must be set
to 1 if this open_mode is used.
:param int start_range:
Start of byte range to use for downloading a section of the file.
If no end_range is given, all bytes after the start_range will be downloaded.
The start_range and end_range params are inclusive.
Ex: start_range=0, end_range=511 will download first 512 bytes of file.
:param int end_range:
End of byte range to use for downloading a section of the file.
If end_range is given, start_range must be provided.
The start_range and end_range params are inclusive.
Ex: start_range=0, end_range=511 will download first 512 bytes of file.
:param bool validate_content:
If set to true, validates an MD5 hash for each retrieved portion of
the file. This is primarily valuable for detecting bitflips on the wire
if using http instead of https as https (the default) will already
validate. Note that the service will only return transactional MD5s
for chunks 4MB or less so the first get request will be of size
self.MAX_CHUNK_GET_SIZE instead of self.MAX_SINGLE_GET_SIZE. If
self.MAX_CHUNK_GET_SIZE was set to greater than 4MB an error will be
thrown. As computing the MD5 takes processing time and more requests
will need to be done due to the reduced chunk size there may be some
increase in latency.
:param progress_callback:
Callback for progress with signature function(current, total)
where current is the number of bytes transfered so far, and total is
the size of the file if known.
:type progress_callback: callback function in format of func(current, total)
:param int max_connections:
If set to 2 or greater, an initial get will be done for the first
self.MAX_SINGLE_GET_SIZE bytes of the file. If this is the entire file,
the method returns at this point. If it is not, it will download the
remaining data parallel using the number of threads equal to
max_connections. Each chunk will be of size self.MAX_CHUNK_GET_SIZE.
If set to 1, a single large get request will be done. This is not
generally recommended but available if very few threads should be
used, network requests are very expensive, or a non-seekable stream
prevents parallel download. This may also be valuable if the file is
being concurrently modified to enforce atomicity or if many files are
expected to be empty as an extra request is required for empty files
if max_connections is greater than 1.
:param int timeout:
The timeout parameter is expressed in seconds. This method may make
multiple calls to the Azure service and the timeout will apply to
each call individually.
:return: A File with properties and metadata.
:rtype: :class:`~azure.storage.file.models.File` | entailment |
def get_file_to_stream(
        self, share_name, directory_name, file_name, stream,
        start_range=None, end_range=None, validate_content=False,
        progress_callback=None, max_connections=2, timeout=None):
    '''
    Downloads a file to a stream, with automatic chunking and progress
    notifications. Returns an instance of :class:`File` with properties
    and metadata.

    :param str share_name:
        Name of existing share.
    :param str directory_name:
        The path to the directory.
    :param str file_name:
        Name of existing file.
    :param io.IOBase stream:
        Opened file/stream to write to.
    :param int start_range:
        Start of byte range to use for downloading a section of the file.
        If no end_range is given, all bytes after the start_range will be
        downloaded. The start_range and end_range params are inclusive.
        Ex: start_range=0, end_range=511 will download first 512 bytes of file.
    :param int end_range:
        End of byte range to use for downloading a section of the file.
        If end_range is given, start_range must be provided.
        The start_range and end_range params are inclusive.
        Ex: start_range=0, end_range=511 will download first 512 bytes of file.
    :param bool validate_content:
        If set to true, validates an MD5 hash for each retrieved portion of
        the file. This is primarily valuable for detecting bitflips on the
        wire if using http instead of https as https (the default) will
        already validate. Note that the service will only return
        transactional MD5s for chunks 4MB or less so the first get request
        will be of size self.MAX_CHUNK_GET_SIZE instead of
        self.MAX_SINGLE_GET_SIZE. If self.MAX_CHUNK_GET_SIZE was set to
        greater than 4MB an error will be thrown. As computing the MD5 takes
        processing time and more requests will need to be done due to the
        reduced chunk size there may be some increase in latency.
    :param progress_callback:
        Callback for progress with signature function(current, total)
        where current is the number of bytes transfered so far, and total is
        the size of the file if known.
    :type progress_callback: callback function in format of func(current, total)
    :param int max_connections:
        If set to 2 or greater, an initial get will be done for the first
        self.MAX_SINGLE_GET_SIZE bytes of the file. If this is the entire
        file, the method returns at this point. If it is not, it will
        download the remaining data parallel using the number of threads
        equal to max_connections. Each chunk will be of size
        self.MAX_CHUNK_GET_SIZE. If set to 1, a single large get request
        will be done. This is not generally recommended but available if
        very few threads should be used, network requests are very
        expensive, or a non-seekable stream prevents parallel download.
        This may also be valuable if the file is being concurrently modified
        to enforce atomicity or if many files are expected to be empty as an
        extra request is required for empty files if max_connections is
        greater than 1.
    :param int timeout:
        The timeout parameter is expressed in seconds. This method may make
        multiple calls to the Azure service and the timeout will apply to
        each call individually.
    :return: A File with properties and metadata.
    :rtype: :class:`~azure.storage.file.models.File`
    '''
    _validate_not_none('share_name', share_name)
    _validate_not_none('file_name', file_name)
    _validate_not_none('stream', stream)

    # If the user explicitly sets max_connections to 1, do a single shot download
    if max_connections == 1:
        file = self._get_file(share_name,
                              directory_name,
                              file_name,
                              start_range=start_range,
                              end_range=end_range,
                              validate_content=validate_content,
                              timeout=timeout)

        # Set the download size
        download_size = file.properties.content_length

    # If max_connections is greater than 1, do the first get to establish the
    # size of the file and get the first segment of data
    else:
        # Parallel download requires seeking back into the stream to place
        # each chunk; reject non-seekable streams up front (py3 only check).
        if sys.version_info >= (3,) and not stream.seekable():
            raise ValueError(_ERROR_PARALLEL_NOT_SEEKABLE)

        # The service only provides transactional MD5s for chunks under 4MB.
        # If validate_content is on, get only self.MAX_CHUNK_GET_SIZE for the first
        # chunk so a transactional MD5 can be retrieved.
        first_get_size = self.MAX_SINGLE_GET_SIZE if not validate_content else self.MAX_CHUNK_GET_SIZE

        initial_request_start = start_range if start_range else 0

        # NOTE(review): the subtraction below assumes start_range is set
        # whenever end_range is (as the docstring requires); end_range
        # without start_range would raise TypeError here -- confirm callers
        # honor that contract.
        if end_range and end_range - start_range < first_get_size:
            initial_request_end = end_range
        else:
            initial_request_end = initial_request_start + first_get_size - 1

        # Send a context object to make sure we always retry to the initial location
        operation_context = _OperationContext(location_lock=True)
        try:
            file = self._get_file(share_name,
                                  directory_name,
                                  file_name,
                                  start_range=initial_request_start,
                                  end_range=initial_request_end,
                                  validate_content=validate_content,
                                  timeout=timeout,
                                  _context=operation_context)

            # Parse the total file size and adjust the download size if ranges
            # were specified
            file_size = _parse_length_from_content_range(file.properties.content_range)
            if end_range:
                # Use the end_range unless it is over the end of the file
                download_size = min(file_size, end_range - start_range + 1)
            elif start_range:
                download_size = file_size - start_range
            else:
                download_size = file_size
        except AzureHttpError as ex:
            # 416 == Requested Range Not Satisfiable; on a zero-length file
            # any range request fails with it.
            if not start_range and ex.status_code == 416:
                # Get range will fail on an empty file. If the user did not
                # request a range, do a regular get request in order to get
                # any properties.
                file = self._get_file(share_name,
                                      directory_name,
                                      file_name,
                                      validate_content=validate_content,
                                      timeout=timeout,
                                      _context=operation_context)

                # Set the download size to empty
                download_size = 0
            else:
                raise ex

    # Mark the first progress chunk. If the file is small or this is a single
    # shot download, this is the only call
    if progress_callback:
        progress_callback(file.properties.content_length, download_size)

    # Write the content to the user stream
    # Clear file content since output has been written to user stream
    if file.content is not None:
        stream.write(file.content)
        file.content = None

    # If the file is small or single shot download was used, the download is
    # complete at this point. If file size is large, use parallel download.
    if file.properties.content_length != download_size:
        # At this point would like to lock on something like the etag so that
        # if the file is modified, we dont get a corrupted download. However,
        # this feature is not yet available on the file service.

        end_file = file_size
        if end_range:
            # Use the end_range unless it is over the end of the file
            end_file = min(file_size, end_range + 1)

        _download_file_chunks(
            self,
            share_name,
            directory_name,
            file_name,
            download_size,
            self.MAX_CHUNK_GET_SIZE,
            first_get_size,
            initial_request_end + 1,  # start where the first download ended
            end_file,
            stream,
            max_connections,
            progress_callback,
            validate_content,
            timeout,
            operation_context,
        )

        # Set the content length to the download size instead of the size of
        # the last range
        file.properties.content_length = download_size

        # Overwrite the content range to the user requested range
        file.properties.content_range = 'bytes {0}-{1}/{2}'.format(start_range, end_range, file_size)

        # Overwrite the content MD5 as it is the MD5 for the last range instead
        # of the stored MD5
        # TODO: Set to the stored MD5 when the service returns this
        file.properties.content_md5 = None

    return file
notifications. Returns an instance of :class:`File` with properties
and metadata.
:param str share_name:
Name of existing share.
:param str directory_name:
The path to the directory.
:param str file_name:
Name of existing file.
:param io.IOBase stream:
Opened file/stream to write to.
:param int start_range:
Start of byte range to use for downloading a section of the file.
If no end_range is given, all bytes after the start_range will be downloaded.
The start_range and end_range params are inclusive.
Ex: start_range=0, end_range=511 will download first 512 bytes of file.
:param int end_range:
End of byte range to use for downloading a section of the file.
If end_range is given, start_range must be provided.
The start_range and end_range params are inclusive.
Ex: start_range=0, end_range=511 will download first 512 bytes of file.
:param bool validate_content:
If set to true, validates an MD5 hash for each retrieved portion of
the file. This is primarily valuable for detecting bitflips on the wire
if using http instead of https as https (the default) will already
validate. Note that the service will only return transactional MD5s
for chunks 4MB or less so the first get request will be of size
self.MAX_CHUNK_GET_SIZE instead of self.MAX_SINGLE_GET_SIZE. If
self.MAX_CHUNK_GET_SIZE was set to greater than 4MB an error will be
thrown. As computing the MD5 takes processing time and more requests
will need to be done due to the reduced chunk size there may be some
increase in latency.
:param progress_callback:
Callback for progress with signature function(current, total)
where current is the number of bytes transfered so far, and total is
the size of the file if known.
:type progress_callback: callback function in format of func(current, total)
:param int max_connections:
If set to 2 or greater, an initial get will be done for the first
self.MAX_SINGLE_GET_SIZE bytes of the file. If this is the entire file,
the method returns at this point. If it is not, it will download the
remaining data parallel using the number of threads equal to
max_connections. Each chunk will be of size self.MAX_CHUNK_GET_SIZE.
If set to 1, a single large get request will be done. This is not
generally recommended but available if very few threads should be
used, network requests are very expensive, or a non-seekable stream
prevents parallel download. This may also be valuable if the file is
being concurrently modified to enforce atomicity or if many files are
expected to be empty as an extra request is required for empty files
if max_connections is greater than 1.
:param int timeout:
The timeout parameter is expressed in seconds. This method may make
multiple calls to the Azure service and the timeout will apply to
each call individually.
:return: A File with properties and metadata.
:rtype: :class:`~azure.storage.file.models.File` | entailment |
def get_file_to_bytes(self, share_name, directory_name, file_name,
                      start_range=None, end_range=None, validate_content=False,
                      progress_callback=None, max_connections=2, timeout=None):
    '''
    Downloads a file as an array of bytes, with automatic chunking and
    progress notifications. Returns an instance of :class:`File` with
    properties, metadata, and content.

    :param str share_name:
        Name of existing share.
    :param str directory_name:
        The path to the directory.
    :param str file_name:
        Name of existing file.
    :param int start_range:
        Start of the inclusive byte range to download. If no end_range is
        given, everything from start_range onwards is downloaded.
    :param int end_range:
        End of the inclusive byte range to download. Requires start_range.
        Ex: start_range=0, end_range=511 downloads the first 512 bytes.
    :param bool validate_content:
        If set to true, validates a transactional MD5 hash for each
        retrieved portion of the file. The service only returns such MD5s
        for chunks of 4MB or less, so the first request is then capped at
        self.MAX_CHUNK_GET_SIZE (which must not exceed 4MB or an error is
        thrown); expect some extra latency from the smaller chunks.
    :param progress_callback:
        Callback for progress with signature function(current, total)
        where current is the number of bytes transfered so far, and total
        is the size of the file if known.
    :type progress_callback: callback function in format of func(current, total)
    :param int max_connections:
        If 2 or greater, an initial get fetches the first
        self.MAX_SINGLE_GET_SIZE bytes and any remainder is downloaded in
        parallel with this many threads in self.MAX_CHUNK_GET_SIZE chunks.
        If 1, a single large get request is done instead.
    :param int timeout:
        The timeout parameter is expressed in seconds. This method may make
        multiple calls to the Azure service and the timeout will apply to
        each call individually.
    :return: A File with properties, content, and metadata.
    :rtype: :class:`~azure.storage.file.models.File`
    '''
    _validate_not_none('share_name', share_name)
    _validate_not_none('file_name', file_name)

    # Stream the download into an in-memory buffer, then surface the raw
    # bytes on the returned File object.
    buffer = BytesIO()
    downloaded_file = self.get_file_to_stream(
        share_name,
        directory_name,
        file_name,
        buffer,
        start_range=start_range,
        end_range=end_range,
        validate_content=validate_content,
        progress_callback=progress_callback,
        max_connections=max_connections,
        timeout=timeout)

    downloaded_file.content = buffer.getvalue()
    return downloaded_file
progress notifications. Returns an instance of :class:`File` with
properties, metadata, and content.
:param str share_name:
Name of existing share.
:param str directory_name:
The path to the directory.
:param str file_name:
Name of existing file.
:param int start_range:
Start of byte range to use for downloading a section of the file.
If no end_range is given, all bytes after the start_range will be downloaded.
The start_range and end_range params are inclusive.
Ex: start_range=0, end_range=511 will download first 512 bytes of file.
:param int end_range:
End of byte range to use for downloading a section of the file.
If end_range is given, start_range must be provided.
The start_range and end_range params are inclusive.
Ex: start_range=0, end_range=511 will download first 512 bytes of file.
:param bool validate_content:
If set to true, validates an MD5 hash for each retrieved portion of
the file. This is primarily valuable for detecting bitflips on the wire
if using http instead of https as https (the default) will already
validate. Note that the service will only return transactional MD5s
for chunks 4MB or less so the first get request will be of size
self.MAX_CHUNK_GET_SIZE instead of self.MAX_SINGLE_GET_SIZE. If
self.MAX_CHUNK_GET_SIZE was set to greater than 4MB an error will be
thrown. As computing the MD5 takes processing time and more requests
will need to be done due to the reduced chunk size there may be some
increase in latency.
:param progress_callback:
Callback for progress with signature function(current, total)
where current is the number of bytes transfered so far, and total is
the size of the file if known.
:type progress_callback: callback function in format of func(current, total)
:param int max_connections:
If set to 2 or greater, an initial get will be done for the first
self.MAX_SINGLE_GET_SIZE bytes of the file. If this is the entire file,
the method returns at this point. If it is not, it will download the
remaining data parallel using the number of threads equal to
max_connections. Each chunk will be of size self.MAX_CHUNK_GET_SIZE.
If set to 1, a single large get request will be done. This is not
generally recommended but available if very few threads should be
used, network requests are very expensive, or a non-seekable stream
prevents parallel download. This may also be valuable if the file is
being concurrently modified to enforce atomicity or if many files are
expected to be empty as an extra request is required for empty files
if max_connections is greater than 1.
:param int timeout:
The timeout parameter is expressed in seconds. This method may make
multiple calls to the Azure service and the timeout will apply to
each call individually.
:return: A File with properties, content, and metadata.
:rtype: :class:`~azure.storage.file.models.File` | entailment |
def get_file_to_text(
        self, share_name, directory_name, file_name, encoding='utf-8',
        start_range=None, end_range=None, validate_content=False,
        progress_callback=None, max_connections=2, timeout=None):
    '''
    Downloads a file as unicode text, with automatic chunking and progress
    notifications. Returns an instance of :class:`File` with properties,
    metadata, and content.

    :param str share_name:
        Name of existing share.
    :param str directory_name:
        The path to the directory.
    :param str file_name:
        Name of existing file.
    :param str encoding:
        Python encoding to use when decoding the file data.
    :param int start_range:
        Start of the inclusive byte range to download. If no end_range is
        given, everything from start_range onwards is downloaded.
    :param int end_range:
        End of the inclusive byte range to download. Requires start_range.
        Ex: start_range=0, end_range=511 downloads the first 512 bytes.
    :param bool validate_content:
        If set to true, validates a transactional MD5 hash for each
        retrieved portion of the file. The service only returns such MD5s
        for chunks of 4MB or less, so the first request is then capped at
        self.MAX_CHUNK_GET_SIZE (which must not exceed 4MB or an error is
        thrown); expect some extra latency from the smaller chunks.
    :param progress_callback:
        Callback for progress with signature function(current, total)
        where current is the number of bytes transfered so far, and total
        is the size of the file if known.
    :type progress_callback: callback function in format of func(current, total)
    :param int max_connections:
        If 2 or greater, an initial get fetches the first
        self.MAX_SINGLE_GET_SIZE bytes and any remainder is downloaded in
        parallel with this many threads in self.MAX_CHUNK_GET_SIZE chunks.
        If 1, a single large get request is done instead.
    :param int timeout:
        The timeout parameter is expressed in seconds. This method may make
        multiple calls to the Azure service and the timeout will apply to
        each call individually.
    :return: A File with properties, content, and metadata.
    :rtype: :class:`~azure.storage.file.models.File`
    '''
    _validate_not_none('share_name', share_name)
    _validate_not_none('file_name', file_name)
    _validate_not_none('encoding', encoding)

    # Fetch the raw bytes first, then decode them in place on the File.
    downloaded_file = self.get_file_to_bytes(
        share_name,
        directory_name,
        file_name,
        start_range=start_range,
        end_range=end_range,
        validate_content=validate_content,
        progress_callback=progress_callback,
        max_connections=max_connections,
        timeout=timeout)

    downloaded_file.content = downloaded_file.content.decode(encoding)
    return downloaded_file
notifications. Returns an instance of :class:`File` with properties,
metadata, and content.
:param str share_name:
Name of existing share.
:param str directory_name:
The path to the directory.
:param str file_name:
Name of existing file.
:param str encoding:
Python encoding to use when decoding the file data.
:param int start_range:
Start of byte range to use for downloading a section of the file.
If no end_range is given, all bytes after the start_range will be downloaded.
The start_range and end_range params are inclusive.
Ex: start_range=0, end_range=511 will download first 512 bytes of file.
:param int end_range:
End of byte range to use for downloading a section of the file.
If end_range is given, start_range must be provided.
The start_range and end_range params are inclusive.
Ex: start_range=0, end_range=511 will download first 512 bytes of file.
:param bool validate_content:
If set to true, validates an MD5 hash for each retrieved portion of
the file. This is primarily valuable for detecting bitflips on the wire
if using http instead of https as https (the default) will already
validate. Note that the service will only return transactional MD5s
for chunks 4MB or less so the first get request will be of size
self.MAX_CHUNK_GET_SIZE instead of self.MAX_SINGLE_GET_SIZE. If
self.MAX_CHUNK_GET_SIZE was set to greater than 4MB an error will be
thrown. As computing the MD5 takes processing time and more requests
will need to be done due to the reduced chunk size there may be some
increase in latency.
:param progress_callback:
Callback for progress with signature function(current, total)
where current is the number of bytes transfered so far, and total is
the size of the file if known.
:type progress_callback: callback function in format of func(current, total)
:param int max_connections:
If set to 2 or greater, an initial get will be done for the first
self.MAX_SINGLE_GET_SIZE bytes of the file. If this is the entire file,
the method returns at this point. If it is not, it will download the
remaining data parallel using the number of threads equal to
max_connections. Each chunk will be of size self.MAX_CHUNK_GET_SIZE.
If set to 1, a single large get request will be done. This is not
generally recommended but available if very few threads should be
used, network requests are very expensive, or a non-seekable stream
prevents parallel download. This may also be valuable if the file is
being concurrently modified to enforce atomicity or if many files are
expected to be empty as an extra request is required for empty files
if max_connections is greater than 1.
:param int timeout:
The timeout parameter is expressed in seconds. This method may make
multiple calls to the Azure service and the timeout will apply to
each call individually.
:return: A File with properties, content, and metadata.
:rtype: :class:`~azure.storage.file.models.File` | entailment |
def process_firmware_image(compact_firmware_file, ilo_object):
    """Processes the firmware file.

    Processing the firmware file entails extracting the firmware file from
    its compact format. Along with the raw (extracted) firmware file, this
    method also sends out information of whether or not the extracted
    firmware file
        a) needs to be uploaded to http store
        b) is extracted in reality or the file was already in raw format

    :param compact_firmware_file: firmware file to extract from
    :param ilo_object: ilo client object (ribcl/ris object)
    :raises: InvalidInputError, for unsupported file types or raw firmware
        file not found from compact format.
    :raises: ImageExtractionFailed, for extraction related issues
    :returns: core(raw) firmware file
    :returns: to_upload, boolean to indicate whether to upload or not
    :returns: is_extracted, boolean to indicate firmware image is actually
        extracted or not.
    """
    fw_img_extractor = firmware_controller.get_fw_extractor(
        compact_firmware_file)
    LOG.debug('Extracting firmware file: %s ...', compact_firmware_file)
    raw_fw_file_path, is_extracted = fw_img_extractor.extract()

    # Note(deray): Need to check if this processing is for RIS or RIBCL
    # based systems. For Gen9 machines (RIS based) the firmware file needs
    # to be on a http store, and hence requires the upload to happen for the
    # firmware file.
    to_upload = False
    # Use a raw string so '\d' is a regex digit class, not an (invalid)
    # string escape sequence; guard against models without a 'Gen<n>' token
    # instead of crashing with AttributeError on a None match.
    gen_match = re.search(r'Gen(\d+)', ilo_object.model)
    if gen_match and int(gen_match.group(1)) > 8:
        to_upload = True

    LOG.debug('Extracting firmware file: %s ... done', compact_firmware_file)
    msg = ('Firmware file %(fw_file)s is %(msg)s. Need hosting (on an http '
           'store): %(yes_or_no)s' %
           {'fw_file': compact_firmware_file,
            'msg': ('extracted. Extracted file: %s' % raw_fw_file_path
                    if is_extracted else 'already in raw format'),
            'yes_or_no': 'Yes' if to_upload else 'No'})
    LOG.info(msg)
    return raw_fw_file_path, to_upload, is_extracted
Processing the firmware file entails extracting the firmware file from its
compact format. Along with the raw (extracted) firmware file, this method
also sends out information of whether or not the extracted firmware file
a) needs to be uploaded to http store
b) is extracted in reality or the file was already in raw format
:param compact_firmware_file: firmware file to extract from
:param ilo_object: ilo client object (ribcl/ris object)
:raises: InvalidInputError, for unsupported file types or raw firmware
file not found from compact format.
:raises: ImageExtractionFailed, for extraction related issues
:returns: core(raw) firmware file
:returns: to_upload, boolean to indicate whether to upload or not
:returns: is_extracted, boolean to indicate firmware image is actually
extracted or not. | entailment |
def _get_hash_object(hash_algo_name):
"""Create a hash object based on given algorithm.
:param hash_algo_name: name of the hashing algorithm.
:raises: InvalidInputError, on unsupported or invalid input.
:returns: a hash object based on the given named algorithm.
"""
algorithms = (hashlib.algorithms_guaranteed if six.PY3
else hashlib.algorithms)
if hash_algo_name not in algorithms:
msg = ("Unsupported/Invalid hash name '%s' provided."
% hash_algo_name)
raise exception.InvalidInputError(msg)
return getattr(hashlib, hash_algo_name)() | Create a hash object based on given algorithm.
:param hash_algo_name: name of the hashing algorithm.
:raises: InvalidInputError, on unsupported or invalid input.
:returns: a hash object based on the given named algorithm. | entailment |
def hash_file(file_like_object, hash_algo='md5'):
    """Generate a hash for the contents of a file.

    It returns a hash of the file object as a string of double length,
    containing only hexadecimal digits. It supports all the algorithms
    hashlib does. The file is consumed in 32 KiB chunks so arbitrarily
    large files can be hashed without loading them into memory.

    :param file_like_object: file like object whose hash to be calculated.
    :param hash_algo: name of the hashing strategy, default being 'md5'.
    :raises: InvalidInputError, on unsupported or invalid input.
    :returns: a condensed digest of the bytes of contents.
    """
    digest = _get_hash_object(hash_algo)
    chunk = file_like_object.read(32768)
    while chunk:
        digest.update(chunk)
        chunk = file_like_object.read(32768)
    return digest.hexdigest()
It returns a hash of the file object as a string of double length,
containing only hexadecimal digits. It supports all the algorithms
hashlib does.
:param file_like_object: file like object whose hash to be calculated.
:param hash_algo: name of the hashing strategy, default being 'md5'.
:raises: InvalidInputError, on unsupported or invalid input.
:returns: a condensed digest of the bytes of contents. | entailment |
def verify_image_checksum(image_location, expected_checksum):
    """Verifies checksum (md5) of image file against the expected one.

    This method generates the checksum of the image file on the fly and
    verifies it against the expected checksum provided as argument.

    :param image_location: location of image file whose checksum is verified.
    :param expected_checksum: checksum to be checked against
    :raises: ImageRefValidationFailed, if invalid file path or
        verification fails.
    """
    try:
        with open(image_location, 'rb') as image_file:
            actual_checksum = hash_file(image_file)
    except IOError as exc:
        raise exception.ImageRefValidationFailed(image_href=image_location,
                                                 reason=exc)

    # Guard clause: nothing more to do when the checksums agree.
    if actual_checksum == expected_checksum:
        return

    msg = ('Error verifying image checksum. Image %(image)s failed to '
           'verify against checksum %(checksum)s. Actual checksum is: '
           '%(actual_checksum)s' %
           {'image': image_location, 'checksum': expected_checksum,
            'actual_checksum': actual_checksum})
    raise exception.ImageRefValidationFailed(image_href=image_location,
                                             reason=msg)
This method generates the checksum of the image file on the fly and
verifies it against the expected checksum provided as argument.
:param image_location: location of image file whose checksum is verified.
:param expected_checksum: checksum to be checked against
:raises: ImageRefValidationFailed, if invalid file path or
verification fails. | entailment |
def validate_href(image_href):
    """Validate HTTP image reference.

    :param image_href: Image reference.
    :raises: exception.ImageRefValidationFailed if HEAD request failed or
        returned response code not equal to 200.
    :returns: Response to HEAD request.
    """
    try:
        response = requests.head(image_href)
    except requests.RequestException as exc:
        raise exception.ImageRefValidationFailed(image_href=image_href,
                                                 reason=exc)

    # A non-200 answer means the reference is not a usable image location.
    if response.status_code != http_client.OK:
        raise exception.ImageRefValidationFailed(
            image_href=image_href,
            reason=("Got HTTP code %s instead of 200 in response to "
                    "HEAD request." % response.status_code))

    return response
:param image_href: Image reference.
:raises: exception.ImageRefValidationFailed if HEAD request failed or
returned response code not equal to 200.
:returns: Response to HEAD request. | entailment |
def apply_bios_properties_filter(settings, filter_to_be_applied):
    """Applies the filter to return the dict of filtered BIOS properties.

    :param settings: dict of BIOS settings on which filter to be applied.
    :param filter_to_be_applied: list of keys to be applied as filter.
    :returns: A dictionary of filtered BIOS settings.
    """
    # An empty/None settings dict or filter means there is nothing to
    # filter; hand the input back unchanged.
    if not settings or not filter_to_be_applied:
        return settings

    filtered_settings = {}
    for key in filter_to_be_applied:
        if key in settings:
            filtered_settings[key] = settings[key]
    return filtered_settings
:param settings: dict of BIOS settings on which filter to be applied.
:param filter_to_be_applied: list of keys to be applied as filter.
:returns: A dictionary of filtered BIOS settings. | entailment |
def _insert_or_replace_entity(entity, require_encryption=False,
                              key_encryption_key=None, encryption_resolver=None):
    '''
    Constructs an insert or replace entity request.
    '''
    _validate_entity(entity, key_encryption_key is not None)
    _validate_encryption_required(require_encryption, key_encryption_key)

    # Default JSON content-type/accept headers for the table service.
    headers = {
        _DEFAULT_CONTENT_TYPE_HEADER[0]: _DEFAULT_CONTENT_TYPE_HEADER[1],
        _DEFAULT_ACCEPT_HEADER[0]: _DEFAULT_ACCEPT_HEADER[1],
    }

    request = HTTPRequest()
    request.method = 'PUT'
    request.headers = headers

    # Encrypt the entity client-side before serialization when a key is
    # supplied.
    if key_encryption_key:
        entity = _encrypt_entity(entity, key_encryption_key, encryption_resolver)
    request.body = _get_request_body(_convert_entity_to_json(entity))
    return request
def accounts(self):
    """Property to provide instance of HPEAccountCollection"""
    accounts_path = utils.get_subresource_path_by(self, 'Accounts')
    return account.HPEAccountCollection(
        self._conn, accounts_path,
        redfish_version=self.redfish_version)
def update_credentials(self, password):
    """Update credentials of a redfish system

    :param password: password to be updated
    """
    # PATCH only the password attribute on this resource.
    payload = {'Password': password}
    self._conn.patch(self.path, data=payload)
:param password: password to be updated | entailment |
def get_member_details(self, username):
    """Returns the HPEAccount object

    :param username: username of account
    :returns: HPEAccount object if criterion matches, None otherwise
    """
    # Linear scan of the collection; usernames are expected to be unique,
    # so the first match wins.
    for member in self.get_members():
        if member.username == username:
            return member
    return None
:param username: username of account
:returns: HPEAccount object if criterion matches, None otherwise | entailment |
def _get_media(media_types):
"""Helper method to map the media types."""
get_mapped_media = (lambda x: maps.VIRTUAL_MEDIA_TYPES_MAP[x]
if x in maps.VIRTUAL_MEDIA_TYPES_MAP else None)
return list(map(get_mapped_media, media_types)) | Helper method to map the media types. | entailment |
def _get_action_element(self, action_type):
"""Helper method to return the action object."""
action = eval("self._hpe_actions." + action_type + "_vmedia")
if not action:
if action_type == "insert":
action_path = '#HpeiLOVirtualMedia.InsertVirtualMedia'
else:
action_path = '#HpeiLOVirtualMedia.EjectVirtualMedia'
raise exception.MissingAttributeError(
attribute=action_path,
resource=self._path)
return action | Helper method to return the action object. | entailment |
def insert_media(self, url):
"""Inserts Virtual Media to the device
:param url: URL to image.
:raises: SushyError, on an error from iLO.
"""
try:
super(VirtualMedia, self).insert_media(url, write_protected=True)
except sushy_exceptions.SushyError:
target_uri = self._get_action_element('insert').target_uri
data = {'Image': url}
self._conn.post(target_uri, data=data) | Inserts Virtual Media to the device
:param url: URL to image.
:raises: SushyError, on an error from iLO. | entailment |
def eject_media(self):
"""Ejects Virtual Media.
:raises: SushyError, on an error from iLO.
"""
try:
super(VirtualMedia, self).eject_media()
except sushy_exceptions.SushyError:
target_uri = self._get_action_element('eject').target_uri
self._conn.post(target_uri, data={}) | Ejects Virtual Media.
:raises: SushyError, on an error from iLO. | entailment |
def set_vm_status(self, boot_on_next_reset):
"""Set the Virtual Media drive status.
:param boot_on_next_reset: boolean value
:raises: SushyError, on an error from iLO.
"""
data = {
"Oem": {
"Hpe": {
"BootOnNextServerReset": boot_on_next_reset
}
}
}
self._conn.patch(self.path, data=data) | Set the Virtual Media drive status.
:param boot_on_next_reset: boolean value
:raises: SushyError, on an error from iLO. | entailment |
def get_member_device(self, device):
"""Returns the given virtual media device object.
:param device: virtual media device to be queried
:returns virtual media device object.
"""
for vmedia_device in self.get_members():
if device in vmedia_device.media_types:
return vmedia_device | Returns the given virtual media device object.
:param device: virtual media device to be queried
:returns virtual media device object. | entailment |
def _get_entity(partition_key, row_key, select, accept):
'''
Constructs a get entity request.
'''
_validate_not_none('partition_key', partition_key)
_validate_not_none('row_key', row_key)
_validate_not_none('accept', accept)
request = HTTPRequest()
request.method = 'GET'
request.headers = [('Accept', _to_str(accept))]
request.query = [('$select', _to_str(select))]
return request | Constructs a get entity request. | entailment |
def _insert_entity(entity):
'''
Constructs an insert entity request.
'''
_validate_entity(entity)
request = HTTPRequest()
request.method = 'POST'
request.headers = [_DEFAULT_CONTENT_TYPE_HEADER,
_DEFAULT_PREFER_HEADER,
_DEFAULT_ACCEPT_HEADER]
request.body = _get_request_body(_convert_entity_to_json(entity))
return request | Constructs an insert entity request. | entailment |
def _merge_entity(entity, if_match):
'''
Constructs a merge entity request.
'''
_validate_not_none('if_match', if_match)
_validate_entity(entity)
request = HTTPRequest()
request.method = 'MERGE'
request.headers = [_DEFAULT_CONTENT_TYPE_HEADER,
_DEFAULT_ACCEPT_HEADER,
('If-Match', _to_str(if_match))]
request.body = _get_request_body(_convert_entity_to_json(entity))
return request | Constructs a merge entity request. | entailment |
def _delete_entity(partition_key, row_key, if_match):
'''
Constructs a delete entity request.
'''
_validate_not_none('if_match', if_match)
_validate_not_none('partition_key', partition_key)
_validate_not_none('row_key', row_key)
request = HTTPRequest()
request.method = 'DELETE'
request.headers = [_DEFAULT_ACCEPT_HEADER,
('If-Match', _to_str(if_match))]
return request | Constructs a delete entity request. | entailment |
def _insert_or_replace_entity(entity):
'''
Constructs an insert or replace entity request.
'''
_validate_entity(entity)
request = HTTPRequest()
request.method = 'PUT'
request.headers = [_DEFAULT_CONTENT_TYPE_HEADER,
_DEFAULT_ACCEPT_HEADER]
request.body = _get_request_body(_convert_entity_to_json(entity))
return request | Constructs an insert or replace entity request. | entailment |
def find_executable(executable_name):
"""Tries to find executable in PATH environment
It uses ``shutil.which`` method in Python3 and
``distutils.spawn.find_executable`` method in Python2.7 to find the
absolute path to the 'name' executable.
:param executable_name: name of the executable
:returns: Returns the absolute path to the executable or None if not found.
"""
if six.PY3:
executable_abs = shutil.which(executable_name)
else:
import distutils.spawn
executable_abs = distutils.spawn.find_executable(executable_name)
return executable_abs | Tries to find executable in PATH environment
It uses ``shutil.which`` method in Python3 and
``distutils.spawn.find_executable`` method in Python2.7 to find the
absolute path to the 'name' executable.
:param executable_name: name of the executable
:returns: Returns the absolute path to the executable or None if not found. | entailment |
def check_firmware_update_component(func):
"""Checks the firmware update component."""
@six.wraps(func)
def wrapper(self, filename, component_type):
"""Wrapper around ``update_firmware`` call.
:param filename: location of the raw firmware file.
:param component_type: Type of component to be applied to.
"""
component_type = component_type and component_type.lower()
if (component_type not in SUPPORTED_FIRMWARE_UPDATE_COMPONENTS):
msg = ("Got invalid component type for firmware update: "
"``update_firmware`` is not supported on %(component)s" %
{'component': component_type})
LOG.error(self._(msg)) # noqa
raise exception.InvalidInputError(msg)
return func(self, filename, component_type)
return wrapper | Checks the firmware update component. | entailment |
def get_fw_extractor(fw_file):
"""Gets the firmware extractor object fine-tuned for specified type
:param fw_file: compact firmware file to be extracted from
:raises: InvalidInputError, for unsupported file types
:returns: FirmwareImageExtractor object
"""
fw_img_extractor = FirmwareImageExtractor(fw_file)
extension = fw_img_extractor.fw_file_ext.lower()
if extension == '.scexe':
# assign _do_extract attribute to refer to _extract_scexe_file
fw_img_extractor._do_extract = types.MethodType(
_extract_scexe_file, fw_img_extractor)
elif extension == '.rpm':
# assign _do_extract attribute to refer to _extract_rpm_file
fw_img_extractor._do_extract = types.MethodType(
_extract_rpm_file, fw_img_extractor)
elif extension in RAW_FIRMWARE_EXTNS:
# Note(deray): Assigning ``extract`` attribute to return
# 1. the firmware file itself
# 2. boolean (False) to indicate firmware file is not extracted
def dummy_extract(self):
"""Dummy (no-op) extract method
:returns: the same firmware file with the complete path
:returns: boolean(False) to indicate that a new file is not
generated.
"""
return fw_img_extractor.fw_file, False
fw_img_extractor.extract = types.MethodType(
dummy_extract, fw_img_extractor)
else:
raise exception.InvalidInputError(
'Unexpected compact firmware file type: %s' % fw_file)
return fw_img_extractor | Gets the firmware extractor object fine-tuned for specified type
:param fw_file: compact firmware file to be extracted from
:raises: InvalidInputError, for unsupported file types
:returns: FirmwareImageExtractor object | entailment |
def _extract_scexe_file(self, target_file, extract_path):
"""Extracts the scexe file.
:param target_file: the firmware file to be extracted from
:param extract_path: the path where extraction is supposed to happen
"""
# Command to extract the smart component file.
unpack_cmd = '--unpack=' + extract_path
# os.path.isfile(target_file)
cmd = [target_file, unpack_cmd]
out, err = utils.trycmd(*cmd) | Extracts the scexe file.
:param target_file: the firmware file to be extracted from
:param extract_path: the path where extraction is supposed to happen | entailment |
def _extract_rpm_file(self, target_file, extract_path):
"""Extracts the rpm file.
:param target_file: the firmware file to be extracted from
:param extract_path: the path where extraction is supposed to happen
:raises: ImageExtractionFailed, if any issue with extraction
"""
if not os.path.exists(extract_path):
os.makedirs(extract_path)
os.chdir(extract_path)
if find_executable('rpm2cpio') is None:
raise exception.ImageExtractionFailed(
image_ref=target_file, reason='Command `rpm2cpio` not found.')
if find_executable('cpio') is None:
raise exception.ImageExtractionFailed(
image_ref=target_file, reason='Command `cpio` not found.')
try:
rpm2cpio = subprocess.Popen('rpm2cpio ' + target_file,
shell=True,
stdout=subprocess.PIPE)
cpio = subprocess.Popen('cpio -idm', shell=True,
stdin=rpm2cpio.stdout)
out, err = cpio.communicate()
except (OSError, ValueError) as e:
raise exception.ImageExtractionFailed(
image_ref=target_file,
reason='Unexpected error in extracting file. ' + str(e)) | Extracts the rpm file.
:param target_file: the firmware file to be extracted from
:param extract_path: the path where extraction is supposed to happen
:raises: ImageExtractionFailed, if any issue with extraction | entailment |
def _get_firmware_file(path):
"""Gets the raw firmware file
Gets the raw firmware file from the extracted directory structure
:param path: the directory structure to search for
:returns: the raw firmware file with the complete path
"""
for dirpath, dirnames, filenames in os.walk(path):
for filename in filenames:
file_name, file_ext = os.path.splitext(os.path.basename(filename))
if file_ext in RAW_FIRMWARE_EXTNS:
# return filename
return os.path.join(dirpath, filename) | Gets the raw firmware file
Gets the raw firmware file from the extracted directory structure
:param path: the directory structure to search for
:returns: the raw firmware file with the complete path | entailment |
def _get_firmware_file_in_new_path(searching_path):
"""Gets the raw firmware file in a new path
Gets the raw firmware file from the extracted directory structure
and creates a hard link to that in a file path and cleans up the
lookup extract path.
:param searching_path: the directory structure to search for
:returns: the raw firmware file with the complete new path
"""
firmware_file_path = _get_firmware_file(searching_path)
if not firmware_file_path:
return None
# Note(deray): the path of the new firmware file will be of the form:
#
# [TEMP_DIR]/xxx-xxx_actual_firmware_filename
#
# e.g. /tmp/77e8f689-f32c-4727-9fc3-a7dacefe67e4_ilo4_210.bin
file_name, file_ext_with_dot = common.get_filename_and_extension_of(
firmware_file_path)
new_firmware_file_path = os.path.join(
tempfile.gettempdir(), str(uuid.uuid4()) + '_' +
file_name + file_ext_with_dot)
# create a hard link to the raw firmware file
os.link(firmware_file_path, new_firmware_file_path)
return new_firmware_file_path | Gets the raw firmware file in a new path
Gets the raw firmware file from the extracted directory structure
and creates a hard link to that in a file path and cleans up the
lookup extract path.
:param searching_path: the directory structure to search for
:returns: the raw firmware file with the complete new path | entailment |
def upload_file_to(self, addressinfo, timeout):
"""Uploads the raw firmware file to iLO
Uploads the raw firmware file (already set as attribute in
FirmwareImageControllerBase constructor) to iLO, whose address
information is passed to this method.
:param addressinfo: tuple of hostname and port of the iLO
:param timeout: timeout in secs, used for connecting to iLO
:raises: IloInvalidInputError, if raw firmware file not found
:raises: IloError, for other internal problems
:returns: the cookie so sent back from iLO on successful upload
"""
self.hostname, self.port = addressinfo
self.timeout = timeout
filename = self.fw_file
firmware = open(filename, 'rb').read()
# generate boundary
boundary = b('------hpiLO3t' +
str(random.randint(100000, 1000000)) + 'z')
while boundary in firmware:
boundary = b('------hpiLO3t' +
str(random.randint(100000, 1000000)) + 'z')
# generate body parts
parts = [
# body1
b("--") + boundary +
b("""\r\nContent-Disposition: form-data; """
"""name="fileType"\r\n\r\n"""),
# body2
b("\r\n--") + boundary +
b('''\r\nContent-Disposition: form-data; name="fwimgfile"; '''
'''filename="''') +
b(filename) +
b('''"\r\nContent-Type: application/octet-stream\r\n\r\n'''),
# firmware image
firmware,
# body3
b("\r\n--") + boundary + b("--\r\n"),
]
total_bytes = sum([len(x) for x in parts])
sock = self._get_socket()
# send the firmware image
sock.write(b(self.HTTP_UPLOAD_HEADER %
(total_bytes, boundary.decode('ascii'))))
for part in parts:
sock.write(part)
data = ''
try:
while True:
d = sock.read()
data += d.decode('latin-1')
if not d:
break
except socket.sslerror: # Connection closed
e = sys.exc_info()[1]
if not data:
raise exception.IloConnectionError(
"Communication with %(hostname)s:%(port)d failed: "
"%(error)s" % {'hostname': self.hostname,
'port': self.port, 'error': str(e)})
# Received len(data) bytes
cookie_match = re.search('Set-Cookie: *(.*)', data)
if not cookie_match:
raise exception.IloError("Uploading of file: %s failed due "
"to unknown reason." % filename)
# return the cookie
return cookie_match.group(1) | Uploads the raw firmware file to iLO
Uploads the raw firmware file (already set as attribute in
FirmwareImageControllerBase constructor) to iLO, whose address
information is passed to this method.
:param addressinfo: tuple of hostname and port of the iLO
:param timeout: timeout in secs, used for connecting to iLO
:raises: IloInvalidInputError, if raw firmware file not found
:raises: IloError, for other internal problems
:returns: the cookie so sent back from iLO on successful upload | entailment |
def _get_socket(self, sslversion=ssl.PROTOCOL_TLSv1):
"""Sets up an https connection and do an HTTP/raw socket request
:param sslversion: version of ssl session
:raises: IloConnectionError, for connection failures
:returns: ssl wrapped socket object
"""
err = None
sock = None
try:
for res in socket.getaddrinfo(
self.hostname, self.port, 0, socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
try:
sock = socket.socket(af, socktype, proto)
sock.settimeout(self.timeout)
# Connecting to {self.hostname} at port {self.port}
sock.connect(sa)
except socket.timeout:
if sock is not None:
sock.close()
err = exception.IloConnectionError(
"Timeout connecting to %(hostname)s:%(port)d"
% {'hostname': self.hostname, 'port': self.port})
except socket.error:
if sock is not None:
sock.close()
e = sys.exc_info()[1]
err = exception.IloConnectionError(
"Error connecting to %(hostname)s:%(port)d : %(error)s"
% {'hostname': self.hostname, 'port': self.port,
'error': str(e)})
except Exception:
raise exception.IloConnectionError(
"Unable to resolve %s" % self.hostname)
if err is not None:
raise err
# wrapping the socket over ssl session
try:
return ssl.wrap_socket(sock, ssl_version=sslversion)
except socket.sslerror:
e = sys.exc_info()[1]
msg = (getattr(e, 'reason', None) or
getattr(e, 'message', None))
# Some older iLO s don't support TLSv1, retry with SSLv3
if ('wrong version number' in msg) and (
sslversion == ssl.PROTOCOL_TLSv1):
return self._get_socket(ssl.PROTOCOL_SSLv3)
raise exception.IloConnectionError(
"Cannot establish ssl session with %(hostname)s:%(port)d : "
"%(error)s" % {'hostname': self.hostname, 'port': self.port,
'error': str(e)}) | Sets up an https connection and do an HTTP/raw socket request
:param sslversion: version of ssl session
:raises: IloConnectionError, for connection failures
:returns: ssl wrapped socket object | entailment |
def extract(self):
"""Extracts the raw firmware file from its compact format
Extracts the raw firmware file from its compact file format (already
set as attribute in FirmwareImageControllerBase constructor).
:raises: InvalidInputError, if raw firmware file not found
:raises: ImageExtractionFailed, for extraction related issues
:returns: the raw firmware file with the complete path
:returns: boolean(True) to indicate that a new file got generated
after successful extraction.
"""
target_file = self.fw_file
common.add_exec_permission_to(target_file)
# create a temp directory where the extraction will occur
temp_dir = tempfile.mkdtemp()
extract_path = os.path.join(temp_dir, self.fw_filename)
try:
self._do_extract(target_file, extract_path)
except exception.ImageExtractionFailed:
# clean up the partial extracted content, if any,
# along with temp dir and re-raise the exception
shutil.rmtree(temp_dir, ignore_errors=True)
raise
# creating a new hard link to the core firmware file
firmware_file_path = _get_firmware_file_in_new_path(extract_path)
# delete the entire extracted content along with temp dir.
shutil.rmtree(temp_dir, ignore_errors=True)
if not firmware_file_path:
raise exception.InvalidInputError(
"Raw firmware file not found in: '%s'" % target_file)
return firmware_file_path, True | Extracts the raw firmware file from its compact format
Extracts the raw firmware file from its compact file format (already
set as attribute in FirmwareImageControllerBase constructor).
:raises: InvalidInputError, if raw firmware file not found
:raises: ImageExtractionFailed, for extraction related issues
:returns: the raw firmware file with the complete path
:returns: boolean(True) to indicate that a new file got generated
after successful extraction. | entailment |
def _generic_format(self, raid_config, controller=None):
"""Convert redfish data of current raid config to generic format.
:param raid_config: Raid configuration dictionary
:param controller: Array controller model in post_create read else
None
:returns: current raid config.
"""
logical_drives = raid_config["LogicalDrives"]
logical_disks = []
controller = controller
for ld in logical_drives:
prop = {'size_gb': ld['CapacityGiB'],
'raid_level': ld['Raid'].strip('Raid'),
'root_device_hint': {
'wwn': '0x' + ld['VolumeUniqueIdentifier']},
'controller': controller,
'physical_disks': ld['DataDrives'],
'volume_name': ld['LogicalDriveName']}
logical_disks.append(prop)
return logical_disks | Convert redfish data of current raid config to generic format.
:param raid_config: Raid configuration dictionary
:param controller: Array controller model in post_create read else
None
:returns: current raid config. | entailment |
def _check_smart_storage_message(self):
"""Check for smart storage message.
:returns: result, raid_message
"""
ssc_mesg = self.smart_storage_config_message
result = True
raid_message = ""
for element in ssc_mesg:
if "Success" not in element['MessageId']:
result = False
raid_message = element['MessageId']
return result, raid_message | Check for smart storage message.
:returns: result, raid_message | entailment |
def read_raid(self, controller=None):
"""Get the current RAID configuration from the system.
:param controller: If controller model its post-create read else
post-delete
:returns: current raid config.
"""
if controller:
if not self.logical_drives:
msg = ('No logical drives found on the controller')
LOG.debug(msg)
raise exception.IloLogicalDriveNotFoundError(msg)
raid_op = 'create_raid'
else:
raid_op = 'delete_raid'
result, raid_message = self._check_smart_storage_message()
if result:
configured_raid_settings = self._conn.get(self.settings_uri)
raid_data = {
'logical_disks': self._generic_format(
configured_raid_settings.json(), controller=controller)}
return raid_data
else:
if self.physical_drives is None or not raid_message:
# This controller is not configured or controller
# not used in raid operation
return
else:
msg = ('Failed to perform the %(opr)s operation '
'successfully. Error - %(error)s'
% {'opr': raid_op, 'error': str(raid_message)})
raise exception.IloError(msg) | Get the current RAID configuration from the system.
:param controller: If controller model its post-create read else
post-delete
:returns: current raid config. | entailment |
def delete_raid(self):
"""Clears the RAID configuration from the system.
"""
if not self.logical_drives:
msg = ('No logical drives found on the controller '
'%(controller)s' % {'controller': str(self.controller_id)})
LOG.debug(msg)
raise exception.IloLogicalDriveNotFoundError(msg)
lds = [{
'Actions': [{"Action": "LogicalDriveDelete"}],
'VolumeUniqueIdentifier':
logical_drive.volume_unique_identifier}
for logical_drive in self.logical_drives]
data = {'LogicalDrives': lds, 'DataGuard': 'Permissive'}
self._conn.put(self.settings_uri, data=data) | Clears the RAID configuration from the system. | entailment |
def create_raid(self, raid_config):
"""Create the raid configuration on the hardware.
:param raid_config: A dictionary containing target raid configuration
data. This data structure should be as follows:
raid_config = {'logical_disks': [{'raid_level': 1,
'size_gb': 100, 'physical_disks': ['6I:1:5'],
'controller': 'HPE Smart Array P408i-a SR Gen10'},
<info-for-logical-disk-2>]}
"""
manager.validate(raid_config)
logical_drives = raid_config['logical_disks']
redfish_logical_disk = []
for ld in logical_drives:
ld_attr = {"Raid": "Raid" + ld["raid_level"]}
ld_attr[
"CapacityGiB"] = -1 if ld[
"size_gb"] == "MAX" else int(ld["size_gb"])
if 'physical_disks' in ld:
ld_attr["DataDrives"] = ld["physical_disks"]
else:
datadrives = {}
if 'number_of_physical_disks' in ld:
datadrives["DataDriveCount"] = (
ld["number_of_physical_disks"])
else:
datadrives["DataDriveCount"] = (constants.
RAID_LEVEL_MIN_DISKS
[ld["raid_level"]])
if 'disk_type' in ld:
datadrives["DataDriveMediaType"] = ld["disk_type"]
if 'interface_type' in ld:
datadrives["DataDriveInterfaceType"] = ld["interface_type"]
ld_attr["DataDrives"] = datadrives
if 'volume_name' in ld:
ld_attr["LogicalDriveName"] = ld["volume_name"]
redfish_logical_disk.append(ld_attr)
data = {
"DataGuard": "Disabled",
"LogicalDrives": redfish_logical_disk
}
self._conn.put(self.settings_uri, data=data) | Create the raid configuration on the hardware.
:param raid_config: A dictionary containing target raid configuration
data. This data structure should be as follows:
raid_config = {'logical_disks': [{'raid_level': 1,
'size_gb': 100, 'physical_disks': ['6I:1:5'],
'controller': 'HPE Smart Array P408i-a SR Gen10'},
<info-for-logical-disk-2>]} | entailment |
def _readFile(self, fname, sldir):
'''
Private method that reads in the data file and organizes it
within this object.
'''
if sldir.endswith('/'):
fname = str(sldir)+str(fname)
else:
fname = str(sldir)+'/'+str(fname)
f=open(fname,'r')
# read header line
line=f.readline()
cols = []
ispec = 0
for i in range(1,len(line.split('|'))):
col = line.split('|')[i].strip()
if '-' in col:
ispec += 1
col = col.split('-')[1]
cols.append(col)
col_num={}
col_tot = len(cols)
print('number of species: ', str(ispec))
print('number of cols: ', str(col_tot))
col_num={}
for a,b in zip(cols,list(range(col_tot))):
col_num[a]=b
# read remainder of the file
lines=f.readlines()
data=[]
for i in range(len(lines)):
v=lines[i].split()
vv=array(v,dtype='float')
data.append(vv)
ilines=i
print("There are "+str(ilines)+" time steps found.")
return data,col_num,cols,col_tot,ilines | Private method that reads in the data file and organizes it
within this object. | entailment |
def get(self, col_str):
'''
get one data column with the data
Parameters
----------
col_str : string
One of the column strings in self.cols.
'''
data_column=zeros(self.ilines)
for i in range(self.ilines):
data_column[i]=self.data[i][self.col_num[col_str]]
return data_column | get one data column with the data
Parameters
----------
col_str : string
One of the column strings in self.cols. | entailment |
def plot_xtime(self, y, x='time', label='default', labelx=None,
labely=None ,title=None, shape='.', logx=False,
logy=True, base=10):
'''
make a simple plot of two columns against each other.
An example would be instance.plot_xtime('PB206', label='PB206 vs t_y').
Recommend using the plot function DataPlot.plot(); it has more
functionality.
Parameters
----------
Y : string
Column on Y-axis.
X : string, optional
Column on X-axis. The default is "time".
label : string, optional
Legend label. The default is "default".
labelX : string, optional
The label on the X axis. The default is None.
labelY : string, optional
The label on the Y axis. The default is None.
title : string, optional
The Title of the Graph. The default is None.
shape : string, optional
What shape and colour the user would like their plot in.
The default is '.'.
logX : boolean, optional
A boolean of whether the user wants the x axis
logarithmically. The default is False.
logY : boolean, optional
A boolean of whether the user wants the Y axis
logarithmically. The default is True.
base : integer, optional
The base of the logarithm. The default is 10.
Notes
-----
For all possible choices visit,
<http://matplotlib.sourceforge.net/api/pyplot_api.html#matplotlib.pyplot.plot>
'''
if label is 'default':
lab_str=y
else:
lab_str=label
try:
self.get(x)
except KeyError:
x='age'
DataPlot.plot(self,x,y,legend=lab_str,labelx=labelx, labely=labely,
title=title, shape=shape,logx=logx, logy=logy, base=base)
'''
print X,Y
xdat=self.get(X)
ydat=self.get(Y)
self.xdat = xdat
self.ydat = ydat
plot(xdat,log10(ydat),label=lab_str)
legend()
''' | make a simple plot of two columns against each other.
An example would be instance.plot_xtime('PB206', label='PB206 vs t_y').
Recommend using the plot function DataPlot.plot(); it has more
functionality.
Parameters
----------
Y : string
Column on Y-axis.
X : string, optional
Column on X-axis. The default is "time".
label : string, optional
Legend label. The default is "default".
labelX : string, optional
The label on the X axis. The default is None.
labelY : string, optional
The label on the Y axis. The default is None.
title : string, optional
The Title of the Graph. The default is None.
shape : string, optional
What shape and colour the user would like their plot in.
The default is '.'.
logX : boolean, optional
A boolean of whether the user wants the x axis
logarithmically. The default is False.
logY : boolean, optional
A boolean of whether the user wants the Y axis
logarithmically. The default is True.
base : integer, optional
The base of the logarithm. The default is 10.
Notes
-----
For all possible choices visit,
<http://matplotlib.sourceforge.net/api/pyplot_api.html#matplotlib.pyplot.plot> | entailment |
def getCycleData(self, attri, fname, numtype='cycNum'):
"""
In this method a column of data for the associated cycle
attribute is returned.
Parameters
----------
attri : string
The name of the attribute we are looking for.
fname : string
The name of the file we are getting the data from or the
cycle number found in the filename.
numtype : string, optional
Determines whether fname is the name of a file or, the
cycle number. If it is 'file' it will then interpret it as
a file, if it is 'cycNum' it will then interpret it as a
cycle number. The default is "cycNum".
"""
fname=self.findFile(fname,numtype)
if self.inputdir == '':
self.inputdir = self.sldir # This chunk of code changes into the directory where fname is,
os.chdir(self.inputdir) # and appends a '/' to the directory title so it accesses the
self.sldir=os.getcwd() + '/' # file correctly
f=open(fname,'r')
lines=f.readlines()
if self.inputdir != './': #This chunk of code changes back into the directory you started in.
os.chdir(self.startdir)
self.sldir = self.inputdir
for i in range(len(lines)):
lines[i]=lines[i].strip()
for i in range(len(lines)):
if lines[i].startswith('#'):
lines[i]=lines[i].strip('#')
tmp=lines[i].split()
tmp1=[]
for j in range(len(tmp)):
if tmp[j] != '=' or '':
tmp1.append(tmp[j])
tmp=tmp1
for j in range(len(tmp)):
if tmp[j]== attri:
try:
if '.' in tmp[j+1]:
return float(tmp[j+1])
else:
return int(tmp[j+1])
except ValueError:
return str(tmp[j+1])
elif lines[i].startswith('H'):
continue
else:
print('This cycle attribute does not exist')
print('Returning None')
return None | In this method a column of data for the associated cycle
attribute is returned.
Parameters
----------
attri : string
The name of the attribute we are looking for.
fname : string
The name of the file we are getting the data from or the
cycle number found in the filename.
numtype : string, optional
Determines whether fname is the name of a file or, the
cycle number. If it is 'file' it will then interpret it as
a file, if it is 'cycNum' it will then interpret it as a
cycle number. The default is "cycNum". | entailment |
def getColData(self, attri, fname, numtype='cycNum'):
"""
In this method a column of data for the associated column
attribute is returned.
Parameters
----------
attri : string
The name of the attribute we are looking for.
fname : string
The name of the file we are getting the data from or the
cycle number found in the filename.
numtype : string, optional
Determines whether fname is the name of a file or, the
cycle number. If it is 'file' it will then interpret it as
a file, if it is 'cycNum' it will then interpret it as a
cycle number. The default is "cycNum".
"""
fname=self.findFile(fname,numtype)
f=open(fname,'r')
for i in range(self.index+1):
f.readline()
lines=f.readlines()
for i in range(len(lines)):
lines[i]=lines[i].strip()
lines[i]=lines[i].split()
index=0
data=[]
while index < len (self.dcols):
if attri== self.dcols[index]:
break
index+=1
for i in range(len(lines)):
if index==5 and len(lines[i])==7:
data.append(str(lines[i][index].capitalize())+'-'\
+str(lines[i][index+1]))
elif index==5 and len(lines[i])!=7:
tmp=str(lines[i][index])
if tmp[len(tmp)-1].isdigit():
tmp1=tmp[0]+tmp[1]
tmp1=tmp1.capitalize()
tmp2=''
for j in range(len(tmp)):
if j == 0 or j == 1:
continue
tmp2+=tmp[j]
data.append(tmp1+'-'+tmp2)
elif tmp=='PROT':
data.append('H-1')
elif tmp==('NEUT'or'NEUTR'or'nn'or'N 1'or'N-1'):
data.append('N-1')
else:
data.append(tmp)
elif index==0:
data.append(int(lines[i][index]))
else:
data.append(float(lines[i][index]))
return array(data) | In this method a column of data for the associated column
attribute is returned.
Parameters
----------
attri : string
The name of the attribute we are looking for.
fname : string
The name of the file we are getting the data from or the
cycle number found in the filename.
numtype : string, optional
Determines whether fname is the name of a file or, the
cycle number. If it is 'file' it will then interpret it as
a file, if it is 'cycNum' it will then interpret it as a
cycle number. The default is "cycNum". | entailment |
def getElement(self, attri, fname, numtype='cycNum'):
    '''
    Return a particular row of data for a particular element/isotope
    name, instead of a whole column.

    Parameters
    ----------
    attri : string
        The name of the attribute we are looking for.  A complete
        list of them can be obtained by calling
        >>> get('element_name')
    fname : string
        The name of the file we are getting the data from or the
        cycle number found in the filename.
    numtype : string, optional
        Determines whether fname is the name of a file or the
        cycle number.  If it is 'file' it will then interpret it as
        a file, if it is 'cycNum' it will then interpret it as a
        cycle number.  The default is "cycNum".

    Returns
    -------
    array
        A numpy array of five attributes for the requested isotope:
        number, Z, A, isomer state and abundance, in that order.
    '''
    fname = self.findFile(fname, numtype)
    # Skip the header, then split the data rows.
    with open(fname, 'r') as f:
        for _ in range(self.index + 1):
            f.readline()
        rows = [line.strip().split() for line in f.readlines()]

    # Isotope names for every row, used to locate the requested one.
    element = self.get(self.dcols[5], fname, numtype)

    number = []  # row number
    z = []       # charge number
    a = []       # mass number
    isom = []    # isomer state
    abd = []     # abundance (mass fraction)
    for row in rows:
        # BUGFIX: the original never filled `a` (guaranteeing an
        # IndexError below) and read the isomer and abundance from
        # the wrong columns (abundance re-read column 1 = Z).
        # Column layout follows self.dcols as used by _getcycle:
        # 0=number, 1=Z, 2=A, 3=isomer, 4=abundance.
        number.append(int(row[0]))
        z.append(float(row[1]))
        a.append(float(row[2]))
        isom.append(float(row[3]))
        abd.append(float(row[4]))

    # Find the row whose isotope name matches attri; if no name
    # matches, this falls off the end and raises IndexError, as the
    # original search loop effectively did.
    index = 0
    while index < len(element):
        if attri == element[index]:
            break
        index += 1

    return array([number[index], z[index], a[index], isom[index], abd[index]])
the program gets a particular row of data for a particular
element name.
attri : string
The name of the attribute we are looking for. A complete
list of them can be obtained by calling
>>> get('element_name')
fname : string
The name of the file we are getting the data from or the
cycle number found in the filename.
numtype : string, optional
Determines whether fname is the name of a file or, the
cycle number. If it is 'file' it will then interpret it as
a file, if it is 'cycNum' it will then interpret it as a
cycle number. The default is "cycNum".
Returns
-------
array
A numpy array of the four element attributes, number, Z, A
and abundance, in that order.
Notes
-----
Warning | entailment |
def get(self, attri, fname=None, numtype='cycNum', decayed=False):
    '''
    Return all data for an entire cycle (basically the content of an
    iso_massfnnnn.DAT file) or a column of data for the associated
    attribute.

    Parameters
    ----------
    attri : string or integer
        If attri is a string, attri is the cycle or name of the
        attribute we are looking for.
        If attri is an integer, attri is the cycle number (cycle
        arrays are not supported).
    fname : string, optional
        If attri is a string, fname is the name of the file we are
        getting the data from or the cycle number found in the
        filename, or a list of either cycles or filenames.  If fname
        is None, the data from all cycles is returned.
        If attri is an integer, then fname is not supported.
        The default is None.
    numtype : string, optional
        If attri is a string, numtype determines whether fname is
        the name of a file or the cycle number.  If numtype is
        'file' it will then interpret fname as a file.  If numtype
        is 'cycNum' it will then interpret fname as a cycle number.
        If attri is an integer, then numtype is not supported.
        The default is "cycNum".
    decayed : boolean, optional
        Only used when attri is an integer: get the instantaneously
        decayed abundance distribution.  The default is False.

    Returns
    -------
    array or None
        If attri is a string, data in the form of a numpy array is
        returned.  If attri is an integer, nothing is returned.

    Raises
    ------
    TypeError
        If attri is neither an integer nor a string.

    Notes
    -----
    If attri is an integer, the following variables will be added to
    the instance:
    a_iso_to_plot: mass number of plotted range of species.
    isotope_to_plot: corresponding list of isotopes.
    z_iso_to_plot: corresponding charge numbers.
    el_iso_to_plot: corresponding element names.
    abunds: corresponding abundances.
    isom: list of isomers with their abundances.
    '''
    if type(attri) is int:
        # Cycle mode: populate the plotting attributes on the instance.
        print("Calling get method in cycle mode, adding a_iso_to_plot, z.. el.. isotope.. isotope... to instance")
        self._getcycle(attri, decayed)
    elif type(attri) is str:
        # Attribute mode: return the requested column/attribute data.
        return self._getattr(attri, fname, numtype)
    else:
        # Previously any other type fell through silently and returned
        # None; fail loudly so a bad argument type is not mistaken for
        # missing data.
        raise TypeError("attri must be an int (cycle number) or a str "
                        "(attribute name), got %s" % type(attri).__name__)
content of an iso_massfnnnn.DAT file) or a column of data for
the associated attribute is returned.
Parameters
----------
attri : string or integer
If attri is a string, attri is the cycle or name of the
attribute we are looking for.
If attri is an integer, attri is the cycle number (cycle arrays
are not supported).
fname : string, optional
If attri is a string, fname is the name of the file we are
getting the data from or the cycle number found in the
filename, or a List of either cycles or filenames. If fname
is None, the data from all cycles is returned.
If attri is an integer, then fname is not supported.
The default is None.
numtype : string, optional
If attri is a string, numtype determines whether fname is
the name of a file or, the cycle number. If numtype is
'file' it will then interpret fname as a file. If numtype
is 'cycNum' it will then interpret fname as a cycle number.
If attri is an Integer, then numtype is not supported.
The default is "cycNum".
decayed : boolean, optional
If attri is a string, then decayed is not supported.
If attri is an integer, then get instantaneously decay
abundance distribution.
The default is False.
Returns
-------
array
If attri is a string, data in the form of a numpy array is
returned.
If attri is an integer, Nothing is returned.
Notes
-----
If attri is an integer, then the following variables will be
added to the instance.
a_iso_to_plot: mass number of plotted range of species.
isotope_to_plot: corresponding list of isotopes.
z_iso_to_plot: corresponding charge numbers.
el_iso_to_plot: corresponding element names.
abunds: corresponding abundances.
isom: list of isomers with their abundances. | entailment |
def _getcycle(self, cycle, decayed=False):
    '''Private method for getting a cycle, called from get.'''
    # Raw abundance vector and nuclear data columns for this cycle.
    yps = self.get('ABUNDANCE_MF', cycle)
    z = self.get('Z', cycle)        # charge numbers
    a = self.get('A', cycle)        # mass numbers
    isomers = self.get('ISOM', cycle)

    (a_iso_to_plot, z_iso_to_plot, abunds,
     isotope_to_plot, el_iso_to_plot, isom) = \
        self._process_abundance_vector(a, z, isomers, yps)

    # Publish the (undecayed) cycle data on the instance.
    self.a_iso_to_plot = a_iso_to_plot
    self.isotope_to_plot = isotope_to_plot
    self.z_iso_to_plot = z_iso_to_plot
    self.el_iso_to_plot = el_iso_to_plot
    self.abunds = array(abunds)
    self.isom = isom

    if not decayed:
        return

    # Lazily build the decay index pointers on first use.
    if not hasattr(self, 'decay_idp'):
        print("WARNING: decayed in _getcycle ignores isomers " \
              "and will decay alpha-unstable p-rich nuclei as if they were beta+ stable.")
        print("Initialising decay index pointers ....")
        self.decay_indexpointer()  # provides self.decay_idp and
                                   # self.idp_to_stables_in_isostoplot

    stable_idx = self.idp_to_stables_in_isostoplot
    isotope_decay = array(isotope_to_plot)[stable_idx]
    z_iso_decay = array(z_iso_to_plot)[stable_idx]
    a_iso_decay = array(a_iso_to_plot)[stable_idx]
    el_iso_decay = array(el_iso_to_plot)[stable_idx]

    # Accumulate each species' abundance onto its stable decay target.
    abunds_decay = zeros(len(stable_idx), dtype='float64')
    for i in range(len(isotope_to_plot)):
        # Map species i (on the isotope_to_plot scale) to the position
        # of its decay target on the decayed (stable) array scale.
        target = where(isotope_decay == isotope_to_plot[self.decay_idp[i]])[0]
        abunds_decay[target] += abunds[i]

    if self.debug:
        print("Decayed array:")
        for i in range(len(stable_idx)):
            print(isotope_decay[i], z_iso_decay[i], a_iso_decay[i],
                  el_iso_decay[i], abunds_decay[i])

    # Overwrite the instance attributes with the decayed quantities.
    self.a_iso_to_plot = a_iso_decay
    self.isotope_to_plot = isotope_decay
    self.z_iso_to_plot = z_iso_decay
    self.el_iso_to_plot = el_iso_decay
    self.abunds = abunds_decay
def _getattr(self, attri, fname=None, numtype='cycNum'):
    '''
    Private method for getting an attribute, called from get.

    Parameters
    ----------
    attri : string
        Name of the cycle attribute, data column or isotope.
    fname : string, list or None, optional
        File name(s) or cycle number(s); None means all files.
        The default is None.
    numtype : string, optional
        'file' or 'cycNum', see get().  The default is "cycNum".

    Returns
    -------
    list, array or None
        A list of per-file results when fname is a list (or None),
        otherwise a single result; None if the attribute does not
        exist.
    '''
    # BUGFIX: the original tested str(fname.__class__) against the
    # Python 2 repr "<type 'list'>", which never matches under
    # Python 3, so list input was silently mishandled.
    isList = isinstance(fname, list)
    data = []
    if fname is None:
        # No file given: read the attribute from every known file.
        fname = self.files
        numtype = 'file'
        isList = True
    if isList:
        for i in range(len(fname)):
            if attri in self.cattrs:
                data.append(self.getCycleData(attri, fname[i], numtype))
            elif attri in self.dcols:
                data.append(self.getColData(attri, fname[i], numtype))
            elif attri in self.get('ISOTP', fname, numtype):
                data.append(self.getElement(attri, fname[i], numtype))
            else:
                print('Attribute ' + attri + ' does not exist')
                print('Returning none')
                return None
        return data
    if attri in self.cattrs:
        return self.getCycleData(attri, fname, numtype)
    elif attri in self.dcols:
        return self.getColData(attri, fname, numtype)
    elif attri in self.get('ISOTP', fname, numtype):
        return self.getElement(attri, fname, numtype)
    else:
        print('Attribute ' + attri + ' does not exist')
        print('Returning none')
        return None
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.