code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
''' np.array: The grid points in y. ''' if None not in (self.y_min, self.y_max, self.y_step) and \ self.y_min != self.y_max: y = np.arange(self.y_min, self.y_max-self.y_step*0.1, self.y_step) else: y = np.array([]) return y
def y(self)
np.array: The grid points in y.
3.148201
2.472328
1.273375
''' function: a function that when passed a `x` and `y` values, returns the permittivity profile of the structure, interpolating if necessary. ''' interp_real = interpolate.interp2d(self.x, self.y, self.eps.real) interp_imag = interpolate.interp2d(self.x, self.y, self.eps.imag) interp = lambda x, y: interp_real(x, y) + 1.j*interp_imag(x, y) return interp
def eps_func(self)
function: a function that when passed a `x` and `y` values, returns the permittivity profile of the structure, interpolating if necessary.
3.705488
1.957679
1.892797
''' function: a function that when passed a `x` and `y` values, returns the refractive index profile of the structure, interpolating if necessary. ''' return interpolate.interp2d(self.x, self.y, self.n)
def n_func(self)
function: a function that when passed a `x` and `y` values, returns the refractive index profile of the structure, interpolating if necessary.
9.863134
2.547859
3.871146
''' A low-level function that allows writing a rectangle refractive index profile to a `Structure`. Args: x_bot_left (float): The bottom-left x-coordinate of the rectangle. y_bot_left (float): The bottom-left y-coordinate of the rectangle. x_top_right (float): The top-right x-coordinate of the rectangle. y_top_right (float): The top-right y-coordinate of the rectangle. n_material (float): The refractive index of the points encompassed by the defined rectangle. angle (float): The angle in degrees of the sidewalls of the defined rectangle. Default is 0. This is useful for creating a ridge with angled sidewalls. ''' x_mask = np.logical_and(x_bot_left<=self.x, self.x<=x_top_right) y_mask = np.logical_and(y_bot_left<=self.y, self.y<=y_top_right) xy_mask = np.kron(y_mask, x_mask).reshape((y_mask.size, x_mask.size)) self.n[xy_mask] = n_material if angle: self._add_triangular_sides(xy_mask, angle, y_top_right, y_bot_left, x_top_right, x_bot_left, n_material) return self.n
def _add_material(self, x_bot_left, y_bot_left, x_top_right, y_top_right, n_material, angle=0)
A low-level function that allows writing a rectangle refractive index profile to a `Structure`. Args: x_bot_left (float): The bottom-left x-coordinate of the rectangle. y_bot_left (float): The bottom-left y-coordinate of the rectangle. x_top_right (float): The top-right x-coordinate of the rectangle. y_top_right (float): The top-right y-coordinate of the rectangle. n_material (float): The refractive index of the points encompassed by the defined rectangle. angle (float): The angle in degrees of the sidewalls of the defined rectangle. Default is 0. This is useful for creating a ridge with angled sidewalls.
2.707813
1.587883
1.705297
''' Write the refractive index profile to file. Args: filename (str): The nominal filename the refractive index data should be saved to. plot (bool): `True` if plots should be generates, otherwise `False`. Default is `True`. ''' path = os.path.dirname(sys.modules[__name__].__file__) + '/' with open(filename, 'w') as fs: for n_row in np.abs(self.n[::-1]): n_str = ','.join([str(v) for v in n_row]) fs.write(n_str+'\n') if plot: filename_image_prefix, _ = os.path.splitext(filename) filename_image = filename_image_prefix + '.png' args = { 'title': 'Refractive Index Profile', 'x_pts': self.x_pts, 'y_pts': self.y_pts, 'x_min': self.x_min, 'x_max': self.x_max, 'y_min': self.y_min, 'y_max': self.y_max, 'filename_data': filename, 'filename_image': filename_image } if MPL: heatmap = np.loadtxt(args['filename_data'], delimiter=',') plt.clf() plt.title(args['title']) plt.xlabel('$x$') plt.ylabel('$y$') plt.imshow(np.flipud(heatmap), extent=(args['x_min'], args['x_max'], args['y_min'], args['y_max']), aspect="auto") plt.colorbar() plt.savefig(filename_image) else: gp.gnuplot(path+'structure.gpi', args)
def write_to_file(self, filename='material_index.dat', plot=True)
Write the refractive index profile to file. Args: filename (str): The nominal filename the refractive index data should be saved to. plot (bool): `True` if plots should be generates, otherwise `False`. Default is `True`.
2.793279
2.268281
1.231452
''' Creates and adds a :class:`Slab` object. Args: height (float): Height of the slab. n_background (float): The nominal refractive index of the slab. Default is 1 (air). Returns: str: The name of the slab. ''' assert position in ('top', 'bottom') name = str(self.slab_count) if not callable(n_background): n_back = lambda wl: n_background else: n_back = n_background height_discretised = self.y_step*((height // self.y_step) + 1) y_min = self._next_start y_max = y_min + height_discretised self.slabs[name] = Slab(name, self.x_step, self.y_step, self.x_max, y_max, self.x_min, y_min, n_back, self._wl) self.y_max = y_max self._next_start = y_min + height_discretised self.slab_count += 1 if position == 'bottom': slabs = {} for k in self.slabs.keys(): slabs[str(int(k)+1)] = self.slabs[k] slabs['0'] = slabs.pop(str(self.slab_count)) self.slabs = slabs return name
def add_slab(self, height, n_background=1., position='top')
Creates and adds a :class:`Slab` object. Args: height (float): Height of the slab. n_background (float): The nominal refractive index of the slab. Default is 1 (air). Returns: str: The name of the slab.
3.012681
2.446064
1.231645
''' Changes the wavelength of the structure. This will affect the mode solver and potentially the refractive indices used (provided functions were provided as refractive indices). Args: wavelength (float): The new wavelength. ''' for name, slab in self.slabs.items(): const_args = slab._const_args mat_args = slab._mat_params const_args[8] = wavelength s = Slab(*const_args) for mat_arg in mat_args: s.add_material(*mat_arg) self.slabs[name] = s self._wl = wavelength
def change_wavelength(self, wavelength)
Changes the wavelength of the structure. This will affect the mode solver and potentially the refractive indices used (provided functions were provided as refractive indices). Args: wavelength (float): The new wavelength.
6.457124
2.914433
2.215568
''' np.array: The refractive index profile matrix of the current slab. ''' try: n_mat = self.slabs['0'].n for s in range(1, self.slab_count): n_mat = np.vstack((self.slabs[str(s)].n, n_mat)) except KeyError: n_mat = None return n_mat
def n(self)
np.array: The refractive index profile matrix of the current slab.
5.042882
2.771497
1.819552
''' Add a refractive index between two x-points. Args: x_min (float): The start x-point. x_max (float): The stop x-point. n (float, function): Refractive index between `x_min` and `x_max`. Either a constant (`float`), or a function that accepts one parameters, the wavelength, and returns a float of the refractive index. This is useful when doing wavelength sweeps and solving for the group velocity. The function provided could be a Sellmeier equation. angle (float): Angle in degrees of the slope of the sidewalls at `x_min` and `x_max`. This is useful for defining a ridge with angled sidewalls. ''' self._mat_params.append([x_min, x_max, n, angle]) if not callable(n): n_mat = lambda wl: n else: n_mat = n Structure._add_material(self, x_min, self.y_min, x_max, self.y_max, n_mat(self._wl), angle) return self.n
def add_material(self, x_min, x_max, n, angle=0)
Add a refractive index between two x-points. Args: x_min (float): The start x-point. x_max (float): The stop x-point. n (float, function): Refractive index between `x_min` and `x_max`. Either a constant (`float`), or a function that accepts one parameters, the wavelength, and returns a float of the refractive index. This is useful when doing wavelength sweeps and solving for the group velocity. The function provided could be a Sellmeier equation. angle (float): Angle in degrees of the slope of the sidewalls at `x_min` and `x_max`. This is useful for defining a ridge with angled sidewalls.
5.087145
1.674999
3.037104
''' Write the refractive index profile to file. Args: filename (str): The nominal filename the refractive index data should be saved to. plot (bool): `True` if plots should be generates, otherwise `False`. Default is `True`. ''' path = os.path.dirname(sys.modules[__name__].__file__) + '/' dir_plot = 'material_index/' if not os.path.exists(dir_plot): os.makedirs(dir_plot) for axis, name in zip(self.axes, self.axes_str): root, ext = os.path.splitext(filename) fn = dir_plot + root + '_'+ name + ext with open(fn, 'w') as fs: for n_row in np.abs(axis.n[::-1]): n_str = ','.join([str(v) for v in n_row]) fs.write(n_str+'\n') if plot: filename_image_prefix, _ = os.path.splitext(fn) filename_image = filename_image_prefix + '.png' args = { 'title': 'Refractive Index Profile: %s' % name, 'x_pts': self.xx.x_pts, 'y_pts': self.xx.y_pts, 'x_min': self.xx.x_min, 'x_max': self.xx.x_max, 'y_min': self.xx.y_min, 'y_max': self.xx.y_max, 'filename_data': fn, 'filename_image': filename_image } if MPL: heatmap = np.loadtxt(args['filename_data'], delimiter=',') plt.clf() plt.title(args['title']) plt.xlabel('$x$') plt.ylabel('$y$') plt.imshow(np.flipud(heatmap), extent=(args['x_min'], args['x_max'], args['y_min'], args['y_max']), aspect="auto") plt.colorbar() plt.savefig(filename_image) else: gp.gnuplot(path+'structure.gpi', args, silent=False)
def write_to_file(self, filename='material_index.dat', plot=True)
Write the refractive index profile to file. Args: filename (str): The nominal filename the refractive index data should be saved to. plot (bool): `True` if plots should be generates, otherwise `False`. Default is `True`.
2.931083
2.490172
1.17706
''' Changes the wavelength of the structure. This will affect the mode solver and potentially the refractive indices used (provided functions were provided as refractive indices). Args: wavelength (float): The new wavelength. ''' for axis in self.axes: if issubclass(type(axis), Slabs): axis.change_wavelength(wavelength) self.xx, self.xy, self.yx, self.yy, self.zz = self.axes self._wl = wavelength
def change_wavelength(self, wavelength)
Changes the wavelength of the structure. This will affect the mode solver and potentially the refractive indices used (provided functions were provided as refractive indices). Args: wavelength (float): The new wavelength.
7.765923
2.827654
2.746419
try: ret = int(s) except ValueError: ret = float(s) return ret
def covstr(s)
convert string to int or float.
4.94147
3.290156
1.501895
#re = "{%(time)s} %(name)s %(stock_no)s %(c)s %(range)+.2f(%(pp)+.2f%%) %(value)s" % { ''' re = % { ''' if covstr(self.g['range']) > 0: css = "red" elif covstr(self.g['range']) < 0: css = "green" else: css = "gray" re = { 'name': self.g['name'], 'stock_no': self.g['no'], 'time': self.g['time'], 'open': self.g['open'], 'h': self.g['h'], 'l': self.g['l'], 'c': self.g['c'], 'max': self.g['max'], 'min': self.g['min'], 'range': covstr(self.g['range']), 'ranges': self.g['ranges'], 'value': self.g['value'], 'pvalue': self.g['pvalue'], 'pp': covstr(self.g['pp']), 'top5buy': self.g['top5buy'], 'top5sell': self.g['top5sell'], 'crosspic': self.g['crosspic'], 'css': css } return re
def output(self)
re = """<table> <tr><td>%(name)s</td><td>%(c)s</td><td>%(range)+.2f(%(pp)+.2f%%)</td></tr> <tr><td>%(stock_no)s</td><td>%(value)s</td><td>%(time)s</td></tr></table>""" % {
3.306484
2.498648
1.323309
a = twsk(stock_no).real if a: re = "{%(time)s} %(stock_no)s %(c)s %(range)+.2f(%(pp)+.2f%%) %(value)s" % { 'stock_no': stock_no, 'time': a['time'], 'c': a['c'], 'range': covstr(a['range']), 'value': a['value'], 'pp': covstr(a['pp']) } return re else: return a
def Rt_display(stock_no)
For real time stock display 即時盤用,顯示目前查詢各股的股價資訊。
5.763866
6.045054
0.953485
pattern = re.compile(r"[0-9]{2}/[0-9]{2}/[0-9]{2}") b = re.search(pattern, oo[0]) try: b.group() return True except: return False
def ckinv(self,oo)
check the value is date or not 檢查是否為日期格式
3.777221
3.232783
1.168411
if rev == 0: if one > two: re = '↑'.decode('utf-8') elif one < two: re = '↓'.decode('utf-8') else: re = '-'.decode('utf-8') else: if one > two: re = 1 elif one < two: re = -1 else: re = 0 return re
def high_or_low(self,one,two,rev=0)
Return ↑↓- for high, low or equal. 回傳漲跌標示 rev = 0 回傳 ↑↓- rev = 1 回傳 1 -1 0
2.17533
1.905485
1.141615
for i in xrange(days): self.raw_data.pop() self.data_date.pop() self.stock_range.pop() self.stock_vol.pop() self.stock_open.pop() self.stock_h.pop() self.stock_l.pop()
def goback(self,days = 1)
Go back days 刪除最新天數資料數據 days 代表刪除多少天數(倒退幾天)
3.308074
3.409962
0.970121
url = 'http://www.twse.com.tw/ch/trading/exchange/STOCK_DAY/STOCK_DAY_print.php?genpage=genpage/Report%(year)d%(mon)02d/%(year)d%(mon)02d_F3_1_8_%(stock)s.php&type=csv&r=%(rand)s' % {'year': nowdatetime.year, 'mon': nowdatetime.month, 'stock': stock_no, 'rand': random.randrange(1,1000000)} self.debug_print(url) logging.info(url) #print cc.info().headers # set memcache expire now = TWTime().now if now >= datetime(now.year, now.month, now.day, 14, 45): addday = 1 else: addday = 0 endtime = datetime(now.year, now.month, now.day, 14, 00) + timedelta(days = addday) ## change from 13:35 to 14:00 logging.info('endtime: %s' % str(endtime)) if firsttime == 0: if endtime <= now: expire = 'ALUP' ## always update. else: expire = (endtime - now).seconds else: expire = 0 ## never expire. logging.info('expire: %s' % expire) ## get memcache memname = '%(stock)s%(year)d%(mon)02d' % {'year': nowdatetime.year, 'mon': nowdatetime.month,'stock': stock_no} stkm = memcache.get(memname) if stkm: csv_read = csv.reader(stkm) logging.info('#MemcacheGet: %s' % memname) else: cc = urllib2.urlopen(url) cc_read = cc.readlines() csv_read = csv.reader(cc_read) if expire != 'ALUP': memcache.add(memname, cc_read, expire) else: memcache.delete(memname) memcache.add('time%s' % memname, '%s %s' % (now, expire)) logging.info('#MemcacheAdd: %s' % memname) return csv_read
def fetch_data(self, stock_no, nowdatetime, firsttime = 1)
Fetch data from twse.com.tw return list. 從 twse.com.tw 下載資料,回傳格式為 list
3.558044
3.617072
0.983681
getr = [] getdate = [] getrange = [] getvol = [] getopen = [] geth = [] getl = [] otherinfo = [] fetch_data_raw = 1 for i in csv_read: if self.ckinv(i): #if len(i) > 1: self.debug_print(i) getr.append(self.covstr(i[6])) getdate.append(i[0].replace(' ','')) getrange.append(i[-2]) getvol.append(int(i[1].replace(',',''))) getopen.append(self.covstr(i[3])) geth.append(self.covstr(i[4])) getl.append(self.covstr(i[5])) else: otherinfo.append(i[0]) fetch_data_raw += 1 if fetch_data_raw >= 3: #stock_name = otherinfo[0].split(' ')[2].decode('big5').encode('utf-8') stock_name = unicode(otherinfo[0].split(' ')[2],'cp950') else: pass return_value = { 'stock_price': getr, 'stock_name': stock_name, 'data_date': getdate, 'stock_range': getrange, 'stock_vol': getvol, 'stock_open': getopen, 'stock_h': geth, 'stock_l': getl } self.debug_print(otherinfo) self.debug_print(stock_name) return return_value
def list_data(self, csv_read)
將資料 list 化 return dictionary: [stock_price]: Closing price (list) 收盤價格 [stock_name]: Stock name (str) and encode form big5 to utf-8 該股名稱,big5 → UTF-8 [data_date]: Stock date (list) 數據日期資訊 [stock_range]: Stock range price (list) 該鼓漲跌價格 [stock_vol]: Stock Volue (list) 成交量 [stock_open]: Stock open price (list) 開盤價 [stock_h]: Stock high price (list) 最高價 [stock_l]: Stock low price (list) 最低價
2.91171
2.461109
1.183088
rp = float((self.raw_data[-1] - self.raw_data[-2]) / self.raw_data[-2] * 100) return rp
def range_per(self)
Range percentage 計算最新日之漲跌幅度百分比
5.402702
4.443576
1.215846
if len(self.raw_data) >= days: data = self.raw_data[-days:] data_avg = float(sum(data) / days) data2 = [] for x in data: data2.append((x - data_avg ) ** 2) return math.sqrt(sum(data2) / len(data2)) else: return 0
def SD(self, days=45)
Standard Deviation. 計算 days 日內之標準差,預設 45 日
3.044217
3.00465
1.013169
if len(self.raw_data) >= days: data = self.raw_data[-days:] data_avg = float(sum(data) / days) return data_avg else: return 0
def SDAVG(self, days=45)
the last 45 days average. 計算 days 日內之平均數,預設 45 日
3.599822
3.685804
0.976672
if len(self.raw_data) >= days: data_avg = sum(self.raw_data[-days:]) / days return self.SD / data_avg else: return 0
def CV(self, days=45)
Coefficient of Variation. 計算 days 日內之變異數,預設 45 日
4.718739
5.137988
0.918402
now = TWTime().now.hour if now >= 9 and now <= 14: return True else: return False
def TimeinOpen(self)
In open market time. 在當日開市時刻,9 - 14
9.102536
7.705656
1.18128
yesterday = self.raw_data[:] yesterday.pop() yes_MA = float(sum(yesterday[-days:]) / days) today_MA = self.MA(days) return self.high_or_low(today_MA, yes_MA, rev)
def MAC(self,days,rev = 0)
Comparing yesterday price is high, low or equal. return ↑,↓ or - 與前一天 days 日收盤價移動平均比較 rev = 0 回傳 ↑,↓ or - rev = 1 回傳 1,-1 or 0
7.471723
6.762627
1.104855
return self.make_serial(self.raw_data,days,rev)
def MA_serial(self,days,rev=0)
see make_serial() 收盤價移動平均 list 化,資料格式請見 def make_serial()
9.738008
7.818158
1.245563
yesterday = self.stock_vol[:] yesterday.pop() yes_MAVOL = float(sum(yesterday[-days:]) / days) today_MAVOL = self.MAVOL(days) return self.high_or_low(today_MAVOL, yes_MAVOL,rev)
def MACVOL(self,days,rev=0)
Comparing yesterday volume is high, low or equal. return ↑,↓ or - 與前一天 days 日成交量移動平均比較 rev = 0 回傳 ↑,↓ or - rev = 1 回傳 1,-1 or 0
8.661421
8.091585
1.070423
return self.make_serial(self.stock_vol,days,rev=0)
def MAVOL_serial(self,days,rev=0)
see make_serial() 成較量移動平均 list 化,資料格式請見 def make_serial()
12.259433
10.187272
1.203407
if self.stock_vol[-1] > self.stock_vol[-2] and self.stock_vol[-1] > self.stock_vol[-3]: return True else: return False
def VOLMAX3(self)
Volume is the max in last 3 days. 三日內最大成交量
3.555097
3.157491
1.125925
day1MA = self.MA_serial(day1)[1] day2MA = self.MA_serial(day2)[1] bw = abs(day1-day2) if len(day1MA) > len(day2MA): day1MAs = day1MA[bw:] day2MAs = day2MA[:] elif len(day1MA) < len(day2MA): day1MAs = day1MA[:] day2MAs = day2MA[bw:] else: day1MAs = day1MA[:] day2MAs = day2MA[:] serial = [] for i in xrange(len(day1MAs)): serial.append(day1MAs[i]-day2MAs[i]) cum = self.make_serial(serial,1,rev) #return [day1MAs,day2MAs,serial,cum,self.high_or_low(cum[-1],cum[-2])] return [cum,self.high_or_low(day1MAs[-1]-day2MAs[-1],day1MAs[-2]-day2MAs[-2],rev)]
def MAO(self,day1,day2,rev=0)
This is MAO(Moving Average Oscillator), not BIAS. It's only 'MAday1 - MAday2'. 乖離率,MAday1 - MAday2 兩日之移動平均之差 return list: [0] is the times of high, low or equal [0] is times [1] is the MAO data [1] rev=0:↑ ↓ or -,rev=1:1 -1 0 回傳: [0] [0] 回傳次數 [1] MAO 資料數據 [1] 漲跌標示,rev=0:↑ ↓ or -,rev=1:1 -1 0
2.498413
2.502688
0.998292
c = data[-s:] if pm: ckvalue = max(c) preckvalue = max(c) > 0 else: ckvalue = min(c) preckvalue = max(c) < 0 return [s - c.index(ckvalue) < 4 and c.index(ckvalue) != s-1 and preckvalue, s - c.index(ckvalue) - 1, ckvalue]
def ckMAO(self,data,s=5,pm=False)
判斷正負乖離位置 s = 取樣判斷區間 pm = True(正)/False(負) 乖離 return [T/F, 第幾個轉折日, 乖離值]
4.679273
4.27515
1.094528
A = self.raw_data[-3]*2 - self.raw_data[-6] B = self.raw_data[-2]*2 - self.raw_data[-5] C = self.raw_data[-1]*2 - self.raw_data[-4] return '(%.2f,%.2f,%.2f)' % (A,B,C)
def RABC(self)
Return ABC 轉折點 ABC
2.453754
2.267348
1.082213
raw = data[:] result = [] try: while len(raw) >= days: result.append(float(sum(raw[-days:]) / days)) raw.pop() self.debug_print(len(result)) result.reverse() re = [self.cum_serial(result,rev), result] return re except: return '?'
def make_serial(self,data,days,rev=0)
make data in list if data enough, will return: [0] is the times of high, low or equal [1] is the serial of data. or return '?' 資料數據 list 化,days 移動平均值 [0] 回傳次數 [1] 回傳數據
6.654728
5.346945
1.244585
org = raw[1:] diff = raw[:-1] result = [] for i in xrange(len(org)): result.append(self.high_or_low(org[i], diff[i],rev)) times = 0 try: if result[-1] == result[-2]: signal = result[-1] re_signal = result[:] try: while signal == re_signal[-1]: re_signal.pop() times += 1 except: pass else: times += 1 except: times = '?' if self.debug: for i in result: print i self.debug_print(times) return times
def cum_serial(self, raw,rev=0)
Cumulate serial data and return times(int) 計算數據重複(持續)次數
4.263641
4.17359
1.021576
print self.stock_name,self.stock_no print '%s %s %s(%+.2f%%)' % (self.data_date[-1],self.raw_data[-1],self.stock_range[-1],self.range_per) for i in arg: print ' - MA%02s %.2f %s(%s)' % (i,self.MA(i),self.MAC(i),self.MA_serial(i)[0]) print ' - Volume: %s %s(%s)' % (self.MAVOL(1)/1000,self.MACVOL(1),self.MAVOL_serial(1)[0]) MAO = self.MAO(3,6) print ' - MAO(3-6): %.2f %s(%s)' % (MAO[0][1][-1], MAO[1], MAO[0][0]) print ' - RABC: %s' % self.RABC
def display(self,*arg)
For simple Demo 測試用顯示樣式。
5.300175
5.35133
0.990441
MA = '' for i in arg: MAs = '- MA%02s: %.2f %s(%s)\n' % ( unicode(i), self.MA(i), self.MAC(i), unicode(self.MA_serial(i)[0]) ) MA = MA + MAs vol = '- Volume: %s %s(%s)' % ( unicode(self.MAVOL(1)/1000), unicode(self.MACVOL(1)), unicode(self.MAVOL_serial(1)[0]) ) MAO = self.MAO(3,6) re = % { 'stock_name': unicode(self.stock_name), 'stock_no': unicode(self.stock_no), 'stock_date': unicode(self.data_date[-1]), 'stock_price': unicode(self.raw_data[-1]), 'stock_range': unicode(self.stock_range[-1]), 'range_per': self.range_per, 'MA': MA, 'vol': vol, 'MAO_v': MAO[0][1][-1], 'MAO_c': unicode(MAO[1]), 'MAO_times': unicode(MAO[0][0]), 'RABC': self.RABC } return re
def XMPP_display(self,*arg)
For XMPP Demo 輸出到 XMPP 之樣式。
4.437037
4.42055
1.00373
re = % { 'stock_name': unicode(self.stock_name), 'stock_no': unicode(self.stock_no), 'stock_date': unicode(self.data_date[-1]), 'stock_price': unicode(self.raw_data[-1]), 'stock_range': unicode(self.stock_range[-1]), } return re
def Task_display(self)
For Task overall stock display 顯示資訊樣式之一,兩行資訊。
5.158422
4.592244
1.12329
re = "%(stock_no)s %(stock_name)s %(stock_date)s %(stock_price)s %(stock_range)s %(stock_range_per).2f%% %(RABC)s %(stock_vol)s" % { 'stock_name': unicode(self.stock_name), 'stock_no': unicode(self.stock_no), 'stock_date': unicode(self.data_date[-1]), 'stock_price': unicode(self.raw_data[-1]), 'stock_range': unicode(self.stock_range[-1]), 'stock_range_per': self.range_per, 'stock_vol': self.stock_vol[-1]/1000, 'RABC': self.RABC } return re
def Cmd_display(self)
For Task overall stock display 一行顯示資訊,用於終端機顯示樣式。
3.290188
3.261088
1.008923
if s == 0: s = len(self.raw_data) if len(size) == 2: sw,sh = size else: sh = 300 sw = 25 * s if sw > 1000: sw = 1000 candle = 950/s stc = '' for i in self.raw_data[-s:]: stc += str(i) + ',' sto = '' for i in self.stock_open[-s:]: sto += str(i) + ',' sth = '' for i in self.stock_h[-s:]: sth += str(i) + ',' stl = '' for i in self.stock_l[-s:]: stl += str(i) + ',' stdate = '' for i in self.data_date[-s:]: stdate += str(i[-2:]) + '|' stmax = max(self.stock_h[-s:]) stmin = min(self.stock_l[-s:]) strange = (stmax-stmin) / 10 re = "http://%(rand)s.chart.apis.google.com/chart?chs=%(sw)sx%(sh)s&cht=lc&chd=t1:0,0,0|0,%(h)s0|0,%(c)s0|0,%(o)s0|0,%(l)s0&chm=F,,1,1:-1,%(candle)s&chxt=y,x&chds=%(min)s,%(max)s&chxr=0,%(min)s,%(max)s,%(range)s&chg=20,%(chg)s&chtt=%(chtt)s&chxl=1:||%(chxl)s" % { 'h': sth, 'c': stc, 'o': sto, 'l': stl, 'min': stmin, 'max': stmax, 'sw': sw, 'sh': sh, 'range': strange, 'candle': candle, 'chg': 10, 'rand': random.randint(0,9), 'chxl': stdate, 'chtt': '%s %s' % (self.stock_name,self.stock_no) } return re
def gchart(self, s = 0, size = [], candle = 20)
Chart for serious stocks 輸出 Google Chart 圖表。 s = 資料筆數 size = 圖表寬度、高度 [寬度,高度] candle = K 棒的寬度
2.757765
2.764326
0.997627
''' 買 ''' self.money += -price*value try: self.store[no] += value except: self.store[no] = value try: self.avgprice[no]['buy'] += [price] except: try: self.avgprice[no]['buy'] = [price] except: self.avgprice[no] = {} self.avgprice[no]['buy'] = [price]
def buy(self, no, price, value)
2.993094
2.827321
1.058632
''' 賣 ''' self.money += price*value try: self.store[no] += -value except: self.store[no] = -value try: self.avgprice[no]['sell'] += [price] except: try: self.avgprice[no]['sell'] = [price] except: self.avgprice[no] = {} self.avgprice[no]['sell'] = [price]
def sell(self, no, price, value)
3.153344
2.813255
1.120888
''' 總覽顯示 ''' print 'money:',self.money print 'store:',self.store print 'avgprice:',self.avgprice
def showinfo(self)
總覽顯示
11.445085
6.90048
1.658593
import re pattern = re.compile("%s" % q) result = {} for i in self.allstockno: b = re.search(pattern, self.allstockno[i]) try: b.group() result[i] = self.allstockno[i] except: pass return result
def search(self,q)
Search.
4.006309
4.042735
0.99099
return numpy.trapz(numpy.trapz(f, x=y, dx=dy), x=x, dx=dx)
def trapz2(f, x=None, y=None, dx=1.0, dy=1.0)
Double integrate.
2.872863
3.037573
0.945776
from scipy.sparse.linalg import eigen self.nmodes = neigs self.tol = tol A = self.build_matrix() if guess is not None: # calculate shift for eigs function k = 2 * numpy.pi / self.wl shift = (guess * k) ** 2 else: shift = None [eigvals, eigvecs] = eigen.eigs(A, k=neigs, which='LR', tol=0.001, ncv=None, v0 = initial_mode_guess, return_eigenvectors=mode_profiles, sigma=shift) neffs = self.wl * scipy.sqrt(eigvals) / (2 * numpy.pi) if mode_profiles: Hxs = [] Hys = [] nx = self.nx ny = self.ny for ieig in range(neigs): Hxs.append(eigvecs[:nx * ny, ieig].reshape(nx, ny)) Hys.append(eigvecs[nx * ny:, ieig].reshape(nx, ny)) # sort the modes idx = numpy.flipud(numpy.argsort(neffs)) neffs = neffs[idx] self.neff = neffs if mode_profiles: tmpx = [] tmpy = [] for i in idx: tmpx.append(Hxs[i]) tmpy.append(Hys[i]) Hxs = tmpx Hys = tmpy [Hzs, Exs, Eys, Ezs] = self.compute_other_fields(neffs, Hxs, Hys) self.modes = [] for (neff, Hx, Hy, Hz, Ex, Ey, Ez) in zip(neffs, Hxs, Hys, Hzs, Exs, Eys, Ezs): self.modes.append( FDMode(self.wl, self.x, self.y, neff, Ey, Ex, Ez, Hy, Hx, Hz).normalize()) return self
def solve(self, neigs=4, tol=0, guess=None, mode_profiles=True, initial_mode_guess=None)
This function finds the eigenmodes. Parameters ---------- neigs : int number of eigenmodes to find tol : float Relative accuracy for eigenvalues. The default value of 0 implies machine precision. guess : float a guess for the refractive index. Only finds eigenvectors with an effective refractive index higher than this value. Returns ------- self : an instance of the VFDModeSolver class obtain the fields of interest for specific modes using, for example: solver = EMpy.modesolvers.FD.VFDModeSolver(wavelength, x, y, epsf, boundary).solve() Ex = solver.modes[0].Ex Ey = solver.modes[0].Ey Ez = solver.modes[0].Ez
2.84192
2.746354
1.034797
try: unch = sum([covstr(self.stock[3]),covstr(self.stock[4])])/2 re = {'name': unicode(self.stock[36].replace(' ',''), 'cp950'), 'no': self.stock[0], 'range': self.stock[1], 'time': self.stock[2], 'max': self.stock[3], 'min': self.stock[4], 'unch': '%.2f' % unch, 'pp': '%.2f' % ((covstr(self.stock[8]) - unch)/unch*100), 'open': self.stock[5], 'h': self.stock[6], 'l': self.stock[7], 'c': self.stock[8], 'value': self.stock[9], 'pvalue': self.stock[10], 'top5buy': [ (self.stock[11], self.stock[12]), (self.stock[13], self.stock[14]), (self.stock[15], self.stock[16]), (self.stock[17], self.stock[18]), (self.stock[19], self.stock[20]) ], 'top5sell': [ (self.stock[21], self.stock[22]), (self.stock[23], self.stock[24]), (self.stock[25], self.stock[26]), (self.stock[27], self.stock[28]), (self.stock[29], self.stock[30]) ] } if '-' in self.stock[1]: re['ranges'] = False ## price down else: re['ranges'] = True ## price up re['crosspic'] = "http://chart.apis.google.com/chart?chf=bg,s,ffffff&chs=20x50&cht=ls&chd=t1:0,0,0|0,%s,0|0,%s,0|0,%s,0|0,%s,0&chds=%s,%s&chm=F,,1,1:4,20" % (re['h'],re['c'],re['open'],re['l'],re['l'],re['h']) re['top5buy'].sort() re['top5sell'].sort() return re except: return False
def real(self)
Real time data
2.744923
2.731197
1.005026
''' Display Taiwan Time now 顯示台灣此刻時間 ''' localtime = datetime.datetime.now() return localtime + datetime.timedelta(hours = time.timezone/60/60 + self.TimeZone)
def now(self)
Display Taiwan Time now 顯示台灣此刻時間
13.316648
5.496635
2.422691
''' Display Taiwan date now 顯示台灣此刻日期 ''' localtime = datetime.date.today() return localtime + datetime.timedelta(hours = time.timezone/60/60 + self.TimeZone)
def date(self)
Display Taiwan date now 顯示台灣此刻日期
15.03652
6.052945
2.484166
''' 檢查所有股票買賣點,剔除$10以下、成交量小於1000張的股票。 ''' for i in twseno().allstockno: a = goristock.goristock(i) try: if a.stock_vol[-1] > 1000*1000 and a.raw_data[-1] > 10: #a.goback(3) ## 倒退天數 ck4m(a) except: pass
def allck()
檢查所有股票買賣點,剔除$10以下、成交量小於1000張的股票。
14.054977
7.554813
1.8604
''' Calculates the coherence length (100% power transfer) of a directional coupler. Args: wavelength_nm (float): The wavelength in [nm] the directional coupler should operate at. n_eff_1 (float): n_eff of the fundamental (even) supermode of the directional coupler. n_eff_2 (float): n_eff of the first-order (odd) supermode of the directional coupler. Returns: float: The length [um] the directional coupler needs to be to achieve 100% power transfer. ''' wavelength_m = wavelength_nm * 1.e-9 dn_eff = (n_eff_1 - n_eff_2).real lc_m = wavelength_m / (2. * dn_eff) lc_um = lc_m * 1.e6 return lc_um
def directional_coupler_lc(wavelength_nm, n_eff_1, n_eff_2)
Calculates the coherence length (100% power transfer) of a directional coupler. Args: wavelength_nm (float): The wavelength in [nm] the directional coupler should operate at. n_eff_1 (float): n_eff of the fundamental (even) supermode of the directional coupler. n_eff_2 (float): n_eff of the first-order (odd) supermode of the directional coupler. Returns: float: The length [um] the directional coupler needs to be to achieve 100% power transfer.
3.403846
1.583243
2.14992
''' Calculate the period needed for a grating coupler. Args: wavelength (float): The target wavelength for the grating coupler. n_eff (float): The effective index of the mode of a waveguide with the width of the grating coupler. n_clad (float): The refractive index of the cladding. incidence_angle_deg (float): The incidence angle the grating coupler should operate at [degrees]. diffration_order (int): The grating order the coupler should work at. Default is 1st order (1). Returns: float: The period needed for the grating coupler in the same units as the wavelength was given at. ''' k0 = 2. * np.pi / wavelength beta = n_eff.real * k0 n_inc = n_clad grating_period = (2.*np.pi*diffration_order) \ / (beta - k0*n_inc*np.sin(np.radians(incidence_angle_deg))) return grating_period
def grating_coupler_period(wavelength, n_eff, n_clad, incidence_angle_deg, diffration_order=1)
Calculate the period needed for a grating coupler. Args: wavelength (float): The target wavelength for the grating coupler. n_eff (float): The effective index of the mode of a waveguide with the width of the grating coupler. n_clad (float): The refractive index of the cladding. incidence_angle_deg (float): The incidence angle the grating coupler should operate at [degrees]. diffration_order (int): The grating order the coupler should work at. Default is 1st order (1). Returns: float: The period needed for the grating coupler in the same units as the wavelength was given at.
3.465254
1.681266
2.061098
return ('%s %s %s %.2f %+.2f %s %s %s %s %+.2f %s %s %.2f %.4f %.4f' % (aa.stock_no, aa.stock_name, aa.data_date[-1], aa.raw_data[-1], aa.range_per, aa.MAC(3), aa.MAC(6), aa.MAC(18), aa.MAO(3,6)[1], aa.MAO(3,6)[0][1][-1], aa.MAO(3,6)[0][0], aa.RABC, aa.stock_vol[-1]/1000, aa.SD, aa.CV)).encode('utf-8')
def oop(aa)
For cmd output.
5.60463
5.615554
0.998055
from twseno import twseno for i in twseno().allstock: #timetest(i) try: if case == 1: try: a = goristock(i) if goback: a.goback(goback) if a.MAO(3,6)[1] == '↑'.decode('utf-8') and (a.MAO(3,6)[0][1][-1] < 0 or ( a.MAO(3,6)[0][1][-1] < 1 and a.MAO(3,6)[0][1][-1] > 0 and a.MAO(3,6)[0][1][-2] < 0 and a.MAO(3,6)[0][0] == 3)) and a.VOLMAX3 and a.stock_vol[-1] > 1000*1000 and a.raw_data[-1] > 10: #print a.Cmd_display print 'buy-: ' + oop(a) elif a.MAO(3,6)[1] == '↓'.decode('utf-8') and a.MAO(3,6)[0][1][-1] > 0 and a.MAO(3,6)[0][0] <= 3: print 'sell: ' + oop(a) except KeyboardInterrupt: print '::KeyboardInterrupt' break except IndexError: print i elif case == 2: try: a = goristock(i) if goback: a.goback(goback) if a.MAO(3,6)[1] == '↑'.decode('utf-8') and (a.MAO(3,6)[0][1][-1] < 0 or ( a.MAO(3,6)[0][1][-1] < 1 and a.MAO(3,6)[0][1][-1] > 0 and a.MAO(3,6)[0][1][-2] < 0 and a.MAO(3,6)[0][0] == 3)) and a.stock_vol[-1] >= 1000*1000 and a.raw_data[-1] > 10 and (sum(a.stock_vol[-45:])/45) <= 1000*1000: #print a.Cmd_display print 'buy-: ' + oop(a) except KeyboardInterrupt: print '::KeyboardInterrupt' break except IndexError: print i elif case == 3: try: a = goristock(i) if goback: a.goback(goback) if a.MA(3) > a.raw_data[-1] and a.MA(6) <= a.raw_data[-1] and a.MA(6) > a.MA(18): #print a.Cmd_display print 'buy-: ' + oop(a) except KeyboardInterrupt: print '::KeyboardInterrupt' break except IndexError: print i except KeyboardInterrupt: print 'KeyboardInterrupt' break
def overall(goback = 0, case = 1)
Run over all stocks and report the ones that match the selection rule 'case'. 'goback' is how many days to go back; 0 means the latest day.
2.33749
2.335256
1.000957
def read_file(filename, prepend_paths=None):
    """Return the contents of *filename* (UTF-8).

    If *prepend_paths* is set, those path components are joined before
    *filename*.  If it is ``True``, the directory containing this file
    (setup.py's directory) is prepended.

    Bug fix: the old default was a shared mutable list which the function
    also mutated via ``append`` -- repeated calls accumulated components.
    ``None`` is now the sentinel and the caller's list is never modified.
    """
    if prepend_paths is True:
        prepend_paths = [
            os.path.abspath(os.path.dirname(__file__)),
        ]
    if prepend_paths:
        # Join without mutating the caller's list.
        filename = os.path.join(*(list(prepend_paths) + [filename]))
    with open(filename, encoding='utf-8') as f:
        return f.read()
Returns the contents of *filename* (UTF-8). If *prepend_paths* is set, join those before the *filename*. If it is `True`, prepend the path to `setup.py`.
2.574168
2.389216
1.077411
def loaddate(self):
    ''' Load the market open/close calendar.

    File format follows
    http://www.twse.com.tw/ch/trading/trading_days.php
    column 0 is a Y/m/d date, column 1 is 0 = closed, 1 = open.

    Returns a dict with 'close' and 'open' lists of datetime.date.
    '''
    re = {}
    re['close'] = []
    re['open'] = []
    # `with` closes the handle deterministically; the old code leaked it.
    with open('./%s/opendate.csv' % _CSVFILEPATH, 'r') as csvfile:
        for i in csv.reader(csvfile):
            if i[1] == '0':
                re['close'] += [datetime.strptime(i[0], '%Y/%m/%d').date()]
            elif i[1] == '1':
                re['open'] += [datetime.strptime(i[0], '%Y/%m/%d').date()]
            # any other flag is silently skipped, as before
    return re
載入檔案 檔案依據 http://www.twse.com.tw/ch/trading/trading_days.php
3.747973
2.822941
1.327684
def ooc(self):
    ''' Open or close: True = market open, False = market closed. '''
    today = self.ptime.date()
    # statutory holiday / special closure
    if today in self.ocdate['close']:
        return False
    # statutory make-up open day
    if today in self.ocdate['open']:
        return True
    # otherwise fall back to the weekly schedule: Mon-Fri open
    return self.ptime.weekday() <= 4
Open or close 回傳 True:開市,False:休市。
6.228461
4.307316
1.446019
''' 3-6負乖離且向上,三日內最大量,成交量大於 1000 張,收盤價大於 10 元。(較嚴謹的選股)''' return self.a.MAO(3,6)[1] == '↑'.decode('utf-8') and (self.a.MAO(3,6)[0][1][-1] < 0 or ( self.a.MAO(3,6)[0][1][-1] < 1 and self.a.MAO(3,6)[0][1][-1] > 0 and self.a.MAO(3,6)[0][1][-2] < 0 and self.a.MAO(3,6)[0][0] == 3)) and self.a.VOLMAX3 and self.a.stock_vol[-1] > 1000*1000 and self.a.raw_data[-1] > 10
def ck_portf_001(self)
3-6負乖離且向上,三日內最大量,成交量大於 1000 張,收盤價大於 10 元。(較嚴謹的選股)
5.497509
2.675037
2.055115
''' 3日均價大於6日均價,6日均價大於18日均價。(短中長線呈現多頭的態勢) ''' return self.a.MA(3) > self.a.MA(6) > self.a.MA(18) and self.a.MAC(18) == '↑'.decode('utf-8') and self.a.stock_vol[-1] > 1000*1000 and self.a.raw_data[-1] > 10
def ck_portf_002(self)
3日均價大於6日均價,6日均價大於18日均價。(短中長線呈現多頭的態勢)
8.483212
3.438612
2.467046
def ck_portf_003(self):
    ''' Today's volume exceeds the combined volume of the previous three
    days (short-term momentum), volume above 1000 lots, close above 10. '''
    vols = self.a.stock_vol
    surge = vols[-1] > sum(vols[-4:-1])
    return surge and vols[-1] > 1000*1000 and self.a.raw_data[-1] > 10
當日成交量,大於前三天的總成交量。(短線多空動能)
7.747419
3.335535
2.322691
def ck_portf_004(self):
    ''' Price flat for about a month and a half (box consolidation),
    volume above 1000 lots, close above 10. '''
    quiet = self.a.SD < 0.25
    return quiet and self.a.stock_vol[-1] > 1000*1000 and self.a.raw_data[-1] > 10
價走平一個半月。(箱型整理、盤整)
23.57057
6.045921
3.89859
def GLI(self, pm=False):
    ''' Evaluate the 3-6 bias (MAO) check and return its first result. '''
    bias_series = self.a.MAO(3, 6)[0][1]
    checked = list(self.a.ckMAO(bias_series, pm=pm))
    return checked[0]
判斷乖離
23.09548
13.623141
1.695312
def B1(self):
    ''' Volume expands and the close is up. '''
    vols = self.a.stock_vol
    return vols[-1] > vols[-2] and self.a.PUPTY
量大收紅
24.779459
10.215481
2.425677
def B2(self):
    ''' Volume shrinks while the price holds up. '''
    vols = self.a.stock_vol
    return vols[-1] < vols[-2] and self.a.PUPTY
量縮價不跌
26.168095
9.5695
2.734531
def S1(self):
    ''' Volume expands and the close is down. '''
    vols = self.a.stock_vol
    return vols[-1] > vols[-2] and not self.a.PUPTY
量大收黑
27.295038
11.417506
2.39063
def S2(self):
    ''' Volume shrinks and the price falls. '''
    vols = self.a.stock_vol
    return vols[-1] < vols[-2] and not self.a.PUPTY
量縮價跌
25.786928
11.106614
2.321763
def B4PB(self):
    ''' Whether any of the four major buy points fires, gated on the
    negative-bias check (ckMinsGLI). '''
    # or-chain kept (not any()) so the truthy value itself is returned,
    # exactly as before.
    return self.ckMinsGLI and (self.B1 or
                               self.B2 or
                               self.B3 or
                               self.B4)
判斷是否為四大買點
15.617154
6.825696
2.287994
def B4PS(self):
    ''' Whether any of the four major sell points fires, gated on the
    positive-bias check (ckPlusGLI). '''
    # or-chain kept (not any()) so the truthy value itself is returned,
    # exactly as before.
    return self.ckPlusGLI and (self.S1 or
                               self.S2 or
                               self.S3 or
                               self.S4)
判斷是否為四大賣點
19.318737
7.086468
2.726145
def reflection(n1, n2):
    '''
    Fresnel power reflection at the interface between two media.

    Args:
        n1 (float): Refractive index of material 1.
        n2 (float): Refractive index of material 2.

    Returns:
        float: The fraction of power that is reflected.
    '''
    contrast = (n1 - n2) / (n1 + n2)
    return abs(contrast) ** 2
Calculate the power reflection at the interface of two refractive index materials. Args: n1 (float): Refractive index of material 1. n2 (float): Refractive index of material 2. Returns: float: The percentage of reflected power.
4.065303
1.97772
2.05555
def coupling_efficiency(mode_solver, fibre_mfd,
                        fibre_offset_x=0, fibre_offset_y=0,
                        n_eff_fibre=1.441):
    '''
    Power coupling efficiency between each solved mode and a Gaussian
    fibre mode of the given mode-field diameter.

    Args:
        mode_solver (_ModeSolver): Mode solver holding the solved modes.
        fibre_mfd (float): The mode-field diameter (MFD) of the fibre.
        fibre_offset_x (float): Fibre offset from window centre in x.
        fibre_offset_y (float): Fibre offset from window centre in y.
        n_eff_fibre (float): Effective index of the fibre mode.

    Returns:
        list: The power coupling efficiency per solved mode.
    '''
    gaus = _make_gaussian(mode_solver._structure.xc,
                          mode_solver._structure.yc,
                          fibre_mfd, fibre_offset_x, fibre_offset_y)
    # efficiency = |field overlap| * |Fresnel transmission|
    return [abs(_overlap(mode, gaus)) * abs(transmission(n_eff, n_eff_fibre))
            for mode, n_eff in zip(mode_solver.modes, mode_solver.n_effs)]
Finds the coupling efficiency between a solved fundamental mode and a fibre of given MFD. Args: mode_solver (_ModeSolver): Mode solver that has found a fundamental mode. fibre_mfd (float): The mode-field diameter (MFD) of the fibre. fibre_offset_x (float): Offset the fibre from the centre position of the window in x. Default is 0 (no offset). fibre_offset_y (float): Offset the fibre from the centre position of the window in y. Default is 0 (no offset). n_eff_fibre (float): The effective index of the fibre mode. Default is 1.441. Returns: float: The power coupling efficiency.
3.466043
1.85621
1.867269
def getIndex(reference):
    '''
    Locate the lambda reference fasta (next to this package when none is
    given), build a minimap2 index and return the aligner.
    '''
    if reference:
        reffas = reference
    else:
        # default location: <package parent>/reference/DNA_CS.fasta
        parent_directory = path.dirname(path.abspath(path.dirname(__file__)))
        reffas = path.join(parent_directory, "reference/DNA_CS.fasta")

    if not path.isfile(reffas):
        logging.error("Could not find reference fasta for lambda genome.")
        sys.exit("Could not find reference fasta for lambda genome.")

    aligner = mp.Aligner(reffas, preset="map-ont")  # build index
    if not aligner:
        logging.error("Failed to load/build index")
        raise Exception("ERROR: failed to load/build index")
    return aligner
Find the reference folder using the location of the script file Create the index, test if successful
6.207943
4.213793
1.473244
def align(aligner, reads):
    '''
    Write every read that does NOT align to the lambda genome to stdout
    (fastq); report the number of removed (aligned) reads on stderr.
    '''
    i = 0
    for record in SeqIO.parse(reads, "fastq"):
        try:
            next(aligner.map(str(record.seq)))
        except StopIteration:
            # no hit: keep the read
            print(record.format("fastq"), end='')
        else:
            i += 1
    sys.stderr.write("NanoLyse: removed {} reads.\n".format(i))
Test if reads can get aligned to the lambda genome, if not: write to stdout
7.16669
3.944344
1.816954
def _convert_json_response_to_entities(response, property_resolver, require_encryption,
                                       key_encryption_key, key_resolver):
    '''
    Converts the response to tables class.

    Multi-entity responses carry the entities under 'value'; a
    single-entity response body IS the entity.  Returns None when the
    response carries no body.
    '''
    if response is None or response.body is None:
        return None

    entities = _list()
    entities.next_marker = _get_continuation_from_response_headers(response)

    root = loads(response.body.decode('utf-8'))

    if 'value' in root:
        for entity in root['value']:
            entity = _decrypt_and_deserialize_entity(entity, property_resolver,
                                                     require_encryption,
                                                     key_encryption_key,
                                                     key_resolver)
            entities.append(entity)
    else:
        # BUG FIX: this branch referenced the undefined loop variable
        # 'entity'; for a single-entity response the entity is the root
        # of the JSON document itself.
        # NOTE(review): single entities bypass _decrypt_and_deserialize_entity
        # here -- confirm decryption is handled by the caller.
        entities.append(_convert_json_to_entity(root, property_resolver))

    return entities
Converts the response to tables class.
4.187893
3.675894
1.139286
def _exec_ipmitool(driver_info, command):
    """Execute an ipmitool command over the lanplus interface.

    :param driver_info: dict with 'address', 'username' and 'password'
        used to reach the node's BMC.
    :param command: the ipmitool sub-command to be executed.
    :returns: the raw command output, or None when execution failed.
    """
    # NOTE(review): the credentials are interpolated into a shell string
    # (shell=True) -- driver_info must come from a trusted source.
    ipmi_cmd = ("ipmitool -H %(address)s"
                " -I lanplus -U %(user)s -P %(passwd)s %(cmd)s"
                % {'address': driver_info['address'],
                   'user': driver_info['username'],
                   'passwd': driver_info['password'],
                   'cmd': command})
    try:
        return subprocess.check_output(ipmi_cmd, shell=True)
    except Exception:
        # deliberate best-effort: callers treat None as "no data"
        return None
Execute the ipmitool command. This uses the lanplus interface to communicate with the BMC device driver. :param driver_info: the ipmitool parameters for accessing a node. :param command: the ipmitool command to be executed.
1.976077
2.277439
0.867675
i = 0x0 value = None ilo_fw_rev = get_ilo_version(ilo_fw) or DEFAULT_FW_REV # Note(vmud213): iLO firmware versions >= 2.3 support reading the FRU # information in a single call instead of iterating over each FRU id. if ilo_fw_rev < MIN_SUGGESTED_FW_REV: for i in range(0xff): # Note(vmud213): We can discard FRU ID's between 0x6e and 0xee # as they don't contain any NIC related information if (i < 0x6e) or (i > 0xee): cmd = "fru print %s" % hex(i) out = _exec_ipmitool(driver_info, cmd) if out and 'port' in out and 'Adapter' in out: value = _parse_ipmi_nic_capacity(out) if value is not None: break else: continue else: cmd = "fru print" out = _exec_ipmitool(driver_info, cmd) if out: for line in out.split('\n'): if line and 'port' in line and 'Adapter' in line: value = _parse_ipmi_nic_capacity(line) if value is not None: break return value
def get_nic_capacity(driver_info, ilo_fw)
Gets the FRU data to see if it is NIC data Gets the FRU data in loop from 0-255 FRU Ids and check if the returned data is NIC data. Couldn't find any easy way to detect if it is NIC data. We shouldn't be hardcoding the FRU Id. :param driver_info: Contains the access credentials to access the BMC. :param ilo_fw: a tuple containing major and minor versions of firmware :returns: the max capacity supported by the NIC adapter.
3.770625
3.784557
0.996319
if (("Device not present" in nic_out) or ("Unknown FRU header" in nic_out) or not nic_out): return None capacity = None product_name = None data = nic_out.split('\n') for item in data: fields = item.split(':') if len(fields) > 1: first_field = fields[0].strip() if first_field == "Product Name": # Join the string back if the Product Name had some # ':' by any chance product_name = ':'.join(fields[1:]) break if product_name: product_name_array = product_name.split(' ') for item in product_name_array: if 'Gb' in item: capacity_int = item.strip('Gb') if capacity_int.isdigit(): capacity = item return capacity
def _parse_ipmi_nic_capacity(nic_out)
Parse the FRU output for NIC capacity Parses the FRU output. Searches for the key "Product Name" in FRU output and greps for maximum speed supported by the NIC adapter. :param nic_out: the FRU output for NIC adapter. :returns: the max capacity supported by the NIC adapter.
4.395852
4.135821
1.062873
def logical_drives(self):
    """Return the HPELogicalDriveCollection of this ArrayController."""
    collection_path = utils.get_subresource_path_by(
        self, ['Links', 'LogicalDrives'])
    return logical_drive.HPELogicalDriveCollection(
        self._conn, collection_path, redfish_version=self.redfish_version)
Gets the resource HPELogicalDriveCollection of ArrayControllers
7.223423
4.375566
1.650855
def physical_drives(self):
    """Return the HPEPhysicalDriveCollection of this ArrayController."""
    collection_path = utils.get_subresource_path_by(
        self, ['Links', 'PhysicalDrives'])
    return physical_drive.HPEPhysicalDriveCollection(
        self._conn, collection_path, redfish_version=self.redfish_version)
Gets the resource HPEPhysicalDriveCollection of ArrayControllers
6.076555
4.007037
1.516471
def logical_drives_maximum_size_mib(self):
    """Size of the biggest logical drive across all member controllers.

    :returns: the size in MiB.
    """
    sizes = [member.logical_drives.maximum_size_mib
             for member in self.get_members()]
    return utils.max_safe(sizes)
Gets the biggest logical drive :returns the size in MiB.
8.345242
10.025962
0.832363
def physical_drives_maximum_size_mib(self):
    """Size of the biggest physical disk across all member controllers.

    :returns: the size in MiB.
    """
    sizes = [member.physical_drives.maximum_size_mib
             for member in self.get_members()]
    return utils.max_safe(sizes)
Gets the biggest disk :returns the size in MiB.
8.752318
10.278767
0.851495
def has_ssd(self):
    """Return True if any drive under the array controllers is an SSD."""
    # any() short-circuits exactly like the original early-return loop.
    return any(member.physical_drives.has_ssd
               for member in self.get_members())
Return true if any of the drives under ArrayControllers is ssd
7.780179
6.207592
1.253333
def has_rotational(self):
    """Return True if any drive under the array controllers is rotational.

    (The original docstring said "ssd" -- a copy-paste slip; the code
    checks the rotational flag.)
    """
    # any() short-circuits exactly like the original early-return loop.
    return any(member.physical_drives.has_rotational
               for member in self.get_members())
Return true if any of the drives under ArrayControllers is rotational
8.2616
6.684499
1.235934
def logical_raid_levels(self):
    """Collect the RAID level of every configured logical volume.

    :returns: the set of RAID levels found across member controllers.
    """
    levels = set()
    for controller in self.get_members():
        for level in controller.logical_drives.logical_raid_levels:
            levels.add(level)
    return levels
Gets the raid level for each logical volume :returns the set of list of raid levels configured
3.949955
4.5191
0.874058
def array_controller_by_location(self, location):
    """Return the member array controller at *location*.

    :returns: the matching array controller instance, or None.
    """
    return next((member for member in self.get_members()
                 if member.location == location), None)
Returns array controller instance by location :returns Instance of array controller
4.789507
6.711198
0.713659
def array_controller_by_model(self, model):
    """Return the member array controller with the given *model*.

    :returns: the matching array controller instance, or None.
    """
    return next((member for member in self.get_members()
                 if member.model == model), None)
Returns array controller instance by model :returns Instance of array controller
5.174683
6.640856
0.779219
def get_subresource_path_by(resource, subresource_path):
    """Resolve a subresource's '@odata.id' URI from a resource's JSON.

    :param resource: ResourceBase instance whose JSON is inspected.
    :param subresource_path: a field name, or a list of nested field
        names, leading to the subresource object.
    :raises: MissingAttributeError if the path or its '@odata.id' is absent.
    :raises: ValueError if the path is empty.
    :raises: AttributeError if the json attr is not found in resource.
    """
    if isinstance(subresource_path, six.string_types):
        subresource_path = [subresource_path]
    elif not subresource_path:
        raise ValueError('"subresource_path" cannot be empty')

    # Walk down the nested JSON; a missing key leaves an empty dict.
    body = resource.json
    for path_item in subresource_path:
        body = body.get(path_item, {})
    if not body:
        raise exception.MissingAttributeError(
            attribute='/'.join(subresource_path), resource=resource.path)

    if '@odata.id' not in body:
        raise exception.MissingAttributeError(
            attribute='/'.join(subresource_path) + '/@odata.id',
            resource=resource.path)
    return body['@odata.id']
Helper function to find the resource path :param resource: ResourceBase instance from which the path is loaded. :param subresource_path: JSON field to fetch the value from. Either a string, or a list of strings in case of a nested field. It should also include the '@odata.id' :raises: MissingAttributeError, if required path is missing. :raises: ValueError, if path is empty. :raises: AttributeError, if json attr not found in resource
2.420223
2.204195
1.098008
def get_supported_boot_mode(supported_boot_mode):
    """Map a supported-boot-mode constant to bios/uefi 'true'/'false' flags.

    :param supported_boot_mode: one of the sys_cons.SUPPORTED_* constants.
    :return: SupportedBootModes with 'true'/'false' strings for bios and
        uefi support respectively (both 'false' for unknown values).
    """
    bios = supported_boot_mode in (sys_cons.SUPPORTED_LEGACY_BIOS_ONLY,
                                   sys_cons.SUPPORTED_LEGACY_BIOS_AND_UEFI)
    uefi = supported_boot_mode in (sys_cons.SUPPORTED_UEFI_ONLY,
                                   sys_cons.SUPPORTED_LEGACY_BIOS_AND_UEFI)
    return SupportedBootModes(boot_mode_bios='true' if bios else 'false',
                              boot_mode_uefi='true' if uefi else 'false')
Return bios and uefi support. :param supported_boot_mode: Supported boot modes :return: A tuple of 'true'/'false' based on bios and uefi support respectively.
2.087973
1.95409
1.068514
def get_allowed_operations(resource, subresouce_path):
    """Return the HTTP methods allowed on a subresource.

    :param resource: ResourceBase instance from which the path is loaded.
    :param subresouce_path: JSON field (str or list of str) pointing at
        the subresource.  (Parameter name kept as-is, although
        misspelled, for API compatibility.)
    :returns: the value of the 'Allow' response header.
    """
    target_uri = get_subresource_path_by(resource, subresouce_path)
    return resource._conn.get(path=target_uri).headers['Allow']
Helper function to get the HTTP allowed methods. :param resource: ResourceBase instance from which the path is loaded. :param subresource_path: JSON field to fetch the value from. Either a string, or a list of strings in case of a nested field. :returns: A list of allowed HTTP methods.
6.170954
7.153084
0.862698
def _set_next_host_location(self, context):
    '''
    Swap the request over to the alternate host location, if one exists.

    :param ~azure.storage.models.RetryContext context:
        The retry context containing the previous host location and the
        request to evaluate and possibly modify.
    '''
    if len(context.request.host_locations) <= 1:
        return
    # Flip between primary and secondary for the next attempt.
    if context.location_mode == LocationMode.PRIMARY:
        context.location_mode = LocationMode.SECONDARY
    else:
        context.location_mode = LocationMode.PRIMARY
    context.request.host = context.request.host_locations.get(context.location_mode)
A function which sets the next host location on the request, if applicable. :param ~azure.storage.models.RetryContext context: The retry context containing the previous host location and the request to evaluate and possibly modify.
5.552775
2.442109
2.273762
def _op(self, method, path='', data=None, headers=None):
    """Issue an HTTP request, following HPE Gen10 308 redirects manually.

    :param method: HTTP method (GET, POST, PUT, PATCH, ...).
    :param path: The sub-URI path to the resource.
    :param data: Optional JSON data.
    :param headers: Optional dictionary of headers.
    :returns: the response from the base connector's _op method.
    """
    resp = super(HPEConnector, self)._op(method, path, data, headers,
                                         allow_redirects=False)
    if resp.status_code != 308:
        return resp
    # With IPv6, a Gen10 server answers 308 whose Location carries a
    # '/'-prefixed path; retry once against that new path.
    redirect_path = urlparse(resp.headers['Location']).path
    return super(HPEConnector, self)._op(method, redirect_path, data, headers)
Overrides the base method to support retrying the operation. :param method: The HTTP method to be used, e.g: GET, POST, PUT, PATCH, etc... :param path: The sub-URI path to the resource. :param data: Optional JSON data. :param headers: Optional dictionary of headers. :returns: The response from the connector.Connector's _op method.
5.755624
6.276699
0.916983
def set_file_service_properties(self, hour_metrics=None, minute_metrics=None,
                                cors=None, timeout=None):
    '''
    Update the account's File service properties (Storage Analytics and
    CORS).  Any element left as None keeps the service's existing
    settings for that functionality.

    :param Metrics hour_metrics: Hourly aggregated request statistics.
    :param Metrics minute_metrics: Per-minute request statistics.
    :param cors: Up to five CorsRule elements; an empty list deletes all
        CORS rules and disables CORS for the service.
    :type cors: list of :class:`~azure.storage.models.CorsRule`
    :param int timeout: The timeout parameter is expressed in seconds.
    '''
    req = HTTPRequest()
    req.method = 'PUT'
    req.host = self._get_host()
    req.path = _get_path()
    req.query = [
        ('restype', 'service'),
        ('comp', 'properties'),
        ('timeout', _int_to_str(timeout)),
    ]
    # first positional argument (presumably logging settings) is kept
    # as None, unchanged from the original call
    req.body = _get_request_body(
        _convert_service_properties_to_xml(None, hour_metrics,
                                           minute_metrics, cors))

    self._perform_request(req)
Sets the properties of a storage account's File service, including Azure Storage Analytics. If an element (ex HourMetrics) is left as None, the existing settings on the service for that functionality are preserved. :param Metrics hour_metrics: The hour metrics settings provide a summary of request statistics grouped by API in hourly aggregates for files. :param Metrics minute_metrics: The minute metrics settings provide request statistics for each minute for files. :param cors: You can include up to five CorsRule elements in the list. If an empty list is specified, all CORS rules will be deleted, and CORS will be disabled for the service. :type cors: list of :class:`~azure.storage.models.CorsRule` :param int timeout: The timeout parameter is expressed in seconds.
2.766907
1.440073
1.921366
def list_shares(self, prefix=None, marker=None, num_results=None,
                include_metadata=False, timeout=None):
    '''
    Lazily enumerate the shares under this account.

    Continuation tokens are followed automatically until all shares have
    been returned or num_results is reached.  If more shares remain, the
    finished generator exposes next_marker, which can seed a new
    generator.

    :param str prefix: Only return shares whose names begin with this.
    :param int num_results: Maximum number of shares to return.
    :param bool include_metadata: Also return share metadata.
    :param str marker: Opaque continuation token taken from a previous
        generator's next_marker.
    :param int timeout: The timeout parameter is expressed in seconds.
    '''
    operation_kwargs = {
        'prefix': prefix,
        'marker': marker,
        'max_results': num_results,
        'include': 'metadata' if include_metadata else None,
        'timeout': timeout,
    }
    first_page = self._list_shares(**operation_kwargs)
    return ListGenerator(first_page, self._list_shares, (), operation_kwargs)
Returns a generator to list the shares under the specified account. The generator will lazily follow the continuation tokens returned by the service and stop when all shares have been returned or num_results is reached. If num_results is specified and the account has more than that number of shares, the generator will have a populated next_marker field once it finishes. This marker can be used to create a new generator if more results are desired. :param str prefix: Filters the results to return only shares whose names begin with the specified prefix. :param int num_results: Specifies the maximum number of shares to return. :param bool include_metadata: Specifies that share metadata be returned in the response. :param str marker: An opaque continuation token. This value can be retrieved from the next_marker field of a previous generator object if num_results was specified and that generator has finished enumerating results. If specified, this generator will begin returning results from the point where the previous generator stopped. :param int timeout: The timeout parameter is expressed in seconds.
2.721279
1.402535
1.940257
def get_share_properties(self, share_name, timeout=None):
    '''
    Fetch the system properties and user-defined metadata for a share
    (the share's file/directory listing is not included).

    :param str share_name: Name of existing share.
    :param int timeout: The timeout parameter is expressed in seconds.
    :return: A Share that exposes properties and metadata.
    :rtype: :class:`.Share`
    '''
    _validate_not_none('share_name', share_name)

    req = HTTPRequest()
    req.method = 'GET'
    req.host = self._get_host()
    req.path = _get_path(share_name)
    req.query = [
        ('restype', 'share'),
        ('timeout', _int_to_str(timeout)),
    ]

    return _parse_share(share_name, self._perform_request(req))
Returns all user-defined metadata and system properties for the specified share. The data returned does not include the shares's list of files or directories. :param str share_name: Name of existing share. :param int timeout: The timeout parameter is expressed in seconds. :return: A Share that exposes properties and metadata. :rtype: :class:`.Share`
2.315124
1.488496
1.555344
def set_share_properties(self, share_name, quota, timeout=None):
    '''
    Set service-defined properties (the quota) on an existing share.

    :param str share_name: Name of existing share.
    :param int quota: Maximum share size in gigabytes; must be greater
        than 0 and at most 5120 GB (5 TB).
    :param int timeout: The timeout parameter is expressed in seconds.
    '''
    _validate_not_none('share_name', share_name)
    _validate_not_none('quota', quota)

    req = HTTPRequest()
    req.method = 'PUT'
    req.host = self._get_host()
    req.path = _get_path(share_name)
    req.query = [
        ('restype', 'share'),
        ('comp', 'properties'),
        ('timeout', _int_to_str(timeout)),
    ]
    req.headers = [('x-ms-share-quota', _int_to_str(quota))]

    self._perform_request(req)
Sets service-defined properties for the specified share. :param str share_name: Name of existing share. :param int quota: Specifies the maximum size of the share, in gigabytes. Must be greater than 0, and less than or equal to 5 TB (5120 GB). :param int timeout: The timeout parameter is expressed in seconds.
1.865655
1.467805
1.271051
def get_share_metadata(self, share_name, timeout=None):
    '''
    Fetch all user-defined metadata for an existing share.

    :param str share_name: Name of existing share.
    :param int timeout: The timeout parameter is expressed in seconds.
    :return: A dictionary of the share's metadata name/value pairs.
    :rtype: a dict mapping str to str
    '''
    _validate_not_none('share_name', share_name)

    req = HTTPRequest()
    req.method = 'GET'
    req.host = self._get_host()
    req.path = _get_path(share_name)
    req.query = [
        ('restype', 'share'),
        ('comp', 'metadata'),
        ('timeout', _int_to_str(timeout)),
    ]

    return _parse_metadata(self._perform_request(req))
Returns all user-defined metadata for the specified share. :param str share_name: Name of existing share. :param int timeout: The timeout parameter is expressed in seconds. :return: A dictionary representing the share metadata name, value pairs. :rtype: a dict mapping str to str
2.11276
1.451742
1.455328
def get_share_stats(self, share_name, timeout=None):
    '''
    Approximate size of the data stored on the share, rounded up to the
    nearest gigabyte.  Recently created or resized files may not yet be
    reflected in this value.

    :param str share_name: Name of existing share.
    :param int timeout: The timeout parameter is expressed in seconds.
    :return: the approximate size of the data stored on the share.
    :rtype: int
    '''
    _validate_not_none('share_name', share_name)

    req = HTTPRequest()
    req.method = 'GET'
    req.host = self._get_host()
    req.path = _get_path(share_name)
    req.query = [
        ('restype', 'share'),
        ('comp', 'stats'),
        ('timeout', _int_to_str(timeout)),
    ]

    return _convert_xml_to_share_stats(self._perform_request(req))
Gets the approximate size of the data stored on the share, rounded up to the nearest gigabyte. Note that this value may not include all recently created or recently resized files. :param str share_name: Name of existing share. :param int timeout: The timeout parameter is expressed in seconds. :return: the approximate size of the data stored on the share. :rtype: int
2.472788
1.468078
1.684371
def list_directories_and_files(self, share_name, directory_name=None,
                               num_results=None, marker=None, timeout=None):
    '''
    Lazily enumerate the directories and files under the given share.

    Continuation tokens are followed automatically until everything has
    been returned or num_results is reached; if more results remain, the
    finished generator exposes next_marker, which can seed a new
    generator.

    :param str share_name: Name of existing share.
    :param str directory_name: The path to the directory.
    :param int num_results: Maximum number of entries to return,
        directories included.  A single server page is capped at 5,000;
        values <= 0 yield a 400 (Bad Request).
    :param str marker: Opaque continuation token taken from a previous
        generator's next_marker.
    :param int timeout: The timeout parameter is expressed in seconds.
    '''
    operation_args = (share_name, directory_name)
    operation_kwargs = {'marker': marker,
                        'max_results': num_results,
                        'timeout': timeout}
    first_page = self._list_directories_and_files(*operation_args,
                                                  **operation_kwargs)
    return ListGenerator(first_page, self._list_directories_and_files,
                         operation_args, operation_kwargs)
Returns a generator to list the directories and files under the specified share. The generator will lazily follow the continuation tokens returned by the service and stop when all directories and files have been returned or num_results is reached. If num_results is specified and the share has more than that number of containers, the generator will have a populated next_marker field once it finishes. This marker can be used to create a new generator if more results are desired. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param int num_results: Specifies the maximum number of files to return, including all directory elements. If the request does not specify num_results or specifies a value greater than 5,000, the server will return up to 5,000 items. Setting num_results to a value less than or equal to zero results in error response code 400 (Bad Request). :param str marker: An opaque continuation token. This value can be retrieved from the next_marker field of a previous generator object if num_results was specified and that generator has finished enumerating results. If specified, this generator will begin returning results from the point where the previous generator stopped. :param int timeout: The timeout parameter is expressed in seconds.
2.350826
1.313254
1.790076
def _list_directories_and_files(self, share_name, directory_name=None,
                                marker=None, max_results=None, timeout=None):
    '''
    Fetch a single page of the directory/file listing for a share.

    :param str share_name: Name of existing share.
    :param str directory_name: The path to the directory.
    :param str marker: Continuation token (opaque to the client) returned
        as next_marker by a previous, incomplete listing call.
    :param int max_results: Maximum entries per page, directories
        included.  The server caps a page at 5,000; values <= 0 yield a
        400 (Bad Request).
    :param int timeout: The timeout parameter is expressed in seconds.
    '''
    _validate_not_none('share_name', share_name)

    req = HTTPRequest()
    req.method = 'GET'
    req.host = self._get_host()
    req.path = _get_path(share_name, directory_name)
    req.query = [
        ('restype', 'directory'),
        ('comp', 'list'),
        ('marker', _to_str(marker)),
        ('maxresults', _int_to_str(max_results)),
        ('timeout', _int_to_str(timeout)),
    ]

    return _convert_xml_to_directories_and_files(self._perform_request(req))
Returns a list of the directories and files under the specified share. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str marker: A string value that identifies the portion of the list to be returned with the next list operation. The operation returns a next_marker value within the response body if the list returned was not complete. The marker value may then be used in a subsequent call to request the next set of list items. The marker value is opaque to the client. :param int max_results: Specifies the maximum number of files to return, including all directory elements. If the request does not specify max_results or specifies a value greater than 5,000, the server will return up to 5,000 items. Setting max_results to a value less than or equal to zero results in error response code 400 (Bad Request). :param int timeout: The timeout parameter is expressed in seconds.
1.655716
1.260597
1.313438
def set_file_properties(self, share_name, directory_name, file_name,
                        content_settings, timeout=None):
    '''
    Sets system properties on the file. If one property is set for the
    content_settings, all properties will be overriden.

    :param str share_name: Name of existing share.
    :param str directory_name: The path to the directory.
    :param str file_name: Name of existing file.
    :param ~azure.storage.file.models.ContentSettings content_settings:
        ContentSettings object used to set the file properties.
    :param int timeout: The timeout parameter is expressed in seconds.
    '''
    _validate_not_none('share_name', share_name)
    _validate_not_none('file_name', file_name)
    _validate_not_none('content_settings', content_settings)
    request = HTTPRequest()
    request.method = 'PUT'
    request.host = self._get_host()
    request.path = _get_path(share_name, directory_name, file_name)
    request.query = [
        ('comp', 'properties'),
        ('timeout', _int_to_str(timeout)),
    ]
    # Removed a dead "request.headers = None" assignment that was
    # immediately overwritten on the next line.
    request.headers = content_settings._to_headers()
    self._perform_request(request)
Sets system properties on the file. If one property is set for the content_settings, all properties will be overridden. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of existing file. :param ~azure.storage.file.models.ContentSettings content_settings: ContentSettings object used to set the file properties. :param int timeout: The timeout parameter is expressed in seconds.
1.75513
1.376115
1.275424
def copy_file(self, share_name, directory_name, file_name, copy_source,
              metadata=None, timeout=None):
    '''
    Copies a blob or file to a destination file within the storage account.

    :param str share_name:
        Name of existing share.
    :param str directory_name:
        The path to the directory.
    :param str file_name:
        Name of existing file.
    :param str copy_source:
        Specifies the URL of the source blob or file, up to 2 KB in length.
        A source file in the same account can be private, but a file in
        another account must be public or accept credentials included in
        this URL, such as a Shared Access Signature. Examples:
        https://myaccount.file.core.windows.net/myshare/mydirectory/myfile
    :param metadata:
        Dict containing name, value pairs.
    :type metadata: A dict mapping str to str.
    :param int timeout:
        The timeout parameter is expressed in seconds.
    :return: Copy operation properties such as status, source, and ID.
    :rtype: :class:`~azure.storage.file.models.CopyProperties`
    '''
    # Required arguments, validated up front.
    for arg_name, arg_value in (('share_name', share_name),
                                ('file_name', file_name),
                                ('copy_source', copy_source)):
        _validate_not_none(arg_name, arg_value)

    request = HTTPRequest()
    request.method = 'PUT'
    request.host = self._get_host()
    request.path = _get_path(share_name, directory_name, file_name)
    request.query = [('timeout', _int_to_str(timeout))]
    request.headers = [
        ('x-ms-copy-source', _to_str(copy_source)),
        ('x-ms-meta-name-values', metadata),
    ]

    response = self._perform_request(request)
    return _parse_properties(response, FileProperties).copy
Copies a blob or file to a destination file within the storage account. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of existing file. :param str copy_source: Specifies the URL of the source blob or file, up to 2 KB in length. A source file in the same account can be private, but a file in another account must be public or accept credentials included in this URL, such as a Shared Access Signature. Examples: https://myaccount.file.core.windows.net/myshare/mydirectory/myfile :param metadata: Dict containing name, value pairs. :type metadata: A dict mapping str to str. :param int timeout: The timeout parameter is expressed in seconds. :return: Copy operation properties such as status, source, and ID. :rtype: :class:`~azure.storage.file.models.CopyProperties`
2.488065
1.27764
1.947392
def create_file(self, share_name, directory_name, file_name,
                content_length, content_settings=None, metadata=None,
                timeout=None):
    '''
    Creates a new file.

    See create_file_from_* for high level functions that handle the
    creation and upload of large files with automatic chunking and
    progress notifications.

    :param str share_name:
        Name of existing share.
    :param str directory_name:
        The path to the directory.
    :param str file_name:
        Name of file to create or update.
    :param int content_length:
        Length of the file in bytes.
    :param ~azure.storage.file.models.ContentSettings content_settings:
        ContentSettings object used to set file properties.
    :param metadata:
        Name-value pairs associated with the file as metadata.
    :type metadata: a dict mapping str to str
    :param int timeout:
        The timeout parameter is expressed in seconds.
    '''
    _validate_not_none('share_name', share_name)
    _validate_not_none('file_name', file_name)
    _validate_not_none('content_length', content_length)

    request = HTTPRequest()
    request.method = 'PUT'
    request.host = self._get_host()
    request.path = _get_path(share_name, directory_name, file_name)
    request.query = [('timeout', _int_to_str(timeout))]

    # Base headers for an empty-file creation; x-ms-type marks this as a
    # file (as opposed to a directory) resource.
    headers = [
        ('x-ms-meta-name-values', metadata),
        ('x-ms-content-length', _to_str(content_length)),
        ('x-ms-type', 'file'),
    ]
    if content_settings is not None:
        headers += content_settings._to_headers()
    request.headers = headers

    self._perform_request(request)
Creates a new file. See create_file_from_* for high level functions that handle the creation and upload of large files with automatic chunking and progress notifications. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of file to create or update. :param int content_length: Length of the file in bytes. :param ~azure.storage.file.models.ContentSettings content_settings: ContentSettings object used to set file properties. :param metadata: Name-value pairs associated with the file as metadata. :type metadata: a dict mapping str to str :param int timeout: The timeout parameter is expressed in seconds.
1.778751
1.350686
1.316924