content
stringlengths
42
6.51k
def _check_for_object_list(row, obj_type): """Check to see if object is in the list of valid objects. row[in] A row containing an object obj_type[in] Object type to find Returns (bool) - True = object is obj_type False = object is not obj_type """ if row[0:len(obj_type) + 2].upper() == "# %s" % obj_type: if row.find("none found") < 0: return True else: return False else: return False
def new_or_old(user_response):
    """Map a user's menu answer to a boolean for user_id_input.

    :param user_response: string from user input ('1' or '2' are valid)
    :return: False for '1', True for '2'; prints an error and returns None
             for anything else
    """
    choices = {'1': False, '2': True}
    if user_response in choices:
        return choices[user_response]
    print("Invalid response. Please type '1' for the former, and '2' for the latter.")
def tool_list(platform, env):
    """Return the tools generated by the top-level `gettext` tool.

    Both parameters are accepted for interface compatibility and unused here.
    """
    names = ('xgettext', 'msginit', 'msgmerge', 'msgfmt')
    return list(names)
def chart_at(string, index):
    """Return the character at *index*, or '' when the index is out of range.

    Uses slicing so out-of-range (and negative) indices never raise; a
    negative index yields '' exactly as the original slice semantics did.
    """
    end = index + 1
    return string[index:end]
def filter_record_keys(record_list, whitelist_keys):
    """Strip verbose entries from each record for notification format.

    :param record_list: list of dicts to process
    :param whitelist_keys: keys to keep in each record dict
    :return: new list of dicts containing only whitelisted keys
    """
    return [
        {key: val for key, val in record.items() if key in whitelist_keys}
        for record in record_list
    ]
def coloration(i, j, tab):
    """Return the color associated with the grid cell at (i, j).

    Args:
        i (int): row index
        j (int): column index
        tab (array): hexagonal grid

    Returns:
        str: hex color for the hexagon value (default gray for unknown values)
    """
    palette = {
        0.5: "#33b2cc",  # liquid
        1.0: "#ffffff",  # hole
        2.0: "#000000",  # wall
    }
    return palette.get(tab[i][j], "#8c8c8c")
def filter_connections(connections, annotations):
    """Keep connections labelled 'Equal' or never annotated.

    :param list connections: candidate connections.
    :param list annotations: annotations from the prodigy db-out command;
        each annotation's "_input_hash" is used as an index into connections.
    :returns: filtered connections, equal-labelled first then unannotated.
    :rtype: list
    """
    annotated = {ann["_input_hash"] for ann in annotations}
    # 1 corresponds to the equal label.
    keep = [ann["_input_hash"] for ann in annotations
            if ann["answer"] == "accept" and 1 in ann["accept"]]
    keep += [i for i in range(len(connections)) if i not in annotated]
    return [connections[i] for i in keep]
def threeNumberSum(array, target_sum):
    """Find all triplets in *array* summing to *target_sum* (sorted output).

    Sorts the array in place, then runs a two-pointer scan per anchor.

    Time complexity: O(n^2)
    Space complexity: O(n)
    """
    array.sort()
    triplets = []
    last = len(array) - 1
    for idx, first in enumerate(array):
        lo, hi = idx + 1, last
        while lo < hi:
            second, third = array[lo], array[hi]
            total = first + second + third
            if total > target_sum:
                hi -= 1          # too big: shrink from the right
            elif total < target_sum:
                lo += 1          # too small: grow from the left
            else:
                triplets.append([first, second, third])
                # Moving only one pointer would overshoot; advance both.
                lo += 1
                hi -= 1
    return triplets
def check_data_dimension(record: list, dimensions: int) -> bool:
    """Return True when *record* has exactly *dimensions* elements.

    Parameters
    ----------
    record : list
        Nested iterable.
    dimensions : int
        Expected number of nested dimensions.

    Returns
    -------
    : bool
        True when len(record) equals dimensions, False otherwise.
    """
    return dimensions == len(record)
def filter_hosts_by_initiators(hosts, initiators):
    """Filter hosts by given list of initiators.

    :param hosts: list of PowerStore host objects (dicts)
    :param initiators: list of initiator port names
    :return: hosts having at least one matching initiator, original order kept
    """
    matched_names = {
        host["name"]
        for host in hosts
        for initiator in host["host_initiators"]
        if initiator["port_name"] in initiators
    }
    return [host for host in hosts if host["name"] in matched_names]
def get_stairs(all_results, report_name, min_length=50):
    """ Given a list of Result objects and the name of a control variable time series, return two lists - all_vals: all discovered flat plateau values in the report_name time series. The plateau valuse for all reults are merged into one ordered list with unique values only - all_lims: same length as all_vals; each item is a list of the same length as all_results and each list inside this inner list is a tuple of start and end indices of the flat part; or (0, 0, 0) if there was no such flat part in that result object The indices returned are (istart_trans, istart, iend): - istart_trans: where the unsteady transition starts ("step up") - istart: where the steady part of this "stair tread" starts - iend: where the steady part of this "stair tread" ends """
    # Find start and end indices of flat parts of the control value time series
    all_res = []        # per-result list of [value, start_index, end_index] plateaus
    all_vals = set()    # unique plateau values seen across all results
    for results in all_results:
        this_res = []
        all_res.append(this_res)
        # NOTE(review): assumes each result exposes a .reports dict of
        # name -> time series; missing report yields an empty series.
        ts = results.reports.get(report_name, [])
        N = len(ts)
        start = True    # True while we are looking for the start of a plateau
        for i in range(N):
            # A plateau starts when three consecutive samples are equal.
            if start and i < N - 2 and ts[i] == ts[i + 1] == ts[i + 2]:
                start = False
                this_res.append([ts[i], i, None])
                all_vals.add(ts[i])
                continue
            # The plateau ends when the next sample breaks a run of three equals.
            elif (
                not start
                and N - 1 > i > 1
                and ts[i - 2] == ts[i - 1] == ts[i] != ts[i + 1]
            ):
                start = True
                this_res[-1][-1] = i
        # Series ended while still inside a plateau: close it at the last index.
        if not start:
            this_res[-1][-1] = N - 1

    # For each flat control value, make lists of start and stop indices for each result object
    all_vals = sorted(all_vals)
    all_lims = []
    for val in all_vals:
        all_lims.append([])
        for lims in all_res:
            prevend = 0     # end of the previous plateau = start of the transition
            for v, istart, iend in lims:
                # Only keep plateaus long enough to be considered steady.
                if v == val and iend - istart > min_length:
                    all_lims[-1].append((prevend, istart, iend))
                    break
                prevend = iend
            else:
                # No qualifying plateau for this value in this result.
                all_lims[-1].append((0, 0, 0))

    # Remove values that ended up with no data due to min_length
    all_vals2, all_lims2 = [], []
    for val, lims in zip(all_vals, all_lims):
        all_zero = True
        for i0, i1, i2 in lims:
            if not (i0 == i1 == i2 == 0):
                all_zero = False
        if not all_zero:
            all_vals2.append(val)
            all_lims2.append(lims)
    return all_vals2, all_lims2
def check_uniqueness_in_rows(board: list):
    """ Check buildings of unique height in each row. Return True if buildings in a row have unique length, False otherwise.

    Only the inner 5x5 part of the board (rows 1-5, columns 1-5) is checked.

    >>> check_uniqueness_in_rows(['***21**', '412453*', '423145*', '*543215', '*35214*', '*41532*', '*2*1***'])
    True
    >>> check_uniqueness_in_rows(['***21**', '452453*', '423145*', '*543215', '*35214*', '*41532*', '*2*1***'])
    False
    >>> check_uniqueness_in_rows(['***21**', '412453*', '423145*', '*553215', '*35214*', '*41532*', '*2*1***'])
    False
    """
    for row in board[1:6]:
        inner = row[1:6]
        if any(inner.count(ch) > 1 for ch in inner):
            return False
    return True
def _parse_placekey(placekey): """ Split a Placekey in to what and where parts. :param placekey: Placekey (string) :return: what (string), where (string) """ if '@' in placekey: what, where = placekey.split('@') else: what, where = None, placekey return what, where
def construct_mock_users(email_list, type):
    """Build minimal mock user objects for access-control checks.

    Takes a list of email addresses and a user type; each mock carries only
    email, type, an _is_mock marker and is_superuser=False.
    """
    class MockUser(object):
        def __init__(self, user_dict):
            # Copy every field onto the instance as an attribute.
            self.__dict__.update(user_dict)

    return [
        MockUser({
            'email': email,
            'type': type,
            '_is_mock': True,
            'is_superuser': False,
        })
        for email in email_list
    ]
def levenshtein(s1: str, s2: str) -> int:
    """Return the minimum edit distance between *s1* and *s2*.

    Single-row dynamic-programming formulation of the Levenshtein algorithm:
    `row[d]` holds the distance between the current prefix of s1 and the
    first d characters of s2.
    """
    row = list(range(len(s2) + 1))
    for i, ch1 in enumerate(s1):
        # Walk right-to-left so row[d-1] still holds the previous row's value.
        for col in range(len(row) - 1, 0, -1):
            substitution = row[col - 1] + (ch1 != s2[col - 1])
            row[col] = min(row[col] + 1, substitution)
        row[0] = i + 1
        # Forward pass to settle insertion costs.
        for col in range(1, len(row)):
            row[col] = min(row[col], row[col - 1] + 1)
    return row[-1]
def conv_to_win_path(path):
    """Convert a unix path to a windows one.

    Args:
        path: string representing a unix path (partial)
    """
    return path.replace('/', '\\')
def normalizeLongitude(longitude):
    """Normalize a longitude into [-180, 180), i.e. 180 maps to -180.

    Args:
        longitude: A longitude in signed decimal degrees.
    """
    # Shift by full revolutions until the value lands in range; -180 itself
    # is a valid result and left untouched.
    while not (-180 <= longitude < 180):
        if longitude < -180:
            longitude += 360
        else:
            longitude -= 360
    return longitude
def cast_longlong(value):
    """Cast *value* to a signed 64-bit integer (two's complement).

    Usage:
        cast_longlong((1 << 64) - 1) == -1
        cast_longlong(1 << 63) == -(1 << 63)

    Note: the original docstring's example (`cast_longlong(1 << 63) == -1`)
    was wrong; the high bit alone encodes -(1 << 63).
    """
    value &= 0xFFFFFFFFFFFFFFFF
    if value & 0x8000000000000000:
        # High bit set: interpret as a negative two's-complement number.
        return value - 0x10000000000000000
    return value
def blend(alpha, base=(255, 255, 255), color=(0, 0, 0)):
    """Alpha-blend *color* over *base*.

    :param alpha: float in [0, 1]; weight of *color*
    :param base: 3-element iterable, elements in [0, 255] (defaults to white)
    :param color: 3-element iterable, elements in [0, 255]
    :return: rgb tuple of ints, e.g. (255, 255, 255)
    """
    mixed = []
    for channel in range(3):
        value = alpha * color[channel] + (1 - alpha) * base[channel]
        mixed.append(int(round(value)))
    return tuple(mixed)
def dtimems2a(dtime):
    """Convert a time in milliseconds to 'HH:MM:SS.mmm' (parts omitted when zero).

    Fix: the original used `/=` (true division) which, under Python 3, turned
    the running value into a float so minute/hour math was wrong (e.g. 61000 ms
    rendered as '0:01:01.000' instead of '1:01.000'). Floor division restores
    integer arithmetic.
    """
    dtime = int(dtime)
    msec = dtime % 1000
    dtime //= 1000
    sec = dtime % 60
    dtime //= 60
    minute = dtime % 60
    hour = dtime // 60
    if hour > 0:
        return "%d:%02d:%02d.%03d" % (hour, minute, sec, msec)
    elif minute > 0:
        return "%d:%02d.%03d" % (minute, sec, msec)
    else:
        return "%d.%03d" % (sec, msec)
def de_hex(tin):
    """Replace embedded '0xNNNN' escapes with their character and return the string.

    Fix: the original tested `tin[i:i+1] == 'Ox'` — a one-character slice can
    never equal the two-character marker, and the marker itself used a capital
    letter 'O' instead of zero — so the decode branch was unreachable.
    """
    tout = ''
    i = 0
    # Stop early enough that a full 6-character escape ('0x' + 4 hex digits) fits.
    while i < len(tin) - 5:
        if tin[i:i + 2] == '0x':
            tout += chr(int(tin[i:i + 6], 16))
            i += 6
        else:
            tout += tin[i]
            i += 1
    tout += tin[i:]
    return tout
def shot_type(play_description):
    """Extract the shot type from a play description.

    :param play_description: comma-separated description text
    :return: the type if present ('' otherwise); wrist/slap/snap get
             ' shot' appended
    """
    types = ['wrist', 'snap', 'slap', 'deflected', 'tip-in', 'backhand', 'wrap-around']
    # Normalize every comma-separated piece: trim whitespace, lowercase.
    pieces = [piece.strip().lower() for piece in play_description.split(',')]
    for piece in pieces:
        if piece in types:
            if piece in ('wrist', 'slap', 'snap'):
                return ' '.join([piece, 'shot'])
            return piece
    return ''
def CommentCheck(line):
    """Return True when *line* is a comment.

    A line is a comment when its first non-space character is '!'.
    Lines that are empty or contain only spaces are not comments.

    param {*} line
    return {*} bool
    """
    # Idiomatic replacement for the original index loop: strip leading
    # spaces (only ' ', matching the original check) and test the first char.
    return line.lstrip(" ").startswith("!")
def validate_PEM(cert: str) -> bool:
    """Simply verify the PEM certificate format (header and footer markers)."""
    has_header = cert.startswith('-----BEGIN CERTIFICATE-----\n')
    has_footer = cert.endswith('\n-----END CERTIFICATE-----')
    return has_header and has_footer
def getRefId(refs, refname):
    """Return the index of *refname* within *refs*, or -1 when absent."""
    for index, name in enumerate(refs):
        if name == refname:
            return index
    return -1
def eth_addr(f):
    """Format the first six bytes of an ethernet frame as a MAC address string.

    :param f: eth frame (indexable sequence of ints, e.g. bytes)
    """
    return ':'.join('%.2x' % f[i] for i in range(6))
def percentage(sub, all):
    """Calculate the percent relation between *sub* and *all*.

    Args:
        sub (int): Some value.
        all (int): Maximum value (must be non-zero).

    Returns:
        int: (sub * 100) / all, truncated toward zero.
    """
    return int(sub * 100 / all)
def hill_func(x, a, b, c, d):
    """Hill function commonly used to fit MIC curves.

    Args:
        x (numpy.array): concentration vector (n, 1)
        a (float): lower asymptote
        b (float): upper asymptote
        c (float): midpoint concentration
        d (float): slope coefficient

    Returns:
        y (float)
    """
    ratio = (x / c) ** d
    return a + (b - a) / (1 + ratio)
def object_comparator_strict(src_obj, dst_obj):
    """ Compare an object with another entry by entry

    Both arguments are lists of single-key dicts. Returns 0 when both lists
    have the same length and every positional entry matches in key and value;
    returns 1 otherwise.

    NOTE(review): this mutates dst_obj — the first entry whose (only) key is
    "last-modified" is deleted in place before comparing.
    """
    # Drop the first "last-modified" entry from the destination, if present.
    for i in range(len(dst_obj)):
        if list(dst_obj[i].keys())[0] == "last-modified":
            del dst_obj[i]
            break
    dont_match = []  # NOTE(review): unused; kept for byte-compatibility
    failed_keys = 0
    failed_values = 0
    count = 0
    if len(src_obj) == len(dst_obj):
        # Positional comparison: entry i of src against entry i of dst.
        for i in src_obj:
            if list(i.keys())[0] == list(dst_obj[count].keys())[0]:
                # Same key: compare the values behind it.
                if (
                    i[list(i.keys())[0]]
                    != dst_obj[count][list(dst_obj[count].keys())[0]]
                ):
                    failed_values += 1
            else:
                failed_keys += 1
            count += 1
        if failed_keys or failed_values:
            return 1
        else:
            return 0
    else:
        # Different lengths can never match.
        return 1
def update_all_dists(all_dists, match):
    """Remove distances of centers that have already been matched.

    Keeps only entries whose pair (entry[1]) shares neither coordinate
    with *match*.
    """
    survivors = []
    for entry in all_dists:
        pair = entry[1]
        if pair[0] != match[0] and pair[1] != match[1]:
            survivors.append(entry)
    return survivors
def reverse_lookup(d, v):
    """Return a list of all keys in *d* mapping to *v* (empty when none)."""
    return [key for key, value in d.items() if value == v]
def rotaciona_array(lista: list, k: int):
    """Rotate *lista* to the right by *k* positions and return a new list.

    >>> rotaciona_array([1, 2, 3, 4, 5, 6, 7], 3)
    [5, 6, 7, 1, 2, 3, 4]

    Fixes over the original: k larger than len(lista) is reduced modulo the
    length (the original returned the list unrotated), and an empty list no
    longer raises on the modulo.

    :param lista: list to rotate (not modified)
    :param k: number of positions to rotate right
    :return: new rotated list
    """
    if not lista:
        return []
    k %= len(lista)  # generalizes to k >= len(lista); k == 0 yields a copy
    primeira_fatia = lista[-k:]
    segunda_fatia = lista[:-k] if k else []
    return primeira_fatia + segunda_fatia
def read_int(b, i, size=2):
    """Read a signed big-endian integer from a bytearray.

    b -- bytearray
    i -- starting index
    size -- number of bytes to read
    """
    chunk = b[i:i + size]
    return int.from_bytes(chunk, byteorder='big', signed=True)
def to_unicode(s, encoding='utf-8'):
    """Convert to unicode.

    :param s: str or bytes
    :param encoding: encode type used when *s* is bytes
    :return: str

    Fix: under Python 3, str has no .decode, so the original raised
    AttributeError when given an already-decoded string; str input is now
    passed through unchanged.
    """
    if isinstance(s, bytes):
        return s.decode(encoding)
    return s
def split(p):
    """Split a pathname.

    Returns tuple "(head, tail)" where "tail" is everything after the final
    slash. Either part may be empty. Trailing slashes are stripped from head
    unless head consists solely of slashes (e.g. '//').
    """
    cut = p.rfind('/') + 1
    head, tail = p[:cut], p[cut:]
    if head and set(head) != {'/'}:
        head = head.rstrip('/')
    return (head, tail)
def verticesBody(o, x, y, z, h=None, top=None, override=None): """Calculates the vertices of the building block/body depending on the input.""" #-- If the h value is not supplied than it is zero if not h: h = 0.0 if top: if top < 1.5: z = z + float(top) * h elif top is None: if override: z = override else: z = z + h p = [] p0 = "%s %s %s" % (o[0],o[1],o[2]) p.append(p0) p1 = "%s %s %s" % (o[0]+x,o[1],o[2]) p.append(p1) p2 = "%s %s %s" % (o[0]+x,o[1]+y,o[2]) p.append(p2) p3 = "%s %s %s" % (o[0],o[1]+y,o[2]) p.append(p3) p4 = "%s %s %s" % (o[0],o[1],o[2]+z) p.append(p4) p5 = "%s %s %s" % (o[0]+x,o[1],o[2]+z) p.append(p5) p6 = "%s %s %s" % (o[0]+x,o[1]+y,o[2]+z) p.append(p6) p7 = "%s %s %s" % (o[0],o[1]+y,o[2]+z) p.append(p7) return p
def process_meta_data(s):
    """Extract the embedded metadata dict from a csr file given as a string.

    Strips '#' and newlines from the whole string and evaluates the remainder;
    returns the metadata dict, or None when it is absent, falsy, fails to
    evaluate, or lacks the OBR_REPORT_VERSION key.
    """
    if len(s) < 2:
        return None
    if "#" not in s:
        return None
    try:
        cleaned = s.replace("#", "").replace("\n", "")
        # NOTE(review): eval on file contents is unsafe for untrusted input —
        # consider ast.literal_eval if the metadata is always a literal dict.
        metadata = eval(cleaned)
        if not metadata:
            return None
        if "OBR_REPORT_VERSION" not in metadata.keys():
            return None
        return metadata
    except Exception:
        # Any parse/attribute failure means no usable metadata.
        return None
def remove_invalid(s: str) -> str:
    """Strip characters Windows forbids in filenames.

    Double quotes become single quotes; the rest are removed outright.

    :param s: string to clean
    :return: the given string without invalid characters
    """
    cleaned = s.replace('"', "'")
    for forbidden in '\\/:*?<>|':
        cleaned = cleaned.replace(forbidden, "")
    return cleaned
def encode_samples(tokenized_samples, word_to_idx):
    """Encode each token to its index; unknown tokens map to 0."""
    return [
        [word_to_idx.get(token, 0) for token in sample]
        for sample in tokenized_samples
    ]
def min_required_char(text: str) -> int:
    """NAIVE VERSION! Minimum characters to prepend to make *text* a palindrome.

    NOTE: the only operation allowed is inserting characters at the beginning
    of the string. Runtime: O(n^2).

    Fix: removed the two leftover debug print() calls that ran on every
    mismatch; behavior is otherwise unchanged.

    Args:
        text (str): given string

    Returns:
        int: min required chars to make the string palindromic
    """
    if not text:
        return 0
    left = 0
    right = len(text) - 1
    sliding_window = 2
    while left <= right:
        if text[left] != text[right]:
            # Mismatch: restart the scan against a prefix one shorter.
            right = len(text) - sliding_window
            left = 0
            sliding_window += 1
        else:
            right -= 1
            left += 1
    return sliding_window - 2
def get_file_names(in_list):
    """Collect index[1] from each item of a list of lists.

    Deployed in the get_medical_image_list route.

    :param in_list: list of lists containing patient medical images and file names
    :return: list containing file names
    """
    return [entry[1] for entry in in_list]
def chain_value(row, attribute_ids):
    """Join the values at the given attribute ids into an identifier.

    :param row: a row of the table, e.g. a list of attribute values
        (values are assumed to be strings — join would fail otherwise)
    :param attribute_ids: a set of attribute indices
    :return: '-'-joined values, ids taken in sorted order
    """
    parts = [row[attribute_id] for attribute_id in sorted(attribute_ids)]
    return '-'.join(parts)
def minTuple(tuples):
    """Return the tuple whose first element is smallest.

    Returns None for an empty input (or when every first element is >= 1e100,
    preserving the original sentinel behavior).
    """
    best = None
    threshold = 1e100  # sentinel: anything at or above this is ignored
    for candidate in tuples:
        if candidate[0] < threshold:
            threshold = candidate[0]
            best = candidate
    return best
def inclusion_params_and_context_from_template(context, arg):
    """Expected inclusion_params_and_context_from_template __doc__"""
    message = (
        "inclusion_params_and_context_from_template - Expected result (context value: %s): %s"
        % (context['value'], arg)
    )
    return {"result": message}
def make_file_line_lookup(diff):
    """Get a lookup table for each file in diff, to convert between source
    line number to line number in the diff

    NOTE(review): expects unidiff-style objects — each file exposes
    .target_file and iterates hunks of lines with .is_removed/.is_context/
    .target_line_no/.diff_line_no. Confirm against the caller.
    """
    lookup = {}
    # We need all these variables, since GitHub tracks diff lines per file
    # Python Unidiff doesn't skip lines, so each git diff has 5 lines to skip
    # Unidiff tracks for the diff as a whole, so count lines as well
    processed_lines=0
    processed_files=0
    lines_skipped_per_file=5
    lines_in_previous_files = 0
    for file in diff:
        processed_files += 1
        # Checkpoint for processed_lines, new filename
        lines_in_previous_files = processed_lines
        # target_file is prefixed "b/" — drop the two-character prefix.
        filename = file.target_file[2:]
        lookup[filename] = {}
        for hunk in file:
            for line in hunk:
                # Only added lines get a mapping (not removed, not context).
                if not line.is_removed and not line.is_context:
                    try:
                        lookup[filename][line.target_line_no] = line.diff_line_no \
                            - (lines_skipped_per_file * processed_files) \
                            - lines_in_previous_files
                    except Exception as e:
                        print("Something went wrong. Debug information:",
                              "\nFilename:",filename,
                              "\ntarget_line_no:",line.target_line_no,
                              "\ndiff_line_no:",line.diff_line_no)
                        print(e)
                # Count every diff line so per-file offsets stay correct.
                processed_lines += 1
    return lookup
def computeSetOfParetoPointsTuplesFromListOfParetoPoints(ListOfParetoPointsAsDicts):
    """Convert dict-encoded pareto points to a set of tuples (dedupes).

    Tuple element order follows the key order of the first item's keys().
    """
    result = set()
    if ListOfParetoPointsAsDicts:
        keyset = ListOfParetoPointsAsDicts[0].keys()
        for point in ListOfParetoPointsAsDicts:
            result.add(tuple(point[key] for key in keyset))
    return result
def check_is_valid_number(*args):
    """Validate that every argument parses as a number.

    Fixes over the original: (1) SystemExit was constructed but never raised,
    so invalid input silently returned None — it is now raised; (2) the loop
    returned after the FIRST argument instead of checking them all; (3)
    `elif float(arg)` used the parsed value's truthiness, so "0.0" was treated
    as invalid — float() is now used purely for validation.

    :param args: numbers (strings or numerics) to be checked.
    :return: True when every argument is numeric.
    :raises SystemExit: on the first non-numeric argument.
    """
    for arg in args:
        try:
            float(arg)  # validation only; raises ValueError for non-numbers
        except ValueError:
            raise SystemExit("Not a digit " + str(arg))
    return True
def valid_parentheses(string):
    """Determine whether the parentheses in *string* are balanced.

    Returns True when every ')' has a matching earlier '(' and all '(' are
    closed; other characters are ignored.

    Fix: removed the leftover debug `print(string)` that fired whenever the
    count went negative.

    :raises TypeError: when the argument is not a string.
    """
    if not isinstance(string, str):
        raise TypeError('Argument must be a string')
    depth = 0
    for ch in string:
        if ch == "(":
            depth += 1
        elif ch == ")":
            depth -= 1
        if depth < 0:
            # A closer appeared before its opener.
            return False
    return depth == 0
def escapeToXML(text, isattrib = False):
    """Borrowed from twisted.xish.domish

    Escape text to proper XML form, per section 2.3 in the XML specification.

    @type text: L{str}
    @param text: Text to escape
    @type isattrib: L{bool}
    @param isattrib: Triggers escaping of characters necessary for use as
        attribute values
    """
    replacements = [("&", "&amp;"), ("<", "&lt;"), (">", "&gt;")]
    if isattrib:
        replacements += [("'", "&apos;"), ("\"", "&quot;")]
    # '&' must be replaced first so entity ampersands are not double-escaped.
    for old, new in replacements:
        text = text.replace(old, new)
    return text
def validateEc2Filters( filters, available_filters ):
    """Validate that every user-supplied EC2 filter is supported.

    @parameter{filters, dict} Dictionary of filters provided by user
    @parameter{available_filters, dict} Filters supported by server

    @returns{boolean} True when no unsupported filter is present.
    NOTE(review): despite the original docstring, no exception is raised here —
    the function only returns a boolean.
    """
    return all(name in available_filters for name in filters)
def kruskals(graph): """ Returns the minimum spanning tree of a graph using Kruskal's algorithm. """ # Initialize the minimum spanning tree. mst = [] # Initialize the set of vertices. vertices = set(graph.keys()) # Sort the edges in non-decreasing order. edges = sorted(graph.values(), key=lambda edge: edge.weight) # Iterate over the edges. while edges: # Get the first edge. edge = edges.pop(0) # Get the vertices of the edge. u, v = edge.vertices # Check if the vertices are in the same set. if u.root is not v.root: # Add the edge to the minimum spanning tree. mst.append(edge) # Merge the sets. u.root, v.root = v.root, u.root # Remove the edge from the set of edges. edges.remove(edge) # Return the minimum spanning tree. return mst
def pystr(v_str):
    """Convert string repr of Fortran string to Python string."""
    assert isinstance(v_str, str)
    quote = None
    first = v_str[0]
    if first in ("'", '"') and first == v_str[-1]:
        quote = first
        out = v_str[1:-1]
    else:
        # NOTE: This is non-standard Fortran.
        # For example, gfortran rejects non-delimited strings.
        out = v_str
    # Replace escaped (doubled) quote characters.
    if quote:
        out = out.replace(quote * 2, quote)
    return out
def _PackageIdArgument(x): """Convert a string into a package ID while checking its range. Args: x: argument string. Returns: the package ID as an int, or -1 in case of error. """ try: x = int(x, 0) if x < 0 or x > 127: x = -1 except ValueError: x = -1 return x
def _default(value, default): """Return default value if value is None.""" if value is None: return default return value
def container_img_name(base, dist_name, dist_ver):
    """Generate container image name.

    Return the name of container image for '-t' of docker command such as
    'sppc/dpdk-ubuntu:16.04' or 'sppc/spp-ubuntu:18.04'.
    """
    return f'{base}-{dist_name}:{dist_ver}'
def centre_print(move_it, width):
    """Centre a heading on a defined length. (Deprecated.)

    Inputs - string to centre - width to centre on
    Returns - justified field
    """
    print('centre_print - depracated routine')
    text_len = len(move_it)
    # Left padding = half the free space, then extend to cover the text itself.
    pad = (width - text_len) / 2
    pad = pad + text_len
    centred = move_it.rjust(int(pad), ' ')
    return centred.ljust(width)
def bpmToMs(bpm):
    """Convert BPM to milliseconds between two beats.

    Argument: bpm (int or float) : the bpm to convert.
    Return: (int or float) : time between 2 beats in ms (0 for bpm <= 0).
    """
    return 0 if bpm <= 0 else 60000 / bpm
def recursive_dict_update(a, b):
    """Recursively merge entries from *b* into *a* (b wins for shared keys).

    Does not copy values (aliasing can still be an issue). Raises
    AssertionError when field types differ for shared keys (e.g. a[k] is a
    dict but b[k] is not).
    """
    assert isinstance(a, dict) and isinstance(b, dict), (a, b)
    for key, new_val in b.items():
        if key not in a:
            a[key] = new_val
            continue
        old_val = a[key]
        # Do not allow heterogeneous type updates.
        assert type(old_val) == type(new_val), (key, old_val, new_val)
        if isinstance(new_val, dict):
            a[key] = recursive_dict_update(old_val, new_val)
        else:
            a[key] = new_val
    return a
def find_start_block(job_counts, start):
    """Locate the block containing the *start*-th job.

    :return: (block index, cursor within that block), or (-1, -1) when
             *start* lies beyond the total job count.
    """
    running = 0
    for index, block in enumerate(job_counts):
        running += block.count
        if running > start:
            # Cursor = offset of `start` within this block.
            return index, start - (running - block.count)
    # start isn't available in the given blocks
    return -1, -1
def nodes_on_land_None(nodes, u, v, WD):
    """Identity passthrough used when no land masking should be applied."""
    return (nodes, u, v, WD)
def enrichData(vT1, vT2, people, date_of):
    """Prepare output data: one score record per speaker.

    vT1/vT2 are indexed in lockstep with *people*.
    NOTE(review): date_of is currently unused — kept for interface
    compatibility; confirm with callers.
    """
    enriched = []
    for idx, speaker in enumerate(people):
        enriched.append({
            'person_id': str(speaker),
            'score': {'vT1': vT1[idx], 'vT2': vT2[idx]},
        })
    return enriched
def workspace_command(cmd):
    """Wrap *cmd* so it always runs from the workspace directory.

    :param cmd: a command string, or a list of command strings to chain.
    """
    inner = " && ".join(cmd) if isinstance(cmd, list) else cmd
    return " && ".join(["cd {job.ws}", inner, "cd .."])
def get_quarter_from_month(month: int):
    """Return the quarter (1-4) for a given month (1-12).

    Raises KeyError for months outside 1..12, as the original mapping did.
    """
    month_quarter_map = {m: (m - 1) // 3 + 1 for m in range(1, 13)}
    return month_quarter_map[month]
def make_host_entry(ip_count):
    """Generate the JSON entry for a host in topology.json.

    *ip_count* parameterizes the host's ip, mac, and setup commands.
    """
    n = ip_count
    return {
        "ip": f"10.0.{n}.{n}/24",
        "mac": f"08:00:00:00:0{n}:{n}{n}",
        "commands": [
            f"route add default gw 10.0.{n}.{n}0 dev eth0",
            f"arp -i eth0 -s 10.0.{n}.{n}0 08:00:00:00:0{n}:00",
        ],
    }
def binary_rec_list_op( op, A, B ):
    """Apply a binary operation elementwise to two multivariable polynomials.

    Recurses while both operands are (exactly) lists, zipping pairwise;
    otherwise applies op(A, B) directly.
    """
    if type(A) is list and type(B) is list:
        return [binary_rec_list_op(op, left, right) for left, right in zip(A, B)]
    return op(A, B)
def GetTitle( text ):
    """Given a bit of text which has a form like this:
    '\n\n  Film Title\n  \n  (OmU)\n  '
    return just the film title (the first non-blank line, stripped).
    """
    lines = [line.strip() for line in text.splitlines()]
    nonempty = [line for line in lines if len(line) >= 1]
    return nonempty[0]
def mmedian(lst):
    """Return the median of *lst* (0.0 for an empty list)."""
    ordered = sorted(lst)
    n = len(ordered)
    if n == 0:
        return 0.0
    mid = (n - 1) // 2
    if n % 2:
        return ordered[mid]
    # Even length: average the two middle elements.
    return (ordered[mid] + ordered[mid + 1]) / 2.0
def scaled_loss(x, fn, a):
    """Apply a loss function to a tensor with pre- and post-scaling.

    Args:
        x: the data tensor, should already be squared: `x = y**2`.
        fn: the loss function, with signature `fn(x) -> (loss, d1, d2)`.
        a: the scale parameter.

    Returns:
        The value of the loss, and its first and second derivatives.
    """
    scale_sq = a ** 2
    loss, loss_d1, loss_d2 = fn(x / scale_sq)
    return loss * scale_sq, loss_d1, loss_d2 / scale_sq
def parse_optional_param(default_params, kwargs_dict):
    """Resolve optional parameters against user-supplied keyword values.

    Uses kwargs_dict values when present (the string "None" becomes None),
    falling back to default_params otherwise.

    :param default_params: dictionary of params with default values
    :param kwargs_dict: dictionary of input keyword arguments with values
    :return: dictionary with a value for each key of default_params
    """
    resolved = {}
    for key, default_val in default_params.items():
        if key not in kwargs_dict:
            resolved[key] = default_val
            continue
        val = kwargs_dict[key]
        resolved[key] = None if val == "None" else val
    return resolved
def check_lunches(lunches_dict):
    """Return True if every restaurant posted a (truthy) lunch already.

    Idiom fix: `if all(...): return True else: return False` collapsed to
    returning the boolean directly — behavior unchanged (an empty dict still
    yields True, as all() of an empty iterable is True).
    """
    return all(lunches_dict.values())
def version_tuple(v):
    """Convert a version string to a tuple containing ints.

    Non-numeric version parts become 0; e.g. '0.28.0dev0' -> (0, 28, 0).
    Missing parts are padded with zeros.

    Returns
    -------
    ver_tuple : tuple
        Length 3 tuple representing the major, minor, and patch version.
    """
    parts = v.split(".")
    while len(parts) < 3:
        parts.append('0')
    if len(parts) > 3:
        raise ValueError('Version strings containing more than three parts '
                         'cannot be parsed')
    return tuple(int(part) if part.isnumeric() else 0 for part in parts)
def call_on_if_def(obj, attr_name, callable, default, *args, **kwargs):
    """Call *callable* on obj.attr_name if that attribute exists.

    Returns *default* when the attribute is not defined.
    """
    try:
        attr = getattr(obj, attr_name)
    except AttributeError:
        return default
    return callable(attr, *args, **kwargs)
def poly2str(poly):
    """Convert a polynomial (iterable of terms, each a list of variable
    indices) to a ' + '-joined string representation.

    :param poly: iterable of terms
    :return: string like 'x_{1}x_{2} + x_{3}'
    """
    terms = [
        ''.join('x_{%d}' % var for var in term)
        for term in poly
    ]
    return ' + '.join(terms)
def _doUpgradeThreadProc(oStdOut, asBuf): """Thread procedure for the upgrade test drive.""" asBuf.append(oStdOut.read()); return True;
def sum_pairs(ints, s):
    """Find the first pair (leftmost completing element) summing to *s*.

    :param ints: list of integers.
    :param s: integer sum value.
    :return: [a, b] in order of appearance, or None when no pair exists.
    """
    visited = set()
    for value in ints:
        partner = s - value
        if partner in visited:
            # `value` is the earliest element that completes a pair.
            return [partner, value]
        visited.add(value)
def me(yr, vol):
    """Mill residues for a given roundwood harvest volume (mmbf) and year.

    Linearly interpolates mill efficiency between 0.42 (1970) and 0.48 (2011)
    and extrapolates outside that span.
    """
    eff_1970 = 0.42  # Mill efficiency in 1970
    eff_2011 = 0.48  # Mill efficiency in 2011
    annual_gain = (eff_2011 - eff_1970) / (2011 - 1970)
    if yr <= 1970:
        return vol * (eff_1970 - (1970 - yr) * annual_gain)
    return vol * (eff_1970 + (yr - 1970) * annual_gain)
def dot_prod(a,b):
    """Compute the dot product of two 3-vectors."""
    return sum(a[i] * b[i] for i in range(3))
def run_demo(text):
    """Print *text* and report success."""
    print(text)
    return True
def get_node_index(node):
    """Return the index of *node* among its parent's children, or -1.

    -1 is returned when node is None, has no parent, or (defensively) is
    not found in the parent's children.
    """
    if node is None or node.parent is None:
        return -1
    for position, sibling in enumerate(node.parent.children):
        if sibling is node:  # identity, not equality
            return position
    return -1
def threeSum_faster(nums):
    """Find all unique triplets summing to zero, skipping duplicates directly."""
    result = []
    nums.sort()
    for anchor_idx, anchor in enumerate(nums):
        # Skip anchors equal to the previous one to avoid duplicate triplets.
        if anchor_idx and nums[anchor_idx] == nums[anchor_idx - 1]:
            continue
        target = -anchor
        lo = anchor_idx + 1
        hi = len(nums) - 1
        while lo < hi:
            pair_sum = nums[lo] + nums[hi]
            if pair_sum > target:
                hi -= 1
            elif pair_sum < target:
                lo += 1
            else:
                result.append([anchor, nums[lo], nums[hi]])
                # Step past duplicate values on both ends.
                while lo < hi and nums[lo] == nums[lo + 1]:
                    lo += 1
                while lo < hi and nums[hi] == nums[hi - 1]:
                    hi -= 1
                lo += 1
                hi -= 1
    return result
def add_number_of_different_roles(dev_type: str) -> int:
    """Count roles in a ';'-separated dev_type answer.

    INPUT  dev_type - dev_type answer (separated by ';')
    OUTPUT numeric value - number of listed dev types (0 for non-string
    input such as None/NaN)

    Fix: the bare `except:` (which also swallowed KeyboardInterrupt and
    SystemExit) is narrowed to AttributeError, the only failure mode here —
    a non-string argument lacking .split.
    """
    try:
        return len(dev_type.split(';'))
    except AttributeError:
        return 0
def satoshi(number):
    """Round a float price to satoshi precision (8 decimal places)."""
    return float(format(float(number), ".8f"))
def rename_dictionary_key(entry, dict_map):
    """ Rename dictionary key of particular entry.

    dict_map maps old key -> new key.

    NOTE(review): behavior differs by branch — a dict is mutated IN PLACE and
    raises KeyError when any old key is missing; a list (of dicts) and a tuple
    (of tuples of pairs) are rebuilt, renaming only the keys present; any
    other input type falls through and returns None implicitly. Confirm
    callers rely on these exact semantics.
    """
    if isinstance(entry, dict):
        # In-place rename: pop each old key and reinsert under the new name.
        for old_key, new_key in dict_map.items():
            entry[new_key] = entry.pop(old_key)
        return entry
    elif isinstance(entry, list):
        # Rebuild each dict, renaming keys found in dict_map.
        return [
            dict(
                (dict_map[old_key], value) if old_key in dict_map else (old_key, value)
                for old_key, value in item.items()
            )
            for item in entry
        ]
    elif isinstance(entry, tuple):
        # Rebuild each inner tuple of (key, value) pairs, renaming first slots.
        return tuple(
            tuple(
                (dict_map[value[0]], value[1]) if value[0] in dict_map else value
                for value in item
            )
            for item in entry
        )
def matrix_output_shape(input_shape, options):
    """Fully connected matrix layer: input to output shape conversion.

    Keeps the batch dimension (input_shape[0], coerced to int) and takes the
    output width from options["size"][1].
    """
    batch = int(input_shape[0])
    out_width = options["size"][1]
    return (batch, out_width)
def flatten(list_of_lists):
    """Flatten one level of nesting.

    :param list_of_lists: a list of sub-lists like "[[e_00, ...], ..., [e_n0, ...]]"
    :return: single list of all sub-list items, in order
    """
    flat = []
    for sublist in list_of_lists:
        flat.extend(sublist)
    return flat
def coords_to_float(coord):
    """Convert a latitude/longitude string to a float, honoring N/E/S/W.

    For example, '48.2S' -> -48.2. Values without a recognized suffix (and
    None) are returned unchanged.

    Parameters
    ----------
    coord : str
        The string form of the coordinate.

    Returns
    -------
    float
        The coordinate in float form.
    """
    if coord is None:
        return coord
    text = str(coord)
    suffix = text[-1]
    if suffix in ('N', 'E'):
        return float(text[:-1])
    if suffix in ('S', 'W'):
        # Southern/western hemispheres are negative.
        return -float(text[:-1])
    return coord
def log_rhs(t, y, a, b):
    """Logistic-model right-hand side: dy/dt = a * y * (1 - y / b)."""
    growth = a * y
    saturation = 1.0 - y / b
    return growth * saturation
def count_set_bits(number):
    """
    Returns the number of set bits in number

    Uses Kernighan's trick (``n &= n - 1`` clears the lowest set bit), so
    the loop runs once per set bit rather than once per bit position.

    Args:
        number: non-negative int. (The original ``>>=`` loop never
            terminated for negative inputs; a ValueError is raised
            instead of hanging.)

    Returns:
        int: population count of `number`.

    Raises:
        ValueError: if `number` is negative.
    """
    if number < 0:
        raise ValueError("count_set_bits expects a non-negative integer")
    count = 0
    while number:
        number &= number - 1  # clear the lowest set bit
        count += 1
    return count
def filter_valid(gates):
    """Return a list of only those gates with valid IL, OL, IH and OH"""
    return [gate for gate in gates if gate.has_valid_thresholds]
def csv_list(l):
    """Format list to a string with comma-separated values.
    """
    if not l:
        return ""
    return ", ".join(str(item) for item in l)
def get_base_36_repr(positive_int):
    """Converts a non-negative integer to its base 36 representation

    :param positive_int: the integer to convert. Zero is now handled
        explicitly (the old code returned '' for 0).
    :type positive_int: int
    :return: base 36 representation of the given int
    :rtype: str
    """
    if positive_int == 0:
        # The repeated-division loop below emits no digits for 0.
        return "0"
    digits = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    result = []
    while positive_int:
        # Collect digits least-significant first, reverse at the end.
        result.append(digits[positive_int % 36])
        positive_int //= 36
    return "".join(reversed(result))
def one_to_one_correspondance(observed_peaks, expected_peaks):
    """Check whether every expected peak maps to a distinct observed peak.

    Each expected peak is paired with the nearest observed peak; the
    mapping is one-to-one only if no observed peak is claimed twice.

    Returns:
        (True, mapping) when the mapping is one-to-one, where mapping is
        {expected_peak: closest_observed_peak}; (False, None) otherwise.
    """
    mapping = {}
    for expected in expected_peaks:
        nearest = min(observed_peaks, key=lambda obs: abs(obs - expected))
        mapping[expected] = nearest
    matched = list(mapping.values())
    if len(set(matched)) == len(matched):
        return True, mapping
    return False, None
def evaluatePredictor(examples, predictor):
    """
    predictor: a function that takes an x and returns a predicted y.
    Given a list of (x, y) examples, runs the predictor on each x and
    returns the fraction of examples it misclassifies.
    """
    misclassified = sum(1 for x, y in examples if predictor(x) != y)
    return 1.0 * misclassified / len(examples)
def generalized_fibonacci_sequence_up_to(n, p):
    """Compute the generalized Fibonacci sequence up to a given number.

    The sequence starts 1, p and each later member is the sum of the two
    previous ones.

    Args:
      n: An `int`. Return all members of the generalized Fibonacci sequence
        not greater than this number.
      p: An `int`. The number of the generalized sequence. Must be >= 1. If
        `p` is 1, the sequence is the standard Fibonacci sequence.

    Returns:
      A list of `int`s, all <= n. (Previously the two seed members 1 and p
      were returned even when they exceeded `n`; they are now subject to
      the same bound as every other member.)
    """
    sequence = [1, p]
    while True:
        nxt = sequence[-2] + sequence[-1]
        if nxt > n:
            break
        sequence.append(nxt)
    # Enforce the documented "not greater than n" bound on the seeds too.
    return [member for member in sequence if member <= n]
def parse_response(data):
    """Parse a "did this succeed" from a user. Only takes positive "yes" as OK."""
    affirmative = {'t', 'true', 'y', 'yes'}
    return data.lower() in affirmative
def dbsnp_string(data):
    """Format dbSNP data, if any, for GFF attributes

    Deduplicates the ids while preserving first-seen order, then renders
    them as a ';db_xref' GFF attribute fragment.

    Args:
        data: iterable of dbSNP id strings, or a falsy value meaning
            "no data".

    Returns:
        str: ';db_xref id1,id2,...' or '' when there is no data.
    """
    if not data:
        return ""
    # dict.fromkeys deduplicates in O(n) while keeping insertion order,
    # replacing the original O(n^2) membership-test-on-a-list loop.
    unique_ids = dict.fromkeys(data)
    return ";db_xref " + ",".join(unique_ids)
def set_of_vars(arg_plot):
    """Build set of needed field variables.

    Each var is a 2-tuple: the first component is a scalar field, the
    second is either:

    - a scalar field, isocontours are added to the plot.
    - a vector field (e.g. 'v' for the (v1,v2,v3) vector), arrows are
      added to the plot.
    - '' when no second field was requested.

    Args:
        arg_plot (str): variable names separated with ``,`` (figures)
            and ``+`` (same plot).

    Returns:
        set of tuple of str: the needed field variables.
    """
    needed = set()
    for spec in arg_plot.split(','):
        # Appending '+' guarantees at least two '+'-separated parts,
        # so a bare 'T' becomes ('T', '').
        parts = (spec + '+').split('+')
        needed.add(tuple(parts[:2]))
    # An empty spec produces ('', ''), which is meaningless — drop it.
    needed.discard(('', ''))
    return needed
def get_d_runs(ex_stat):
    """Get D runs.

    Finds the positions of "D"/"mD" entries in ``ex_stat`` and groups
    consecutive positions into runs; only runs of two or more positions
    are returned.
    """
    positions = [i for i, status in enumerate(ex_stat)
                 if status in ("D", "mD")]
    if len(positions) <= 1:
        # A single D (or none) cannot form a run.
        return []
    runs = []
    current = [positions[0]]
    for pos in positions[1:]:
        if pos == current[-1] + 1:
            # Consecutive index: extend the current run.
            current.append(pos)
        else:
            # Gap: close the current run and start a new one.
            runs.append(current)
            current = [pos]
    runs.append(current)
    return [run for run in runs if len(run) > 1]
def if_none(value, default):
    """
    Returns value or default if value is None.
    """
    return default if value is None else value
def julian_day_dt(year, month, day, hour, minute, second, microsecond):
    """This is the original way to calculate the julian day from the NREL
    paper. However, it is much faster to convert to unix/epoch time and
    then convert to julian day. Note that the date must be UTC.

    Args:
        year, month, day: UTC calendar date (month 1-12).
        hour, minute, second, microsecond: UTC time of day, with
            `microsecond` in datetime.microsecond units (0-999999).

    Returns:
        float: the Julian Day number, including the day fraction.
    """
    # January/February count as months 13/14 of the previous year
    # (standard Julian Day algorithm).
    if month <= 2:
        year = year - 1
        month = month + 12
    # Gregorian century correction.
    a = int(year / 100)
    b = 2 - a + int(a * 0.25)
    # BUG FIX: microseconds must be scaled to seconds (1e-6) before being
    # folded into the day fraction; previously they were added raw, which
    # inflated the result by up to ~11.5 days.
    frac_of_day = (microsecond / 1e6 + second + minute * 60 + hour * 3600
                   ) * 1.0 / (3600 * 24)
    d = day + frac_of_day
    jd = (int(365.25 * (year + 4716)) + int(30.6001 * (month + 1)) + d +
          b - 1524.5)
    return jd