content: string (lengths 22 to 815k)
id: int64 (0 to 4.91M)
def _find_in_iterable_case_insensitive(iterable, name):
    """ Return the value matching ``name``, case insensitive, from an iterable. """
    iterable = list(OrderedDict.fromkeys([k for k in iterable]))
    iterupper = [k.upper() for k in iterable]
    try:
        match = iterable[iterupper.index(name.upper())]
    except (ValueError, AttributeError):
        match = None
    return match
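A brief usage sketch (assuming the helper above lives in a module that imports OrderedDict from collections): matching ignores case and the first occurrence wins, while a missing name or a non-string falls back to None.

from collections import OrderedDict  # required by the helper above

headers = ["Content-Type", "content-type", "Accept"]
print(_find_in_iterable_case_insensitive(headers, "CONTENT-TYPE"))  # -> "Content-Type"
print(_find_in_iterable_case_insensitive(headers, "missing"))       # -> None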
5,339,800
def RunInSeparateProcess(fn, *args):
    """Runs a function in a separate process.

    Note: Only boolean return values are supported.

    @type fn: callable
    @param fn: Function to be called
    @rtype: bool
    @return: Function's result

    """
    pid = os.fork()
    if pid == 0:
        # Child process
        try:
            # In case the function uses temporary files
            utils_wrapper.ResetTempfileModule()

            # Call function
            result = int(bool(fn(*args)))
            assert result in (0, 1)
        except:  # pylint: disable=W0702
            logging.exception("Error while calling function in separate process")
            # 0 and 1 are reserved for the return value
            result = 33

        os._exit(result)  # pylint: disable=W0212

    # Parent process

    # Avoid zombies and check exit code
    (_, status) = os.waitpid(pid, 0)

    if os.WIFSIGNALED(status):
        exitcode = None
        signum = os.WTERMSIG(status)
    else:
        exitcode = os.WEXITSTATUS(status)
        signum = None

    if not (exitcode in (0, 1) and signum is None):
        raise errors.GenericError("Child program failed (code=%s, signal=%s)" %
                                  (exitcode, signum))

    return bool(exitcode)
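A hedged usage sketch: this only works on POSIX systems (it relies on os.fork), and utils_wrapper and errors are assumed to come from the surrounding code base. The callable below is hypothetical; any function whose boolean result you want computed in an isolated process would do.

import os

def _scratch_dir_writable():
    # hypothetical check whose boolean result we want from an isolated process
    return os.access("/tmp", os.W_OK)

# Any exception in the child leads to exit code 33 and therefore a
# GenericError in the parent; otherwise the boolean result is returned.
print(RunInSeparateProcess(_scratch_dir_writable))  # -> True or False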
5,339,801
def predict_all_points(data, order, coefficients):
    """
    Predict every sample of ``data`` from the previous ``order`` samples using the LPC coefficients.

    :param data: input signal to predict
    :param order: order of the linear predictor
    :param coefficients: LPC coefficients
    :return: the predicted signal; will be of length (len(data) - order)
    """
    predicted_set = np.zeros((1, len(data) - order))
    index = 0
    for i in np.arange(order, len(data)):
        history = data[i - order:i]
        predicted_set[0][index] = np.sum(np.multiply(history, -coefficients))
        index += 1
    return predicted_set[0]
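A small self-contained check of the sign convention used above (the prediction is the negated dot product of the history with the coefficients); the coefficients here are hand-picked for illustration, not fitted:

import numpy as np

data = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
coeffs = np.array([1.0, -2.0])  # prediction = -(1*x[n-2] + (-2)*x[n-1]) = 2*x[n-1] - x[n-2]
print(predict_all_points(data, order=2, coefficients=coeffs))  # -> [3. 4. 5.]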
5,339,802
def _unpack(msg, decode=True):
    """Unpack and decode a FETCHed message dictionary."""
    if 'UID' in msg and 'BODY[]' in msg:
        uid = msg['UID']
        body = msg['BODY[]']
        if decode:
            idate = msg.get('INTERNALDATE', None)
            flags = msg.get('FLAGS', ())
            return (uid, IMAP4Message(body, uid, idate, flags))
        else:
            return (uid, body)
    return (None, None)
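A quick illustration with decode=False, which sidesteps the module's IMAP4Message wrapper; the dictionary shape mimics an imaplib-style FETCH response and is made up for the example:

fetched = {'UID': 42, 'BODY[]': b'Subject: hi\r\n\r\nhello', 'FLAGS': ('\\Seen',)}
print(_unpack(fetched, decode=False))  # -> (42, b'Subject: hi\r\n\r\nhello')
print(_unpack({}, decode=False))       # -> (None, None)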
5,339,803
def video_in(filename=INPUTPATH):
    """Reads a (max. 20 sec!) video file, displays it, and stores every frame as a
    PNG image under images/ for later processing.
    Returns a string with the video name and the number of frames written."""
    # create video capture object
    cap = cv2.VideoCapture(filename)
    name = filename.split('/')[-1].split('.')[0]
    i = 0
    if not cap.isOpened():
        logging.error('Error opening video stream or file')
    while cap.isOpened():
        # capture frame-by-frame
        ret, frame = cap.read()
        if ret:
            i = i + 1
            cv2.imshow('Frame', frame)
            # OpenCV frames are BGR; they are saved as-is here
            Image.fromarray(frame).save(f"images/{name}_{i}.png")
            # Press Q on keyboard to exit
            if cv2.waitKey(25) & 0xFF == ord('q'):
                break
        # Break the loop
        else:
            break
    return f'Frame count of {name}: {i}'
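A usage sketch under stated assumptions: a GUI-capable OpenCV build, an existing video file (the filename below is a placeholder), an images/ directory for the output, and the imports the snippet itself relies on:

import os, logging
import cv2
from PIL import Image

INPUTPATH = "sample.mp4"               # hypothetical default path
os.makedirs("images", exist_ok=True)   # the function writes into ./images/

print(video_in("sample.mp4"))          # e.g. "Frame count of sample: 480"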
5,339,804
def convert_all_timestamps(results: List[ResponseResult]) -> List[ResponseResult]:
    """Replace all date/time info with datetime objects, where possible"""
    results = [convert_generic_timestamps(result) for result in results]
    results = [convert_observation_timestamps(result) for result in results]
    return results
5,339,805
def download_mnist(data_dir="/tmp/data", train=True):
    """Download MNIST dataset from a public S3 bucket

    Args:
        data_dir (str): directory to save the data
        train (bool): download training set

    Returns:
        None
    """
    if not os.path.exists(data_dir):
        os.makedirs(data_dir)

    if train:
        images_file = "train-images-idx3-ubyte.gz"
        labels_file = "train-labels-idx1-ubyte.gz"
    else:
        images_file = "t10k-images-idx3-ubyte.gz"
        labels_file = "t10k-labels-idx1-ubyte.gz"

    # download objects
    s3 = boto3.client("s3")
    for obj in [images_file, labels_file]:
        key = os.path.join("datasets/image/MNIST", obj)
        dest = os.path.join(data_dir, obj)
        if not os.path.exists(dest):
            s3.download_file(PUBLIC_BUCKET, key, dest)
    return
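A usage sketch, assuming AWS credentials are configured and that PUBLIC_BUCKET is a module-level constant defined elsewhere; the bucket name below is only a plausible guess, not taken from the snippet:

import os
import boto3

PUBLIC_BUCKET = "sagemaker-sample-files"  # assumed value; the real constant lives in the module

download_mnist("/tmp/data", train=True)
print(os.listdir("/tmp/data"))  # expect the two .gz files once the download finishes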
5,339,806
def init():
    """Sets up a project in current working directory with default settings.

    It copies files from templates directory and pastes them in the current working dir.
    The new project is set up with default settings.
    """
    cfg = Path("manim.cfg")
    if cfg.exists():
        raise FileExistsError(f"\t{cfg} exists\n")
    else:
        copy_template_files()
5,339,807
def convert(s):
    """ Take full markdown string and swap all math spans with img. """
    matches = find_inline_equations(s) + find_display_equations(s)
    for match in matches:
        full = match[0]
        latex = match[1]
        img = makeimg(latex)
        s = s.replace(full, img)
    return s
5,339,808
def file_parser(input_file: str = 'stocks.json') -> dict:
    """Reads the input file and loads it as a dictionary.

    Args:
        input_file: Takes the input file name as an argument.

    Returns:
        dict: The parsed JSON content (implicitly ``None`` if the file is missing
        or cannot be decoded).
    """
    if path.isfile(input_file):
        with open(input_file) as stock_file:
            try:
                return load(fp=stock_file)
            except JSONDecodeError:
                print(f"\033[31m{prefix(level='ERROR')}Unable to load stocks.json.\033[00m")
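A minimal sketch of how the surrounding module might satisfy the function's dependencies; the prefix helper below is a hypothetical stand-in for whatever the real module defines:

from os import path
from json import load, dump, JSONDecodeError

def prefix(level):  # hypothetical stand-in for the module's logging prefix helper
    return f"{level}: "

with open("stocks.json", "w") as fh:
    dump({"AAPL": 10, "MSFT": 5}, fh)

print(file_parser("stocks.json"))   # -> {'AAPL': 10, 'MSFT': 5}
print(file_parser("missing.json"))  # -> None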
5,339,809
def _amplify_ep(text):
    """
    check for added emphasis resulting from exclamation points (up to 4 of them)
    """
    ep_count = text.count("!")
    if ep_count > 4:
        ep_count = 4
    # (empirically derived mean sentiment intensity rating increase for
    # exclamation points)
    ep_amplifier = ep_count * 0.292
    return ep_amplifier
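For example, three exclamation points add 3 × 0.292 = 0.876 to the intensity rating, and anything beyond four is capped at 4 × 0.292 = 1.168:

print(_amplify_ep("great!!!"))      # 3 exclamation points -> 3 * 0.292
print(_amplify_ep("great!!!!!!!"))  # capped at 4 -> 4 * 0.292
print(_amplify_ep("great"))         # no emphasis -> 0.0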
5,339,810
def inline_singleton_lists(dsk):
    """ Inline lists that are only used once

    >>> d = {'b': (list, 'a'),
    ...      'c': (f, 'b', 1)}      # doctest: +SKIP
    >>> inline_singleton_lists(d)   # doctest: +SKIP
    {'c': (f, (list, 'a'), 1)}

    Pairs nicely with lazify afterwards
    """
    dependencies = dict((k, get_dependencies(dsk, k)) for k in dsk)
    dependents = reverse_dict(dependencies)

    keys = [k for k, v in dsk.items()
            if istask(v) and v and v[0] is list and len(dependents[k]) == 1]
    return inline(dsk, keys, inline_constants=False)
5,339,811
def vegasflowplus_sampler(*args, **kwargs):
    """Convenience wrapper for sampling random numbers

    Parameters
    ----------
        `integrand`: tf.function
        `n_dim`: number of dimensions
        `n_events`: number of events per iteration
        `training_steps`: number of training_iterations

    Returns
    -------
        `sampler`: a reference to the generate_random_array method of the integrator class
    """
    return sampler(VegasFlowPlus, *args, **kwargs)
5,339,812
def test_sparse_rail_generator_deterministic(): """Check that sparse_rail_generator runs deterministic over different python versions!""" speed_ration_map = {1.: 1., # Fast passenger train 1. / 2.: 0., # Fast freight train 1. / 3.: 0., # Slow commuter train 1. / 4.: 0.} # Slow freight train env = RailEnv(width=25, height=30, rail_generator=sparse_rail_generator(max_num_cities=5, max_rails_between_cities=3, seed=215545, # Random seed grid_mode=True ), schedule_generator=sparse_schedule_generator(speed_ration_map), number_of_agents=1) env.reset() # for r in range(env.height): # for c in range(env.width): # print("assert env.rail.get_full_transitions({}, {}) == {}, \"[{}][{}]\"".format(r, c, # env.rail.get_full_transitions( # r, c), r, c)) assert env.rail.get_full_transitions(0, 0) == 0, "[0][0]" assert env.rail.get_full_transitions(0, 1) == 0, "[0][1]" assert env.rail.get_full_transitions(0, 2) == 0, "[0][2]" assert env.rail.get_full_transitions(0, 3) == 0, "[0][3]" assert env.rail.get_full_transitions(0, 4) == 0, "[0][4]" assert env.rail.get_full_transitions(0, 5) == 0, "[0][5]" assert env.rail.get_full_transitions(0, 6) == 0, "[0][6]" assert env.rail.get_full_transitions(0, 7) == 0, "[0][7]" assert env.rail.get_full_transitions(0, 8) == 0, "[0][8]" assert env.rail.get_full_transitions(0, 9) == 0, "[0][9]" assert env.rail.get_full_transitions(0, 10) == 0, "[0][10]" assert env.rail.get_full_transitions(0, 11) == 0, "[0][11]" assert env.rail.get_full_transitions(0, 12) == 0, "[0][12]" assert env.rail.get_full_transitions(0, 13) == 0, "[0][13]" assert env.rail.get_full_transitions(0, 14) == 0, "[0][14]" assert env.rail.get_full_transitions(0, 15) == 0, "[0][15]" assert env.rail.get_full_transitions(0, 16) == 0, "[0][16]" assert env.rail.get_full_transitions(0, 17) == 0, "[0][17]" assert env.rail.get_full_transitions(0, 18) == 0, "[0][18]" assert env.rail.get_full_transitions(0, 19) == 0, "[0][19]" assert env.rail.get_full_transitions(0, 20) == 0, "[0][20]" assert env.rail.get_full_transitions(0, 21) == 0, "[0][21]" assert env.rail.get_full_transitions(0, 22) == 0, "[0][22]" assert env.rail.get_full_transitions(0, 23) == 0, "[0][23]" assert env.rail.get_full_transitions(0, 24) == 0, "[0][24]" assert env.rail.get_full_transitions(1, 0) == 0, "[1][0]" assert env.rail.get_full_transitions(1, 1) == 0, "[1][1]" assert env.rail.get_full_transitions(1, 2) == 0, "[1][2]" assert env.rail.get_full_transitions(1, 3) == 0, "[1][3]" assert env.rail.get_full_transitions(1, 4) == 0, "[1][4]" assert env.rail.get_full_transitions(1, 5) == 0, "[1][5]" assert env.rail.get_full_transitions(1, 6) == 0, "[1][6]" assert env.rail.get_full_transitions(1, 7) == 0, "[1][7]" assert env.rail.get_full_transitions(1, 8) == 0, "[1][8]" assert env.rail.get_full_transitions(1, 9) == 0, "[1][9]" assert env.rail.get_full_transitions(1, 10) == 0, "[1][10]" assert env.rail.get_full_transitions(1, 11) == 16386, "[1][11]" assert env.rail.get_full_transitions(1, 12) == 1025, "[1][12]" assert env.rail.get_full_transitions(1, 13) == 1025, "[1][13]" assert env.rail.get_full_transitions(1, 14) == 17411, "[1][14]" assert env.rail.get_full_transitions(1, 15) == 1025, "[1][15]" assert env.rail.get_full_transitions(1, 16) == 1025, "[1][16]" assert env.rail.get_full_transitions(1, 17) == 1025, "[1][17]" assert env.rail.get_full_transitions(1, 18) == 1025, "[1][18]" assert env.rail.get_full_transitions(1, 19) == 4608, "[1][19]" assert env.rail.get_full_transitions(1, 20) == 0, "[1][20]" assert env.rail.get_full_transitions(1, 21) == 0, 
"[1][21]" assert env.rail.get_full_transitions(1, 22) == 0, "[1][22]" assert env.rail.get_full_transitions(1, 23) == 0, "[1][23]" assert env.rail.get_full_transitions(1, 24) == 0, "[1][24]" assert env.rail.get_full_transitions(2, 0) == 0, "[2][0]" assert env.rail.get_full_transitions(2, 1) == 0, "[2][1]" assert env.rail.get_full_transitions(2, 2) == 0, "[2][2]" assert env.rail.get_full_transitions(2, 3) == 0, "[2][3]" assert env.rail.get_full_transitions(2, 4) == 0, "[2][4]" assert env.rail.get_full_transitions(2, 5) == 0, "[2][5]" assert env.rail.get_full_transitions(2, 6) == 0, "[2][6]" assert env.rail.get_full_transitions(2, 7) == 0, "[2][7]" assert env.rail.get_full_transitions(2, 8) == 0, "[2][8]" assert env.rail.get_full_transitions(2, 9) == 0, "[2][9]" assert env.rail.get_full_transitions(2, 10) == 0, "[2][10]" assert env.rail.get_full_transitions(2, 11) == 32800, "[2][11]" assert env.rail.get_full_transitions(2, 12) == 0, "[2][12]" assert env.rail.get_full_transitions(2, 13) == 0, "[2][13]" assert env.rail.get_full_transitions(2, 14) == 32800, "[2][14]" assert env.rail.get_full_transitions(2, 15) == 0, "[2][15]" assert env.rail.get_full_transitions(2, 16) == 0, "[2][16]" assert env.rail.get_full_transitions(2, 17) == 0, "[2][17]" assert env.rail.get_full_transitions(2, 18) == 0, "[2][18]" assert env.rail.get_full_transitions(2, 19) == 32800, "[2][19]" assert env.rail.get_full_transitions(2, 20) == 0, "[2][20]" assert env.rail.get_full_transitions(2, 21) == 0, "[2][21]" assert env.rail.get_full_transitions(2, 22) == 0, "[2][22]" assert env.rail.get_full_transitions(2, 23) == 0, "[2][23]" assert env.rail.get_full_transitions(2, 24) == 0, "[2][24]" assert env.rail.get_full_transitions(3, 0) == 0, "[3][0]" assert env.rail.get_full_transitions(3, 1) == 0, "[3][1]" assert env.rail.get_full_transitions(3, 2) == 0, "[3][2]" assert env.rail.get_full_transitions(3, 3) == 0, "[3][3]" assert env.rail.get_full_transitions(3, 4) == 0, "[3][4]" assert env.rail.get_full_transitions(3, 5) == 0, "[3][5]" assert env.rail.get_full_transitions(3, 6) == 0, "[3][6]" assert env.rail.get_full_transitions(3, 7) == 0, "[3][7]" assert env.rail.get_full_transitions(3, 8) == 0, "[3][8]" assert env.rail.get_full_transitions(3, 9) == 0, "[3][9]" assert env.rail.get_full_transitions(3, 10) == 0, "[3][10]" assert env.rail.get_full_transitions(3, 11) == 32800, "[3][11]" assert env.rail.get_full_transitions(3, 12) == 0, "[3][12]" assert env.rail.get_full_transitions(3, 13) == 0, "[3][13]" assert env.rail.get_full_transitions(3, 14) == 32800, "[3][14]" assert env.rail.get_full_transitions(3, 15) == 0, "[3][15]" assert env.rail.get_full_transitions(3, 16) == 0, "[3][16]" assert env.rail.get_full_transitions(3, 17) == 0, "[3][17]" assert env.rail.get_full_transitions(3, 18) == 0, "[3][18]" assert env.rail.get_full_transitions(3, 19) == 32872, "[3][19]" assert env.rail.get_full_transitions(3, 20) == 4608, "[3][20]" assert env.rail.get_full_transitions(3, 21) == 0, "[3][21]" assert env.rail.get_full_transitions(3, 22) == 0, "[3][22]" assert env.rail.get_full_transitions(3, 23) == 0, "[3][23]" assert env.rail.get_full_transitions(3, 24) == 0, "[3][24]" assert env.rail.get_full_transitions(4, 0) == 0, "[4][0]" assert env.rail.get_full_transitions(4, 1) == 0, "[4][1]" assert env.rail.get_full_transitions(4, 2) == 0, "[4][2]" assert env.rail.get_full_transitions(4, 3) == 0, "[4][3]" assert env.rail.get_full_transitions(4, 4) == 0, "[4][4]" assert env.rail.get_full_transitions(4, 5) == 0, "[4][5]" assert 
env.rail.get_full_transitions(4, 6) == 0, "[4][6]" assert env.rail.get_full_transitions(4, 7) == 0, "[4][7]" assert env.rail.get_full_transitions(4, 8) == 0, "[4][8]" assert env.rail.get_full_transitions(4, 9) == 0, "[4][9]" assert env.rail.get_full_transitions(4, 10) == 0, "[4][10]" assert env.rail.get_full_transitions(4, 11) == 32800, "[4][11]" assert env.rail.get_full_transitions(4, 12) == 0, "[4][12]" assert env.rail.get_full_transitions(4, 13) == 0, "[4][13]" assert env.rail.get_full_transitions(4, 14) == 32800, "[4][14]" assert env.rail.get_full_transitions(4, 15) == 0, "[4][15]" assert env.rail.get_full_transitions(4, 16) == 0, "[4][16]" assert env.rail.get_full_transitions(4, 17) == 0, "[4][17]" assert env.rail.get_full_transitions(4, 18) == 0, "[4][18]" assert env.rail.get_full_transitions(4, 19) == 49186, "[4][19]" assert env.rail.get_full_transitions(4, 20) == 34864, "[4][20]" assert env.rail.get_full_transitions(4, 21) == 0, "[4][21]" assert env.rail.get_full_transitions(4, 22) == 0, "[4][22]" assert env.rail.get_full_transitions(4, 23) == 0, "[4][23]" assert env.rail.get_full_transitions(4, 24) == 0, "[4][24]" assert env.rail.get_full_transitions(5, 0) == 0, "[5][0]" assert env.rail.get_full_transitions(5, 1) == 0, "[5][1]" assert env.rail.get_full_transitions(5, 2) == 0, "[5][2]" assert env.rail.get_full_transitions(5, 3) == 0, "[5][3]" assert env.rail.get_full_transitions(5, 4) == 0, "[5][4]" assert env.rail.get_full_transitions(5, 5) == 0, "[5][5]" assert env.rail.get_full_transitions(5, 6) == 0, "[5][6]" assert env.rail.get_full_transitions(5, 7) == 0, "[5][7]" assert env.rail.get_full_transitions(5, 8) == 0, "[5][8]" assert env.rail.get_full_transitions(5, 9) == 0, "[5][9]" assert env.rail.get_full_transitions(5, 10) == 0, "[5][10]" assert env.rail.get_full_transitions(5, 11) == 32800, "[5][11]" assert env.rail.get_full_transitions(5, 12) == 0, "[5][12]" assert env.rail.get_full_transitions(5, 13) == 0, "[5][13]" assert env.rail.get_full_transitions(5, 14) == 32800, "[5][14]" assert env.rail.get_full_transitions(5, 15) == 0, "[5][15]" assert env.rail.get_full_transitions(5, 16) == 0, "[5][16]" assert env.rail.get_full_transitions(5, 17) == 0, "[5][17]" assert env.rail.get_full_transitions(5, 18) == 0, "[5][18]" assert env.rail.get_full_transitions(5, 19) == 32800, "[5][19]" assert env.rail.get_full_transitions(5, 20) == 32800, "[5][20]" assert env.rail.get_full_transitions(5, 21) == 0, "[5][21]" assert env.rail.get_full_transitions(5, 22) == 0, "[5][22]" assert env.rail.get_full_transitions(5, 23) == 0, "[5][23]" assert env.rail.get_full_transitions(5, 24) == 0, "[5][24]" assert env.rail.get_full_transitions(6, 0) == 16386, "[6][0]" assert env.rail.get_full_transitions(6, 1) == 17411, "[6][1]" assert env.rail.get_full_transitions(6, 2) == 1025, "[6][2]" assert env.rail.get_full_transitions(6, 3) == 5633, "[6][3]" assert env.rail.get_full_transitions(6, 4) == 17411, "[6][4]" assert env.rail.get_full_transitions(6, 5) == 1025, "[6][5]" assert env.rail.get_full_transitions(6, 6) == 1025, "[6][6]" assert env.rail.get_full_transitions(6, 7) == 1025, "[6][7]" assert env.rail.get_full_transitions(6, 8) == 5633, "[6][8]" assert env.rail.get_full_transitions(6, 9) == 17411, "[6][9]" assert env.rail.get_full_transitions(6, 10) == 1025, "[6][10]" assert env.rail.get_full_transitions(6, 11) == 3089, "[6][11]" assert env.rail.get_full_transitions(6, 12) == 1025, "[6][12]" assert env.rail.get_full_transitions(6, 13) == 1025, "[6][13]" assert env.rail.get_full_transitions(6, 14) == 
2064, "[6][14]" assert env.rail.get_full_transitions(6, 15) == 0, "[6][15]" assert env.rail.get_full_transitions(6, 16) == 0, "[6][16]" assert env.rail.get_full_transitions(6, 17) == 0, "[6][17]" assert env.rail.get_full_transitions(6, 18) == 0, "[6][18]" assert env.rail.get_full_transitions(6, 19) == 32800, "[6][19]" assert env.rail.get_full_transitions(6, 20) == 32800, "[6][20]" assert env.rail.get_full_transitions(6, 21) == 0, "[6][21]" assert env.rail.get_full_transitions(6, 22) == 0, "[6][22]" assert env.rail.get_full_transitions(6, 23) == 0, "[6][23]" assert env.rail.get_full_transitions(6, 24) == 0, "[6][24]" assert env.rail.get_full_transitions(7, 0) == 32800, "[7][0]" assert env.rail.get_full_transitions(7, 1) == 32800, "[7][1]" assert env.rail.get_full_transitions(7, 2) == 0, "[7][2]" assert env.rail.get_full_transitions(7, 3) == 72, "[7][3]" assert env.rail.get_full_transitions(7, 4) == 3089, "[7][4]" assert env.rail.get_full_transitions(7, 5) == 1025, "[7][5]" assert env.rail.get_full_transitions(7, 6) == 1025, "[7][6]" assert env.rail.get_full_transitions(7, 7) == 1025, "[7][7]" assert env.rail.get_full_transitions(7, 8) == 1097, "[7][8]" assert env.rail.get_full_transitions(7, 9) == 2064, "[7][9]" assert env.rail.get_full_transitions(7, 10) == 0, "[7][10]" assert env.rail.get_full_transitions(7, 11) == 0, "[7][11]" assert env.rail.get_full_transitions(7, 12) == 0, "[7][12]" assert env.rail.get_full_transitions(7, 13) == 0, "[7][13]" assert env.rail.get_full_transitions(7, 14) == 0, "[7][14]" assert env.rail.get_full_transitions(7, 15) == 0, "[7][15]" assert env.rail.get_full_transitions(7, 16) == 0, "[7][16]" assert env.rail.get_full_transitions(7, 17) == 0, "[7][17]" assert env.rail.get_full_transitions(7, 18) == 0, "[7][18]" assert env.rail.get_full_transitions(7, 19) == 32800, "[7][19]" assert env.rail.get_full_transitions(7, 20) == 32800, "[7][20]" assert env.rail.get_full_transitions(7, 21) == 0, "[7][21]" assert env.rail.get_full_transitions(7, 22) == 0, "[7][22]" assert env.rail.get_full_transitions(7, 23) == 0, "[7][23]" assert env.rail.get_full_transitions(7, 24) == 0, "[7][24]" assert env.rail.get_full_transitions(8, 0) == 32800, "[8][0]" assert env.rail.get_full_transitions(8, 1) == 32800, "[8][1]" assert env.rail.get_full_transitions(8, 2) == 0, "[8][2]" assert env.rail.get_full_transitions(8, 3) == 0, "[8][3]" assert env.rail.get_full_transitions(8, 4) == 0, "[8][4]" assert env.rail.get_full_transitions(8, 5) == 0, "[8][5]" assert env.rail.get_full_transitions(8, 6) == 0, "[8][6]" assert env.rail.get_full_transitions(8, 7) == 0, "[8][7]" assert env.rail.get_full_transitions(8, 8) == 0, "[8][8]" assert env.rail.get_full_transitions(8, 9) == 0, "[8][9]" assert env.rail.get_full_transitions(8, 10) == 0, "[8][10]" assert env.rail.get_full_transitions(8, 11) == 0, "[8][11]" assert env.rail.get_full_transitions(8, 12) == 0, "[8][12]" assert env.rail.get_full_transitions(8, 13) == 0, "[8][13]" assert env.rail.get_full_transitions(8, 14) == 0, "[8][14]" assert env.rail.get_full_transitions(8, 15) == 0, "[8][15]" assert env.rail.get_full_transitions(8, 16) == 0, "[8][16]" assert env.rail.get_full_transitions(8, 17) == 0, "[8][17]" assert env.rail.get_full_transitions(8, 18) == 0, "[8][18]" assert env.rail.get_full_transitions(8, 19) == 32872, "[8][19]" assert env.rail.get_full_transitions(8, 20) == 37408, "[8][20]" assert env.rail.get_full_transitions(8, 21) == 0, "[8][21]" assert env.rail.get_full_transitions(8, 22) == 0, "[8][22]" assert env.rail.get_full_transitions(8, 
23) == 0, "[8][23]" assert env.rail.get_full_transitions(8, 24) == 0, "[8][24]" assert env.rail.get_full_transitions(9, 0) == 32800, "[9][0]" assert env.rail.get_full_transitions(9, 1) == 32800, "[9][1]" assert env.rail.get_full_transitions(9, 2) == 0, "[9][2]" assert env.rail.get_full_transitions(9, 3) == 0, "[9][3]" assert env.rail.get_full_transitions(9, 4) == 0, "[9][4]" assert env.rail.get_full_transitions(9, 5) == 0, "[9][5]" assert env.rail.get_full_transitions(9, 6) == 0, "[9][6]" assert env.rail.get_full_transitions(9, 7) == 0, "[9][7]" assert env.rail.get_full_transitions(9, 8) == 0, "[9][8]" assert env.rail.get_full_transitions(9, 9) == 0, "[9][9]" assert env.rail.get_full_transitions(9, 10) == 0, "[9][10]" assert env.rail.get_full_transitions(9, 11) == 0, "[9][11]" assert env.rail.get_full_transitions(9, 12) == 0, "[9][12]" assert env.rail.get_full_transitions(9, 13) == 0, "[9][13]" assert env.rail.get_full_transitions(9, 14) == 0, "[9][14]" assert env.rail.get_full_transitions(9, 15) == 0, "[9][15]" assert env.rail.get_full_transitions(9, 16) == 0, "[9][16]" assert env.rail.get_full_transitions(9, 17) == 0, "[9][17]" assert env.rail.get_full_transitions(9, 18) == 0, "[9][18]" assert env.rail.get_full_transitions(9, 19) == 49186, "[9][19]" assert env.rail.get_full_transitions(9, 20) == 2064, "[9][20]" assert env.rail.get_full_transitions(9, 21) == 0, "[9][21]" assert env.rail.get_full_transitions(9, 22) == 0, "[9][22]" assert env.rail.get_full_transitions(9, 23) == 0, "[9][23]" assert env.rail.get_full_transitions(9, 24) == 0, "[9][24]" assert env.rail.get_full_transitions(10, 0) == 32800, "[10][0]" assert env.rail.get_full_transitions(10, 1) == 32800, "[10][1]" assert env.rail.get_full_transitions(10, 2) == 0, "[10][2]" assert env.rail.get_full_transitions(10, 3) == 0, "[10][3]" assert env.rail.get_full_transitions(10, 4) == 0, "[10][4]" assert env.rail.get_full_transitions(10, 5) == 0, "[10][5]" assert env.rail.get_full_transitions(10, 6) == 0, "[10][6]" assert env.rail.get_full_transitions(10, 7) == 0, "[10][7]" assert env.rail.get_full_transitions(10, 8) == 0, "[10][8]" assert env.rail.get_full_transitions(10, 9) == 0, "[10][9]" assert env.rail.get_full_transitions(10, 10) == 0, "[10][10]" assert env.rail.get_full_transitions(10, 11) == 0, "[10][11]" assert env.rail.get_full_transitions(10, 12) == 0, "[10][12]" assert env.rail.get_full_transitions(10, 13) == 0, "[10][13]" assert env.rail.get_full_transitions(10, 14) == 0, "[10][14]" assert env.rail.get_full_transitions(10, 15) == 0, "[10][15]" assert env.rail.get_full_transitions(10, 16) == 0, "[10][16]" assert env.rail.get_full_transitions(10, 17) == 0, "[10][17]" assert env.rail.get_full_transitions(10, 18) == 0, "[10][18]" assert env.rail.get_full_transitions(10, 19) == 32800, "[10][19]" assert env.rail.get_full_transitions(10, 20) == 0, "[10][20]" assert env.rail.get_full_transitions(10, 21) == 0, "[10][21]" assert env.rail.get_full_transitions(10, 22) == 0, "[10][22]" assert env.rail.get_full_transitions(10, 23) == 0, "[10][23]" assert env.rail.get_full_transitions(10, 24) == 0, "[10][24]" assert env.rail.get_full_transitions(11, 0) == 32800, "[11][0]" assert env.rail.get_full_transitions(11, 1) == 32800, "[11][1]" assert env.rail.get_full_transitions(11, 2) == 0, "[11][2]" assert env.rail.get_full_transitions(11, 3) == 0, "[11][3]" assert env.rail.get_full_transitions(11, 4) == 0, "[11][4]" assert env.rail.get_full_transitions(11, 5) == 0, "[11][5]" assert env.rail.get_full_transitions(11, 6) == 0, "[11][6]" assert 
env.rail.get_full_transitions(11, 7) == 0, "[11][7]" assert env.rail.get_full_transitions(11, 8) == 0, "[11][8]" assert env.rail.get_full_transitions(11, 9) == 0, "[11][9]" assert env.rail.get_full_transitions(11, 10) == 0, "[11][10]" assert env.rail.get_full_transitions(11, 11) == 0, "[11][11]" assert env.rail.get_full_transitions(11, 12) == 0, "[11][12]" assert env.rail.get_full_transitions(11, 13) == 0, "[11][13]" assert env.rail.get_full_transitions(11, 14) == 0, "[11][14]" assert env.rail.get_full_transitions(11, 15) == 0, "[11][15]" assert env.rail.get_full_transitions(11, 16) == 0, "[11][16]" assert env.rail.get_full_transitions(11, 17) == 0, "[11][17]" assert env.rail.get_full_transitions(11, 18) == 0, "[11][18]" assert env.rail.get_full_transitions(11, 19) == 32872, "[11][19]" assert env.rail.get_full_transitions(11, 20) == 5633, "[11][20]" assert env.rail.get_full_transitions(11, 21) == 4608, "[11][21]" assert env.rail.get_full_transitions(11, 22) == 0, "[11][22]" assert env.rail.get_full_transitions(11, 23) == 0, "[11][23]" assert env.rail.get_full_transitions(11, 24) == 0, "[11][24]" assert env.rail.get_full_transitions(12, 0) == 32800, "[12][0]" assert env.rail.get_full_transitions(12, 1) == 32800, "[12][1]" assert env.rail.get_full_transitions(12, 2) == 0, "[12][2]" assert env.rail.get_full_transitions(12, 3) == 0, "[12][3]" assert env.rail.get_full_transitions(12, 4) == 0, "[12][4]" assert env.rail.get_full_transitions(12, 5) == 0, "[12][5]" assert env.rail.get_full_transitions(12, 6) == 0, "[12][6]" assert env.rail.get_full_transitions(12, 7) == 0, "[12][7]" assert env.rail.get_full_transitions(12, 8) == 0, "[12][8]" assert env.rail.get_full_transitions(12, 9) == 0, "[12][9]" assert env.rail.get_full_transitions(12, 10) == 0, "[12][10]" assert env.rail.get_full_transitions(12, 11) == 0, "[12][11]" assert env.rail.get_full_transitions(12, 12) == 0, "[12][12]" assert env.rail.get_full_transitions(12, 13) == 0, "[12][13]" assert env.rail.get_full_transitions(12, 14) == 0, "[12][14]" assert env.rail.get_full_transitions(12, 15) == 0, "[12][15]" assert env.rail.get_full_transitions(12, 16) == 0, "[12][16]" assert env.rail.get_full_transitions(12, 17) == 0, "[12][17]" assert env.rail.get_full_transitions(12, 18) == 0, "[12][18]" assert env.rail.get_full_transitions(12, 19) == 32800, "[12][19]" assert env.rail.get_full_transitions(12, 20) == 32800, "[12][20]" assert env.rail.get_full_transitions(12, 21) == 32800, "[12][21]" assert env.rail.get_full_transitions(12, 22) == 0, "[12][22]" assert env.rail.get_full_transitions(12, 23) == 0, "[12][23]" assert env.rail.get_full_transitions(12, 24) == 0, "[12][24]" assert env.rail.get_full_transitions(13, 0) == 32800, "[13][0]" assert env.rail.get_full_transitions(13, 1) == 32800, "[13][1]" assert env.rail.get_full_transitions(13, 2) == 0, "[13][2]" assert env.rail.get_full_transitions(13, 3) == 0, "[13][3]" assert env.rail.get_full_transitions(13, 4) == 0, "[13][4]" assert env.rail.get_full_transitions(13, 5) == 0, "[13][5]" assert env.rail.get_full_transitions(13, 6) == 0, "[13][6]" assert env.rail.get_full_transitions(13, 7) == 0, "[13][7]" assert env.rail.get_full_transitions(13, 8) == 0, "[13][8]" assert env.rail.get_full_transitions(13, 9) == 0, "[13][9]" assert env.rail.get_full_transitions(13, 10) == 0, "[13][10]" assert env.rail.get_full_transitions(13, 11) == 0, "[13][11]" assert env.rail.get_full_transitions(13, 12) == 0, "[13][12]" assert env.rail.get_full_transitions(13, 13) == 0, "[13][13]" assert 
env.rail.get_full_transitions(13, 14) == 0, "[13][14]" assert env.rail.get_full_transitions(13, 15) == 0, "[13][15]" assert env.rail.get_full_transitions(13, 16) == 0, "[13][16]" assert env.rail.get_full_transitions(13, 17) == 0, "[13][17]" assert env.rail.get_full_transitions(13, 18) == 0, "[13][18]" assert env.rail.get_full_transitions(13, 19) == 32800, "[13][19]" assert env.rail.get_full_transitions(13, 20) == 32800, "[13][20]" assert env.rail.get_full_transitions(13, 21) == 32800, "[13][21]" assert env.rail.get_full_transitions(13, 22) == 0, "[13][22]" assert env.rail.get_full_transitions(13, 23) == 0, "[13][23]" assert env.rail.get_full_transitions(13, 24) == 0, "[13][24]" assert env.rail.get_full_transitions(14, 0) == 32800, "[14][0]" assert env.rail.get_full_transitions(14, 1) == 32800, "[14][1]" assert env.rail.get_full_transitions(14, 2) == 0, "[14][2]" assert env.rail.get_full_transitions(14, 3) == 0, "[14][3]" assert env.rail.get_full_transitions(14, 4) == 0, "[14][4]" assert env.rail.get_full_transitions(14, 5) == 0, "[14][5]" assert env.rail.get_full_transitions(14, 6) == 0, "[14][6]" assert env.rail.get_full_transitions(14, 7) == 0, "[14][7]" assert env.rail.get_full_transitions(14, 8) == 0, "[14][8]" assert env.rail.get_full_transitions(14, 9) == 0, "[14][9]" assert env.rail.get_full_transitions(14, 10) == 0, "[14][10]" assert env.rail.get_full_transitions(14, 11) == 0, "[14][11]" assert env.rail.get_full_transitions(14, 12) == 0, "[14][12]" assert env.rail.get_full_transitions(14, 13) == 0, "[14][13]" assert env.rail.get_full_transitions(14, 14) == 0, "[14][14]" assert env.rail.get_full_transitions(14, 15) == 0, "[14][15]" assert env.rail.get_full_transitions(14, 16) == 0, "[14][16]" assert env.rail.get_full_transitions(14, 17) == 0, "[14][17]" assert env.rail.get_full_transitions(14, 18) == 0, "[14][18]" assert env.rail.get_full_transitions(14, 19) == 32800, "[14][19]" assert env.rail.get_full_transitions(14, 20) == 32800, "[14][20]" assert env.rail.get_full_transitions(14, 21) == 32800, "[14][21]" assert env.rail.get_full_transitions(14, 22) == 0, "[14][22]" assert env.rail.get_full_transitions(14, 23) == 0, "[14][23]" assert env.rail.get_full_transitions(14, 24) == 0, "[14][24]" assert env.rail.get_full_transitions(15, 0) == 32800, "[15][0]" assert env.rail.get_full_transitions(15, 1) == 32800, "[15][1]" assert env.rail.get_full_transitions(15, 2) == 0, "[15][2]" assert env.rail.get_full_transitions(15, 3) == 0, "[15][3]" assert env.rail.get_full_transitions(15, 4) == 0, "[15][4]" assert env.rail.get_full_transitions(15, 5) == 0, "[15][5]" assert env.rail.get_full_transitions(15, 6) == 0, "[15][6]" assert env.rail.get_full_transitions(15, 7) == 0, "[15][7]" assert env.rail.get_full_transitions(15, 8) == 0, "[15][8]" assert env.rail.get_full_transitions(15, 9) == 0, "[15][9]" assert env.rail.get_full_transitions(15, 10) == 0, "[15][10]" assert env.rail.get_full_transitions(15, 11) == 0, "[15][11]" assert env.rail.get_full_transitions(15, 12) == 0, "[15][12]" assert env.rail.get_full_transitions(15, 13) == 0, "[15][13]" assert env.rail.get_full_transitions(15, 14) == 0, "[15][14]" assert env.rail.get_full_transitions(15, 15) == 0, "[15][15]" assert env.rail.get_full_transitions(15, 16) == 0, "[15][16]" assert env.rail.get_full_transitions(15, 17) == 0, "[15][17]" assert env.rail.get_full_transitions(15, 18) == 0, "[15][18]" assert env.rail.get_full_transitions(15, 19) == 32800, "[15][19]" assert env.rail.get_full_transitions(15, 20) == 32800, "[15][20]" assert 
env.rail.get_full_transitions(15, 21) == 32800, "[15][21]" assert env.rail.get_full_transitions(15, 22) == 0, "[15][22]" assert env.rail.get_full_transitions(15, 23) == 0, "[15][23]" assert env.rail.get_full_transitions(15, 24) == 0, "[15][24]" assert env.rail.get_full_transitions(16, 0) == 32800, "[16][0]" assert env.rail.get_full_transitions(16, 1) == 32800, "[16][1]" assert env.rail.get_full_transitions(16, 2) == 0, "[16][2]" assert env.rail.get_full_transitions(16, 3) == 0, "[16][3]" assert env.rail.get_full_transitions(16, 4) == 0, "[16][4]" assert env.rail.get_full_transitions(16, 5) == 0, "[16][5]" assert env.rail.get_full_transitions(16, 6) == 0, "[16][6]" assert env.rail.get_full_transitions(16, 7) == 0, "[16][7]" assert env.rail.get_full_transitions(16, 8) == 0, "[16][8]" assert env.rail.get_full_transitions(16, 9) == 0, "[16][9]" assert env.rail.get_full_transitions(16, 10) == 0, "[16][10]" assert env.rail.get_full_transitions(16, 11) == 0, "[16][11]" assert env.rail.get_full_transitions(16, 12) == 0, "[16][12]" assert env.rail.get_full_transitions(16, 13) == 0, "[16][13]" assert env.rail.get_full_transitions(16, 14) == 0, "[16][14]" assert env.rail.get_full_transitions(16, 15) == 0, "[16][15]" assert env.rail.get_full_transitions(16, 16) == 0, "[16][16]" assert env.rail.get_full_transitions(16, 17) == 0, "[16][17]" assert env.rail.get_full_transitions(16, 18) == 0, "[16][18]" assert env.rail.get_full_transitions(16, 19) == 32800, "[16][19]" assert env.rail.get_full_transitions(16, 20) == 32800, "[16][20]" assert env.rail.get_full_transitions(16, 21) == 32800, "[16][21]" assert env.rail.get_full_transitions(16, 22) == 0, "[16][22]" assert env.rail.get_full_transitions(16, 23) == 0, "[16][23]" assert env.rail.get_full_transitions(16, 24) == 0, "[16][24]" assert env.rail.get_full_transitions(17, 0) == 32800, "[17][0]" assert env.rail.get_full_transitions(17, 1) == 32800, "[17][1]" assert env.rail.get_full_transitions(17, 2) == 0, "[17][2]" assert env.rail.get_full_transitions(17, 3) == 0, "[17][3]" assert env.rail.get_full_transitions(17, 4) == 0, "[17][4]" assert env.rail.get_full_transitions(17, 5) == 0, "[17][5]" assert env.rail.get_full_transitions(17, 6) == 0, "[17][6]" assert env.rail.get_full_transitions(17, 7) == 0, "[17][7]" assert env.rail.get_full_transitions(17, 8) == 0, "[17][8]" assert env.rail.get_full_transitions(17, 9) == 0, "[17][9]" assert env.rail.get_full_transitions(17, 10) == 0, "[17][10]" assert env.rail.get_full_transitions(17, 11) == 0, "[17][11]" assert env.rail.get_full_transitions(17, 12) == 0, "[17][12]" assert env.rail.get_full_transitions(17, 13) == 0, "[17][13]" assert env.rail.get_full_transitions(17, 14) == 0, "[17][14]" assert env.rail.get_full_transitions(17, 15) == 0, "[17][15]" assert env.rail.get_full_transitions(17, 16) == 0, "[17][16]" assert env.rail.get_full_transitions(17, 17) == 0, "[17][17]" assert env.rail.get_full_transitions(17, 18) == 0, "[17][18]" assert env.rail.get_full_transitions(17, 19) == 32800, "[17][19]" assert env.rail.get_full_transitions(17, 20) == 32800, "[17][20]" assert env.rail.get_full_transitions(17, 21) == 32800, "[17][21]" assert env.rail.get_full_transitions(17, 22) == 0, "[17][22]" assert env.rail.get_full_transitions(17, 23) == 0, "[17][23]" assert env.rail.get_full_transitions(17, 24) == 0, "[17][24]" assert env.rail.get_full_transitions(18, 0) == 72, "[18][0]" assert env.rail.get_full_transitions(18, 1) == 37408, "[18][1]" assert env.rail.get_full_transitions(18, 2) == 0, "[18][2]" assert 
env.rail.get_full_transitions(18, 3) == 0, "[18][3]" assert env.rail.get_full_transitions(18, 4) == 0, "[18][4]" assert env.rail.get_full_transitions(18, 5) == 0, "[18][5]" assert env.rail.get_full_transitions(18, 6) == 0, "[18][6]" assert env.rail.get_full_transitions(18, 7) == 0, "[18][7]" assert env.rail.get_full_transitions(18, 8) == 0, "[18][8]" assert env.rail.get_full_transitions(18, 9) == 0, "[18][9]" assert env.rail.get_full_transitions(18, 10) == 0, "[18][10]" assert env.rail.get_full_transitions(18, 11) == 0, "[18][11]" assert env.rail.get_full_transitions(18, 12) == 0, "[18][12]" assert env.rail.get_full_transitions(18, 13) == 0, "[18][13]" assert env.rail.get_full_transitions(18, 14) == 0, "[18][14]" assert env.rail.get_full_transitions(18, 15) == 0, "[18][15]" assert env.rail.get_full_transitions(18, 16) == 0, "[18][16]" assert env.rail.get_full_transitions(18, 17) == 0, "[18][17]" assert env.rail.get_full_transitions(18, 18) == 0, "[18][18]" assert env.rail.get_full_transitions(18, 19) == 32800, "[18][19]" assert env.rail.get_full_transitions(18, 20) == 32800, "[18][20]" assert env.rail.get_full_transitions(18, 21) == 32800, "[18][21]" assert env.rail.get_full_transitions(18, 22) == 0, "[18][22]" assert env.rail.get_full_transitions(18, 23) == 0, "[18][23]" assert env.rail.get_full_transitions(18, 24) == 0, "[18][24]" assert env.rail.get_full_transitions(19, 0) == 0, "[19][0]" assert env.rail.get_full_transitions(19, 1) == 32800, "[19][1]" assert env.rail.get_full_transitions(19, 2) == 0, "[19][2]" assert env.rail.get_full_transitions(19, 3) == 0, "[19][3]" assert env.rail.get_full_transitions(19, 4) == 0, "[19][4]" assert env.rail.get_full_transitions(19, 5) == 0, "[19][5]" assert env.rail.get_full_transitions(19, 6) == 0, "[19][6]" assert env.rail.get_full_transitions(19, 7) == 0, "[19][7]" assert env.rail.get_full_transitions(19, 8) == 0, "[19][8]" assert env.rail.get_full_transitions(19, 9) == 0, "[19][9]" assert env.rail.get_full_transitions(19, 10) == 0, "[19][10]" assert env.rail.get_full_transitions(19, 11) == 0, "[19][11]" assert env.rail.get_full_transitions(19, 12) == 0, "[19][12]" assert env.rail.get_full_transitions(19, 13) == 0, "[19][13]" assert env.rail.get_full_transitions(19, 14) == 16386, "[19][14]" assert env.rail.get_full_transitions(19, 15) == 1025, "[19][15]" assert env.rail.get_full_transitions(19, 16) == 1025, "[19][16]" assert env.rail.get_full_transitions(19, 17) == 1025, "[19][17]" assert env.rail.get_full_transitions(19, 18) == 1025, "[19][18]" assert env.rail.get_full_transitions(19, 19) == 37408, "[19][19]" assert env.rail.get_full_transitions(19, 20) == 32800, "[19][20]" assert env.rail.get_full_transitions(19, 21) == 32800, "[19][21]" assert env.rail.get_full_transitions(19, 22) == 0, "[19][22]" assert env.rail.get_full_transitions(19, 23) == 0, "[19][23]" assert env.rail.get_full_transitions(19, 24) == 0, "[19][24]" assert env.rail.get_full_transitions(20, 0) == 0, "[20][0]" assert env.rail.get_full_transitions(20, 1) == 32800, "[20][1]" assert env.rail.get_full_transitions(20, 2) == 0, "[20][2]" assert env.rail.get_full_transitions(20, 3) == 0, "[20][3]" assert env.rail.get_full_transitions(20, 4) == 0, "[20][4]" assert env.rail.get_full_transitions(20, 5) == 0, "[20][5]" assert env.rail.get_full_transitions(20, 6) == 0, "[20][6]" assert env.rail.get_full_transitions(20, 7) == 0, "[20][7]" assert env.rail.get_full_transitions(20, 8) == 0, "[20][8]" assert env.rail.get_full_transitions(20, 9) == 0, "[20][9]" assert 
env.rail.get_full_transitions(20, 10) == 0, "[20][10]" assert env.rail.get_full_transitions(20, 11) == 0, "[20][11]" assert env.rail.get_full_transitions(20, 12) == 0, "[20][12]" assert env.rail.get_full_transitions(20, 13) == 0, "[20][13]" assert env.rail.get_full_transitions(20, 14) == 32800, "[20][14]" assert env.rail.get_full_transitions(20, 15) == 0, "[20][15]" assert env.rail.get_full_transitions(20, 16) == 0, "[20][16]" assert env.rail.get_full_transitions(20, 17) == 0, "[20][17]" assert env.rail.get_full_transitions(20, 18) == 0, "[20][18]" assert env.rail.get_full_transitions(20, 19) == 32800, "[20][19]" assert env.rail.get_full_transitions(20, 20) == 32800, "[20][20]" assert env.rail.get_full_transitions(20, 21) == 32800, "[20][21]" assert env.rail.get_full_transitions(20, 22) == 0, "[20][22]" assert env.rail.get_full_transitions(20, 23) == 0, "[20][23]" assert env.rail.get_full_transitions(20, 24) == 0, "[20][24]" assert env.rail.get_full_transitions(21, 0) == 0, "[21][0]" assert env.rail.get_full_transitions(21, 1) == 32800, "[21][1]" assert env.rail.get_full_transitions(21, 2) == 0, "[21][2]" assert env.rail.get_full_transitions(21, 3) == 0, "[21][3]" assert env.rail.get_full_transitions(21, 4) == 0, "[21][4]" assert env.rail.get_full_transitions(21, 5) == 0, "[21][5]" assert env.rail.get_full_transitions(21, 6) == 0, "[21][6]" assert env.rail.get_full_transitions(21, 7) == 0, "[21][7]" assert env.rail.get_full_transitions(21, 8) == 0, "[21][8]" assert env.rail.get_full_transitions(21, 9) == 0, "[21][9]" assert env.rail.get_full_transitions(21, 10) == 0, "[21][10]" assert env.rail.get_full_transitions(21, 11) == 0, "[21][11]" assert env.rail.get_full_transitions(21, 12) == 0, "[21][12]" assert env.rail.get_full_transitions(21, 13) == 0, "[21][13]" assert env.rail.get_full_transitions(21, 14) == 32800, "[21][14]" assert env.rail.get_full_transitions(21, 15) == 0, "[21][15]" assert env.rail.get_full_transitions(21, 16) == 0, "[21][16]" assert env.rail.get_full_transitions(21, 17) == 0, "[21][17]" assert env.rail.get_full_transitions(21, 18) == 0, "[21][18]" assert env.rail.get_full_transitions(21, 19) == 32872, "[21][19]" assert env.rail.get_full_transitions(21, 20) == 37408, "[21][20]" assert env.rail.get_full_transitions(21, 21) == 32800, "[21][21]" assert env.rail.get_full_transitions(21, 22) == 0, "[21][22]" assert env.rail.get_full_transitions(21, 23) == 0, "[21][23]" assert env.rail.get_full_transitions(21, 24) == 0, "[21][24]" assert env.rail.get_full_transitions(22, 0) == 0, "[22][0]" assert env.rail.get_full_transitions(22, 1) == 32800, "[22][1]" assert env.rail.get_full_transitions(22, 2) == 0, "[22][2]" assert env.rail.get_full_transitions(22, 3) == 0, "[22][3]" assert env.rail.get_full_transitions(22, 4) == 0, "[22][4]" assert env.rail.get_full_transitions(22, 5) == 0, "[22][5]" assert env.rail.get_full_transitions(22, 6) == 0, "[22][6]" assert env.rail.get_full_transitions(22, 7) == 0, "[22][7]" assert env.rail.get_full_transitions(22, 8) == 0, "[22][8]" assert env.rail.get_full_transitions(22, 9) == 0, "[22][9]" assert env.rail.get_full_transitions(22, 10) == 0, "[22][10]" assert env.rail.get_full_transitions(22, 11) == 0, "[22][11]" assert env.rail.get_full_transitions(22, 12) == 0, "[22][12]" assert env.rail.get_full_transitions(22, 13) == 0, "[22][13]" assert env.rail.get_full_transitions(22, 14) == 32800, "[22][14]" assert env.rail.get_full_transitions(22, 15) == 0, "[22][15]" assert env.rail.get_full_transitions(22, 16) == 0, "[22][16]" assert 
env.rail.get_full_transitions(22, 17) == 0, "[22][17]" assert env.rail.get_full_transitions(22, 18) == 0, "[22][18]" assert env.rail.get_full_transitions(22, 19) == 49186, "[22][19]" assert env.rail.get_full_transitions(22, 20) == 34864, "[22][20]" assert env.rail.get_full_transitions(22, 21) == 32800, "[22][21]" assert env.rail.get_full_transitions(22, 22) == 0, "[22][22]" assert env.rail.get_full_transitions(22, 23) == 0, "[22][23]" assert env.rail.get_full_transitions(22, 24) == 0, "[22][24]" assert env.rail.get_full_transitions(23, 0) == 0, "[23][0]" assert env.rail.get_full_transitions(23, 1) == 32800, "[23][1]" assert env.rail.get_full_transitions(23, 2) == 0, "[23][2]" assert env.rail.get_full_transitions(23, 3) == 0, "[23][3]" assert env.rail.get_full_transitions(23, 4) == 0, "[23][4]" assert env.rail.get_full_transitions(23, 5) == 16386, "[23][5]" assert env.rail.get_full_transitions(23, 6) == 1025, "[23][6]" assert env.rail.get_full_transitions(23, 7) == 4608, "[23][7]" assert env.rail.get_full_transitions(23, 8) == 0, "[23][8]" assert env.rail.get_full_transitions(23, 9) == 0, "[23][9]" assert env.rail.get_full_transitions(23, 10) == 0, "[23][10]" assert env.rail.get_full_transitions(23, 11) == 0, "[23][11]" assert env.rail.get_full_transitions(23, 12) == 0, "[23][12]" assert env.rail.get_full_transitions(23, 13) == 0, "[23][13]" assert env.rail.get_full_transitions(23, 14) == 32800, "[23][14]" assert env.rail.get_full_transitions(23, 15) == 0, "[23][15]" assert env.rail.get_full_transitions(23, 16) == 0, "[23][16]" assert env.rail.get_full_transitions(23, 17) == 0, "[23][17]" assert env.rail.get_full_transitions(23, 18) == 0, "[23][18]" assert env.rail.get_full_transitions(23, 19) == 32800, "[23][19]" assert env.rail.get_full_transitions(23, 20) == 32872, "[23][20]" assert env.rail.get_full_transitions(23, 21) == 37408, "[23][21]" assert env.rail.get_full_transitions(23, 22) == 0, "[23][22]" assert env.rail.get_full_transitions(23, 23) == 0, "[23][23]" assert env.rail.get_full_transitions(23, 24) == 0, "[23][24]" assert env.rail.get_full_transitions(24, 0) == 0, "[24][0]" assert env.rail.get_full_transitions(24, 1) == 72, "[24][1]" assert env.rail.get_full_transitions(24, 2) == 1025, "[24][2]" assert env.rail.get_full_transitions(24, 3) == 5633, "[24][3]" assert env.rail.get_full_transitions(24, 4) == 17411, "[24][4]" assert env.rail.get_full_transitions(24, 5) == 3089, "[24][5]" assert env.rail.get_full_transitions(24, 6) == 1025, "[24][6]" assert env.rail.get_full_transitions(24, 7) == 1097, "[24][7]" assert env.rail.get_full_transitions(24, 8) == 5633, "[24][8]" assert env.rail.get_full_transitions(24, 9) == 17411, "[24][9]" assert env.rail.get_full_transitions(24, 10) == 1025, "[24][10]" assert env.rail.get_full_transitions(24, 11) == 5633, "[24][11]" assert env.rail.get_full_transitions(24, 12) == 1025, "[24][12]" assert env.rail.get_full_transitions(24, 13) == 1025, "[24][13]" assert env.rail.get_full_transitions(24, 14) == 2064, "[24][14]" assert env.rail.get_full_transitions(24, 15) == 0, "[24][15]" assert env.rail.get_full_transitions(24, 16) == 0, "[24][16]" assert env.rail.get_full_transitions(24, 17) == 0, "[24][17]" assert env.rail.get_full_transitions(24, 18) == 0, "[24][18]" assert env.rail.get_full_transitions(24, 19) == 32800, "[24][19]" assert env.rail.get_full_transitions(24, 20) == 32800, "[24][20]" assert env.rail.get_full_transitions(24, 21) == 32800, "[24][21]" assert env.rail.get_full_transitions(24, 22) == 0, "[24][22]" assert 
env.rail.get_full_transitions(24, 23) == 0, "[24][23]" assert env.rail.get_full_transitions(24, 24) == 0, "[24][24]" assert env.rail.get_full_transitions(25, 0) == 0, "[25][0]" assert env.rail.get_full_transitions(25, 1) == 0, "[25][1]" assert env.rail.get_full_transitions(25, 2) == 0, "[25][2]" assert env.rail.get_full_transitions(25, 3) == 72, "[25][3]" assert env.rail.get_full_transitions(25, 4) == 3089, "[25][4]" assert env.rail.get_full_transitions(25, 5) == 5633, "[25][5]" assert env.rail.get_full_transitions(25, 6) == 1025, "[25][6]" assert env.rail.get_full_transitions(25, 7) == 17411, "[25][7]" assert env.rail.get_full_transitions(25, 8) == 1097, "[25][8]" assert env.rail.get_full_transitions(25, 9) == 2064, "[25][9]" assert env.rail.get_full_transitions(25, 10) == 0, "[25][10]" assert env.rail.get_full_transitions(25, 11) == 32872, "[25][11]" assert env.rail.get_full_transitions(25, 12) == 5633, "[25][12]" assert env.rail.get_full_transitions(25, 13) == 4608, "[25][13]" assert env.rail.get_full_transitions(25, 14) == 0, "[25][14]" assert env.rail.get_full_transitions(25, 15) == 0, "[25][15]" assert env.rail.get_full_transitions(25, 16) == 0, "[25][16]" assert env.rail.get_full_transitions(25, 17) == 0, "[25][17]" assert env.rail.get_full_transitions(25, 18) == 0, "[25][18]" assert env.rail.get_full_transitions(25, 19) == 32800, "[25][19]" assert env.rail.get_full_transitions(25, 20) == 49186, "[25][20]" assert env.rail.get_full_transitions(25, 21) == 34864, "[25][21]" assert env.rail.get_full_transitions(25, 22) == 0, "[25][22]" assert env.rail.get_full_transitions(25, 23) == 0, "[25][23]" assert env.rail.get_full_transitions(25, 24) == 0, "[25][24]" assert env.rail.get_full_transitions(26, 0) == 0, "[26][0]" assert env.rail.get_full_transitions(26, 1) == 0, "[26][1]" assert env.rail.get_full_transitions(26, 2) == 0, "[26][2]" assert env.rail.get_full_transitions(26, 3) == 0, "[26][3]" assert env.rail.get_full_transitions(26, 4) == 0, "[26][4]" assert env.rail.get_full_transitions(26, 5) == 72, "[26][5]" assert env.rail.get_full_transitions(26, 6) == 1025, "[26][6]" assert env.rail.get_full_transitions(26, 7) == 2064, "[26][7]" assert env.rail.get_full_transitions(26, 8) == 0, "[26][8]" assert env.rail.get_full_transitions(26, 9) == 0, "[26][9]" assert env.rail.get_full_transitions(26, 10) == 0, "[26][10]" assert env.rail.get_full_transitions(26, 11) == 32800, "[26][11]" assert env.rail.get_full_transitions(26, 12) == 32800, "[26][12]" assert env.rail.get_full_transitions(26, 13) == 32800, "[26][13]" assert env.rail.get_full_transitions(26, 14) == 0, "[26][14]" assert env.rail.get_full_transitions(26, 15) == 0, "[26][15]" assert env.rail.get_full_transitions(26, 16) == 0, "[26][16]" assert env.rail.get_full_transitions(26, 17) == 0, "[26][17]" assert env.rail.get_full_transitions(26, 18) == 0, "[26][18]" assert env.rail.get_full_transitions(26, 19) == 32872, "[26][19]" assert env.rail.get_full_transitions(26, 20) == 37408, "[26][20]" assert env.rail.get_full_transitions(26, 21) == 32800, "[26][21]" assert env.rail.get_full_transitions(26, 22) == 0, "[26][22]" assert env.rail.get_full_transitions(26, 23) == 0, "[26][23]" assert env.rail.get_full_transitions(26, 24) == 0, "[26][24]" assert env.rail.get_full_transitions(27, 0) == 0, "[27][0]" assert env.rail.get_full_transitions(27, 1) == 0, "[27][1]" assert env.rail.get_full_transitions(27, 2) == 0, "[27][2]" assert env.rail.get_full_transitions(27, 3) == 0, "[27][3]" assert env.rail.get_full_transitions(27, 4) == 0, "[27][4]" 
assert env.rail.get_full_transitions(27, 5) == 0, "[27][5]" assert env.rail.get_full_transitions(27, 6) == 0, "[27][6]" assert env.rail.get_full_transitions(27, 7) == 0, "[27][7]" assert env.rail.get_full_transitions(27, 8) == 0, "[27][8]" assert env.rail.get_full_transitions(27, 9) == 0, "[27][9]" assert env.rail.get_full_transitions(27, 10) == 0, "[27][10]" assert env.rail.get_full_transitions(27, 11) == 32800, "[27][11]" assert env.rail.get_full_transitions(27, 12) == 32800, "[27][12]" assert env.rail.get_full_transitions(27, 13) == 72, "[27][13]" assert env.rail.get_full_transitions(27, 14) == 4608, "[27][14]" assert env.rail.get_full_transitions(27, 15) == 0, "[27][15]" assert env.rail.get_full_transitions(27, 16) == 0, "[27][16]" assert env.rail.get_full_transitions(27, 17) == 0, "[27][17]" assert env.rail.get_full_transitions(27, 18) == 0, "[27][18]" assert env.rail.get_full_transitions(27, 19) == 49186, "[27][19]" assert env.rail.get_full_transitions(27, 20) == 34864, "[27][20]" assert env.rail.get_full_transitions(27, 21) == 32800, "[27][21]" assert env.rail.get_full_transitions(27, 22) == 0, "[27][22]" assert env.rail.get_full_transitions(27, 23) == 0, "[27][23]" assert env.rail.get_full_transitions(27, 24) == 0, "[27][24]" assert env.rail.get_full_transitions(28, 0) == 0, "[28][0]" assert env.rail.get_full_transitions(28, 1) == 0, "[28][1]" assert env.rail.get_full_transitions(28, 2) == 0, "[28][2]" assert env.rail.get_full_transitions(28, 3) == 0, "[28][3]" assert env.rail.get_full_transitions(28, 4) == 0, "[28][4]" assert env.rail.get_full_transitions(28, 5) == 0, "[28][5]" assert env.rail.get_full_transitions(28, 6) == 0, "[28][6]" assert env.rail.get_full_transitions(28, 7) == 0, "[28][7]" assert env.rail.get_full_transitions(28, 8) == 0, "[28][8]" assert env.rail.get_full_transitions(28, 9) == 0, "[28][9]" assert env.rail.get_full_transitions(28, 10) == 0, "[28][10]" assert env.rail.get_full_transitions(28, 11) == 32800, "[28][11]" assert env.rail.get_full_transitions(28, 12) == 72, "[28][12]" assert env.rail.get_full_transitions(28, 13) == 1025, "[28][13]" assert env.rail.get_full_transitions(28, 14) == 37408, "[28][14]" assert env.rail.get_full_transitions(28, 15) == 0, "[28][15]" assert env.rail.get_full_transitions(28, 16) == 0, "[28][16]" assert env.rail.get_full_transitions(28, 17) == 0, "[28][17]" assert env.rail.get_full_transitions(28, 18) == 0, "[28][18]" assert env.rail.get_full_transitions(28, 19) == 32800, "[28][19]" assert env.rail.get_full_transitions(28, 20) == 32800, "[28][20]" assert env.rail.get_full_transitions(28, 21) == 32800, "[28][21]" assert env.rail.get_full_transitions(28, 22) == 0, "[28][22]" assert env.rail.get_full_transitions(28, 23) == 0, "[28][23]" assert env.rail.get_full_transitions(28, 24) == 0, "[28][24]" assert env.rail.get_full_transitions(29, 0) == 0, "[29][0]" assert env.rail.get_full_transitions(29, 1) == 0, "[29][1]" assert env.rail.get_full_transitions(29, 2) == 0, "[29][2]" assert env.rail.get_full_transitions(29, 3) == 0, "[29][3]" assert env.rail.get_full_transitions(29, 4) == 0, "[29][4]" assert env.rail.get_full_transitions(29, 5) == 0, "[29][5]" assert env.rail.get_full_transitions(29, 6) == 0, "[29][6]" assert env.rail.get_full_transitions(29, 7) == 0, "[29][7]" assert env.rail.get_full_transitions(29, 8) == 0, "[29][8]" assert env.rail.get_full_transitions(29, 9) == 0, "[29][9]" assert env.rail.get_full_transitions(29, 10) == 0, "[29][10]" assert env.rail.get_full_transitions(29, 11) == 72, "[29][11]" assert 
env.rail.get_full_transitions(29, 12) == 1025, "[29][12]" assert env.rail.get_full_transitions(29, 13) == 1025, "[29][13]" assert env.rail.get_full_transitions(29, 14) == 1097, "[29][14]" assert env.rail.get_full_transitions(29, 15) == 1025, "[29][15]" assert env.rail.get_full_transitions(29, 16) == 1025, "[29][16]" assert env.rail.get_full_transitions(29, 17) == 1025, "[29][17]" assert env.rail.get_full_transitions(29, 18) == 1025, "[29][18]" assert env.rail.get_full_transitions(29, 19) == 3089, "[29][19]" assert env.rail.get_full_transitions(29, 20) == 3089, "[29][20]" assert env.rail.get_full_transitions(29, 21) == 2064, "[29][21]" assert env.rail.get_full_transitions(29, 22) == 0, "[29][22]" assert env.rail.get_full_transitions(29, 23) == 0, "[29][23]" assert env.rail.get_full_transitions(29, 24) == 0, "[29][24]"
5,339,813
def remove_recalculated_sectors(df, prefix='', suffix=''):
    """Return df with Total gas (sum of all sectors) removed
    """
    idx = recalculated_row_idx(df, prefix=prefix, suffix=suffix)
    return df[~idx]
5,339,814
def unparse_headers(hdrs):
    """Serialize a dictionary of headers to a string.

    Args:
        hdrs: A dictionary of headers.

    Returns:
        The headers as a string that can be used in an NNTP POST.
    """
    return "".join([unparse_header(n, v) for n, v in hdrs.items()]) + "\r\n"
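unparse_header comes from the same module and is not shown here; a minimal stand-in, used only to make the example below self-contained, could format each header as "Name: value" followed by CRLF:

def unparse_header(name, value):  # hypothetical stand-in for the module's helper
    return f"{name}: {value}\r\n"

print(repr(unparse_headers({"From": "a@example.org", "Subject": "hello"})))
# -> 'From: a@example.org\r\nSubject: hello\r\n\r\n'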
5,339,815
def initialize(file=None, logging_level='INFO'):
    """Read the configuration file containing the run's parameters.

    This should be the first call, before using any of the other OGGM modules
    for most (all?) OGGM simulations.

    Parameters
    ----------
    file : str
        path to the configuration file (default: OGGM params.cfg)
    logging_level : str
        set a logging level. See :func:`set_logging_config` for options.
    """
    global PARAMS
    global DATA

    initialize_minimal(file=file, logging_level=logging_level)

    # Do not spam
    PARAMS.do_log = False

    # Make sure we have a proper cache dir
    from oggm.utils import download_oggm_files, get_demo_file
    download_oggm_files()

    # Read-in the reference t* data for all available models types (oggm, vas)
    model_prefixes = ['oggm_', 'vas_']
    for prefix in model_prefixes:
        fns = ['ref_tstars_rgi5_cru4', 'ref_tstars_rgi6_cru4',
               'ref_tstars_rgi5_histalp', 'ref_tstars_rgi6_histalp']
        for fn in fns:
            fpath = get_demo_file(prefix + fn + '.csv')
            PARAMS[prefix + fn] = pd.read_csv(fpath)
            fpath = get_demo_file(prefix + fn + '_calib_params.json')
            with open(fpath, 'r') as fp:
                mbpar = json.load(fp)
            PARAMS[prefix + fn + '_calib_params'] = mbpar

    # Read in the demo glaciers
    file = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                        'data', 'demo_glaciers.csv')
    DATA['demo_glaciers'] = pd.read_csv(file, index_col=0)

    # Add other things
    if 'dem_grids' not in DATA:
        grids = {}
        for grid_json in ['gimpdem_90m_v01.1.json',
                          'arcticdem_mosaic_100m_v3.0.json',
                          'Alaska_albers_V3.json',
                          'AntarcticDEM_wgs84.json',
                          'REMA_100m_dem.json']:
            if grid_json not in grids:
                fp = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                  'data', grid_json)
                try:
                    grids[grid_json] = salem.Grid.from_json(fp)
                except NameError:
                    pass
        DATA['dem_grids'] = grids

    # Trigger a one time check of the hash file
    from oggm.utils import get_dl_verify_data
    get_dl_verify_data('dummy_section')

    # OK
    PARAMS.do_log = True
5,339,816
def gl_set_visibility(gl_project_group, gl_visibility_level):
    """
    Sets the visibility for a Gitlab project or group

    :param gl_project_group: A Project or Group object
    :param gl_visibility_level: Visibility level ('public', 'internal', 'private')
    """
    gl_project_group.visibility = gl_visibility_level
    gl_project_group.save()
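A usage sketch with the python-gitlab client; the URL, token, and project path below are placeholders:

import gitlab

gl = gitlab.Gitlab("https://gitlab.example.com", private_token="TOKEN")
project = gl.projects.get("group/my-project")
gl_set_visibility(project, "private")  # updates and saves the project's visibility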
5,339,817
def modify_natoms(row, BBTs, fg):
    """This function takes a row of a pandas data frame and calculates the new number
    of atoms based on the atom difference indicated in its functional groups.

    BBTs : list of instances of BBT class
    fg : instance of the Parameters class (fg parameters)
    returns : n_atoms (int)"""
    n_atoms = row['N_ATOMS']
    for i in BBTs[row['BBT']].BBT:
        n_atoms += fg.par[i]['atom_dif']
    if n_atoms < 1:
        return np.nan
    return n_atoms
5,339,818
async def test_sample_url_max_attempts(mocker):
    """It should observe the max attempts, for sequential failures."""
    mock_collect_trace = mocker.patch(
        'lab.fetch_websites.collect_trace', autospec=True)
    mock_collect_trace.side_effect = [
        {'protocol': proto, 'status': status} for proto, status in [
            ('Q043', 'success'), ('tcp', 'timeout'), ('tcp', 'success'),
            ('Q046', 'failure'), ('Q046', 'success'),
            ('tcp', 'timeout'), ('tcp', 'failure'),
        ]
    ]

    results = ProtocolSampler(
        sniffer=sentinel.sniffer, session_factory=sentinel.factory,
        max_attempts=2
    ).sample_url('https://pie.ch', {'Q043': 1, 'tcp': 5, 'Q046': 1})
    _ = [result async for result in results]

    assert mock_collect_trace.call_args_list == [
        mock.call('https://pie.ch', proto, sentinel.sniffer, sentinel.factory)
        for proto in ['Q043', 'tcp', 'tcp'] + ['Q046'] * 2 + ['tcp'] * 2]
5,339,819
def cmd_deposit_references(logger, session, references_json): """ Deposit/update a set of references into database given by DB_URL. Input is a line separated JSON file, with one reference object per line. """ import_references(session, references_json) logger.echo("References imported successfully")
5,339,820
def install(url, force, skip_platform_check=False, skip_migration=False, skip_package_migration=False, skip_setup_swap=False, swap_mem_size=None, total_mem_threshold=None, available_mem_threshold=None): """ Install image from local binary or URL""" bootloader = get_bootloader() if url.startswith('http://') or url.startswith('https://'): echo_and_log('Downloading image...') validate_url_or_abort(url) try: urlretrieve(url, bootloader.DEFAULT_IMAGE_PATH, reporthook) click.echo('') except Exception as e: echo_and_log("Download error", e) raise click.Abort() image_path = bootloader.DEFAULT_IMAGE_PATH else: image_path = os.path.join("./", url) binary_image_version = bootloader.get_binary_image_version(image_path) if not binary_image_version: echo_and_log("Image file does not exist or is not a valid SONiC image file", LOG_ERR) raise click.Abort() # Is this version already installed? if binary_image_version in bootloader.get_installed_images(): echo_and_log("Image {} is already installed. Setting it as default...".format(binary_image_version)) if not bootloader.set_default_image(binary_image_version): echo_and_log('Error: Failed to set image as default', LOG_ERR) raise click.Abort() else: # Verify not installing non-secure image in a secure running image if not bootloader.verify_secureboot_image(image_path) and not force: echo_and_log("Image file '{}' is of a different type than running image.\n".format(url) + "If you are sure you want to install this image, use -f|--force|--skip-secure-check.\n" + "Aborting...", LOG_ERR) raise click.Abort() # Verify that the binary image is of the same platform type as running platform if not bootloader.verify_image_platform(image_path) and not skip_platform_check: echo_and_log("Image file '{}' is of a different platform type than running platform.\n".format(url) + "If you are sure you want to install this image, use --skip-platform-check.\n" + "Aborting...", LOG_ERR) raise click.Abort() echo_and_log("Installing image {} and setting it as default...".format(binary_image_version)) with SWAPAllocator(not skip_setup_swap, swap_mem_size, total_mem_threshold, available_mem_threshold): bootloader.install_image(image_path) # Take a backup of current configuration if skip_migration: echo_and_log("Skipping configuration migration as requested in the command option.") else: run_command('config-setup backup') update_sonic_environment(bootloader, binary_image_version) if not bootloader.supports_package_migration(binary_image_version) and not skip_package_migration: echo_and_log("Warning: SONiC package migration is not supported for this bootloader/image", fg="yellow") skip_package_migration = True if not skip_package_migration: migrate_sonic_packages(bootloader, binary_image_version) # Finally, sync filesystem run_command("sync;sync;sync") run_command("sleep 3") # wait 3 seconds after sync echo_and_log('Done')
5,339,821
def recreate_cursor(collection, cursor_id, retrieved, batch_size): """ Creates and returns a Cursor object based on an existing cursor in the server. If cursor_id is invalid, the returned cursor will raise OperationFailure on read. If batch_size is -1, then all remaining documents on the cursor are returned. """ if cursor_id == 0: return None cursor_info = {'id': cursor_id, 'firstBatch': []} _logger.info( "collection: {0} cursor_info: {1} retrieved {2} batch_size {3}" .format(collection, cursor_id, retrieved, batch_size)) cursor = CommandCursor(collection, cursor_info, 0, retrieved=retrieved) cursor.batch_size(batch_size) return cursor
5,339,822
def incons(input_dictionary,m,b,use_boiling=True,use_equation=False): """Writes the initial conditions file INCON, assigning a pressure and temperature to every element interpolated at its depth. Parameters ---------- input_dictionary : dictionary Contains the information of the layer under the keyword 'LAYER' and 'z_ref'. Also it contains the keyword 'INCONS_PARAM' with the specified initial conditions, i.e.:'INCONS_PARAM':{'To':30,'GRADTZ':0.08,'DEPTH_TO_SURF':100,'DELTAZ':20} m : float Pressure slope on a TVD vs P plot b : float Pressure intercept on a TVD vs P plot use_equation : bool If true the variables m and b will be used to extrapolate the values to the bottom layer use_boiling : bool If true the boiling conditions will be used for calculating the bottom conditions Returns ------- file INCON : on model/t2/sources Attention --------- It requires an updated ELEME.json Note ---- Boiling conditions are assumed Examples -------- >>> incons(input_dictionary,m,b) """ input_file='../mesh/ELEME.json' T,P,depth=initial_conditions(input_dictionary,m,b,use_boiling,use_equation) Tfunc=interpolate.interp1d(depth,T) Pfunc=interpolate.interp1d(depth,P) output_file='../model/t2/sources/INCON' string="" if os.path.isfile(input_file): eleme_df = pd.read_json(input_file).T for index, row in eleme_df.iterrows(): zi=row['Z'] Ti=Tfunc(zi) Pi=Pfunc(zi)*1E5 string+="%5s%35s\n"%(index,' ') string+=" %19.13E %19.13E\n"%(Pi,Ti) file=open(output_file,'w') file.write(string) file.close() else: sys.exit("The file %s or directory does not exist"%input_file)
5,339,823
def nut00b(date1, date2): """ Wrapper for ERFA function ``eraNut00b``. Parameters ---------- date1 : double array date2 : double array Returns ------- dpsi : double array deps : double array Notes ----- The ERFA documentation is below. - - - - - - - - - - e r a N u t 0 0 b - - - - - - - - - - Nutation, IAU 2000B model. Given: date1,date2 double TT as a 2-part Julian Date (Note 1) Returned: dpsi,deps double nutation, luni-solar + planetary (Note 2) Notes: 1) The TT date date1+date2 is a Julian Date, apportioned in any convenient way between the two arguments. For example, JD(TT)=2450123.7 could be expressed in any of these ways, among others: date1 date2 2450123.7 0.0 (JD method) 2451545.0 -1421.3 (J2000 method) 2400000.5 50123.2 (MJD method) 2450123.5 0.2 (date & time method) The JD method is the most natural and convenient to use in cases where the loss of several decimal digits of resolution is acceptable. The J2000 method is best matched to the way the argument is handled internally and will deliver the optimum resolution. The MJD method and the date & time methods are both good compromises between resolution and convenience. 2) The nutation components in longitude and obliquity are in radians and with respect to the equinox and ecliptic of date. The obliquity at J2000.0 is assumed to be the Lieske et al. (1977) value of 84381.448 arcsec. (The errors that result from using this function with the IAU 2006 value of 84381.406 arcsec can be neglected.) The nutation model consists only of luni-solar terms, but includes also a fixed offset which compensates for certain long- period planetary terms (Note 7). 3) This function is an implementation of the IAU 2000B abridged nutation model formally adopted by the IAU General Assembly in 2000. The function computes the MHB_2000_SHORT luni-solar nutation series (Luzum 2001), but without the associated corrections for the precession rate adjustments and the offset between the GCRS and J2000.0 mean poles. 4) The full IAU 2000A (MHB2000) nutation model contains nearly 1400 terms. The IAU 2000B model (McCarthy & Luzum 2003) contains only 77 terms, plus additional simplifications, yet still delivers results of 1 mas accuracy at present epochs. This combination of accuracy and size makes the IAU 2000B abridged nutation model suitable for most practical applications. The function delivers a pole accurate to 1 mas from 1900 to 2100 (usually better than 1 mas, very occasionally just outside 1 mas). The full IAU 2000A model, which is implemented in the function eraNut00a (q.v.), delivers considerably greater accuracy at current dates; however, to realize this improved accuracy, corrections for the essentially unpredictable free-core-nutation (FCN) must also be included. 5) The present function provides classical nutation. The MHB_2000_SHORT algorithm, from which it is adapted, deals also with (i) the offsets between the GCRS and mean poles and (ii) the adjustments in longitude and obliquity due to the changed precession rates. These additional functions, namely frame bias and precession adjustments, are supported by the ERFA functions eraBi00 and eraPr00. 6) The MHB_2000_SHORT algorithm also provides "total" nutations, comprising the arithmetic sum of the frame bias, precession adjustments, and nutation (luni-solar + planetary). These total nutations can be used in combination with an existing IAU 1976 precession implementation, such as eraPmat76, to deliver GCRS- to-true predictions of mas accuracy at current epochs. 
However, for symmetry with the eraNut00a function (q.v. for the reasons), the ERFA functions do not generate the "total nutations" directly. Should they be required, they could of course easily be generated by calling eraBi00, eraPr00 and the present function and adding the results. 7) The IAU 2000B model includes "planetary bias" terms that are fixed in size but compensate for long-period nutations. The amplitudes quoted in McCarthy & Luzum (2003), namely Dpsi = -1.5835 mas and Depsilon = +1.6339 mas, are optimized for the "total nutations" method described in Note 6. The Luzum (2001) values used in this ERFA implementation, namely -0.135 mas and +0.388 mas, are optimized for the "rigorous" method, where frame bias, precession and nutation are applied separately and in that order. During the interval 1995-2050, the ERFA implementation delivers a maximum error of 1.001 mas (not including FCN). References: Lieske, J.H., Lederle, T., Fricke, W., Morando, B., "Expressions for the precession quantities based upon the IAU /1976/ system of astronomical constants", Astron.Astrophys. 58, 1-2, 1-16. (1977) Luzum, B., private communication, 2001 (Fortran code MHB_2000_SHORT) McCarthy, D.D. & Luzum, B.J., "An abridged model of the precession-nutation of the celestial pole", Cel.Mech.Dyn.Astron. 85, 37-49 (2003) Simon, J.-L., Bretagnon, P., Chapront, J., Chapront-Touze, M., Francou, G., Laskar, J., Astron.Astrophys. 282, 663-683 (1994) Copyright (C) 2013-2017, NumFOCUS Foundation. Derived, with permission, from the SOFA library. See notes at end of file. """ dpsi, deps = ufunc.nut00b(date1, date2) return dpsi, deps
5,339,824
def train_tree_default( training_predictor_table, training_target_table, validation_predictor_table, validation_target_table): """Trains decision tree with default params. :param training_predictor_table: See doc for `utils.read_feature_file`. :param training_target_table: Same. :param validation_predictor_table: Same. :param validation_target_table: Same. """ default_tree_model_object = utils.setup_classification_tree( min_examples_at_split=30, min_examples_at_leaf=30) _ = utils.train_classification_tree( model_object=default_tree_model_object, training_predictor_table=training_predictor_table, training_target_table=training_target_table) training_probabilities = default_tree_model_object.predict_proba( training_predictor_table.as_matrix() )[:, 1] training_event_frequency = numpy.mean( training_target_table[utils.BINARIZED_TARGET_NAME].values ) utils.eval_binary_classifn( observed_labels=training_target_table[ utils.BINARIZED_TARGET_NAME].values, forecast_probabilities=training_probabilities, training_event_frequency=training_event_frequency, dataset_name='training') validation_probabilities = default_tree_model_object.predict_proba( validation_predictor_table.as_matrix() )[:, 1] utils.eval_binary_classifn( observed_labels=validation_target_table[ utils.BINARIZED_TARGET_NAME].values, forecast_probabilities=validation_probabilities, training_event_frequency=training_event_frequency, dataset_name='validation')
5,339,825
def AddResourceArg(parser, verb): """Add a resource argument for an access zone. NOTE: Must be used only if it's the only resource arg in the command. Args: parser: the parser for the command. verb: str, the verb to describe the resource, such as 'to update'. """ concept_parsers.ConceptParser.ForResource( 'zone', _GetResourceSpec(), 'The access zone {}.'.format(verb), required=True).AddToParser(parser)
5,339,826
def add_mms_attachments(db, mms, backup_dir, thread_dir): """Add all attachment objects to MMS message""" qry = db.execute( "SELECT _id, ct, unique_id, voice_note, width, height, quote " "FROM part WHERE mid=?", (mms._id,), ) for _id, ct, unique_id, voice_note, width, height, quote in qry: a = Attachment( contentType=ct, unique_id=unique_id, fileName=get_attachment_filename( _id, unique_id, backup_dir, thread_dir ), voiceNote=voice_note, width=width, height=height, quote=quote, ) mms.attachments.append(a)
5,339,827
def Join_Factors(*factor_data, merge_names=None, new_name=None, weight=None, style='SAST'): """Merge factors into one by weighted summation. Only the weights of the non-missing factors are re-normalized before combining. Parameters: =========== factor_data: dataframe or tuple of dataframes merge_names: list Names of the factors to merge; must be a subset of the columns of the data frame new_name: str Name of the combined factor weight: list or None Weights of the factors to merge style : str, 'SAST' or 'AST' Layout of the attribute, symbol and time dimensions in factor_data. SAST (Stack Attribute-Symbol-Time) is the most common: the index is a Time-Symbol MultiIndex and the columns are attributes; AST (Attribute-Symbol-Time): the index is time and the columns are symbols. """ def nansum(a, w): nanind = np.isfinite(a) if np.sum(nanind) == 0.0: return np.nan return np.sum(a[nanind] * w[nanind]) / np.sum(w[nanind]) if new_name is None: new_name = 'new' if isinstance(merge_names, str): merge_names = [merge_names] if len(factor_data) == 1: if merge_names is None: factor_values = factor_data[0].values else: factor_values = factor_data[0][merge_names].values elif style == 'SAST': factor_data = align_dataframes(*factor_data) factor_values = np.hstack([x.values for x in factor_data]) else: factor_data = align_dataframes(*factor_data, axis='both') factor_values = np.stack([x.values for x in factor_data]) nfactors = factor_values.shape[1] if factor_values.ndim == 2 else factor_values.shape[0] if weight is None: weight = np.asarray([1.0 / nfactors] * nfactors) else: weight = np.asarray(weight) / np.sum(weight) if factor_values.ndim == 2: weight_array = np.tile(weight, (factor_values.shape[0],1)) na_ind = np.isnan(factor_values) weight_array[na_ind] = 0.0 weight_array = weight_array / weight_array.sum(axis=1)[:, np.newaxis] new_values = np.nansum(factor_values * weight_array, axis=1) new_values[np.all(na_ind, axis=1)] = np.nan return pd.DataFrame(new_values, index=factor_data[0].index, columns=[new_name]) else: new_values = np.apply_along_axis(nansum, 0, factor_values, w=weight) return pd.DataFrame(new_values, index=factor_data[0].index, columns=factor_data[0].columns)
5,339,828
def _check_for_file_changes(filepath: Path, config: Config) -> bool: """Returns True if a file was modified in a working dir.""" # Run 'git add' to avoid false negatives, as 'git diff --staged' is used for # detection. This is important when there are external factors that impact the # committing process (like pre-commit). _call_git(config, "add", [filepath.as_posix()]) git_diff_out = _get_git_output(config, "diff", ["--staged", filepath.as_posix()]) # If 'git diff' output is empty, the file wasn't modified. return git_diff_out != b""
5,339,829
def wait_or_cancel(proc, title, message): """ Display status dialog while process is running and allow user to cancel :param proc: subprocess object :param title: title for status dialog :param message: message for status dialog :return: (process exit code, stdout output or None) """ pDialog = xbmcgui.DialogProgress() pDialog.create(title, "") while proc and proc.poll() is None and not pDialog.iscanceled(): pDialog.update(50, message) exitcode = 1 stdout = None try: if not pDialog.iscanceled(): msg = proc.communicate()[0] exitcode = proc.returncode if exitcode == 0: stdout = msg pDialog.update(100, "Complete!") time.sleep(3) else: xbmcgui.Dialog().ok( "Error during {desc}".format(desc=title.lower()), msg) stdout = msg else: proc.terminate() stdout = None exitcode = 1 except Exception: pass pDialog.close() return (exitcode, stdout)
5,339,830
def assert_eventually_true(eval, timeout=None, delay=0.01): """ Checks if the passed function evaluates to true within the time limit specified by timeout """ t0 = time.time() while (True): if eval(): break time.sleep(delay) if timeout and time.time() - t0 > timeout: assert False, "%s still evaluated to false after %f seconds" % (repr(eval), timeout)
5,339,831
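A small usage sketch for assert_eventually_true above: a background timer flips a flag and the assertion polls until it becomes true, or the 1-second timeout elapses.

import threading

flag = {'done': False}
threading.Timer(0.1, lambda: flag.update(done=True)).start()
assert_eventually_true(lambda: flag['done'], timeout=1)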
def getg_PyInteractiveBody_one_in_two_out(): """Return a graph that has a PyInteractiveBody with one input and two outputs. """ @dl.Interactive( [("num", dl.Int(dl.Size(32)))], [('num_out', dl.Int(dl.Size(32))), ('val_out', dl.Bool())] ) def interactive_func(node: dl.PythonNode): for _ in range(2): num = node.receive("num") print(f"received num: {num}") node.send(num_out=None, val_out=False) node.send(num_out=14, val_out=False) s0 = dl.lib.StateSaver(bool, condition=lambda x: x) s1 = dl.lib.StateSaver(int, verbose=True) with dl.DeltaGraph() as graph: int_func = interactive_func.call(4) s0.save_and_exit_if(int_func.val_out) s1.save_and_exit(int_func.num_out) return graph
5,339,832
def batch_norm_relu(inputs, is_training): """Performs a batch normalization followed by a ReLU.""" # We set fused=True for a performance boost. inputs = tf.layers.batch_normalization( inputs=inputs, axis=FLAGS.input_layout.find('C'), momentum=FLAGS.batch_norm_decay, epsilon=FLAGS.batch_norm_epsilon, center=True, scale=True, training=is_training, fused=FLAGS.use_fused_batchnorm) return tf.nn.relu(inputs)
5,339,833
def knn(points, p, k): """ Calculates the k nearest neighbours of a point. :param points: list of points :param p: reference point :param k: amount of neighbours :return: list of k neighbours """ return sorted(points, key=lambda x: distance(p, x))[:k]
5,339,834
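A usage sketch for knn above. The `distance` helper it relies on is not part of this record, so a Euclidean stand-in is assumed here.

import math

def distance(a, b):
    # hypothetical stand-in for the project's distance helper
    return math.dist(a, b)

points = [(0, 0), (1, 1), (3, 3), (5, 5)]
print(knn(points, (0, 0), k=2))   # [(0, 0), (1, 1)]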
def ruleset_detail(request, slug): """ View that returns the specific ruleset identified by the slug passed by the user, in JSON format. :param request: WSGI request from user :param slug: slug of the requested ruleset :return: Specific ruleset metadata in JSON format. """ # try to fetch ruleset from database try: ruleset = Ruleset.objects.get(slug=slug) except Ruleset.DoesNotExist: return HttpResponse(status=404) if request.method == 'GET': serializer = RulesetSerializer(ruleset) return JsonResponse(serializer.data)
5,339,835
def send_slack_notification(message): """ Send slack notification Arguments: message {string} -- Slack notification message Returns: response {Response} -- Http response object """ response = requests.post( SLACK_WEBHOOK, data=json.dumps( { "text": message, "username": USERNAME, "channel": CHANNEL, "icon_emoji": ICON_EMOJI, } ), ) return response
5,339,836
def options_handler(): """Validates and parses script arguments. Returns: Namespace: Parsed arguments object. """ parser = argparse.ArgumentParser(description="Downloads XSOAR packs as zip and their latest docker images as tar.") parser.add_argument('-p', '--packs', help="A list of pack names as they appear in https://xsoar.pan.dev/marketplace. Either provided " "via a path to a file that contains the packs list (separated by new lines) or " "a string of comma separated packs (e.g. Base,AutoFocus)", required=False) parser.add_argument('-o', '--output_path', help="The path where the files will be saved to.", required=False, default=".") parser.add_argument('-sp', '--skip_packs', help="Don't download packs.", required=False, action='store_true') parser.add_argument('-sd', '--skip_docker', help="Don't download docker images.", required=False, action='store_true') parser.add_argument('--insecure', help="Skip certificate validation.", dest='feature', action='store_true') parser.set_defaults(skip_packs=False, skip_docker=False, insecure=False) return parser.parse_args()
5,339,837
def method_detect(method: str): """Detects which method to use and returns its object""" if method in POSTPROCESS_METHODS: if method == "rtb-bnb": return RemovingTooTransparentBordersHardAndBlurringHardBorders() elif method == "rtb-bnb2": return RemovingTooTransparentBordersHardAndBlurringHardBordersTwo() else: return None else: return False
5,339,838
def approve_pipelines_for_publishing(pipeline_ids): # noqa: E501 """approve_pipelines_for_publishing # noqa: E501 :param pipeline_ids: Array of pipeline IDs to be approved for publishing. :type pipeline_ids: List[] :rtype: None """ return util.invoke_controller_impl()
5,339,839
def get_x(document_id, word2wid, corpus_termfrequency_vector): """ Get the feature vector of a document. Parameters ---------- document_id : int word2wid : dict corpus_termfrequency_vector : list of int Returns ------- list of int """ word_list = list(reuters.words(document_id)) word_count = float(len(word_list)) assert word_count > 0 document_tf_vec = get_termfrequency_vector(word2wid, word_list) x = [] for i, wd_count in enumerate(document_tf_vec): x.append(wd_count / (word_count * corpus_termfrequency_vector[i])) return x
5,339,840
def csrf_protect(remainder, params): """ Perform CSRF protection checks. Performs checks to determine if submitted form data matches the token in the cookie. It is assumed that the GET request handler successfully set the token for the request and that the form was instrumented with a CSRF token field. Use the :py:func:`~csrf_token` decorator to do this. Generally, the handler does not need to do anything CSRF-protection-specific. All it needs is the decorator:: @csrf_protect @expose('myapp.templates.protected_post_handler') def protected_post_handler(): if successful: tg.redirect('/someplace') return dict(errors="There were some errors") """ req = tg.request._current_obj() secret, token_name, path, expires = _get_conf() token = req.signed_cookie(token_name, secret=secret) if not token: tg.abort(403, 'The form you submitted is invalid or has expired') form_token = req.args_params.get(token_name) if form_token != token.decode(ENCODING): tg.response.delete_cookie(token_name, path=path) tg.abort(403, 'The form you submitted is invalid or has expired')
5,339,841
def process_chain_of_trust(host: str, image: Image, req_delegations: list): """ Processes the whole chain of trust, provided by the notary server (`host`) for any given `image`. The 'root', 'snapshot', 'timestamp', 'targets' and potentially 'targets/releases' are requested in this order and afterwards validated, also according to the `policy_rule`. Returns the the signed image targets, which contain the digests. Raises `NotFoundExceptions` should no required delegetions be present in the trust data, or no image targets be found. """ tuf_roles = ["root", "snapshot", "timestamp", "targets"] trust_data = {} key_store = KeyStore() # get all trust data and collect keys (from root and targets), as well as # hashes (from snapshot and timestamp) for role in tuf_roles: trust_data[role] = get_trust_data(host, image, TUFRole(role)) key_store.update(trust_data[role]) # if the 'targets.json' has delegation roles defined, get their trust data # as well if trust_data["targets"].has_delegations(): for delegation in trust_data["targets"].get_delegations(): trust_data[delegation] = get_trust_data(host, image, TUFRole(delegation)) # validate all trust data's signatures, expiry dates and hashes for role in trust_data: trust_data[role].validate(key_store) # validate needed delegations if req_delegations: if trust_data["targets"].has_delegations(): delegations = trust_data["targets"].get_delegations() req_delegations_set = set(req_delegations) delegations_set = set(delegations) delegations_set.discard("targets/releases") # make an intersection between required delegations and actually # present ones if not req_delegations_set.issubset(delegations_set): missing = list(req_delegations_set - delegations_set) raise NotFoundException( "could not find delegation roles {} in trust data.".format( str(missing) ) ) else: raise NotFoundException("could not find any delegations in trust data.") # if certain delegations are required, then only take the targets fields of the # required delegation JSON's. otherwise take the targets field of the targets JSON, as # long as no delegations are defined in the targets JSON. should there be delegations # defined in the targets JSON the targets field of the releases JSON will be used. if req_delegations: image_targets = [ trust_data[target_role].signed.get("targets", {}) for target_role in req_delegations ] else: targets_key = ( "targets/releases" if trust_data["targets"].has_delegations() else "targets" ) image_targets = [trust_data[targets_key].signed.get("targets", {})] if not any(image_targets): raise NotFoundException("could not find any image digests in trust data.") return image_targets
5,339,842
def collect_js( deps, closure_library_base = None, has_direct_srcs = False, no_closure_library = False, css = None): """Aggregates transitive JavaScript source files from unfurled deps.""" srcs = [] direct_srcs = [] ijs_files = [] infos = [] modules = [] descriptors = [] stylesheets = [] js_module_roots = [] has_closure_library = False for dep in deps: srcs += [getattr(dep.closure_js_library, "srcs", depset())] ijs_files += [getattr(dep.closure_js_library, "ijs_files", depset())] infos += [getattr(dep.closure_js_library, "infos", depset())] modules += [getattr(dep.closure_js_library, "modules", depset())] descriptors += [getattr(dep.closure_js_library, "descriptors", depset())] stylesheets += [getattr(dep.closure_js_library, "stylesheets", depset())] js_module_roots += [getattr(dep.closure_js_library, "js_module_roots", depset())] has_closure_library = ( has_closure_library or getattr(dep.closure_js_library, "has_closure_library", False) ) if no_closure_library: if has_closure_library: fail("no_closure_library can't be used when Closure Library is " + "already part of the transitive closure") elif has_direct_srcs and not has_closure_library: direct_srcs += closure_library_base has_closure_library = True if css: direct_srcs += closure_library_base + [css.closure_css_binary.renaming_map] return struct( srcs = depset(direct_srcs, transitive = srcs), js_module_roots = depset(transitive = js_module_roots), ijs_files = depset(transitive = ijs_files), infos = depset(transitive = infos), modules = depset(transitive = modules), descriptors = depset(transitive = descriptors), stylesheets = depset(transitive = stylesheets), has_closure_library = has_closure_library, )
5,339,843
def test_get_region_order(): """ Test the ordering of models given a chromosome, start and end parameter """ print("test_get_region_order") hdf5_handle = coord('test', '') results = hdf5_handle.get_resolutions() hdf5_handle.set_resolution(int(results[0])) region_ids = get_region_ids(hdf5_handle, True) print('\tRegion Order:', region_ids) results = hdf5_handle.get_region_order(region_ids['chromosome'], region_ids['region_ids'][0]) print('\tRegion Order:', results) assert results is not None
5,339,844
def send_control(uuid, type, data): """ Sends control data to the terminal, such as resize events """ sp = sessions[uuid] if type == 'resize': import termios import struct import fcntl winsize = struct.pack("HHHH", data['rows'], data['cols'], 0, 0) fcntl.ioctl(sp['ptymaster'].fileno(), termios.TIOCSWINSZ, winsize) return True else: serverboards.warning("Unknown control type: %s" % (type)) return False
5,339,845
def get_absolute_path(path): """ Returns absolute path. """ if path.startswith("/"): return path else: return os.path.join(HOME_DIR, path)
5,339,846
def read_mat1(name: str, group: h5py._hl.dataset.Dataset, geom_model: BDF) -> None: """ Dataset: attrs : <Attributes of HDF5 object at 2553977821512> chunks : (310,) compression : 'gzip' compression_opts : 1 dims : <Dimensions of HDF5 object at 2553977821512> dtype : dtype([('MID', '<i8'), ('E', '<f8'), ('G', '<f8'), ('NU', '<f8'), ('RHO', '<f8'), ('A', '<f8'), ('TREF', '<f8'), ('GE', '<f8'), ('ST', '<f8'), ('SC', '<f8'), ('SS', '<f8'), ('MCSID', '<i8'), ('DOMAIN_ID', '<i8')]) external : None file : <HDF5 file "6+element-nastran-sol103.h5" (mode r)> fillvalue : (0, 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0, 0) fletcher32 : False id : <h5py.h5d.DatasetID object at 0x00000252A4F0D948> is_virtual : False maxshape : (None,) name : '/NASTRAN/INPUT/MATERIAL/MAT1' nbytes : 104 ndim : 1 parent : <HDF5 group "/NASTRAN/INPUT/MATERIAL" (1 members)> ref : <HDF5 object reference> regionref : <h5py._hl.base._RegionProxy object at 0x00000252A60614C8> scaleoffset : None shape : (1,) shuffle : True size : 1 """ #'MID', 'E', 'G', 'NU', 'RHO', 'A', 'TREF', 'GE', 'ST', 'SC', 'SS', 'MCSID', 'DOMAIN_ID', assert len(group.dtype.names) == 13, group.dtype.names MID = group['MID'] E = group['E'] G = group['G'] NU = group['NU'] RHO = group['RHO'] A = group['A'] TREF = group['TREF'] GE = group['GE'] ST = group['ST'] SC = group['SC'] SS = group['SS'] MCSID = group['MCSID'] DOMAIN_ID = group['DOMAIN_ID'] for mid, e, g, nu, rho, a, tref, ge, st, sc, ss, mcsid in zip(MID, E, G, NU, RHO, A, TREF, GE, ST, SC, SS, MCSID): #if mcid == -1: #theta_mcid = theta #else: #asdf #assert tflag == 0, tflag #t1, t2, t3, t4 = [ti if ti != -1.0 else None #for ti in t] assert mcsid == 0, mcsid obj = geom_model.add_mat1(mid, e, g, nu, rho=rho, a=a, tref=tref, ge=ge, St=st, Sc=sc, Ss=ss, mcsid=mcsid, comment='') obj.validate() str(obj)
5,339,847
def polyfit(x, y, deg=1): """Perform linear/polynomial regression. Usage: cat a.csv | ph polyfit x y cat a.csv | ph polyfit x y --deg=1 # default cat a.csv | ph polyfit x y --deg=2 Outputs a column polyfit_{deg} containing the fitted polynomial evaluated at x. """ df = pipein() _assert_cols(df, (x, y), "polyfit") deg = __tryparse(deg) if not isinstance(deg, int) or deg <= 0: sys.exit("deg={} should be a positive int".format(deg)) try: import numpy except ImportError: sys.exit("numpy needed for polyfit. pip install numpy") polynomial = numpy.polyfit(df[x], df[y], deg=deg) f = numpy.poly1d(polynomial) df["polyfit_{}".format(deg)] = df[x].apply(f) pipeout(df)
5,339,848
def get_prefix_by_xml_filename(xml_filename): """ Gets the prefix associated with an xml file Parameters ---------- xml_filename : str Name of the xml file Returns ------- str Prefix associated with the xml file """ file, ext = os.path.splitext(xml_filename) return file
5,339,849
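Usage sketch for get_prefix_by_xml_filename above (the file name is made up for illustration).

print(get_prefix_by_xml_filename("0034-8910-rsp-48-2-0206.xml"))
# 0034-8910-rsp-48-2-0206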
def dgausscdf(x): """ Derivative of the cumulative distribution function for the normal distribution. """ return gausspdf(x)
5,339,850
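A numerical sanity check of the identity behind dgausscdf above: the derivative of the Gaussian CDF is the Gaussian PDF. scipy.stats.norm is used here as a stand-in for the module's gausspdf/gausscdf helpers, assumed to be the standard normal.

from scipy.stats import norm

x, h = 0.7, 1e-5
centered_diff = (norm.cdf(x + h) - norm.cdf(x - h)) / (2 * h)
print(abs(centered_diff - norm.pdf(x)) < 1e-8)   # True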
def LStatFile(path): """ LStat the file. Do not follow the symlink. """ d = None error = None try: d=os.lstat(path) except OSError as error: print("Exception lstating file " + path + " Error Code: " + str(error.errno) + " Error: " +error.strerror, file=sys.stderr) LG().Log('ERROR', "Exception lstating file " + path + " Error Code: " + str(error.errno) + " Error: " + error.strerror) except IOError as error: print("Exception lstating file " + path + " Error Code: " + str(error.errno) + " Error: " +error.strerror, file=sys.stderr) LG().Log('ERROR', "Exception lstating file " + path + " Error Code: " + str(error.errno) + " Error: " + error.strerror) return d
5,339,851
def run(module): """Change OpVariable (of Function storage class) to registers.""" for function in module.functions: process_function(module, function)
5,339,852
def update_pod(pod): """ Called when a Pod update with type MODIFIED is received. Compares if the labels have changed. If they have, updates the Calico endpoint for this pod. """ # Get Kubernetes labels and metadata. workload_id, namespace, name, labels = parse_pod(pod) _log.debug("Updating pod: %s", workload_id) # Check if the labels have changed for this pod. If they haven't, # do nothing. old_labels = label_cache.get(workload_id) _log.debug("Compare labels on %s. cached: %s, new: %s", workload_id, old_labels, labels) if old_labels == labels: _log.debug("Ignoring updated for %s with no label change", workload_id) return # Labels have changed. # Check our cache to see if we already know about this endpoint. If not, # re-load the entire cache from etcd and try again. _log.info("Labels for %s have been updated", workload_id) endpoint = endpoint_cache.get(workload_id) if not endpoint: # No endpoint in our cache. _log.info("No endpoint for %s in cache, loading", workload_id) load_caches() endpoint = endpoint_cache.get(workload_id) if not endpoint: # No endpoint in etcd - this means the pod hasn't been # created by the CNI plugin yet. Just wait until it has been. # This can only be hit when labels for a pod change before # the pod has been deployed, so should be pretty uncommon. _log.info("No endpoint for pod %s - wait for creation", workload_id) return _log.debug("Found endpoint for %s", workload_id) # Update the labels on the endpoint. endpoint.labels = labels client.set_endpoint(endpoint) # Update the label cache with the new labels. label_cache[workload_id] = labels # Update the endpoint cache with the modified endpoint. endpoint_cache[workload_id] = endpoint
5,339,853
def model_datasets_to_rch(gwf, model_ds, print_input=False): """convert the recharge data in the model dataset to a recharge package with time series. Parameters ---------- gwf : flopy.mf6.modflow.mfgwf.ModflowGwf groundwater flow model. model_ds : xr.DataSet dataset containing relevant model grid information print_input : bool, optional value is passed to flopy.mf6.ModflowGwfrch() to determine if input should be printed to the lst file. Default is False Returns ------- rch : flopy.mf6.modflow.mfgwfrch.ModflowGwfrch recharge package """ # check for nan values if model_ds['recharge'].isnull().any(): raise ValueError('please remove nan values in recharge data array') # get stress period data if model_ds.steady_state: mask = model_ds['recharge'] != 0 if model_ds.gridtype == 'structured': rch_spd_data = mdims.data_array_2d_to_rec_list( model_ds, mask, col1='recharge', first_active_layer=True, only_active_cells=False) elif model_ds.gridtype == 'vertex': rch_spd_data = mdims.data_array_1d_vertex_to_rec_list( model_ds, mask, col1='recharge', first_active_layer=True, only_active_cells=False) # create rch package rch = flopy.mf6.ModflowGwfrch(gwf, filename=f'{gwf.name}.rch', pname=f'{gwf.name}', fixed_cell=False, maxbound=len(rch_spd_data), print_input=True, stress_period_data={0: rch_spd_data}) return rch # transient recharge if model_ds.gridtype == 'structured': empty_str_array = np.zeros_like(model_ds['idomain'][0], dtype="S13") model_ds['rch_name'] = xr.DataArray(empty_str_array, dims=('y', 'x'), coords={'y': model_ds.y, 'x': model_ds.x}) model_ds['rch_name'] = model_ds['rch_name'].astype(str) # dimension check if model_ds['recharge'].dims == ('time', 'y', 'x'): axis = 0 rch_2d_arr = model_ds['recharge'].data.reshape( (model_ds.dims['time'], model_ds.dims['x'] * model_ds.dims['y'])).T # check if reshaping is correct if not (model_ds['recharge'].values[:, 0, 0] == rch_2d_arr[0]).all(): raise ValueError( 'reshaping recharge to calculate unique time series did not work out as expected') elif model_ds['recharge'].dims == ('y', 'x', 'time'): axis = 2 rch_2d_arr = model_ds['recharge'].data.reshape( (model_ds.dims['x'] * model_ds.dims['y'], model_ds.dims['time'])) # check if reshaping is correct if not (model_ds['recharge'].values[0, 0, :] == rch_2d_arr[0]).all(): raise ValueError( 'reshaping recharge to calculate unique time series did not work out as expected') else: raise ValueError('expected dataarray with 3 dimensions' f'(time, y and x) or (y, x and time), not {model_ds["recharge"].dims}') rch_unique_arr = np.unique(rch_2d_arr, axis=0) rch_unique_dic = {} for i, unique_rch in enumerate(rch_unique_arr): model_ds['rch_name'].data[np.isin( model_ds['recharge'].values, unique_rch).all(axis=axis)] = f'rch_{i}' rch_unique_dic[f'rch_{i}'] = unique_rch mask = model_ds['rch_name'] != '' rch_spd_data = mdims.data_array_2d_to_rec_list(model_ds, mask, col1='rch_name', first_active_layer=True, only_active_cells=False) elif model_ds.gridtype == 'vertex': empty_str_array = np.zeros_like(model_ds['idomain'][0], dtype="S13") model_ds['rch_name'] = xr.DataArray(empty_str_array, dims=('cid'), coords={'cid': model_ds.cid}) model_ds['rch_name'] = model_ds['rch_name'].astype(str) # dimension check if model_ds['recharge'].dims == ('cid', 'time'): rch_2d_arr = model_ds['recharge'].values elif model_ds['recharge'].dims == ('time', 'cid'): rch_2d_arr = model_ds['recharge'].values.T else: raise ValueError('expected dataarray with 2 dimensions' f'(time, cid) or (cid, time), not {model_ds["recharge"].dims}') rch_unique_arr = 
np.unique(rch_2d_arr, axis=0) rch_unique_dic = {} for i, unique_rch in enumerate(rch_unique_arr): model_ds['rch_name'][(rch_2d_arr == unique_rch).all( axis=1)] = f'rch_{i}' rch_unique_dic[f'rch_{i}'] = unique_rch mask = model_ds['rch_name'] != '' rch_spd_data = mdims.data_array_1d_vertex_to_rec_list(model_ds, mask, col1='rch_name', first_active_layer=True, only_active_cells=False) # create rch package rch = flopy.mf6.ModflowGwfrch(gwf, filename=f'{gwf.name}.rch', pname='rch', fixed_cell=False, maxbound=len(rch_spd_data), print_input=print_input, stress_period_data={0: rch_spd_data}) # get timesteps tdis_perioddata = mfpackages.get_tdis_perioddata(model_ds) perlen_arr = [t[0] for t in tdis_perioddata] time_steps_rch = [0.0] + np.array(perlen_arr).cumsum().tolist() # create timeseries packages for i, key in tqdm(enumerate(rch_unique_dic.keys()), total=len(rch_unique_dic.keys()), desc="Building ts packages rch"): # add extra time step to the time series object (otherwise flopy fails) recharge_val = list(rch_unique_dic[key]) + [0.0] recharge = list(zip(time_steps_rch, recharge_val)) if i == 0: rch.ts.initialize(filename=f'{key}.ts', timeseries=recharge, time_series_namerecord=key, interpolation_methodrecord='stepwise') else: rch.ts.append_package(filename=f'{key}.ts', timeseries=recharge, time_series_namerecord=key, interpolation_methodrecord='stepwise') return rch
5,339,854
def create_events_to_group( search_query: str, valid_events: bool, group: Group, amount: int = 1, venue: bool = False, ) -> List[Event]: """ Create random test events and save them to a group Arguments: search_query {str} -- query term used for the search request valid_events {bool} -- whether the created events should be findable by the query term group {Group} -- group to add the events to Keyword Arguments: amount {int} -- how many events should be created (default: {1}) venue {bool} -- whether a venue should be added to the events (default: {False}) Returns: List[Event] -- created & saved events """ created_events: List[Event] = [] for i in range(0, amount): event_name: str = random_string(search_query=search_query, valid=valid_events) event: Event = Event( meetup_id=event_name, time=datetime.now(), name=event_name, link="http://none", date_in_series_pattern=False, ) if venue: event.venue_name = event_name event.venue_location = {"lat": i + 1, "lon": i + 1} created_events.append(event) group.add_events(events=created_events) group.save() sleep(1) return created_events
5,339,855
def alt_stubbed_receiver() -> PublicKey: """Arbitrary known public key to be used as receiver.""" return PublicKey("J3dxNj7nDRRqRRXuEMynDG57DkZK4jYRuv3Garmb1i98")
5,339,856
def create_api_headers(token): """ Create the API headers. These are sent along with the request for verification. """ auth_type = 'Basic ' + base64.b64encode((token + ":").encode('ascii')).decode('ascii') return { 'Authorization': auth_type, 'Accept': 'application/json', 'Content-Type': 'application/json' }
5,339,857
def bv2fif(dataf, corf, ch_order=None, aux=('VEOG', 'HEOG', 'ECG', 'EMG'), preload='default', ref_ch='Fp1', dbs=False, use_find_events='dbs', tmin=-2, tmax=2, baseline=(-0.5,-0.1), detrend=1): """Function to convert .eeg, .vmrk and .vhdr BrainVision files to a combined .fif format file. Parameters ---------- dataf : str The .vhdr file that contains references to the data corf : str The COR.fif file that contains the montage for this particular subject. This will be generated through mne_analyze, possibly while using the TMS-EEG GUI. ch_order : list of str | None If not 'None', a custom channel order is used. aux : tuple Auxillary/accessory electrodes to be included in the data. preload : 'default' | False If false, load data into RAM, if true memory map to disk ref_ch : str Reference channel used dbs: bool If true stim channels are named 'DBS' and use_find_events is true by default. use_find_events : bool | 'dbs' If true or 'dbs' and dbs is true, then the peak amplitude will be used to find stimulus markers. tmin: float Time when epochs start tmax: float Time when epochs end baseline: tuple (float, float) | None Time to use for baseline mean subtraction detrend: int 1 for linear detrend, 0 for mean subtraction and None for nothing Notes ----- An older version of MNE contained a bug that prevented adding channels while using memory mapping. A script to circumvent this also exists. """ use_find_events = ((dbs and use_find_events == 'dbs') or (isinstance(use_find_events, bool) and use_find_events)) if preload == 'default': preload = os.path.dirname(dataf) + '/workfile' # raw = read_raw_brainvision(dataf, preload=preload) if corf is None: montage = None elif '.bvct' in op.basename(corf): montage = read_dig_montage(bvct=corf) elif '.csv' in op.basename(corf): montage = read_dig_montage(csv=corf) else: raise ValueError('corf not understood') # if ch_order is None: if all([ch in ch_name_order for ch in raw.ch_names]): order_dict = {ch: ch_name_order.index(ch) for ch in raw.ch_names} ch_order = sorted(order_dict, key=order_dict.get) else: # if no channel order is given and we get names we didn't expect, just sort the channels ch_order = sorted(inst.ch_names) # if use_find_events: event_ch = get_events(raw) old_event_ch = [ch for ch in raw.info['ch_names'] if 'STI' in ch] if old_event_ch: raw.drop_channels([old_event_ch[0]]) raw.add_channels([event_ch]) if use_find_events and use_find_events != 'dbs': raw.rename_channels({'DBS':'TMS'}) else: events, event_ids = events_from_annotations(raw) # prepInst(raw, dataf, 'raw', montage, ref_ch, aux, 'DBS' if dbs else 'TMS', ch_order)
5,339,858
def _combine(bundle, transaction_managed=False, rollback=False, use_reversion=True): """ Returns one sreg and DHCP output for that SREG. If rollback is True the sreg will be created and then rolleback, but before the rollback all its HWAdapters will be polled for their DHCP output. """ bundle['errors'] = None bundle['old-dhcp-output'] = get_all_dhcp_for_system(bundle['system']) sreg = StaticReg( label=bundle['a'].label, domain=bundle['a'].domain, ip_str=bundle['ip'], system=bundle['system'], description='Migrated SREG', ip_type=bundle['a'].ip_type ) try: bundle['new-dhcp-output'] = ( "<span class='no-dhcp-output'>No new DHCP output</span>" ) view_names = [v.name for v in bundle['a'].views.all()] try: bundle['a'].delete(check_cname=False, call_prune_tree=False) except ValidationError, e: rollback = True bundle['errors'] = 'Error while deleting the A record.' + str(e) return try: bundle['ptr'].delete() except ValidationError, e: rollback = True bundle['errors'] = 'Error while deleting the PTR record.' + str(e) return try: sreg.save() for name in view_names: sreg.views.add(View.objects.get(name=name)) if use_reversion: reversion.set_comment('Migrated via combine()') except ValidationError, e: rollback = True bundle['errors'] = 'Error while creating the SREG record.' + str(e) return for nic in bundle['hwadapters']: hw_info, kvs = nic.emit_hwadapter() if not hw_info['mac']: rollback = True return try: hw, _ = HWAdapter.objects.get_or_create( sreg=sreg, mac=hw_info['mac'] ) # HWAdapter class does this for us. #hw.name = hw_info['name'].replace hw.save() except ValidationError, e: rollback = True bundle['errors'] = 'Error while creating HW Adapter' return try: for kv in kvs: if kv['key'] in ('hostname', 'option_hostname'): # If the option host-name value matches the SREG fqdn # we don't need to add the option, it will be added by # default. all other cases it will be overriden. if kv['value'] == sreg.fqdn: continue else: key = 'host_name' else: key = kv['key'] if HWAdapterKeyValue.objects.filter(key=key, obj=hw).exists(): pass else: kv_ = HWAdapterKeyValue( key=key, value=kv['value'], obj=hw ) kv_.clean() kv_.save() for kv in nic._nic: SystemKeyValue.objects.filter(pk=kv.pk).delete() except ValidationError, e: transaction.rollback() bundle['errors'] = ( 'Error while creating HW Adapter KeyValue. ' + str(e) ) return bundle['new-dhcp-output'] = get_all_dhcp_for_system(bundle['system']) return sreg finally: if not transaction_managed: if rollback: transaction.rollback() else: transaction.commit()
5,339,859
def get_node_session(*args, **kwargs): """Creates a NodeSession instance using the provided connection data. Args: *args: Variable length argument list with the connection data used to connect to the database. It can be a dictionary or a connection string. **kwargs: Arbitrary keyword arguments with connection data used to connect to the database. Returns: mysqlx.XSession: XSession object. """ settings = _get_connection_settings(*args, **kwargs) if "routers" in settings: raise InterfaceError("NodeSession expects only one pair of host and port") return NodeSession(settings)
5,339,860
def load_aaz_command_table(loader, aaz_pkg_name, args): """ This function is used in AzCommandsLoader.load_command_table. It will load commands in module's aaz package. """ profile_pkg = _get_profile_pkg(aaz_pkg_name, loader.cli_ctx.cloud) command_table = {} command_group_table = {} arg_str = ' '.join(args) fully_load = os.environ.get(AAZ_PACKAGE_FULL_LOAD_ENV_NAME, 'False').lower() == 'true' # used to disable cut logic if profile_pkg is not None: _load_aaz_pkg(loader, profile_pkg, command_table, command_group_table, arg_str, fully_load) for group_name, command_group in command_group_table.items(): loader.command_group_table[group_name] = command_group for command_name, command in command_table.items(): loader.command_table[command_name] = command return command_table, command_group_table
5,339,861
def edit_role(payload, search_term): """Find and edit the role.""" role = Role.query.get(search_term) # if edit request == stored value if not role: return response_builder(dict(status="fail", message="Role does not exist."), 404) try: if payload["name"] == role.name: return response_builder(dict( data=dict(path=role.serialize()), message="No change specified." ), 200) else: old_role_name = role.name role.name = payload["name"] role.save() return response_builder(dict( data=dict(path=role.serialize()), message="Role {} has been changed" " to {}.".format(old_role_name, role.name) ), 200) except KeyError: return response_builder( dict(status="fail", message="Name to edit to must be provided."), 400)
5,339,862
def use_redis_cache(key, ttl_sec, work_func): """Attempts to return the value cached under key, otherwise caches and returns the result of `work_func`""" redis = redis_connection.get_redis() cached_value = get_pickled_key(redis, key) if cached_value: return cached_value to_cache = work_func() pickle_and_set(redis, key, to_cache, ttl_sec) return to_cache
5,339,863
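A hedged usage sketch for use_redis_cache above; redis_connection, get_pickled_key and pickle_and_set are project helpers that are not shown in this record, and the key and work function here are made up.

def compute_trending_tracks():
    # stand-in for an expensive query whose result is worth caching
    return [{"track_id": 1, "plays": 42}]

tracks = use_redis_cache("trending:weekly", ttl_sec=3600,
                         work_func=compute_trending_tracks)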
def counting_sort(array, low, high): """Counting sort (CountingSort). Sort the given array 'array', knowing that it only contains values in the interval from 'low' to 'high' (interval endpoints included). Return the sorted array. """ counts = [0 for i in range(high - low + 1)] for elem in array: counts[elem - low] += 1 current = 0 for i in range(high - low + 1): for j in range(current, current + counts[i]): array[j] = i + low current += counts[i] return array
5,339,864
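Usage sketch for counting_sort above; every value must lie between low and high.

print(counting_sort([4, 2, 7, 2, 9, 4], low=2, high=9))
# [2, 2, 4, 4, 7, 9]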
async def wait_until( wait_until_timestamp: pd.Timestamp, get_wall_clock_time: hdateti.GetWallClockTime, *, tag: Optional[str] = None, ) -> None: """ Wait until the wall clock time is `timestamp`. """ if tag is None: # Use the name of the function calling this function. tag = hintros.get_function_name(count=1) curr_timestamp = get_wall_clock_time() _LOG.debug( "wait until timestamp=%s, curr_timestamp=%s", wait_until_timestamp, curr_timestamp, ) # We only wait for times in the future. hdbg.dassert_lte(curr_timestamp, wait_until_timestamp) # time_in_secs = (wait_until_timestamp - curr_timestamp).seconds _LOG.debug( "%s: wall_clock_time=%s: sleep for %s secs", tag, get_wall_clock_time(), time_in_secs, ) hdbg.dassert_lte(0, time_in_secs) await asyncio.sleep(time_in_secs) hprint.log_frame( _LOG, "%s: wall_clock_time=%s: done waiting", tag, get_wall_clock_time() )
5,339,865
def normalize_archives_url(url): """ Normalize url. will try to infer, find or guess the most useful archives URL, given a URL. Return normalized URL, or the original URL if no improvement is found. """ # change new IETF mailarchive URLs to older, still available text .mail archives new_ietf_exp = re.compile( "https://mailarchive\\.ietf\\.org/arch/search/" "\\?email_list=(?P<list_name>[\\w-]+)" ) ietf_text_archives = ( r"https://www.ietf.org/mail-archive/text/\g<list_name>/" ) new_ietf_browse_exp = re.compile( r"https://mailarchive.ietf.org/arch/browse/(?P<list_name>[\w-]+)/?" ) match = new_ietf_exp.match(url) if match: return re.sub(new_ietf_exp, ietf_text_archives, url) match = new_ietf_browse_exp.match(url) if match: return re.sub(new_ietf_browse_exp, ietf_text_archives, url) return url
5,339,866
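Usage sketch for normalize_archives_url above: new-style IETF mailarchive URLs are rewritten to the text-archive form, anything else is returned unchanged (the list name here is just an example).

print(normalize_archives_url("https://mailarchive.ietf.org/arch/browse/quic/"))
# https://www.ietf.org/mail-archive/text/quic/
print(normalize_archives_url("https://example.org/pipermail/dev/"))
# https://example.org/pipermail/dev/  (unchanged)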
def logs(): """ :return: The absolute path to the directory that contains Benchmark's log file. """ return os.path.join(benchmark_confdir(), "logs")
5,339,867
def getIsolatesFromIndices(indices): """ Extracts the isolates from the indices of a df_X. :param pandas.index indices: cn.KEY_ISOLATE_DVH, cn.KEY_ISOLATE_MMP :return dict: keyed by cn.KEY_ISOLATE_DVH, cn.KEY_ISOLATE_MMP values correspond to rows element in the index """ keys = [n for n in indices.names] result = {} for idx, key in enumerate(keys): result[key] = [v[idx] for v in indices.values] return result
5,339,868
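A hedged sketch for getIsolatesFromIndices above, using a small two-level MultiIndex; the level names stand in for the cn.KEY_ISOLATE_DVH / cn.KEY_ISOLATE_MMP constants.

import pandas as pd

idx = pd.MultiIndex.from_tuples(
    [("dvh_1", "mmp_a"), ("dvh_2", "mmp_b")],
    names=["isolate_dvh", "isolate_mmp"])
print(getIsolatesFromIndices(idx))
# {'isolate_dvh': ['dvh_1', 'dvh_2'], 'isolate_mmp': ['mmp_a', 'mmp_b']}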
def save_group_geo_org(user_id, group_id, area_id, org_unit_id): """Method for attaching org units and sub-counties.""" try: if org_unit_id: geo_org_perm, ctd = CPOVCUserRoleGeoOrg.objects.update_or_create( user_id=user_id, group_id=group_id, org_unit_id=org_unit_id, is_void=False, defaults={'area_id': area_id, 'org_unit_id': org_unit_id, 'user_id': user_id, 'group_id': group_id, 'is_void': False},) geo_org_perm, ctd = CPOVCUserRoleGeoOrg.objects.update_or_create( user_id=user_id, group_id=group_id, area_id=area_id, is_void=False, defaults={'area_id': area_id, 'org_unit_id': org_unit_id, 'user_id': user_id, 'group_id': group_id, 'is_void': False},) except Exception, e: error = 'Error searching org unit -%s' % (str(e)) print error return None else: return geo_org_perm, ctd
5,339,869
def tf_efficientnet_lite0(pretrained=False, **kwargs): """ EfficientNet-Lite0 """ # NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2 kwargs['bn_eps'] = BN_EPS_TF_DEFAULT kwargs['pad_type'] = 'same' model = _gen_efficientnet_lite( 'tf_efficientnet_lite0', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs) return model
5,339,870
def ensuredir(dirpath): """ ensure @dirpath exists and return it again; raises OSError on errors other than EEXIST """ try: os.makedirs(dirpath, 0o700) except FileExistsError: pass return dirpath
5,339,871
def get_qualname(node: ast.AST) -> Optional[str]: """ If node represents a chain of attribute accesses, return its qualified name. """ parts = [] while True: if isinstance(node, ast.Name): parts.append(node.id) break elif isinstance(node, ast.Attribute): parts.append(node.attr) node = node.value else: return None return '.'.join(reversed(parts))
5,339,872
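Usage sketch for get_qualname above: it walks an attribute-access chain back to its root name and gives None for anything else.

import ast

print(get_qualname(ast.parse("os.path.join", mode="eval").body))   # os.path.join
print(get_qualname(ast.parse("f()", mode="eval").body))            # None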
def roc_plot(FLAGS, y_test, y_score, target_names): """Plot Receiver Operating Characteristic curve Args: FLAGS (argument parser): input information y_test (2D array): true labels of test data y_score (2D array): predicted scores for test data target_names (1D array): array of encoded labels Returns: None -- the ROC figure is saved to FLAGS.output_dir """ n_classes = y_test.shape[1] lw = 2 # Compute ROC curve and ROC area for each class fpr = dict() tpr = dict() roc_auc = dict() for i in range(n_classes): fpr[i], tpr[i], _ = roc_curve(y_test[:, i], y_score[:, i]) roc_auc[i] = auc(fpr[i], tpr[i]) # First aggregate all false positive rates all_fpr = np.unique(np.concatenate([fpr[i] for i in range(n_classes)])) # Then interpolate all ROC curves at these points mean_tpr = np.zeros_like(all_fpr) for i in range(n_classes): mean_tpr += interp(all_fpr, fpr[i], tpr[i]) # Finally average it and compute AUC mean_tpr /= n_classes # Plot all ROC curves plt.figure() # colors = cycle(['aqua', 'darkorange', 'cornflowerblue']) for i in range(n_classes): plt.plot(fpr[i], tpr[i], lw=lw, label='ROC curve of class {0} (area = {1:0.2f})' ''.format(target_names[i], roc_auc[i])) plt.plot([0, 1], [0, 1], 'k--', lw=lw) plt.xlim([0.0, 1.0]) plt.ylim([0.0, 1.05]) plt.xlabel('False Positive Rate') plt.ylabel('True Positive Rate') plt.title('Receiver Operating Characteristic - {}'.format(FLAGS.model_name)) plt.legend(loc="lower right") plt.savefig(os.path.join(FLAGS.output_dir, 'roc_of_{}.png'.format(FLAGS.model_name))) plt.close()
5,339,873
def traceUsage(addr, register, steps): """ Given a start address, a register which holds a value and the number of steps, this function disassembles forward #steps instructions and traces the value of <register> until it is used in a call instruction. It then returns the offset added to <register> and the address of the call Note that this tracing is very basic and does neither handle multiple registers at the same time nor any other modification than adding constants e.g.: 00401622 mov eax, g_IAT //start at addr = 0x00401622, register = "eax" 00401627 mov ecx, [eax+0Ch] //trace ecx, forget eax from now on. Save offset "0x0C" 0040162A push edx //ignore 0040162B call ecx //return offset 0x0c and address 0x0040162B """ potentialOffset = -1 localRegister = register for step in range(steps): addr = NextHead(addr) dis = GetMnem(addr) if dis == 'mov' and localRegister in GetOpnd(addr,1): #look for e.g."mov eax, [<register>+1CCh]" potentialOffset = GetOpnd(addr,1) if potentialOffset[0] != '[' or potentialOffset[-1] != ']': #"[<register>+1CCh]" continue potentialOffset = potentialOffset[1:-1] #"<register>+1CCh" if '+' in potentialOffset: #we might have had "mov ecx, [eax]", so there is no plus potentialOffset = potentialOffset.split(register+'+')[1] # "1CCh" else: potentialOffset = "0" if potentialOffset.endswith('h'): potentialOffset = int(potentialOffset[:-1], 16) / 4 #"1cc" else: potentialOffset = int(potentialOffset) / 4 localRegister = GetOpnd(addr,0) #get new register to search for upcoming call-instruction elif dis == 'call' and GetOpnd(addr,0) == localRegister: return potentialOffset, addr if potentialOffset != -1: print "[-] Error: Got potentialOffset %s but no corresponding call - maybe increase the steps range?" % (str(potentialOffset)) return -1, -1
5,339,874
def index_folder(folder, images=[]): """ simple multi-threaded recursive function to map a folder Args: @param folder: folder str path to folder @param images: images list containing absolute paths of directory images Returns: List with image paths """ print(f'Entering {folder}') folders = [] for i in os.listdir(folder): item_path = os.path.join(folder, i) try: Image.open(item_path, mode='r') images.append(item_path) except (PermissionError, IsADirectoryError): print(f'found folder {i}') print(item_path) folders.append(item_path) if folders: with ThreadPool(cpu_count()) as pool: pool.map_async(index_folder, folders).get() return images
5,339,875
def array3d (surface): """pygame.surfarray.array3d (Surface): return array Copy pixels into a 3d array. Copy the pixels from a Surface into a 3D array. The bit depth of the surface will control the size of the integer values, and will work for any type of pixel format. This function will temporarily lock the Surface as pixels are copied (see the Surface.lock - lock the Surface memory for pixel access method). """ global numpysf try: return numpysf.array3d(surface) except AttributeError: import pygame._numpysurfarray as numpysf return numpysf.array3d(surface)
5,339,876
def masked_kl_div(input, target, mask): """Evaluate masked KL divergence between input activations and target distribution. Parameters: input (tensor) - NxD batch of D-dimensional activations (un-normalized log distribution). target (tensor) - NxD normalized target distribution. mask (tensor, torch.bool) - NxD mask of elements to include in calculation. Returns: Nx1 tensor of cross-entropy calculation results. """ input = input.clone() input[~mask] = -float('inf') log_q = F.log_softmax(input, dim=1) log_q[~mask] = 0 log_p = torch.log(target) log_p[~mask] = 0 KLi = target * (log_p - log_q) KLi[target == 0] = 0 KL = torch.sum(KLi, dim=1, keepdim=True) return KL
5,339,877
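A small sketch for masked_kl_div above on one 3-dimensional example; the masked-out last column is excluded from both the softmax and the KL sum.

import torch

logits = torch.tensor([[2.0, 0.5, -1.0]])
target = torch.tensor([[0.7, 0.3, 0.0]])
mask = torch.tensor([[True, True, False]])
print(masked_kl_div(logits, target, mask))   # roughly tensor([[0.0405]]), shape (1, 1)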
def create_sample_data(input_seqs, sample_size): """ Takes a sample of size 'sample_size' from an input file containing sequences and their associated expression levels, and writes them to a separate file. The format of the first 2 lines of the resulting output file will be of the format: " number_of_seqs_in_file\t<###> length_of_each_sequence\t<$$$> " where '<###>' is the number of sequences in the file, and '<$$$>'is the length to which every sequence in the file is padded. Args: ----- input_seqs (str) -- the absolute path of the input file containing sequence and expression level data to sample. sample_size (int) -- the number of samples to take from the input file. Returns: ----- sample_data (str) -- the absolute path of the output file containing the sample of sequence and expression level data. """ # Assertions assert isinstance(input_seqs, str), 'Input sequences file path must be\ passed as a string.' assert os.path.exists(input_seqs), 'Input file does not exist.' assert isinstance(sample_size, int), 'Number of sequences to sample must\ be passed as an integer.' assert sample_size < get_seq_count(input_seqs), 'Sample size must be\ smaller than the number of sequences in the input file.' # Functionality # Define output file path index = input_seqs.rfind('/') + 1 insert = str(sample_size) + '_from_' sample_seqs = input_seqs[:index] + insert + input_seqs[index:] # Pull sequences to create sample data with smart_open(input_seqs, 'r') as inf: inf.readline() inf.readline() # skip the first 2 info lines all_lines = inf.readlines() for i in range(50): lines = random.sample(all_lines, sample_size) with smart_open(sample_seqs, 'w') as g: for line in lines: g.write(line) # Write number and length of sequence info to top of resulting file write_num_and_len_of_seqs_to_file(sample_seqs) return sample_seqs
5,339,878
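A hedged call sketch for the sampler above; the input path is a placeholder, and `smart_open`, `get_seq_count`, and `write_num_and_len_of_seqs_to_file` are assumed to come from the same module.

# Draw 1000 sequences from a (hypothetical) padded-sequence file.
sample_path = create_sample_data('/data/expression/all_seqs.txt', 1000)
print('sample written to', sample_path)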
def find_peaks(ts, mindist=100):
    """
    Find peaks in time series

    :param ts: pandas Series holding the signal values
    :param mindist: minimum drop (or rise) from the current extremum before it is accepted
    :return: (peakvalues, peaktimes) lists of peak values and their positions
    """
    extreme_value = -np.inf
    extreme_idx = 0
    peakvalues = []
    peaktimes = []
    find_peak = True
    idx = 0
    for r in ts.items():
        # print(r)
        if find_peak:
            # look for maximum
            if r[1] > extreme_value:
                # update current maximum point
                extreme_value = r[1]
                extreme_idx = idx
            elif r[1] + mindist < extreme_value:
                # consider current maximum a peak
                peakvalues.append(extreme_value)
                peaktimes.append(extreme_idx)
                # update current maximum
                extreme_value = r[1]
                extreme_idx = idx
                find_peak = False
        else:
            # look for minimum
            if r[1] < extreme_value:
                # update value
                extreme_value = r[1]
                extreme_idx = idx
            elif r[1] - mindist > extreme_value:
                extreme_value = r[1]
                extreme_idx = idx
                find_peak = True
        idx += 1
    return peakvalues, peaktimes
5,339,879
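An illustrative call to the peak finder above on a synthetic pandas Series; `mindist` is scaled to the toy amplitude, not a recommended default.

import numpy as np
import pandas as pd

t = np.linspace(0, 4 * np.pi, 400)
signal = pd.Series(1000 * np.sin(t) + 50 * np.random.randn(400))

values, positions = find_peaks(signal, mindist=300)
print(list(zip(positions, np.round(values, 1))))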
def test_extract_legacy_bad_top_dir(tmpdir): """ Test Extract Legacy Bad Top Dir """ src = tmpdir.mkdir("src") boost = src.mkdir("boost") boost.ensure("lib", "libboost.so", file=True) res = qisys.archive.compress(boost.strpath) dest = tmpdir.mkdir("dest").join("boost-1.55") qitoolchain.qipackage.extract(res, dest.strpath) assert dest.join("lib", "libboost.so").check(file=True)
5,339,880
def load_dataset(name, root, sample="default", **kwargs): """ Default dataset wrapper :param name (string): Name of the dataset (Out of cifar10/100, imagenet, tinyimagenet, CUB200, STANFORD120, MIT67). :param root (string): Path to download the dataset. :param sample (string): Default (random) sampling as the classic pytorch dataloader or Pairwise sampling as mentioned in the paper "Regularizing Class-wise Predictions via Self-knowledge Distillation" """ # Dataset if name in ["imagenet", "tinyimagenet", "CUB200", "STANFORD120", "MIT67"]: if name == "tinyimagenet": transform_train = transforms.Compose( [ transforms.RandomResizedCrop(32), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)), ] ) transform_test = transforms.Compose( [ transforms.Resize(32), transforms.ToTensor(), transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)), ] ) train_val_dataset_dir = os.path.join(root, "train") test_dataset_dir = os.path.join(root, "val") trainset = DatasetWrapper( datasets.ImageFolder( root=train_val_dataset_dir, transform=transform_train ) ) valset = DatasetWrapper( datasets.ImageFolder(root=test_dataset_dir, transform=transform_test) ) elif name == "imagenet": transform_train = transforms.Compose( [ transforms.RandomResizedCrop(224), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)), ] ) transform_test = transforms.Compose( [ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)), ] ) train_val_dataset_dir = os.path.join(root, "train") test_dataset_dir = os.path.join(root, "val") trainset = DatasetWrapper( datasets.ImageFolder( root=train_val_dataset_dir, transform=transform_train ) ) valset = DatasetWrapper( datasets.ImageFolder(root=test_dataset_dir, transform=transform_test) ) else: transform_train = transforms.Compose( [ transforms.RandomResizedCrop(224), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)), ] ) transform_test = transforms.Compose( [ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)), ] ) train_val_dataset_dir = os.path.join(root, name, "train") test_dataset_dir = os.path.join(root, name, "test") trainset = DatasetWrapper( datasets.ImageFolder( root=train_val_dataset_dir, transform=transform_train ) ) valset = DatasetWrapper( datasets.ImageFolder(root=test_dataset_dir, transform=transform_test) ) elif name.startswith("cifar"): transform_train = transforms.Compose( [ transforms.RandomCrop(32, padding=4), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize( (0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010) ), ] ) transform_test = transforms.Compose( [ transforms.ToTensor(), transforms.Normalize( (0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010) ), ] ) if name == "cifar10": CIFAR = datasets.CIFAR10 else: CIFAR = datasets.CIFAR100 trainset = DatasetWrapper( CIFAR(root, train=True, download=True, transform=transform_train) ) valset = DatasetWrapper( CIFAR(root, train=False, download=True, transform=transform_test) ) else: raise Exception("Unknown dataset: {}".format(name)) # Sampler if sample == "default": get_train_sampler = lambda d: BatchSampler( RandomSampler(d), kwargs["batch_size"], False ) get_test_sampler = lambda d: BatchSampler( 
            SequentialSampler(d), kwargs["batch_size"], False
        )
    elif sample == "pair":
        get_train_sampler = lambda d: PairBatchSampler(d, kwargs["batch_size"])
        get_test_sampler = lambda d: BatchSampler(
            SequentialSampler(d), kwargs["batch_size"], False
        )
    else:
        raise Exception("Unknown sampling: {}".format(sample))

    train_loader = DataLoader(
        trainset, batch_sampler=get_train_sampler(trainset), num_workers=4
    )
    val_loader = DataLoader(
        valset, batch_sampler=get_test_sampler(valset), num_workers=4
    )

    return train_loader, val_loader
5,339,881
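A hedged example of wiring the loader above into a training script; it assumes `DatasetWrapper` and `PairBatchSampler` are importable from the same module, that CIFAR-10 can be downloaded to `./data`, and that `batch_size` is the only keyword the samplers need.

train_loader, val_loader = load_dataset("cifar10", "./data", sample="default", batch_size=128)

# The exact batch structure depends on DatasetWrapper; just peek at the first batch.
first_batch = next(iter(train_loader))
print(type(first_batch), len(first_batch))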
def get_flanking_seq(genome, scaffold, start, end, flanking_length): """ Get flanking based on Blast hit """ for rec in SeqIO.parse(genome, "fasta"): if rec.id == scaffold: return str( rec.seq[int(start) - int(flanking_length) : int(end) + int(flanking_length)] )
5,339,882
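A hedged invocation of the flanking-sequence helper above; the FASTA path, scaffold name, coordinates, and flank length are placeholders.

# Extract a BLAST hit plus 500 bp of flanking sequence on each side.
flank = get_flanking_seq("genome.fasta", "scaffold_12", 10500, 11200, 500)
if flank:
    print(len(flank), flank[:60])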
def d(vars): """List of variables starting with string "df" in reverse order. Usage: d(dir()) @vars list of variables output by dir() command """ list_of_dfs = [item for item in vars if (item.find('df') == 0 and item.find('_') == -1 and item != 'dfs')] list_of_dfs.sort(key=lambda x:int(re.sub("[^0-9]", "", x.replace('df',''))) if len(x) > 2 else 0, reverse=True) return list_of_dfs
5,339,883
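Usage follows the docstring; this hedged snippet only shows the expected ordering with a few throwaway frames (and assumes `re` is imported in the module defining `d`).

import pandas as pd

df1 = pd.DataFrame({'a': [1]})
df2 = pd.DataFrame({'a': [2]})
df10 = pd.DataFrame({'a': [3]})

print(d(dir()))  # expected: ['df10', 'df2', 'df1']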
def write_to_bin(out_file, filename, article_tag, summary_tag, makevocab=False): """Reads the tokenized files corresponding to the urls listed in the url_file and writes them to a out_file.""" print "Making bin file for filename %s..." % filename if makevocab: vocab_counter = collections.Counter() with open(out_file, 'wb') as writer: # Look in the tokenized story dirs to find the .story file corresponding to this url # if os.path.isfile(os.path.join(tokenized_articles_dir, filename + "." + article_tag + '.txt')): article_file = os.path.join(tokenized_articles_dir, filename + "." + article_tag + '.txt') summary_file = os.path.join(tokenized_articles_dir, filename + "." + summary_tag + '.txt') # Get the strings to write to .bin file article, abstract = get_art_abs(article_file, summary_file) # Write to tf.Example tf_example = example_pb2.Example() tf_example.features.feature['article'].bytes_list.value.extend([article]) tf_example.features.feature['abstract'].bytes_list.value.extend([abstract]) tf_example_str = tf_example.SerializeToString() str_len = len(tf_example_str) writer.write(struct.pack('q', str_len)) writer.write(struct.pack('%ds' % str_len, tf_example_str)) # Write the vocab to file, if applicable if makevocab: art_tokens = article.split(' ') abs_tokens = abstract.split(' ') abs_tokens = [t for t in abs_tokens if t not in [SENTENCE_START, SENTENCE_END]] # remove these tags from vocab tokens = art_tokens + abs_tokens tokens = [t.strip() for t in tokens] # strip tokens = [t for t in tokens if t!=""] # remove empty vocab_counter.update(tokens) print "Finished writing file %s\n" % out_file # write vocab to file if makevocab: print "Writing vocab file..." with open(os.path.join(finished_files_dir, "vocab"), 'w') as writer: for word, count in vocab_counter.most_common(VOCAB_SIZE): writer.write(word + ' ' + str(count) + '\n') print "Finished writing vocab file"
5,339,884
def get_class_namespaces(cls: type) -> tuple[Namespace, Namespace]: """ Return the module a class is defined in and its internal dictionary Returns: globals, locals """ return inspect.getmodule(cls).__dict__, cls.__dict__ | {cls.__name__: cls}
5,339,885
def fetch_and_save_latest_definitions( base_api_url, cache, output_dir=None, save_to_db=False, save_to_fstate=False, by_latest=True, retries=2, verbose=True): """ Fetch ClearlyDefined definitions and paginate through. Save these as blobs to data_dir. Fetch the most recently updated definitions first if `by_latest` is True. Otherwise, the order is not specified. NOTE: these do not contain file details (but the harvest do) """ assert output_dir or save_to_db or save_to_fstate, 'You must select one of the --output-dir or --save-to-db or save_to_fstate options.' if save_to_db: from clearcode import dbconf dbconf.configure(verbose=verbose) definitions_url = cdutils.append_path_to_url(base_api_url, extra_path='definitions') if by_latest: definitions_url = cdutils.update_url(definitions_url, qs_mapping=dict(sort='releaseDate', sortDesc='true')) for content in fetch_definitions(api_url=definitions_url, cache=cache, retries=retries, verbose=TRACE): # content is a batch of 100 definitions definitions = content and content.get('data') if not definitions: if verbose: print(' No more data for: {}'.format(definitions_url)) break if verbose: first = cdutils.coord2str(definitions[0]['coordinates']) last = cdutils.coord2str(definitions[-1]['coordinates']) print('Fetched definitions from :', first, 'to:', last, flush=True) else: pass # print('.', end='', flush=True) savers = [] if save_to_db: savers.append(db_saver) if output_dir: savers.append(file_saver) # if save_to_fstate: # output_dir = 'definitions' # savers.append(finitestate_saver) # we received a batch of definitions: let's save each as a Gzipped JSON for definition in definitions: coordinate = cdutils.Coordinate.from_dict(definition['coordinates']) blob_path = None for saver in savers: blob_path, _size = save_def( coordinate=coordinate, content=definition, output_dir=output_dir, saver=saver) yield coordinate, blob_path
5,339,886
def write_stream(path, sync=True, *args, **kwargs): """Creates a writer object (context manager) to write multiple dataframes into one file. Must be used as context manager. Parameters ---------- path : str, filename or path to database table sync : bool, default True Set to `False` to run the writer in the background process. args, kwargs : parameters passed to writer driver (see erde.io modules) Example: with write_stream('/tmp/my_file.gpkg') as write: for df in data_generator(): write(df) """ from .io import select_driver dr, pm = select_driver(path) return dr.write_stream(path, sync=sync, *args, **kwargs)
5,339,887
def diff(
    df: DataFrame,
    columns: Dict[str, str],
    periods: int = 1,
    axis: PandasAxis = PandasAxis.ROW,
) -> DataFrame:
    """
    Calculate row-by-row or column-by-column difference for select columns.

    :param df: DataFrame on which the diff will be based.
    :param columns: columns on which to perform diff, mapping source column to
           target column. For instance, `{'y': 'y'}` will replace the column `y` with
           the diff value in `y`, while `{'y': 'y2'}` will add a column `y2` based
           on diff values calculated from `y`, leaving the original column `y`
           unchanged.
    :param periods: periods to shift for calculating difference.
    :param axis: 0 for row, 1 for column. default 0.
    :return: DataFrame with diffed columns
    :raises QueryObjectValidationError: If the request is incorrect
    """
    df_diff = df[columns.keys()]
    df_diff = df_diff.diff(periods=periods, axis=axis)
    return _append_columns(df, df_diff, columns)
5,339,888
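To make the semantics concrete, here is a plain-pandas sketch of what the helper computes for `columns={'y': 'y2'}`; the final write-back is only described, since `_append_columns` is not shown here.

import pandas as pd

df = pd.DataFrame({'y': [10, 13, 19, 40], 'z': [1, 1, 2, 3]})

# The helper diffs only the selected source columns ...
diffed = df[['y']].diff(periods=1)
print(diffed)
# ... and then writes the result back onto df under the target names
# ('y' in place, or a new column such as 'y2') via _append_columns.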
def change(par, value): """ Set to change a parameter to another value. Parameters ---------- par: str Name of the parameter to change. value: any Value to be set to the parameter. """ if "CONFIG_CHANGED_PARS" not in globals(): global CONFIG_CHANGED_PARS CONFIG_CHANGED_PARS = dict() CONFIG_CHANGED_PARS[par] = value
5,339,889
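A short, hedged example of the override mechanism above, assuming it runs in the module that defines `change`; the parameter names are hypothetical.

change("n_iterations", 500)
change("verbose", True)

# Downstream configuration code is expected to consult this mapping.
print(CONFIG_CHANGED_PARS)  # {'n_iterations': 500, 'verbose': True}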
def decodeTx(data: bytes) -> Transaction: """Function to convert base64 encoded data into a transaction object Args: data (bytes): the data to convert Returns a transaction object """ data = base64.b64decode(data) if data[:1] != tx_flag: return None timestamp = float(data[1:21].decode('utf-8')) hash = data[21:53].hex() script_sig = data[53:117].hex() inputs = [] outputs = [] io = data[117:].split(array_flag) for x in io: if x[:1] == tx_in: pub_key = x[1:34].hex() sig = x[34:98].hex() utxoRef = x[98:].decode('utf-8') inputs.append(Input(utxoRef, pub_key, sig)) elif x[:1] == tx_out: addr = x[1:35].decode('utf-8') amount = float(x[35:].decode('utf-8')) outputs.append(Output(addr, amount)) tx = Transaction(inputs, outputs) tx.timestamp = timestamp tx.hash = hash tx.script_sig = script_sig return tx
5,339,890
def create_package_from_datastep(table): """Create an importable model package from a score code table. Parameters ---------- table : swat.CASTable The CAS table containing the score code. Returns ------- BytesIO A byte stream representing a ZIP archive which can be imported. See Also -------- :meth:`model_repository.import_model_from_zip <.ModelRepository.import_model_from_zip>` """ assert 'DataStepSrc' in table.columns sess = table.session.get_connection() dscode = table.to_frame().loc[0, 'DataStepSrc'] file_metadata = [{'role': 'score', 'name': 'dmcas_scorecode.sas'}] zip_file = _build_zip_from_files({ 'fileMetadata.json': file_metadata, 'dmcas_scorecode.sas': dscode }) return zip_file
5,339,891
def _test_diff(diff: list[float]) -> tuple[float, float, float]:
    """Sequential test for the median difference, corrected for multiple testing.

    The test is one-sided, so the p-value would need to be multiplied by 2, but it is
    carried out twice.
    """
    _, upper = seq.median_conf_bound(diff, config.P_VALUE / population.count())
    return float(np.median(diff)), upper, np.max(diff)
5,339,892
def ProcessFiles(merged_store, filenames, callback): """Fetch and process each file contained in 'filenames'.""" @gen.engine def _ProcessOneFile(contents, day_stats): """Iterate over the contents of a processed file: one entry per line. Increment stats for specific entries.""" buf = cStringIO.StringIO(contents) buf.seek(0) ui_events = Counter() while True: line = buf.readline() if not line: break parsed = json.loads(line) if not parsed: continue if 'version' not in parsed: continue # TODO(marc): lookup the user's device ID in dynamodb and get device model. payload = parsed['payload'] if 'name' in payload: if payload['name'] == '/assets/scan' and payload['type'] == 'full': day_stats.AddScan(parsed['version'], payload['num_scanned'], payload['elapsed']) elif payload['name'].startswith('/ui/'): ui_events[payload['name']] += 1 if ui_events: ui_events['/ui/anything'] += 1 day_stats.AddEvents(ui_events) buf.close() today = util.NowUTCToISO8601() # Group filenames by day. files_by_day = defaultdict(list) for filename in filenames: _, day, user = filename.split('/') if options.options.compute_today or today != day: files_by_day[day].append(filename) # Compute per-day totals. Toss them into a list, we'll want it sorted. stats_by_day = {} for day in sorted(files_by_day.keys()): # We don't really need to process days in-order, but it's nicer. files = files_by_day[day] day_stats = DayStats(day) for f in files: contents = '' try: contents = yield gen.Task(merged_store.Get, f) except Exception as e: logging.error('Error fetching file %s: %r' % (f, e)) continue _ProcessOneFile(contents, day_stats) if len(day_stats._long_scan_speeds) == 0: continue dd = DotDict() for p in [1, 5, 10, 25, 50, 75, 90, 95, 99]: dd['user_analytics.scans_gt1s_speed_percentile.%.2d' % p] = day_stats.LongScanSpeedPercentile(p) dd['user_analytics.scans_duration_percentile.%.2d' % p] = day_stats.ScanDurationPercentile(p) dd['user_analytics.scans_num_photos_percentile.%.2d' % p] = day_stats.PhotosScannedPercentile(p) dd['user_analytics.ui.event_users'] = day_stats.event_users dd['user_analytics.ui.total_events'] = day_stats.total_events stats_by_day[day] = dd callback(stats_by_day)
5,339,893
def get_repo_of_app_or_library(app_or_library_name): """ This function takes an app or library name and will return the corresponding repo for that app or library""" specs = get_specs() repo_name = specs.get_app_or_lib(app_or_library_name)['repo'] if not repo_name: return None return Repo(repo_name)
5,339,894
def get_index_price_change_by_ticker(fromdate: str, todate: str, market: str="KOSPI") -> DataFrame:
    """Price change of every index over the given period.

    Args:
        fromdate (str          ): start date of the query (YYMMDD)
        todate   (str          ): end date of the query (YYMMDD)
        market   (str, optional): market to query (KOSPI/KOSDAQ/RKX/테마)

    Returns:
        DataFrame: columns are, in order, open, close, change rate, volume, and
        trading value; the index holds the index name.

            >> get_index_price_change_by_ticker("20210101", "20210130")

                                     시가       종가      등락률       거래량          거래대금
            지수명
            코스피                2873.47    3152.18   9.703125  7162398637  149561467924511
            코스피 200             389.29     430.22  10.507812  2221276866  119905899468167
            코스피 100            2974.06    3293.96  10.757812  1142234783   95023508273187
            코스피 50             2725.20    3031.59  11.242188   742099360   79663247553065
            코스피 200 중소형주     1151.78    1240.92   7.738281  1079042083   24882391194980
    """
    if isinstance(fromdate, datetime.datetime):
        fromdate = _datetime2string(fromdate)

    if isinstance(todate, datetime.datetime):
        todate = _datetime2string(todate)

    fromdate = fromdate.replace("-", "")
    todate = todate.replace("-", "")

    # Business-day adjustment required by a limitation of the KRX web server
    fromdate = get_nearest_business_day_in_a_week(fromdate, prev=False)
    todate = get_nearest_business_day_in_a_week(todate)

    return krx.get_index_price_change_by_ticker(fromdate, todate, market)
5,339,895
def axpy(alpha, x, y, stream=None): """y <- alpha*x + y """ global _blas if not isinstance(alpha, Number): raise ValueError('alpha is not a numeric type') validate_argument_dtype(x, 'x') validate_argument_dtype(y, 'y') if not _blas: _blas = Blas() _blas.stream = stream dtype = promote(promote(type(alpha), x.dtype), y.dtype) yf = colmajor(y, dtype, 'y') _blas.axpy(dtype.type(alpha), x.astype(dtype), yf) if y.dtype == yf.dtype and not alias(y, yf): y[:] = yf return y else: return yf
5,339,896
def _unenroll_get_hook(app_context): """Add field to unenroll form offering data removal, if policy supports.""" removal_policy = _get_removal_policy(app_context) return removal_policy.add_unenroll_additional_fields(app_context)
5,339,897
def main(): """Run experiment with multiple classifiers.""" data = get_data() print("Got %i training samples and %i test samples." % (len(data['train']['X']), len(data['test']['X']))) # Get classifiers classifiers = [ ('Logistic Regression (C=1)', LogisticRegression(C=1)), ('Logistic Regression (C=1000)', LogisticRegression(C=10000)), ('RBM 200, n_iter=40, LR=0.01, Reg: C=1', Pipeline(steps=[('rbm', BernoulliRBM(n_components=200, n_iter=40, learning_rate=0.01, verbose=True)), ('logistic', LogisticRegression(C=1))])), ('RBM 200, n_iter=40, LR=0.01, Reg: C=10000', Pipeline(steps=[('rbm', BernoulliRBM(n_components=200, n_iter=40, learning_rate=0.01, verbose=True)), ('logistic', LogisticRegression(C=10000))])), ('RBM 100', Pipeline(steps=[('rbm', BernoulliRBM(n_components=100)), ('logistic', LogisticRegression(C=1))])), ('RBM 100, n_iter=20', Pipeline(steps=[('rbm', BernoulliRBM(n_components=100, n_iter=20)), ('logistic', LogisticRegression(C=1))])), ('RBM 256', Pipeline(steps=[('rbm', BernoulliRBM(n_components=256)), ('logistic', LogisticRegression(C=1))])), ('RBM 512, n_iter=100', Pipeline(steps=[('rbm', BernoulliRBM(n_components=512, n_iter=10)), ('logistic', LogisticRegression(C=1))])), ('NN 20:5', skflow.TensorFlowDNNClassifier(hidden_units=[20, 5], n_classes=data['n_classes'], steps=500)), # ('NN 500:200 dropout', # skflow.TensorFlowEstimator(model_fn=dropout_model, # n_classes=10, # steps=20000)), # ('CNN', skflow.TensorFlowEstimator(model_fn=conv_model, # n_classes=10, # batch_size=100, # steps=20000, # learning_rate=0.001)), ('SVM, adj.', SVC(probability=False, kernel="rbf", C=2.8, gamma=.0073, cache_size=200)), ('SVM, linear', SVC(kernel="linear", C=0.025, cache_size=200)), ('k nn', KNeighborsClassifier(3)), ('Decision Tree', DecisionTreeClassifier(max_depth=5)), ('Random Forest', RandomForestClassifier(n_estimators=50, n_jobs=10)), ('Random Forest 2', RandomForestClassifier(max_depth=5, n_estimators=10, max_features=1, n_jobs=10)), ('AdaBoost', AdaBoostClassifier()), ('Naive Bayes', GaussianNB()), ('Gradient Boosting', GradientBoostingClassifier()), ('LDA', LinearDiscriminantAnalysis()), ('QDA', QuadraticDiscriminantAnalysis()) ] # Fit them all classifier_data = {} for clf_name, clf in classifiers: print("#" * 80) print("Start fitting '%s' classifier." % clf_name) examples = 100000 # Reduce data to make training faster t0 = time.time() clf.fit(data['train']['X'][:examples], data['train']['y'][:examples]) t1 = time.time() an_data = analyze(clf, data, t1 - t0, clf_name=clf_name) classifier_data[clf_name] = {'training_time': t1 - t0, 'testing_time': an_data['testing_time'], 'accuracy': an_data['accuracy']} print_website(classifier_data)
5,339,898
def query_fetch_bom_df(search_key: str, size: int) -> Union[pd.DataFrame, None]: """Fetch and return bom dataframe of the article Runs recursive query on database to fetch the bom. """ # Recursive query raw_query = f"""WITH cte AS ( SELECT * FROM [{DB_NAME}].[dbo].[{SQL_T_BOM}] WHERE father = '{search_key}' UNION ALL SELECT p.* FROM [{DB_NAME}].[dbo].[{SQL_T_BOM}] p INNER JOIN cte ON cte.child = p.father WHERE cte.child Like '%{size}' OR cte.child Like '%l' OR cte.child Like '%g' OR cte.child Like '%x' OR cte.child Like '%b' OR cte.child Like '%r' OR cte.child Like '%k' OR cte.child Like '%c' OR cte.child Like '4-pux%' OR cte.child Like '4-cca-ang%' ) SELECT * FROM cte ORDER BY cte.process_order, cte.father, cte.child option (maxrecursion 100);""" df = None try: df = pd.read_sql(raw_query, engine) except Exception as e: df = None return df
5,339,899
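A hedged call to the recursive BOM query above; it assumes the module-level `engine`, `DB_NAME`, and `SQL_T_BOM` are already configured, and the article code is a placeholder.

bom = query_fetch_bom_df("st-202-black", size=8)
if bom is not None:
    print(bom[["father", "child", "process_order"]].head())
else:
    print("query failed")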