Dataset columns: query (string, lengths 9 to 9.05k), document (string, lengths 10 to 222k), negatives (list, 19 to 20 items), metadata (dict). Each row pairs a natural-language query with a positive code document and a list of negative code snippets; the metadata dict records the (query, document, negatives) triplet objective.
Generates a key pair that can be used for Solo signed firmware updates. \b Generates a NIST P256 keypair. The public key must be copied into the correct source location in the Solo bootloader. The private key can be used for signing updates. You may optionally supply a file to seed the RNG for key generation.
def genkey(input_seed_file: Optional[str], output_pem_file: str) -> None: vk = pynitrokey.fido2.operations.genkey( output_pem_file, input_seed_file=input_seed_file ) local_print( "Public key in various formats:", None, [c for c in vk.to_string()], None, "".j...
[ "def genkey(input_seed_file, output_pem_file):\n\n vk = solo.operations.genkey(output_pem_file, input_seed_file=input_seed_file)\n\n print(\"Public key in various formats:\")\n print()\n print([c for c in vk.to_string()])\n print()\n print(\"\".join([\"%02x\" % c for c in vk.to_string()]))\n pr...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Signs a fw-hex file, outputs a .json file that can be used for a signed update.
def sign( verifying_key: str, app_hex: str, output_json: str, end_page: int, pages: int ) -> None: msg = pynitrokey.fido2.operations.sign_firmware( verifying_key, app_hex, APPLICATION_END_PAGE=end_page, PAGES=pages ) local_print(f"Saving signed firmware to: {output_json}") with open(output_...
[ "def sign(verifying_key, app_hex, output_json):\n\n msg = solo.operations.sign_firmware(verifying_key, app_hex)\n print(\"Saving signed firmware to\", output_json)\n with open(output_json, \"wb+\") as fh:\n fh.write(json.dumps(msg).encode())", "def sign_file(self, file_path):\n\t\twith open(file_p...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Merges hex files, and patches in the attestation key. \b If no attestation key is passed, uses default Solo Hacker one. Note that later hex files replace data of earlier ones, if they overlap.
def mergehex( attestation_key: Optional[bytes], attestation_cert: Optional[bytes], lock: bool, input_hex_files: List[str], output_hex_file: str, end_page: int, pages: int, ) -> None: pynitrokey.fido2.operations.mergehex( input_hex_files, output_hex_file, attestati...
[ "def mergehex(\n attestation_key, attestation_cert, lock, input_hex_files, output_hex_file, end_page\n):\n solo.operations.mergehex(\n input_hex_files,\n output_hex_file,\n attestation_key=attestation_key,\n APPLICATION_END_PAGE=end_page,\n attestation_cert=attestation_cert,...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
List all 'Nitrokey FIDO2' devices
def list() -> None: devs = nkfido2.find_all() local_print(":: 'Nitrokey FIDO2' keys") for c in devs: assert isinstance(c.dev, CtapHidDevice) descr = c.dev.descriptor if hasattr(descr, "product_name"): name = descr.product_name elif c.is_bootloader(): ...
[ "def list_devices(self):\n pass", "def get_device_list():\n token = get_auth_token() # Get Token\n url = \"https://{}/api/v1/network-device/1/10\".format(DNAC_URL)\n hdr = {'x-auth-token': token, 'content-type' : 'application/json'}\n resp = requests.get(url, headers=hdr) # Make the Get Reques...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Output COUNT number of random bytes, hex-encoded.
def hexbytes(count: int, serial: Optional[str]) -> None: if not 0 <= count <= 255: local_critical(f"Number of bytes must be between 0 and 255, you passed {count}") local_print(nkfido2.find(serial).get_rng(count).hex())
[ "def sample_checkCOUNT(self):\n self.open.write('SAMPLE:COUNT?')\n reply = self.open.read() \n return('Sample Count: ' + str(reply))", "def count_cmd(self):\r\n package = \"{0}:{1}\".format(self.ID, \"count\")\r\n return self.encode(package)", "def read_hex(count):\n gl...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Uses `hmac-secret` to implement a challenge-response mechanism. We abuse hmac-secret, which gives us `HMAC(K, hash(challenge))`, where `K` is a secret tied to the `credential_id`. We hash the challenge first, since a 32 byte value is expected (in original usage, it's a salt). This means that we first need to set up a crede...
def challenge_response( serial: Optional[str], host: str, user: str, prompt: str, credential_id: str, challenge: str, udp: bool, ) -> None: nkfido2.find().simple_secret( credential_id, challenge, host=host, user_id=user, serial=serial, pro...
[ "def hash_challenge(amt: str, challenge: str) -> str:\n h = hmac.new(amt.encode(), challenge.encode(), hashlib.sha256)\n return h.hexdigest()", "def generate_authenticator_response(nt_response,peer_challenge,authenticator_challenge,username,password=False,password_hash=False):\n Magic1=\"\\x4D\\x61\\x67\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test where clause in extractor sql statement
def test_sql_statement(self) -> None: with patch.object(SQLAlchemyExtractor, '_get_connection'): extractor = SnowflakeTableLastUpdatedExtractor() extractor.init(self.conf) self.assertTrue(self.where_clause_suffix in extractor.sql_stmt)
[ "def must_return_select_with_specific_where_clause():\n\ttest = Test(1, 2, 3)\n\tquery = 'SELECT a, b, c FROM Test WHERE a = 5 AND b = 4'\n\tselect_query = entity.select(test, where_clause={'a': 5, 'b': 4})\n\tassert query == select_query", "def sqlCondition(writer):", "def _where(self):\n result = []\n r...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test cluster_key in extractor sql stmt
def test_sql_statement(self) -> None: with patch.object(SQLAlchemyExtractor, '_get_connection'): extractor = SnowflakeTableLastUpdatedExtractor() extractor.init(self.conf) self.assertTrue(self.cluster_key in extractor.sql_stmt)
[ "def _statement2key(statement: sql.Selectable) -> str:\n return hashlib.sha256(str(statement.compile(compile_kwargs={'literal_binds': True})).encode()).hexdigest()", "def test_sql_statement(self) -> None:\n with patch.object(SQLAlchemyExtractor, '_get_connection'):\n extractor = Snowflake...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test SNOWFLAKE_DATABASE_KEY in extractor sql stmt
def test_sql_statement(self) -> None: with patch.object(SQLAlchemyExtractor, '_get_connection'): extractor = SnowflakeTableLastUpdatedExtractor() extractor.init(self.conf) self.assertTrue(self.snowflake_database_key in extractor.sql_stmt)
[ "def test_sql_statement(self) -> None:\n with patch.object(SQLAlchemyExtractor, '_get_connection'):\n extractor = SnowflakeTableLastUpdatedExtractor()\n extractor.init(self.conf)\n self.assertFalse(self.database_key in extractor.sql_stmt)", "def test_sql_statement(self) -> ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test DATABASE_KEY in extractor sql stmt
def test_sql_statement(self) -> None: with patch.object(SQLAlchemyExtractor, '_get_connection'): extractor = SnowflakeTableLastUpdatedExtractor() extractor.init(self.conf) self.assertFalse(self.database_key in extractor.sql_stmt)
[ "def test_sql_statement(self) -> None:\n with patch.object(SQLAlchemyExtractor, '_get_connection'):\n extractor = SnowflakeTableLastUpdatedExtractor()\n extractor.init(self.conf)\n self.assertTrue(self.snowflake_database_key in extractor.sql_stmt)", "def test_sql_statement(...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Ensure catalog is used as cluster in extract sql stmt
def test_sql_statement(self) -> None: with patch.object(SQLAlchemyExtractor, '_get_connection'): extractor = SnowflakeTableLastUpdatedExtractor() extractor.init(self.conf) self.assertTrue('table_catalog' in extractor.sql_stmt) self.assertFalse(self.cluster_key in ...
[ "def test_sql_statement(self) -> None:\n with patch.object(SQLAlchemyExtractor, '_get_connection'):\n extractor = SnowflakeTableLastUpdatedExtractor()\n extractor.init(self.conf)\n self.assertTrue(self.cluster_key in extractor.sql_stmt)", "def __test_catalog_object(self, db...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Checks for updates of latest plists on Apple content server and returns the resulting files as a dict
def check(apps, latest, check_limit=11): reg = re.compile(r'\d+.plist') # Compiled regex to strip the version value and plist file extension apps = sorted([re.sub(reg, '', app) for app in apps]) # Have to convert these over to basic app names not plist values supported = [_k for _k, _ in APPLICATIONS.item...
[ "def update_package_list():\n log_helper = logging_helper.logging_helper.Logger()\n data_collector = sysinfo_ops.DataCollect()\n\n # Determine architecture and proper repository\n config = manage_config.read_config_file()\n base_url = config.get('DefaultRepo', 'base_repo')\n curated_url = base_url...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Generates a new block on the grid.
def gen_block(self): new_block = random.randrange(7) self.center_block = [4, 4] if new_block == 0: # Z Block self.falling_blocks = [[4, 4], [4, 5], [3, 4], [3, 3]] elif new_block == 1: # L Block self.falling_blocks = [[4, 4], [4, 3], [4, 5]...
[ "def create_genesis_block():\n return Block(0, date.datetime.now(), \"010101\", {\"VIN\": 123456, \"Owner\": \"Qwertz\", \"Mileage\": 0},\n hash_a_block(0, date.datetime.now(), \"010101\", {\"VIN\": 123456, \"Owner\": \"Qwertz\", \"Mileage\": 0}))", "def __create_block(self, data, genesis=0):\n...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Move block left, if possible.
def move_left(self): stop = False for block in self.falling_blocks: # Check to see if block can go left if block[1] == 0 or self.grid[block[0]][block[1]-1][0] == 1: stop = True if not stop: center = self.get_center_block() block_ima...
[ "def move_left(self):\n for block in self.blocks:\n block.move_left()", "def move_left(self):\n self.set_position(self.get_position() - 1)\n return", "def move_left(self) -> None:\n empty_pos = self._get_empty_piece_position()\n # return if empty piece is on the fir...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Clear the numbered line and move all blocks above it down.
def clear_line(self, line): for i in range(line, 3, -1): for j in range(0, 10): self.grid[i][j] = self.grid[i - 1][j] self.score += 100
[ "def clear_previous_line():\n print(CURSOR_PREV_LINE, end=\"\")\n print(CLEAR_UNTIL_EOL, end=\"\")", "def _set_line_to_initial_position(self, line_no: int) -> Paragraph:\n self.lines[1][line_no] = None\n self[line_no].move_to(self.get_center() + self.lines_initial_positions[line_no])\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Converts (x, y) grid coordinates to (x, y) pixel coordinates.
def grid2pix(self, x, y): if (x < 0 or x > 9 or y < 0 or y > 23): try: raise IndexError except IndexError as inst: print("""x index must be between 0 and 9, y index must be between 4 and 23. (%d, %d) not valid.""" % (x, y)) pi...
[ "def grid_to_world(self, x, y):\n #XyCoordinates to be returned\n xyCoord = Point()\n\n #Assign coord values from grid values\n xyCoord.x = (x + 0.5) * self.map.info.resolution + self.map.info.origin.position.x\n xyCoord.y = (y + 0.5) * self.map.info.resolution + self.map.info.ori...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Draws every block currently on the screen.
def draw(self): for row in range(4, 24): for col in range(0, 10): if self.grid[row][col][0]: x, y = self.grid2pix(col, row) block_image = pygame.image.load(self.grid[row][col][1]) \ .convert() ...
[ "def block(self):\n # First blank the previous position\n for x, y in self.game.block.last():\n y -= self.game.grid.top_buffer\n if y >= 0:\n self.pixel(x, y, 0)\n # Then draw the new position\n for x, y in self.game.block.position():\n y -...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the embeddings (also external) for every term in a sentence. Returns a vector of all embeddings concatenated
def getWordEmbeddings(self, sentence, train): for root in sentence: c = float(self.wordsCount.get(root.norm, 0)) dropFlag = not train or (random.random() < (c/(0.25+c))) sys.stdout.flush() root.wordvec = self.wlookup[int(self.vocab.get(root.norm, 0)) if ...
[ "def words_embedding(words: list, glove):\n\n word_embeddings = map(partial(get_word_vec, glove=glove), words)\n concat_words_embedding = np.concatenate(list(word_embeddings))\n return concat_words_embedding", "def get_embeddings(tokenizer, model, texts):\n inputs = tokenizer(texts,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Function that returns a dictionary containing the population value for each country and year stored in the file, EXCEPT for the second column
def crecimiento_por_pais_anno(fichero): if not os.path.exists(fichero): raise FileNotFoundError("Error. El fichero no se encuentra en el directorio actual") elif not fichero.endswith(".csv"): raise Exception("Error. El fichero debe estar en formato csv") elif os.stat(fichero).st_size == 0:...
[ "def carica_dati_da_regione_piemonte():\n # elenco dei file csv con i dati dei positivi\n ifiles = sorted(Path('data').glob(\"dati*_da_regione_piemonte.csv\"))\n # read all the files into a dataframe array\n dfall = pd.concat((pd.read_csv(ifile, sep=\";\",\n dtype={\"Co...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Given a string, return a list of all substrings of that string with a given length. For example, substrings_of_len(2, "ABC") returns ["AB", "BC"].
def substrings_of_length(length, string): # You could also use a generator here, but I don't want to overcomplicate # things. substrings = [] for i in range(len(string) - length + 1): substrings.append(string[i : i + length]) return substrings
[ "def substrings(s, minlength=30):\n maxsize = current = len(s)\n result = []\n while current >= minlength:\n result.extend([s[start:start+current] \n for start in range(maxsize-current+1)])\n # range(5) is [0,1,2,3,4]\n current -= 1\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Helper function for generating transition probability matrix. Takes a current position (row, col) and an action and returns the new state, the reward for the state and whether the agent is done (won or fell in hole)
def update_prob_matrix(row, col, action): # Get next field coordinates with boundary checks new_row, new_col = increment(row, col, action) # Convert coordinates to state ID new_state = self.coords_to_state_idx[(new_row, new_col)] # Get new coordinate type ...
[ "def step(self, action):\n turn_continue = True\n while turn_continue:\n prob = np.random.random()\n probs = self.T[action, self.current_state, :]\n # print(prob)\n # print(probs)\n # print(probs.sum())\n # print('where')\n #...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Extracts chain information from a PDB file and returns a list of the chain characters
def getchains(pdbfile): try: read = open(pdbfile,'r') except IOError: print("getchains: Couldn't open file %s"%(pdbfile)) raise else: result = [] for line in read: if line[0:4]=='ATOM': if line[21] not in result and line[21].isalnum(): result.append(line[21]) el...
[ "def getChainIDsFromPDB(cls, filename, qparent=None):\n extension = filename.split('.')[-1].lower()\n if extension == 'pdb':\n linelist = []\n for line in open(filename, 'U'):\n if line[:6] == 'COMPND' and line[10:70].split(':')[0].strip() == 'CHAIN':\n linelist = line[...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
print the style info necessary for collapsible portion
def printStyle(): print('<style type="text/css">') print('.row { vertical-align: top; height:auto !important; }') print('.list {display:none; }') print('.show { display: none; }') print('.hide:target + .show {display: inline; }') print('.hide:target {display: none; }') print('.hide:target ~ .list {display...
[ "def getStyle(self):", "def style(self):\n return self.get_page().style", "def print_info(self) :\n\n print('-'*80)\n print('Material: %s' % self._material_name)\n print('Parameters: %s' % self._parameters)\n print('Material class: %s, ' % self._material_class)\n print(...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
print the first part of the collapsible portion
def collapseHead(): print('<div class="row">') print(' <a href="#hide1" class="hide" id="hide1">Show advanced options</a>') print(' <a href="#show1" class="show" id="show1">Hide advanced options</a>') print(' <div class="list">')
[ "def print_header():\n\n print('------------------------------------')\n print(' CAT FACTORY')\n print('------------------------------------')", "def print_element(self):\n\t\tif self.state == DISCOVERED:\n\t\t\tif self.content != \"0\":\n\t\t\t\treturn self.content\n\t\t\telse :\t\t\t\t\n\t\t\t...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
creates the parameters file from the data read from the form
def makeParameters(form,out): chainset = False chains = 'CHAIN ' checkboxes = ['reducing','molscript','hlfe','fing'] for value in form: if "chain_" in value: chains = "%s%s"%(chains,form[value].value) chainset = True else: if value not in checkboxes: try: out.writ...
[ "def makeParameters(form,out):\n chainset = False\n chains = 'CHAIN '\n checkboxes = ['reducing','molscript','hlfe','fing']\n for value in form:\n if \"chain_\" in value:\n chains = \"%s%s\"%(chains,form[value].value)\n chainset = True\n else:\n try:\n out.write(\"%s %s\\n\"%(value.u...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This script determines what parts of GeoFold need to be rerun and will send the results to secondScript
def fourthScript(form): oldParameters = {} changeParameters = ['REDUCING','BREAKCUT','PIVOTCUT','HINGECUT','SEAMCUT','BREAKPOINTENTROPY','HINGEPOINTENTROPY','TEMPERATURE','VOIDENTROPY','SOLIDITY','HBONDENERGY','HAMMONDSCALE','SIDECHAINENTROPY','HINGEBARRIER','PIVOTBARRIER','WATER','MAXSPLIT','MINSEG','CAVITATIO...
[ "def main():\n\t#set the conditionals for the calibrators and the hubble flow \n\tif input_params['fitcalib']:\n\t\tcalib_fits = fit_multiple_gps(gl=calib, out_direc='paper_plots/calib/', out_direc_fits='calib_lcs/fits/')\n\n\tif input_params['fitcalib'] and input_params['writecalib']:\n\t\tnp.savetxt('calib_fromsc...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns a git diff either from stdin or against a base.
def getDiff(base: Union[str, bool] = '') -> Iterable[str]: if type(base) is bool and base is True: if not sys.stdin.isatty(): return (line.rstrip('\n').rstrip('\r') for line in sys.stdin.readlines()) elif type(base) is str: cmd = ['git', 'diff'] if base: cmd += [str(base)] return run(cmd...
[ "def diff(self, base=\"commit\"):\n if base == \"commit\":\n base = None\n if base == \"dependencies\":\n branch = self.git.current_branch()\n try:\n self.gather(self.trac.dependencies())\n self.git.diff(\"%s..%s\"%(HEAD,branch))\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Prepares and sends a Mailgun API call. Returns a dict with the response status.
def sendMailgun(self): apiURI = app.config['MAILGUN_API'] auth = ('api', app.config.get('MAILGUN_KEY')) data = { "from": self.from_name + " <" + self.from_addr + ">", "to": self.to_name + " <" + self.to_addr + ">", "subject": self.subject, "text": ...
[ "def test_mailgun():\n\n status, message = mailgun.send('marco.zingales@gmail.com', ['marzi@dtu.dk'], ['hello there'], [], 'test1',\n 'testcontent1')\n _test(\"mailgun works\", status == 0)", "def mailgun(to, subject, body, mailgun_api_key, mailgun_endpoint):\n ctx_obj =...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Prepares and sends a Mandrill API call. Returns a dict with the response status.
def sendMandrill(self): headers = {'content-type': 'application/json'} apiURI = app.config['MANDRILL_API'] data = { "key": app.config.get('MANDRILL_KEY'), "message": { "from_email": self.from_addr, "from_name": self.from_name, ...
[ "def rp(success=False, message=None, payload=None):\n return{\n 'success': success,\n 'message': message, \n 'payload': payload,\n }", "def GET_request(action):\n\n # OAuth token of the user that requests will be made on behalf of\n\n\n # Login of the advertising agency client\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
A pair of (xmin, xmax).
def xrange(self): return (self.xmin, self.xmax)
[ "def bbox(self) -> Tuple[Tuple[float, float], Tuple[float, float]]:\n minx, miny, maxx, maxy = self.polygon.bounds\n return (minx, miny), (maxx, maxy)", "def bbox(points):\n assert len(points) > 0\n x_min, y_min = points[0]\n x_max, y_max = points[0]\n for pt in points:\n x,y = pt...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The height of the region.
def height(self): return self.ymax - self.ymin
[ "def getHeight( self ):\n return self.height", "def height(self):\n return capi.get_band_ysize(self.ptr)", "def get_height(self):\n\t\treturn self.y[1] - self.y[0]", "def Height(self):\n return _handle.OperatorHandle_Height(self)", "def _get_height(self) -> \"int\" :\n return _co...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
A pair of (ymax, ymin). Inverted so as to work well with `matplotlib`.
def yrange(self): return (self.ymax, self.ymin)
[ "def test_ymin_ymax(self):\n for font in self.fonts:\n head_table = font['head']\n self.assertEqual(head_table.yMin, EXPECTED_YMIN)\n self.assertEqual(head_table.yMax, EXPECTED_YMAX)", "def get_ylimits(data):\n min_overlaps = min(\n numpy.min(m[\"gradient_curvatur...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Helper method to aid in constructing a new instance centred on the given location, with a given width and/or height. If only one of the width or height is specified, the aspect ratio is used.
def from_centre(x, y, xsize=None, ysize=None, aspect=1.0): if xsize is None and ysize is None: raise ValueError("Must specify at least one of width and height") x, y, aspect = float(x), float(y), float(aspect) if xsize is not None: xsize = float(xsize) if ysize is...
[ "def __init__(self, w=20, h=10, centerPt=None):\n if not isinstance(w, (int,float)):\n raise TypeError('Width must be a number')\n if w <= 0:\n raise ValueError('The width must be positive.')\n if not isinstance(h, (int,float)):\n raise TypeError('Height must be a number')\n if h <= 0:\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a new instance with the same midpoint, but with the width/height divided by `scale`. So `scale=2` will zoom in.
def _with_scaling(self, scale): midx = (self._xmin + self._xmax) / 2 midy = (self._ymin + self._ymax) / 2 xs = (self._xmax - self._xmin) / scale / 2 ys = (self._ymax - self._ymin) / scale / 2 return (midx - xs, midx + xs, midy - ys, midy + ys)
[ "def scale(self, x_scale, y_scale):\n return Size(float(self.width) * x_scale, float(self.height) * y_scale)", "def multiply(self, scale):\n scale = up_tuple(scale, 3)\n c = self.copy()\n c.extent = tuple((l * t, r * t) for (l, r), t in zip(c.extent, scale))\n return c", "def ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a new instance translated by this amount. The values are relative to the current size, so `dx==1` means translate one whole rectangle size (to the right).
def with_translation(self, dx, dy): dx = dx * (self._xmax - self._xmin) dy = dy * (self._ymax - self._ymin) return self.with_absolute_translation(dx, dy)
[ "def enlarge(self, dx):\n self.inf -= dx\n self.sup += dx\n return self", "def adjusted(self, dx: COORDINATE_TYPE, dy: COORDINATE_TYPE, dw: COORDINATE_TYPE, dh: COORDINATE_TYPE) -> \"Rect\":\n newRect = Rect()\n newRect.coreRect = self.coreRect.adjusted(dx, dy, dw, dh)\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The least x coordinate in tile space we need to cover the region.
def xtilemin(self): return int(2 ** self._zoom * self._extent.xmin)
[ "def LayoutBoundsMinX(self) -> float:", "def get_lowest_x_coordinate(self):\n lowest_x = self.player_tetraminos[0].xcor()\n for tetramino in self.player_tetraminos:\n if (tetramino.xcor() < lowest_x):\n lowest_x = tetramino.xcor()\n return lowest_x", "def min_x(sel...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The least y coordinate in tile space we need to cover the region.
def ytilemin(self): return int(2 ** self._zoom * self._extent._ymin)
[ "def y(self):\n return Tile.SIZE * (3 / 2) * self.r", "def MaxBoundsY(self) -> float:", "def LayoutBoundsMinY(self) -> float:", "def min_y(self):\n return self.origin[1]", "def PlotBoundsMinY(self) -> float:", "def get_lowest_y_coordinate(self):\n lowest_y = self.player_tetraminos[0]....
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The greatest y coordinate in tile space we need to cover the region.
def ytilemax(self): return int(2 ** self._zoom * self._extent._ymax)
[ "def MaxBoundsY(self) -> float:", "def y(self):\n return Tile.SIZE * (3 / 2) * self.r", "def max_y(self):\n return self.origin[1] + self.size[1]", "def LayoutBoundsMaxY(self) -> float:", "def PlotBoundsMaxY(self) -> float:", "def ytilemin(self):\n return int(2 ** self._zoom * self._ex...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Use these settings to assemble tiles into a single image. Will always return an image which is a whole number of tiles wide and high; the exact extent will be a subrectangle of the image.
def as_one_image(self, allow_large = False): if not allow_large: self._check_download_size() size = self._tile_provider.tilesize xs = size * (self.xtilemax + 1 - self.xtilemin) ys = size * (self.ytilemax + 1 - self.ytilemin) out = _Image.new("RGBA", (xs, ys)) ...
[ "def tile(dimensions, src, hwRatio=0.6667, paddingPercent=0.05, \n borderBg=(255,255,255), pageBg=(0,0,0), outsideBorderPercent=0.1, \n applyCrop=False):\n\n nw, nh = dimensions\n iw, ih = src.size\n\n if applyCrop == True:\n #calculate aspect ratio of image within photo\n #de...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Use these settings to plot the tiles to a `matplotlib` axes. This
def plot(self, ax, allow_large = False, **kwargs): tile = self.as_one_image(allow_large) scale = 2 ** self.zoom x0, y0 = self.extent.project(self.xtilemin / scale, self.ytilemin / scale) x1, y1 = self.extent.project((self.xtilemax + 1) / scale, (self.ytilemax + 1) / scale) ax.ims...
[ "def heatmap(self):\n plt.imshow(self.M)\n plt.yticks([])\n plt.xticks(np.arange(self.size[1]))\n plt.show()", "def visualise_grid(self, *args, **kwargs):\n\n plt.figure(figsize=(20,15))\n sns.heatmap(self.grid, xticklabels=False, yticklabels=False,\n *args, **kwar...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Takes the geometry from the passed data frame, looks for point objects and extracts the coordinates. Useful for plotting, as doing this is usually much faster than using the GeoPandas `plot` methods. The dataframe must either have no projection set (`frame.crs == None`)
def points_from_frame(frame): proj = _parse_crs(frame.crs) xcs, ycs = [], [] if proj == _NATIVE_LONLAT: for point in frame.geometry: c = project(*point.coords[0]) xcs.append(c[0]) ycs.append(c[1]) else: for point in frame.geometry: xcs.appe...
[ "def _convert_geodataframe(self):\r\n\r\n value = self._frame\r\n\r\n c1_field, c2_field, c3_field, geometry_field = Series(), Series(), Series(), Series()\r\n try:\r\n c1_field = self._frame['coord_field1']\r\n c2_field = self._frame['coord_field2']\r\n c3_fiel...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
testing if the default value will be used when the pathurl argument is skipped
def test_pathurl_argument_is_skipped(self): f = File() self.assertEqual('', f.pathurl)
[ "def build_url(self):\n url = super().build_url()\n if '/None/' in url:\n return url.replace('/None/', '/')\n else:\n return url", "def default_validation(url):\n return bool(urlparse(url).scheme)", "def build_url(self):\n url = super().build_url()\n i...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
testing if a TypeError will be raised when the pathurl argument is not a string instance
def test_pathurl_argument_is_not_a_string(self): with self.assertRaises(TypeError) as cm: File(pathurl=123) self.assertEqual( cm.exception.message, 'File.pathurl should be a string, not int' )
[ "def test_pathurl_attribute_is_not_a_string(self):\n f = File(pathurl='shot1')\n with self.assertRaises(TypeError) as cm:\n f.pathurl = 123\n\n self.assertEqual(\n cm.exception.message,\n 'File.pathurl should be a string, not int'\n )", "def test_get_pa...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
testing if a TypeError will be raised when the pathurl attribute is set to a value other than a string
def test_pathurl_attribute_is_not_a_string(self): f = File(pathurl='shot1') with self.assertRaises(TypeError) as cm: f.pathurl = 123 self.assertEqual( cm.exception.message, 'File.pathurl should be a string, not int' )
[ "def test_pathurl_argument_is_not_a_string(self):\n with self.assertRaises(TypeError) as cm:\n File(pathurl=123)\n\n self.assertEqual(\n cm.exception.message,\n 'File.pathurl should be a string, not int'\n )", "def test_pathurl_argument_is_skipped(self):\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
testing if the pathurl argument value is correctly passed to the pathurl attribute
def test_pathurl_argument_is_working_properly(self): f = File(pathurl='shot2') self.assertEqual('file://localhost/shot2', f.pathurl)
[ "def test_pathurl_argument_is_skipped(self):\n f = File()\n self.assertEqual('', f.pathurl)", "def test_pathurl_attribute_is_working_properly(self):\n f = File(pathurl='shot1')\n test_value = 'shot2'\n expected_value = 'file://localhost/shot2'\n self.assertNotEqual(test_v...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
testing if setting the pathurl attribute will also set the id attribute
def test_pathurl_will_set_the_id_attribute(self): f = File() self.assertEqual(f.id, '') f.pathurl = 'shot2' self.assertEqual(f.id, 'shot2')
[ "def test_pathurl_attribute_is_working_properly(self):\n f = File(pathurl='shot1')\n test_value = 'shot2'\n expected_value = 'file://localhost/shot2'\n self.assertNotEqual(test_value, f.pathurl)\n f.pathurl = test_value\n self.assertEqual(expected_value, f.pathurl)", "def...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
testing if the pathurl attribute value can be correctly changed
def test_pathurl_attribute_is_working_properly(self): f = File(pathurl='shot1') test_value = 'shot2' expected_value = 'file://localhost/shot2' self.assertNotEqual(test_value, f.pathurl) f.pathurl = test_value self.assertEqual(expected_value, f.pathurl)
[ "def test_pathurl_argument_is_working_properly(self):\n f = File(pathurl='shot2')\n self.assertEqual('file://localhost/shot2', f.pathurl)", "def test_pathurl_will_set_the_id_attribute(self):\n f = File()\n self.assertEqual(f.id, '')\n f.pathurl = 'shot2'\n self.assertEqua...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
testing if the id attribute is generated from pathurl and it is equal to the file name
def test_id_is_generated_from_pathurl(self): f = File( pathurl='file://localhost/S:/KKS/Sequences/SEQ001/001A_TNGE/Shots' '/Seq001_001A_TNGE_0010/Comp/Outputs/Main/v001/exr/' 'KKS_Seq001_001A_TNGE_0010_Comp_Main_v001.%5B000-379%5D' '.exr' ...
[ "def test_pathurl_will_set_the_id_attribute(self):\n f = File()\n self.assertEqual(f.id, '')\n f.pathurl = 'shot2'\n self.assertEqual(f.id, 'shot2')", "def check_id(self):\n\n is_file = os.path.isfile(self.id_path)\n is_valid = self.validate_id_file()\n return bool...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get the configuration for a given word.
def get_conf(self, word): return self.__provider.get_conf(word)
[ "def get_word(self):\n # Todo get a list of words fron somewhere\n pass", "def get_word(self, word_str):\n return self.words[word_str]", "def get_conf(self, comp, conf_name):\r\n for cfg in comp.configuration_sets[0].configuration_data:\r\n if cfg.name == conf_name:\r\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Initialize the TestLooperHttpServer. testManager: a TestManager.TestManager object. httpPortOverride: the port to listen on for http requests.
def __init__(self, portConfig, httpServerConfig, serverConfig, testManager, machine_management, artifactStorage, src_ctrl, event_log ): self.testManager = test...
[ "def setUp(self):\n\n self.testport = random.randint(40000, 65530)\n self.server = TServer.TSimpleServer(TestService.Processor(self),\n EzSSLServerSocket(host='localhost', \n port=self.testport,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Script that takes the file name "input.csv" and converts it into a .json file named output.json. The first row of the csv file holds the field names, the other rows are the values
def main(): filepath = "input.csv" delim = ";" if len(sys.argv) > 1: filepath = sys.argv[1] if len(sys.argv) > 2: delim = ";" conversion(filepath, delim, "output.json")
[ "def to_json(filename, csv_input):\n if not filename.endswith(\".json\"):\n if filename == \"\":\n filename = \"output\"\n filename += filename + \".json\"\n\n with open(os.path.join(os.path.realpath('..'), 'src', filename), 'w+') as json_file:\n json_output = json.dumps(csv_in...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Convert the csv file from path into an output.json. delim is used to specify the delimiter of the csv file
def conversion(path, delim, json_filename): csv_input = [] try: #Conversion csv to json with open(path,"rt") as csv_file: reader = csv.reader(csv_file, delimiter=delim, quoting=csv.QUOTE_ALL) fieldnames = next(reader) reader = csv.DictReader(csv_file, delimit...
[ "def to_json(filename, csv_input):\n if not filename.endswith(\".json\"):\n if filename == \"\":\n filename = \"output\"\n filename += filename + \".json\"\n\n with open(os.path.join(os.path.realpath('..'), 'src', filename), 'w+') as json_file:\n json_output = json.dumps(csv_in...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Create a json file from a csv_input
def to_json(filename, csv_input): if not filename.endswith(".json"): if filename == "": filename = "output" filename += filename + ".json" with open(os.path.join(os.path.realpath('..'), 'src', filename), 'w+') as json_file: json_output = json.dumps(csv_input, sort_keys=True)...
[ "def make_json(csvFilePath, jsonFilePath):\n \n \n data = {}\n \n # Open a csv reader called DictReader\n with open(csvFilePath, encoding='utf-8') as csvf:\n csvReader = csv.DictReader(csvf)\n \n # Convert each row into a dictionary and add it to data\n for rows in cs...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check that generate_rows handles errors while querying the API
def test_generate_rows_error(mocker, sc, ds, error_result): mocker.patch.object(SalesforceConnector, 'make_request', return_value=error_result) with pytest.raises(SalesforceApiError): sc.generate_rows(Session(), ds, 'https://salesforce.is.awsome', 'bla')
[ "def test_row_intuition_rowgen(self):\n from csv import DictReader\n\n with open(df('rowgen_sources.csv')) as f:\n for e in DictReader(f):\n print(e['name'])\n gen = get_generator(e['url'])\n\n rows = list(gen)\n\n self.assertEqual...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Print error message `err_msg` and full trace of the error.
def full_trace_error(err_msg): import sys, traceback print(err_msg) exc_type, exc_value, exc_traceback = sys.exc_info() print("*** print_tb:") traceback.print_tb(exc_traceback, file=sys.stdout) print("*** print_exception:") traceback.print_exception(exc_type, exc_value, exc_traceback, file=sys.stdout) s...
[ "def print_err(self, msg):\n if self.time_writer:\n self.time_writer.print_stderr_message(msg)\n else:\n print(\"Error: %s\" % msg)", "def print_err( msg, exit=True ):\n LINE = inspect.currentframe().f_back.f_lineno\n FILE = os.path.basename( inspect.getfile( inspec...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Insert the vertex storing the y_pos and x_pos
def insert_vertex(self, x_pos, y_pos): v = Vertex(x_pos, y_pos) self._vertices.append(v) return v
[ "def add_vertex(self, v):\n pass", "def append_vertex(self, vertex):", "def insertVertex(self, index, v):\n self.vertexList.insert(index, v)\n \n if self.augVertexList is None:\n self.augVertexList = {generator: \\\n [StackingVertex(vertex, [], [], [], []) fo...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Inserts the edge between vertex u and v. We're going to assume in this assignment that all vertices given to this will already exist in the graph.
def insert_edge(self, u, v): e = Edge(u, v) # Check that the edge doesn't already exist for i in u.edges: if i == e: # Edge already exists. raise EdgeAlreadyExists("Edge already exists between vertex!") # Add the edge to both nodes. ...
[ "def addEdge(self, u, v):\r\n self.graph[u].append(v)", "def insert_edge(self, u, v, w):\n # make sure the vertices are in the graph\n if not self.vertices[u] or not self.vertices[v]:\n raise IndexError\n # if they're not connected, increase size,\n if not self.are_co...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Removes the vertex V from the graph.
def remove_vertex(self, v): # Remove it from the list del self._vertices[self._vertices.index(v)] # Go through and remove all edges from that node. while len(v.edges) != 0: e = v.edges.pop() u = self.opposite(e, v) u.remove_edge(e)
[ "def remove_vertex(self, vertex):\n self.remove_vertices([vertex])", "def remove_vertex(self):\r\n if len(self.vertices) > 0:\r\n self.vertices.pop()", "def remove_vertex(self, vertex: T) -> None:\n self._ensure_vertices(vertex)\n neighbors = self._adjacencies[vert...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get the distance between vertex u and v.
def distance(u, v): # Euclidean Distance # sqrt( (x2-x1)^2 + (y2-y1)^2 ) return math.sqrt(((v.x_pos - u.x_pos)**2) + ((v.y_pos - u.y_pos)**2))
[ "def distance(self, u, v):\n # TODO: Implement the distance function between vectors u and v]\n # Note: you can also think of this as computing a similarity measure\n\n pass", "def cityblock_distance(u, v):\n return abs(u-v).sum()", "def distanceV(vector1, vector2):\n\treturn vector1[1] - vect...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Find a path from vertex B to vertex S, such that the distance from B to every vertex in the path is within R. If there is no path between B and S within R, then return None.
def find_path(self, b, s, r): start = b path = [] visited = {} for node in self._vertices: visited.update({node: False}) return self.path_finder(start, b, s, r, path, visited)
[ "def distance(R, S):\n t1 = clock()\n if R == None:\n return 0\n if S == None: \n return 0\n if len(S)==1:\n S = S[0]\n if len(R)==1:\n R = R[0]\n condition_s = not(isinstance(S[0], list))\n condition_r = not(isinstance(R[0], list))\n if condition_r and condition_...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the minimum range required to go from Vertex B to Vertex S.
def minimum_range(self, b, s): path = [] final_paths = [] visited = {} for node in self._vertices: visited.update({node: False}) shortest = None max_ranges = [] output = self.minimum_finder(b, s, final_paths, path, visited) for lists ...
[ "def smallestRangeI(self, nums, k):\r\n minval = sys.maxsize\r\n maxval = - sys.maxsize\r\n for elem in nums:\r\n minval = min( minval, elem )\r\n maxval = max( maxval, elem )\r\n \r\n if minval + k >= maxval - k:\r\n return 0\r\n else:\r\n return ( maxv...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Move the defined vertex. If there is already a vertex there, do nothing.
def move_vertex(self, v, new_x, new_y): # Do nothing if another vertex already occupies (new_x, new_y) for u in self._vertices: if u.x_pos == new_x and u.y_pos == new_y: return v.move_vertex(new_x, new_y)
[ "def update_vertex(self, v, overwrite=True):\n pass", "def _move(self, vertex, word):\n for w in word:\n vertex = self._vtable[vertex][w]\n return vertex", "def remove_vertex(self):\r\n\t\t\tif not self.is_empty():\r\n\t\t\t\tself.vertices.pop()", "def remove_vertex(self):\r\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The connection preference to use for this service attachment. Valid values include "ACCEPT_AUTOMATIC", "ACCEPT_MANUAL".
def connection_preference(self) -> pulumi.Input[str]: return pulumi.get(self, "connection_preference")
[ "def connection_preference(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"connection_preference\")", "def AutoNegotiation(self):\r\n\t\treturn self._get_attribute('autoNegotiation')", "def preferred_autoattach_pod_interface(self):\n return self._preferred_autoattach_pod_interfac...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
If true, enable the proxy protocol which is for supplying client TCP/IP address data in TCP connections that traverse proxies on their way to destination servers.
def enable_proxy_protocol(self) -> pulumi.Input[bool]: return pulumi.get(self, "enable_proxy_protocol")
[ "def _set_networkProxySetting(self, *args) -> \"bool\" :\n return _core.NetworkPreferences__set_networkProxySetting(self, *args)", "def __setHTTPProxy():\n\n global proxyHandler\n\n if not conf.proxy: \n if conf.hostname in ('localhost', '127.0.0.1') or conf.ignoreProxy:\n proxyHand...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
An array of subnets that is provided for NAT in this service attachment.
def nat_subnets(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]: return pulumi.get(self, "nat_subnets")
[ "def _get_subnets(self) -> List[dict]:\n print('Getting subnets...')\n\n return self._run_az([\n 'network', 'vnet', 'subnet', 'list',\n '--resource-group', self._selected_resource_group['name'],\n '--vnet-name', self._selected_virtual_network['name']\n ])", "d...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The connection preference to use for this service attachment. Valid values include "ACCEPT_AUTOMATIC", "ACCEPT_MANUAL".
def connection_preference(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "connection_preference")
[ "def connection_preference(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"connection_preference\")", "def AutoNegotiation(self):\r\n\t\treturn self._get_attribute('autoNegotiation')", "def preferred_autoattach_pod_interface(self):\n return self._preferred_autoattach_pod_interface", "def...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get an existing ServiceAttachment resource's state with the given name, id, and optional extra properties used to qualify the lookup.
def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, connected_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServiceAttachmentConnectedEndpointArgs']]]]] = None, connection_preference: Optional[pulumi...
[ "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n alb_target_group_arn: Optional[pulumi.Input[str]] = None,\n autoscaling_group_name: Optional[pulumi.Input[str]] = None,\n elb: Optional[pulumi.Input[str]] =...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
If true, enable the proxy protocol which is for supplying client TCP/IP address data in TCP connections that traverse proxies on their way to destination servers.
def enable_proxy_protocol(self) -> pulumi.Output[bool]: return pulumi.get(self, "enable_proxy_protocol")
[ "def enable_proxy_protocol(self) -> pulumi.Input[bool]:\n return pulumi.get(self, \"enable_proxy_protocol\")", "def _set_networkProxySetting(self, *args) -> \"bool\" :\n return _core.NetworkPreferences__set_networkProxySetting(self, *args)", "def __setHTTPProxy():\n\n global proxyHandler\n\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the first value associated with key in PriorityQueue. Raises KeyError if key is not present.
def __getitem__(self, key): for value, item in self.heap: if item == key: return value raise KeyError(str(key) + " is not in the priority queue")
[ "def get(self, key: str):\r\n\r\n index = self.hash(key)\r\n\r\n if self.array[index] is None:\r\n return None\r\n else:\r\n # Loop through all the key/value pairs at this index, and find if\r\n # our key exists. If it does, return the value.\r\n\r\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return tanuki's distance (a positive distance) from an enemy if it is in the same row as tanuki. Return 999 if the current row is free of enemies.
def dist_enemy(row, col): for enemy in self.game.enemy_list: if enemy.gridR == row and enemy.isActive: return abs(col - enemy.gridC) return 999
[ "def distance(board: np.array) -> int:\n total_distance = 0\n boxes_pos = np.argwhere(board == TYPE_LOOKUP['box not on target'])\n targets_pos = np.argwhere(board == TYPE_LOOKUP['box target']).tolist()\n\n for box in boxes_pos:\n distance_from_each_target = []\n for target in targets_pos:\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Method writes the datetime and scores to a file.
def write_scores(filename, scores): curr_scores = read_scores(filename) now_datetime = str(datetime.datetime.now()) res = '{0}\nDate({1}): {2} scores'.format(curr_scores, now_datetime, scores) with open(filename, 'wb') as file: pickle.dump(res, file)
[ "def __writeToFile(self, score):\n with open(self.file, \"w\") as f:\n f.write(str(score))", "def write_score(self):\r\n file = open('scores.txt', 'a')\r\n row = \"{}: {}\\n\".format(self.name, self.score)\r\n file.write(row)", "def write_to_file(self, data):", "def stor...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get ball coordinates and x, y movement deltas and calculate whether the ball hits the board: returns True if the ball hits the board and False if it doesn't.
def is_ball_hits_board(self, ball_coord, delta_x, delta_y): ball_x = delta_x + ball_coord[0] ball_y = delta_y + ball_coord[1] ball_r = ball_coord[2] x1 = self.board.get_rect().left - ball_x x2 = self.board.get_rect().right - ball_x y1 = self.board.get_rect().top - ball_...
[ "def has_ball_moved(self, ball_1, ball_2):\r\n dist = dist_between_two_balls(ball_1, ball_2)\r\n if not self.white_is_moving:\r\n if dist > 0.1:\r\n return True\r\n else:\r\n return False\r\n else:\r\n return False", "def checkHit...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns a pd.DataFrame (nb_paths X nb_tosses), where p is the probability of getting a head.
def df_coinflip(nb_tosses, nb_paths, p=0.5): samples = np.random.rand(nb_paths, nb_tosses) col_idx = list(range(1+nb_tosses)) tosses = pd.DataFrame(np.where(samples<p, 1, 0), columns=col_idx[1:]) tosses[0] = 0 tosses = tosses[col_idx] cum_tosses = tosses.cumsum(axis=1) return cum_tosses
[ "def num_of_lattice_paths(length):\n numerator = math.factorial(length + length)\n denominator = math.factorial(length) * math.factorial(length)\n \n return numerator/denominator", "def path_length(self, X:np.ndarray) -> np.ndarray:\n nd_X = []\n for x_i in X:\n x_i_path_len =...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Corresponds to the gate information for an AND gate.
def AND(): return {(0, 0): 0, (0, 1): 0, (1, 0): 0, (1, 1): 1}
[ "def instruction_AND(self, inst):\n\t\tsrc1 = self.getOperandOneWord(inst)\n\t\tsrc2 = self.getOperandTwoWord(inst)\n\t\tself.setDestinationWord(inst, src1 & src2)", "def enterLogicalExpressionAnd(self, ctx: RulesParser.LogicalExpressionAndContext):\n\n self.context.operator = LogicalOperator.AND\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Corresponds to the gate information for an OR gate.
def OR(): return {(0, 0): 0, (0, 1): 1, (1, 0): 1, (1, 1): 1}
[ "def bitwise_or_(self, e):\n return self.__lazy_operate(operator.or_, e)", "def __or__(self, other: IntegerValue) -> IntegerValue:\n return _binop(ops.BitwiseOr, self, other)", "def enterLogicalExpressionOr(self, ctx: RulesParser.LogicalExpressionOrContext):\n self.context.operator = Logica...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Corresponds to the gate information for an XOR gate.
def XOR(): return {(0, 0): 0, (0, 1): 1, (1, 0): 1, (1, 1): 0}
[ "def get_bprop_bitwisexor(self):\n\n def bprop(x, y, out, dout):\n return zeros_like(x), zeros_like(y)\n\n return bprop", "def XOR(self,other):\n raise OpNotAllowedError(\"Cannot do operation on Bit instance\")", "def test_negated_xor(self):\n self.assert_to_cnf_transformation(\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Organize configs for initializing components from registry.
def _organize_configs(self): # organize learner configs self.learner_cfg.args = self.args self.learner_cfg.env_info = self.env_info self.learner_cfg.hyper_params = self.hyper_params self.learner_cfg.log_cfg = self.log_cfg self.learner_cfg.head.configs.state_size = self.en...
[ "def _load_configs(self):\n\n self._configs.clear()\n configs = config_utils.load(self._get_config_file_path())\n self._configs = configs.get('general')\n self._extract_test_roots()", "def configure(self):\n if self.name == 'ncm-ncd':\n self.configure_ncm_ncd()\n\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Spawn processes and run training loop.
def train(self): print("Spawning and initializing communication...") # Spawn processes self._spawn() # Initialize communication for proc in self.processes: proc.init_communication.remote() # Run main training loop print("Running main training loop......
[ "def do_training():\n train_cls = Train()\n train_cls.run()", "def main(self):\n # (i) Start Process: Statistics\n self.stats.start()\n\n # (ii) Start Process: Predictors\n if Config.TRAIN_MODE == 'policy' or (Config.TRAIN_MODE == 'selection' and not Config.LOAD_DATA):\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the IP address of the device
def get_IP_address(self): IP_address = self.device.get_IP_address() return IP_address
[ "def ip_address(self) -> str | None:\n return self._device.ip_address", "def getIpAddress():\n # type: () -> String\n return socket.gethostbyname(str(getHostName()))", "def get_ip(self) -> str:\n try:\n self.remote_exec(\"from network import WLAN\")\n self.remote_exec(\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Negate `self`. Returns a NumericValue: `self` negated
def __neg__(self) -> NumericValue: return self.negate()
[ "def __neg__(self):\n result = Scalar._create_raw()\n lib.crypto_core_ed25519_scalar_negate(result._ptr, self._ptr)\n return result", "def __neg__(self):\n return Ad_Var(-self._val, -self._ders)", "def negate(self):\n return Formula(\"not\", self)", "def neg(self) -> 'Tensor...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return `NULL` if an expression is zero.
def nullifzero(self) -> NumericValue: return ops.NullIfZero(self).to_expr()
[ "def is_scalar_zero(expr):\n return is_scalar_x(expr, 0)", "def zeroifnull(self) -> NumericValue:\n return ops.ZeroIfNull(self).to_expr()", "def _get_zero_expr():\n expr = delay_model_pb2.DelayExpression()\n _set_constant_expression(expr, 0)\n return expr", "def is_scalar_minus_one(expr):\n...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return zero if an expression is `NULL`.
def zeroifnull(self) -> NumericValue: return ops.ZeroIfNull(self).to_expr()
[ "def is_scalar_zero(expr):\n return is_scalar_x(expr, 0)", "def nullifzero(self) -> NumericValue:\n return ops.NullIfZero(self).to_expr()", "def is_scalar_minus_one(expr):\n return is_scalar_x(expr, -1)", "def _visit_if_not_none(expr):\r\n if expr is not None:\r\n return...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Floor divide `self` by `other`.
def __floordiv__( self, other: NumericValue, ) -> NumericValue: return _binop(ops.FloorDivide, self, other)
[ "def __floordiv__(self, other):\n return self.componentwise(other, operator.__floordiv__)", "def divide(self, other):\n return self.multiply(other.reciprocal())", "def __floordiv__(self, other):\n if isinstance(other, (int, float)):\n return Quaternion(\n self.real...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Compute `self` modulo `other`.
def __mod__(self, other: NumericValue) -> NumericValue: return _binop(ops.Modulus, self, other)
[ "def __mod__(self,other):\r\n self.Verificaciones(other)\r\n return self-(self//other)*other", "def __divmod__(self, other):\n return self.componentwise(other, divmod)", "def __mod__(self, other):\n if (isinstance(other, UInt8)):\n remainder = UInt8(0)\n quotien...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a point constructed from the coordinate values. Constant coordinates result in construction of a `POINT` literal or column.
def point(self, right: int | float | NumericValue) -> ir.PointValue: return ops.GeoPoint(self, right).to_expr()
[ "def point(x: float, y: float, crs: MaybeCRS) -> Geometry:\n return Geometry({'type': 'Point', 'coordinates': [float(x), float(y)]}, crs=crs)", "def Point(lon, lat):\n return {\n 'type': 'Point',\n 'coordinates': [lon, lat]\n }", "def as_point(self):\n if self._geography.getTyp...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return the cumulative mean of the input.
def cummean(self) -> NumericColumn: return ops.CumulativeMean(self).to_expr()
[ "def running_mean(x, N): \n cumsum = np.cumsum(np.insert(x, 0, 0)) \n return (cumsum[N:] - cumsum[:-N]) / float(N)", "def calculate_mean(self):\n\t\t\t\t\t\n avg = 1.0 * sum(self.data) / len(self.data)\n\t\t\n self.mean = avg\n \n return self.mean", "def running_mean(x, N):\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return the cumulative sum of the input.
def cumsum(self) -> NumericColumn: return ops.CumulativeSum(self).to_expr()
[ "def cumsum(self, axis=0):\n return self.apply(lambda x: x.cumsum(), axis=axis)", "def cumsum(x, axis=0):\n\treturn tf.cumsum(x, axis=axis)", "def cumsum1(t):\n result = []\n for i in range(len(t)):\n stop = i + 1\n accu = sum(t[:stop])\n result.append(accu)\n return result"...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Compute a histogram with fixed width bins.
def histogram( self, nbins: int | None = None, binwidth: float | None = None, base: float | None = None, eps: float = 1e-13, ): if nbins is not None and binwidth is not None: raise ValueError( f"Cannot pass both `nbins` (got {nbins}) and `...
[ "def binarize(i, bins):\n\n hist, edges = np.histogram(i, bins=bins, range=[10, 2000], normed=True)\n edges = (edges[:-1] + edges[1:])/2\n hist *= edges\n\n return hist", "def get_bin_widths(hi):\n\n return [hi.GetBinWidth(i) for i in range(1, hi.GetNbinsX()+1)]", "def _calc_histogram_bins(count)...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Convert an integral UNIX timestamp to a timestamp expression.
def to_timestamp( self, unit: Literal["s", "ms", "us"] = "s", ) -> ir.TimestampValue: return ops.TimestampFromUNIX(self, unit).to_expr()
[ "def convert_to_unix(timestamp):\n return (timestamp - pd.Timestamp(\"1970-01-01\")) // pd.Timedelta('1s')", "def binary_time_to_unix_time(x):\n device_time_bytes = x[3:].strip()\n return float(struct.unpack(\"<L\", device_time_bytes)[0])", "def to_timestamp(val):\r\n # If we're given a number, give...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Convert an integer to an interval.
def to_interval( self, unit: Literal["Y", "M", "W", "D", "h", "m", "s", "ms", "us", "ns"] = "s", ) -> ir.IntervalValue: return ops.IntervalFromInteger(self, unit).to_expr()
[ "def remap_interval(val,input_interval_start,input_interval_end,output_interval_start,output_interval_end):\n num = (float(val - input_interval_start) * float(output_interval_end - output_interval_start)) \n denom = float(input_interval_end - input_interval_start) \n scaled_val = (num/denom)+ output_interv...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Bitwise and `self` with `other`.
def __and__(self, other: IntegerValue) -> IntegerValue: return _binop(ops.BitwiseAnd, self, other)
[ "def AND(self,other):\n raise OpNotAllowedError(\"Cannot do operation on Bit instance\")", "def __and__(self: bitlist, other: bitlist) -> bitlist:\n if len(self) != len(other):\n raise ValueError(\n 'arguments to logical operations must have equal lengths'\n )\n\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Bitwise or `self` with `other`.
def __or__(self, other: IntegerValue) -> IntegerValue: return _binop(ops.BitwiseOr, self, other)
[ "def __or__(self: bitlist, other: bitlist) -> bitlist:\n if len(self) != len(other):\n raise ValueError(\n 'arguments to logical operations must have equal lengths'\n )\n return bitlist(list(reversed(\n [a | b for (a, b) in zip(self.bits, other.bits)]\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Bitwise xor `self` with `other`.
def __xor__(self, other: IntegerValue) -> IntegerValue: return _binop(ops.BitwiseXor, self, other)
[ "def __xor__(self, other):\n return self.XOR(other)", "def __xor__(self, other):\n bits = []\n for i in range(0, len(self)):\n bits += [self.bits[i] ^ other.bits[i]]\n return UInt8(bits=bits)", "def __xor__(self: bitlist, other: bitlist) -> bitlist:\n if len(self) !...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Bitwise left shift `self` with `other`.
def __lshift__(self, other: IntegerValue) -> IntegerValue: return _binop(ops.BitwiseLeftShift, self, other)
[ "def __rlshift__(self, other: IntegerValue) -> IntegerValue:\n return _binop(ops.BitwiseLeftShift, other, self)", "def __lshift__(self, other):\n newBits = []\n testOverflow = PlainBit(False)\n for i in xrange(0,len(self)-other):\n newBits += [self.bits[i+other]]\n fo...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Bitwise left shift `other` with `self` (reflected operand order).
def __rlshift__(self, other: IntegerValue) -> IntegerValue: return _binop(ops.BitwiseLeftShift, other, self)
[ "def __lshift__(self, other: IntegerValue) -> IntegerValue:\n return _binop(ops.BitwiseLeftShift, self, other)", "def __lshift__(self, other):\n newBits = []\n testOverflow = PlainBit(False)\n for i in xrange(0,len(self)-other):\n newBits += [self.bits[i+other]]\n for...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Bitwise right shift `self` with `other`.
def __rshift__(self, other: IntegerValue) -> IntegerValue: return _binop(ops.BitwiseRightShift, self, other)
[ "def __rrshift__(self, other: IntegerValue) -> IntegerValue:\n return _binop(ops.BitwiseRightShift, other, self)", "def __rshift__(self, other):\n newBits = []\n for i in xrange(0, other+1):\n newBits += [self.bits[0]]\n for i in xrange(i + 1, len(self)):\n newBit...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Bitwise right shift `other` with `self` (reflected operand order).
def __rrshift__(self, other: IntegerValue) -> IntegerValue: return _binop(ops.BitwiseRightShift, other, self)
[ "def __rshift__(self, other: IntegerValue) -> IntegerValue:\n return _binop(ops.BitwiseRightShift, self, other)", "def __rshift__(self, other):\n newBits = []\n for i in xrange(0, other+1):\n newBits += [self.bits[0]]\n for i in xrange(i + 1, len(self)):\n newBits...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Bitwise not of `self`. Returns an `IntegerValue` containing the inverted bits of `self`.
def __invert__(self) -> IntegerValue: try: node = ops.BitwiseNot(self) except (IbisTypeError, NotImplementedError): return NotImplemented else: return node.to_expr()
[ "def __invert__(self):\n newInt = self\n bits = []\n for i in xrange(0, len(newInt.bits)):\n bits.append(~newInt.bits[i])\n return UInt8(bits=bits)", "def __invert__(self: bitlist) -> bitlist:\n return bitlist(list(reversed([1-b for b in self.bits])))", "def negate(...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Aggregate the column using the bitwise and operator.
def bit_and(self, where: ir.BooleanValue | None = None) -> IntegerScalar: return ops.BitAnd(self, where).to_expr()
[ "def bitwise_and_(self, e):\n return self.__lazy_operate(operator.and_, e)", "def __and__(self, other: IntegerValue) -> IntegerValue:\n return _binop(ops.BitwiseAnd, self, other)", "def bitwise_and(src1, src2, dst=..., mask=...) -> dst:\n ...", "def filter_expr(self):\n return lambda d...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Read meta information from the meta file.
def read_meta_from_file(): try: with open(meta_file_name, "r") as meta_file: return json.load(meta_file) except OSError: sys.exit("Could not open/read meta file: meta.json.")
[ "def meta_load_socrata(self):\n import json\n\n meta = self.filesystem.download('meta')\n\n with open(meta) as f:\n d = json.load(f)\n\n md = self.metadata\n md.about.title = d['name']\n md.about.summary = d['description']\n\n md.write_to_dir()", "def ge...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }