query
stringlengths
9
3.4k
document
stringlengths
9
87.4k
metadata
dict
negatives
listlengths
4
101
negative_scores
listlengths
4
101
document_score
stringlengths
3
10
document_rank
stringclasses
102 values
The user has a valid JWT but needs to log into this app. Do so here and return the status.
def jwt_login(request, jwt_payload): logger.debug("Logging user in via JWT. Is Authenticated? " + str(request.user.is_authenticated)) request.session['profile'] = jwt_payload user = django_auth.authenticate(**jwt_payload) if user: login(request, user) else: logger.debug("Could no...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def LoginCheck():\n jwt_data = get_jwt()\n if jwt_data['roles'] != 'admin':\n return jsonify(msg=\"Permission denied\"), Status.HTTP_BAD_FORBIDDEN\n\n identity = get_jwt_identity()\n if not identity:\n return jsonify({\"msg\": \"Token invalid\"}), Status.HTTP_BAD_UNAUTHORIZED\n\n data ...
[ "0.7635263", "0.7127661", "0.6985085", "0.6958775", "0.68365085", "0.6748926", "0.6729194", "0.6707417", "0.6696781", "0.6696681", "0.6647693", "0.66302025", "0.6625759", "0.66238236", "0.6614123", "0.66123486", "0.65989953", "0.659169", "0.6581054", "0.6579671", "0.6572046",...
0.63848686
47
This will log a user out and redirect them to log in again via the AuthN server.
def logout_redirect(request): logout(request) # Build the URL login_url = furl(login_redirect_url(request, next_url=request.build_absolute_uri())) # Check for branding if hasattr(settings, 'SCIAUTH_BRANDING'): logger.debug('SciAuth branding passed') # Encode it and pass it ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def logout():\n logout_user()\n return redirect(url_for('auth.index'))", "def signout():\n session.pop('oauth2_state', None)\n session.pop('oauth2_token', None)\n session.pop('discord_user', None)\n return redirect('/')", "def logout_user(request):\r\n # We do not log here, because we have...
[ "0.74786663", "0.7398715", "0.7309837", "0.7262442", "0.7236717", "0.72231877", "0.72231495", "0.7213908", "0.72030735", "0.7196016", "0.71688986", "0.71640676", "0.7163589", "0.7163589", "0.7159972", "0.71333206", "0.71177167", "0.7101278", "0.709512", "0.7080758", "0.706229...
0.0
-1
Author overloads `error` method for scanning and parsing. I will define separate methods.
def scan_error(self, line: int, message: str): self.report(line, "", message)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def error(self, error):\n pass", "def error(self, *args, **kwargs):", "def error(self):\n ...", "def _error(self, token, msg):\n self._interpreter.parse_error(token, msg)\n return ParseError()", "def error(self, message):\r\n self._construct_partial_parser().error(mes...
[ "0.72092247", "0.69668496", "0.68345785", "0.67888355", "0.6770503", "0.670923", "0.66309696", "0.661976", "0.65300286", "0.6507165", "0.6377651", "0.6323891", "0.6323672", "0.6305191", "0.6301589", "0.62937343", "0.62933636", "0.6288595", "0.62223047", "0.62183744", "0.61952...
0.6191369
21
report a runtime error
def runtime_error(self, error: 'LoxRuntimeError'): output = f'{error.get_message()}{os.linesep}[line {error.token.line}]' print(output, file=sys.stderr) self.had_runtime_error = False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def error():\r\n raise RuntimeError('admin ticket generator at your service')", "def reportError(self):\n self.Q['err'].put(sys.exc_info()[:2])", "def serious_error(self, e):\n pass", "def error(self):\n pass", "def unexpected_error(self, exception):", "def error(error):\n print(...
[ "0.68166107", "0.679111", "0.6713061", "0.64715713", "0.6364697", "0.63401216", "0.6333252", "0.6314151", "0.62917364", "0.6244229", "0.61968243", "0.6178209", "0.6150258", "0.61317104", "0.6127837", "0.6092866", "0.6092197", "0.60902953", "0.60429615", "0.6041189", "0.601885...
0.7311913
0
report a nonruntime error
def report(self, line: int, where: str, message: str): output = f'[line {line}] Error{where}: {message}' print(output, file=sys.stderr) self.had_error = True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unexpected_error(self, exception):", "def report_unexpected_exception(self, *args, **kwargs):\n pass", "def reportError(self):\n self.Q['err'].put(sys.exc_info()[:2])", "def error(self):\n pass", "def unexpectedException(self):", "def serious_error(self, e):\n pass", "def...
[ "0.72789407", "0.7216216", "0.7035961", "0.7010877", "0.69478387", "0.6807928", "0.68002", "0.6673932", "0.6667403", "0.6664058", "0.6661008", "0.66161615", "0.6548039", "0.6492653", "0.6476202", "0.64534765", "0.64285237", "0.64251786", "0.6421437", "0.64072937", "0.63590854...
0.0
-1
Use an explicit connect/quit here, as other tests use the context manager.
async def test_plain_smtp_connect(preset_client): await preset_client.connect() assert preset_client.is_connected await preset_client.quit() assert not preset_client.is_connected
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_connection():\r\n try:\r\n connect()\r\n except:\r\n pass\r\n print ('Unable to connect.')\r\n else:\r\n main()", "def quit(self):\n \n if 'driver' in self.__dict__:\n self.driver.quit()\n if 'session' in self.__dict__:\n self.session.close()\n...
[ "0.6924016", "0.65419316", "0.6448723", "0.6423238", "0.6363327", "0.6345118", "0.62429655", "0.62380254", "0.62205863", "0.6215696", "0.61930496", "0.6161508", "0.61469656", "0.614523", "0.6138414", "0.6116715", "0.60803974", "0.60480607", "0.60480607", "0.60480607", "0.6048...
0.60078937
22
Note, SMTPTimeoutError vs SMTPConnectError here depends on processing time.
async def test_connect_error_with_no_server(event_loop): client = SMTP(hostname="127.0.0.1", port=65534, loop=event_loop) with pytest.raises(SMTPConnectError): await client.connect(timeout=0.1)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _connect_smtp(self):\n smtp = None\n try:\n smtp = smtplib.SMTP(self.servername, timeout = self.timeout)\n except smtplib.SMTPException as err:\n log.critical('smtp service at {} is not currently available'.format(self.servername))\n log.critical(err)\n ...
[ "0.67359835", "0.6397967", "0.6189997", "0.6074653", "0.60194963", "0.58749896", "0.58332354", "0.58151186", "0.58065194", "0.58065194", "0.57931924", "0.57492477", "0.5701048", "0.5651594", "0.56239885", "0.5590371", "0.5548968", "0.54814523", "0.545041", "0.5434488", "0.543...
0.59302324
5
Note, SMTPTimeoutError vs SMTPConnectError here depends on processing time.
async def test_timeout_error_with_no_server(event_loop): client = SMTP(hostname="127.0.0.1", port=65534, loop=event_loop) with pytest.raises(SMTPTimeoutError): await client.connect(timeout=0.000000001)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _connect_smtp(self):\n smtp = None\n try:\n smtp = smtplib.SMTP(self.servername, timeout = self.timeout)\n except smtplib.SMTPException as err:\n log.critical('smtp service at {} is not currently available'.format(self.servername))\n log.critical(err)\n ...
[ "0.67359835", "0.6189997", "0.6074653", "0.60194963", "0.59302324", "0.58749896", "0.58332354", "0.58151186", "0.58065194", "0.58065194", "0.57931924", "0.57492477", "0.5701048", "0.5651594", "0.56239885", "0.5590371", "0.5548968", "0.54814523", "0.545041", "0.5434488", "0.54...
0.6397967
1
The `data` command is a special case it access protocol directly, rather than using `execute_command`.
async def test_disconnected_server_raises_on_data_read(preset_client): await preset_client.connect() preset_client.server.responses.append(b"250 Hello there") await preset_client.ehlo() preset_client.server.responses.append(b"250 ok") await preset_client.mail("sender@example.com") preset_clie...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def command(self, inst_data: int, buf: bytes, /) -> None:", "def execute(self, data, options):\n raise NotImplementedError()", "def cmd(self, data, enable):\n pass", "def call_and_feed(cmd, data):\n p = Popen(cmd, shell=True, stdin=PIPE)\n p.stdin.write(data)\n p.stdin.close()\n ret...
[ "0.7060531", "0.6570072", "0.6510051", "0.63995844", "0.6374746", "0.6351639", "0.63384765", "0.62793285", "0.6275475", "0.62704265", "0.61805314", "0.61763734", "0.6167132", "0.61131483", "0.60688186", "0.60665303", "0.6035471", "0.60299027", "0.5990287", "0.5981012", "0.596...
0.0
-1
The `data` command is a special case it accesses protocol directly, rather than using `execute_command`.
async def test_disconnected_server_raises_on_data_write(preset_client): await preset_client.connect() preset_client.server.responses.append(b"250 Hello there") await preset_client.ehlo() preset_client.server.responses.append(b"250 ok") await preset_client.mail("sender@example.com") preset_cli...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def command(self, inst_data: int, buf: bytes, /) -> None:", "def cmd(self, data, enable):\n pass", "def command(dev, code, data='', verbose=False):\n communicate(dev, a2b_hex('A' + code) + data.encode('ascii'), a2b_hex('B' + code), verbose=verbose)", "def execute(self, data, options):\n rais...
[ "0.71024805", "0.65037835", "0.63819116", "0.636673", "0.63638586", "0.63270706", "0.62600714", "0.62551844", "0.62551564", "0.62109536", "0.6167672", "0.6122832", "0.61040425", "0.60816944", "0.60466945", "0.60187083", "0.60072166", "0.59970105", "0.5980538", "0.59644943", "...
0.0
-1
The `starttls` command is a special case it accesses protocol directly, rather than using `execute_command`.
async def test_disconnected_server_raises_on_starttls(preset_client): await preset_client.connect() preset_client.server.responses.append( b"\n".join([b"250-localhost, hello", b"250-SIZE 100000", b"250 STARTTLS"]) ) await preset_client.ehlo() preset_client.server.responses.append(b"220 begi...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __starttls(self, keyfile=None, certfile=None):\n if not self.has_tls_support():\n raise Error(\"STARTTLS not supported by the server\")\n code, data = self.__send_command(\"STARTTLS\")\n if code != \"OK\":\n return False\n try:\n nsock = ssl.wrap_soc...
[ "0.7027462", "0.6803335", "0.6757425", "0.6352501", "0.6308599", "0.6119223", "0.6119223", "0.59911376", "0.5971303", "0.5914122", "0.57055014", "0.5575204", "0.55595917", "0.5529245", "0.55291855", "0.53638923", "0.5362069", "0.5353421", "0.5348377", "0.5279511", "0.52661175...
0.47385627
67
Exceptions can be raised, but the context manager should handle disconnection.
async def test_context_manager_disconnect_handling(preset_server, event_loop): preset_client = SMTP( hostname=preset_server.hostname, port=preset_server.port, loop=event_loop ) async with preset_client: assert preset_client.is_connected preset_server.responses.append(b"250 noop") ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def connection_lost(self, exc):\n pass", "def __exit__(self, exc_type, exc_val, exc_tb):\n self.conn.close()\n if exc_val:\n raise", "async def __aexit__(self, exc_type, exc_value, traceback):\n\n # Close the connection\n await self.disconnect()", "def connection...
[ "0.721711", "0.7171181", "0.7018697", "0.7018545", "0.67196554", "0.6688977", "0.66760486", "0.6672632", "0.666469", "0.666469", "0.666469", "0.66525275", "0.66458035", "0.6639069", "0.6637687", "0.6637687", "0.6637687", "0.6637687", "0.6625044", "0.661997", "0.6600019", "0...
0.0
-1
Renders its contents to a string using the current context, allowing you to process template variables embedded in things like model content, djangoflatblocks, etc.
def render_inline(parser, token): nodelist = parser.parse(('end_render_inline',)) parser.delete_first_token() return RenderInlineNode(nodelist)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def render_string(self, template: str, **vars) -> str:", "def render(self, template: str, **vars) -> str:", "def _render_context(self, template, block, **context):\n return u''.join(block(template.new_context(context)))", "def get_rendered_text(self, context):\n missing = set()\n for req...
[ "0.74136245", "0.73672175", "0.72275555", "0.7093623", "0.7001947", "0.6995675", "0.6968364", "0.69323826", "0.6897053", "0.67660546", "0.6755247", "0.6604185", "0.6604105", "0.6596961", "0.6593976", "0.65834063", "0.6507833", "0.64963186", "0.6459822", "0.64542526", "0.64530...
0.0
-1
Trivial helper for the common case where you have a dictionary and want one value
def get_key(dict, key): return dict.get(key, None)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dict_first(my_dict: Dict) -> Any:\n return list(my_dict.values())[0]", "def _single_getitem(self, key):\n try:\n return self._dict[key]\n except KeyError:\n return self.default", "def safely_get_value(dct: Mapping, key: Any,\n default: Union[T, Non...
[ "0.7501027", "0.7214503", "0.7051891", "0.70475656", "0.7027528", "0.69873375", "0.6776066", "0.67013735", "0.66397923", "0.66397923", "0.66397923", "0.66397923", "0.66311496", "0.6604605", "0.65671986", "0.6557712", "0.6530084", "0.6483239", "0.6474412", "0.642642", "0.63722...
0.5995878
43
Initializer for the Symmetric Key Registration Client
def __init__(self, mqtt_state_based_provider): super(SymmetricKeyProvisioningDeviceClient, self).__init__(mqtt_state_based_provider) self._polling_machine = PollingMachine(mqtt_state_based_provider)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self):\n self.key = b'FSMF73R873YM187R'\n self.signer = AES.new(self.key, AES.MODE_EAX)\n self.verifier = AES.new(self.key, AES.MODE_EAX, nonce=self.signer.nonce)", "def _init_keys(self):\n\n basic_constraints = crypto.X509Extension('basicConstraints'.encode('ascii'), Tru...
[ "0.64860773", "0.6398895", "0.63749385", "0.62866133", "0.6212949", "0.6190224", "0.6162016", "0.61214364", "0.60898226", "0.60414624", "0.603934", "0.6026967", "0.6023296", "0.59877056", "0.59869146", "0.5985004", "0.5983398", "0.5977729", "0.5918868", "0.59038883", "0.58940...
0.0
-1
Register the device with the provisioning service. This is a synchronous call, meaning that this function will not return until the registration process has completed successfully or the attempt has resulted in a failure. Before returning the client will also disconnect from the Hub. If a registration attempt is made w...
def register(self): logger.info("Registering with Hub...") register_complete = Event() def on_register_complete(result=None, error=None): # This could be a failed/successful registration result from the HUB # or a error from polling machine. Response should be given appr...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def register_device():\n payload = request.get_json()\n return _register_device(payload)", "def RegisterDeviceAndSendResponse(self, msg, username):\n device_id = self.GetUniqueParam('deviceid')\n if not device_id:\n return (400, 'Missing device identifier')\n\n token_info = self.server.Regist...
[ "0.7048597", "0.6574808", "0.63383627", "0.6312321", "0.6306717", "0.6257553", "0.6199068", "0.61455584", "0.60931623", "0.60073423", "0.5975581", "0.5859668", "0.57837015", "0.57369715", "0.57089573", "0.5657908", "0.5651063", "0.56088966", "0.55459535", "0.5511144", "0.5496...
0.7058231
0
This is a synchronous call, meaning that this function will not return until the cancellation process has completed successfully or the attempt has resulted in a failure. Before returning the client will also disconnect from the Hub. In case there is no registration in process it will throw an error as there is no regi...
def cancel(self): logger.info("Cancelling the current registration process") cancel_complete = Event() def on_cancel_complete(): cancel_complete.set() logger.info("Successfully cancelled the current registration process") self._polling_machine.cancel(callback=on...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def disconnect(self):\n if self.is_connected:\n try:\n self.client.unregister()\n finally:\n if self.client.is_running:\n self.client.stop()\n self.hub.disconnect()", "async def async_cancel(self):\n raise NotImpl...
[ "0.561611", "0.53545636", "0.53502023", "0.52480394", "0.5240914", "0.5077308", "0.50643533", "0.49984068", "0.49921355", "0.498722", "0.49676874", "0.49660704", "0.49642637", "0.49493033", "0.49309194", "0.49151906", "0.4906097", "0.4895857", "0.48792404", "0.48342264", "0.4...
0.6079265
0
read dataset from file
def read_data(self, filepath, is_build_vocab=False): with open("general_list.pkl", "rb") as file: self.general_list = pl.load(file) self.vocab.token2idx = {"<pad>": 0, "<unk>": 1} print(len(self.general_list)) ll = 2 for token in self.general_list: ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_dataset(file_path):\n return Dataset.load(file_path)", "def open_file(path):\n input_file = os.path.join(path)\n with open(input_file) as f:\n dataset = f.read()\n return dataset", "def import_dataset(fpath):\r\n data = read_csv(fpath)\r\n print(data.head())\r\n print(data....
[ "0.7562527", "0.7480861", "0.7459469", "0.7441094", "0.7441094", "0.7398603", "0.73654073", "0.72704476", "0.72160506", "0.7185216", "0.7167113", "0.7102375", "0.7040365", "0.7037207", "0.7018122", "0.70112073", "0.69961697", "0.69836414", "0.6924565", "0.69151974", "0.685023...
0.0
-1
Ask a yes/no/quit question via raw_input() and return their answer. "question" is a string that is presented to the user. "default" is the presumed answer if the user just hits . It must be "yes" (the default), "no", "quit" or None (meaning an answer is required of the user). The "answer" return value is one of "yes", ...
def query_yes_no_quit(question, default="yes"): valid = {"yes":"yes", "y":"yes", "ye":"yes", "no":"no", "n":"no", "quit":"quit", "qui":"quit", "qu":"quit", "q":"quit"} if default == None: prompt = " [y/n/q] " elif default == "yes": prompt = " [Y/n/q] " elif default == "no": prompt = " [y/N/q...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def query_yes_no_quit(question, default=\"yes\"):\n valid = {\"yes\":\"yes\", \"y\":\"yes\", \"ye\":\"yes\",\n \"no\":\"no\", \"n\":\"no\",\n \"quit\":\"quit\", \"qui\":\"quit\", \"qu\":\"quit\", \"q\":\"quit\"}\n if default == None:\n prompt = \" [y/n/q] \"\n elif ...
[ "0.8437878", "0.8223772", "0.8212373", "0.8156623", "0.8149691", "0.81355673", "0.81233436", "0.8122114", "0.8120957", "0.8120957", "0.8120957", "0.8120957", "0.8120957", "0.8120957", "0.8120957", "0.8120957", "0.81196237", "0.81192315", "0.81192315", "0.81192315", "0.8119231...
0.834716
1
Returns the url to access a detail record for this book.
def get_absolute_url(self): return reverse("mountain", args=[str(self.state_name), str(self.name)])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_absolute_url(self):\n return reverse('book-detail', args=[str(self.id)]) \n # Returns an URL that can be used to access a detail record for this model \n # (for this to work we will have to \n # -- Define a URL mapping that has the name 'book-detail' (name='book-detail')\n ...
[ "0.76990575", "0.7611185", "0.7257499", "0.7257499", "0.7257499", "0.7117704", "0.6992158", "0.69547594", "0.6935497", "0.684423", "0.6591669", "0.6543265", "0.6541738", "0.64656514", "0.6445659", "0.64323515", "0.64318234", "0.6417527", "0.6407866", "0.638662", "0.6367192", ...
0.0
-1
Loads directly from a joint/pose/match network's stored checkpoint
def load_weights(self, state_dict): own_state = self.state_dict() # Copy the convloutional layers for name, param in state_dict.iteritems(): if 'base_conv' in name: own_state[name].copy_(param) # Convert the FC layers to convolutional layers own_state[...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_checkpoint(self, session, model_dir):\n assert self.params.cp_load == True, (\"cp_load must be set to true to load a checkpoint\")\n self.loader.restore(session, model_dir)", "def load_checkpoint(self, label):\n model_dir = os.path.join(\n config.results_dir, config.experiment_na...
[ "0.7687991", "0.76203847", "0.7493186", "0.74810845", "0.74057525", "0.7371094", "0.7363626", "0.7347734", "0.72986746", "0.7289724", "0.7277159", "0.7276603", "0.72691387", "0.72505075", "0.7244596", "0.7222293", "0.7212705", "0.72107184", "0.72104317", "0.7187941", "0.71760...
0.0
-1
Creates invoice related analytics and financial move lines
def action_move_create(self): account_move = self.env['account.move'] for inv in self: if not inv.journal_id.sequence_id: raise UserError(_('Please define sequence on the journal related to this invoice.')) if not inv.invoice_line_ids.filtered(lambda line: line.account_id): raise UserError(_('Please ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _create_invoice(self):\n self.ensure_one()\n partner = self.member_id.partner_id\n invoice = self.env['account.invoice'].create({\n 'partner_id': partner.id,\n 'account_id': partner.property_account_receivable_id.id,\n 'fiscal_position_id': partner.property...
[ "0.74461937", "0.7406681", "0.70905787", "0.7032683", "0.69090813", "0.68788975", "0.6871585", "0.68584", "0.68474764", "0.68273807", "0.6741477", "0.6672866", "0.6667218", "0.65647554", "0.65585667", "0.63950473", "0.6327984", "0.6303532", "0.6284394", "0.6256989", "0.623792...
0.65702105
13
Compute the mean absolute error on test set given X, y, and model parameter w.
def mean_absolute_error(w, X, y): ##################################################### # TODO 1: Fill in your code here # ##################################################### err = None temp = np.dot(X, w) err = np.mean(np.abs(_error(y, temp))) return err
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mean_absolute_error(w, X, y):\n #####################################################\n # TODO 1: Fill in your code here #\n #####################################################\n if w is None:\n return None\n\n err = None\n yhat = np.dot(X , w)\n err = np.abs(np.subtract(yhat,y))....
[ "0.77339864", "0.76291317", "0.7148356", "0.7034834", "0.68424296", "0.67941666", "0.66706353", "0.6591392", "0.6557777", "0.6557774", "0.6482599", "0.6372792", "0.63640726", "0.6351532", "0.63476115", "0.63377297", "0.63268167", "0.6277267", "0.6224823", "0.61900854", "0.616...
0.7912273
0
Compute the weight parameter given X and y.
def linear_regression_noreg(X, y): ##################################################### # TODO 2: Fill in your code here # ##################################################### temp = X.T result = np.dot(temp, X) result = np.linalg.inv(result) result = np.dot(result, temp) w = np.dot(re...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def weight(self, y, xn, xo):\n\n return self._model.log_prob(y, xn) + self._model.h_weight(xn, xo) - self._kernel.log_prob(xn)", "def compute_weight(self, y, x, test_x=None, test_y=None, **kwargs):\n model = copy.copy(self)\n model.__setattr__('train_y', y)\n model.__setattr__('train_...
[ "0.7623957", "0.7152357", "0.687893", "0.68283564", "0.6542995", "0.64705515", "0.6444899", "0.64352334", "0.63989145", "0.63828194", "0.6370652", "0.63449967", "0.63418835", "0.6321859", "0.6318847", "0.6310803", "0.6279792", "0.62733525", "0.62622386", "0.62539375", "0.6229...
0.0
-1
Compute the weight parameter given X and y.
def linear_regression_invertible(X, y): ##################################################### # TODO 3: Fill in your code here # ##################################################### w = None X_X_T = np.dot(X.T, X) ev = 0 while ev < (10**-5): ev = np.min((np.linalg.eig(X_X_T)[0])) ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def weight(self, y, xn, xo):\n\n return self._model.log_prob(y, xn) + self._model.h_weight(xn, xo) - self._kernel.log_prob(xn)", "def compute_weight(self, y, x, test_x=None, test_y=None, **kwargs):\n model = copy.copy(self)\n model.__setattr__('train_y', y)\n model.__setattr__('train_...
[ "0.7623957", "0.7152357", "0.687893", "0.68283564", "0.6542995", "0.64705515", "0.6444899", "0.64352334", "0.63989145", "0.63828194", "0.6370652", "0.63449967", "0.63418835", "0.6321859", "0.6318847", "0.6310803", "0.6279792", "0.62733525", "0.62622386", "0.62539375", "0.6229...
0.0
-1
Compute the weight parameter given X, y and lambda.
def regularized_linear_regression(X, y, lambd): ##################################################### # TODO 4: Fill in your code here # ##################################################### w = None X_X_T = np.dot(X.T, X) X_X_T += lambd * np.identity(X_X_T.shape[0]) w = np.dot(np.dot(np.linalg.i...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def value(a, y, weights, lambda_):\n\t\treturn 0.5* (np.linalg.norm(a-y)**2) / (a.shape[0] * a.shape[1])\n\t\t# return unregularize + (0.5*lambda_*np.sum(np.square(weights[-1])) / (a.shape[0] * a.shape[1])) ", "def value(a, y, weights, lambda_):\n\t\treturn np.sum(np.nan_to_num(-y*np.log(a + 1e-15)-(1-y)*np.log(...
[ "0.7590008", "0.6892587", "0.6760674", "0.66900504", "0.6663615", "0.6605572", "0.65382785", "0.65217125", "0.64970404", "0.64742047", "0.64711386", "0.6437741", "0.6418927", "0.63539314", "0.6347361", "0.63001853", "0.6257059", "0.62478036", "0.62149435", "0.6209357", "0.616...
0.0
-1
Find the best lambda value.
def tune_lambda(Xtrain, ytrain, Xval, yval): ##################################################### # TODO 5: Fill in your code here # ##################################################### bestlambda = None err = 1 for v in range(-19,20): if v>=0: val = float("1e+"+str(v)) ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_lambda(model):\n best_lambdas = [1000.0, 0.001, 100.0, 0.001, 100.0, 100.0, 0.001, 100.0]\n lambda_ = best_lambdas[model]\n return lambda_", "def __bestLambda(self):\n\t\t\n\t\t# Determine starting value for brent-method (to avoid local minimum).\n\t\tself.startValue = self.__findStartValue()\n\...
[ "0.74376184", "0.71890557", "0.655791", "0.6550202", "0.63085747", "0.6219973", "0.6184345", "0.61574817", "0.6153785", "0.61114615", "0.6099454", "0.60798913", "0.6037426", "0.6037426", "0.6017245", "0.5970384", "0.58916986", "0.58453995", "0.5843659", "0.5816554", "0.578878...
0.6234567
5
r"""Convert ratio to decibels. Converting a ratio to decibels depends on whether the ratio is a ratio of amplitudes or a ratio of powers. For amplitudes the decibel value is
def dB(x, power=False): if power: return 10 * np.log10(np.abs(x)) else: return 20 * np.log10(np.abs(x))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def decibel(x):\n return 10.0 * np.log10(x)", "def idecibel(x):\n return 10.0 ** (x / 10.0)", "def _bcd2dec(self, value):\n return ((value >> 4) * 10) + (value & 0x0F)", "def ppcm_denominateurs(self):\n\t\tl = []\n\t\tn = 1\n\t\tif self.__valide:\n\t\t\tfor m in self.liste_decroissante():\n\t\t\...
[ "0.5972083", "0.58928466", "0.5669203", "0.56526047", "0.55357", "0.55007315", "0.5486681", "0.54774976", "0.544386", "0.5438974", "0.53794795", "0.53770524", "0.53531665", "0.52374", "0.5221092", "0.51855433", "0.5179858", "0.5170302", "0.51680213", "0.5166022", "0.51482767"...
0.4874277
42
Iterate over orders. Iterator to iterate over the orders in the indexer. Will enable the synchronized `modes` iterator.
def orders(self): self._current_order = self.min_order while self._current_order <= self.max_order: yield self._current_order self._current_order += 1 del self._current_order
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def modes(self):\n try:\n order = self._current_order\n except AttributeError:\n raise AttributeError('Cannot iterate over modes without iterating over orders!') from None\n mode = -order\n while mode <= order:\n yield mode\n mode += 1", "de...
[ "0.67183816", "0.6654742", "0.6436368", "0.6087598", "0.59756845", "0.5863205", "0.5597003", "0.55368114", "0.5527949", "0.5482221", "0.547718", "0.5439024", "0.54328644", "0.543001", "0.53729576", "0.53424174", "0.53381103", "0.5320791", "0.5310121", "0.52943534", "0.5290345...
0.58461016
6
Iterate over modes. Synchronized iterator to iterate the modes in an order.
def modes(self): try: order = self._current_order except AttributeError: raise AttributeError('Cannot iterate over modes without iterating over orders!') from None mode = -order while mode <= order: yield mode mode += 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __iter__(self):\n return iter([v for k, v in sorted(self._modes.items())])", "def get_modes(self):\n return [i for i, j in enumerate(self._modemap._map) if j is not None]", "async def _load_modes(self) -> None:\n modes: List[Dict[str, Any]] = await self._api_request(\"modes\")\n ...
[ "0.7799798", "0.63018954", "0.6299035", "0.62619734", "0.62619734", "0.6169084", "0.61189497", "0.60541093", "0.5993781", "0.5993781", "0.59646934", "0.58745813", "0.58432204", "0.58386713", "0.5829693", "0.57918906", "0.57898873", "0.57819664", "0.5591517", "0.5588966", "0.5...
0.78386045
0
Sum spherical harmonics coefficients of the same order. Calculates the sum of the coefficients for all modes for each order individually. The `SphericalHarmonicsIndexer` needs to be created to match the orders of the expansion coefficients. This requires that the length of the summation axis is the same as the number o...
def ordersum(self, values, axis=None): values = np.asarray(values) if axis is None: for axis in range(values.ndim): if values.shape[axis] == len(self): break else: raise ValueError('Cannot find axis of length {} in the given val...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _harmonic_sum(self, rank: int) -> complex:\n return (self.flm * self.slepian.eigenvectors[rank].conj()).sum()", "def sum(self, axis: int = 0):\r\n self.values = self.values.sum(axis=axis)\r\n self.layers = [None]\r\n return self.copy()", "def sh( values ):\n # ECMWF normalize...
[ "0.60549116", "0.5197901", "0.51612186", "0.5089202", "0.5049697", "0.49782285", "0.49448317", "0.4918859", "0.49002635", "0.48757395", "0.48220482", "0.47755814", "0.47722915", "0.47557205", "0.4742954", "0.4741564", "0.4729267", "0.47249097", "0.4709373", "0.47004437", "0.4...
0.5082671
4
r"""Find the approximate location of a levitation trap. Find an approximate position of a acoustic levitation trap close to a starting point. This is done by following the radiation force in the sound field using an differential equation solver. The differential equation is the unphysical equation
def find_trap(array, start_position, complex_transducer_amplitudes, tolerance=10e-6, time_interval=50, path_points=1, **kwargs): from scipy.integrate import solve_ivp from numpy.linalg import lstsq if 'radius' in kwargs: from .fields import SphericalHarmonicsForce as Force, SphericalHarmonicsForceGr...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def anl_solution(self):\r\n\r\n m = float(self.mass) / self.nu_m\r\n qe = 1 / self.nu_m * (self.nu_t * self.nu_t / self.nu_x) * 1.0 \\\r\n / float(self.size_tick * self.size_tick)\r\n print 'qE=', qe\r\n c = self.light_vel\r\n for i in range(0, len(self.obs.obt_g)):\r\...
[ "0.5917251", "0.5454485", "0.53869474", "0.53047276", "0.5189361", "0.51892936", "0.5161829", "0.5157585", "0.51523453", "0.51253486", "0.51195055", "0.5104085", "0.5075305", "0.5061984", "0.50457203", "0.50427437", "0.50212985", "0.49925196", "0.498679", "0.49814695", "0.497...
0.63866895
0
Do an internal (non302) redirect to the front page. Preserves the user agent's requested URL.
def show_main_page(request, error_msg=None): request.method='GET' return MainPage(request, error_msg)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def home_page():\n return redirect(url_for(_DEFAULT_ROUTE, _external=True))", "def root_redirect():\r\n return redirect(url_for(\"display_top\"))", "def redirect(url):", "def redirect(self, url):\n raise RequestRedirect(url)", "def redirect(uri):\n response = HttpResponse('', status=302)\n ...
[ "0.69821596", "0.6857515", "0.68187505", "0.65282667", "0.65206647", "0.64767575", "0.6462469", "0.6432681", "0.6411949", "0.63972974", "0.6323725", "0.63229686", "0.6311373", "0.63092226", "0.62773234", "0.6263432", "0.625649", "0.625649", "0.6236038", "0.6210916", "0.61977"...
0.0
-1
Return criteria given by user and the column in the database it refers to.
def select(): file_title, song_title = [None, None], [None, None] artist, data, tag, form = [None, None], [None, None], [None, None], [None, None] while True: file_title[0] = input("Would you like to select by file name?[Y/N]\t") if file_title[0] == 'Y': file_title[1] = inp...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def search_column_with_constraint(db, table, column, condition_col, condition_val):\n condition = condition_col + \" = '\" + str(condition_val) + \"'\"\n result = select_columns(db, table, column, condition=condition)\n\n return result", "def get_criteria(self):\n\n\t\treturn self.__criteria", "def ge...
[ "0.5852314", "0.57501423", "0.5629169", "0.5598633", "0.5358296", "0.5331501", "0.5246509", "0.52190596", "0.5164159", "0.51409566", "0.50485057", "0.50430083", "0.50312936", "0.503079", "0.5022927", "0.49895406", "0.49535307", "0.49471778", "0.49386907", "0.49316448", "0.491...
0.0
-1
Initialization of the tool Make database and table and connect to the database
def __init__(self): self.cnx = mysql.connector.connect(user='root', password='', host='127.0.0.1', database='songstorage') # Connect to mySQL, username root, password none self.cursor = self.cnx.cursor() # Init...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _init_db(self):\n cursor = self._main_connection.cursor()\n cursor.execute(self.sql[\"create_table\"])\n self._main_connection.commit()", "def create_db(self):", "def setdb():\n\n if not database_exists(DB_URL):\n print('Creating database.')\n create_database(DB_URL)\n...
[ "0.7486164", "0.7400551", "0.7391027", "0.73457515", "0.7313018", "0.726847", "0.7231708", "0.71860963", "0.7173379", "0.71718526", "0.7118039", "0.7113357", "0.7111005", "0.7111005", "0.7058268", "0.70503473", "0.70164067", "0.6996071", "0.6991648", "0.69865584", "0.6979697"...
0.0
-1
Waits for a command and calls the right function.
def start_tool(self): while True: com = input("Give a command. Type H for help...:\t ").lower() # Receive a command from the user if com == 'h': print('Available commands: H, Exit, Play, Stop, Pause, Add_song, Delete_song, Modify_data, ' 'Creat...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def do_command(command):\n send_command(command)\n # time.sleep(0.1) # may be required on slow machines\n response = get_response()\n print(\"Rcvd: <<< \" + response)\n return response", "def _execute(self):\n LOG.info(\"Waiting for a message...\")", "def execute_command_async(self, comma...
[ "0.6850583", "0.6637179", "0.66318923", "0.66277975", "0.65436953", "0.64919645", "0.64890045", "0.6457957", "0.6421299", "0.63659066", "0.63242984", "0.62695634", "0.62560266", "0.6223955", "0.62138736", "0.61878145", "0.61755884", "0.6169835", "0.6162347", "0.6137532", "0.6...
0.0
-1
Play a song based on its path.
def play_song(self): path = input('Give path to wanted song: ') # Request path to song path = path.replace('\\', '/') if not self.path_storage_re.match(path): # Check if the wanted song is from the storage directory print("Give a valid path") else: p = vlc...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def play(path):\n sound = AudioSegment.from_mp3(path)\n playback.play(sound)", "def play(self, songpos=None):\n # TODO: implement songpos !\n if songpos is None:\n resp = yield from self.command('play')\n return True", "def play(song):\n # Show the metadata\n if (ver...
[ "0.79737085", "0.68196875", "0.66903174", "0.66628766", "0.65618736", "0.6544999", "0.648962", "0.64856863", "0.6419521", "0.6384046", "0.6374167", "0.633997", "0.63393456", "0.63371986", "0.63166153", "0.6316028", "0.6295789", "0.6295789", "0.62778527", "0.6240952", "0.62402...
0.8189756
0
Stop the current playing/paused song.
def stop_song(self): if self.isPlaying: self.playSong[0].stop() self.playSong.clear() self.isPlaying = False print("Music stopped") else: print("Play a song first...")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def _stop(self, ctx: commands.Context):\n ctx.voice_state.songs.clear()\n\n if ctx.voice_state.is_playing:\n ctx.voice_state.voice.stop()\n return await ctx.send(embed=embed_msg(description=\"🛑 Stopped the music\"))\n\n else:\n return await ctx.send('Can...
[ "0.7680431", "0.76113164", "0.759389", "0.75338066", "0.75338066", "0.7464192", "0.74512273", "0.7417875", "0.7278527", "0.71976995", "0.71916264", "0.7135208", "0.710452", "0.69504833", "0.6907551", "0.68905944", "0.6856153", "0.6830112", "0.68043816", "0.6791049", "0.675709...
0.81801236
0
Pause the current playing song.
def pause_song(self): if self.isPlaying: self.playSong[0].pause() print("Song paused. To continue type Play.") else: print("Play a song first...")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def pause(self):\n self.paused = True\n # FIXME?: Why is this not doing anything? Shouldn't it be calling into the player API?", "def pause(self):\n if not self.paused:\n pygame.mixer.music.pause()\n self.paused = True\n else:\n pygame.mixer.music.un...
[ "0.7713319", "0.74910986", "0.74175787", "0.7405902", "0.7326798", "0.72035414", "0.71642816", "0.71472704", "0.7133736", "0.7114518", "0.70905143", "0.7087032", "0.7062801", "0.70473045", "0.70375997", "0.70167387", "0.70167387", "0.6942558", "0.6881331", "0.6881242", "0.683...
0.82099915
0
Add song to the storage directory and to the database. Return ID of the new song / error message.
def add_song(self): path = input("Give file path:\t") # Request file path path = path.replace('\\', '/') if self.path_song_re.match(path) and not self.path_storage_re.match( path): # Check that the path leads to a song that is not already found in Storage copy(...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_song():\n options = queue.instantiate_options()\n raw_queue = queue.instantiate_queue()\n track_id = request.args.get('song')\n\n for song in raw_queue:\n if song['track_id'] == track_id[14:]:\n return json.dumps({'error': 'Cannot add a song already in the queue'})\n\n num_...
[ "0.72021145", "0.7178171", "0.68124354", "0.67616093", "0.6730884", "0.661579", "0.6544573", "0.6535995", "0.65308034", "0.6529165", "0.6515031", "0.64952904", "0.64827377", "0.6465418", "0.6454924", "0.64515936", "0.6450223", "0.64397067", "0.64342123", "0.63851374", "0.6346...
0.8322641
0
Remove song from database and from the storage directory based on ID
def delete_song(self): song_id = tuple(input("Give the melody id to be deleted:\t")) sql = "SELECT file_title, form FROM songs WHERE id = %s" # Check existence of song with given ID self.cursor.execute(sql, song_id) result = self.cursor.fetchall() if len(result) > 0: ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_song(_id):\r\n Song.query.filter_by(id=_id).delete()\r\n # filter song by id and delete\r\n db.session.commit() # commiting the new change to our database\r", "def delete_music():\n track_id = request.vars.track_id\n if track_id is None:\n raise HTTP(500)\n db(db....
[ "0.724472", "0.68608385", "0.68424237", "0.6805456", "0.66874766", "0.6629392", "0.65805244", "0.65743804", "0.6480073", "0.6455035", "0.64183515", "0.6390148", "0.63755953", "0.6350693", "0.63068855", "0.6271331", "0.62588185", "0.6248313", "0.62059045", "0.6190672", "0.6164...
0.76542425
0
Modifies song info in the database
def modify_data(self): song_id = tuple(input("Give the id of the song to be modified:\t")) # Request song ID sql = "SELECT song_title, artist, data, tag FROM songs WHERE id = %s" # Find song with given ID self.cursor.execute(sql, song_id) res = self.cursor.fetchall() if le...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_db(self):\n songs = self.db.get_all_songs()\n for song in songs:\n if choose_song(song) == ERROR:\n self.db.delete_song(song)\n files = []\n for song in glob.glob(\"songs\\*.wav\"):\n to_append = song.split('\\\\')[ONE][:-4]\n f...
[ "0.7413717", "0.6995219", "0.68380946", "0.6823178", "0.66802067", "0.6608994", "0.6446211", "0.6343561", "0.6318853", "0.6218813", "0.6192342", "0.6175601", "0.6141071", "0.61293864", "0.61132926", "0.60554576", "0.6035266", "0.6034275", "0.5926394", "0.592424", "0.591346", ...
0.70110184
1
Create a Batch from an existing batch id. Notes
def from_batch_id(batch_id: int, *args, **kwargs): b = Batch(*args, **kwargs) assert isinstance(b._backend, _backend.ServiceBackend) b._batch_handle = b._backend._batch_client.get_batch(batch_id) return b
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_batch(self, batch_name, priority = 0, comments = '', notifications = []):\n\n url = self._base_url + urlConfig.URLS['Project'] + '/' + self._project_id + '/batch'\n batch = {\n \"batch_name\": batch_name,\n \"priority\": priority,\n \"comments\": comments,\...
[ "0.69767964", "0.69643956", "0.6652114", "0.6631825", "0.61280215", "0.6110446", "0.60883904", "0.60016143", "0.59871805", "0.5886255", "0.5873244", "0.58249927", "0.5809669", "0.57384014", "0.5699515", "0.56257725", "0.56246674", "0.5616465", "0.551616", "0.53761625", "0.537...
0.7599396
0
Create a new input resource file object representing a single file.
def read_input(self, path: str) -> _resource.InputResourceFile: irf = self._new_input_resource_file(path) return irf
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_file_object(inputfile=None):\n if type(inputfile) == str:\n return open(inputfile, 'r')\n return inputfile", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n content_type: Optional[pulumi.Input[U...
[ "0.6874475", "0.6812216", "0.65446967", "0.65262955", "0.6509462", "0.6438844", "0.6272451", "0.62469476", "0.61195093", "0.60929656", "0.6091407", "0.60815406", "0.60800767", "0.6025725", "0.5954721", "0.59426826", "0.59278876", "0.59035605", "0.5878973", "0.5866811", "0.585...
0.70438915
0
Create a new resource group representing a mapping of identifier to input resource files.
def read_input_group(self, **kwargs: str) -> _resource.ResourceGroup: root = secret_alnum_string(5) new_resources = {name: self._new_input_resource_file(file, root) for name, file in kwargs.items()} rg = _resource.ResourceGroup(None, root, **new_resources) self._resource_map.update({rg....
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_create_resource_group(self):\n pass", "def load(self):\n self.suite.load()\n self.resource_map = {}\n dirlist = os.listdir(self.resources)\n for resource_name in (name for name in dirlist\n if os.path.isfile(os.path.join(self.resources,name...
[ "0.58438635", "0.5761084", "0.55505747", "0.5540623", "0.5514604", "0.55128634", "0.5483182", "0.54395956", "0.5409617", "0.5397805", "0.5392991", "0.53768927", "0.5376017", "0.5359928", "0.5356735", "0.5327738", "0.52933335", "0.52924156", "0.5286034", "0.52811337", "0.52422...
0.6814466
0
Write resource file or resource file group to an output destination. Examples
def write_output(self, resource: _resource.Resource, dest: str): if not isinstance(resource, _resource.Resource): raise BatchException(f"'write_output' only accepts Resource inputs. Found '{type(resource)}'.") if (isinstance(resource, _resource.JobResourceFile) and isinstanc...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def write(self, output, resources, **kw):\n\n raise NotImplementedError()", "def write_resources(self, resources):\n for filename, data in list(resources.get('outputs', {}).items()):\n # Determine where to write the file to\n dest = os.path.join(self.output_dir, filename)\n ...
[ "0.7189583", "0.67922026", "0.6485851", "0.6471249", "0.5966388", "0.5966388", "0.5919293", "0.58600813", "0.5827604", "0.5729985", "0.5707116", "0.5665977", "0.55832523", "0.557501", "0.55459183", "0.54772556", "0.5467842", "0.5445758", "0.54371053", "0.54174685", "0.5403804...
0.68054146
1
Select all jobs in the batch whose name matches `pattern`. Examples
def select_jobs(self, pattern: str) -> List[job.Job]: return [job for job in self._jobs if job.name is not None and re.match(pattern, job.name) is not None]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def filter_jobs(jobs, keyword):\n for job in jobs:\n if keyword == \"all\":\n yield job\n elif job[\"name\"].find(keyword) != -1:\n yield job", "def search_by_pattern(self, tl):\n print(\"Search by regex pattern\")\n pattern = input(\"Please enter search patte...
[ "0.6330027", "0.57241905", "0.5717007", "0.5662955", "0.5661944", "0.5415436", "0.5401124", "0.53878415", "0.5367021", "0.5349991", "0.53413725", "0.53162223", "0.5308199", "0.5304073", "0.5292824", "0.5277535", "0.5271562", "0.52473783", "0.52251303", "0.5207287", "0.5199997...
0.7555124
0
Execute a batch. Examples
def run(self, dry_run: bool = False, verbose: bool = False, delete_scratch_on_exit: bool = True, **backend_kwargs: Any) -> Optional[_bc.Batch]: seen = set() ordered_jobs = [] def schedule_job(j): if j in seen: return ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run_batch(self, batch_x, batch_y):\n raise NotImplementedError()", "def ExecuteBatch(self, request, context):\n context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)", "def execute(self):\n return self._batch.execute()", "def BeginExecuteBatch(self, request, context):\n context.code(beta...
[ "0.7048897", "0.6716209", "0.66513216", "0.6633743", "0.65684175", "0.6428148", "0.6417398", "0.6358319", "0.63045424", "0.6275959", "0.6252536", "0.62219197", "0.6158494", "0.61560374", "0.61498094", "0.61498094", "0.60457814", "0.60279816", "0.6023647", "0.6015122", "0.6008...
0.0
-1
Initializes querysets for keyword and headlinekeyword
def __init__(self): self.keyword_queryset = Keyword.objects.all() self.headlinekeyword_queryset = Headlinekeyword.objects.all() self.headline_queryset = Headline.objects.all()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def keyword_headlines(self):\r\n\t\td = {}\r\n\r\n\t\tfor q in self.keyword_queryset:\r\n\t\t\td[q.content] = self.headlinekeyword_queryset.filter(keywordid = q.id)\r\n\r\n\t\treturn d", "def get_queryset(self):\r\n return Keyword.objects.all()", "def setup_eager_loading(cls, queryset):\n que...
[ "0.62303746", "0.62133765", "0.5979311", "0.59030783", "0.5808249", "0.5781533", "0.5741475", "0.57359666", "0.5595646", "0.55684435", "0.5490495", "0.54874605", "0.54597276", "0.5437996", "0.5390633", "0.5304389", "0.5293679", "0.5290758", "0.5277677", "0.5255377", "0.524341...
0.7972529
0
Returns a dictionary of the keywords and the list of corresponding headlines (ids only)
def keyword_headlines(self): d = {} for q in self.keyword_queryset: d[q.content] = self.headlinekeyword_queryset.filter(keywordid = q.id) return d
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_keywords(self):\r\n\t\treturn list(self.keyword_headlines().keys())", "def get_headlines_with_keyword(self, kw):\r\n\t\tkey_head = self.keyword_headlines()\r\n\r\n\t\theadlines = set()\r\n\r\n\t\tfor headlinekw in key_head[kw]:\r\n\t\t\tcontent = headlinekw.headlineid.content\r\n\t\t\theadlines.add(conte...
[ "0.763168", "0.7235824", "0.71703315", "0.65968394", "0.61900103", "0.6189536", "0.609543", "0.60951686", "0.60745686", "0.6041486", "0.6003403", "0.5991974", "0.5966871", "0.5924465", "0.5924083", "0.5896881", "0.5850133", "0.58487964", "0.58361", "0.58255136", "0.5808301", ...
0.81225014
0
Returns a list of keywords
def get_keywords(self): return list(self.keyword_headlines().keys())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def keywords(self):\n return list(self._kw)", "def keywords(self):\n return self._keywords", "def keywords(self):\n return self._keywords", "def getKeywords(self):\n return", "def keywords(self):\n return self.__keywords", "def extract_keywords(self):\n keywords ...
[ "0.82068", "0.80030465", "0.80030465", "0.79815817", "0.7965522", "0.795927", "0.79124683", "0.7827855", "0.7802334", "0.7794272", "0.77290803", "0.7597001", "0.7496878", "0.74123514", "0.7399484", "0.73888963", "0.737007", "0.7306008", "0.7305014", "0.73011196", "0.7273646",...
0.8675625
0
Returns a list of lists [word, number of headlines]
def keyword_frequencies(self, limit = None): key_head = self.keyword_headlines() freq_list = [] for keyword in key_head: numHeadlines = len(key_head[keyword]) if limit: if numHeadlines > limit: numHeadlines = limit freq_list.append([keyword, numHeadlines]) return freq_list
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_headlines(self):\n headlines = re.findall(r\"^\\.\\.\\.(.*?)\\.\\.\\.[ ]?\\n\\n\", self.unixtext,\n re.M | re.S)\n headlines = [\" \".join(h.replace(\"...\",\n \", \").replace(\"\\n\", \" \").split())\n ...
[ "0.6359982", "0.63080597", "0.62195677", "0.6214322", "0.6143615", "0.614284", "0.6101072", "0.60887766", "0.5984331", "0.5945753", "0.58900934", "0.5865218", "0.5827097", "0.5822467", "0.5805729", "0.5790344", "0.575367", "0.5737365", "0.5706382", "0.5671385", "0.5618535", ...
0.54709244
40
Returns a list of headlines if given a keyword
def get_headlines(self, kw = None): if kw: return self.get_headlines_with_keyword(kw) else: return self.get_all_headlines()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_headlines_with_keyword(self, kw):\r\n\t\tkey_head = self.keyword_headlines()\r\n\r\n\t\theadlines = set()\r\n\r\n\t\tfor headlinekw in key_head[kw]:\r\n\t\t\tcontent = headlinekw.headlineid.content\r\n\t\t\theadlines.add(content)\r\n\r\n\t\treturn list(headlines)", "def keyword_headlines(self):\r\n\t\td ...
[ "0.8183849", "0.7191412", "0.7000567", "0.66961074", "0.66491723", "0.66434336", "0.6356157", "0.6131674", "0.61187565", "0.59622735", "0.59542644", "0.5890983", "0.5875384", "0.57926047", "0.57580566", "0.5697247", "0.56422436", "0.5626344", "0.55865705", "0.5584343", "0.556...
0.8182654
1
Returns a list of all headlines
def get_all_headlines(self): list_vals = list(self.keyword_headlines().values()) uniq_headlines = set() for list_val in list_vals: for headlineobj in list_val: uniq_headlines.add(headlineobj.headlineid.content) return list(uniq_headlines)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_headlines(self, kw = None):\r\n\t\tif kw:\r\n\t\t\treturn self.get_headlines_with_keyword(kw)\r\n\t\telse:\r\n\t\t\treturn self.get_all_headlines()", "def all_headlines(html_root_node):\n pass", "def all_headlines_from(url):\n pass", "def parse_headlines(self):\n headlines = re.findall(r...
[ "0.81433356", "0.789887", "0.76946384", "0.7529572", "0.7259968", "0.7255244", "0.70318645", "0.70286566", "0.6947499", "0.6834149", "0.6717767", "0.6677363", "0.6588224", "0.6471544", "0.6457992", "0.64250094", "0.64140487", "0.64032876", "0.63827264", "0.635251", "0.6352418...
0.76380426
3
Returns a list of the headlines with the corresponding keyword
def get_headlines_with_keyword(self, kw): key_head = self.keyword_headlines() headlines = set() for headlinekw in key_head[kw]: content = headlinekw.headlineid.content headlines.add(content) return list(headlines)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_headlines(self, kw = None):\r\n\t\tif kw:\r\n\t\t\treturn self.get_headlines_with_keyword(kw)\r\n\t\telse:\r\n\t\t\treturn self.get_all_headlines()", "def keyword_headlines(self):\r\n\t\td = {}\r\n\r\n\t\tfor q in self.keyword_queryset:\r\n\t\t\td[q.content] = self.headlinekeyword_queryset.filter(keyword...
[ "0.8323906", "0.77197367", "0.746953", "0.7303126", "0.68182117", "0.6805935", "0.66141784", "0.64018786", "0.6364445", "0.63333815", "0.59744793", "0.59658384", "0.59578186", "0.5909768", "0.5894226", "0.58745706", "0.5857797", "0.5850383", "0.58470386", "0.58389264", "0.581...
0.8318916
1
Welcome route Show api info
def get(self, **kwargs): # groups = kwargs.get('groups') return { 'app_fullname': main_config.app_name, 'app_name': main_config.package_name, 'app_version': main_config.app_version }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def welcome():\n return(\n f\"Available Routes:<br/>\"\n f\"/api/v1.0/etf_info<br/>\"\n f\"/api/v1.0/mutualfunds_info\"\n )", "def welcome():\n print(\"Server received request for 'Home' page...\")\n return (\n f\"Available Routes:<br/>\"\n f\"/api/v1.0/names<br/>\"...
[ "0.78570443", "0.7700679", "0.76447624", "0.76302814", "0.76104695", "0.75441355", "0.7513181", "0.75017893", "0.748835", "0.748835", "0.747807", "0.7470789", "0.7469684", "0.7465586", "0.7445721", "0.74422014", "0.7441175", "0.7425304", "0.74079293", "0.7407577", "0.74064386...
0.0
-1
prettyprint a table. Every column's width is the width of the widest field in that column. The given table should be a list of lists. That is, it should be a list of rows, where every row is a list of fields. To get the width of each column, we'll transpose the table. For efficiency, if the caller already has a transpo...
def texttable(table, left=False): widths = (max(len(fld) for fld in line) for line in itertools.izip_longest(*table, fillvalue="")) lc = '-' if left else '' formats = ["%{0}{1}s".format(lc, width) for width in widths] return ORS.join("%s" % OFS.join(format % fld ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def print_table(table):\n # transpose the table:\n table = map(list, zip(*table))\n # get the column width:\n col_width = [max(len(str(x)) for x in col) for col in zip(*table)]\n # print it to screen:\n print\n for line in table:\n print \"| \" + \" | \".join(\"{:{}}\".format(x, col_wid...
[ "0.7072573", "0.70656246", "0.68265593", "0.6811348", "0.67818946", "0.6566554", "0.6547522", "0.6480734", "0.64619523", "0.6371777", "0.6231521", "0.61801976", "0.6153712", "0.61120504", "0.60410714", "0.60410714", "0.6018116", "0.6011745", "0.6011745", "0.6009689", "0.59695...
0.5369287
63
Get the nbest logits from a list.
def _get_best_indexes(logits, n_best_size): index_and_score = sorted(enumerate(logits), key=lambda x: x[1], reverse=True) best_indexes = [] for i in range(len(index_and_score)): if i >= n_best_size: break best_indexes.append(index_and_score[i][0]) return best_indexes
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_best_indexes(logits, n_best_size):\n index_and_score = sorted(enumerate(logits), key=lambda x: x[1], reverse=True)\n\n best_indexes = []\n for i in range(len(index_and_score)):\n if i >= n_best_size:\n break\n best_indexes.append(index_and_score[i][0])\n return best_indexes", "def _get_be...
[ "0.6809752", "0.67892396", "0.677396", "0.6740539", "0.6667522", "0.623828", "0.585659", "0.5845603", "0.5799747", "0.5723129", "0.56909186", "0.565728", "0.5616109", "0.5616109", "0.56132007", "0.56086874", "0.55966324", "0.5583005", "0.5578795", "0.55542535", "0.5510991", ...
0.67765886
4
Make sure that when we remove an option, only ~1/10th of the keys get moved.
def test_rendezvous_hash_roughly_fractional_change(): first_choices = range(10) second_choices = range(9) test_keys = [str(x) for x in range(10000)] first_results = [ marathon_tools.rendezvous_hash(first_choices, k) for k in test_keys ] second_results = [ marathon_tools.rendezv...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def truncateto(self, commandnumber):\n keytuples = self.pvalues.keys()\n allkeys = sorted(keytuples, key=lambda keytuple: keytuple[0])\n # Sanity checking\n lastkey = allkeys[0][0]\n candelete = True\n for (cmdno,proposal) in allkeys:\n if cmdno == lastkey:\n ...
[ "0.6069215", "0.60325634", "0.59787893", "0.5949097", "0.58923924", "0.573072", "0.56973803", "0.56719327", "0.5616844", "0.5563222", "0.55309904", "0.55171895", "0.5499191", "0.54319483", "0.5431215", "0.5413914", "0.5403386", "0.53861636", "0.53794116", "0.53793514", "0.536...
0.0
-1
Calculate the min number of refills to reach 'distance'. You start with a full tank.
def compute_min_refills(distance: int, tank: int, stops: List[int]): location: int = 0 n_stops = 0 last_stop = 0 max_drive = location + tank while max_drive < distance: counter = 0 # Handle the case that stops are depleted before we reach distance if len(stops) == 0: ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def compute_min_refills(distance, tank, stops):\n\n num_refills = 0\n current_refill = 0\n\n all_stops = []\n all_stops.append(0)\n for stop in stops:\n \tall_stops.append(stop)\n all_stops.append(distance)\n\n num_stops = len(all_stops)\n\n while current_refill < num_stops:\n \tlast_...
[ "0.8302289", "0.6621628", "0.645036", "0.6314632", "0.6262712", "0.61484736", "0.60885", "0.603681", "0.59223753", "0.58915836", "0.5876038", "0.5869804", "0.58609515", "0.58593315", "0.58491707", "0.5815681", "0.5806749", "0.57953686", "0.57733434", "0.5757533", "0.57270324"...
0.7821262
1
Given the positions of a list of the indices, create a unique key to register the position.
def placementKey( geo): def diagcmp( xyA, xyB): """ Compare two positions based on x + y. If x + y is the same for the two, compare based on x. """ return cmp(xyA[0] + xyA[1], xyB[0] + xyB[1]) or cmp(xyA[0], xyB[0]) sorted = [ tuple(geo[i]) for i in xrange(geo.shape[0]) ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_new_key(idx, key, d):\n\n new_key = \"%s_%d\" % (key, idx)\n if new_key in d:\n return make_new_key(idx + 1, key, d)\n return new_key", "def _make_key(self):\n all_position_values = (chromosome_sort_key(self.chromosome), self.min_position, self.max_position, \n ...
[ "0.6364943", "0.59690577", "0.58550936", "0.58094066", "0.5801118", "0.5798134", "0.5770239", "0.57411945", "0.57236964", "0.5721956", "0.56699497", "0.56316316", "0.5616401", "0.5541196", "0.55281365", "0.5510795", "0.5509949", "0.54963547", "0.5487912", "0.5483186", "0.5469...
0.53552693
35
Compare two positions based on x + y. If x + y is the same for the two, compare based on x.
def diagcmp( xyA, xyB): return cmp(xyA[0] + xyA[1], xyB[0] + xyB[1]) or cmp(xyA[0], xyB[0])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cmp(x, y):\n if x + y > y + x: return 1\n elif x + y == y + x: return 0\n else: return -1", "def same(self, x: int, y: int):\n\n return self.find(x) == self.find(y)", "def position_equal(self, a, b):\n return None", "def cmp_position(self, other):\n if se...
[ "0.723032", "0.7027254", "0.69411623", "0.6835873", "0.6783057", "0.6730236", "0.67250097", "0.6706741", "0.66703665", "0.65392756", "0.65288377", "0.65262926", "0.652476", "0.6507796", "0.6501238", "0.64775836", "0.6462285", "0.6461377", "0.64430475", "0.6422668", "0.637349"...
0.6058523
49
For the given geometry, construct the symmetry and nondegenerate operations associated with the piece.
def __init__( self, geo, index=None): # the column vector self.geo[:,i] gives the i'th vertex's positions self.geo = np.array( geo, dtype=int) self.findNondegeneratePlacements() self.id = index
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sym_elements(self):\n def trans(name, *sym):\n t = Element.TRANSFORMS\n n = name.split('_')\n\n for x in sym:\n n[-1] = t[x][n[-1]]\n\n return '_'.join(n)\n\n def primary():\n e = self.copy()\n e.name = '{}_p'.format...
[ "0.5479113", "0.5462708", "0.54347366", "0.53193223", "0.53082114", "0.51953447", "0.5145413", "0.51347935", "0.508981", "0.5083603", "0.50094604", "0.5006806", "0.49864545", "0.49627787", "0.4931252", "0.49236828", "0.4907775", "0.49058902", "0.4894753", "0.48798862", "0.487...
0.0
-1
Place the vertex v at position, and apply transformation T. Return the grid points that are occupied by the piece.
def place( self, position, v, T): geo = (self.geo - self.geo[v]).dot( T) return position + geo
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def translate(self, v):\n return Position.fromnp(translate(self.tonp(), v))", "def project_vector(u, v):\n u_np = np.array([u.get_x(), u.get_y()])\n v_np = np.array([v.get_x(), v.get_y()])\n proj = (np.dot(u_np, v_np) / np.dot(v_np, v_np)) * v_np\n return Point(proj[0], proj[1])", "def trans...
[ "0.61589414", "0.5887289", "0.56152284", "0.55743974", "0.5553705", "0.55252033", "0.55161303", "0.54971194", "0.5430372", "0.54015726", "0.5349081", "0.5345369", "0.53373307", "0.53254366", "0.5311833", "0.5305778", "0.5302551", "0.53003585", "0.52812153", "0.52796966", "0.5...
0.65050745
0
Generate all nondegenerate placements, with one of the vertices placed at (0,0). Return the placements as [ (v, T) ], where v is the vertex to be placed at (0,0), and T the 2x2 transformation matrix that place the piece according to self.geo[v] + T.dot(self.geo self.geo[v])
def findNondegeneratePlacements( self): # Rotate counterclockwise by 90 degrees around the v'th vertex. r90 = np.array( [ [0,1], [-1,0] ], dtype=int) # Flip the piece along the vertical axis through the v'th vertex. fv = np.array( [ [1,0], [0,-1] ], dtype=int) self.placements = ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_all_locations(grid, shape):", "def test_create_new_placements(self):\n subv = SimpleMachineVertex(None, \"\")\n pl = Placement(subv, 0, 0, 1)\n Placements([pl])", "def generate_nearby_cells(self):\n for y in range(len(self.island_map)):\n for x in range(len(s...
[ "0.6300398", "0.59959567", "0.5966825", "0.5925378", "0.58771193", "0.5777176", "0.57712615", "0.57712615", "0.5769345", "0.5725288", "0.5673758", "0.5602161", "0.56021327", "0.56002945", "0.5596927", "0.5585428", "0.5523877", "0.5508298", "0.5461161", "0.546076", "0.5455531"...
0.7902054
0
Construct a Lonpos state, with the given board and pieces. The occupation array indicates which points of the board are occupied, and by what pieces. It is specified by [ (p, i) ], which indicate point p of the board is occupied by i'th piece. p can be either the index or the 2d coordinates of the point.
def __init__( self, board, occupation=[]): self.board = board if (occupation): if (isinstance(occupation[0][0], int)): self.occupation = dict(occupation) elif (isinstance(occupation[0][0], tuple) and len(occupation[0][0])==2): try: ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, params):\n position = recordtype(\"position\", [\"x\", \"y\", \"kind\"])\n coordinate = recordtype(\"coordinate\", [\"x\", \"y\"])\n\n self.width = params[\"m\"]\n self.height = params[\"n\"]\n self.count = 0\n self.pieces = params[\"pieces\"]\n\n ...
[ "0.6446085", "0.63221157", "0.6167907", "0.6051009", "0.6044088", "0.6041356", "0.5958236", "0.5945604", "0.5903196", "0.589366", "0.5844264", "0.5821776", "0.5812927", "0.5807781", "0.57963544", "0.5786014", "0.5778081", "0.5771436", "0.5771375", "0.5727038", "0.56882375", ...
0.6202576
2
Use ASCII to illustrate the state of the Lonpos.
def show( self): def symbol( i): return i<0 and (i==-2 and ' ' or '0') or chr(ord('a') + i) X, Y = np.max( self.board.positions, 0) # -2 to indicate outside board. display = np.zeros( (X+1,Y+1), dtype=int) - 2 for x, y in self.board.positions: di...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __repr__(self):\n s = \" position:\" + str(self.pos) + \"\\n\"\n s += \" heading: \" + str(self.heading) + \"\\n\"\n return s", "def __str__(self):\n tapeline = self.tape.format(\n self.index - 10, self.index + 11) + ' : state {}'.format(self.state)\n pointline...
[ "0.6132644", "0.612283", "0.60547996", "0.60443765", "0.5874516", "0.5855971", "0.5843751", "0.5838602", "0.5808942", "0.58021694", "0.57527065", "0.57486653", "0.57406795", "0.5740448", "0.570328", "0.56801385", "0.56685424", "0.5661247", "0.56583273", "0.5644304", "0.564411...
0.5684829
15
Count unoccupied neighbors of a point.
def countFreeNeighbors( p, board, occupation): n = 0 for m in [0, 1]: for d in [-1, 1]: pn = [p[0], p[1]] pn[m] += d j = board.grids.get( tuple(pn), None) if (j is None): continue # Not a board point if (occupation.has_key( j)): continue # Occu...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def count_neighboors(self, x: int, y: int) -> int :\n\n cpt : int = 0\n min_x : int = max(0, x - 1)\n max_x : int = min(x + 1, self.width-1)\n min_y : int = max(0, y - 1)\n max_y : int = min(y + 1, self.height-1)\n\n x_tmp : int\n y_tmp : int\n for x_tmp in r...
[ "0.7307373", "0.7237684", "0.7203775", "0.71407646", "0.6955501", "0.68983823", "0.6883159", "0.6826486", "0.6824896", "0.6804748", "0.6788591", "0.67754936", "0.67619663", "0.67619663", "0.67333233", "0.6709194", "0.66639596", "0.66181695", "0.657233", "0.65499747", "0.65250...
0.7302581
1
Find unoccupied positions on the board.
def findUnoccupied(board, occupation):
    """Return indices of board positions not present in ``occupation``.

    Args:
      board: object whose ``positions`` sequence lists all grid points.
      occupation: dict mapping occupied position index -> piece id.

    Returns:
      List of unoccupied position indices, in ascending order.
    """
    # `xrange` and `dict.has_key` are Python 2 only; use range and `in`.
    return [j for j in range(len(board.positions)) if j not in occupation]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def free_positions(self):\n positions = []\n for i in range(self.grid_size):\n for j in range(self.grid_size):\n if self.grid[i][j] == 0:\n positions.append((i, j))\n if positions == []:\n raise GameException('Game Over. No free position ...
[ "0.74757147", "0.73623365", "0.7305674", "0.72659814", "0.7136726", "0.6983209", "0.6902076", "0.69016534", "0.68626094", "0.6858928", "0.67981094", "0.67022055", "0.66423976", "0.65927297", "0.6580229", "0.6564762", "0.65628403", "0.6559673", "0.654521", "0.65293694", "0.652...
0.77521825
0
Use a depth-first search to solve the Lonpos puzzle.
def solve( board, pieces, occupation): from heapq import heappush, heappop unoccupied = findUnoccupied( board, occupation) remainingpieces = range(len(pieces)) searchq = [] nbacktrack = 0 while (unoccupied): nnheap = [] # As a heuristic, we choose to first place pieces on poi...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def depthFirstSearch(problem):\n \"*** YOUR CODE HERE ***\"\n util.raiseNotDefined()", "def depthFirstSearch(problem):\n \"*** YOUR CODE HERE ***\"\n \n util.raiseNotDefined()", "def depthFirstSearch(problem):\n \"*** YOUR CODE HERE ***\"\n\n #Creamos las estructuras de datos necesarias (stack y...
[ "0.70110327", "0.6945343", "0.67388815", "0.6720618", "0.6668133", "0.66212654", "0.65762824", "0.65408486", "0.65394676", "0.65213966", "0.65150017", "0.6503912", "0.64855695", "0.64802074", "0.64769286", "0.6470355", "0.64611316", "0.6455134", "0.64195275", "0.6398742", "0....
0.6093073
37
Returns a QuerySet containing only available instances (i.e. not selected previously)
def available(self, include_qs = None, include_obj = None): qs = self.all() available_qs = self.all() for obj in qs: if include_qs: if not obj.available and include_qs.filter(id=obj.id).count() == 0: available_qs = available_qs.exclude(id=obj.id) ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_queryset(self):\n return NoneToEmptyQuerySet(self.model, using=self._db)", "def get_queryset(self):\n # the filter says that it only returns those w/ the pub_date\n # less or equal to timezone.now() (earlier or now)\n questions = Question.objects.filter(\n pub_date_...
[ "0.6884471", "0.68695015", "0.67739236", "0.67603797", "0.671835", "0.65459937", "0.6483099", "0.64685464", "0.64685464", "0.6414166", "0.6411734", "0.6331146", "0.6330862", "0.6273209", "0.6255611", "0.6248718", "0.62450546", "0.6231552", "0.6221019", "0.6218501", "0.6209835...
0.6682215
5
Determines whether the model instance has already been selected in a related field (ManyToManyField, OneToOneField).
def available(self): fields = self._meta.get_fields() for field in fields: if isinstance(field, models.ManyToManyRel): attr = field.get_accessor_name() if getattr(self, attr).count() > 0: return False elif isinstance(field, m...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def relation_exists(cls, model):\n return bool(cls.get_related_field(model)\n or cls.get_reverse_related_field(model))", "def isRelated(self):\n return len(self.user_storage.all()) > 0", "def has_field(self, field):\n return field in self.extra_fields", "def contains(s...
[ "0.6742698", "0.6351571", "0.6249839", "0.6177484", "0.594861", "0.58511186", "0.58348316", "0.5823608", "0.5715787", "0.56835854", "0.5671056", "0.5618836", "0.5598657", "0.5598657", "0.55803764", "0.55689776", "0.5560058", "0.55562407", "0.551438", "0.55038196", "0.54985", ...
0.66686696
1
outputs the A vector
def getA(self):
    """Return the fitted parameter vector theta.

    Simple accessor; ``self.theta`` is presumably set by the fitting
    routine elsewhere in the class (not visible here).
    """
    return self.theta
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def A(self):\n return self._representation_vector[1:]", "def return_vec(self) :\r\n y_vec = np.concatenate((self.x_vec,self.v_vec))\r\n return y_vec", "def print_vector(self):\n print self.x, self.y, self.z", "def v(self) -> np.ndarray:\n return self.A[1:] if self.scalar_ve...
[ "0.6733412", "0.673054", "0.6519931", "0.64849705", "0.64376384", "0.6289511", "0.62315416", "0.62300235", "0.62070984", "0.61930376", "0.61860853", "0.61663204", "0.61269957", "0.609983", "0.6096781", "0.60403943", "0.60208887", "0.5975669", "0.5926071", "0.5925174", "0.5898...
0.0
-1
outputs the noise or bias
def getB(self):
    """Return the stored error term (the noise/bias component).

    Simple accessor; ``self.error`` is set elsewhere in the class.
    """
    return self.error
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def noise(self, stddev):\n #add noise to weights\n pass", "def noise(self, freq: int, /) -> None:", "def add_noise(self, data):", "def noisePreset() :\n s.noisePreset()", "def noiseReduction(self):\n pass", "def white_noise():\n return random.randint(-32767, 32767)", "def noi...
[ "0.72268444", "0.6808519", "0.67288476", "0.6671144", "0.6670958", "0.6571604", "0.6559497", "0.65078646", "0.6505491", "0.64908093", "0.6452366", "0.63892174", "0.6370349", "0.6341129", "0.63222975", "0.6292225", "0.62755954", "0.6267123", "0.6263561", "0.6238834", "0.620964...
0.0
-1
outputs the covariance matrix
def getCovarianceMatrix(self): #ypost = np.dot ( self.getA().T, self.priorX ) theta = np.mat ( self.getA() ) Xm = np.mat ( self.priorX ) ypost = Xm * theta yprior = self.priorY error = ypost - yprior #error = error - np.mean ( error, axis = 0 ) return np...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def covariance_matrix(self):\n\n self._order_observations()\n self.cov_matrix = self._compute_covariance_matrix(\n self.list_observations, self.list_observations)\n\n self.cov_matrix += np.diag(np.array([self.noise] * self.n_observation))\n\n return self.cov_matrix", "def c...
[ "0.7898988", "0.7637602", "0.7582342", "0.74682164", "0.73953617", "0.7171067", "0.71467644", "0.6950389", "0.6944435", "0.6932731", "0.6891629", "0.68414915", "0.68384755", "0.68313056", "0.68222475", "0.67440724", "0.6723901", "0.67163885", "0.6713542", "0.6699285", "0.6695...
0.7279606
5
outputs the noise covariance matrix, R
def getCovarianceNoiseMatrix(self):
    """Return the noise covariance matrix R = B^T B, with B = self.getB()."""
    residuals = self.getB()
    return np.dot(residuals.T, residuals)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def covariance_matrix(self):\n\n self._order_observations()\n self.cov_matrix = self._compute_covariance_matrix(\n self.list_observations, self.list_observations)\n\n self.cov_matrix += np.diag(np.array([self.noise] * self.n_observation))\n\n return self.cov_matrix", "def c...
[ "0.71300656", "0.66976005", "0.6629202", "0.6629202", "0.6629202", "0.6622315", "0.6622108", "0.6622073", "0.6570352", "0.6554128", "0.6520113", "0.64823085", "0.64064324", "0.6393191", "0.6317923", "0.62303", "0.6217225", "0.6212507", "0.6195888", "0.6166094", "0.612643", ...
0.7025741
1
This will check whether the entires are pairwise close enough (within tol)
def CompareMatrices(mat1, mat2, tol):
    """Return True when the matrices are element-wise equal within ``tol``.

    Compares corresponding entries of ``mat1`` and ``mat2`` and fails as
    soon as any pair differs by more than ``tol``.

    Fix: the original indexed columns with ``range(len(mat1))``, which
    assumed a square matrix; iterating the rows themselves handles
    rectangular input correctly.
    """
    for row1, row2 in zip(mat1, mat2):
        for a, b in zip(row1, row2):
            if abs(a - b) > tol:
                return False
    return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_symmetric(adj : np.ndarray, tol : float = 1e-8) -> bool:\n return np.all(np.abs(adj-adj.T) < tol)", "def check_restraint_pairs_for_doubles(list): # Also consider that a1 and a2 can be switches\r\n for i in range(len(list) - 1):\r\n for j in range(i + 1, len(list)):\r\n if (list...
[ "0.6715366", "0.65444267", "0.65051025", "0.64619166", "0.63805026", "0.6331604", "0.6313639", "0.6308023", "0.62470406", "0.61799294", "0.61504745", "0.6149808", "0.6135852", "0.6134906", "0.61283964", "0.6122861", "0.6102767", "0.6097179", "0.60936743", "0.60933644", "0.608...
0.0
-1
Takes a list of tuples, and each element is compared to the next one Any tuple that changes has the index of it returned
def ContinuousCompare(lst, tol): changing_indices = set() last_tup = None # iterate over all the tuples for i in range(len(lst)): # if it's the first entry, we just want to assign it and move onto the # next iteration if i == 0: last_tup = lst[i] continue ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def task9_find_before_tuple(lst):\n result = 0\n for elem in lst:\n if isinstance(elem, tuple):\n result = lst[lst.index(elem) - 1]\n break\n return result", "def innerloop(the_list):\n for index in range(len(the_list)-1):\n if the_list[index] > the_list[index+1]:\...
[ "0.69720376", "0.67319834", "0.6196822", "0.6108386", "0.60973054", "0.5961634", "0.58837163", "0.58421284", "0.57686746", "0.5747028", "0.5688401", "0.56793284", "0.5658989", "0.56409854", "0.56406134", "0.56301296", "0.56054074", "0.5603956", "0.5601578", "0.55650455", "0.5...
0.629942
2
Determine if the object has a parent with the supplied name.
def has_parent(obj, parent_name):
    """Return True if ``obj`` has an ancestor named ``parent_name``.

    Walks up the parent chain. Stops (returning False) as soon as an
    ancestor with a ``None`` name is encountered, matching the original
    recursive contract.
    """
    current = obj.parent
    while current is not None:
        if current.name is None:
            return False
        if current.name == parent_name:
            return True
        current = current.parent
    return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def has_parent(self):\n return self.parent != None", "def has_parent(self):\n return self._parent_ is not None", "def _contains_in_self_or_parent(self, name: str) -> bool:\n return name in self", "def is_parent(self):\n if self.parent is not None:\n return False\n ...
[ "0.7545343", "0.7435591", "0.73406065", "0.7337848", "0.7336612", "0.7255639", "0.7117234", "0.7116028", "0.70370907", "0.6815638", "0.6784905", "0.67783093", "0.6720416", "0.6667514", "0.6614133", "0.6522801", "0.6410729", "0.640435", "0.63520426", "0.6320416", "0.6236038", ...
0.8476002
0
The event triggered when an error is raised while invoking a command.
async def on_command_error(self, ctx, error): if hasattr(ctx.command, 'on_error'): return ignored = (commands.CommandNotFound, commands.UserInputError) error = getattr(error, 'original', error) try: if isinstance(error, ignored): return ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def on_command_error(self, ctx, error):\n\n if hasattr(ctx.command, 'on_error'):\n return\n\n error = getattr(error, 'original', error)\n\n if isinstance(error, commands.MissingRequiredArgument):\n LOG.error(f\"Missing argument in command {ctx.command}\")\n ...
[ "0.7980478", "0.7709615", "0.76937354", "0.7669524", "0.7622439", "0.7463245", "0.7443354", "0.7433384", "0.7432164", "0.7427173", "0.74214035", "0.74195987", "0.72977245", "0.72538507", "0.7153273", "0.714725", "0.6872951", "0.681055", "0.68062097", "0.6620473", "0.66178834"...
0.72716826
13
will simulation PARALLEL_UNIVERSES_COUNT universes then, will return the overall multiverse survival of the player
def compute_player_score(): progress_bar = ProgressBar(label="Computing universes") survivals_count = 0 for i in range(PARALLEL_UNIVERSES_COUNT): if simulate_universe(): survivals_count += 1 progress_bar.set_progression((i + 1) / PARALLEL_UNIVERSES_COUNT) progress_bar.end(...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def compute_god_score():\n\n survivals_count = 0\n for _ in range(PARALLEL_UNIVERSES_COUNT):\n best_survival = random.uniform(MIN_DISEASE_SURVIVAL, MAX_DISEASE_SURVIVAL)\n for _ in range(random.randint(MIN_TREATMENTS_COUNT, MAX_TREATMENTS_COUNT)):\n treated_survival = random.uniform(...
[ "0.66658336", "0.5772919", "0.57570714", "0.5285743", "0.52517307", "0.51297843", "0.5128983", "0.51264876", "0.50322986", "0.50084907", "0.4935301", "0.49063638", "0.48907402", "0.48814934", "0.48217684", "0.48150674", "0.48144037", "0.48021117", "0.47568566", "0.4754652", "...
0.73624575
0
simulates a universe and uses playground.choose_trial to take a decision return true in cas of survival in the simulated universe
def simulate_universe(): # untreated_survival is the probability to survive if not treated # this is an exact law of the universe, the player will not have this information untreated_survival = random.uniform(MIN_DISEASE_SURVIVAL, MAX_DISEASE_SURVIVAL) trials: list[Trial] = [] treated_survivals: ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_run_sim_1():\n rnd = rand.Arrivals(36, 41)\n sim.run_sim(3, 2, 5, 6, 22, rnd)", "def run_trial():\n env = gym.make('CartPole-v0')\n obs_dim = env.observation_space.shape[0]\n n_actions = env.action_space.n\n\n qnet = QNet(obs_dim, n_actions)\n agent = Sarsa(qnet, n_actions, 0.99, 1....
[ "0.6240639", "0.6103099", "0.60925263", "0.5984078", "0.5937608", "0.5916742", "0.5768528", "0.5740173", "0.5682504", "0.5681896", "0.56587714", "0.56468713", "0.5616107", "0.56043226", "0.5596143", "0.5585067", "0.558289", "0.5577875", "0.5572243", "0.55598336", "0.55593103"...
0.799685
0
Does the same thing as the simulate_universes function but do not use playground.choose_trial. Instead of using trials, it takes a decision according to the real (secret) treatment survival. The obtained score is the score you obtain if you know all the variables and not only the trials facts
def compute_god_score(): survivals_count = 0 for _ in range(PARALLEL_UNIVERSES_COUNT): best_survival = random.uniform(MIN_DISEASE_SURVIVAL, MAX_DISEASE_SURVIVAL) for _ in range(random.randint(MIN_TREATMENTS_COUNT, MAX_TREATMENTS_COUNT)): treated_survival = random.uniform(MIN_TREATED...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def simulate_universe():\n\n # untreated_survival is the probability to survive if not treated\n # this is an exact law of the universe, the player will not have this information\n untreated_survival = random.uniform(MIN_DISEASE_SURVIVAL, MAX_DISEASE_SURVIVAL)\n\n trials: list[Trial] = []\n\n treate...
[ "0.7318751", "0.63951796", "0.609098", "0.593011", "0.58897114", "0.5845657", "0.583531", "0.58247733", "0.5823327", "0.5815546", "0.57771176", "0.5770173", "0.5762508", "0.57173645", "0.5677805", "0.56669015", "0.56631994", "0.5633992", "0.5622041", "0.5611543", "0.5583957",...
0.5252823
47
Method gets user credential from storage JSON file If credential are not in storage or are invalid, gets new credentials If stored credential are expired, refreshes them
def get_credentials(self, **kwargs): creds_file = os.path.join(kwargs['user_dir'], 'credentials.json') # Getting credentials from Storage store = file.Storage(creds_file) creds = store.get() # Validating or refreshing credentials, if necessary if creds is None or creds....
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _load_user_credentials(self, storage):\n # Set up a Flow object to be used if we need to authenticate.\n flow = client.flow_from_clientsecrets(\n self.client_secrets,\n scope=self.api_scopes,\n message=tools.message_if_missing(self.client_secrets))\n\n # Re...
[ "0.7427812", "0.7198125", "0.71123177", "0.7038576", "0.7036431", "0.7013569", "0.6985227", "0.69778216", "0.6926867", "0.6853255", "0.683662", "0.676247", "0.6717914", "0.6712703", "0.6711394", "0.6686284", "0.66799074", "0.66706616", "0.6668772", "0.66675913", "0.6653017", ...
0.70376235
4
Equal width bin, take a uniform distribution for the sample value range. Buckets include the right boundary, and exclude the left boundary. Namely, boundaries=[0., 1., 2.] generates buckets (inf, 0.], (0., 1.], (1., 2.], and (2., +inf)
def uniform(feature, bins):
    """Equal-width interior bin boundaries over the sample value range.

    Splits [feature.min(), feature.max()] into ``bins`` equal-width
    buckets and returns the ``bins - 1`` interior boundaries. Buckets
    include the right boundary and exclude the left one.

    Args:
      feature: pandas Series or numpy array of numeric values.
      bins: number of buckets.

    Returns:
      List of the ``bins - 1`` interior boundaries.
    """
    lo = feature.min()
    width = (feature.max() - lo) / bins
    # Fix: the original returned width*i without the `lo` offset, which
    # is only correct when the minimum value is 0.
    return [lo + width * i for i in range(1, bins)]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def all_bucket_boundaries(self):\n\n lower = self._lower_bounds[0]\n for i in xrange(1, self.total_buckets):\n upper = self._lower_bounds[i]\n yield (lower, upper)\n lower = upper\n\n yield (lower, float('Inf'))", "def bucket_boundaries(self, bucket):\n\n if bucket < 0 or bucket >= sel...
[ "0.6999874", "0.6858696", "0.67939454", "0.6425844", "0.64145726", "0.63430184", "0.6245302", "0.6230007", "0.6219566", "0.62046456", "0.618017", "0.61405003", "0.61358374", "0.61317116", "0.6131306", "0.6127178", "0.60743845", "0.6071009", "0.6056603", "0.6021089", "0.598096...
0.62419814
7
Equal frequency bin, take a uniform distribution of the sample size. Buckets include the right boundary, and exclude the left boundary. Namely, boundaries=[0., 1., 2.] generates buckets (inf, 0.], (0., 1.], (1., 2.], and (2., +inf).
def quantile(feature, bins):
    """Equal-frequency interior bin boundaries (``bins - 1`` cut points).

    Sorts the sample and picks every (len/bins)-th value so each bucket
    holds roughly the same number of observations. Buckets include the
    right boundary and exclude the left one.
    """
    ordered = feature.sort_values().values
    step = round(len(ordered) / bins)
    boundaries = []
    for k in range(1, bins):
        boundaries.append(ordered[step * k - 1])
    return boundaries
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_bins(self):\n min_val = 0\n max_val = 1\n buckets = 10\n values_per_bucket = 10\n\n import numpy\n\n data = list(numpy.linspace(min_val, max_val, buckets * values_per_bucket))\n bins = numpy.linspace(min_val, max_val + sys.float_info.epsilon, buckets + 1)\n...
[ "0.68158084", "0.6405568", "0.6296729", "0.62813383", "0.62101144", "0.61591256", "0.61568004", "0.6148516", "0.61107266", "0.60635036", "0.60612935", "0.60541433", "0.60541433", "0.60535467", "0.60273194", "0.5927287", "0.5926463", "0.59147674", "0.58974785", "0.5892863", "0...
0.0
-1
Probability grouping of category variables
def probability_categorical(feature, label): assert feature.nunique()>2, 'feature category nums must be greater than 2.' t = pd.DataFrame({'feature':feature, 'label':label}) cat = label.unique() cat = [(cat[i], cat[i+1]) for i in range(len(cat)-1)] prob = label.value_counts(1).to_dict() slope = ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ppf(self,x):\n return self.categoricalDist.ppf(x)", "def calc_priors(categories, data):\n counts = np.zeros(categories)\n for val in range(categories):\n counts[val] = np.count_nonzero(data.labels == val)\n return counts / len(data.labels)", "def categorical(pvals: np.ndarray) -> int:\n\...
[ "0.6292472", "0.6147138", "0.60119295", "0.59831697", "0.5894537", "0.5834089", "0.5826", "0.57258606", "0.5708978", "0.5685781", "0.56797826", "0.5673375", "0.5663211", "0.56225026", "0.55870885", "0.5568914", "0.55597067", "0.5559275", "0.5555052", "0.5546693", "0.55196595"...
0.70440626
0
Convert time_offsets to gps timestamps and nanoseconds
def get_gps_timestamp(file, time_offset):
    """Convert a time offset in ``file`` to (GPS timestamp, nanoseconds).

    Resolves the file's reference datetime, applies the offset, and
    converts the resulting absolute datetime to GPS time.
    """
    reference = get_reference_datetime(file)
    absolute = get_absolute_datetime(reference, time_offset)
    return datetime_to_gpstimestamp_nanoseconds(absolute)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def from_gps_time(self):\n reason = \"[!] GPS timestamps are 10 digits\"\n ts_type = self.ts_types['gpstime']\n try:\n if not len(self.gps) == 10 or not self.gps.isdigit():\n self.in_gpstime = indiv_output = combined_output = False\n pass\n e...
[ "0.6133533", "0.5981217", "0.5839412", "0.5720046", "0.5719699", "0.5716344", "0.5671996", "0.56716424", "0.56283784", "0.5613697", "0.55957836", "0.55948985", "0.55543983", "0.5532485", "0.55197525", "0.54900724", "0.54886615", "0.54784024", "0.5435569", "0.5418071", "0.5407...
0.674219
0
Convert datetime objects to GPS timestamp and nanoseconds
def datetime_to_gpstimestamp_nanoseconds(date):
    """Convert a datetime to a (GPS timestamp, nanoseconds) pair.

    Whole seconds are converted from UTC to GPS time via ``gpstime``;
    the sub-second part is returned separately as nanoseconds.
    """
    utc_seconds = calendar.timegm(date.utctimetuple())
    gps_seconds = gpstime.utc_to_gps(utc_seconds)
    return gps_seconds, date.microsecond * 1000
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def to_gps_time(self):\n ts_type = self.ts_types['gpstime']\n try:\n leapseconds = self.leapseconds\n check_date = duparser.parse(self.timestamp)\n if hasattr(check_date.tzinfo, '_offset'):\n dt_tz = check_date.tzinfo._offset.total_seconds()\n ...
[ "0.64110595", "0.6309177", "0.6237445", "0.61667824", "0.61475044", "0.61468357", "0.6086212", "0.606989", "0.6046954", "0.60396606", "0.6038311", "0.60068566", "0.59911877", "0.5953865", "0.59505874", "0.5947502", "0.5941145", "0.5941145", "0.5921453", "0.591261", "0.5876311...
0.7270666
0
Get the absolute time of the discharges
def get_absolute_datetime(reference, offset):
    """Return ``reference`` shifted forward by ``offset`` seconds."""
    return reference + datetime.timedelta(seconds=offset)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getChargeTime(self):\n return self.json_state.get(\"charging\").get(\"seconds_charging\")", "def get_ref_time(self):\n from datetime import datetime, timedelta\n\n ref_time = datetime(2010, 1, 1, 0, 0, 0)\n ref_time += timedelta(seconds=int(self.fid['/PRODUCT/time'][0]))\n ...
[ "0.6573679", "0.65164846", "0.6515013", "0.6418441", "0.6405775", "0.63922805", "0.6391688", "0.6354492", "0.6346032", "0.6239634", "0.62149274", "0.6191225", "0.6191107", "0.61854005", "0.6183254", "0.6182833", "0.61757195", "0.61671525", "0.6158372", "0.61397165", "0.610876...
0.0
-1
Get the reference datetime from the KNMI LGT file as datetime
def get_reference_datetime(file):
    """Read the reference datetime attribute from a KNMI LGT data file.

    Parses the 'reference_datetime' attribute of the ``discharge1`` node
    (a string like '01-Jan-2010;12:00:00.000000') into a datetime.
    """
    raw = file.root.discharge1._f_getAttr('reference_datetime')[0]
    return datetime.datetime.strptime(raw, '%d-%b-%Y;%H:%M:%S.%f')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_ref_time(self):\n from datetime import datetime, timedelta\n\n ref_time = datetime(2010, 1, 1, 0, 0, 0)\n ref_time += timedelta(seconds=int(self.fid['/PRODUCT/time'][0]))\n return ref_time", "def get_file_date(self, file: str) -> date:", "def extract_datetime(fpath):\n tr...
[ "0.6449726", "0.6254226", "0.6224529", "0.6155637", "0.6119671", "0.61154586", "0.6110254", "0.5968073", "0.59634435", "0.5960696", "0.59445274", "0.5924622", "0.59102046", "0.5850813", "0.57510024", "0.574005", "0.574005", "0.5737356", "0.57372004", "0.5716487", "0.5684267",...
0.7556686
0
Get all orders and find the time span. If an order has been prepared, the kitchen's name is stored in the order data.
def cook_order_list(request): all_orders = Order.objects.all().order_by("-id") css = CookStatus.objects.filter(cook_name=request.user) cs = None current_order = None if len(css) != 0: cs = css[0] if cs.current_order != None : current_order = cs.current_order.menu_items.all() new_orders = [] for order in ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def trackOrderRequest(self):\n\t\tstart_dat=datetime.today()\n\t\tstart_date = start_dat - timedelta( hours=start_dat.time().hour,minutes=start_dat.time().minute,seconds=start_dat.time().second ) \n\t\tend_date=start_dat\n\t\tans=None\n\t\t#print start_dat.time().hour\n\t\tprint end_date\n\t\tans=Order.objects.fil...
[ "0.67804915", "0.66184914", "0.60975254", "0.60764414", "0.6066185", "0.5877417", "0.58634293", "0.58248085", "0.57850134", "0.57413775", "0.5655956", "0.5629255", "0.55018866", "0.5500083", "0.54759413", "0.54736453", "0.54555213", "0.544849", "0.5419155", "0.53935933", "0.5...
0.5067255
46
Change the order's status to "cooking" for the order selected by its id.
def cook_order(request): order_id = request.GET.get('order_id', 0) cs , status = CookStatus.objects.get_or_create(cook_name=request.user) if cs.current_order is None: cs.current_order = Order.objects.get(id=order_id) cs.current_order.status = 'cooking' cs.current_order.tikchen = request.user.username cs.cur...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def order_ready(request):\n\tcs , status = CookStatus.objects.get_or_create(cook_name=request.user)\n\tif cs.current_order is not None:\n\t\tcs.current_order.status = 'ready-to-serve'\n\t\tcs.current_order.save()\n\t\tcs.current_order = None\n\t\tcs.save()\n\n\treturn HttpResponseRedirect(\"/staff/cook_order_list/...
[ "0.63635933", "0.586669", "0.5858225", "0.58000624", "0.56946445", "0.5581705", "0.55623066", "0.55381656", "0.5464021", "0.5398148", "0.53747994", "0.5373412", "0.53628695", "0.5324506", "0.5319295", "0.52873236", "0.5282698", "0.52594453", "0.5258599", "0.52334434", "0.5232...
0.7182565
0
Change the order's status to "ready-to-serve" for the order selected by its id.
def order_ready(request):
    """Mark the cook's current order 'ready-to-serve' and clear it.

    Looks up (or creates) the requesting cook's status record; if an
    order is assigned, flags it ready and detaches it from the cook.
    Redirects back to the cook's order list either way.
    """
    cook_status, _created = CookStatus.objects.get_or_create(cook_name=request.user)
    order = cook_status.current_order
    if order is not None:
        order.status = 'ready-to-serve'
        order.save()
        cook_status.current_order = None
        cook_status.save()
    return HttpResponseRedirect("/staff/cook_order_list/")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def update_order_status():\n symbol = App.config[\"symbol\"]\n\n # Get currently active order and id (if any)\n order = App.order\n order_id = order.get(\"orderId\", 0) if order else 0\n if not order_id:\n log.error(f\"Wrong state or use: check order status cannot find the order id.\")\...
[ "0.67920375", "0.670818", "0.66758204", "0.6641249", "0.65196955", "0.6437769", "0.6087314", "0.6031311", "0.5999321", "0.59882295", "0.5978718", "0.5966084", "0.59375423", "0.59296644", "0.59227234", "0.5900806", "0.58614755", "0.5841006", "0.5841006", "0.58058894", "0.57958...
0.68117625
0
Format trajectory into a list of tuples before they are stored in memory. Trajectory is list of (s,a,r,s,d) tuples
def formatTrajectory(self, trajectory):
    """Format a trajectory for storage in memory.

    Delegates to the underlying RL model; ``trajectory`` is a list of
    (s, a, r, s, d) tuples per the caller's convention.
    """
    return self.RLModel.formatTrajectory(trajectory)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_trajectory(path: str) -> Optional[List[Dict[str, tuple]]]:\n lines = _get_lines_from_file(path)\n\n ess_file = False\n if path.split('.')[-1] != 'xyz':\n try:\n log = ess_factory(fullpath=path, check_for_errors=False)\n ess_file = True\n except (InputError, RM...
[ "0.60951096", "0.6015128", "0.5985093", "0.5921288", "0.5903913", "0.5848871", "0.5752752", "0.5743873", "0.5734865", "0.5705948", "0.56735694", "0.5643993", "0.55531675", "0.5550036", "0.55357385", "0.5534374", "0.550818", "0.5483543", "0.5480718", "0.5441098", "0.5439881", ...
0.6233229
0
This function will be use to train model and save model for given training set.
def train(X_train, y_train, save_model='model.h5'): # Hyperparameters batch_size = 32 epochs = 30 learning_rate = 0.001 # Loading model from model.py model = m(input_height=IMAGE_HEIGHT, input_width=IMAGE_WIDTH) # Plot model as image plot_model(model, to_file='model_plot.p...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def trainAndSaveModels():\n print \"\\nTraining models...\"\n\n #Use the best-performed train and test splitted data \n X_train = pickle.load(open('X_train.sav','rb'))\n X_test = pickle.load(open('X_test.sav','rb'))\n Y_train = pickle.load(open('Y_train.sav','rb'))\n \n #train models\n lass...
[ "0.7705048", "0.74385786", "0.7426859", "0.7407801", "0.7267371", "0.7208689", "0.71939373", "0.71755517", "0.71754044", "0.7151909", "0.7151358", "0.7147768", "0.7145118", "0.7128991", "0.71117663", "0.7085029", "0.7073787", "0.7053388", "0.70399153", "0.70298266", "0.702379...
0.7325195
4
Base class for Python RL environments.
def __init__(self, handle_auto_reset: bool = False): self._handle_auto_reset = handle_auto_reset self._current_time_step = None common.assert_members_are_not_overridden( base_cls=PyEnvironment, instance=self, denylist=('reset', 'step') )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self): \n\t\n\t # get the environment\n\t\tself.env = env()", "def __init__(self, env):\n gym.Wrapper.__init__(self, env)", "def __init__(self, env):\n gym.Wrapper.__init__(self, env)", "def __init__(self, env):\n super().__init__(env)", "def __init__(self, env):\n ...
[ "0.68557495", "0.66981584", "0.66981584", "0.66636485", "0.66636485", "0.66636485", "0.6464115", "0.616451", "0.6071239", "0.5907203", "0.589397", "0.58922243", "0.58735794", "0.58711684", "0.58711684", "0.58711684", "0.58711684", "0.58711684", "0.58711684", "0.5841212", "0.5...
0.0
-1
Whether the environment is batched or not. If the environment supports batched observations and actions, then overwrite this property to True. A batched environment takes in a batched set of actions and returns a batched set of observations. This means for all numpy arrays in the input and output nested structures, the...
def batched(self) -> bool: return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def batch_size(self) -> Optional[int]:\n if self.batched:\n raise RuntimeError(\n 'Environment %s marked itself as batched but did not override the '\n 'batch_size property'\n % type(self)\n )\n return None", "def is_batch():\n\n pass", "def _global_batch_size(self...
[ "0.6829763", "0.63745177", "0.6135504", "0.6097213", "0.60270363", "0.59971696", "0.5886409", "0.5588769", "0.5547144", "0.55365115", "0.5511294", "0.55018145", "0.5477585", "0.5467263", "0.54394144", "0.5351245", "0.53225625", "0.53225625", "0.53225625", "0.53225625", "0.529...
0.6470478
1
The batch size of the environment.
def batch_size(self) -> Optional[int]: if self.batched: raise RuntimeError( 'Environment %s marked itself as batched but did not override the ' 'batch_size property' % type(self) ) return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def batch_size(self):\n return self.size", "def batch_size(self):\n return self._batch_size", "def batch_size(self):\n return self._batch_size", "def batch_size(self):\n return self._batch_size", "def batch_size(self):\n return self._batch_size", "def get_batch_size():\...
[ "0.8748392", "0.8526226", "0.8526226", "0.8526226", "0.8526226", "0.8332826", "0.82787216", "0.82280636", "0.8090263", "0.8073407", "0.80665404", "0.80136347", "0.8010651", "0.8002175", "0.8002175", "0.8002175", "0.8002175", "0.7821338", "0.7717743", "0.76162803", "0.75835013...
0.7726753
18
Whether the Environmet should reset given the current timestep. By default it only resets when all time_steps are `LAST`.
def should_reset(self, current_time_step: ts.TimeStep) -> bool: handle_auto_reset = getattr(self, '_handle_auto_reset', False) return handle_auto_reset and np.all(current_time_step.is_last())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reset_values(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"reset_values\")", "def reset():\n return True", "def reset_values(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"reset_values\")", "def reset(self, **kwargs):\n if self._backend_ag...
[ "0.6065845", "0.5846722", "0.5738438", "0.57173324", "0.5702698", "0.5700601", "0.56152225", "0.5585178", "0.55754685", "0.5555264", "0.5542493", "0.55404925", "0.5514392", "0.5394026", "0.5376499", "0.5355567", "0.53485197", "0.53262925", "0.5297183", "0.52819175", "0.528191...
0.8290577
0