Dataset columns:
sentence1: string (lengths 52 to 3.87M)
sentence2: string (lengths 1 to 47.2k)
label: string (1 class: entailment)
def unix_timestamp_to_datetime(unix_timestamp):
    """
    <Purpose>
        Convert 'unix_timestamp' (i.e., POSIX time, in UNIX_TIMESTAMP_SCHEMA
        format) to a datetime.datetime() object.  'unix_timestamp' is the
        number of seconds since the epoch (January 1, 1970.)

        >>> datetime_object = unix_timestamp_to_datetime(1445455680)
        >>> datetime_object
        datetime.datetime(2015, 10, 21, 19, 28)

    <Arguments>
        unix_timestamp:
            An integer representing the time (e.g., 1445455680).  Conformant
            to 'securesystemslib.formats.UNIX_TIMESTAMP_SCHEMA'.

    <Exceptions>
        securesystemslib.exceptions.FormatError, if 'unix_timestamp' is
        improperly formatted.

    <Side Effects>
        None.

    <Returns>
        A datetime.datetime() object corresponding to 'unix_timestamp'.
    """
    # Is 'unix_timestamp' properly formatted?
    # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch.
    securesystemslib.formats.UNIX_TIMESTAMP_SCHEMA.check_match(unix_timestamp)

    # Convert 'unix_timestamp' to a 'time.struct_time', in UTC.  The Daylight
    # Savings Time (DST) flag is set to zero.  datetime.fromtimestamp() is not
    # used because it returns a local datetime.
    struct_time = time.gmtime(unix_timestamp)

    # Extract the (year, month, day, hour, minutes, seconds) arguments for the
    # datetime object to be returned.
    datetime_object = datetime.datetime(*struct_time[:6])

    return datetime_object
<Purpose> Convert 'unix_timestamp' (i.e., POSIX time, in UNIX_TIMESTAMP_SCHEMA format) to a datetime.datetime() object. 'unix_timestamp' is the number of seconds since the epoch (January 1, 1970.) >>> datetime_object = unix_timestamp_to_datetime(1445455680) >>> datetime_object datetime.datetime(2015, 10, 21, 19, 28) <Arguments> unix_timestamp: An integer representing the time (e.g., 1445455680). Conformant to 'securesystemslib.formats.UNIX_TIMESTAMP_SCHEMA'. <Exceptions> securesystemslib.exceptions.FormatError, if 'unix_timestamp' is improperly formatted. <Side Effects> None. <Returns> A datetime.datetime() object corresponding to 'unix_timestamp'.
entailment
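A quick round-trip check of the conversion above, using only the standard library (the securesystemslib schema check is omitted since it is not needed for the arithmetic):

import calendar
import datetime
import time

# same core conversion as the function above, minus the schema check
dt = datetime.datetime(*time.gmtime(1445455680)[:6])
assert dt == datetime.datetime(2015, 10, 21, 19, 28)
# calendar.timegm() inverts time.gmtime(), recovering the POSIX timestamp
assert calendar.timegm(dt.utctimetuple()) == 1445455680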
def format_base64(data):
    """
    <Purpose>
        Return the base64 encoding of 'data' with whitespace and '=' signs
        omitted.

    <Arguments>
        data:
            Binary or buffer of data to convert.

    <Exceptions>
        securesystemslib.exceptions.FormatError, if the base64 encoding fails
        or the argument is invalid.

    <Side Effects>
        None.

    <Returns>
        A base64-encoded string.
    """
    try:
        return binascii.b2a_base64(data).decode('utf-8').rstrip('=\n ')
    except (TypeError, binascii.Error) as e:
        raise securesystemslib.exceptions.FormatError('Invalid base64'
                                                      ' encoding: ' + str(e))
<Purpose> Return the base64 encoding of 'data' with whitespace and '=' signs omitted. <Arguments> data: Binary or buffer of data to convert. <Exceptions> securesystemslib.exceptions.FormatError, if the base64 encoding fails or the argument is invalid. <Side Effects> None. <Returns> A base64-encoded string.
entailment
def parse_base64(base64_string):
    """
    <Purpose>
        Parse a base64 encoding with whitespace and '=' signs omitted.

    <Arguments>
        base64_string:
            A string holding a base64 value.

    <Exceptions>
        securesystemslib.exceptions.FormatError, if 'base64_string' cannot be
        parsed due to an invalid base64 encoding.

    <Side Effects>
        None.

    <Returns>
        A byte string representing the parsed base64 encoding of
        'base64_string'.
    """
    if not isinstance(base64_string, six.string_types):
        message = 'Invalid argument: ' + repr(base64_string)
        raise securesystemslib.exceptions.FormatError(message)

    extra = len(base64_string) % 4
    if extra:
        padding = '=' * (4 - extra)
        base64_string = base64_string + padding

    try:
        return binascii.a2b_base64(base64_string.encode('utf-8'))
    except (TypeError, binascii.Error) as e:
        raise securesystemslib.exceptions.FormatError('Invalid base64'
                                                      ' encoding: ' + str(e))
<Purpose> Parse a base64 encoding with whitespace and '=' signs omitted. <Arguments> base64_string: A string holding a base64 value. <Exceptions> securesystemslib.exceptions.FormatError, if 'base64_string' cannot be parsed due to an invalid base64 encoding. <Side Effects> None. <Returns> A byte string representing the parsed base64 encoding of 'base64_string'.
entailment
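The two base64 helpers above are inverses; a minimal round-trip sketch with the same padding convention (standalone copies, so the securesystemslib error wrapping is dropped):

import binascii

def b64_format(data):
    # strip the trailing '=' padding and the newline b2a_base64 appends
    return binascii.b2a_base64(data).decode('utf-8').rstrip('=\n ')

def b64_parse(base64_string):
    # restore the '=' padding that b64_format stripped
    extra = len(base64_string) % 4
    if extra:
        base64_string += '=' * (4 - extra)
    return binascii.a2b_base64(base64_string.encode('utf-8'))

encoded = b64_format(b'hello world')
assert encoded == 'aGVsbG8gd29ybGQ'  # no '=' padding
assert b64_parse(encoded) == b'hello world'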
def encode_canonical(object, output_function=None):
    """
    <Purpose>
        Encode 'object' in canonical JSON form, as specified at
        http://wiki.laptop.org/go/Canonical_JSON .  It's a restricted dialect
        of JSON in which keys are always lexically sorted, there is no
        whitespace, floats aren't allowed, and only quote and backslash get
        escaped.  The result is encoded in UTF-8, and the resulting bits are
        passed to output_function (if provided), or joined into a string and
        returned.

        Note: This function should be called prior to computing the hash or
        signature of a JSON object in TUF.  For example, generating a
        signature of a signing role object such as 'ROOT_SCHEMA' is required
        to ensure repeatable hashes are generated across different json module
        versions and platforms.  Code elsewhere is free to dump JSON objects
        in any format they wish (e.g., utilizing indentation and single quotes
        around object keys).  These objects are only required to be in
        "canonical JSON" format when their hashes or signatures are needed.

        >>> encode_canonical("")
        '""'
        >>> encode_canonical([1, 2, 3])
        '[1,2,3]'
        >>> encode_canonical([])
        '[]'
        >>> encode_canonical({"A": [99]})
        '{"A":[99]}'
        >>> encode_canonical({"x" : 3, "y" : 2})
        '{"x":3,"y":2}'

    <Arguments>
        object:
            The object to be encoded.

        output_function:
            The result will be passed as arguments to 'output_function'
            (e.g., output_function('result')).

    <Exceptions>
        securesystemslib.exceptions.FormatError, if 'object' cannot be
        encoded or 'output_function' is not callable.

    <Side Effects>
        The results are fed to 'output_function()' if 'output_function' is
        set.

    <Returns>
        A string representing the 'object' encoded in canonical JSON form.
    """
    result = None
    # If 'output_function' is unset, treat it as appending to a list.
    if output_function is None:
        result = []
        output_function = result.append

    try:
        _encode_canonical(object, output_function)
    except (TypeError, securesystemslib.exceptions.FormatError) as e:
        message = 'Could not encode ' + repr(object) + ': ' + str(e)
        raise securesystemslib.exceptions.FormatError(message)

    # Return the encoded 'object' as a string.
    # Note: Implies 'output_function' is None,
    # otherwise results are sent to 'output_function'.
    if result is not None:
        return ''.join(result)
<Purpose> Encode 'object' in canonical JSON form, as specified at http://wiki.laptop.org/go/Canonical_JSON . It's a restricted dialect of JSON in which keys are always lexically sorted, there is no whitespace, floats aren't allowed, and only quote and backslash get escaped. The result is encoded in UTF-8, and the resulting bits are passed to output_function (if provided), or joined into a string and returned. Note: This function should be called prior to computing the hash or signature of a JSON object in TUF. For example, generating a signature of a signing role object such as 'ROOT_SCHEMA' is required to ensure repeatable hashes are generated across different json module versions and platforms. Code elsewhere is free to dump JSON objects in any format they wish (e.g., utilizing indentation and single quotes around object keys). These objects are only required to be in "canonical JSON" format when their hashes or signatures are needed. >>> encode_canonical("") '""' >>> encode_canonical([1, 2, 3]) '[1,2,3]' >>> encode_canonical([]) '[]' >>> encode_canonical({"A": [99]}) '{"A":[99]}' >>> encode_canonical({"x" : 3, "y" : 2}) '{"x":3,"y":2}' <Arguments> object: The object to be encoded. output_function: The result will be passed as arguments to 'output_function' (e.g., output_function('result')). <Exceptions> securesystemslib.exceptions.FormatError, if 'object' cannot be encoded or 'output_function' is not callable. <Side Effects> The results are fed to 'output_function()' if 'output_function' is set. <Returns> A string representing the 'object' encoded in canonical JSON form.
entailment
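For intuition on why canonical encoding matters before hashing, here is a rough stdlib approximation (sorted keys, no whitespace). It is not byte-identical to the canonical-JSON dialect above for all inputs, e.g. it does not restrict string escaping, but it shows the reproducibility property:

import hashlib
import json

def approx_canonical(obj):
    # lexically sorted keys and no whitespace, like canonical JSON
    return json.dumps(obj, sort_keys=True, separators=(',', ':'))

a = {"x": 3, "y": 2}
b = {"y": 2, "x": 3}  # same content, different insertion order
assert approx_canonical(a) == approx_canonical(b) == '{"x":3,"y":2}'
digest = hashlib.sha256(approx_canonical(a).encode('utf-8')).hexdigest()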
def process_error_labels(value):
    """
    Process the error labels of a dependent variable 'value' to ensure
    uniqueness.
    """
    observed_error_labels = {}
    for error in value.get('errors', []):
        label = error.get('label', 'error')

        if label not in observed_error_labels:
            observed_error_labels[label] = 0
        observed_error_labels[label] += 1

        if observed_error_labels[label] > 1:
            error['label'] = label + '_' + str(observed_error_labels[label])

            # append "_1" to first error label that has a duplicate
            if observed_error_labels[label] == 2:
                for error1 in value.get('errors', []):
                    error1_label = error1.get('label', 'error')
                    if error1_label == label:
                        error1['label'] = label + "_1"
                        break
Process the error labels of a dependent variable 'value' to ensure uniqueness.
entailment
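A minimal demonstration of the dedup behavior of process_error_labels; the input shape is inferred from the code (a dict with an 'errors' list of dicts):

value = {'errors': [{'label': 'stat'}, {'label': 'stat'}, {'label': 'sys'}]}
process_error_labels(value)
# duplicated 'stat' labels become 'stat_1' and 'stat_2'; unique 'sys' stays
assert [e['label'] for e in value['errors']] == ['stat_1', 'stat_2', 'sys']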
def raw(text):
    """Returns a raw string representation of text"""
    new_string = ''
    for char in text:
        try:
            new_string += escape_dict[char]
        except KeyError:
            new_string += char
    return new_string
Returns a raw string representation of text
entailment
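raw() depends on a module-level escape_dict that is not shown above; a plausible definition and usage (the actual mapping in the source module may differ and is likely larger):

escape_dict = {'\n': '\\n', '\t': '\\t', '\r': '\\r'}  # assumed mapping

# each mapped control character is replaced by its two-character escape
assert raw('a\tb\n') == 'a\\tb\\n'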
def get_winfunc(libname, funcname, restype=None, argtypes=(), _libcache={}):
    """Retrieve a function from a library/DLL, and set the data types."""
    if libname not in _libcache:
        _libcache[libname] = windll.LoadLibrary(libname)
    func = getattr(_libcache[libname], funcname)
    func.argtypes = argtypes
    func.restype = restype
    return func
Retrieve a function from a library/DLL, and set the data types.
entailment
def WinMSGLoop():
    """Run the main windows message loop."""
    LPMSG = POINTER(MSG)
    LRESULT = c_ulong
    GetMessage = get_winfunc("user32", "GetMessageW", BOOL,
                             (LPMSG, HWND, UINT, UINT))
    TranslateMessage = get_winfunc("user32", "TranslateMessage", BOOL,
                                   (LPMSG,))
    # restype = LRESULT
    DispatchMessage = get_winfunc("user32", "DispatchMessageW", LRESULT,
                                  (LPMSG,))

    msg = MSG()
    lpmsg = byref(msg)
    while GetMessage(lpmsg, HWND(), 0, 0) > 0:
        TranslateMessage(lpmsg)
        DispatchMessage(lpmsg)
Run the main windows message loop.
entailment
def ConnectTo(self, appName, data=None):
    """Exceptional error is handled in zdde Init() method, so the exception
    must be re-raised"""
    global number_of_apps_communicating
    self.ddeServerName = appName
    try:
        # establish conversation
        self.ddec = DDEClient(self.ddeServerName, self.ddeClientName)
    except DDEError:
        raise
    else:
        number_of_apps_communicating += 1
Exceptional error is handled in zdde Init() method, so the exception must be re-raised
entailment
def Request(self, item, timeout=None):
    """Request DDE client; timeout in seconds.

    Note: handle the exception within this function.
    """
    if not timeout:
        timeout = self.ddetimeout
    try:
        # convert timeout into milliseconds
        reply = self.ddec.request(item, int(timeout * 1000))
    except DDEError:
        err_str = str(sys.exc_info()[1])
        error = err_str[err_str.find('err=') + 4:err_str.find('err=') + 10]
        if error == hex(DMLERR_DATAACKTIMEOUT):
            print("TIMEOUT REACHED. Please use a higher timeout.\n")
        # this is only evaluated in case of an error
        if (sys.version_info > (3, 0)):
            reply = b'-998'  # Timeout error value
        else:
            reply = '-998'  # Timeout error value
    return reply
Request DDE client; timeout in seconds. Note: handle the exception within this function.
entailment
def advise(self, item, stop=False):
    """Request updates when DDE data changes."""
    hszItem = DDE.CreateStringHandle(self._idInst, item, CP_WINUNICODE)
    hDdeData = DDE.ClientTransaction(LPBYTE(), 0, self._hConv, hszItem,
                                     CF_TEXT,
                                     XTYP_ADVSTOP if stop else XTYP_ADVSTART,
                                     TIMEOUT_ASYNC, LPDWORD())
    DDE.FreeStringHandle(self._idInst, hszItem)
    if not hDdeData:
        raise DDEError("Unable to %s advise" % ("stop" if stop else "start"),
                       self._idInst)
    DDE.FreeDataHandle(hDdeData)
Request updates when DDE data changes.
entailment
def execute(self, command):
    """Execute a DDE command."""
    pData = c_char_p(command)
    cbData = DWORD(len(command) + 1)
    hDdeData = DDE.ClientTransaction(pData, cbData, self._hConv, HSZ(),
                                     CF_TEXT, XTYP_EXECUTE, TIMEOUT_ASYNC,
                                     LPDWORD())
    if not hDdeData:
        raise DDEError("Unable to send command", self._idInst)
    DDE.FreeDataHandle(hDdeData)
Execute a DDE command.
entailment
def request(self, item, timeout=5000):
    """Request data from DDE service."""
    hszItem = DDE.CreateStringHandle(self._idInst, item, CP_WINUNICODE)
    pdwResult = DWORD(0)
    hDdeData = DDE.ClientTransaction(LPBYTE(), 0, self._hConv, hszItem,
                                     CF_TEXT, XTYP_REQUEST, timeout,
                                     byref(pdwResult))
    DDE.FreeStringHandle(self._idInst, hszItem)
    if not hDdeData:
        raise DDEError("Unable to request item", self._idInst)

    if timeout != TIMEOUT_ASYNC:
        pdwSize = DWORD(0)
        pData = DDE.AccessData(hDdeData, byref(pdwSize))
        if not pData:
            DDE.FreeDataHandle(hDdeData)
            raise DDEError("Unable to access data in request function",
                           self._idInst)
        DDE.UnaccessData(hDdeData)
    else:
        pData = None
    DDE.FreeDataHandle(hDdeData)
    return pData
Request data from DDE service.
entailment
def _callback(self, wType, uFmt, hConv, hsz1, hsz2, hDdeData,
              dwData1, dwData2):
    """DdeCallback callback function for processing Dynamic Data Exchange
    (DDE) transactions sent by DDEML in response to DDE events

    Parameters
    ----------
    wType    : transaction type (UINT)
    uFmt     : clipboard data format (UINT)
    hConv    : handle to conversation (HCONV)
    hsz1     : handle to string (HSZ)
    hsz2     : handle to string (HSZ)
    hDdeData : handle to global memory object (HDDEDATA)
    dwData1  : transaction-specific data (DWORD)
    dwData2  : transaction-specific data (DWORD)

    Returns
    -------
    ret : specific to the type of transaction (HDDEDATA)
    """
    if wType == XTYP_ADVDATA:
        # value of the data item has changed
        # [hsz1 = topic; hsz2 = item; hDdeData = data]
        dwSize = DWORD(0)
        pData = DDE.AccessData(hDdeData, byref(dwSize))
        if pData:
            item = create_string_buffer('\000' * 128)
            DDE.QueryString(self._idInst, hsz2, item, 128, CP_WINANSI)
            self.callback(pData, item.value)
            DDE.UnaccessData(hDdeData)
            return DDE_FACK
        else:
            print("Error: AccessData returned NULL! (err = %s)"
                  % (hex(DDE.GetLastError(self._idInst))))
    if wType == XTYP_DISCONNECT:
        print("Disconnect notification received from server")
    return 0
DdeCallback callback function for processing Dynamic Data Exchange (DDE) transactions sent by DDEML in response to DDE events Parameters ---------- wType : transaction type (UINT) uFmt : clipboard data format (UINT) hConv : handle to conversation (HCONV) hsz1 : handle to string (HSZ) hsz2 : handle to string (HSZ) hDdeData : handle to global memory object (HDDEDATA) dwData1 : transaction-specific data (DWORD) dwData2 : transaction-specific data (DWORD) Returns ------- ret : specific to the type of transaction (HDDEDATA)
entailment
def parse(self, data_in, *args, **kwargs):
    """
    :param data_in: path to submission.yaml
    :param args:
    :param kwargs:
    :raise ValueError:
    """
    if not os.path.exists(data_in):
        raise ValueError("File / Directory does not exist: %s" % data_in)

    if os.path.isdir(data_in):
        submission_filepath = os.path.join(data_in, 'submission.yaml')
        if not os.path.exists(submission_filepath):
            submission_filepath = os.path.join(data_in, 'submission.yml')
            if not os.path.exists(submission_filepath):
                raise ValueError("No submission file in %s" % data_in)
        data_in = submission_filepath

    # first validate submission file:
    with open(data_in, 'r') as submission_file:
        submission_data = list(yaml.load_all(submission_file, Loader=Loader))

    if len(submission_data) == 0:
        raise RuntimeError("Submission file (%s) is empty" % data_in)

    submission_file_validator = SubmissionFileValidator()
    if not submission_file_validator.validate(file_path=data_in,
                                              data=submission_data):
        raise RuntimeError(
            "Submission file (%s) did not pass validation: %s" %
            (data_in, self._pretty_print_errors(
                submission_file_validator.get_messages())))

    metadata = {}
    tables = []

    # validator for table data
    data_file_validator = DataFileValidator()
    index = 0

    for i in range(0, len(submission_data)):
        if not submission_data[i]:  # empty YAML document
            continue

        if 'data_file' not in submission_data[i]:
            # information about whole submission
            metadata = submission_data[i]
            continue

        table_filepath = os.path.join(os.path.dirname(data_in),
                                      submission_data[i]['data_file'])

        # check existence before opening; the original checked inside the
        # 'with' block and referenced 'table' before it was defined
        if not os.path.exists(table_filepath):
            raise ValueError("table file: %s does not exist" % table_filepath)

        with open(table_filepath, 'r') as table_file:
            table_data = yaml.load(table_file, Loader=Loader)

        if not data_file_validator.validate(data=table_data,
                                            file_path=table_filepath):
            raise RuntimeError(
                "Data file (%s) did not pass validation: %s" %
                (table_filepath, self._pretty_print_errors(
                    data_file_validator.get_messages())))

        index = index + 1
        table = Table(index=index, metadata=submission_data[i],
                      data=table_data)
        tables.append(table)

    return ParsedData(metadata, tables)
:param data_in: path to submission.yaml :param args: :param kwargs: :raise ValueError:
entailment
def generate_ai_request(
        predict_rows,
        req_dict=None,
        req_file=ANTINEX_PUBLISH_REQUEST_FILE,
        features=ANTINEX_FEATURES_TO_PROCESS,
        ignore_features=ANTINEX_IGNORE_FEATURES,
        sort_values=ANTINEX_SORT_VALUES,
        ml_type=ANTINEX_ML_TYPE,
        use_model_name=ANTINEX_USE_MODEL_NAME,
        predict_feature=ANTINEX_PREDICT_FEATURE,
        seed=ANTINEX_SEED,
        test_size=ANTINEX_TEST_SIZE,
        batch_size=ANTINEX_BATCH_SIZE,
        epochs=ANTINEX_EPOCHS,
        num_splits=ANTINEX_NUM_SPLITS,
        loss=ANTINEX_LOSS,
        optimizer=ANTINEX_OPTIMIZER,
        metrics=ANTINEX_METRICS,
        histories=ANTINEX_HISTORIES,
        filter_features_dict=FILTER_FEATURES_DICT,
        filter_features=FILTER_FEATURES,
        convert_enabled=ANTINEX_CONVERT_DATA,
        convert_to_type=ANTINEX_CONVERT_DATA_TYPE,
        include_failed_conversions=ANTINEX_INCLUDE_FAILED_CONVERSIONS,
        value_for_missing=ANTINEX_MISSING_VALUE,
        version=ANTINEX_VERSION,
        publish_to_core=ANTINEX_PUBLISH_TO_CORE,
        check_missing_predict_feature=ANTINEX_CHECK_MISSING_PREDICT,
        debug=ANTINEX_CLIENT_DEBUG):
    """generate_ai_request

    :param predict_rows: list of predict rows to build into the request
    :param req_dict: request dictionary to update - for long-running clients
    :param req_file: file holding a request dict to update - one-off tests
    :param features: features to process in the data
    :param ignore_features: features to ignore in the data (non-numerics)
    :param sort_values: optional - order rows for scaler normalization
    :param ml_type: machine learning type - classification/regression
    :param use_model_name: use a pre-trained model by name
    :param predict_feature: predict the values of this feature
    :param seed: seed for randomness reproducibility
    :param test_size: split train/test data
    :param batch_size: batch size for processing
    :param epochs: test epochs
    :param num_splits: test splits for cross validation
    :param loss: loss function
    :param optimizer: optimizer
    :param metrics: metrics to apply
    :param histories: historical values to test
    :param filter_features_dict: dictionary of features to use
    :param filter_features: list of features to use
    :param convert_to_type: convert predict_row values to scaler-ready values
    :param include_failed_conversions: should the predict rows include fails
    :param value_for_missing: set this value to any columns that are missing
    :param version: version of the API request
    :param publish_to_core: want to publish it to the core or the worker
    :param debug: log debug messages
    """
    status = NOT_SET
    err = "not-set"
    data = None

    if not ANTINEX_PUBLISH_ENABLED:
        log.info(("publish disabled ANTINEX_PUBLISH_ENABLED={}")
                 .format(ANTINEX_PUBLISH_ENABLED))
        status = DISABLED
        err = "disabled"
        return {
            "status": status,
            "error": err,
            "data": None}
    # stop if not enabled

    try:
        err = "checking number of predict rows"
        if len(predict_rows) == 0:
            err = "please provide a list of predict_rows"
            log.error(err)
            status = FAILED
            res = {
                "status": status,
                "error": err,
                "data": None}
            return res
        # stop if there's no new rows

        body = None
        if not req_dict:
            if os.path.exists(req_file):
                with open(req_file, "r") as f:
                    body = json.loads(f.read())
        else:
            body = copy.deepcopy(req_dict)
        # end of loading body from requested

        if not body:
            err = ("failed to load request body "
                   "req_dict={} req_file={}").format(
                       req_dict,
                       req_file)
            log.error(err)
            status = FAILED
            res = {
                "status": status,
                "error": err,
                "data": None}
            return res
        # if body is empty

        err = ("setting values rows={} body={} features={}").format(
            len(predict_rows),
            body,
            filter_features)
        if debug:
            log.info(err)

        use_predict_rows = []
        for r in predict_rows:
            new_row = {}
            for col in r:
                cur_value = r[col]
                if col in filter_features_dict:
                    if not cur_value:
                        cur_value = value_for_missing
                    if ANTINEX_CONVERT_DATA:
                        try:
                            if convert_to_type == "float":
                                new_row[col] = float(cur_value)
                            elif convert_to_type == "int":
                                new_row[col] = int(cur_value)
                        except Exception as e:
                            if include_failed_conversions:
                                new_row[col] = cur_value
                            else:
                                log.error(
                                    ("failed converting {}={} type={}")
                                    .format(
                                        col,
                                        cur_value,
                                        convert_to_type))
                        # if conversion failed
                    else:
                        new_row[col] = cur_value
                    # if not converting data
                # if the column is in the filtered features
            # for all columns in the row dictionary

            # make sure to fill in missing columns with a default
            for col in filter_features:
                if col not in new_row:
                    new_row[col] = value_for_missing

            if check_missing_predict_feature:
                if predict_feature not in new_row:
                    new_row[predict_feature] = value_for_missing

            use_predict_rows.append(new_row)
        # for all predict rows to convert and filter

        err = ("converted rows={} to use_rows={}").format(
            len(predict_rows),
            len(use_predict_rows))
        log.info(err)

        body["label"] = use_model_name
        body["predict_feature"] = predict_feature
        body["predict_rows"] = use_predict_rows
        body["publish_to_core"] = publish_to_core
        body["seed"] = seed
        body["test_size"] = test_size
        body["batch_size"] = batch_size
        body["epochs"] = epochs
        body["num_splits"] = num_splits
        body["loss"] = loss
        body["optimizer"] = optimizer
        body["metrics"] = metrics
        body["histories"] = histories
        body["ml_type"] = ml_type

        if sort_values:
            body["sort_values"] = sort_values
        if filter_features:
            body["features_to_process"] = filter_features
        if ignore_features:
            body["ignore_features"] = ignore_features

        data = body

        if debug:
            log.info(("req={}").format(ppj(data)))

        status = SUCCESS
        err = ""
    except Exception as e:
        log.error(("failed last_step='{}' with ex={}").format(err, e))
        status = ERROR
    # end of try/ex

    res = {
        "status": status,
        "error": err,
        "data": data}
    return res
generate_ai_request :param predict_rows: list of predict rows to build into the request :param req_dict: request dictionary to update - for long-running clients :param req_file: file holding a request dict to update - one-off tests :param features: features to process in the data :param ignore_features: features to ignore in the data (non-numerics) :param sort_values: optional - order rows for scaler normalization :param ml_type: machine learning type - classification/regression :param use_model_name: use a pre-trained model by name :param predict_feature: predict the values of this feature :param seed: seed for randomness reproducibility :param test_size: split train/test data :param batch_size: batch size for processing :param epochs: test epochs :param num_splits: test splits for cross validation :param loss: loss function :param optimizer: optimizer :param metrics: metrics to apply :param histories: historical values to test :param filter_features_dict: dictionary of features to use :param filter_features: list of features to use :param convert_to_type: convert predict_row values to scaler-ready values :param include_failed_conversions: should the predict rows include fails :param value_for_missing: set this value to any columns that are missing :param version: version of the API request :param publish_to_core: want to publish it to the core or the worker :param debug: log debug messages
entailment
def get_ml_job():
    """get_ml_job

    Get an ``MLJob`` by database id.
    """
    parser = argparse.ArgumentParser(
        description=("Python client get AI Job by ID"))
    parser.add_argument("-u", help="username", required=False, dest="user")
    parser.add_argument("-p", help="user password", required=False,
                        dest="password")
    parser.add_argument("-e", help="user email", required=False, dest="email")
    parser.add_argument("-a",
                        help="url endpoint with default http://localhost:8010",
                        required=False, dest="url")
    parser.add_argument("-i", help="User's MLJob.id to look up",
                        required=False, dest="job_id")
    parser.add_argument("-b",
                        help=("optional - path to CA bundle directory for "
                              "client encryption over HTTP"),
                        required=False, dest="ca_dir")
    parser.add_argument("-c",
                        help=("optional - path to x509 certificate for "
                              "client encryption over HTTP"),
                        required=False, dest="cert_file")
    parser.add_argument("-k",
                        help=("optional - path to x509 key file for "
                              "client encryption over HTTP"),
                        required=False, dest="key_file")
    parser.add_argument("-s", help="silent", required=False, dest="silent",
                        action="store_true")
    parser.add_argument("-d", help="debug", required=False, dest="debug",
                        action="store_true")
    args = parser.parse_args()

    user = ev("API_USER", "user-not-set")
    password = ev("API_PASSWORD", "password-not-set")
    email = ev("API_EMAIL", "email-not-set")
    url = ev("API_URL", "http://localhost:8010")
    job_id = ev("JOB_ID", "job_id-not-set")
    ca_dir = os.getenv("API_CA_BUNDLE_DIR", None)
    cert_file = os.getenv("API_CERT_FILE", None)
    key_file = os.getenv("API_KEY_FILE", None)
    verbose = bool(str(ev("API_VERBOSE", "true")).lower() == "true")
    debug = bool(str(ev("API_DEBUG", "false")).lower() == "true")

    if args.user:
        user = args.user
    if args.password:
        password = args.password
    if args.email:
        email = args.email
    if args.url:
        url = args.url
    if args.job_id:
        job_id = args.job_id
    if args.ca_dir:
        ca_dir = args.ca_dir
    if args.cert_file:
        cert_file = args.cert_file
    if args.key_file:
        key_file = args.key_file
    if args.silent:
        verbose = False
    if args.debug:
        debug = True

    usage = ("Please run with "
             "-u <username> "
             "-p <password> "
             "-a <AntiNex URL http://localhost:8010> "
             "-i <job_id> "
             "-b <optional - path to CA bundle directory> "
             "-c <optional - path to x509 ssl certificate file> "
             "-k <optional - path to x509 ssl key file>")

    valid = True
    if not user or user == "user-not-set":
        log.error("missing user")
        valid = False
    if not password or password == "password-not-set":
        log.error("missing password")
        valid = False
    if not job_id or job_id == "job_id-not-set":
        log.error("missing job_id")
        valid = False
    else:
        try:
            job_id = int(job_id)
        except Exception as e:
            log.error("please use -i <job_id with an integer>")
            valid = False
    if not valid:
        log.error(usage)
        sys.exit(1)

    if verbose:
        log.info(("creating client user={} url={} job_id={} "
                  "ca_dir={} cert_file={} key_file={}").format(
                      user, url, job_id, ca_dir, cert_file, key_file))

    client = AIClient(
        user=user,
        email=email,
        password=password,
        url=url,
        ca_dir=ca_dir,
        cert_file=cert_file,
        key_file=key_file,
        verbose=verbose,
        debug=debug)

    if verbose:
        log.info(("loading request in job_id={}").format(job_id))

    response = client.get_job_by_id(job_id=job_id)

    if response["status"] == SUCCESS:
        if debug:
            log.info(("got a job response={}").format(response["data"]))
    elif response["status"] == FAILED:
        log.error(("job failed with error='{}' with response={}").format(
            response["error"], response["data"]))
        sys.exit(1)
    elif response["status"] == ERROR:
        if "missing " in response["error"]:
            log.error(("Did not find a job with id={} for user={}").format(
                job_id, user))
        else:
            log.error(("job had an error='{}' with response={}").format(
                response["error"], response["data"]))
        sys.exit(1)
    elif response["status"] == LOGIN_FAILED:
        log.error(("job reported user was not able to log in "
                   "with an error='{}' with response={}").format(
                       response["error"], response["data"]))
        sys.exit(1)

    job_data = response["data"]
    if len(job_data) == 0:
        log.error(("Did not find a job with id={} for user={}").format(
            job_id, user))
        sys.exit(1)

    job_id = job_data.get("id", None)
    job_status = job_data.get("status", None)
    log.info(("job={}").format(ppj(job_data)))
    log.info(("done getting job.id={} status={}").format(job_id, job_status))
get_ml_job Get an ``MLJob`` by database id.
entailment
def get_molo_comments(parser, token):
    """
    Get a limited set of comments for a given object. Defaults to a limit
    of 5. Setting the limit to -1 disables limiting; set the amount of
    comments with 'limit'.

    usage:
        {% get_molo_comments for object as variable_name %}
        {% get_molo_comments for object as variable_name limit amount %}
        {% get_molo_comments for object as variable_name limit amount child_limit amount %}  # noqa
    """
    keywords = token.contents.split()
    if len(keywords) != 5 and len(keywords) != 7 and len(keywords) != 9:
        raise template.TemplateSyntaxError(
            "'%s' tag takes exactly 2,4 or 6 arguments" % (keywords[0],))
    if keywords[1] != 'for':
        raise template.TemplateSyntaxError(
            "first argument to '%s' tag must be 'for'" % (keywords[0],))
    if keywords[3] != 'as':
        raise template.TemplateSyntaxError(
            "second argument to '%s' tag must be 'as'" % (keywords[0],))
    if len(keywords) > 5 and keywords[5] != 'limit':
        raise template.TemplateSyntaxError(
            "third argument to '%s' tag must be 'limit'" % (keywords[0],))
    if len(keywords) == 7:
        return GetMoloCommentsNode(keywords[2], keywords[4], keywords[6])
    if len(keywords) > 7 and keywords[7] != 'child_limit':
        raise template.TemplateSyntaxError(
            "fourth argument to '%s' tag must be 'child_limit'"
            % (keywords[0],))
    if len(keywords) > 7:
        return GetMoloCommentsNode(keywords[2], keywords[4],
                                   keywords[6], keywords[8])
    return GetMoloCommentsNode(keywords[2], keywords[4])
Get a limited set of comments for a given object. Defaults to a limit of 5. Setting the limit to -1 disables limiting; set the amount of comments with 'limit'. usage: {% get_molo_comments for object as variable_name %} {% get_molo_comments for object as variable_name limit amount %} {% get_molo_comments for object as variable_name limit amount child_limit amount %} # noqa
entailment
def get_comments_content_object(parser, token):
    """
    Get a limited set of comments for a given object. Defaults to a limit
    of 5. Setting the limit to -1 disables limiting.

    usage:
        {% get_comments_content_object for form_object as variable_name %}
    """
    keywords = token.contents.split()
    if len(keywords) != 5:
        raise template.TemplateSyntaxError(
            "'%s' tag takes exactly 2 arguments" % (keywords[0],))
    if keywords[1] != 'for':
        raise template.TemplateSyntaxError(
            "first argument to '%s' tag must be 'for'" % (keywords[0],))
    if keywords[3] != 'as':
        raise template.TemplateSyntaxError(
            "second argument to '%s' tag must be 'as'" % (keywords[0],))
    return GetCommentsContentObject(keywords[2], keywords[4])
Get a limited set of comments for a given object. Defaults to a limit of 5. Setting the limit to -1 disables limiting. usage: {% get_comments_content_object for form_object as variable_name %}
entailment
def report(request, comment_id):
    """
    Flags a comment on GET.

    Redirects to whatever is provided in request.REQUEST['next'].
    """
    comment = get_object_or_404(
        django_comments.get_model(), pk=comment_id,
        site__pk=settings.SITE_ID)
    if comment.parent is not None:
        messages.info(request, _('Reporting comment replies is not allowed.'))
    else:
        perform_flag(request, comment)
        messages.info(request, _('The comment has been reported.'))
    next = request.GET.get('next') or comment.get_absolute_url()
    return HttpResponseRedirect(next)
Flags a comment on GET. Redirects to whatever is provided in request.REQUEST['next'].
entailment
def post_molo_comment(request, next=None, using=None):
    """
    Allows for posting of a Molo Comment; this allows comments to be set
    with the "user_name" as "Anonymous"
    """
    data = request.POST.copy()
    if 'submit_anonymously' in data:
        data['name'] = 'Anonymous'
    # replace with our changed POST data

    # ensure we always set an email
    data['email'] = request.user.email or 'blank@email.com'

    request.POST = data
    # pass 'using' through; the original passed 'using=next' by mistake
    return post_comment(request, next=next, using=using)
Allows for posting of a Molo Comment; this allows comments to be set with the "user_name" as "Anonymous"
entailment
def build_ai_client_from_env(
        verbose=ANTINEX_CLIENT_VERBOSE,
        debug=ANTINEX_CLIENT_DEBUG,
        ca_dir=None,
        cert_file=None,
        key_file=None):
    """build_ai_client_from_env

    Use environment variables to build a client

    :param verbose: verbose logging
    :param debug: debug internal client calls
    :param ca_dir: optional path to CA bundle dir
    :param cert_file: optional path to x509 ssl cert file
    :param key_file: optional path to x509 ssl key file
    """
    if not ANTINEX_PUBLISH_ENABLED:
        log.info(("publish disabled ANTINEX_PUBLISH_ENABLED={}").format(
            ANTINEX_PUBLISH_ENABLED))
        return None

    use_ca_dir = ca_dir
    use_cert_file = cert_file
    use_key_file = key_file

    if ANTINEX_CA_FILE or ANTINEX_KEY_FILE or ANTINEX_CERT_FILE:
        use_ca_dir = ANTINEX_CA_FILE
        use_cert_file = ANTINEX_CERT_FILE
        use_key_file = ANTINEX_KEY_FILE
        log.info(("creating env client user={} url={} "
                  "ca={} cert={} key={}").format(
                      ANTINEX_USER, ANTINEX_URL, ANTINEX_CA_FILE,
                      ANTINEX_CERT_FILE, ANTINEX_KEY_FILE))
    else:
        log.info(("creating client user={} url={} "
                  "ca={} cert={} key={}").format(
                      ANTINEX_USER, ANTINEX_URL, use_ca_dir,
                      use_cert_file, use_key_file))
    # if secure or dev

    return AIClient(
        user=ANTINEX_USER,
        email=ANTINEX_EMAIL,
        password=ANTINEX_PASSWORD,
        url=ANTINEX_URL,
        ca_dir=use_ca_dir,
        cert_file=use_cert_file,
        key_file=use_key_file,
        verbose=verbose,
        debug=debug)
build_ai_client_from_env Use environment variables to build a client :param verbose: verbose logging :param debug: debug internal client calls :param ca_dir: optional path to CA bundle dir :param cert_file: optional path to x509 ssl cert file :param key_file: optional path to x509 ssl key file
entailment
def train_new_deep_neural_network():
    """train_new_deep_neural_network

    Train a new deep neural network and store the results as a new:
    ``MLJob`` and ``MLJobResult`` database records.
    """
    parser = argparse.ArgumentParser(
        description=("Python client to Train a Deep Neural Network "
                     "with AntiNex Django Rest Framework"))
    parser.add_argument("-u", help="username", required=False, dest="user")
    parser.add_argument("-p", help="user password", required=False,
                        dest="password")
    parser.add_argument("-e", help="user email", required=False, dest="email")
    parser.add_argument("-a",
                        help="url endpoint with default http://localhost:8010",
                        required=False, dest="url")
    parser.add_argument("-f",
                        help="file to use default ./examples/test-keras-dnn.json",
                        required=False, dest="datafile")
    parser.add_argument("-b",
                        help=("optional - path to CA bundle directory for "
                              "client encryption over HTTP"),
                        required=False, dest="ca_dir")
    parser.add_argument("-c",
                        help=("optional - path to x509 certificate for "
                              "client encryption over HTTP"),
                        required=False, dest="cert_file")
    parser.add_argument("-k",
                        help=("optional - path to x509 key file for "
                              "client encryption over HTTP"),
                        required=False, dest="key_file")
    parser.add_argument("-s", help="silent", required=False, dest="silent",
                        action="store_true")
    parser.add_argument("-d", help="debug", required=False, dest="debug",
                        action="store_true")
    args = parser.parse_args()

    user = ev("API_USER", "user-not-set")
    password = ev("API_PASSWORD", "password-not-set")
    email = ev("API_EMAIL", "email-not-set")
    url = ev("API_URL", "http://localhost:8010")
    datafile = ev("DATAFILE", "datafile-not-set")
    ca_dir = os.getenv("API_CA_BUNDLE_DIR", None)
    cert_file = os.getenv("API_CERT_FILE", None)
    key_file = os.getenv("API_KEY_FILE", None)
    verbose = bool(str(ev("API_VERBOSE", "true")).lower() == "true")
    debug = bool(str(ev("API_DEBUG", "false")).lower() == "true")

    if args.user:
        user = args.user
    if args.password:
        password = args.password
    if args.email:
        email = args.email
    if args.url:
        url = args.url
    if args.datafile:
        datafile = args.datafile
    if args.ca_dir:
        ca_dir = args.ca_dir
    if args.cert_file:
        cert_file = args.cert_file
    if args.key_file:
        key_file = args.key_file
    if args.silent:
        verbose = False
    if args.debug:
        debug = True

    usage = ("Please run with "
             "-u <username> "
             "-p <password> "
             "-a <AntiNex URL http://localhost:8010> "
             "-f <path to json file> "
             "-b <optional - path to CA bundle directory> "
             "-c <optional - path to x509 ssl certificate file> "
             "-k <optional - path to x509 ssl key file>")

    valid = True
    if not user or user == "user-not-set":
        log.error("missing user")
        valid = False
    if not password or password == "password-not-set":
        log.error("missing password")
        valid = False
    if not datafile or datafile == "datafile-not-set":
        log.error("missing datafile")
        valid = False
    else:
        if not os.path.exists(datafile):
            log.error(("did not find datafile={} on disk").format(datafile))
            valid = False
    if not valid:
        log.error(usage)
        sys.exit(1)

    if verbose:
        log.info(("creating client user={} url={} file={} "
                  "ca_dir={} cert_file={} key_file={}").format(
                      user, url, datafile, ca_dir, cert_file, key_file))

    client = AIClient(
        user=user,
        email=email,
        password=password,
        url=url,
        ca_dir=ca_dir,
        cert_file=cert_file,
        key_file=key_file,
        verbose=verbose,
        debug=debug)

    if verbose:
        log.info(("loading request in datafile={}").format(datafile))

    req_body = None
    with open(datafile, "r") as f:
        req_body = json.loads(f.read())

    if verbose:
        log.info("running job")

    job_was_started = False
    response = client.run_job(body=req_body)

    if response["status"] == SUCCESS:
        log.info(("job started with response={}").format(response["data"]))
        job_was_started = True
    elif response["status"] == FAILED:
        log.error(("job failed with error='{}' with response={}").format(
            response["error"], response["data"]))
    elif response["status"] == ERROR:
        log.error(("job had an error='{}' with response={}").format(
            response["error"], response["data"]))
    elif response["status"] == LOGIN_FAILED:
        log.error(("job reported user was not able to log in "
                   "with an error='{}' with response={}").format(
                       response["error"], response["data"]))

    if not job_was_started:
        sys.exit(1)

    if debug:
        log.info(("parsing response data={}").format(response["data"]))
    else:
        if verbose:
            log.info("parsing data")

    res_data = response["data"]
    job_data = res_data.get("job", None)
    result_data = res_data.get("results", None)

    if not job_data:
        log.error(("missing job dictionary in response data={}").format(
            response["data"]))
        sys.exit(1)
    if not result_data:
        log.error(("missing results dictionary in response data={}").format(
            response["data"]))
        sys.exit(1)

    job_id = job_data.get("id", None)
    job_status = job_data.get("status", None)
    result_id = result_data.get("id", None)
    result_status = result_data.get("status", None)

    log.info(("started job.id={} job.status={} with "
              "result.id={} result.status={}").format(
                  job_id, job_status, result_id, result_status))

    job_results = client.wait_for_job_to_finish(job_id=job_id)

    if job_results["status"] != SUCCESS:
        log.error(("failed waiting for job.id={} to finish error={} data={}")
                  .format(job_id, job_results["error"], job_results["data"]))
        sys.exit(1)

    final_job = job_results["data"]["job"]
    final_result = job_results["data"]["result"]

    if verbose:
        log.info(("job={}").format(ppj(final_job)))
    else:
        log.info(("job={}").format(str(final_job)[0:10]))

    if verbose:
        log.info(("result={}").format(ppj(final_result)))
    else:
        log.info(("result={}").format(str(final_result)[0:10]))

    log.info(("job.id={} is done").format(job_id))

    predictions = final_result["predictions_json"].get("predictions", [])
    log.info(("loading predictions={} into pandas dataframe").format(
        len(predictions)))
    df = pd.DataFrame(predictions)
    if verbose:
        log.info(("dataframe={}").format(df))
train_new_deep_neural_network Train a new deep neural network and store the results as a new: ``MLJob`` and ``MLJobResult`` database records.
entailment
def prepare_new_dataset():
    """prepare_new_dataset

    Prepare a new ``MLPrepare`` record and dataset files on disk.
    """
    parser = argparse.ArgumentParser(
        description=("Python client to Prepare a dataset"))
    parser.add_argument("-u", help="username", required=False, dest="user")
    parser.add_argument("-p", help="user password", required=False,
                        dest="password")
    parser.add_argument("-e", help="user email", required=False, dest="email")
    parser.add_argument("-a",
                        help="url endpoint with default http://localhost:8010",
                        required=False, dest="url")
    parser.add_argument("-f",
                        help="file to use default ./examples/test-keras-dnn.json",
                        required=False, dest="prepare_file")
    parser.add_argument("-b",
                        help=("optional - path to CA bundle directory for "
                              "client encryption over HTTP"),
                        required=False, dest="ca_dir")
    parser.add_argument("-c",
                        help=("optional - path to x509 certificate for "
                              "client encryption over HTTP"),
                        required=False, dest="cert_file")
    parser.add_argument("-k",
                        help=("optional - path to x509 key file for "
                              "client encryption over HTTP"),
                        required=False, dest="key_file")
    parser.add_argument("-s", help="silent", required=False, dest="silent",
                        action="store_true")
    parser.add_argument("-d", help="debug", required=False, dest="debug",
                        action="store_true")
    args = parser.parse_args()

    user = ev("API_USER", "user-not-set")
    password = ev("API_PASSWORD", "password-not-set")
    email = ev("API_EMAIL", "email-not-set")
    url = ev("API_URL", "http://localhost:8010")
    prepare_file = ev("DATAFILE", "prepare_file-not-set")
    ca_dir = os.getenv("API_CA_BUNDLE_DIR", None)
    cert_file = os.getenv("API_CERT_FILE", None)
    key_file = os.getenv("API_KEY_FILE", None)
    verbose = bool(str(ev("API_VERBOSE", "true")).lower() == "true")
    debug = bool(str(ev("API_DEBUG", "false")).lower() == "true")

    if args.user:
        user = args.user
    if args.password:
        password = args.password
    if args.email:
        email = args.email
    if args.url:
        url = args.url
    if args.prepare_file:
        prepare_file = args.prepare_file
    if args.ca_dir:
        ca_dir = args.ca_dir
    if args.cert_file:
        cert_file = args.cert_file
    if args.key_file:
        key_file = args.key_file
    if args.silent:
        verbose = False
    if args.debug:
        debug = True

    usage = ("Please run with "
             "-u <username> "
             "-p <password> "
             "-a <AntiNex URL http://localhost:8010> "
             "-f <path to prepare file> "
             "-b <optional - path to CA bundle directory> "
             "-c <optional - path to x509 ssl certificate file> "
             "-k <optional - path to x509 ssl key file>")

    valid = True
    if not user or user == "user-not-set":
        log.error("missing user")
        valid = False
    if not password or password == "password-not-set":
        log.error("missing password")
        valid = False
    if not prepare_file or prepare_file == "prepare_file-not-set":
        log.error("missing prepare_file")
        valid = False
    else:
        if not os.path.exists(prepare_file):
            log.error(("did not find prepare_file={} on disk").format(
                prepare_file))
            valid = False
    if not valid:
        log.error(usage)
        sys.exit(1)

    if verbose:
        log.info(("creating client user={} url={} file={}").format(
            user, url, prepare_file))

    client = AIClient(
        user=user,
        email=email,
        password=password,
        url=url,
        ca_dir=ca_dir,
        cert_file=cert_file,
        key_file=key_file,
        verbose=verbose,
        debug=debug)

    if verbose:
        log.info(("loading request in prepare_file={}").format(prepare_file))

    req_body = None
    with open(prepare_file, "r") as f:
        req_body = json.loads(f.read())

    if verbose:
        log.info("running prepare")

    prepare_was_started = False
    response = client.run_prepare(body=req_body)

    if response["status"] == SUCCESS:
        log.info(("prepare started with response={}").format(
            response["data"]))
        prepare_was_started = True
    elif response["status"] == FAILED:
        log.error(("prepare failed with error='{}' with response={}").format(
            response["error"], response["data"]))
    elif response["status"] == ERROR:
        log.error(("prepare had an error='{}' with response={}").format(
            response["error"], response["data"]))
    elif response["status"] == LOGIN_FAILED:
        log.error(("prepare reported user was not able to log in "
                   "with an error='{}' with response={}").format(
                       response["error"], response["data"]))

    if not prepare_was_started:
        sys.exit(1)

    if debug:
        log.info(("parsing response data={}").format(response["data"]))
    else:
        if verbose:
            log.info("parsing data")

    prepare_data = response["data"]
    if not prepare_data:
        log.error(("missing prepare dictionary in response data={}").format(
            response["data"]))
        sys.exit(1)

    prepare_id = prepare_data.get("id", None)
    prepare_status = prepare_data.get("status", None)

    log.info(("started prepare.id={} prepare.status={}").format(
        prepare_id, prepare_status))

    prepare_results = client.wait_for_prepare_to_finish(
        prepare_id=prepare_id)

    if prepare_results["status"] != SUCCESS:
        log.error(("failed waiting for prepare.id={} to finish "
                   "error={} data={}").format(
                       prepare_id,
                       prepare_results["error"],
                       prepare_results["data"]))
        sys.exit(1)

    final_prepare = prepare_results["data"]

    log.info(("prepare={}").format(ppj(final_prepare)))
    log.info(("prepare.id={} is done").format(prepare_id))
prepare_new_dataset Prepare a new ``MLPrepare`` record and dataset files on disk.
entailment
def drape(raster, feature):
    """Convert a 2D feature to a 3D feature by sampling a raster

    Parameters:
        raster (rasterio): raster to provide the z coordinate
        feature (dict): fiona feature record to convert

    Returns:
        result (Point or LineString): shapely Point or LineString of xyz
        coordinate triples
    """
    coords = feature['geometry']['coordinates']
    geom_type = feature['geometry']['type']
    if geom_type == 'Point':
        xyz = sample(raster, [coords])
        result = Point(xyz[0])
    elif geom_type == 'LineString':
        xyz = sample(raster, coords)
        points = [Point(x, y, z) for x, y, z in xyz]
        result = LineString(points)
    else:
        logging.error('drape not implemented for {}'.format(geom_type))
        # the original left 'result' unbound here, raising a NameError
        result = None
    return result
Convert a 2D feature to a 3D feature by sampling a raster Parameters: raster (rasterio): raster to provide the z coordinate feature (dict): fiona feature record to convert Returns: result (Point or LineString): shapely Point or LineString of xyz coordinate triples
entailment
def sample(raster, coords):
    """Sample a raster at given coordinates

    Given a list of coordinates, return a list of x,y,z triples with z
    coordinates sampled from an input raster

    Parameters:
        raster (rasterio): raster dataset to sample
        coords: array of tuples containing coordinate pairs (x,y) or
            triples (x,y,z)

    Returns:
        result: array of tuples containing coordinate triples (x,y,z)
    """
    if len(coords[0]) == 3:
        logging.info('Input is a 3D geometry, z coordinate will be updated.')
        z = raster.sample([(x, y) for x, y, z in coords],
                          indexes=raster.indexes)
    else:
        z = raster.sample(coords, indexes=raster.indexes)
    result = [(vert[0], vert[1], vert_z) for vert, vert_z in zip(coords, z)]
    return result
Sample a raster at given coordinates Given a list of coordinates, return a list of x,y,z triples with z coordinates sampled from an input raster Parameters: raster (rasterio): raster dataset to sample coords: array of tuples containing coordinate pairs (x,y) or triples (x,y,z) Returns: result: array of tuples containing coordinate triples (x,y,z)
entailment
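A usage sketch tying sample() and drape() together (assumes rasterio, fiona, and shapely are installed; 'dem.tif' and 'points.shp' are illustrative paths covering the same area in the same CRS):

import fiona
import rasterio

with rasterio.open('dem.tif') as raster, fiona.open('points.shp') as src:
    for feature in src:
        geom = drape(raster, feature)  # shapely geometry with z from the DEM
        print(geom.wkt)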
def setup_logging(
        default_level=logging.INFO,
        default_path="{}/logging.json".format(
            os.getenv("LOG_DIR",
                      os.path.dirname(os.path.realpath(__file__)))),
        env_key="LOG_CFG",
        config_name=None):
    """setup_logging

    Setup logging configuration

    :param default_level: level to log
    :param default_path: path to config (optional)
    :param env_key: path to config in this env var
    :param config_name: filename for config
    """
    path = default_path
    file_name = default_path.split("/")[-1]
    if config_name:
        file_name = config_name
    path = "{}/{}".format("/".join(default_path.split("/")[:-1]), file_name)
    value = os.getenv(env_key, None)
    if value:
        path = value
    if os.path.exists(path):
        with open(path, "rt") as f:
            config = json.load(f)
        logging.config.dictConfig(config)
        return
    else:
        cwd_path = os.getcwd() + "/antinex_client/log/{}".format(file_name)
        if os.path.exists(cwd_path):
            with open(cwd_path, "rt") as f:
                config = json.load(f)
            logging.config.dictConfig(config)
            return
        rels_path = os.getcwd() + "/../log/{}".format(file_name)
        if os.path.exists(rels_path):
            with open(rels_path, "rt") as f:
                config = json.load(f)
            logging.config.dictConfig(config)
            return
        else:
            logging.basicConfig(level=default_level)
            return
setup_logging Setup logging configuration :param default_level: level to log :param default_path: path to config (optional) :param env_key: path to config in this env var :param config_name: filename for config
entailment
def build_logger(
        name=os.getenv("LOG_NAME", "client"),
        config="logging.json",
        log_level=logging.INFO,
        log_config_path="{}/logging.json".format(
            os.getenv("LOG_CFG",
                      os.path.dirname(os.path.realpath(__file__))))):
    """build_logger

    :param name: name that shows in the logger
    :param config: name of the config file
    :param log_level: level to log
    :param log_config_path: path to log config file
    """
    # find the log processing
    use_config = ("./log/{}").format("{}".format(config))
    if not os.path.exists(use_config):
        use_config = log_config_path
        if not os.path.exists(use_config):
            use_config = ("./antinex_client/log/{}").format("logging.json")

    setup_logging(
        default_level=log_level,
        default_path=use_config)

    return logging.getLogger(name)
build_logger :param name: name that shows in the logger :param config: name of the config file :param log_level: level to log :param log_config_path: path to log config file
entailment
def build_colorized_logger(
        name=os.getenv("LOG_NAME", "client"),
        config="colors-logging.json",
        log_level=logging.INFO,
        log_config_path="{}/logging.json".format(
            os.getenv("LOG_CFG",
                      os.path.dirname(os.path.realpath(__file__))))):
    """build_colorized_logger

    :param name: name that shows in the logger
    :param config: name of the config file
    :param log_level: level to log
    :param log_config_path: path to log config file
    """
    # allow a shared log config across all components
    override_config = os.getenv("SHARED_LOG_CFG", None)
    debug_log_config = bool(os.getenv("DEBUG_SHARED_LOG_CFG", "0") == "1")
    if override_config:
        if debug_log_config:
            print(("creating logger config env var: "
                   "SHARED_LOG_CFG={}".format(override_config)))
        if os.path.exists(override_config):
            setup_logging(
                default_level=log_level,
                default_path=override_config)
            return logging.getLogger(name)
        if debug_log_config:
            print(("Failed to find log config using env var: "
                   "SHARED_LOG_CFG={}".format(override_config)))
    else:
        if debug_log_config:
            print(("Not using shared logging env var: "
                   "SHARED_LOG_CFG={}".format(override_config)))

    # find the log config by the given path
    use_config = ("{}").format(config)
    if not os.path.exists(use_config):
        # find the log config from the base of the repo
        use_config = ("./antinex_client/log/{}").format(config)
        if not os.path.exists(use_config):
            # find the log config from the defaults with the env LOG_CFG
            use_config = log_config_path
            if not os.path.exists(use_config):
                use_config = ("./log/{}").format(config)
                if not os.path.exists(use_config):
                    # find the last log config backup from the base of the repo
                    use_config = ("./antinex_client/log/{}").format(
                        "logging.json")

    setup_logging(
        default_level=log_level,
        default_path=use_config)

    return logging.getLogger(name)
build_colorized_logger :param name: name that shows in the logger :param config: name of the config file :param log_level: level to log :param log_config_path: path to log config file
entailment
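A minimal call of the logger builder above; if none of the JSON config paths exist, setup_logging() falls back to logging.basicConfig, so this works even without a config file on disk:

log = build_colorized_logger(name='my-client')
log.info('hello from the antinex client logger')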
def authenticate(self, request: HttpRequest, auth_route: str,
                 actual_params: dict) -> bool:
    """
    Your AuthService should override this method for request authentication;
    otherwise no authentication is performed.

    :param request: HttpRequest Django's HttpRequest object
    :param auth_route: str User's requested route
    :param actual_params: User's url parameters
    :return: bool
    """
    if auth_route and actual_params:
        self.auth_data = {}
        return True
Your AuthService should override this method for request authentication; otherwise no authentication is performed. :param request: HttpRequest Django's HttpRequest object :param auth_route: str User's requested route :param actual_params: User's url parameters :return: bool
entailment
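A sketch of an overriding subclass (the base class name AuthService and the bearer-token scheme are assumptions for illustration; request.META is standard Django):

class TokenAuthService(AuthService):  # hypothetical subclass
    def authenticate(self, request, auth_route, actual_params):
        token = request.META.get('HTTP_AUTHORIZATION', '')
        self.auth_data = {'token': token}
        # only requests carrying a bearer token are authenticated
        return token.startswith('Bearer ')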
def cli(source_f, raster_f, output, verbose):
    """
    Converts 2D geometries to 3D using GEOS sample through fiona.

    \b
    Example:
        drape point.shp elevation.tif -o point_z.shp
    """
    with fiona.open(source_f, 'r') as source:
        source_driver = source.driver
        source_crs = source.crs
        sink_schema = source.schema.copy()
        source_geom = source.schema['geometry']

        if source_geom == 'Point':
            sink_schema['geometry'] = '3D Point'
        elif source_geom == 'LineString':
            sink_schema['geometry'] = '3D LineString'
        elif source_geom == '3D Point' or source_geom == '3D LineString':
            pass
        else:
            # raise, rather than just instantiate, the parameter error
            raise click.BadParameter(
                "Source geometry type {} not implemented".format(source_geom))

        with rasterio.open(raster_f) as raster:
            if source_crs != raster.crs:
                raise click.BadParameter(
                    "Features and raster have different CRS.")
            if raster.count > 1:
                warnings.warn(
                    "Found {0} bands in {1}, expected a single band "
                    "raster".format(raster.count, raster_f))
            supported = ['int16', 'int32', 'float32', 'float64']
            if raster.dtypes[0] not in supported:
                # the original closed .format() too early, passing raster_f
                # and supported to warnings.warn instead
                warnings.warn(
                    "Found {0} type in {1}, expected one of {2}".format(
                        raster.dtypes[0], raster_f, supported))
            with fiona.open(
                    output, 'w',
                    driver=source_driver,
                    crs=source_crs,
                    schema=sink_schema) as sink:
                for feature in source:
                    try:
                        feature_z = drapery.drape(raster, feature)
                        sink.write({
                            'geometry': mapping(feature_z),
                            'properties': feature['properties'],
                        })
                    except Exception:
                        logging.exception("Error processing feature %s:",
                                          feature['id'])
Converts 2D geometries to 3D using GEOS sample through fiona. \b Example: drape point.shp elevation.tif -o point_z.shp
entailment
def for_model(self, model):
    """
    QuerySet for all comments for a particular model (either an instance or
    a class).
    """
    ct = ContentType.objects.get_for_model(model)
    qs = self.get_queryset().filter(content_type=ct)
    if isinstance(model, models.Model):
        qs = qs.filter(object_pk=force_text(model._get_pk_val()))
    return qs
QuerySet for all comments for a particular model (either an instance or a class).
entailment
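Assuming this manager is attached as Comment.objects, usage looks like the following (Comment, Article, and article_instance are hypothetical names):

comments_for_class = Comment.objects.for_model(Article)         # every Article
comments_for_row = Comment.objects.for_model(article_instance)  # one Article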
def eval(self, command):
    'Blocking call, returns the value of the execution in JS'
    event = threading.Event()
    # TODO: Add event to server
    # job_id = str(id(command))
    import random
    job_id = str(random.random())
    server.EVALUATIONS[job_id] = event
    message = '?' + job_id + '=' + command
    logging.info(('message:', [message]))
    for listener in server.LISTENERS.get(self.path, []):
        logging.debug(('listener:', listener))
        listener.write_message(message)
    success = event.wait(timeout=30)
    if success:
        value_parser = server.RESULTS[job_id]
        del server.EVALUATIONS[job_id]
        del server.RESULTS[job_id]
        return value_parser()
    else:
        del server.EVALUATIONS[job_id]
        if job_id in server.RESULTS:
            del server.RESULTS[job_id]
        raise IOError('Evaluation failed.')
Blocking call, returns the value of the execution in JS
entailment
def register(self, callback, name):
    'Register a callback on server and on connected clients.'
    server.CALLBACKS[name] = callback
    self.run('''
        window.skink.%s = function(args=[]) {
            window.skink.call("%s", args);
        }''' % (name, name))
Register a callback on server and on connected clients.
entailment
def launch_exception(message):
    """
    Launch a Python exception from an error that took place in the browser.

    message format:
    - name: str
    - description: str
    """
    error_name = message['name']
    error_descr = message['description']
    mapping = {
        'ReferenceError': NameError,
    }
    if error_name in mapping:
        raise mapping[error_name](error_descr)
    else:
        raise Exception('{}: {}'.format(error_name, error_descr))
Launch a Python exception from an error that took place in the browser. message format: - name: str - description: str
entailment
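A small check of the mapping behavior above: a browser-side ReferenceError surfaces as a Python NameError carrying the description:

try:
    launch_exception({'name': 'ReferenceError',
                      'description': 'x is not defined'})
except NameError as e:
    assert str(e) == 'x is not defined'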
def start_predictions():
    """start_predictions

    Using environment variables, create an AntiNex AI Client. You can also
    use command line args if you want.

    This can train a new deep neural network if it does not exist or it can
    use an existing pre-trained deep neural network within the AntiNex Core
    to make new predictions.
    """
    parser = argparse.ArgumentParser(
        description=("Python client to make Predictions "
                     "using a Pre-trained Deep Neural Network "
                     "with AntiNex Django Rest Framework"))
    parser.add_argument("-f",
                        help=("file to use default ./examples/"
                              "predict-rows-scaler-full-django.json"),
                        required=False, dest="datafile")
    parser.add_argument("-m", help="send mock data", required=False,
                        dest="use_fake_rows", action="store_true")
    parser.add_argument("-b",
                        help=("optional - path to CA bundle directory for "
                              "client encryption over HTTP"),
                        required=False, dest="ca_dir")
    parser.add_argument("-c",
                        help=("optional - path to x509 certificate for "
                              "client encryption over HTTP"),
                        required=False, dest="cert_file")
    parser.add_argument("-k",
                        help=("optional - path to x509 key file for "
                              "client encryption over HTTP"),
                        required=False, dest="key_file")
    parser.add_argument("-s", help="silent", required=False, dest="silent",
                        action="store_true")
    parser.add_argument("-d", help="debug", required=False, dest="debug",
                        action="store_true")
    args = parser.parse_args()

    datafile = ev("DATAFILE",
                  "./examples/predict-rows-scaler-full-django.json")
    ca_dir = os.getenv("API_CA_BUNDLE_DIR", None)
    cert_file = os.getenv("API_CERT_FILE", None)
    key_file = os.getenv("API_KEY_FILE", None)
    verbose = bool(str(ev("API_CLIENT_VERBOSE", "1")).lower() == "1")
    debug = bool(str(ev("API_CLIENT_DEBUG", "0")).lower() == "1")

    use_fake_rows = False
    if args.use_fake_rows:
        use_fake_rows = True
    if args.datafile:
        datafile = args.datafile
    if args.ca_dir:
        ca_dir = args.ca_dir
    if args.cert_file:
        cert_file = args.cert_file
    if args.key_file:
        key_file = args.key_file
    if args.silent:
        verbose = False
    if args.debug:
        debug = True

    if verbose:
        log.info("creating client")

    client = build_ai_client_from_env(
        ca_dir=ca_dir,
        cert_file=cert_file,
        key_file=key_file,
        verbose=verbose,
        debug=debug)

    if verbose:
        log.info(("loading request in datafile={}").format(datafile))

    # pass in full or partial prediction record dictionaries
    # the generate_ai_request will fill in gaps with defaults
    fake_rows_for_predicting = [
        {"tcp_seq": 1},
        {"tcp_seq": 2},
        {"tcp_seq": 3},
        {"tcp_seq": 4}
    ]

    res_gen = None
    if use_fake_rows:
        res_gen = generate_ai_request(
            predict_rows=fake_rows_for_predicting)
    else:
        req_with_org_rows = None
        with open(datafile, "r") as f:
            req_with_org_rows = json.loads(f.read())
        res_gen = generate_ai_request(
            predict_rows=req_with_org_rows["predict_rows"])
    # end of sending mock data from this file or a file on disk

    if res_gen["status"] != SUCCESS:
        log.error(("failed generate_ai_request with error={}").format(
            res_gen["error"]))
        sys.exit(1)

    req_body = res_gen["data"]

    if verbose:
        log.info("running job")

    job_was_started = False
    response = client.run_job(body=req_body)

    if response["status"] == SUCCESS:
        log.info(("job started with response={}").format(response["data"]))
        job_was_started = True
    elif response["status"] == FAILED:
        log.error(("job failed with error='{}' with response={}").format(
            response["error"], response["data"]))
    elif response["status"] == ERROR:
        log.error(("job had an error='{}' with response={}").format(
            response["error"], response["data"]))
    elif response["status"] == LOGIN_FAILED:
        log.error(("job reported user was not able to log in "
                   "with an error='{}' with response={}").format(
                       response["error"], response["data"]))

    if not job_was_started:
        sys.exit(1)

    if debug:
        log.info(("parsing response data={}").format(response["data"]))
    else:
        if verbose:
            log.info("parsing data")

    res_data = response["data"]
    job_data = res_data.get("job", None)
    result_data = res_data.get("results", None)

    if not job_data:
        log.error(("missing job dictionary in response data={}").format(
            response["data"]))
        sys.exit(1)
    if not result_data:
        log.error(("missing results dictionary in response data={}").format(
            response["data"]))
        sys.exit(1)

    job_id = job_data.get("id", None)
    job_status = job_data.get("status", None)
    result_id = result_data.get("id", None)
    result_status = result_data.get("status", None)

    log.info(("started job.id={} job.status={} with "
              "result.id={} result.status={}").format(
                  job_id, job_status, result_id, result_status))

    job_results = client.wait_for_job_to_finish(job_id=job_id)

    if job_results["status"] != SUCCESS:
        log.error(("failed waiting for job.id={} to finish error={} data={}")
                  .format(job_id, job_results["error"], job_results["data"]))
        sys.exit(1)

    final_job = job_results["data"]["job"]
    final_result = job_results["data"]["result"]

    log.info(("job={}").format(ppj(final_job)))
    log.info(("result={}").format(ppj(final_result)))
    log.info(("job.id={} is done").format(job_id))

    predictions = final_result["predictions_json"].get("predictions", [])
    log.info(("loading predictions={} into pandas dataframe").format(
        len(predictions)))
    df = pd.DataFrame(predictions)
    log.info(("dataframe={}").format(df))
start_predictions Using environment variables, create an AntiNex AI Client. You can also use command line args if you want. This can train a new deep neural network if it does not exist or it can use an existing pre-trained deep neural network within the AntiNex Core to make new predictions.
entailment
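A minimal invocation sketch for the entry point above (the script name and environment values are hypothetical; the flags mirror the argparse setup shown):
# hypothetical shell session; assumes start_predictions() is wired up as the script's main
# export API_CLIENT_VERBOSE=1
# python predict.py -f ./examples/predict-rows-scaler-full-django.json -d
# python predict.py -m -s    # send the built-in mock rows instead of a datafile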
def login( self): """login""" auth_url = self.api_urls["login"] if self.verbose: log.info(("log in user={} url={} ca_dir={} cert={}") .format( self.user, auth_url, self.ca_dir, self.cert)) use_headers = { "Content-type": "application/json" } login_data = { "username": self.user, "password": self.password } if self.debug: log.info(( "LOGIN with body={} headers={} url={} " "verify={} cert={}").format( login_data, use_headers, auth_url, self.use_verify, self.cert)) response = requests.post( auth_url, verify=self.use_verify, cert=self.cert, data=json.dumps(login_data), headers=use_headers) if self.debug: log.info(("LOGIN response status_code={} text={} reason={}") .format( response.status_code, response.text, response.reason)) user_token = "" if response.status_code == 200: user_token = json.loads(response.text)["token"] if user_token != "": self.token = user_token self.login_status = LOGIN_SUCCESS if self.verbose: log.debug("login success") else: log.error(("failed to login user={} to url={} text={}") .format( self.user, auth_url, response.text)) self.login_status = LOGIN_FAILED # if the user token exists return self.login_status
login
entailment
def build_response( self, status=NOT_SET, error="", data=None): """build_response :param status: status code :param error: error message :param data: dictionary to send back """ res_node = { "status": status, "error": error, "data": data } return res_node
build_response :param status: status code :param error: error message :param data: dictionary to send back
entailment
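A short sketch of the envelope this helper builds (SUCCESS is one of the module's status constants; the payload here is hypothetical):
res = client.build_response(status=SUCCESS, error="", data={"job_id": 1})
# res == {"status": SUCCESS, "error": "", "data": {"job_id": 1}}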
def retry_login( self): """retry_login""" if not self.user or not self.password: return self.build_response( status=ERROR, error="please set the user and password") retry = 0 not_done = True while not_done: if self.is_logged_in(): return self.build_response( status=SUCCESS) else: if self.verbose: log.debug(("login attempt={} max={}") .format( retry, self.max_retries)) if self.login() == LOGIN_SUCCESS: return self.build_response( status=SUCCESS) else: time.sleep( self.login_retry_wait_time) # if able to login or not retry += 1 if retry > self.max_retries: return self.build_response( status=ERROR, error="failed logging in user={} retries={}".format( self.user, self.max_retries)) # if login worked or not return self.build_response( status=FAILED, error="user={} not able to login attempts={}".format( self.user, retry))
retry_login
entailment
def get_prepare_by_id( self, prepare_id=None): """get_prepare_by_id :param prepare_id: MLPrepare.id in the database """ if not prepare_id: log.error("missing prepare_id for get_prepare_by_id") return self.build_response( status=ERROR, error="missing prepare_id for get_prepare_by_id") if self.debug: log.info(("user={} getting prepare={}") .format( self.user, prepare_id)) url = "{}{}".format( self.api_urls["prepare"], prepare_id) not_done = True while not_done: if self.debug: log.info(( "JOB attempting to get={} to url={} " "verify={} cert={}").format( prepare_id, url, self.use_verify, self.cert)) response = requests.get( url, verify=self.use_verify, cert=self.cert, headers=self.get_auth_header()) if self.debug: log.info(("JOB response status_code={} text={} reason={}") .format( response.status_code, response.text, response.reason)) if response.status_code == 401: login_res = self.retry_login() if login_res["status"] != SUCCESS: if self.verbose: log.error( "retry login attempts failed") return self.build_response( status=login_res["status"], error=login_res["error"]) # if able to log back in just retry the call elif response.status_code == 200: if self.verbose: log.debug("deserializing") prepare_data = json.loads( response.text) prepare_id = prepare_data.get( "id", None) if not prepare_id: return self.build_response( status=ERROR, error="missing prepare.id", data="text={} reason={}".format( response.reason, response.text)) self.all_prepares[str(prepare_id)] = prepare_data if self.debug: log.info(("added prepare={} all_prepares={}") .format( prepare_id, len(self.all_prepares))) return self.build_response( status=SUCCESS, error="", data=prepare_data) else: err_msg = ("failed with " "status_code={} text={} reason={}").format( response.status_code, response.text, response.reason) if self.verbose: log.error(err_msg) return self.build_response( status=ERROR, error=err_msg)
get_prepare_by_id :param prepare_id: MLPrepare.id in the database
entailment
def wait_for_job_to_finish( self, job_id, sec_to_sleep=5.0, max_retries=100000): """wait_for_job_to_finish :param job_id: MLJob.id to wait on :param sec_to_sleep: seconds to sleep during polling :param max_retries: max retries until stopping """ not_done = True retry_attempt = 1 while not_done: if self.debug: log.info(("JOBSTATUS getting job.id={} details") .format( job_id)) response = self.get_job_by_id(job_id) if self.debug: log.info(("JOBSTATUS got job.id={} response={}") .format( job_id, response)) if response["status"] != SUCCESS: log.error(("JOBSTATUS failed to get job.id={} with error={}") .format( job_id, response["error"])) return self.build_response( status=ERROR, error=response["error"], data=response["data"]) # stop if this failed getting the job details job_data = response.get( "data", None) if not job_data: return self.build_response( status=ERROR, error="failed to find job dictionary in response", data=response["data"]) job_status = job_data["status"] if job_status == "finished" \ or job_status == "completed" \ or job_status == "launched": if self.debug: log.info(("job.id={} is done with status={}") .format( job_id, job_status)) result_id = job_data["predict_manifest"]["result_id"] if self.debug: log.info(("JOBRESULT getting result.id={} details") .format( result_id)) response = self.get_result_by_id(result_id) if self.debug: log.info(("JOBRESULT got result.id={} response={}") .format( result_id, response)) if response["status"] != SUCCESS: log.error(("JOBRESULT failed to get " "result.id={} with error={}") .format( result_id, response["error"])) return self.build_response( status=ERROR, error=response["error"], data=response["data"]) # stop if this failed getting the result details result_data = response.get( "data", None) if result_data["status"] == "finished": full_response = { "job": job_data, "result": result_data } not_done = False return self.build_response( status=SUCCESS, error="", data=full_response) else: if retry_attempt % 100 == 0: if self.verbose: log.info(("result_id={} is not done retry={}") .format( result_id, retry_attempt)) retry_attempt += 1 if retry_attempt > max_retries: err_msg = ("failed waiting " "for job.id={} result.id={} " "to finish").format( job_id, result_id) log.error(err_msg) return self.build_response( status=ERROR, error=err_msg) else: time.sleep(sec_to_sleep) # wait while results are written to the db else: retry_attempt += 1 if retry_attempt > max_retries: err_msg = ("failed waiting " "for job.id={} to finish").format( job_id) log.error(err_msg) return self.build_response( status=ERROR, error=err_msg) else: if self.verbose: if retry_attempt % 100 == 0: log.info(("waiting on job.id={} retry={}") .format( job_id, retry_attempt)) # if logging just to show this is running time.sleep(sec_to_sleep)
wait_for_job_to_finish :param job_id: MLJob.id to wait on :param sec_to_sleep: seconds to sleep during polling :param max_retries: max retries until stopping
entailment
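A minimal polling sketch, mirroring how start_predictions above consumes this method (job_id is assumed to come from a prior client.run_job call):
job_results = client.wait_for_job_to_finish(job_id=job_id, sec_to_sleep=2.0)
if job_results["status"] == SUCCESS:
    final_job = job_results["data"]["job"]        # the finished MLJob dictionary
    final_result = job_results["data"]["result"]  # the finished result dictionary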
def wait_for_prepare_to_finish( self, prepare_id, sec_to_sleep=5.0, max_retries=100000): """wait_for_prepare_to_finish :param prepare_id: MLPrepare.id to wait on :param sec_to_sleep: seconds to sleep during polling :param max_retries: max retries until stopping """ not_done = True retry_attempt = 1 while not_done: if self.debug: log.info(("PREPSTATUS getting prepare.id={} details") .format( prepare_id)) response = self.get_prepare_by_id(prepare_id) if self.debug: log.info(("PREPSTATUS got prepare.id={} response={}") .format( prepare_id, response)) if response["status"] != SUCCESS: log.error(("PREPSTATUS failed to get prepare.id={} " "with error={}") .format( prepare_id, response["error"])) return self.build_response( status=ERROR, error=response["error"], data=response["data"]) # stop if this failed getting the prepare details prepare_data = response.get( "data", None) if not prepare_data: return self.build_response( status=ERROR, error="failed to find prepare dictionary in response", data=response["data"]) prepare_status = prepare_data["status"] if prepare_status == "finished" \ or prepare_status == "completed": not_done = False return self.build_response( status=SUCCESS, error="", data=prepare_data) else: retry_attempt += 1 if retry_attempt > max_retries: err_msg = ("failed waiting " "for prepare.id={} to finish").format( prepare_id) log.error(err_msg) return self.build_response( status=ERROR, error=err_msg) else: if self.verbose: if retry_attempt % 100 == 0: log.info(("waiting on prepare.id={} retry={}") .format( prepare_id, retry_attempt)) # if logging just to show this is running time.sleep(sec_to_sleep)
wait_for_prepare_to_finish :param prepare_id: MLPrepare.id to wait on :param sec_to_sleep: seconds to sleep during polling :param max_retries: max retries until stopping
entailment
def get_comment_object(self): """ NB: Overridden to remove dupe comment check for admins (necessary for canned responses) Return a new (unsaved) comment object based on the information in this form. Assumes that the form is already validated and will throw a ValueError if not. Does not set any of the fields that would come from a Request object (i.e. ``user`` or ``ip_address``). """ if not self.is_valid(): raise ValueError( "get_comment_object may only be called on valid forms") CommentModel = self.get_comment_model() new = CommentModel(**self.get_comment_create_data()) user_model = get_user_model() try: user = user_model.objects.get(username=new.user_name) if not user.is_staff: new = self.check_for_duplicate_comment(new) except user_model.DoesNotExist: # post_molo_comment may have set the username to 'Anonymous' new = self.check_for_duplicate_comment(new) return new
NB: Overridden to remove dupe comment check for admins (necessary for canned responses) Return a new (unsaved) comment object based on the information in this form. Assumes that the form is already validated and will throw a ValueError if not. Does not set any of the fields that would come from a Request object (i.e. ``user`` or ``ip_address``).
entailment
def boot(app_name) -> Rinzler: """ Start Rinzler App :param app_name: str Application's identifier :return: Rinzler """ app = Rinzler(app_name) app.log.info("App booted =)") return app
Start Rinzler App :param app_name: str Application's identifier :return: Rinzler
entailment
def mount(self, route: str, controller: callable) -> url: """ Maps a route namespace with the given params and points its requests to the specified controller. :param route: str Namespace route to be mapped :param controller: callback Controller callable to map end-points :rtype: url """ if issubclass(controller, TemplateView): return url( r"%s" % route, Router(self, route, controller).handle ) else: raise TypeError("The controller %s must be a subclass of %s" % ( controller, TemplateView ) )
Maps a route namespace with the given params and points its requests to the specified controller. :param route: str Namespace route to be mapped :param controller: callback Controller callable to map end-points :rtype: url
entailment
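A minimal mounting sketch (app is assumed to be a Rinzler instance and ApiController a hypothetical TemplateView subclass whose connect() maps end-points):
urlpatterns = [
    app.mount(r"^api/", ApiController),  # raises TypeError for non-TemplateView controllers
]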
def set_auth_service(self, auth_service: BaseAuthService): """ Sets the authentication service :param auth_service: BaseAuthService Authentication service :raises: TypeError If the auth_service object is not a subclass of rinzler.auth.BaseAuthService :rtype: Rinzler """ if issubclass(auth_service.__class__, BaseAuthService): self.auth_service = auth_service return self else: raise TypeError("Your auth service object must be a subclass of rinzler.auth.BaseAuthService.")
Sets the authentication service :param auth_service: BaseAuthService Authentication service :raises: TypeError If the auth_service object is not a subclass of rinzler.auth.BaseAuthService :rtype: Rinzler
entailment
def handle(self, request: HttpRequest) -> HttpResponse: """ Prepares for the CallBackResolver and handles the response and exceptions :param request HttpRequest :rtype: HttpResponse """ self.__request_start = datetime.now() self.__request = request self.__uri = request.path[1:] self.__method = request.method # Initializes the callable controller and calls its connect method to get the mapped end-points. controller: RouteMapping = self.__controller().connect(self.app) self.__end_points = controller.get_routes() indent = self.get_json_ident(request.META) if self.set_end_point_uri() is False: return self.set_response_headers(self.no_route_found(self.__request).render(indent)) response = HttpResponse(None) try: response = self.exec_route_callback() except RinzlerHttpException as e: client.captureException() self.app.log.error(f"< {e.status_code}", exc_info=True) response = Response(None, status=e.status_code) except RequestDataTooBig: client.captureException() self.app.log.error("< 413", exc_info=True) response = Response(None, status=413) except BaseException: client.captureException() self.app.log.error("< 500", exc_info=True) response = Response(None, status=500) finally: if type(response) == Response: return self.set_response_headers(response.render(indent)) else: return self.set_response_headers(response)
Prepares for the CallBackResolver and handles the response and exceptions :param request HttpRequest :rtype: HttpResponse
entailment
def exec_route_callback(self) -> Response or object: """ Executes the resolved end-point callback, or its fallback :rtype: Response or object """ if self.__method.lower() in self.__end_points: for bound in self.__end_points[self.__method.lower()]: route = list(bound)[0] expected_params = self.get_url_params(route) actual_params = self.get_url_params(self.get_end_point_uri()) if self.request_matches_route(self.get_end_point_uri(), route): self.app.log.info("> {0} {1}".format(self.__method, self.__uri)) if self.authenticate(route, actual_params): self.app.log.debug( "%s(%d) %s" % ("body ", len(self.__request.body), self.__request.body.decode('utf-8')) ) pattern_params = self.get_callback_pattern(expected_params, actual_params) self.app.request_handle_time = ( lambda d: int((d.days * 24 * 60 * 60 * 1000) + (d.seconds * 1000) + (d.microseconds / 1000)) )(datetime.now() - self.__request_start) return bound[route](self.__request, self.app, **pattern_params) else: raise AuthException("Authentication failed.") if self.__method == "OPTIONS": self.app.log.info("Route matched: {0} {1}".format(self.__method, self.__uri)) return self.default_route_options() if self.__route == '' and self.__uri == '': return self.welcome_page() else: return self.no_route_found(self.__request)
Executes the resolved end-point callback, or its fallback :rtype: Response or object
entailment
def request_matches_route(self, actual_route: str, expected_route: str): """ Determines whether a route matches the actual requested route or not :param actual_route str :param expected_route str :rtype: bool """ expected_params = self.get_url_params(expected_route) actual_params = self.get_url_params(actual_route) i = 0 if len(expected_params) == len(actual_params): for param in actual_params: if expected_params[i][0] != "{": if param != expected_params[i]: return False i += 1 else: return False return True
Determines whether a route matches the actual requested route or not :param actual_route str :param expected_route str :rtype: bool
entailment
def authenticate(self, bound_route, actual_params) -> bool: """ Runs the pre-defined authentication service :param bound_route str route matched :param actual_params dict actual url parameters :rtype: bool """ if self.__auth_service is not None: auth_route = "{0}_{1}{2}".format(self.__method, self.__route, bound_route) auth_data = self.__auth_service.authenticate(self.__request, auth_route, actual_params) if auth_data is True: self.app.auth_data = self.__auth_service.auth_data else: return False return True
Runs the pre-defined authentication service :param bound_route str route matched :param actual_params dict actual url parameters :rtype: bool
entailment
def get_callback_pattern(expected_params, actual_params): """ Assembles a dictionary with the parameters schema defined for this route :param expected_params dict parameters schema defined for this route :param actual_params dict actual url parameters :rtype: dict """ pattern = dict() key = 0 for exp_param in expected_params: if exp_param[0] == '{' and exp_param[-1:] == '}': pattern[exp_param[1:-1]] = actual_params[key] key = key + 1 return pattern
Assembles a dictionary with the parameters schema defined for this route :param expected_params dict parameters schema defined for this route :param actual_params dict actual url parameters :rtype: dict
entailment
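A small sketch of the pattern assembly, using placeholder-only segments (assuming the method is exposed statically, as its self-less signature suggests):
Router.get_callback_pattern(["{user}", "{id}"], ["bob", "7"])
# {"user": "bob", "id": "7"}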
def get_url_params(end_point: str) -> list: """ Gets route parameters as a list :param end_point str target route :rtype: list """ var_params = end_point.split('/') if len(var_params) == 1 and var_params[0] == '': return [] elif len(var_params) == 1 and var_params[0] != '': return [var_params[0]] else: params = list() for param in var_params: if len(param) > 0: params.append(param) return params
Gets route parameters as a list :param end_point str target route :rtype: list
entailment
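Illustrative calls (again assuming static exposure on the router):
Router.get_url_params("users/42/books")  # ["users", "42", "books"]
Router.get_url_params("")                # []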
def set_end_point_uri(self) -> bool: """ Extracts the route from the accessed URL and sets it to __end_point_uri :rtype: bool """ expected_parts = self.__route.split("/") actual_parts = self.__uri.split("/") i = 0 for part in expected_parts: if part != actual_parts[i]: return False i = i + 1 uri_prefix = len(self.__route) self.__end_point_uri = self.__uri[uri_prefix:] return True
Extracts the route from the accessed URL and sets it to __end_point_uri :rtype: bool
entailment
def no_route_found(self, request): """ Default callback for route not found :param request HttpRequest :rtype: Response """ response_obj = OrderedDict() response_obj["status"] = False response_obj["exceptions"] = { "message": "No route found for {0} {1}".format(self.__method, self.__uri), } response_obj["request"] = { "method": self.__method, "path_info": self.__uri, "content": request.body.decode("utf-8") } response_obj["message"] = "We are sorry, but something went terribly wrong." return Response(response_obj, content_type="application/json", status=404, charset="utf-8")
Default callback for route not found :param request HttpRequest :rtype: Response
entailment
def welcome_page(self): """ Default welcome page when the route / is not mapped yet :rtype: HttpResponse """ message = "HTTP/1.1 200 OK RINZLER FRAMEWORK" return HttpResponse( "<center><h1>{0}({1})</h1></center>".format(message, self.app.app_name), content_type="text/html", charset="utf-8" )
Default welcome page when the route / is not mapped yet :rtype: HttpResponse
entailment
def default_route_options(): """ Default callback for OPTIONS request :rtype: Response """ response_obj = OrderedDict() response_obj["status"] = True response_obj["data"] = "Ok" return Response(response_obj, content_type="application/json", charset="utf-8")
Default callback for OPTIONS request :rtype: Response
entailment
def set_response_headers(self, response: HttpResponse) -> HttpResponse: """ Appends default headers to every response returned by the API :param response HttpResponse :rtype: HttpResponse """ public_name = os.environ.get('SERVER_PUBLIC_NAME') response_headers = { 'access-control-allow-headers': self.app.allowed_headers, 'access-control-allow-methods': self.app.allowed_methods, 'access-control-allow-origin': self.app.allowed_origins, 'access-control-allow-credentials': True, 'www-authenticate': "Bearer", 'server-public-name': public_name if public_name else "No one", 'user-info': "Rinzler Framework rulez!" } response_headers.update(self.app.default_headers) for key in response_headers: response[key] = response_headers[key] status = response.status_code if status != 404: self.app.log.info("< {0}".format(status)) return response
Appends default headers to every response returned by the API :param response HttpResponse :rtype: HttpResponse
entailment
def get_json_ident(request_headers: dict) -> int: """ Defines whether the JSON response will be indented or not :param request_headers: dict :return: int """ if 'HTTP_USER_AGENT' in request_headers: indent = 2 if re.match("[Mozilla]{7}", request_headers['HTTP_USER_AGENT']) else 0 else: indent = 0 return indent
Defines whether the JSON response will be indented or not :param request_headers: dict :return: int
entailment
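A quick sketch of the browser sniff (the header dicts stand in for Django's request.META):
Router.get_json_ident({"HTTP_USER_AGENT": "Mozilla/5.0"})  # 2 - pretty-print for browsers
Router.get_json_ident({})                                  # 0 - compact otherwise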
def prop(key, dct_or_obj): """ Implementation of prop (get_item) that also supports object attributes :param key: :param dct_or_obj: :return: """ # Note that hasattr is a builtin and getattr is a ramda function, hence the different arg position if isinstance(dict, dct_or_obj): if has(key, dct_or_obj): return dct_or_obj[key] else: raise Exception("No key %s found for dict %s" % (key, dct_or_obj)) elif isinstance(list, dct_or_obj): if isint(key): return dct_or_obj[key] else: raise Exception("Key %s not expected for list type: %s" % (key, dct_or_obj)) elif isinstance(object, dct_or_obj): if hasattr(dct_or_obj, key): return getattr(key, dct_or_obj) else: raise Exception("No key %s found for object %s" % (key, dct_or_obj)) else: raise Exception("%s is neither a dict nor an object" % dct_or_obj)
Implementation of prop (get_item) that also supports object attributes :param key: :param dct_or_obj: :return:
entailment
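A minimal sketch, assuming prop and the module's curried isinstance/getattr helpers are in scope:
prop("a", {"a": 1})   # 1
class Box: a = 2
prop("a", Box())      # 2 - falls back to attribute access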
def all_pass_dict(f, dct): """ Returns true if all dct values pass f :param f: binary lambda predicate :param dct: :return: True or False """ return all(map_with_obj_to_values( lambda key, value: f(key, value), dct ))
Returns true if all dct values pass f :param f: binary lambda predicate :param dct: :return: True or False
entailment
def prop_or(default, key, dct_or_obj): """ Ramda propOr implementation. This also resolves object attributes, so key can be a dict prop or an attribute of dct_or_obj :param default: Value if dct_or_obj doesn't have key_or_prop or the resolved value is null :param key: :param dct_or_obj: :return: """ # Note that hasattr is a builtin and getattr is a ramda function, hence the different arg position if isinstance(dict, dct_or_obj): value = dct_or_obj[key] if has(key, dct_or_obj) else default elif isinstance(object, dct_or_obj): value = getattr(key, dct_or_obj) if hasattr(dct_or_obj, key) else default else: value = default # 0 and False are ok, None defaults if value is None: return default return value
Ramda propOr implementation. This also resolves object attributes, so key can be a dict prop or an attribute of dct_or_obj :param default: Value if dct_or_obj doesn't have key_or_prop or the resolved value is null :param key: :param dct_or_obj: :return:
entailment
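Illustrative calls (note the None coercion at the end of the implementation):
prop_or(0, "a", {"a": 1})     # 1
prop_or(0, "b", {"a": 1})     # 0 - missing key
prop_or(0, "a", {"a": None})  # 0 - a None value also falls back to the default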
def prop_eq_or(default, key, value, dct): """ Ramda propEq plus propOr implementation :param default: :param key: :param value: :param dct: :return: """ return dct[key] and dct[key] == value if key in dct else default
Ramda propEq plus propOr implementation :param default: :param key: :param value: :param dct: :return:
entailment
def prop_eq_or_in_or(default, key, value, dct): """ Ramda propEq/propIn plus propOr :param default: :param key: :param value: :param dct: :return: """ return has(key, dct) and \ (dct[key] == value if key in dct else ( dct[key] in value if isinstance((list, tuple), value) and not isinstance(str, value) else default ))
Ramda propEq/propIn plus propOr :param default: :param key: :param value: :param dct: :return:
entailment
def item_path_or(default, keys, dict_or_obj): """ Optional version of item_path with a default value. keys can be dict keys or object attributes, or a combination :param default: :param keys: List of keys or dot-separated string :param dict_or_obj: A dict or obj :return: """ if not keys: raise ValueError("Expected at least one key, got {0}".format(keys)) resolved_keys = keys.split('.') if isinstance(str, keys) else keys current_value = dict_or_obj for key in resolved_keys: current_value = prop_or(default, key, default_to({}, current_value)) return current_value
Optional version of item_path with a default value. keys can be dict keys or object attributes, or a combination :param default: :param keys: List of keys or dot-separated string :param dict_or_obj: A dict or obj :return:
entailment
def item_str_path(keys, dct): """ Given a string of path segments separated by ., splits them into an array. Int strings are converted to numbers to serve as an array index :param keys: e.g. 'foo.bar.1.goo' :param dct: e.g. dict(foo=dict(bar=[dict(goo='a'), dict(goo='b')]) :return: The resolved value or an error. E.g. for above the result would be b """ return item_path(map(lambda segment: int(segment) if isint(segment) else segment, keys.split('.')), dct)
Given a string of path segments separated by ., splits them into an array. Int strings are converted to numbers to serve as an array index :param keys: e.g. 'foo.bar.1.goo' :param dct: e.g. dict(foo=dict(bar=[dict(goo='a'), dict(goo='b')]) :return: The resolved value or an error. E.g. for above the result would be b
entailment
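Restating the docstring example as a sketch (assumes item_path and isint from this module):
dct = dict(foo=dict(bar=[dict(goo="a"), dict(goo="b")]))
item_str_path("foo.bar.1.goo", dct)  # "b" - the "1" segment becomes a list index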
def item_str_path_or(default, keys, dct): """ Given a string of path segments separated by ., splits them into an array. Int strings are converted to numbers to serve as an array index :param default: Value if any part yields None or undefined :param keys: e.g. 'foo.bar.1.goo' :param dct: e.g. dict(foo=dict(bar=[dict(goo='a'), dict(goo='b')]) :return: The resolved value or an error. E.g. for above the result would be b """ return item_path_or(default, map(lambda segment: int(segment) if isint(segment) else segment, keys.split('.')), dct)
Given a string of path segments separated by ., splits them into an array. Int strings are converted to numbers to serve as an array index :param default: Value if any part yields None or undefined :param keys: e.g. 'foo.bar.1.goo' :param dct: e.g. dict(foo=dict(bar=[dict(goo='a'), dict(goo='b')]) :return: The resolved value or an error. E.g. for above the result would be b
entailment
def has(prop, object_or_dct): """ Implementation of ramda has :param prop: :param object_or_dct: :return: """ return prop in object_or_dct if isinstance(dict, object_or_dct) else hasattr(object_or_dct, prop)
Implementation of ramda has :param prop: :param object_or_dct: :return:
entailment
def omit_deep(omit_props, dct): """ Implementation of omit that recurses. This tests the same keys at every level of dict and in lists :param omit_props: :param dct: :return: """ omit_partial = omit_deep(omit_props) if isinstance(dict, dct): # Filter out keys and then recurse on each value that wasn't filtered out return map_dict(omit_partial, compact_dict(omit(omit_props, dct))) if isinstance((list, tuple), dct): # run omit_deep on each value return map(omit_partial, dct) # scalar return dct
Implementation of omit that recurses. This tests the same keys at every level of dict and in lists :param omit_props: :param dct: :return:
entailment
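A small sketch, assuming the module's currying and its map_dict/compact_dict/omit helpers:
omit_deep(["secret"], {"a": 1, "secret": 2, "b": {"secret": 3, "c": 4}})
# {"a": 1, "b": {"c": 4}} - the key is stripped at every depth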
def pick_deep(pick_dct, dct): """ Implementation of pick that recurses. This tests the same keys at every level of dict and in lists :param pick_dct: Deep dict matching some portion of dct. :param dct: Dct to filter. Any key matching pick_dct passes through. It doesn't matter what the pick_dct value is as long as the key exists. Arrays also pass through if they have matching values in pick_dct :return: """ if isinstance(dict, dct): # Filter out keys and then recurse on each value that wasn't filtered out return map_with_obj( lambda k, v: pick_deep(prop(k, pick_dct), v), pick(keys(pick_dct), dct) ) if isinstance((list, tuple), dct): # run pick_deep on each value return map( lambda tup: pick_deep(*tup), list(zip(pick_dct or [], dct)) ) # scalar return dct
Implementation of pick that recurses. This tests the same keys at every level of dict and in lists :param pick_dct: Deep dict matching some portion of dct. :param dct: Dct to filter. Any key matching pick_dct passes through. It doesn't matter what the pick_dct value is as long as the key exists. Arrays also pass through if they have matching values in pick_dct :return:
entailment
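A small sketch (only the keys of pick_dct matter, not its values):
pick_deep({"a": 1, "c": {"d": 1}}, {"a": 1, "b": 2, "c": {"d": 4, "e": 5}})
# {"a": 1, "c": {"d": 4}}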
def map_with_obj_deep(f, dct): """ Implementation of map that recurses. This tests the same keys at every level of dict and in lists :param f: 2-ary function expecting a key and value and returns a modified value :param dct: Dict for deep processing :return: Modified dct with matching props mapped """ return _map_deep(lambda k, v: [k, f(k, v)], dct)
Implementation of map that recurses. This tests the same keys at every level of dict and in lists :param f: 2-ary function expecting a key and value and returns a modified value :param dct: Dict for deep processing :return: Modified dct with matching props mapped
entailment
def map_keys_deep(f, dct): """ Implementation of map that recurses. This tests the same keys at every level of dict and in lists :param f: 2-ary function expecting a key and value and returns a modified key :param dct: Dict for deep processing :return: Modified dct with matching props mapped """ return _map_deep(lambda k, v: [f(k, v), v], dct)
Implementation of map that recurses. This tests the same keys at every level of dict and in lists :param f: 2-ary function expecting a key and value and returns a modified key :param dct: Dict for deep processing :return: Modified dct with matching props mapped
entailment
def _map_deep(f, dct): """ Used by map_with_obj_deep and map_keys_deep :param f: Expects a key and value and returns a pair :param dct: :return: """ if isinstance(dict, dct): return map_key_values(lambda k, v: f(k, _map_deep(f, v)), dct) elif isinstance((list, tuple), dct): # Call each value with the index as the key. Since f returns a key value discard the key that it returns # Even if this is called with map_keys_deep we can't manipulate index values here return map(lambda iv: f(iv[0], _map_deep(f, iv[1]))[1], enumerate(dct)) # scalar return dct
Used by map_with_obj_deep and map_keys_deep :param f: Expects a key and value and returns a pair :param dct: :return:
entailment
def dict_matches_params_deep(params_dct, dct): """ Filters deeply by comparing dct to filter_dct's value at each depth. Whenever a mismatch occurs the whole thing returns false :param params_dct: dict matching any portion of dct. E.g. filter_dct = {foo: {bar: 1}} would allow {foo: {bar: 1, car: 2}} to pass, {foo: {bar: 2}} would fail, {goo: ...} would fail :param dct: Dict for deep processing :return: True if all pass else false """ def recurse_if_param_exists(params, key, value): """ If a param[key] exists, recurse. Otherwise return True since there is no param to contest value :param params: :param key: :param value: :return: """ return dict_matches_params_deep( prop(key, params), value ) if has(key, params) else True def recurse_if_array_param_exists(params, index, value): """ If a param[key] exists, recurse. Otherwise return True since there is no param to contest value :param params: :param index: :param value: :return: """ return dict_matches_params_deep( params[index], value ) if isinstance((list, tuple), params_dct) and index < length(params_dct) else True if isinstance(dict, dct): # Filter out keys and then recurse on each value return all_pass_dict( # Recurse on each value if there is a corresponding filter_dct[key]. If not we pass lambda key, value: recurse_if_param_exists(params_dct, key, value), # We shallow merge, giving dct priority with (hopefully) unmatchable values merge(map_with_obj(lambda k, v: 1 / (-e * pi), params_dct), dct) ) if isinstance((list, tuple), dct): if isinstance((list, tuple), params_dct) and length(dct) < length(params_dct): # if there are more param items than dct items fail return False # run map_deep on each value return all(map( lambda ivalue: recurse_if_array_param_exists(params_dct, *ivalue), enumerate(dct) )) # scalar. Note that anything not truthy (False, None, 0) is considered equal return params_dct == dct
Filters deeply by comparing dct to filter_dct's value at each depth. Whenever a mismatch occurs the whole thing returns false :param params_dct: dict matching any portion of dct. E.g. filter_dct = {foo: {bar: 1}} would allow {foo: {bar: 1, car: 2}} to pass, {foo: {bar: 2}} would fail, {goo: ...} would fail :param dct: Dict for deep processing :return: True if all pass else false
entailment
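Restating the docstring cases as a sketch:
dict_matches_params_deep({"foo": {"bar": 1}}, {"foo": {"bar": 1, "car": 2}})  # True
dict_matches_params_deep({"foo": {"bar": 2}}, {"foo": {"bar": 1, "car": 2}})  # False
dict_matches_params_deep({"goo": 1}, {"foo": {"bar": 1}})                     # False - key absent from dct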
def join(strin, items): """ Ramda implementation of join :param strin: :param items: :return: """ return strin.join(map(lambda item: str(item), items))
Ramda implementation of join :param strin: :param items: :return:
entailment
def map_with_obj(f, dct): """ Implementation of Ramda's mapObjIndexed without the final argument. This returns the original key with the mapped value. Use map_key_values to modify the keys too :param f: Called with a key and value :param dct: :return {dict}: Keyed by the original key, valued by the mapped value """ f_dict = {} for k, v in dct.items(): f_dict[k] = f(k, v) return f_dict
Implementation of Ramda's mapObjIndexed without the final argument. This returns the original key with the mapped value. Use map_key_values to modify the keys too :param f: Called with a key and value :param dct: :return {dict}: Keyed by the original key, valued by the mapped value
entailment
def map_keys(f, dct): """ Calls f with each key of dct, possibly returning a modified key. Values are unchanged :param f: Called with each key and returns the same key or a modified key :param dct: :return: A dct with keys possibly modified but values unchanged """ f_dict = {} for k, v in dct.items(): f_dict[f(k)] = v return f_dict
Calls f with each key of dct, possibly returning a modified key. Values are unchanged :param f: Called with each key and returns the same key or a modified key :param dct: :return: A dct with keys possibly modified but values unchanged
entailment
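A one-line sketch:
map_keys(lambda k: k.upper(), {"a": 1, "b": 2})  # {"A": 1, "B": 2}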
def map_keys_with_obj(f, dct): """ Calls f with each key and value of dct, possibly returning a modified key. Values are unchanged :param f: Called with each key and value and returns the same key or a modified key :param dct: :return: A dct with keys possibly modified but values unchanged """ f_dict = {} for k, v in dct.items(): f_dict[f(k, v)] = v return f_dict
Calls f with each key and value of dct, possibly returning a modified key. Values are unchanged :param f: Called with each key and value and returns the same key or a modified key :param dct: :return: A dct with keys possibly modified but values unchanged
entailment
def merge_deep(dct1, dct2, merger=None): """ Deep merge by this spec below :param dct1: :param dct2: :param merger Optional merger :return: """ my_merger = merger or Merger( # pass in a list of tuples,with the # strategies you are looking to apply # to each type. [ (list, ["append"]), (dict, ["merge"]) ], # next, choose the fallback strategies, # applied to all other types: ["override"], # finally, choose the strategies in # the case where the types conflict: ["override"] ) return my_merger.merge(dct1, dct2)
Deep merge by this spec below :param dct1: :param dct2: :param merger Optional merger :return:
entailment
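A sketch of the default strategies above (lists append, dicts merge, scalars override; assumes deepmerge's Merger as referenced in the body):
merge_deep({"a": [1], "b": {"c": 1}, "d": 1}, {"a": [2], "b": {"e": 2}, "d": 2})
# {"a": [1, 2], "b": {"c": 1, "e": 2}, "d": 2}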
def merge_all(dcts): """ Shallow merge all the dcts :param dcts: :return: """ return reduce( lambda accum, dct: merge(accum, dct), dict(), dcts )
Shallow merge all the dcts :param dcts: :return:
entailment
def from_pairs_to_array_values(pairs): """ Like from pairs but combines duplicate key values into arrays :param pairs: :return: """ result = {} for pair in pairs: result[pair[0]] = concat(prop_or([], pair[0], result), [pair[1]]) return result
Like from pairs but combines duplicate key values into arrays :param pairs: :return:
entailment
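A short sketch of the duplicate-key grouping (assumes prop_or and concat from this module):
from_pairs_to_array_values([("a", 1), ("a", 2), ("b", 3)])
# {"a": [1, 2], "b": [3]}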
def map_prop_value_as_index(prp, lst): """ Returns a dict keyed by the given prop of each item in the list :param prp: :param lst: :return: """ return from_pairs(map(lambda item: (prop(prp, item), item), lst))
Returns a dict keyed by the given prop of each item in the list :param prp: :param lst: :return:
entailment
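A short sketch (assumes from_pairs and prop from this module):
map_prop_value_as_index("id", [{"id": 1, "name": "a"}, {"id": 2, "name": "b"}])
# {1: {"id": 1, "name": "a"}, 2: {"id": 2, "name": "b"}}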
def key_string_to_lens_path(key_string): """ Converts a key string like 'foo.bar.0.wopper' to ['foo', 'bar', 0, 'wopper'] :param {String} keyString The dot-separated key string :return {[String]} The lens array containing string or integers """ return map( if_else( isinstance(int), # convert to int lambda s: int(s), # Leave the string alone identity ), key_string.split('.') )
Converts a key string like 'foo.bar.0.wopper' to ['foo', 'bar', 0, 'wopper'] :param {String} keyString The dot-separated key string :return {[String]} The lens array containing string or integers
entailment
def fake_lens_path_view(lens_path, obj): """ Simulates R.view with a lens_path since we don't have lens functions :param lens_path: Array of string paths :param obj: Object containing the given path :return: The value at the path or None """ segment = head(lens_path) return if_else( both(lambda _: identity(segment), has(segment)), # Recurse on the rest of the path compose(fake_lens_path_view(tail(lens_path)), getitem(segment)), # Give up lambda _: None )(obj)
Simulates R.view with a lens_path since we don't have lens functions :param lens_path: Array of string paths :param obj: Object containing the given path :return: The value at the path or None
entailment
def fake_lens_path_set(lens_path, value, obj): """ Simulates R.set with a lens_path since we don't have lens functions :param lens_path: Array of string paths :param value: The value to set at the lens path :param obj: Object containing the given path :return: A copy of obj with the value set at the lens path """ segment = head(lens_path) obj_copy = copy.copy(obj) def set_array_index(i, v, l): # Fill the array with None up to the given index and set the index to v try: l[i] = v except IndexError: for _ in range(i - len(l) + 1): l.append(None) l[i] = v if not (length(lens_path) - 1): # Done new_value = value else: # Find the value at the path or create a {} or [] at obj[segment] found_or_created = item_path_or( if_else( lambda segment: segment.isnumeric(), always([]), always({}) )(head(tail(lens_path))), segment, obj ) # Recurse on the rest of the path new_value = fake_lens_path_set(tail(lens_path), value, found_or_created) # Set or replace if segment.isnumeric(): set_array_index(int(segment), new_value, obj_copy) else: obj_copy[segment] = new_value return obj_copy
Simulates R.set with a lens_path since we don't have lens functions :param lens_path: Array of string paths :param value: The value to set at the lens path :param obj: Object containing the given path :return: A copy of obj with the value set at the lens path
entailment
def unflatten_dct(obj): """ Undoes the work of flatten_dict :param obj: 1-D dict in the form returned by flatten_dict :return: The original nested dict """ def reduce_func(accum, key_string_and_value): key_string = key_string_and_value[0] value = key_string_and_value[1] item_key_path = key_string_to_lens_path(key_string) # All but the last segment gives us the item container len container_key_path = init(item_key_path) container = unless( # If the path has any length (not []) and the value is set, don't do anything both(always(length(container_key_path)), fake_lens_path_view(container_key_path)), # Else we are at the top level, so use the existing accum or create a [] or {} # depending on if our item key is a number or not lambda x: default_to( if_else( lambda segment: segment.isnumeric(), always([]), always({}) )(head(item_key_path)) )(x) )(accum) # Finally set the container at the itemLensPath return fake_lens_path_set( item_key_path, value, container ) return compose( reduce( reduce_func, # null initial value None ), to_pairs )(obj)
Undoes the work of flatten_dict :param obj: 1-D dict in the form returned by flatten_dict :return: The original nested dict
entailment
def ppj(json_data): """ppj :param json_data: dictionary to print """ return str(json.dumps( json_data, sort_keys=True, indent=4, separators=(',', ': ')))
ppj :param json_data: dictionary to print
entailment
def change_view(self, request, object_id, form_url='', extra_context=None): """ Override change view to add extra context enabling moderate tool. """ context = { 'has_moderate_tool': True } if extra_context: context.update(extra_context) return super(AdminModeratorMixin, self).change_view( request=request, object_id=object_id, form_url=form_url, extra_context=context )
Override change view to add extra context enabling moderate tool.
entailment
def get_urls(self): """ Add additional moderate url. """ from django.conf.urls import url urls = super(AdminModeratorMixin, self).get_urls() info = self.model._meta.app_label, self.model._meta.model_name return [ url(r'^(.+)/moderate/$', self.admin_site.admin_view(self.moderate_view), name='%s_%s_moderate' % info), ] + urls
Add additional moderate url.
entailment
def render(self, indent=0): """ Renders a HttpResponse for the ongoing request :param indent int :rtype: HttpResponse """ self.__indent = indent return HttpResponse( str(self), content_type=self.__content_type, charset=self.__charset, **self.__kwargs )
Renders a HttpResponse for the ongoing request :param indent int :rtype: HttpResponse
entailment
def setup_logging(default_path='logging.yaml', env_key='LOG_CFG'): """ Setup logging configuration """ path = default_path value = os.getenv(env_key, None) if value: path = value if os.path.exists(path): with open(path, 'rt') as f: configs = yaml.safe_load(f.read()) logging.config.dictConfig(configs) else: logging.config.dictConfig(config)
Setup logging configuration
entailment
def get(self, route: str(), callback: object()): """ Binds a GET route with the given callback :rtype: object """ self.__set_route('get', {route: callback}) return RouteMapping
Binds a GET route with the given callback :rtype: object
entailment
def post(self, route: str(), callback: object()): """ Binds a POST route with the given callback :rtype: object """ self.__set_route('post', {route: callback}) return RouteMapping
Binds a POST route with the given callback :rtype: object
entailment
def put(self, route: str(), callback: object()): """ Binds a PUT route with the given callback :rtype: object """ self.__set_route('put', {route: callback}) return RouteMapping
Binds a PUT route with the given callback :rtype: object
entailment
def patch(self, route: str(), callback: object()): """ Binds a PATCH route with the given callback :rtype: object """ self.__set_route('patch', {route: callback}) return RouteMapping
Binds a PATCH route with the given callback :rtype: object
entailment
def delete(self, route: str(), callback: object()): """ Binds a DELETE route with the given callback :rtype: object """ self.__set_route('delete', {route: callback}) return RouteMapping
Binds a DELETE route with the given callback :rtype: object
entailment
def head(self, route: str(), callback: object()): """ Binds a HEAD route with the given callback :rtype: object """ self.__set_route('head', {route: callback}) return RouteMapping
Binds a HEAD route with the given callback :rtype: object
entailment
def options(self, route: str(), callback: object()): """ Binds an OPTIONS route with the given callback :rtype: object """ self.__set_route('options', {route: callback}) return RouteMapping
Binds an OPTIONS route with the given callback :rtype: object
entailment
def __set_route(self, type_route, route): """ Sets the given type_route and route to the route mapping :rtype: object """ if type_route in self.__routes: if not self.verify_route_already_bound(type_route, route): self.__routes[type_route].append(route) else: self.__routes[type_route] = [route] return RouteMapping
Sets the given type_route and route to the route mapping :rtype: object
entailment
def operating_system(): """Return a string identifying the operating system the application is running on. :rtype: str """ if platform.system() == 'Darwin': return 'OS X Version %s' % platform.mac_ver()[0] distribution = ' '.join(platform.linux_distribution()).strip() os_platform = platform.platform(True, True) if distribution: os_platform += ' (%s)' % distribution return os_platform
Return a string identifying the operating system the application is running on. :rtype: str
entailment
def start(self): """Daemonize if the process is not already running.""" if self._is_already_running(): LOGGER.error('Is already running') sys.exit(1) try: self._daemonize() self.controller.start() except Exception as error: sys.stderr.write('\nERROR: Startup of %s Failed.\n' % sys.argv[0].split('/')[-1]) exception_log = self._get_exception_log_path() if exception_log: with open(exception_log, 'a') as handle: timestamp = datetime.datetime.now().isoformat() handle.write('{:->80}\n'.format(' [START]')) handle.write('%s Exception [%s]\n' % (sys.argv[0], timestamp)) handle.write('{:->80}\n'.format(' [INFO]')) handle.write('Interpreter: %s\n' % sys.executable) handle.write('CLI arguments: %s\n' % ' '.join(sys.argv)) handle.write('Exception: %s\n' % error) handle.write('Traceback:\n') output = traceback.format_exception(*sys.exc_info()) _dev_null = [(handle.write(line), sys.stdout.write(line)) for line in output] handle.write('{:->80}\n'.format(' [END]')) handle.flush() sys.stderr.write('\nException log: %s\n\n' % exception_log) sys.exit(1)
Daemonize if the process is not already running.
entailment