code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def my_application(api):
    """An example application.

    - Registers a webhook with mbed cloud services
    - Requests the value of a resource
    - Prints the value when it arrives

    NOTE(review): the ``finally`` clause always calls ``exit(1)``, so the
    ``return result`` never reaches a caller — deliberate for this demo,
    which tears everything down on completion.
    """
    target_device = api.list_connected_devices().first()
    print('using device #', target_device.id)
    # Start from a clean slate: drop any stale subscriptions for the device.
    api.delete_device_subscriptions(target_device.id)
    try:
        print('setting webhook url to:', ngrok_url)
        api.update_webhook(ngrok_url)
        print('requesting resource value for:', resource_path)
        async_request = api.get_resource_value_async(
            device_id=target_device.id, resource_path=resource_path
        )
        print('waiting for async #', async_request.async_id)
        payload = async_request.wait(15)
        print('webhook sent us this payload value:', repr(payload))
        return payload
    except Exception:
        print(traceback.format_exc())
    finally:
        api.delete_webhook()
        print("Deregistered and unsubscribed from all resources. Exiting.")
        exit(1)
def webhook_handler(request):
    """Receives the webhook from mbed cloud services.

    Passes the raw http body directly to mbed sdk, to notify that a
    webhook was received.
    """
    raw_bytes = request.stream.read()
    decoded = raw_bytes.decode('utf8')
    print('webhook handler saw:', decoded)
    api.notify_webhook_received(payload=decoded)
    # nb. protected references are not part of the API.
    # this is just to demonstrate that the asyncid is stored
    print('key store contains:', api._db.keys())
def start_sequence():
    """Start the demo sequence.

    We must start this thread in the same process as the webserver to be
    certain we are sharing the api instance in memory. (ideally in future
    the async id database will be capable of being more than just a
    dictionary)
    """
    print('getting started!...')
    worker = threading.Thread(target=my_application, kwargs={'api': api})
    # Daemonize so the demo thread dies with the webserver process.
    worker.daemon = True
    worker.start()
    return 'ok, starting webhook to: %s' % (ngrok_url,)
def create_async_request(self, device_id, async_id, body, **kwargs):  # noqa: E501
    """Send an async request to device.

    Asynchronously call a method on a device; the client-supplied
    `async-id` lets the caller match the eventual **AsyncIDResponse**
    delivered over the notification channel. Synchronous HTTP by
    default; pass ``asynchronous=True`` to get the request thread back.

    :param asynchronous bool
    :param str device_id: The device ID generated by Device Management. (required)
    :param str async_id: The client-generated ID for matching the correct response delivered via a notification. (required)
    :param DeviceRequest body: Device request to send. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Callers of this wrapper want the payload only, never the raw HTTP triple.
    kwargs['_return_http_data_only'] = True
    # The worker returns a thread when 'asynchronous' is in kwargs and the
    # unwrapped data otherwise, so one delegation covers both modes.
    return self.create_async_request_with_http_info(device_id, async_id, body, **kwargs)  # noqa: E501
def new_preload():
    """Job running prior to builds - fetches TestRunner image"""
    testrunner_image = get_testrunner_image()
    # 'registry/name:tag' -> drop the tag, then take the last path segment.
    # NOTE(review): local_image and version_file are unused in the visible
    # code — presumably consumed by template substitution lost in extraction.
    local_image = testrunner_image.rsplit(':')[-2].rsplit('/')[-1]
    version_file = 'testrunner_version.txt'
    # NOTE(review): `f` is not defined in this view — a template file handle
    # (likely a `with open(...) as f:` line) appears to be missing; confirm
    # against the original source before relying on this block.
    template = yaml.safe_load(f)
    return 'preload', template
def new_build(py_ver: PyVer):
    """Job for building/caching different docker images"""
    # Per-python-version image tarball and its cache location/key.
    cache_file = f'app_{py_ver.name}.tar'
    cache_path = f'{cache_dir}/{cache_file}'
    # '{{{{ .Branch }}}}' renders to the literal '{{ .Branch }}' that
    # CircleCI expands at runtime.
    cache_key = f'v3-{py_ver.name}-{{{{ .Branch }}}}'
    # NOTE(review): `f` is not defined in this view — a template file handle
    # (likely a `with open(...) as f:` line) appears to be missing; confirm
    # against the original source before relying on this block.
    template = yaml.safe_load(f)
    return build_name(py_ver), template
def new_deploy(py_ver: PyVer, release_target: ReleaseTarget):
    """Job for deploying package to pypi"""
    # NOTE(review): cache_file is unused in the visible code — presumably
    # interpolated into the yaml template lost in extraction.
    cache_file = f'app_{py_ver.name}.tar'
    # NOTE(review): `f` is not defined in this view — a template file handle
    # (likely a `with open(...) as f:` line) appears to be missing; confirm
    # against the original source before relying on this block.
    template = yaml.safe_load(f)
    return deploy_name(py_ver, release_target), template
def generate_circle_output():
    """Build sequence for Circle CI 2.0 config.yml.

    Builds the circleci structure and links individual jobs into a
    workflow graph (nodes are job identifiers; edges carry dependency
    and gating parameters).

    :return: plain dict with the complete circle config structure
    """
    base = new_base()
    workflow = networkx.DiGraph()
    LOG.info('%s python versions', len(python_versions))
    LOG.info('%s mbed cloud hosts', len(mbed_cloud_hosts))

    job, content = new_tpip()
    base['jobs'].update({job: content})
    workflow.add_node(job)

    job, content = new_newscheck()
    base['jobs'].update({job: content})
    workflow.add_node(
        job,
        workflow=dict(
            filters=dict(
                branches=dict(
                    # we ignore this check for builds directly on the base branches
                    ignore=['master', 'integration']
                )
            )
        )
    )

    job, content = new_build_documentation()
    base['jobs'].update({job: content})
    workflow.add_node(
        job,
        workflow=dict(
            requires=['build_py2', 'build_py3'],
            filters=dict(
                branches=dict(
                    # Only update the documentation on release branches
                    only=['master', 'beta']
                )
            )
        )
    )

    preload_job, content = new_preload()
    base['jobs'].update({preload_job: content})
    # FIX: was `workflow.add_node(job)`, which re-added the stale
    # documentation job instead of registering the preload job.
    workflow.add_node(preload_job)

    # Expand build/test jobs as the product of python versions and hosts.
    for py_ver in python_versions.values():
        build_job, build_content = new_build(py_ver=py_ver)
        base['jobs'].update({build_job: build_content})
        workflow.add_edge(preload_job, build_job)
        for cloud_host in mbed_cloud_hosts.values():
            test_job, test_content = new_test(py_ver=py_ver, cloud_host=cloud_host)
            base['jobs'].update({test_job: test_content})
            workflow.add_edge(build_job, test_job)

    for release_target in release_target_map.values():
        deploy_job, deploy_content = new_deploy(
            py_ver=python_versions['three'], release_target=release_target
        )
        base['jobs'].update({deploy_job: deploy_content})

    # wire up the release gates (clicky buttons)
    workflow.add_edge(
        test_name(python_versions['three'], mbed_cloud_hosts['osii']),
        release_name(release_target_map['beta']),
        type='approval',
    )
    workflow.add_edge(
        test_name(python_versions['three'], mbed_cloud_hosts['production']),
        release_name(release_target_map['prod']),
        type='approval',
        # we only want to deploy in certain conditions
        filters=dict(branches=dict(only='master')),
    )
    workflow.add_edge(
        test_name(python_versions['two'], mbed_cloud_hosts['production']),
        release_name(release_target_map['prod']),
    )
    workflow.add_edge(
        release_name(release_target_map['beta']),
        deploy_name(python_versions['three'], release_target_map['beta'])
    )
    workflow.add_edge(
        release_name(release_target_map['prod']),
        deploy_name(python_versions['three'], release_target_map['prod'])
    )

    workflow_jobs = base['workflows']['python_sdk_workflow']['jobs']
    # build the workflow graph: emit jobs in dependency order, folding
    # per-node config and per-edge parameters into each job's entry
    for job_name in networkx.topological_sort(workflow):
        job_config = {}
        per_node_config = workflow.nodes[job_name].get('workflow')
        if per_node_config:
            job_config.update(per_node_config)
        workflow_jobs.append({job_name: job_config})
        for edge in workflow.in_edges(job_name):
            job_config.update(workflow.get_edge_data(*edge))
            job_config.setdefault('requires', []).append(edge[0])

    LOG.info('%s circle jobs', len(base['jobs']))
    return dict(base)
def generate_docker_file(py_ver: PyVer):
    """Templated docker files.

    Renders the Dockerfile template for the given python version.
    """
    template_path = os.path.join(script_templates_root, 'Dockerfile')
    with open(template_path) as template_fh:
        template = template_fh.read()
    return template.format(py_ver=py_ver, author=author_file)
def generate_docker_targets():
    """Write all templated container engine files.

    Maps output file paths to rendered Dockerfile / docker-compose
    content, one pair per configured python version.
    """
    targets = {}
    for version in python_versions.values():
        for file_name, render in (
            (version.docker_file, generate_docker_file),
            (version.compose_file, generate_compose_file),
        ):
            targets[os.path.join(container_config_root, file_name)] = render(version)
    return targets
def main(output_path=None):
    """Writes out new python build system.

    Needed because CircleCI does not support build matrices nor
    parameterisation of cache paths; this expands the job matrix, then
    writes config.yml (validating the yaml as we go) plus templated
    Docker and docker-compose files per python version.

    :param output_path: optional override for the config.yml location
    """
    config_output_file = output_path or os.path.join(PROJECT_ROOT, '.circleci', 'config.yml')
    yaml_structure = generate_circle_output()
    yaml_content = yaml.safe_dump(data=yaml_structure, default_flow_style=False)
    # Warn human readers away from hand-editing the generated file.
    header = (
        '#\n'
        '# This file is autogenerated, do not modify manually. '
        f'See {author_file} for instructions.\n'
        '#\n'
    )
    with open(config_output_file, 'w') as config_fh:
        config_fh.write(header + yaml_content)
    for target_path, rendered in generate_docker_targets().items():
        LOG.info('writing %s', target_path)
        with open(target_path, 'w') as target_fh:
            target_fh.write(rendered)
def firmware_image_create(self, datafile, name, **kwargs):  # noqa: E501
    """Create an image.

    Create a firmware image. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.

    :param asynchronous bool
    :param file datafile: The firmware image file to upload (required)
    :param str name: The name of the firmware image (required)
    :param str description: The description of the firmware image
    :return: FirmwareImage
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.firmware_image_create_with_http_info(datafile, name, **kwargs)  # noqa: E501
def firmware_image_destroy(self, image_id, **kwargs):  # noqa: E501
    """Delete an image.

    Delete a firmware image. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.

    :param asynchronous bool
    :param str image_id: The firmware image ID (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.firmware_image_destroy_with_http_info(image_id, **kwargs)  # noqa: E501
def firmware_image_retrieve(self, image_id, **kwargs):  # noqa: E501
    """Get an image.

    Retrieve a firmware image. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.

    :param asynchronous bool
    :param str image_id: The firmware image ID (required)
    :return: FirmwareImage
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.firmware_image_retrieve_with_http_info(image_id, **kwargs)  # noqa: E501
def firmware_manifest_create(self, datafile, name, **kwargs):  # noqa: E501
    """Create a manifest.

    Create a firmware manifest. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.

    :param asynchronous bool
    :param file datafile: The manifest file to create. The API gateway enforces the account-specific file size. (required)
    :param str name: The name of the firmware manifest (required)
    :param str description: The description of the firmware manifest
    :param file key_table: The key table of pre-shared keys for devices
    :return: FirmwareManifest
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.firmware_manifest_create_with_http_info(datafile, name, **kwargs)  # noqa: E501
def firmware_manifest_destroy(self, manifest_id, **kwargs):  # noqa: E501
    """Delete a manifest.

    Delete a firmware manifest. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.

    :param asynchronous bool
    :param str manifest_id: The firmware manifest ID (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.firmware_manifest_destroy_with_http_info(manifest_id, **kwargs)  # noqa: E501
def firmware_manifest_retrieve(self, manifest_id, **kwargs):  # noqa: E501
    """Get a manifest.

    Retrieve a firmware manifest. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.

    :param asynchronous bool
    :param str manifest_id: The firmware manifest ID (required)
    :return: FirmwareManifest
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.firmware_manifest_retrieve_with_http_info(manifest_id, **kwargs)  # noqa: E501
def update_campaign_archive(self, campaign_id, **kwargs):  # noqa: E501
    """Archive a campaign.

    This command will archive a campaign. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.

    :param asynchronous bool
    :param str campaign_id: The campaign ID (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.update_campaign_archive_with_http_info(campaign_id, **kwargs)  # noqa: E501
def update_campaign_create(self, campaign, **kwargs):  # noqa: E501
    """Create a campaign.

    Create an update campaign. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.

    :param asynchronous bool
    :param UpdateCampaignPostRequest campaign: Update campaign (required)
    :return: UpdateCampaign
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.update_campaign_create_with_http_info(campaign, **kwargs)  # noqa: E501
def update_campaign_destroy(self, campaign_id, **kwargs):  # noqa: E501
    """Delete a campaign.

    Delete an update campaign. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.

    :param asynchronous bool
    :param str campaign_id: The ID of the update campaign (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.update_campaign_destroy_with_http_info(campaign_id, **kwargs)  # noqa: E501
def update_campaign_metadata_list(self, campaign_id, **kwargs):  # noqa: E501
    """List all campaign device metadata.

    Get campaign device metadata. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.

    :param asynchronous bool
    :param str campaign_id: The update campaign ID (required)
    :param int limit: How many objects to retrieve in the page
    :param str order: ASC or DESC
    :param str after: The ID of the the item after which to retrieve the next page
    :param str include: A comma-separated list of data fields to return. Currently supported: total_count
    :return: CampaignDeviceMetadataPage
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.update_campaign_metadata_list_with_http_info(campaign_id, **kwargs)  # noqa: E501
def update_campaign_metadata_retrieve(self, campaign_id, campaign_device_metadata_id, **kwargs):  # noqa: E501
    """Get a campaign device metadata.

    Get update campaign metadata. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.

    :param asynchronous bool
    :param str campaign_id: The update campaign ID (required)
    :param str campaign_device_metadata_id: The campaign device metadata ID (required)
    :return: CampaignDeviceMetadata
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.update_campaign_metadata_retrieve_with_http_info(campaign_id, campaign_device_metadata_id, **kwargs)  # noqa: E501
def update_campaign_metrics(self, campaign_id, **kwargs):  # noqa: E501
    """Get campaign metrics.

    Get detailed statistics of a campaign. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.

    :param asynchronous bool
    :param str campaign_id: The campaign ID (required)
    :return: CampaignMetrics
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.update_campaign_metrics_with_http_info(campaign_id, **kwargs)  # noqa: E501
def update_campaign_retrieve(self, campaign_id, **kwargs):  # noqa: E501
    """Get a campaign.

    Get an update campaign. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.

    :param asynchronous bool
    :param str campaign_id: The campaign ID (required)
    :return: UpdateCampaign
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.update_campaign_retrieve_with_http_info(campaign_id, **kwargs)  # noqa: E501
def update_campaign_start(self, campaign_id, **kwargs):  # noqa: E501
    """Start a campaign.

    This command will begin the process of starting a campaign.
    Synchronous by default; pass ``asynchronous=True`` to get the
    request thread back instead.

    :param asynchronous bool
    :param str campaign_id: The campaign ID (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.update_campaign_start_with_http_info(campaign_id, **kwargs)  # noqa: E501
def update_campaign_stop(self, campaign_id, **kwargs):  # noqa: E501
    """Stop a campaign.

    This command will begin the process of stopping a campaign.
    Synchronous by default; pass ``asynchronous=True`` to get the
    request thread back instead.

    :param asynchronous bool
    :param str campaign_id: The campaign ID (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.update_campaign_stop_with_http_info(campaign_id, **kwargs)  # noqa: E501
def update_campaign_update(self, campaign_id, campaign, **kwargs):  # noqa: E501
    """Modify a campaign.

    Modify an update campaign. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.

    :param asynchronous bool
    :param str campaign_id: (required)
    :param UpdateCampaignPutRequest campaign: Update campaign (required)
    :return: UpdateCampaign
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.update_campaign_update_with_http_info(campaign_id, campaign, **kwargs)  # noqa: E501
def upload_job_chunk_create(self, content_length, upload_job_id, **kwargs):  # noqa: E501
    """Append a chunks to an upload job.

    Append a chunks to an upload job. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.

    :param asynchronous bool
    :param int content_length: (required)
    :param str upload_job_id: Upload job ID (required)
    :param str content_md5:
    :param str chunk: Chunk
    :return: UploadChunkInfo
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.upload_job_chunk_create_with_http_info(content_length, upload_job_id, **kwargs)  # noqa: E501
def upload_job_chunk_retreive(self, upload_job_id, chunk_id, **kwargs):  # noqa: E501
    """Get metadata about a chunk.

    Get metadata about a chunk. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.
    (NOTE: 'retreive' spelling is part of the generated public API.)

    :param asynchronous bool
    :param str upload_job_id: Upload job (required)
    :param str chunk_id: Chunk (required)
    :return: UploadChunkInfo
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.upload_job_chunk_retreive_with_http_info(upload_job_id, chunk_id, **kwargs)  # noqa: E501
def upload_job_create(self, upload_job, **kwargs):  # noqa: E501
    """Create a new upload job.

    Create a new upload job. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.

    :param asynchronous bool
    :param UploadJob upload_job: Upload job (required)
    :return: UploadJob
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.upload_job_create_with_http_info(upload_job, **kwargs)  # noqa: E501
def upload_job_delete(self, upload_job_id, **kwargs):  # noqa: E501
    """Delete an upload job.

    Delete an upload job. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.

    :param asynchronous bool
    :param str upload_job_id: Upload job (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.upload_job_delete_with_http_info(upload_job_id, **kwargs)  # noqa: E501
def upload_job_retrieve(self, upload_job_id, **kwargs):  # noqa: E501
    """Get an upload job.

    Get an upload job. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.

    :param asynchronous bool
    :param str upload_job_id: Upload job (required)
    :return: UploadJob
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.upload_job_retrieve_with_http_info(upload_job_id, **kwargs)  # noqa: E501
def upload_job_update(self, upload_job_id, upload_job, **kwargs):  # noqa: E501
    """Update an upload job.

    Update an upload job. Synchronous by default; pass
    ``asynchronous=True`` to get the request thread back instead.

    :param asynchronous bool
    :param str upload_job_id: Upload job id (required)
    :param UploadJob1 upload_job: Upload job (required)
    :return: UploadJob
        If the method is called asynchronously, returns the request thread.
    """
    # Callers want the payload only, never the raw HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Worker returns a thread when 'asynchronous' is set, data otherwise.
    return self.upload_job_update_with_http_info(upload_job_id, upload_job, **kwargs)  # noqa: E501
def api_key(self, api_key):
    """Sets the api_key of this GlobalSignCredentials.

    Unique ID for API client (provided by GlobalSign).

    :param api_key: The api_key of this GlobalSignCredentials.
    :type: str
    :raises ValueError: if api_key is None or longer than 1000 chars
    """
    # Guard clauses: required value, then server-enforced length limit.
    if api_key is None:
        raise ValueError("Invalid value for `api_key`, must not be `None`")
    if len(api_key) > 1000:
        raise ValueError("Invalid value for `api_key`, length must be less than or equal to `1000`")
    self._api_key = api_key
def api_secret(self, api_secret):
    """Sets the api_secret of this GlobalSignCredentials.

    API Secret matching the API key (provided by GlobalSign).

    :param api_secret: The api_secret of this GlobalSignCredentials.
    :type: str
    :raises ValueError: if api_secret is None or longer than 250 chars
    """
    # Guard clauses: required value, then server-enforced length limit.
    if api_secret is None:
        raise ValueError("Invalid value for `api_secret`, must not be `None`")
    if len(api_secret) > 250:
        raise ValueError("Invalid value for `api_secret`, length must be less than or equal to `250`")
    self._api_secret = api_secret
def client_certificate(self, client_certificate):
    """Sets the client_certificate of this GlobalSignCredentials.

    The client certificate provided by GlobalSign to allow HTTPS
    connection over TLS/SSL. The certificate wraps a public key that
    matches a private key provided by the customer. The certificate
    must be in PEM format.

    :param client_certificate: The client_certificate of this GlobalSignCredentials.
    :type: str
    :raises ValueError: if client_certificate is None or longer than 3000 chars
    """
    # Guard clauses: required value, then server-enforced length limit.
    if client_certificate is None:
        raise ValueError("Invalid value for `client_certificate`, must not be `None`")
    if len(client_certificate) > 3000:
        raise ValueError("Invalid value for `client_certificate`, length must be less than or equal to `3000`")
    self._client_certificate = client_certificate
def passphrase(self, passphrase):
    """Sets the passphrase of this GlobalSignCredentials.

    The passphrase to decrypt the private key in case it is encrypted.
    Empty if the private key is not encrypted.

    :param passphrase: The passphrase of this GlobalSignCredentials.
    :type: str
    :raises ValueError: if passphrase is longer than 1000 chars
    """
    # Unlike the other credential fields, None is allowed here (the
    # private key may be unencrypted); only the length is validated.
    if passphrase is not None and len(passphrase) > 1000:
        raise ValueError("Invalid value for `passphrase`, length must be less than or equal to `1000`")
    self._passphrase = passphrase
if private_key is None: raise ValueError("Invalid value for `private_key`, must not be `None`") if private_key is not None and len(private_key) > 3000: raise ValueError("Invalid value for `private_key`, length must be less than or equal to `3000`") self._private_key = private_key
def private_key(self, private_key)
Sets the private_key of this GlobalSignCredentials. The private key that matches the client certificate to allow HTTPS connection over TLS/SSL. The private key may be encrypted using a symmetric encryption key derived from a passphrase. The private key must be in PEM format. :param private_key: The private_key of this GlobalSignCredentials. :type: str
1.796835
1.766787
1.017007
path = url.split('github.com', 1)[1][1:].strip() new = 'https://{GITHUB_TOKEN}@github.com/%s' % path print('rewriting git url to: %s' % new) return new.format(GITHUB_TOKEN=os.getenv('GITHUB_TOKEN'))
def git_url_ssh_to_https(url)
Convert a git url url will look like https://github.com/ARMmbed/mbed-cloud-sdk-python.git or git@github.com:ARMmbed/mbed-cloud-sdk-python.git we want: https://${GITHUB_TOKEN}@github.com/ARMmbed/mbed-cloud-sdk-python-private.git
4.062733
3.872268
1.049187
# see: # https://packaging.python.org/tutorials/distributing-packages/#uploading-your-project-to-pypi version = subprocess.check_output(['python', 'setup.py', '--version']).decode().strip() twine_repo = os.getenv('TWINE_REPOSITORY_URL') or os.getenv('TWINE_REPOSITORY') print('tagging and releasing %r as a %s release to %s as Twine user %s' % ( version, mode.name, twine_repo, os.getenv('TWINE_USERNAME') )) print('python - preparing environment') subprocess.check_call(['apk', 'update']) subprocess.check_call(['apk', 'add', 'git']) # Pin twine to 1.11 due to https://github.com/pypa/twine/issues/404 subprocess.check_call(['pip', 'install', 'twine<1.12']) url = subprocess.check_output(['git', 'remote', 'get-url', 'origin']) new_url = git_url_ssh_to_https(url.decode()) subprocess.check_call(['git', 'remote', 'set-url', 'origin', new_url]) branch_spec = 'origin/%s' % os.getenv('CIRCLE_BRANCH') subprocess.check_call(['git', 'branch', '--set-upstream-to', branch_spec]) subprocess.check_call(['git', 'fetch', '--tags', '--force']) # tags subprocess.check_call(['git', 'tag', '-a', version, '-m', 'release %s' % version]) subprocess.check_call(['git', 'tag', '-f', 'latest']) if mode == release_target_map['prod']: print('git - pushing %s tags' % mode.name) subprocess.check_call(['git', 'push', '-f', 'origin', '--tags']) print('git - add changes') subprocess.check_call(['git', 'add', 'src/mbed_cloud/_version.py']) subprocess.check_call(['git', 'add', 'CHANGELOG.rst']) subprocess.check_call(['git', 'add', 'docs/news/*']) message = ':checkered_flag: :newspaper: releasing version %s\n[skip ci]' % version subprocess.check_call(['git', 'commit', '-m', message]) if mode == release_target_map['prod']: print('git - pushing %s changelog commit' % mode.name) subprocess.check_call(['git', 'push', 'origin']) print('pypi - uploading') subprocess.check_call(['python', '-m', 'twine', 'upload', mode.bundle]) print('pypi - uploading successful')
def main(mode)
Tags the current repository and commits changes to news files :param mode: ReleaseTarget mode (i.e. beta or prod) :type mode: ReleaseTarget
2.950752
2.871835
1.02748
zipper = zipfile.ZipFile(os.path.join(output, 'unicodedata', '%s.zip' % version), 'w', zipfile.ZIP_DEFLATED) target = os.path.join(output, 'unicodedata', version) print('Zipping %s.zip...' % version) for root, dirs, files in os.walk(target): for file in files: if file.endswith('.txt'): zipper.write(os.path.join(root, file), arcname=file)
def zip_unicode(output, version)
Zip the Unicode files.
2.211454
2.104726
1.050709
unzipper = zipfile.ZipFile(os.path.join(output, 'unicodedata', '%s.zip' % version)) target = os.path.join(output, 'unicodedata', version) print('Unzipping %s.zip...' % version) os.makedirs(target) for f in unzipper.namelist(): # Do I need backslash on windows? Or is it forward as well? unzipper.extract(f, target)
def unzip_unicode(output, version)
Unzip the Unicode files.
4.063655
3.950908
1.028537
files = [ 'UnicodeData.txt', 'Scripts.txt', 'Blocks.txt', 'PropList.txt', 'DerivedCoreProperties.txt', 'DerivedNormalizationProps.txt', 'CompositionExclusions.txt', 'PropertyValueAliases.txt', 'PropertyAliases.txt', 'EastAsianWidth.txt', 'LineBreak.txt', 'HangulSyllableType.txt', 'DerivedAge.txt', 'auxiliary/WordBreakProperty.txt', 'auxiliary/SentenceBreakProperty.txt', 'auxiliary/GraphemeBreakProperty.txt', 'extracted/DerivedDecompositionType.txt', 'extracted/DerivedNumericType.txt', 'extracted/DerivedNumericValues.txt', 'extracted/DerivedJoiningType.txt', 'extracted/DerivedJoiningGroup.txt', 'extracted/DerivedCombiningClass.txt' ] files.append('ScriptExtensions.txt') if PY35: files.append('IndicPositionalCategory.txt') else: files.append('IndicMatraCategory.txt') files.append('IndicSyllabicCategory.txt') if PY34: files.append('BidiBrackets.txt') if PY37: files.append('VerticalOrientation.txt') http_url = 'http://www.unicode.org/Public/%s/ucd/' % version ftp_url = 'ftp://ftp.unicode.org/Public/%s/ucd/' % version destination = os.path.join(output, 'unicodedata', version) if not os.path.exists(destination): os.makedirs(destination) zip_data = not no_zip for f in files: file_location = os.path.join(destination, os.path.basename(f)) retrieved = False if not os.path.exists(file_location): for url in (ftp_url, http_url): furl = url + f try: print('Downloading: %s --> %s' % (furl, file_location)) response = urlopen(furl, timeout=30) data = response.read() except Exception: print('Failed: %s' % url) continue with open(file_location, 'w') as uf: uf.write(data.decode('utf-8')) retrieved = True break if not retrieved: print('Failed to acquire all needed Unicode files!') break else: retrieved = True print('Skipping: found %s' % file_location) if not retrieved: zip_data = False break if zip_data and not os.path.exists(os.path.join(output, 'unicodedata', '%s.zip' % version)): zip_unicode(output, version)
def download_unicodedata(version, output=HOME, no_zip=False)
Download Unicode data scripts and blocks.
2.388354
2.366489
1.009239
target = os.path.join(output, 'unicodedata', version) zip_target = os.path.join(output, 'unicodedata', '%s.zip' % version) if not os.path.exists(target) and os.path.exists(zip_target): unzip_unicode(output, version) # Download missing files if any. Zip if required. download_unicodedata(version, output, no_zip)
def get_unicodedata(version, output=HOME, no_zip=False)
Ensure we have Unicode data to generate Unicode tables.
3.434312
3.498692
0.981599
return ReplaceTemplate, (r.groups, r.group_slots, r.literals, r.pattern_hash, r.use_format, r._bytes)
def _pickle(r)
Pickle.
31.913105
31.118004
1.025551
escaped = False in_quotes = False current = [] quoted = [] i = _util.StringIter(text) iter(i) for t in i: if not escaped and t == "\\": escaped = True elif escaped: escaped = False if t == "E": if in_quotes: current.append(_re.escape("".join(quoted))) quoted = [] in_quotes = False elif t == "Q" and not in_quotes: in_quotes = True elif in_quotes: quoted.extend(["\\", t]) else: current.extend(["\\", t]) elif in_quotes: quoted.extend(t) else: current.append(t) if in_quotes and escaped: quoted.append("\\") elif escaped: current.append("\\") if quoted: current.append(_re.escape("".join(quoted))) return "".join(current)
def process_quotes(self, text)
Process quotes.
2.68879
2.607244
1.031277
current = [] escaped = False try: while t != "\n": if not escaped and t == "\\": escaped = True current.append(t) elif escaped: escaped = False if t in self._new_refs: current.append("\\") current.append(t) else: current.append(t) t = next(i) except StopIteration: pass if t == "\n": current.append(t) return current
def verbose_comment(self, t, i)
Handle verbose comments.
3.25794
3.225028
1.010205
global_retry = False if ('a' in text or 'L' in text) and self.unicode: self.unicode = False if not _SCOPED_FLAG_SUPPORT or not scoped: self.temp_global_flag_swap["unicode"] = True global_retry = True elif 'u' in text and not self.unicode and not self.is_bytes: self.unicode = True if not _SCOPED_FLAG_SUPPORT or not scoped: self.temp_global_flag_swap["unicode"] = True global_retry = True if _SCOPED_FLAG_SUPPORT and '-x' in text and self.verbose: self.verbose = False elif 'x' in text and not self.verbose: self.verbose = True if not _SCOPED_FLAG_SUPPORT or not scoped: self.temp_global_flag_swap["verbose"] = True global_retry = True if global_retry: raise GlobalRetryException('Global Retry')
def flags(self, text, scoped=False)
Analyze flags.
3.357909
3.290577
1.020462
index = i.index prop = [] value = [] try: c = next(i) if c.upper() in _ASCII_LETTERS: prop.append(c) elif c != '{': raise SyntaxError("Unicode property missing '{' at %d!" % (i.index - 1)) else: c = next(i) if c == '^': prop.append(c) c = next(i) while c not in (':', '=', '}'): if c not in _PROPERTY: raise SyntaxError('Invalid Unicode property character at %d!' % (i.index - 1)) if c not in _PROPERTY_STRIP: prop.append(c) c = next(i) if c in (':', '='): c = next(i) while c != '}': if c not in _PROPERTY: raise SyntaxError('Invalid Unicode property character at %d!' % (i.index - 1)) if c not in _PROPERTY_STRIP: value.append(c) c = next(i) if not value: raise SyntaxError('Invalid Unicode property!') except StopIteration: raise SyntaxError("Missing or unmatched '{' at %d!" % index) return ''.join(prop).lower(), ''.join(value).lower()
def get_unicode_property(self, i)
Get Unicode property.
2.718476
2.686592
1.011868
index = i.index value = [] try: if next(i) != '{': raise ValueError("Named Unicode missing '{' %d!" % (i.index - 1)) c = next(i) while c != '}': value.append(c) c = next(i) except Exception: raise SyntaxError("Unmatched '{' at %d!" % index) return ''.join(value)
def get_named_unicode(self, i)
Get Unicode name.
4.353153
4.160049
1.046419
current = [] if not in_group and t == "m": current.append(self._re_start_wb) elif not in_group and t == "M": current.append(self._re_end_wb) elif not in_group and t == "R": current.append(self._re_line_break) elif not in_group and t == "X": no_mark = self.unicode_props("^m", None, in_group=False)[0] mark = self.unicode_props("m", None, in_group=False)[0] current.extend(self._grapheme_cluster % (no_mark, mark, mark)) elif t == "e": current.append(self._re_escape) elif t == "l": current.extend(self.letter_case_props(_LOWER, in_group)) self.found_property = True elif t == "L": current.extend(self.letter_case_props(_LOWER, in_group, negate=True)) self.found_property = True elif t == "c": current.extend(self.letter_case_props(_UPPER, in_group)) self.found_property = True elif t == "C": current.extend(self.letter_case_props(_UPPER, in_group, negate=True)) self.found_property = True elif t == 'p': prop = self.get_unicode_property(i) current.extend(self.unicode_props(prop[0], prop[1], in_group=in_group)) if in_group: self.found_property = True elif t == 'P': prop = self.get_unicode_property(i) current.extend(self.unicode_props(prop[0], prop[1], in_group=in_group, negate=True)) if in_group: self.found_property = True elif t == "N": text = self.get_named_unicode(i) current.extend(self.unicode_name(text, in_group)) if in_group: self.found_named_unicode = True else: current.extend(["\\", t]) return current
def reference(self, t, i, in_group=False)
Handle references.
2.378803
2.365128
1.005782
index = i.index value = ['('] escaped = False try: c = next(i) if c != '?': i.rewind(1) return None value.append(c) c = next(i) if c != '#': i.rewind(2) return None value.append(c) c = next(i) while c != ')' or escaped is True: if escaped: escaped = False elif c == '\\': escaped = True value.append(c) c = next(i) value.append(c) except StopIteration: raise SyntaxError("Unmatched '(' at %d!" % (index - 1)) return ''.join(value)
def get_comments(self, i)
Get comments.
3.089822
3.024983
1.021434
if scoped and not _SCOPED_FLAG_SUPPORT: return None index = i.index value = ['('] toggle = False end = ':' if scoped else ')' try: c = next(i) if c != '?': i.rewind(1) return None value.append(c) c = next(i) while c != end: if toggle: if c not in _SCOPED_FLAGS: raise ValueError('Bad scope') toggle = False elif scoped and c == '-': toggle = True elif not _util.PY37 and scoped and c in _GLOBAL_FLAGS: raise ValueError("Bad flag") elif c not in _GLOBAL_FLAGS and c not in _SCOPED_FLAGS: raise ValueError("Bad flag") value.append(c) c = next(i) value.append(c) except Exception: i.rewind(i.index - index) value = [] return ''.join(value) if value else None
def get_flags(self, i, scoped=False)
Get flags.
3.78824
3.734342
1.014433
current = [] # (?flags) flags = self.get_flags(i) if flags: self.flags(flags[2:-1]) return [flags] # (?#comment) comments = self.get_comments(i) if comments: return [comments] verbose = self.verbose unicode_flag = self.unicode # (?flags:pattern) flags = self.get_flags(i, True) if flags: # pragma: no cover t = flags self.flags(flags[2:-1], scoped=True) current = [] try: while t != ')': if not current: current.append(t) else: current.extend(self.normal(t, i)) t = next(i) except StopIteration: pass # Restore flags after group self.verbose = verbose self.unicode = unicode_flag if t == ")": current.append(t) return current
def subgroup(self, t, i)
Handle parenthesis.
4.810045
4.517
1.064876
current = [] pos = i.index - 1 found = False escaped = False first = None found_property = False self.found_property = False self.found_named_unicode = False try: while True: # Prevent POSIX/Unicode class from being part of a range. if self.found_property and t == '-': current.append(_re.escape(t)) pos += 1 t = next(i) self.found_property = False continue else: self.found_property = False if not escaped and t == "\\": escaped = True elif escaped: escaped = False idx = len(current) - 1 current.extend(self.reference(t, i, True)) if self.found_property: # Prevent Unicode class from being part of a range. if idx >= 0 and current[idx] == '-': current[idx] = _re.escape('-') found_property = True elif t == "[" and not found: found = True first = pos current.append(t) elif t == "[": posix = self.get_posix(i) if posix: # Prevent POSIX class from being part of a range. if current[-1] == '-': current[-1] = _re.escape('-') current.extend(self.posix_props(posix, in_group=True)) found_property = True pos = i.index - 2 else: current.append(t) elif t == "^" and found and (pos == first + 1): first = pos current.append(t) elif t == "]" and found and (pos != first + 1): found = False current.append(t) break else: current.append(t) pos += 1 t = next(i) except StopIteration: pass if escaped: current.append(t) # Handle properties that return an empty string. # This will occur when a property's values exceed # either the Unicode char limit on a narrow system, # or the ASCII limit in a byte string pattern. if found_property or self.found_named_unicode: value = "".join(current) if value == '[]': # We specified some properties, but they are all # out of reach. Therefore we can match nothing. current = ['[^%s]' % ('\x00-\xff' if self.is_bytes else _uniprops.UNICODE_RANGE)] elif value == '[^]': current = ['[%s]' % ('\x00-\xff' if self.is_bytes else _uniprops.UNICODE_RANGE)] else: current = [value] return current
def char_groups(self, t, i)
Handle character groups.
3.991078
3.962595
1.007188
current = [] if t == "\\": try: t = next(i) current.extend(self.reference(t, i)) except StopIteration: current.append(t) elif t == "(": current.extend(self.subgroup(t, i)) elif self.verbose and t == "#": current.extend(self.verbose_comment(t, i)) elif t == "[": current.extend(self.char_groups(t, i)) else: current.append(t) return current
def normal(self, t, i)
Handle normal chars.
3.63762
3.26307
1.114785
try: if self.is_bytes or not self.unicode: pattern = _uniprops.get_posix_property( prop, (_uniprops.POSIX_BYTES if self.is_bytes else _uniprops.POSIX) ) else: pattern = _uniprops.get_posix_property(prop, _uniprops.POSIX_UNICODE) except Exception: raise ValueError('Invalid POSIX property!') if not in_group and not pattern: # pragma: no cover pattern = '^%s' % ('\x00-\xff' if self.is_bytes else _uniprops.UNICODE_RANGE) return [pattern]
def posix_props(self, prop, in_group=False)
Insert POSIX properties. Posix style properties are not as forgiving as Unicode properties. Case does matter, and whitespace and '-' and '_' will not be tolerated.
4.81036
4.799873
1.002185
value = ord(_unicodedata.lookup(name)) if (self.is_bytes and value > 0xFF): value = "" if not in_group and value == "": return '[^%s]' % ('\x00-\xff' if self.is_bytes else _uniprops.UNICODE_RANGE) elif value == "": return value else: return ['\\%03o' % value if value <= 0xFF else chr(value)]
def unicode_name(self, name, in_group=False)
Insert Unicode value by its name.
6.302791
6.268315
1.0055
# `'GC = Some_Unpredictable-Category Name' -> 'gc=someunpredictablecategoryname'` category = None # `\p{^negated}` Strip off the caret after evaluation. if props.startswith("^"): negate = not negate if props.startswith("^"): props = props[1:] # Get the property and value. # If a property is present and not block, # we can assume `GC` as that is all we support. # If we are wrong it will fail. if value: if _uniprops.is_enum(props): category = props props = value elif value in ('y', 'yes', 't', 'true'): category = 'binary' elif value in ('n', 'no', 'f', 'false'): category = 'binary' negate = not negate else: raise ValueError('Invalid Unicode property!') v = _uniprops.get_unicode_property(("^" if negate else "") + props, category, self.is_bytes) if not in_group: if not v: v = '^%s' % ('\x00-\xff' if self.is_bytes else _uniprops.UNICODE_RANGE) v = "[%s]" % v properties = [v] return properties
def unicode_props(self, props, value, in_group=False, negate=False)
Insert Unicode properties. Unicode properties are very forgiving. Case doesn't matter and `[ -_]` will be stripped out.
7.683326
7.410632
1.036798
# Use traditional ASCII upper/lower case unless: # 1. The strings fed in are not bytes # 2. And the the Unicode flag was used if not in_group: v = self.posix_props(("^" if negate else "") + ("upper" if case == _UPPER else "lower"), in_group=in_group) v[0] = "[%s]" % v[0] else: v = self.posix_props(("^" if negate else "") + ("upper" if case == _UPPER else "lower"), in_group=in_group) return v
def letter_case_props(self, case, in_group, negate=False)
Insert letter (ASCII or Unicode) case properties.
5.322755
5.444177
0.977697
current = [] while True: try: t = next(i) current.extend(self.normal(t, i)) except StopIteration: break return current
def main_group(self, i)
The main group: group 0.
6.050602
5.82565
1.038614
self.verbose = bool(self.re_verbose) self.unicode = bool(self.re_unicode) self.global_flag_swap = { "unicode": ((self.re_unicode is not None) if not _util.PY37 else False), "verbose": False } self.temp_global_flag_swap = { "unicode": False, "verbose": False } self.ascii = self.re_unicode is not None and not self.re_unicode if not self.unicode and not self.ascii: self.unicode = True new_pattern = [] text = self.process_quotes(self.search.decode('latin-1') if self.is_bytes else self.search) i = _util.StringIter(text) iter(i) retry = True while retry: retry = False try: new_pattern = self.main_group(i) except GlobalRetryException: # Prevent a loop of retry over and over for a pattern like ((?u)(?a)) # or (?-x:(?x)) if self.temp_global_flag_swap['unicode']: if self.global_flag_swap['unicode']: raise LoopException('Global unicode flag recursion.') else: self.global_flag_swap["unicode"] = True if self.temp_global_flag_swap['verbose']: if self.global_flag_swap['verbose']: raise LoopException('Global verbose flag recursion.') else: self.global_flag_swap['verbose'] = True self.temp_global_flag_swap = { "unicode": False, "verbose": False } i.rewind(i.index) retry = True return "".join(new_pattern).encode('latin-1') if self.is_bytes else "".join(new_pattern)
def parse(self)
Apply search template.
4.115204
3.998789
1.029113
base = 10 prefix = text[1:3] if text[0] == "-" else text[:2] if prefix[0:1] == "0": char = prefix[-1] if char == "b": base = 2 elif char == "o": base = 8 elif char == "x": base = 16 try: text = int(text, base) except Exception: pass return text
def parse_format_index(self, text)
Parse format index.
2.898931
2.759773
1.050424
if t == '{': t = self.format_next(i) if t == '{': self.get_single_stack() self.result.append(t) else: field, text = self.get_format(t, i) self.handle_format_group(field, text) else: t = self.format_next(i) if t == '}': self.get_single_stack() self.result.append(t) else: raise SyntaxError("Unmatched '}' at %d!" % (i.index - 2))
def handle_format(self, t, i)
Handle format.
3.784128
3.740864
1.011565
index = i.index value = [] zero_count = 0 try: if c == '0': for x in range(3): if c != '0': break value.append(c) c = next(i) zero_count = len(value) if zero_count < 3: for x in range(3 - zero_count): if c not in _OCTAL: break value.append(c) c = next(i) i.rewind(1) except StopIteration: pass octal_count = len(value) if not (self.use_format and octal_count) and not (zero_count and octal_count < 3) and octal_count != 3: i.rewind(i.index - index) value = [] return ''.join(value) if value else None
def get_octal(self, c, i)
Get octal.
3.273135
3.176822
1.030317
value = int(text, 8) if value > 0xFF and self.is_bytes: # Re fails on octal greater than `0o377` or `0xFF` raise ValueError("octal escape value outside of range 0-0o377!") else: single = self.get_single_stack() if self.span_stack: text = self.convert_case(chr(value), self.span_stack[-1]) value = ord(self.convert_case(text, single)) if single is not None else ord(text) elif single: value = ord(self.convert_case(chr(value), single)) if self.use_format and value in _CURLY_BRACKETS_ORD: self.handle_format(chr(value), i) elif value <= 0xFF: self.result.append('\\%03o' % value) else: self.result.append(chr(value))
def parse_octal(self, text, i)
Parse octal value.
4.876151
4.709089
1.035476
index = i.index value = [] try: if next(i) != '{': raise SyntaxError("Named Unicode missing '{'' at %d!" % (i.index - 1)) c = next(i) while c != '}': value.append(c) c = next(i) except StopIteration: raise SyntaxError("Unmatched '}' at %d!" % index) return ''.join(value)
def get_named_unicode(self, i)
Get named Unicode.
4.055282
3.873873
1.046829
value = ord(_unicodedata.lookup(self.get_named_unicode(i))) single = self.get_single_stack() if self.span_stack: text = self.convert_case(chr(value), self.span_stack[-1]) value = ord(self.convert_case(text, single)) if single is not None else ord(text) elif single: value = ord(self.convert_case(chr(value), single)) if self.use_format and value in _CURLY_BRACKETS_ORD: self.handle_format(chr(value), i) elif value <= 0xFF: self.result.append('\\%03o' % value) else: self.result.append(chr(value))
def parse_named_unicode(self, i)
Parse named Unicode.
4.653554
4.605513
1.010431
value = [] for x in range(3): c = next(i) if c == '0': value.append(c) else: # pragma: no cover raise SyntaxError('Invalid wide Unicode character at %d!' % (i.index - 1)) c = next(i) if c in ('0', '1'): value.append(c) else: # pragma: no cover raise SyntaxError('Invalid wide Unicode character at %d!' % (i.index - 1)) for x in range(4): c = next(i) if c.lower() in _HEX: value.append(c) else: # pragma: no cover raise SyntaxError('Invalid wide Unicode character at %d!' % (i.index - 1)) return ''.join(value)
def get_wide_unicode(self, i)
Get narrow Unicode.
2.306549
2.270751
1.015765
text = self.get_wide_unicode(i) if wide else self.get_narrow_unicode(i) value = int(text, 16) single = self.get_single_stack() if self.span_stack: text = self.convert_case(chr(value), self.span_stack[-1]) value = ord(self.convert_case(text, single)) if single is not None else ord(text) elif single: value = ord(self.convert_case(chr(value), single)) if self.use_format and value in _CURLY_BRACKETS_ORD: self.handle_format(chr(value), i) elif value <= 0xFF: self.result.append('\\%03o' % value) else: self.result.append(chr(value))
def parse_unicode(self, i, wide=False)
Parse Unicode.
4.008412
3.935524
1.018521
value = [] for x in range(2): c = next(i) if c.lower() in _HEX: value.append(c) else: # pragma: no cover raise SyntaxError('Invalid byte character at %d!' % (i.index - 1)) return ''.join(value)
def get_byte(self, i)
Get byte.
5.78509
5.429483
1.065496
value = int(self.get_byte(i), 16) single = self.get_single_stack() if self.span_stack: text = self.convert_case(chr(value), self.span_stack[-1]) value = ord(self.convert_case(text, single)) if single is not None else ord(text) elif single: value = ord(self.convert_case(chr(value), single)) if self.use_format and value in _CURLY_BRACKETS_ORD: self.handle_format(chr(value), i) else: self.result.append('\\%03o' % value)
def parse_bytes(self, i)
Parse byte.
5.241982
5.063665
1.035215
index = i.index value = [t] try: c = next(i) if c != "<": raise SyntaxError("Group missing '<' at %d!" % (i.index - 1)) value.append(c) c = next(i) if c in _DIGIT: value.append(c) c = next(i) while c != '>': if c in _DIGIT: value.append(c) c = next(i) value.append(c) elif c in _LETTERS_UNDERSCORE: value.append(c) c = next(i) while c != '>': if c in _WORD: value.append(c) c = next(i) value.append(c) else: raise SyntaxError("Invalid group character at %d!" % (i.index - 1)) except StopIteration: raise SyntaxError("Unmatched '<' at %d!" % index) return ''.join(value)
def get_named_group(self, t, i)
Get group number.
2.446145
2.434569
1.004755
try: value = [] if t in _DIGIT and t != '0': value.append(t) t = next(i) if t in _DIGIT: value.append(t) else: i.rewind(1) except StopIteration: pass return ''.join(value) if value else None
def get_group(self, t, i)
Get group number.
4.372909
4.346566
1.006061
c = next(i) return self.format_references(next(i), i) if c == '\\' else c
def format_next(self, i)
Get next format char.
12.444519
9.908557
1.255937
octal = self.get_octal(t, i) if octal: value = int(octal, 8) if value > 0xFF and self.is_bytes: # Re fails on octal greater than `0o377` or `0xFF` raise ValueError("octal escape value outside of range 0-0o377!") value = chr(value) elif t in _STANDARD_ESCAPES or t == '\\': value = _BACK_SLASH_TRANSLATION['\\' + t] elif not self.is_bytes and t == "U": value = chr(int(self.get_wide_unicode(i), 16)) elif not self.is_bytes and t == "u": value = chr(int(self.get_narrow_unicode(i), 16)) elif not self.is_bytes and t == "N": value = _unicodedata.lookup(self.get_named_unicode(i)) elif t == "x": value = chr(int(self.get_byte(i), 16)) else: i.rewind(1) value = '\\' return value
def format_references(self, t, i)
Handle format references.
3.596174
3.601885
0.998414
octal = self.get_octal(t, i) if t in _OCTAL and octal: self.parse_octal(octal, i) elif (t in _DIGIT or t == 'g') and not self.use_format: group = self.get_group(t, i) if not group: group = self.get_named_group(t, i) self.handle_group('\\' + group) elif t in _STANDARD_ESCAPES: self.get_single_stack() self.result.append('\\' + t) elif t == "l": self.single_case(i, _LOWER) elif t == "L": self.span_case(i, _LOWER) elif t == "c": self.single_case(i, _UPPER) elif t == "C": self.span_case(i, _UPPER) elif t == "E": self.end_found = True elif not self.is_bytes and t == "U": self.parse_unicode(i, True) elif not self.is_bytes and t == "u": self.parse_unicode(i) elif not self.is_bytes and t == "N": self.parse_named_unicode(i) elif t == "x": self.parse_bytes(i) elif self.use_format and t in _CURLY_BRACKETS: self.result.append('\\\\') self.handle_format(t, i) elif self.use_format and t == 'g': self.result.append('\\\\') self.result.append(t) else: value = '\\' + t self.get_single_stack() if self.span_stack: value = self.convert_case(value, self.span_stack[-1]) self.result.append(value)
def reference(self, t, i)
Handle references.
2.932286
2.950974
0.993667
# A new \L, \C or \E should pop the last in the stack. if self.span_stack: self.span_stack.pop() if self.single_stack: self.single_stack.pop() self.span_stack.append(case) count = len(self.span_stack) self.end_found = False try: while not self.end_found: t = next(i) if self.use_format and t in _CURLY_BRACKETS: self.handle_format(t, i) elif t == '\\': try: t = next(i) self.reference(t, i) except StopIteration: self.result.append(t) raise else: self.result.append(self.convert_case(t, case)) if self.end_found or count > len(self.span_stack): self.end_found = False break except StopIteration: pass if count == len(self.span_stack): self.span_stack.pop()
def span_case(self, i, case)
Uppercase or lowercase the next range of characters until end marker is found.
3.602557
3.549196
1.015035
if self.is_bytes: cased = [] for c in value: if c in _ASCII_LETTERS: cased.append(c.lower() if case == _LOWER else c.upper()) else: cased.append(c) return "".join(cased) else: return value.lower() if case == _LOWER else value.upper()
def convert_case(self, value, case)
Convert case.
2.569551
2.424265
1.05993
# Pop a previous case if we have consecutive ones. if self.single_stack: self.single_stack.pop() self.single_stack.append(case) try: t = next(i) if self.use_format and t in _CURLY_BRACKETS: self.handle_format(t, i) elif t == '\\': try: t = next(i) self.reference(t, i) except StopIteration: self.result.append(t) raise elif self.single_stack: self.result.append(self.convert_case(t, self.get_single_stack())) except StopIteration: pass
def single_case(self, i, case)
Uppercase or lowercase the next character.
4.769378
4.396649
1.084776
single = None while self.single_stack: single = self.single_stack.pop() return single
def get_single_stack(self)
Get the correct single stack item to use.
6.461015
4.417577
1.46257
# Handle auto incrementing group indexes if field == '': if self.auto: field = str(self.auto_index) text[0] = (_util.FMT_FIELD, field) self.auto_index += 1 elif not self.manual and not self.auto: self.auto = True field = str(self.auto_index) text[0] = (_util.FMT_FIELD, field) self.auto_index += 1 else: raise ValueError("Cannot switch to auto format during manual format!") elif not self.manual and not self.auto: self.manual = True elif not self.manual: raise ValueError("Cannot switch to manual format during auto format!") self.handle_group(field, tuple(text), True)
def handle_format_group(self, field, text)
Handle format group.
3.67577
3.618783
1.015748
if capture is None: capture = tuple() if self.is_bytes else '' if len(self.result) > 1: self.literal_slots.append("".join(self.result)) if is_format: self.literal_slots.extend(["\\g<", text, ">"]) else: self.literal_slots.append(text) del self.result[:] self.result.append("") self.slot += 1 elif is_format: self.literal_slots.extend(["\\g<", text, ">"]) else: self.literal_slots.append(text) self.group_slots.append( ( self.slot, ( (self.span_stack[-1] if self.span_stack else None), self.get_single_stack(), capture ) ) ) self.slot += 1
def handle_group(self, text, capture=None, is_format=False)
Handle groups.
3.349698
3.279486
1.021409
if isinstance(template, bytes): self.is_bytes = True else: self.is_bytes = False if isinstance(pattern.pattern, bytes) != self.is_bytes: raise TypeError('Pattern string type must match replace template string type!') self._original = template self.use_format = use_format self.parse_template(pattern) return ReplaceTemplate( tuple(self.groups), tuple(self.group_slots), tuple(self.literals), hash(pattern), self.use_format, self.is_bytes )
def parse(self, pattern, template, use_format=False)
Parse template.
4.842837
4.717953
1.02647
g_index = None for group in self.groups: if group[0] == index: g_index = group[1] break return g_index
def _get_group_index(self, index)
Find and return the appropriate group index.
3.120146
2.63409
1.184525
g_case = (None, None, -1) for group in self.group_slots: if group[0] == index: g_case = group[1] break return g_case
def _get_group_attributes(self, index)
Find and return the appropriate group case.
5.378843
4.104316
1.310533
if isinstance(l, str): l = l.encode('ascii', 'backslashreplace') elif not isinstance(l, bytes): l = str(l).encode('ascii', 'backslashreplace') return l
def _to_bstr(l)
Convert to byte string.
2.555546
2.173702
1.175665
def format_string(m, l, capture, is_bytes):
    """Perform a string format.

    Applies the chain of format operations in ``capture[1:]`` (attribute
    access, indexing, conversion, and format spec/alignment) to ``l`` and
    returns the result as a byte string when ``is_bytes`` is true,
    otherwise as a text string.

    NOTE(review): `m` is accepted but unused in this body — presumably kept
    for interface parity with other expansion callbacks; confirm at call
    sites.
    """

    for fmt_type, value in capture[1:]:
        if fmt_type == FMT_ATTR:
            # Attribute
            l = getattr(l, value)
        elif fmt_type == FMT_INDEX:
            # Index
            l = l[value]
        elif fmt_type == FMT_CONV:
            if is_bytes:
                # Conversion
                if value in ('r', 'a'):
                    l = repr(l).encode('ascii', 'backslashreplace')
                elif value == 's':
                    # If the object is not string or byte string already
                    l = _to_bstr(l)
            else:
                # Conversion
                if value == 'a':
                    l = ascii(l)
                elif value == 'r':
                    l = repr(l)
                elif value == 's':
                    # If the object is not string or byte string already
                    l = str(l)
        elif fmt_type == FMT_SPEC:
            # Integers and floats don't have an explicit 's' format type.
            if value[3] and value[3] == 's':
                if isinstance(l, int):  # pragma: no cover
                    raise ValueError("Unknown format code 's' for object of type 'int'")
                if isinstance(l, float):  # pragma: no cover
                    raise ValueError("Unknown format code 's' for object of type 'float'")
            # Ensure object is a byte string
            # NOTE(review): source formatting was collapsed; this conversion is
            # placed at FMT_SPEC level (before alignment, which requires a
            # string) — confirm against upstream.
            l = _to_bstr(l) if is_bytes else str(l)
            spec_type = value[1]
            if spec_type == '^':
                l = l.center(value[2], value[0])
            elif spec_type == ">":
                l = l.rjust(value[2], value[0])
            else:
                l = l.ljust(value[2], value[0])
    # Make sure the final object is a byte string
    return _to_bstr(l) if is_bytes else str(l)
2.843392
2.800518
1.015309
def rewind(self, count):
    """Move the parser index back by `count` positions.

    Raises:
        ValueError: If rewinding would move before the beginning.
    """

    new_index = self._index - count
    if new_index < 0:  # pragma: no cover
        raise ValueError("Can't rewind past beginning!")
    self._index = new_index
7.997561
5.590652
1.430524
def get_version():
    """Get version and version_info without importing the entire module.

    Loads only the package's `__meta__` module from disk and returns the
    canonical version string and development status; on failure, prints
    the traceback and returns ``None``.
    """

    location = os.path.join(os.path.dirname(__file__), 'backrefs')
    fp, pathname, desc = imp.find_module('__meta__', [location])
    try:
        meta = imp.load_module('__meta__', fp, pathname, desc)
        vi = meta.__version_info__
        return vi._get_canonical(), vi._get_dev_status()
    except Exception:
        print(traceback.format_exc())
    finally:
        # `find_module` opened the file; always release it.
        fp.close()
4.658862
4.052288
1.149687
def get_requirements(path="requirements/project.txt"):
    """Load the list of dependencies from a pip requirements file.

    Fixes two defects in the original: blank lines were appended as empty
    strings, and comment lines with leading whitespace slipped past the
    ``startswith("#")`` check (it ran before stripping). Stripping first
    excludes both.

    Args:
        path: Requirements file to read. Defaults to the original
            hard-coded project file, so existing callers are unaffected.

    Returns:
        list[str]: The requirement specifiers, whitespace-stripped.
    """
    install_requires = []
    with open(path) as f:
        for line in f:
            requirement = line.strip()
            # Strip first so blank lines and indented comments are skipped.
            if requirement and not requirement.startswith("#"):
                install_requires.append(requirement)
    return install_requires
2.796859
2.586444
1.081353
def get_unicodedata():
    """Download the `unicodedata` version for the given Python version.

    Loads the `unidatadownload` tool from the `tools` directory, asks it
    to fetch the Unicode data matching this interpreter's
    `unicodedata.unidata_version`, and returns that version string.
    Asserts on failure.
    """

    import unicodedata

    uver = unicodedata.unidata_version
    tool_path = os.path.join(os.path.dirname(__file__), 'tools')
    fp, pathname, desc = imp.find_module('unidatadownload', [tool_path])
    fail = False
    try:
        tool = imp.load_module('unidatadownload', fp, pathname, desc)
        tool.get_unicodedata(uver, no_zip=True)
    except Exception:
        print(traceback.format_exc())
        fail = True
    finally:
        # `find_module` opened the file; always release it.
        fp.close()

    assert not fail, "Failed to obtain unicodedata!"
    return uver
3.60362
3.382569
1.06535
def generate_unicode_table():
    """Generate the Unicode table for the given Python version.

    Downloads the matching Unicode data via `get_unicodedata`, then loads
    the `unipropgen` tool from the `tools` directory and builds the
    property tables under `backrefs/uniprops/unidata`. Asserts on failure.
    """

    uver = get_unicodedata()
    tool_path = os.path.join(os.path.dirname(__file__), 'tools')
    fp, pathname, desc = imp.find_module('unipropgen', [tool_path])
    fail = False
    try:
        tool = imp.load_module('unipropgen', fp, pathname, desc)
        target = os.path.join(
            os.path.dirname(__file__), 'backrefs', 'uniprops', 'unidata'
        )
        tool.build_tables(target, uver)
    except Exception:
        print(traceback.format_exc())
        fail = True
    finally:
        # `find_module` opened the file; always release it.
        fp.close()

    assert not fail, "Failed uniprops.py generation!"
3.941365
3.844202
1.025275
def _cached_search_compile(pattern, re_verbose, re_version, pattern_type):
    """Cached search compile.

    Parses the search pattern through the backrefs search parser; the
    `pattern_type` argument exists only to key the cache by string type.
    """

    parser = _bregex_parse._SearchParser(pattern, re_verbose, re_version)
    return parser.parse()
17.546337
16.126751
1.088027
def _cached_replace_compile(pattern, repl, flags, pattern_type):
    """Cached replace compile.

    Parses the replace template through the backrefs replace parser; the
    `pattern_type` argument exists only to key the cache by string type.
    """

    use_format = bool(flags & FORMAT)
    return _bregex_parse._ReplaceParser().parse(pattern, repl, use_format)
45.102783
42.629532
1.058017
def _get_cache_size(replace=False):
    """Get size of cache.

    Returns the current entry count of the replace-compile cache when
    `replace` is true, otherwise of the search-compile cache.
    """

    cache = _cached_replace_compile if replace else _cached_search_compile
    return cache.cache_info().currsize
4.240882
4.002014
1.059687
def _apply_replace_backrefs(m, repl=None, flags=0):
    """Expand with either the `ReplaceTemplate` or compile on the fly, or return None.

    Raises:
        ValueError: If `m` is ``None``.
    """

    if m is None:
        raise ValueError("Match is None!")
    if isinstance(repl, ReplaceTemplate):
        return repl.expand(m)
    if isinstance(repl, (str, bytes)):
        template = _bregex_parse._ReplaceParser().parse(m.re, repl, bool(flags & FORMAT))
        return template.expand(m)
    # Any other `repl` type falls through and implicitly returns None.
8.507954
6.357653
1.338222
def _apply_search_backrefs(pattern, flags=0):
    """Apply the search backrefs to the search pattern.

    Raw strings/bytes are compiled (via the cache unless DEBUG is set);
    `Bregex` objects are unwrapped to their inner pattern; already-compiled
    regex patterns pass through. Flags are only valid with raw strings.
    """

    if isinstance(pattern, (str, bytes)):
        re_verbose = VERBOSE & flags
        # V0/V1 select the regex module's behavioral version; default 0.
        re_version = V0 if flags & V0 else (V1 if flags & V1 else 0)
        if flags & DEBUG:  # pragma: no cover
            pattern = _bregex_parse._SearchParser(pattern, re_verbose, re_version).parse()
        else:
            pattern = _cached_search_compile(pattern, re_verbose, re_version, type(pattern))
    elif isinstance(pattern, Bregex):
        if flags:
            raise ValueError("Cannot process flags argument with a compiled pattern")
        pattern = pattern._pattern
    elif isinstance(pattern, _REGEX_TYPE):
        if flags:
            raise ValueError("Cannot process flags argument with a compiled pattern!")
    else:
        raise TypeError("Not a string or compiled pattern!")
    return pattern
4.190724
4.064363
1.03109
def _assert_expandable(repl, use_format=False):
    """Check if replace template is expandable.

    A `ReplaceTemplate` must have been compiled in the same (format vs.
    non-format) mode requested; otherwise `repl` must be a raw str/bytes
    template.

    Raises:
        ValueError: If the template's format mode mismatches `use_format`.
        TypeError: If `repl` is neither a template nor str/bytes.
    """

    if isinstance(repl, ReplaceTemplate):
        if repl.use_format == use_format:
            return
        if use_format:
            raise ValueError("Replace not compiled as a format replace")
        raise ValueError("Replace should not be compiled as a format replace!")
    if not isinstance(repl, (str, bytes)):
        raise TypeError("Expected string, buffer, or compiled replace!")
4.803454
4.14516
1.15881
def compile(pattern, flags=0, auto_compile=None, **kwargs):  # noqa A001
    """Compile both the search or search and replace into one object.

    An already-compiled `Bregex` is returned as-is (no new flags or a
    different `auto_compile` may be supplied); otherwise the pattern is
    compiled into a new `Bregex`, with `auto_compile` defaulting to True.
    """

    if isinstance(pattern, Bregex):
        if auto_compile is not None:
            raise ValueError("Cannot compile Bregex with a different auto_compile!")
        elif flags != 0:
            raise ValueError("Cannot process flags argument with a compiled pattern")
        return pattern
    else:
        if auto_compile is None:
            # Default: enable auto-compiling of replace templates.
            auto_compile = True
        return Bregex(compile_search(pattern, flags, **kwargs), auto_compile)
4.518711
4.144927
1.090179