code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def request_permission(cls, fine=True):
    """Request the location permission.

    Returns an async result (future) that resolves to a boolean
    indicating whether the permission was granted or denied.
    """
    app = AndroidApplication.instance()
    if fine:
        permission = cls.ACCESS_FINE_PERMISSION
    else:
        permission = cls.ACCESS_COARSE_PERMISSION
    future = app.create_future()

    def on_result(perms):
        #: Resolve the future with the grant state of the one
        #: permission we asked for
        app.set_future_result(future, perms[permission])

    app.request_permissions([permission]).then(on_result)
    return future
4.996559
4.62458
1.080435
d = self.declaration self.widget = WebView(self.get_context(), None, d.style)
def create_widget(self)
Create the underlying widget.
13.319452
9.698028
1.373419
# Create and init the client c = self.client = BridgedWebViewClient() c.setWebView(self.widget, c.getId()) c.onLoadResource.connect(self.on_load_resource) c.onPageFinished.connect(self.on_page_finished) c.onPageStarted.connect(self.on_page_started) c.onReceivedError.connect(self.on_received_error) c.onScaleChanged.connect(self.on_scale_changed) c.onProgressChanged.connect(self.on_progress_changed) c.onReceivedTitle.connect(self.on_page_title_changed) super(AndroidWebView, self).init_widget()
def init_widget(self)
Initialize the underlying widget.
2.982815
2.903487
1.027322
if self.client: #: Stop listening self.client.setWebView(self.widget, None) del self.client super(AndroidWebView, self).destroy()
def destroy(self)
Destroy the client
11.624346
11.04915
1.052058
super(UiKitSlider, self).init_widget() d = self.declaration if d.min: self.set_min(d.min) if d.max: self.set_max(d.max) if d.progress: self.set_progress(d.progress) #: A really ugly way to add the target #: would be nice if we could just pass the block pointer here :) self.get_app().bridge.addTarget( self.widget, forControlEvents=UISlider.UIControlEventValueChanged, andCallback=self.widget.getId(), usingMethod="onValueChanged", withValues=["value"]#,"selected"] ) self.widget.onValueChanged.connect(self.on_checked_changed)
def init_widget(self)
Bind the on property to the checked state
8.443063
8.003707
1.054894
#: Since iOS decides to call this like 100 times for each defer it d = self.declaration with self.widget.setValue.suppressed(): d.progress = int(value)
def on_checked_changed(self, value)
See https://stackoverflow.com/questions/19628310/
36.925159
38.675823
0.954735
super(AndroidRadioGroup, self).init_layout() d = self.declaration w = self.widget if d.checked: self.set_checked(d.checked) else: #: Check if any of the children have "checked = True" for c in d.children: if c.checked: d.checked = c w.setOnCheckedChangeListener(w.getId()) w.onCheckedChanged.connect(self.on_checked_changed)
def init_layout(self)
Set the checked state after all children have been populated.
5.083221
4.427757
1.148035
d = self.declaration if checked_id < 0: with self.widget.clearCheck.suppressed(): d.checked = None return else: for c in self.children(): if c.widget.getId() == checked_id: with self.widget.check.suppressed(): d.checked = c.declaration return
def on_checked_changed(self, group, checked_id)
Set the checked property based on the checked state of all the children
5.343835
4.985545
1.071866
if not checked: self.widget.clearCheck() else: #: Checked is a reference to the radio declaration #: so we need to get the ID of it rb = checked.proxy.widget if not rb: return self.widget.check(rb.getId())
def set_checked(self, checked)
Properly check the correct radio button.
9.698855
9.877234
0.98194
#: Add a ActivityLifecycleListener to update the application state activity = self.widget activity.addActivityLifecycleListener(activity.getId()) activity.onActivityLifecycleChanged.connect( self.on_activity_lifecycle_changed) #: Add BackPressedListener to trigger the event activity.addBackPressedListener(activity.getId()) activity.onBackPressed.connect(self.on_back_pressed) #: Add ConfigurationChangedListener to trigger the event activity.addConfigurationChangedListener(activity.getId()) activity.onConfigurationChanged.connect(self.on_configuration_changed)
def init_widget(self)
Initialize on the first call
3.744883
3.575608
1.047342
f = self.create_future() #: Old versions of android did permissions at install time if self.api_level < 23: f.set_result(True) return f def on_result(allowed): result = allowed == Activity.PERMISSION_GRANTED self.set_future_result(f, result) self.widget.checkSelfPermission(permission).then(on_result) return f
def has_permission(self, permission)
Return a future that resolves with the result of the permission
7.254249
6.057
1.197664
f = self.create_future() #: Old versions of android did permissions at install time if self.api_level < 23: f.set_result({p: True for p in permissions}) return f w = self.widget request_code = self._permission_code self._permission_code += 1 #: So next call has a unique code #: On first request, setup our listener, and request the permission if request_code == 0: w.setPermissionResultListener(w.getId()) w.onRequestPermissionsResult.connect(self._on_permission_result) def on_results(code, perms, results): #: Check permissions f.set_result({p: r == Activity.PERMISSION_GRANTED for (p, r) in zip(perms, results)}) #: Save a reference self._permission_requests[request_code] = on_results #: Send out the request self.widget.requestPermissions(permissions, request_code) return f
def request_permissions(self, permissions)
Return a future that resolves with the results of the permission requests
5.215843
4.948815
1.053958
from .android_toast import Toast def on_toast(ref): t = Toast(__id__=ref) t.show() Toast.makeText(self, msg, 1 if long else 0).then(on_toast)
def show_toast(self, msg, long=True)
Show a toast message for the given duration. This is an android specific api. Parameters ----------- msg: str Text to display in the toast message long: bool Display for a long or short (system defined) duration
7.165119
9.720443
0.737119
def on_back_pressed(self):
    """Fire the `back_pressed` event with a dict containing a
    'handled' key when the hardware back button is pressed.

    If a listener sets 'handled' to any truthy value the default
    android back behavior is skipped.

    Returns
    -------
    handled: bool
        Whether a listener handled the event. Must always be a
        boolean or android will abort.
    """
    try:
        event = {'handled': False}
        self.back_pressed(event)
        return bool(event.get('handled', False))
    except Exception:
        #: Log instead of silently swallowing so bugs in listeners
        #: are visible, but still return a boolean or android aborts
        import traceback
        print(traceback.format_exc())
        return False
10.205705
7.091384
1.43917
def on_configuration_changed(self, config):
    """Handle a screen configuration change by updating the cached
    width, height, and orientation.
    """
    self.width = config['width']
    self.height = config['height']
    #: The native side reports orientation as an index into this tuple
    orientations = ('square', 'portrait', 'landscape')
    self.orientation = orientations[config['orientation']]
5.278349
4.636492
1.138436
if not self.build_info: def on_build_info(info): self.dp = info['DISPLAY_DENSITY'] self.width = info['DISPLAY_WIDTH'] self.height = info['DISPLAY_HEIGHT'] self.orientation = ('square', 'portrait', 'landscape')[ info['DISPLAY_ORIENTATION']] self.api_level = info['SDK_INT'] self.build_info = info self._show_view() self.init_widget() self.widget.getBuildInfo().then(on_build_info) else: self._show_view()
def show_view(self)
Show the current `app.view`. This will fade out the previous with the new view.
4.38622
4.402029
0.996409
#: Get the handler for this request handler = self._permission_requests.get(code, None) if handler is not None: del self._permission_requests[code] #: Invoke that handler with the permission request response handler(code, perms, results)
def _on_permission_result(self, code, perms, results)
Handles a permission request result by passing it to the handler with the given code.
4.732623
4.239508
1.116314
def set_screen_on(window): from .android_window import Window window = Window(__id__=window) if self.keep_screen_on: window.addFlags(Window.FLAG_KEEP_SCREEN_ON) else: window.clearFlags(Window.FLAG_KEEP_SCREEN_ON) self.widget.getWindow().then(set_screen_on)
def _observe_keep_screen_on(self, change)
Sets or clears the flag to keep the screen on.
4.079452
4.072172
1.001788
def load_plugin_factories(self):
    """Add any plugin toolkit widgets (registered under the
    'enaml_native_android_factories' entry point) to the
    ANDROID_FACTORIES registry.
    """
    for plugin in self.get_plugins(group='enaml_native_android_factories'):
        get_factories = plugin.load()
        #: UPPER_SNAKE is reserved for constants; this is a loop-local
        plugin_factories = get_factories()
        factories.ANDROID_FACTORIES.update(plugin_factories)
9.807252
8.020531
1.222768
super(UiKitEditText, self).init_widget() #: Init font properties etc... self.init_text() d = self.declaration if d.placeholder: self.set_placeholder(d.placeholder) if d.input_type != 'text': self.set_input_type(d.input_type) if d.style: self.set_style(d.style) #: A really ugly way to add the target #: would be nice if we could just pass the block pointer here :) self.get_app().bridge.addTarget( self.widget, forControlEvents=UITextField.UIControlEventEditingChanged, andCallback=self.widget.getId(), usingMethod="onValueChanged", withValues=["text"]#,"selected"] ) self.widget.onValueChanged.connect(self.on_value_changed)
def init_widget(self)
Bind the on property to the checked state
8.314068
8.113297
1.024746
d = self.declaration with self.widget.get_member('text').suppressed(self.widget): d.text = text
def on_value_changed(self, text)
Update text field
14.464787
14.353316
1.007766
d = self.declaration self.widget = DatePicker(self.get_context(), None, d.style or "@attr/datePickerStyle")
def create_widget(self)
Create the underlying widget.
14.330891
10.877582
1.31747
d = self.declaration w = self.widget date = d.date w.init(date.year, date.month-1, date.day, w.getId()) super(AndroidDatePicker, self).init_widget() w.onDateChanged.connect(self.on_date_changed)
def init_widget(self)
Initialize the underlying widget.
5.691607
5.117856
1.112108
from .core.import_hooks import ExtensionImporter importer = ExtensionImporter() sys.meta_path.append(importer) yield sys.meta_path.remove(importer)
def imports()
Install the import hook to load python extensions from app's lib folder during the context of this block. This method is preferred as it's faster than using install.
4.481592
4.556843
0.983486
from .core.import_hooks import ExtensionImporter importer = ExtensionImporter() sys.meta_path.append(importer)
def install()
Install the import hook to load extensions from the app Lib folder. Like imports but leaves it in the meta_path, thus it is slower.
7.155834
5.211618
1.373054
#if sys.implementation.cache_tag is None: # raise NotImplementedError('sys.implementation.cache_tag is None') #path = os.fspath(path) head, pycache_filename = os.path.split(path) head, pycache = os.path.split(head) if pycache != _PYCACHE: raise ValueError('{} not bottom-level directory in ' '{!r}'.format(_PYCACHE, path)) dot_count = pycache_filename.count('.') if dot_count not in {2, 3}: raise ValueError('expected only 2 or 3 dots in ' '{!r}'.format(pycache_filename)) elif dot_count == 3: optimization = pycache_filename.rsplit('.', 2)[-2] if not optimization.startswith(_OPT): raise ValueError("optimization portion of filename does not start " "with {!r}".format(_OPT)) opt_level = optimization[len(_OPT):] if not opt_level.isalnum(): raise ValueError("optimization level {!r} is not an alphanumeric " "value".format(optimization)) base_filename = pycache_filename.partition('.')[0] return os.path.join(head, base_filename + SOURCE_SUFFIXES[0])
def source_from_cache(path)
Given the path to a .pyc. file, return the path to its .py file. The .pyc file does not need to exist; this simply returns the path to the .py file calculated to correspond to the .pyc file. If path does not conform to PEP 3147/488 format, ValueError will be raised. If sys.implementation.cache_tag is None then NotImplementedError is raised.
3.077511
2.792762
1.10196
#: Build the request builder = Request.Builder() builder.url(self.url) #: Set any headers for k, v in self.headers.items(): builder.addHeader(k, v) #: Get the body or generate from the data given body = self.body if body: #: Create the request body media_type = MediaType( __id__=MediaType.parse(self.content_type)) request_body = RequestBody( __id__=RequestBody.create(media_type, body)) #: Set the request method builder.method(self.method, request_body) elif self.method in ['get', 'delete', 'head']: #: Set the method getattr(builder, self.method)() else: raise ValueError("Cannot do a '{}' request " "without a body".format(self.method)) #: Save the okhttp request self.request = Request(__id__=builder.build())
def init_request(self)
Init the native request using the okhttp3.Request.Builder
4.045003
3.680305
1.099094
if not self.data: return "" if self.content_type == 'application/json': import json return json.dumps(self.data) elif self.content_type == 'application/x-www-form-urlencoded': import urllib return urllib.urlencode(self.data) else: raise NotImplementedError( "You must manually encode the request " "body for '{}'".format(self.content_type) )
def _default_body(self)
If the body is not passed in by the user try to create one using the given data parameters.
2.757544
2.676789
1.030169
def on_finish(self):
    """Called when the request completes, regardless of success or
    failure. Records the total request time and fires the callback.
    """
    response = self.response
    response.request_time = time.time() - self.start_time
    if self.callback:
        self.callback(response)
4.594748
4.067291
1.129683
client = self.client #: Dispatch the async call call = Call(__id__=client.newCall(request.request)) call.enqueue(request.handler) #: Save the call reference request.call = call
def _fetch(self, request)
Fetch using the OkHttpClient
16.167059
12.799668
1.263084
d = self.declaration self.widget = CheckBox(self.get_context(), None, d.style or "@attr/checkboxStyle")
def create_widget(self)
Create the underlying widget.
16.904659
12.526323
1.349531
app = BridgedApplication.instance() f = app.create_future() #: Set callback for when response is in if callback is not None: f.then(callback) def handle_response(response): self.requests.remove(response.request) f.set_result(response) #: Create and dispatch the request object request = self.request_factory(url=url, callback=handle_response, **kwargs) #: Save a reference #: This gets removed in the handle response self.requests.append(request) #: Run the request self._fetch(request) #: Save it on the future so it can be accessed and observed #: from a view if needed f.request = request return f
def fetch(self, url, callback=None, raise_error=True, **kwargs)
Fetch the given url and fire the callback when ready. Optionally pass a `streaming_callback` to handle data from large requests. Parameters ---------- url: string The url to access. callback: callable The callback to invoke when the request completes. You can also use the return value. kwargs: The arguments to pass to the `HttpRequest` object. See it for which values are valid. Returns -------- result: Future A future that resolves with the `HttpResponse` object when the request is complete.
6.257145
7.022727
0.890985
d = self.declaration self.widget = GridLayout(self.get_context(), None, d.style)
def create_widget(self)
Create the underlying widget.
12.05177
9.332662
1.291354
d = self.declaration self.widget = Switch(self.get_context(), None, d.style or '@attr/switchStyle')
def create_widget(self)
Create the underlying widget.
19.190245
14.112432
1.359811
super(AndroidSwitch, self).init_widget() d = self.declaration self.set_show_text(d.show_text) if d.split_track: self.set_split_track(d.split_track) if d.text_off: self.set_text_off(d.text_off) if d.text_on: self.set_text_on(d.text_on)
def init_widget(self)
Initialize the underlying widget.
2.514648
2.212367
1.136633
def load(self):
    """Load and return the object defined by the plugin entry point.

    The source is expected to be in ``"module.path:attribute"`` form.
    """
    #: Leftover debug print removed
    import pydoc
    path, attr = self.source.split(":")
    module = pydoc.locate(path)
    return getattr(module, attr)
5.461482
4.748501
1.150149
plugins = {} try: with open('entry_points.json') as f: entry_points = json.load(f) for ep, obj in entry_points.items(): plugins[ep] = [] for name, src in obj.items(): plugins[ep].append(Plugin(name=name, source=src)) except Exception as e: print("Failed to load entry points {}".format(e)) return plugins
def _default_plugins(self)
Get entry points to load any plugins installed. The build process should create an "entry_points.json" file with all of the data from the installed entry points.
3.217378
2.526949
1.273226
#: Schedule a load view if given and remote debugging is not active #: the remote debugging init call this after dev connection is ready if self.load_view and self.dev != "remote": self.deferred_call(self.load_view, self) self.loop.start()
def start(self)
Start the application's main event loop using either twisted or tornado.
25.812805
22.695158
1.137371
def timed_call(self, ms, callback, *args, **kwargs):
    """Invoke a callable on the main event loop thread at a specified
    time in the future.

    Parameters
    ----------
    ms : int
        The time to delay, in milliseconds, before executing the
        callable.
    callback : callable
        The callable object to execute at some point in the future.
    *args, **kwargs
        Any additional positional and keyword arguments to pass to
        the callback.
    """
    #: Delegate to whichever event loop implementation is active
    loop = self.loop
    return loop.timed_call(ms, callback, *args, **kwargs)
5.239682
9.200312
0.569511
if future is None: raise bridge.BridgeReferenceError( "Tried to add a callback to a nonexistent Future. " "Make sure you pass the `returns` argument to your JavaMethod") return self.loop.add_done_callback(future, callback)
def add_done_callback(self, future, callback)
Add a callback on a future object put here so it can be implemented with different event loops. Parameters ----------- future: Future or Deferred Future implementation for the current EventLoop callback: callable Callback to invoke when the future is done
11.0748
12.008183
0.922271
def get_view(self):
    """Return the root view to display, making sure it has been
    initialized and its proxy activated first.
    """
    root = self.view
    #: Lazily initialize then activate on first access
    if not root.is_initialized:
        root.initialize()
    if not root.proxy_is_active:
        root.activate_proxy()
    return root.proxy.widget
4.933915
4.2504
1.160812
n = len(self._bridge_queue) # Add to queue self._bridge_queue.append((name, args)) if n == 0: # First event, send at next available time self._bridge_last_scheduled = time() self.deferred_call(self._bridge_send) return elif kwargs.get('now'): self._bridge_send(now=True) return # If it's been over 5 ms since we last scheduled, run now dt = time() - self._bridge_last_scheduled if dt > self._bridge_max_delay: self._bridge_send(now=True)
def send_event(self, name, *args, **kwargs)
Send an event to the native handler. This call is queued and batched. Parameters ---------- name : str The event name to be processed by MainActivity.processMessages. *args: args The arguments required by the event. **kwargs: kwargs Options for sending. These are: now: boolean Send the event now
4.287355
4.701306
0.91195
if len(self._bridge_queue): if self.debug: print("======== Py --> Native ======") for event in self._bridge_queue: print(event) print("===========================") self.dispatch_events(bridge.dumps(self._bridge_queue)) self._bridge_queue = []
def _bridge_send(self, now=False)
Send the events over the bridge to be processed by the native handler. Parameters ---------- now: boolean Send all pending events now instead of waiting for deferred calls to finish. Use this when you want to update the screen
5.829692
5.933049
0.98258
events = bridge.loads(data) if self.debug: print("======== Py <-- Native ======") for event in events: print(event) print("===========================") for event in events: if event[0] == 'event': self.handle_event(event)
def process_events(self, data)
The native implementation must use this call to
5.915507
5.970637
0.990766
result_id, ptr, method, args = event[1] obj = None result = None try: obj, handler = bridge.get_handler(ptr, method) result = handler(*[v for t, v in args]) except bridge.BridgeReferenceError as e: #: Log the event, don't blow up here msg = "Error processing event: {} - {}".format( event, e).encode("utf-8") print(msg) self.show_error(msg) except: #: Log the event, blow up in user's face msg = "Error processing event: {} - {}".format( event, traceback.format_exc()).encode("utf-8") print(msg) self.show_error(msg) raise finally: if result_id: if hasattr(obj, '__nativeclass__'): sig = getattr(type(obj), method).__returns__ else: sig = type(result).__name__ self.send_event( bridge.Command.RESULT, #: method result_id, bridge.msgpack_encoder(sig, result) #: args )
def handle_event(self, event)
When we get an 'event' type from the bridge handle it by invoking the handler and if needed sending back the result.
5.56903
5.345619
1.041793
self.loop.log_error(callback) msg = "\n".join([ "Exception in callback %r"%callback, traceback.format_exc() ]) self.show_error(msg.encode('utf-8'))
def handle_error(self, callback)
Called when an error occurs in an event loop callback. By default, sets the error view.
5.809328
4.909159
1.183365
try: from .dev import DevServerSession session = DevServerSession.initialize(host=self.dev) session.start() #: Save a reference self._dev_session = session except: self.show_error(traceback.format_exc())
def start_dev_session(self)
Start a client that attempts to connect to the dev server running on the host `app.dev`
7.1263
6.048865
1.178122
from enamlnative.widgets import api for plugin in self.get_plugins(group='enaml_native_widgets'): get_widgets = plugin.load() for name, widget in iter(get_widgets()): #: Update the core api with these widgets setattr(api, name, widget)
def load_plugin_widgets(self)
Pull widgets added via plugins using the `enaml_native_widgets` entry point. The entry point function must return a dictionary of Widget declarations to add to the core api. def install(): from charts.widgets.chart_view import BarChart, LineChart return { 'BarChart': BarChart, 'LineCart': LineChart, }
8.400188
5.825072
1.442075
receiver = cls() activity = receiver.__app__.widget receiver.setReceiver(receiver.getId()) def on_receive(ctx, intent): callback(intent) receiver.onReceive.connect(on_receive) activity.registerReceiver(receiver, IntentFilter(action)) return receiver
def for_action(cls, action, callback, single_shot=True)
Create a BroadcastReceiver that is invoked when the given action is received. Parameters ---------- action: String Action to receive callback: Callable Callback to invoke when the action is received single_shot: Bool Cleanup after one callback Returns ------- receiver: BroadcastReceiver The receiver that was created. You must hold on to this or the GC will clean it up.
7.745192
7.847202
0.987
name = Measurement.name_from_class(measurement_class) return self._construct_measurement(name)
def get(self, measurement_class)
Return the latest measurement for the given class or None if nothing has been received from the vehicle.
7.115838
7.701343
0.923974
def add_source(self, source):
    """Add a vehicle data source to the instance.

    The instance is set as the callback of the source, and the source
    is started if it is startable (i.e. it has a ``start()`` method).
    """
    if source is None:
        return
    self.sources.add(source)
    source.callback = self._receive
    if hasattr(source, 'start'):
        source.start()
4.758099
3.75622
1.266725
def add_sink(self, sink):
    """Add a vehicle data sink to the instance.

    ``sink`` should be a sub-class of ``DataSink`` or at least have a
    ``receive(message, **kwargs)`` method. The sink is started if it
    is startable (i.e. it has a ``start()`` method).
    """
    if sink is None:
        return
    self.sinks.add(sink)
    if hasattr(sink, 'start'):
        sink.start()
3.621849
3.654196
0.991148
self.callbacks[Measurement.name_from_class(measurement_class) ].add(callback)
def register(self, measurement_class, callback)
Call the ``callback`` with any new values of ``measurement_class`` received.
9.135688
10.652615
0.857601
self.callbacks[Measurement.name_from_class(measurement_class) ].remove(callback)
def unregister(self, measurement_class, callback)
Stop notifying ``callback`` of new values of ``measurement_class``. If the callback wasn't previously registered, this method will have no effect.
7.582695
10.521835
0.720663
self.device.ctrl_transfer(0x40, self.COMPLEX_CONTROL_COMMAND, 0, 0, self.streamer.serialize_for_stream(request))
def _send_complex_request(self, request)
Send a request via the USB control request endpoint, rather than as a bulk transfer.
11.548218
8.937069
1.292171
if getattr(self, '_out_endpoint', None) is None: config = self.device.get_active_configuration() interface_number = config[(0, 0)].bInterfaceNumber interface = usb.util.find_descriptor(config, bInterfaceNumber=interface_number) self._out_endpoint = usb.util.find_descriptor(interface, custom_match = \ lambda e: \ usb.util.endpoint_direction(e.bEndpointAddress) == \ usb.util.ENDPOINT_OUT) if not self._out_endpoint: raise ControllerError( "Couldn't find OUT endpoint on the USB device") return self._out_endpoint
def out_endpoint(self)
Open a reference to the USB device's only OUT endpoint. This method assumes that the USB device configuration has already been set.
2.39687
2.068898
1.158525
def wait_for_responses(self):
    """Block the thread and wait for the response to the given
    request to arrive from the VI. If no matching response is
    received within COMMAND_RESPONSE_TIMEOUT_S seconds, returns
    anyway.
    """
    self.thread.join(self.COMMAND_RESPONSE_TIMEOUT_S)
    #: Stop the handler loop whether or not the join timed out
    self.running = False
    return self.responses
9.558778
5.348297
1.787256
while self.running: try: response = self.queue.get( timeout=self.COMMAND_RESPONSE_TIMEOUT_S) if self._response_matches_request(response): self.responses.append(response) if self.quit_after_first: self.running = False self.queue.task_done() except Empty: break
def handle_responses(self)
Block and wait for responses to this object's original request, or until a timeout (self.COMMAND_RESPONSE_TIMEOUT_S). This function is handy to use as the target function for a thread. The responses received (or None if none was received before the timeout) is stored in a list at self.responses.
3.975893
3.054949
1.30146
# Accept success/failure command responses if super(DiagnosticResponseReceiver, self)._response_matches_request(response): return True if ('bus' in self.diagnostic_request and response.get('bus', None) != self.diagnostic_request['bus']): return False if (self.diagnostic_request['id'] != 0x7df and response.get('id', None) != self.diagnostic_request['id']): return False if (response.get('success', True) and response.get('pid', None) != self.diagnostic_request.get('pid', None)): return False return response.get('mode', None) == self.diagnostic_request['mode']
def _response_matches_request(self, response)
Return true if the response is to a diagnostic request, and the bus, id, mode match. If the request was successful, the PID echo is also checked.
3.915258
3.145465
1.244731
receiver = self._prepare_response_receiver(request, receiver_class=CommandResponseReceiver) self._send_complex_request(request) responses = [] if wait_for_first_response: responses = receiver.wait_for_responses() return responses
def complex_request(self, request, wait_for_first_response=True)
Send a compound command request to the interface over the normal data channel. request - A dict storing the request to send to the VI. It will be serialized to the currently selected output format. wait_for_first_response - If true, this function will block waiting for a response from the VI and return it to the caller. Otherwise, it will send the command and return immediately and any response will be lost.
4.901248
5.469163
0.89616
request = self._build_diagnostic_request(message_id, mode, bus, pid, frequency, payload, decoded_type) diag_response_receiver = None if wait_for_first_response: diag_response_receiver = self._prepare_response_receiver( request, DiagnosticResponseReceiver) request['action'] = 'add' ack_responses = self.complex_request(request, wait_for_ack) diag_responses = None if diag_response_receiver is not None: diag_responses = diag_response_receiver.wait_for_responses() return ack_responses, diag_responses
def create_diagnostic_request(self, message_id, mode, bus=None, pid=None, frequency=None, payload=None, wait_for_ack=True, wait_for_first_response=False, decoded_type=None)
Send a new diagnostic message request to the VI Required: message_id - The message ID (arbitration ID) for the request. mode - the diagnostic mode (or service). Optional: bus - The address of the CAN bus controller to send the request, either 1 or 2 for current VI hardware. pid - The parameter ID, or PID, for the request (e.g. for a mode 1 request). frequency - The frequency in hertz to add this as a recurring diagnostic requests. Must be greater than 0, or None if it is a one-time request. payload - A bytearray to send as the request's optional payload. Only single frame diagnostic requests are supported by the VI firmware in the current version, so the payload has a maximum length of 6. wait_for_ack - If True, will wait for an ACK of the command message. wait_for_first_response - If True, this function will block waiting for a diagnostic response to be received for the request. It will return either after timing out or after 1 matching response is received - there may be more responses to functional broadcast requests that arrive after returning. Returns a tuple of ([list of ACK responses to create request], [list of diagnostic responses received])
3.077454
3.426688
0.898084
def set_passthrough(self, bus, enabled):
    """Control the status of CAN message passthrough for a bus.

    Returns True if the command was successful.
    """
    return self._check_command_response_status({
        "command": "passthrough",
        "bus": bus,
        "enabled": enabled,
    })
5.053048
4.926916
1.025601
def set_payload_format(self, payload_format):
    """Set the payload format for messages sent to and from the VI.

    Returns True if the command was successful.
    """
    status = self._check_command_response_status({
        "command": "payload_format",
        "format": payload_format,
    })
    #: Switch our own format regardless of the status: if the VI was
    #: already in the requested format the command will have failed.
    self.format = payload_format
    return status
6.590707
6.709345
0.982318
request = { "command": "rtc_configuration", "unix_time": unix_time } status = self._check_command_response_status(request) return status
def rtc_configuration(self, unix_time)
Set the Unix time if RTC is supported on the device. Returns True if the command was successful.
4.247895
4.073068
1.042923
request = { "command": "modem_configuration", "host": host, "port": port } status = self._check_command_response_status(request) return status
def modem_configuration(self, host, port)
Set the host:port for the Cellular device to send data to. Returns True if the command was successful.
3.760967
3.676038
1.023103
def set_acceptance_filter_bypass(self, bus, bypass):
    """Control the status of the CAN acceptance filter for a bus.

    Returns True if the command was successful.
    """
    return self._check_command_response_status({
        "command": "af_bypass",
        "bus": bus,
        "bypass": bypass,
    })
5.63235
5.651774
0.996563
def sd_mount_status(self):
    """Request the SD mount status, if available.

    Returns the 'status' field of the first response, or None when no
    response arrived.
    """
    responses = self.complex_request({"command": "sd_mount_status"})
    if responses:
        return responses[0].get('status')
    return None
3.928328
3.304238
1.188876
def write(self, **kwargs):
    """Serialize a raw or translated write request and send it to the
    VI, following the OpenXC message format.
    """
    #: Raw CAN writes carry 'id' + 'data'; anything else is treated
    #: as a translated named-signal write.
    if 'id' in kwargs and 'data' in kwargs:
        return self.write_raw(kwargs['id'], kwargs['data'],
                              bus=kwargs.get('bus', None),
                              frame_format=kwargs.get('frame_format', None))
    return self.write_translated(kwargs['name'], kwargs['value'],
                                 event=kwargs.get('event', None))
3.846457
3.121475
1.232256
def write_translated(self, name, value, event=None):
    """Send a translated write request to the VI.

    Returns the number of bytes written.
    """
    payload = {'name': name}
    #: Coerce CLI-style string values into first-order types
    for key, raw in (('value', value), ('event', event)):
        if raw is not None:
            payload[key] = self._massage_write_value(raw)
    message = self.streamer.serialize_for_stream(payload)
    written = self.write_bytes(message)
    assert written == len(message)
    return written
3.980195
3.913298
1.017095
if not isinstance(message_id, numbers.Number): try: message_id = int(message_id, 0) except ValueError: raise ValueError("ID must be numerical") data = {'id': message_id, 'data': data} if bus is not None: data['bus'] = bus if frame_format is not None: data['frame_format'] = frame_format message = self.streamer.serialize_for_stream(data) bytes_written = self.write_bytes(message) assert bytes_written == len(message) return bytes_written
def write_raw(self, message_id, data, bus=None, frame_format=None)
Send a raw write request to the VI.
2.62255
2.73123
0.960208
if not isinstance(value, numbers.Number): if value == "true": value = True elif value == "false": value = False elif value[0] == '"' and value[-1] == '"': value = value[1:-1] else: try: value = float(value) except ValueError: pass return value
def _massage_write_value(cls, value)
Convert string values from command-line arguments into first-order Python boolean and float objects, if applicable.
1.978863
1.810292
1.093119
valid = False if(('name' in message and 'value' in message) or ('id' in message and 'data' in message)): valid = True return valid
def _validate(cls, message)
Confirm the validity of a given dict as an OpenXC message. Returns: ``True`` if the message contains at least a ``name`` and ``value``, or an ``id`` and ``data``.
5.409641
4.986245
1.084913
if self.unit != units.Undefined and new_value.unit != self.unit: raise AttributeError("%s must be in %s" % ( self.__class__, self.unit)) self._value = new_value
def value(self, new_value)
Set the value of this measurement. Raises: AttributeError: if the new value isn't of the correct units.
4.487616
4.378088
1.025017
args = [] if 'id' in data and 'data' in data: measurement_class = CanMessage args.append("Bus %s: 0x%x" % (data.get('bus', '?'), data['id'])) args.append(data['data']) # TODO grab bus else: measurement_class = cls._class_from_name(data['name']) if measurement_class == Measurement: args.append(data['name']) args.append(data['value']) return measurement_class(*args, event=data.get('event', None), override_unit=True)
def from_dict(cls, data)
Create a new Measurement subclass instance using the given dict. If Measurement.name_from_class was previously called with this data's associated Measurement sub-class in Python, the returned object will be an instance of that sub-class. If the measurement name in ``data`` is unrecognized, the returned object will be of the generic ``Measurement`` type. Args: data (dict): the data for the new measurement, including at least a name and value.
5.75871
5.782301
0.99592
if not getattr(cls, '_measurements_initialized', False): cls._measurement_map = dict((m.name, m) for m in all_measurements()) cls._measurements_initialized = True try: name = getattr(measurement_class, 'name') except AttributeError: raise UnrecognizedMeasurementError("No 'name' attribute in %s" % measurement_class) else: cls._measurement_map[name] = measurement_class return name
def name_from_class(cls, measurement_class)
For a given measurement class, return its generic name. The given class is expected to have a ``name`` attribute, otherwise this function will raise an exception. The point of using this method instead of just trying to grab that attribute in the application is to cache measurement name to class mappings for future use. Returns: the generic OpenXC name for a measurement class. Raises: UnrecognizedMeasurementError: if the class does not have a valid generic name
3.212452
3.383457
0.949459
if getattr(self, 'first_timestamp', None) is None: self.first_timestamp = timestamp LOG.debug("Storing %d as the first timestamp of the trace file %s", self.first_timestamp, self.filename)
def _store_timestamp(self, timestamp)
If not already saved, cache the first timestamp in the active trace file on the instance.
4.739956
3.550438
1.335034
line = self.trace_file.readline() if line == '': if self.loop: self._reopen_file() else: self.trace_file.close() self.trace_file = None raise DataSourceError() message = JsonFormatter.deserialize(line) timestamp = message.get('timestamp', None) if self.realtime and timestamp is not None: self._store_timestamp(timestamp) self._wait(self.starting_time, self.first_timestamp, timestamp) return line + "\x00"
def read(self)
Read a line of data from the input source at a time.
5.721424
5.4635
1.047208
if filename is None: raise DataSourceError("Trace filename is not defined") try: trace_file = open(filename, "r") except IOError as e: raise DataSourceError("Unable to open trace file %s" % filename, e) else: LOG.debug("Opened trace file %s", filename) return trace_file
def _open_file(filename)
Attempt to open the file at ``filename`` for reading. Raises: DataSourceError, if the file cannot be opened.
3.391264
3.141198
1.079608
target_time = starting_time + (timestamp - first_timestamp) time.sleep(max(target_time - time.time(), 0))
def _wait(starting_time, first_timestamp, timestamp)
Given that the first timestamp in the trace file is ``first_timestamp`` and we started playing back the file at ``starting_time``, block until the current ``timestamp`` should occur.
3.229904
3.467296
0.931534
assert quacks_like_dict(a), quacks_like_dict(b) dst = a.copy() stack = [(dst, b)] while stack: current_dst, current_src = stack.pop() for key in current_src: if key not in current_dst: current_dst[key] = current_src[key] else: if (quacks_like_dict(current_src[key]) and quacks_like_dict(current_dst[key])): stack.append((current_dst[key], current_src[key])) elif (quacks_like_list(current_src[key]) and quacks_like_list(current_dst[key])): current_dst[key].extend(current_src[key]) else: current_dst[key] = current_src[key] return dst
def merge(a, b)
Merge two deep dicts non-destructively Uses a stack to avoid maximum recursion depth exceptions >>> a = {'a': 1, 'b': {1: 1, 2: 2}, 'd': 6} >>> b = {'c': 3, 'b': {2: 7}, 'd': {'z': [1, 2, 3]}} >>> c = merge(a, b) >>> from pprint import pprint; pprint(c) {'a': 1, 'b': {1: 1, 2: 7}, 'c': 3, 'd': {'z': [1, 2, 3]}}
1.701985
1.841302
0.924338
return cls(name=node.find("Name").text, bit_position=int(node.find("Bitposition").text), bit_size=int(node.find("Bitsize").text), factor=float(node.find("Factor").text), offset=float(node.find("Offset").text), min_value=float(node.find("Minimum").text), max_value=float(node.find("Maximum").text))
def from_xml_node(cls, node)
Construct a Signal instance from an XML node exported from a Vector CANoe .dbc file.
2.206335
2.015318
1.094783
message_buffer = b"" while self.running: try: message_buffer += self.source.read_logs() except DataSourceError as e: if self.running: LOG.warn("Can't read logs from data source -- stopping: %s", e) break except NotImplementedError as e: LOG.info("%s doesn't support logging" % self) break while True: if "\x00" not in message_buffer: break record, _, remainder = message_buffer.partition(b"\x00") self.record(record) message_buffer = remainder
def run(self)
Continuously read data from the source and attempt to parse a valid message from the buffer of bytes. When a message is parsed, passes it off to the callback if one is set.
4.939901
4.732327
1.043863
def version_to_tuple(version): # Trim off the leading v version_list = version[1:].split('.', 2) if len(version_list) <= 3: [version_list.append(0) for _ in range(3 - len(version_list))] try: return tuple((int(version) for version in version_list)) except ValueError: # not an integer, so it goes to the bottom return (0, 0, 0) x_major, x_minor, x_bugfix = version_to_tuple(x) y_major, y_minor, y_bugfix = version_to_tuple(y) return (cmp(x_major, y_major) or cmp(x_minor, y_minor) or cmp(x_bugfix, y_bugfix))
def compare_versions(x, y)
Expects 2 strings in the format of 'X.Y.Z' where X, Y and Z are integers. It will compare the items which will organize things properly by their major, minor and bugfix version. :: >>> my_list = ['v1.13', 'v1.14.2', 'v1.14.1', 'v1.9', 'v1.1'] >>> sorted(my_list, cmp=compare_versions) ['v1.1', 'v1.9', 'v1.13', 'v1.14.1', 'v1.14.2']
2.419058
2.367456
1.021796
t = np.atleast_1d(t) if check_sorted and np.any(np.diff(t) < 0.0): raise ValueError("the input coordinates must be sorted") if check_sorted and len(t.shape) > 1: raise ValueError("dimension mismatch") self._t = t self._yerr = np.empty_like(self._t) self._yerr[:] = yerr (alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag) = self.kernel.coefficients self._A = np.empty(0) if A is None else A self._U = np.empty((0, 0)) if U is None else U self._V = np.empty((0, 0)) if V is None else V self.solver.compute( self.kernel.jitter, alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag, self._A, self._U, self._V, t, self._yerr**2 ) self.dirty = False
def compute(self, t, yerr=1.123e-12, check_sorted=True, A=None, U=None, V=None)
Compute the extended form of the covariance matrix and factorize Args: t (array[n]): The independent coordinates of the data points. This array must be _sorted_ in ascending order. yerr (Optional[float or array[n]]): The measurement uncertainties for the data points at coordinates ``t``. These values will be added in quadrature to the diagonal of the covariance matrix. (default: ``1.123e-12``) check_sorted (bool): If ``True``, ``t`` will be checked to make sure that it is properly sorted. If ``False``, the coordinates will be assumed to be in the correct order. Raises: ValueError: For un-sorted data or mismatched dimensions. solver.LinAlgError: For non-positive definite matrices.
2.308806
2.447627
0.943283
y = self._process_input(y) resid = y - self.mean.get_value(self._t) try: self._recompute() except solver.LinAlgError: if quiet: return -np.inf raise if len(y.shape) > 1: raise ValueError("dimension mismatch") logdet = self.solver.log_determinant() if not np.isfinite(logdet): return -np.inf loglike = -0.5*(self.solver.dot_solve(resid)+logdet+len(y)*_const) if not np.isfinite(loglike): return -np.inf return loglike
def log_likelihood(self, y, _const=math.log(2.0*math.pi), quiet=False)
Compute the marginalized likelihood of the GP model The factorized matrix from the previous call to :func:`GP.compute` is used so ``compute`` must be called first. Args: y (array[n]): The observations at coordinates ``x`` from :func:`GP.compute`. quiet (bool): If true, return ``-numpy.inf`` for non-positive definite matrices instead of throwing an error. Returns: float: The marginalized likelihood of the GP model. Raises: ValueError: For mismatched dimensions. solver.LinAlgError: For non-positive definite matrices.
3.552099
3.367203
1.054911
if not solver.has_autodiff(): raise RuntimeError("celerite must be compiled with autodiff " "support to use the gradient methods") if not self.kernel.vector_size: return self.log_likelihood(y, quiet=quiet), np.empty(0) y = self._process_input(y) if len(y.shape) > 1: raise ValueError("dimension mismatch") resid = y - self.mean.get_value(self._t) (alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag) = self.kernel.coefficients try: val, grad = self.solver.grad_log_likelihood( self.kernel.jitter, alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag, self._A, self._U, self._V, self._t, resid, self._yerr**2 ) except solver.LinAlgError: if quiet: return -np.inf, np.zeros(self.vector_size) raise if self.kernel._has_coeffs: coeffs_jac = self.kernel.get_coeffs_jacobian() full_grad = np.dot(coeffs_jac, grad[1:]) else: full_grad = np.zeros(self.kernel.vector_size) if self.kernel._has_jitter: jitter_jac = self.kernel.get_jitter_jacobian() full_grad += jitter_jac * grad[0] if self.mean.vector_size: self._recompute() alpha = self.solver.solve(resid) g = self.mean.get_gradient(self._t) full_grad = np.append(full_grad, np.dot(g, alpha)) return val, full_grad
def grad_log_likelihood(self, y, quiet=False)
Compute the gradient of the marginalized likelihood The factorized matrix from the previous call to :func:`GP.compute` is used so ``compute`` must be called first. The gradient is taken with respect to the parameters returned by :func:`GP.get_parameter_vector`. This function requires the `autograd <https://github.com/HIPS/autograd>`_ package. Args: y (array[n]): The observations at coordinates ``x`` from :func:`GP.compute`. quiet (bool): If true, return ``-numpy.inf`` and a gradient vector of zeros for non-positive definite matrices instead of throwing an error. Returns: The gradient of marginalized likelihood with respect to the parameter vector. Raises: ValueError: For mismatched dimensions. solver.LinAlgError: For non-positive definite matrices.
3.188294
3.082791
1.034223
self._recompute() return self.solver.solve(self._process_input(y))
def apply_inverse(self, y)
Apply the inverse of the covariance matrix to a vector or matrix Solve ``K.x = y`` for ``x`` where ``K`` is the covariance matrix of the GP with the white noise and ``yerr`` components included on the diagonal. Args: y (array[n] or array[n, nrhs]): The vector or matrix ``y`` described above. Returns: array[n] or array[n, nrhs]: The solution to the linear system. This will have the same shape as ``y``. Raises: ValueError: For mismatched dimensions.
12.977538
24.856316
0.522102
if kernel is None: kernel = self.kernel if t is not None: t = np.atleast_1d(t) if check_sorted and np.any(np.diff(t) < 0.0): raise ValueError("the input coordinates must be sorted") if check_sorted and len(t.shape) > 1: raise ValueError("dimension mismatch") A = np.empty(0) if A is None else A U = np.empty((0, 0)) if U is None else U V = np.empty((0, 0)) if V is None else V else: if not self.computed: raise RuntimeError("you must call 'compute' first") t = self._t A = self._A U = self._U V = self._V (alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag) = kernel.coefficients return self.solver.dot( kernel.jitter, alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag, A, U, V, t, np.ascontiguousarray(y, dtype=float) )
def dot(self, y, t=None, A=None, U=None, V=None, kernel=None, check_sorted=True)
Dot the covariance matrix into a vector or matrix Compute ``K.y`` where ``K`` is the covariance matrix of the GP without the white noise or ``yerr`` values on the diagonal. Args: y (array[n] or array[n, nrhs]): The vector or matrix ``y`` described above. kernel (Optional[terms.Term]): A different kernel can optionally be provided to compute the matrix ``K`` from a different kernel than the ``kernel`` property on this object. Returns: array[n] or array[n, nrhs]: The dot product ``K.y`` as described above. This will have the same shape as ``y``. Raises: ValueError: For mismatched dimensions.
2.375245
2.453312
0.968179
y = self._process_input(y) if len(y.shape) > 1: raise ValueError("dimension mismatch") if t is None: xs = self._t else: xs = np.ascontiguousarray(t, dtype=float) if len(xs.shape) > 1: raise ValueError("dimension mismatch") # Make sure that the model is computed self._recompute() # Compute the predictive mean. resid = y - self.mean.get_value(self._t) if t is None: alpha = self.solver.solve(resid).flatten() alpha = resid - (self._yerr**2 + self.kernel.jitter) * alpha elif not len(self._A): alpha = self.solver.predict(resid, xs) else: Kxs = self.get_matrix(xs, self._t) alpha = np.dot(Kxs, alpha) mu = self.mean.get_value(xs) + alpha if not (return_var or return_cov): return mu # Predictive variance. Kxs = self.get_matrix(xs, self._t) KxsT = np.ascontiguousarray(Kxs.T, dtype=np.float64) if return_var: var = -np.sum(KxsT*self.apply_inverse(KxsT), axis=0) var += self.kernel.get_value(0.0) return mu, var # Predictive covariance cov = self.kernel.get_value(xs[:, None] - xs[None, :]) cov -= np.dot(Kxs, self.apply_inverse(KxsT)) return mu, cov
def predict(self, y, t=None, return_cov=True, return_var=False)
Compute the conditional predictive distribution of the model You must call :func:`GP.compute` before this method. Args: y (array[n]): The observations at coordinates ``x`` from :func:`GP.compute`. t (Optional[array[ntest]]): The independent coordinates where the prediction should be made. If this is omitted the coordinates will be assumed to be ``x`` from :func:`GP.compute` and an efficient method will be used to compute the prediction. return_cov (Optional[bool]): If ``True``, the full covariance matrix is computed and returned. Otherwise, only the mean prediction is computed. (default: ``True``) return_var (Optional[bool]): If ``True``, only return the diagonal of the predictive covariance; this will be faster to compute than the full covariance matrix. This overrides ``return_cov`` so, if both are set to ``True``, only the diagonal is computed. (default: ``False``) Returns: ``mu``, ``(mu, cov)``, or ``(mu, var)`` depending on the values of ``return_cov`` and ``return_var``. These output values are: (a) **mu** ``(ntest,)``: mean of the predictive distribution, (b) **cov** ``(ntest, ntest)``: the predictive covariance matrix, and (c) **var** ``(ntest,)``: the diagonal elements of ``cov``. Raises: ValueError: For mismatched dimensions.
3.299219
3.310091
0.996715
if x1 is None and x2 is None: if self._t is None or not self.computed: raise RuntimeError("you must call 'compute' first") K = self.kernel.get_value(self._t[:, None] - self._t[None, :]) if include_diagonal is None or include_diagonal: K[np.diag_indices_from(K)] += ( self._yerr**2 + self.kernel.jitter ) if (include_general is None or include_general) and len(self._A): K[np.diag_indices_from(K)] += self._A K += np.tril(np.dot(self._U.T, self._V), -1) K += np.triu(np.dot(self._V.T, self._U), 1) return K incl = False x1 = np.ascontiguousarray(x1, dtype=float) if x2 is None: x2 = x1 incl = include_diagonal is not None and include_diagonal K = self.kernel.get_value(x1[:, None] - x2[None, :]) if incl: K[np.diag_indices_from(K)] += self.kernel.jitter return K
def get_matrix(self, x1=None, x2=None, include_diagonal=None, include_general=None)
Get the covariance matrix at given independent coordinates Args: x1 (Optional[array[n1]]): The first set of independent coordinates. If this is omitted, ``x1`` will be assumed to be equal to ``x`` from a previous call to :func:`GP.compute`. x2 (Optional[array[n2]]): The second set of independent coordinates. If this is omitted, ``x2`` will be assumed to be ``x1``. include_diagonal (Optional[bool]): Should the white noise and ``yerr`` terms be included on the diagonal? (default: ``False``)
2.634676
2.554863
1.031239
self._recompute() if size is None: n = np.random.randn(len(self._t)) else: n = np.random.randn(len(self._t), size) n = self.solver.dot_L(n) if size is None: return self.mean.get_value(self._t) + n[:, 0] return self.mean.get_value(self._t)[None, :] + n.T
def sample(self, size=None)
Sample from the prior distribution over datasets Args: size (Optional[int]): The number of samples to draw. Returns: array[n] or array[size, n]: The samples from the prior distribution over datasets.
3.787884
4.026617
0.940711
mu, cov = self.predict(y, t, return_cov=True) return np.random.multivariate_normal(mu, cov, size=size)
def sample_conditional(self, y, t=None, size=None)
Sample from the conditional (predictive) distribution Note: this method scales as ``O(M^3)`` for large ``M``, where ``M == len(t)``. Args: y (array[n]): The observations at coordinates ``x`` from :func:`GP.compute`. t (Optional[array[ntest]]): The independent coordinates where the prediction should be made. If this is omitted the coordinates will be assumed to be ``x`` from :func:`GP.compute` and an efficient method will be used to compute the prediction. size (Optional[int]): The number of samples to draw. Returns: array[n] or array[size, n]: The samples from the conditional distribution over datasets.
2.692236
3.781408
0.711967
tau = np.asarray(tau) (alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag) = self.coefficients k = get_kernel_value( alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag, tau.flatten(), ) return np.asarray(k).reshape(tau.shape)
def get_value(self, tau)
Compute the value of the term for an array of lags Args: tau (array[...]): An array of lags where the term should be evaluated. Returns: The value of the term for each ``tau``. This will have the same shape as ``tau``.
3.037193
3.405367
0.891884
w = np.asarray(omega) (alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag) = self.coefficients p = get_psd_value( alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag, w.flatten(), ) return p.reshape(w.shape)
def get_psd(self, omega)
Compute the PSD of the term for an array of angular frequencies Args: omega (array[...]): An array of frequencies where the PSD should be evaluated. Returns: The value of the PSD for each ``omega``. This will have the same shape as ``omega``.
2.97069
3.177171
0.935011
return np.empty(0), np.empty(0), np.empty(0), np.empty(0)
def get_complex_coefficients(self, params)
Get the arrays ``alpha_complex_*`` and ``beta_complex_*`` This method should be overloaded by subclasses to return the arrays ``alpha_complex_real``, ``alpha_complex_imag``, ``beta_complex_real``, and ``beta_complex_imag`` given the current parameter settings. By default, this term is empty. Returns: (array[j_complex], array[j_complex], array[j_complex], array[j_complex]): ``alpha_complex_real``, ``alpha_complex_imag``, ``beta_complex_real``, and ``beta_complex_imag`` as described above. ``alpha_complex_imag`` can be omitted and it will be assumed to be zero.
3.631725
3.566725
1.018224
vector = self.get_parameter_vector(include_frozen=True) pars = self.get_all_coefficients(vector) if len(pars) != 6: raise ValueError("there must be 6 coefficient blocks") if any(len(p.shape) != 1 for p in pars): raise ValueError("coefficient blocks must be 1D") if len(pars[0]) != len(pars[1]): raise ValueError("coefficient blocks must have the same shape") if any(len(pars[2]) != len(p) for p in pars[3:]): raise ValueError("coefficient blocks must have the same shape") return pars
def coefficients(self)
All of the coefficient arrays This property is the concatenation of the results from :func:`terms.Term.get_real_coefficients` and :func:`terms.Term.get_complex_coefficients` but it will always return a tuple of length 6, even if ``alpha_complex_imag`` was omitted from ``get_complex_coefficients``. Returns: (array[j_real], array[j_real], array[j_complex], array[j_complex], array[j_complex], array[j_complex]): ``alpha_real``, ``beta_real``, ``alpha_complex_real``, ``alpha_complex_imag``, ``beta_complex_real``, and ``beta_complex_imag`` as described above. Raises: ValueError: For invalid dimensions for the coefficients.
3.039005
2.828437
1.074447
'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' try: return self.maps[0].pop(key, *args) except KeyError: raise KeyError('Key not found in the first mapping: {!r}'.format(key))
def pop(self, key, *args)
Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].
4.547848
2.793597
1.627955
# Limited dimension. if self.dimensions[dim_name] is not None: return max_size def _find_dim(h5group, dim): if dim not in h5group: return _find_dim(h5group.parent, dim) return h5group[dim] dim_variable = _find_dim(self._h5group, dim_name) if "REFERENCE_LIST" not in dim_variable.attrs: return max_size root = self._h5group["/"] for ref, _ in dim_variable.attrs["REFERENCE_LIST"]: var = root[ref] for i, var_d in enumerate(var.dims): name = _name_from_dimension(var_d) if name == dim_name: max_size = max(var.shape[i], max_size) return max_size
def _determine_current_dimension_size(self, dim_name, max_size)
Helper method to determine the current size of a dimension.
3.541164
3.407426
1.039249
dim_order = self._dim_order.maps[0] for dim in sorted(dim_order, key=lambda d: dim_order[d]): if dim not in self._h5group: size = self._current_dim_sizes[dim] kwargs = {} if self._dim_sizes[dim] is None: kwargs["maxshape"] = (None,) self._h5group.create_dataset( name=dim, shape=(size,), dtype='S1', **kwargs) h5ds = self._h5group[dim] h5ds.attrs['_Netcdf4Dimid'] = dim_order[dim] if len(h5ds.shape) > 1: dims = self._variables[dim].dimensions coord_ids = np.array([dim_order[d] for d in dims], 'int32') h5ds.attrs['_Netcdf4Coordinates'] = coord_ids scale_name = dim if dim in self.variables else NOT_A_VARIABLE h5ds.dims.create_scale(h5ds, scale_name) for subgroup in self.groups.values(): subgroup._create_dim_scales()
def _create_dim_scales(self)
Create all necessary HDF5 dimension scale.
3.554897
3.343652
1.063178
for name, var in self.variables.items(): if name not in self.dimensions: for n, dim in enumerate(var.dimensions): var._h5ds.dims[n].attach_scale(self._all_h5groups[dim]) for subgroup in self.groups.values(): subgroup._attach_dim_scales()
def _attach_dim_scales(self)
Attach dimension scales to all variables.
4.466696
3.747283
1.191983