Dataset schema (column name, dtype, value range; ⌀ = nullable):

| column | dtype | range |
|---|---|---|
| hexsha | stringlengths | 40–40 |
| size | int64 | 4–1.02M |
| ext | stringclasses | 8 values |
| lang | stringclasses | 1 value |
| max_stars_repo_path | stringlengths | 4–209 |
| max_stars_repo_name | stringlengths | 5–121 |
| max_stars_repo_head_hexsha | stringlengths | 40–40 |
| max_stars_repo_licenses | listlengths | 1–10 |
| max_stars_count | int64 | 1–191k ⌀ |
| max_stars_repo_stars_event_min_datetime | stringlengths | 24–24 ⌀ |
| max_stars_repo_stars_event_max_datetime | stringlengths | 24–24 ⌀ |
| max_issues_repo_path | stringlengths | 4–209 |
| max_issues_repo_name | stringlengths | 5–121 |
| max_issues_repo_head_hexsha | stringlengths | 40–40 |
| max_issues_repo_licenses | listlengths | 1–10 |
| max_issues_count | int64 | 1–67k ⌀ |
| max_issues_repo_issues_event_min_datetime | stringlengths | 24–24 ⌀ |
| max_issues_repo_issues_event_max_datetime | stringlengths | 24–24 ⌀ |
| max_forks_repo_path | stringlengths | 4–209 |
| max_forks_repo_name | stringlengths | 5–121 |
| max_forks_repo_head_hexsha | stringlengths | 40–40 |
| max_forks_repo_licenses | listlengths | 1–10 |
| max_forks_count | int64 | 1–105k ⌀ |
| max_forks_repo_forks_event_min_datetime | stringlengths | 24–24 ⌀ |
| max_forks_repo_forks_event_max_datetime | stringlengths | 24–24 ⌀ |
| content | stringlengths | 4–1.02M |
| avg_line_length | float64 | 1.07–66.1k |
| max_line_length | int64 | 4–266k |
| alphanum_fraction | float64 | 0.01–1 |
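To make the schema concrete, here is a minimal, hedged sketch of loading and filtering a table with these columns using the `datasets` library; the dataset name `user/source-code-dataset` is a hypothetical placeholder, not the real identifier.

```python
# Minimal sketch, assuming the table is published as a Hugging Face dataset.
# "user/source-code-dataset" is a hypothetical placeholder name.
from datasets import load_dataset

ds = load_dataset("user/source-code-dataset", split="train")

# Keep small MIT-licensed Python files, using the schema columns above.
small_py = ds.filter(
    lambda row: row["ext"] == "py"
    and row["size"] < 10_000
    and "MIT" in row["max_stars_repo_licenses"]
)
print(small_py[0]["max_stars_repo_path"])
print(small_py[0]["content"][:200])
```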
Record 1
hexsha: f5cf563ad5f4520a3d477423e348053686acbf41 | size: 28,545 | ext: py | lang: Python
path: django/contrib/formtools/wizard/views.py | repo: deployed/django | head: 9db4271bd11ac23a5a5652bbcdf8fb6d4b997651 | licenses: ["BSD-3-Clause"] (identical across the stars/issues/forks field groups)
stars: 1 (2016-06-06T07:21:04.000Z to 2016-06-06T07:21:04.000Z) | issues: null | forks: null
content:
from collections import OrderedDict
import re
from django import forms
from django.shortcuts import redirect
from django.core.urlresolvers import reverse
from django.forms import formsets, ValidationError
from django.views.generic import TemplateView
from django.utils.decorators import classonlymethod
from django.utils.translation import ugettext as _
from django.utils import six
from django.contrib.formtools.wizard.storage import get_storage
from django.contrib.formtools.wizard.storage.exceptions import NoFileStorageConfigured
from django.contrib.formtools.wizard.forms import ManagementForm
def normalize_name(name):
"""
Converts camel-case style names into underscore separated words. Example::
>>> normalize_name('oneTwoThree')
'one_two_three'
>>> normalize_name('FourFiveSix')
'four_five_six'
"""
new = re.sub('(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))', '_\\1', name)
return new.lower().strip('_')
class StepsHelper(object):
def __init__(self, wizard):
self._wizard = wizard
def __dir__(self):
return self.all
def __len__(self):
return self.count
def __repr__(self):
return '<StepsHelper for %s (steps: %s)>' % (self._wizard, self.all)
@property
def all(self):
"Returns the names of all steps/forms."
return list(self._wizard.get_form_list())
@property
def count(self):
"Returns the total number of steps/forms in this the wizard."
return len(self.all)
@property
def current(self):
"""
Returns the current step. If no current step is stored in the
storage backend, the first step will be returned.
"""
return self._wizard.storage.current_step or self.first
@property
def first(self):
"Returns the name of the first step."
return self.all[0]
@property
def last(self):
"Returns the name of the last step."
return self.all[-1]
@property
def next(self):
"Returns the next step."
return self._wizard.get_next_step()
@property
def prev(self):
"Returns the previous step."
return self._wizard.get_prev_step()
@property
def index(self):
"Returns the index for the current step."
return self._wizard.get_step_index()
@property
def step0(self):
return int(self.index)
@property
def step1(self):
return int(self.index) + 1
class WizardView(TemplateView):
"""
The WizardView is used to create multi-page forms and handles all the
storage and validation stuff. The wizard is based on Django's generic
class based views.
"""
storage_name = None
form_list = None
initial_dict = None
instance_dict = None
condition_dict = None
template_name = 'formtools/wizard/wizard_form.html'
def __repr__(self):
return '<%s: forms: %s>' % (self.__class__.__name__, self.form_list)
@classonlymethod
def as_view(cls, *args, **kwargs):
"""
This method is used within urls.py to create unique wizardview
instances for every request. We need to override this method because
we add some kwargs which are needed to make the wizardview usable.
"""
initkwargs = cls.get_initkwargs(*args, **kwargs)
return super(WizardView, cls).as_view(**initkwargs)
@classmethod
def get_initkwargs(cls, form_list=None, initial_dict=None,
instance_dict=None, condition_dict=None, *args, **kwargs):
"""
Creates a dict with all needed parameters for the form wizard instances.
* `form_list` - is a list of forms. The list entries can be single form
classes or tuples of (`step_name`, `form_class`). If you pass a list
of forms, the wizardview will convert the class list to
(`zero_based_counter`, `form_class`). This is needed to access the
form for a specific step.
* `initial_dict` - contains a dictionary of initial data dictionaries.
The key should be equal to the `step_name` in the `form_list` (or
          the str of the zero based counter - if no step_names were added to
          the `form_list`).
* `instance_dict` - contains a dictionary whose values are model
instances if the step is based on a ``ModelForm`` and querysets if
the step is based on a ``ModelFormSet``. The key should be equal to
the `step_name` in the `form_list`. Same rules as for `initial_dict`
apply.
* `condition_dict` - contains a dictionary of boolean values or
          callables. If the value for a specific `step_name` is callable, it
will be called with the wizardview instance as the only argument.
If the return value is true, the step's form will be used.
"""
kwargs.update({
'initial_dict': initial_dict or kwargs.pop('initial_dict',
getattr(cls, 'initial_dict', None)) or {},
'instance_dict': instance_dict or kwargs.pop('instance_dict',
getattr(cls, 'instance_dict', None)) or {},
'condition_dict': condition_dict or kwargs.pop('condition_dict',
getattr(cls, 'condition_dict', None)) or {}
})
form_list = form_list or kwargs.pop('form_list',
getattr(cls, 'form_list', None)) or []
computed_form_list = OrderedDict()
assert len(form_list) > 0, 'at least one form is needed'
# walk through the passed form list
for i, form in enumerate(form_list):
if isinstance(form, (list, tuple)):
                # if the element is a tuple, add the tuple to the newly
                # created sorted dictionary.
computed_form_list[six.text_type(form[0])] = form[1]
else:
# if not, add the form with a zero based counter as unicode
computed_form_list[six.text_type(i)] = form
        # walk through the newly created list of forms
for form in six.itervalues(computed_form_list):
if issubclass(form, formsets.BaseFormSet):
# if the element is based on BaseFormSet (FormSet/ModelFormSet)
# we need to override the form variable.
form = form.form
# check if any form contains a FileField, if yes, we need a
# file_storage added to the wizardview (by subclassing).
for field in six.itervalues(form.base_fields):
if (isinstance(field, forms.FileField) and
not hasattr(cls, 'file_storage')):
raise NoFileStorageConfigured(
"You need to define 'file_storage' in your "
"wizard view in order to handle file uploads.")
# build the kwargs for the wizardview instances
kwargs['form_list'] = computed_form_list
return kwargs
def get_prefix(self, *args, **kwargs):
# TODO: Add some kind of unique id to prefix
return normalize_name(self.__class__.__name__)
def get_form_list(self):
"""
This method returns a form_list based on the initial form list but
checks if there is a condition method/value in the condition_list.
If an entry exists in the condition list, it will call/read the value
and respect the result. (True means add the form, False means ignore
the form)
The form_list is always generated on the fly because condition methods
        could use data from other (maybe previous) forms.
"""
form_list = OrderedDict()
for form_key, form_class in six.iteritems(self.form_list):
# try to fetch the value from condition list, by default, the form
# gets passed to the new list.
condition = self.condition_dict.get(form_key, True)
if callable(condition):
# call the value if needed, passes the current instance.
condition = condition(self)
if condition:
form_list[form_key] = form_class
return form_list
def dispatch(self, request, *args, **kwargs):
"""
This method gets called by the routing engine. The first argument is
`request` which contains a `HttpRequest` instance.
The request is stored in `self.request` for later use. The storage
instance is stored in `self.storage`.
After processing the request using the `dispatch` method, the
response gets updated by the storage engine (for example add cookies).
"""
# add the storage engine to the current wizardview instance
self.prefix = self.get_prefix(*args, **kwargs)
self.storage = get_storage(self.storage_name, self.prefix, request,
getattr(self, 'file_storage', None))
self.steps = StepsHelper(self)
response = super(WizardView, self).dispatch(request, *args, **kwargs)
# update the response (e.g. adding cookies)
self.storage.update_response(response)
return response
def get(self, request, *args, **kwargs):
"""
This method handles GET requests.
If a GET request reaches this point, the wizard assumes that the user
just starts at the first step or wants to restart the process.
        The data of the wizard will be reset before rendering the first step.
"""
self.storage.reset()
# reset the current step to the first step.
self.storage.current_step = self.steps.first
return self.render(self.get_form())
def post(self, *args, **kwargs):
"""
This method handles POST requests.
The wizard will render either the current step (if form validation
        wasn't successful), the next step (if the current step was stored
        successfully) or the done view (if no more steps are available).
"""
# Look for a wizard_goto_step element in the posted data which
# contains a valid step name. If one was found, render the requested
# form. (This makes stepping back a lot easier).
wizard_goto_step = self.request.POST.get('wizard_goto_step', None)
if wizard_goto_step and wizard_goto_step in self.get_form_list():
return self.render_goto_step(wizard_goto_step)
# Check if form was refreshed
management_form = ManagementForm(self.request.POST, prefix=self.prefix)
if not management_form.is_valid():
raise ValidationError(
                _('ManagementForm data is missing or has been tampered with.'),
code='missing_management_form',
)
form_current_step = management_form.cleaned_data['current_step']
if (form_current_step != self.steps.current and
self.storage.current_step is not None):
# form refreshed, change current step
self.storage.current_step = form_current_step
# get the form for the current step
form = self.get_form(data=self.request.POST, files=self.request.FILES)
# and try to validate
if form.is_valid():
# if the form is valid, store the cleaned data and files.
self.storage.set_step_data(self.steps.current, self.process_step(form))
self.storage.set_step_files(self.steps.current, self.process_step_files(form))
# check if the current step is the last step
if self.steps.current == self.steps.last:
# no more steps, render done view
return self.render_done(form, **kwargs)
else:
# proceed to the next step
return self.render_next_step(form)
return self.render(form)
def render_next_step(self, form, **kwargs):
"""
This method gets called when the next step/form should be rendered.
`form` contains the last/current form.
"""
# get the form instance based on the data from the storage backend
# (if available).
next_step = self.steps.next
new_form = self.get_form(next_step,
data=self.storage.get_step_data(next_step),
files=self.storage.get_step_files(next_step))
# change the stored current step
self.storage.current_step = next_step
return self.render(new_form, **kwargs)
def render_goto_step(self, goto_step, **kwargs):
"""
This method gets called when the current step has to be changed.
`goto_step` contains the requested step to go to.
"""
self.storage.current_step = goto_step
form = self.get_form(
data=self.storage.get_step_data(self.steps.current),
files=self.storage.get_step_files(self.steps.current))
return self.render(form)
def render_done(self, form, **kwargs):
"""
        This method gets called when all forms have passed. The method should also
re-validate all steps to prevent manipulation. If any form fails to
validate, `render_revalidation_failure` should get called.
        If everything is fine, call `done`.
"""
final_forms = OrderedDict()
# walk through the form list and try to validate the data again.
for form_key in self.get_form_list():
form_obj = self.get_form(step=form_key,
data=self.storage.get_step_data(form_key),
files=self.storage.get_step_files(form_key))
if not form_obj.is_valid():
return self.render_revalidation_failure(form_key, form_obj, **kwargs)
final_forms[form_key] = form_obj
# render the done view and reset the wizard before returning the
        # response. This is needed to prevent rendering done with the
# same data twice.
done_response = self.done(final_forms.values(), form_dict=final_forms, **kwargs)
self.storage.reset()
return done_response
def get_form_prefix(self, step=None, form=None):
"""
Returns the prefix which will be used when calling the actual form for
the given step. `step` contains the step-name, `form` the form which
will be called with the returned prefix.
If no step is given, the form_prefix will determine the current step
automatically.
"""
if step is None:
step = self.steps.current
return str(step)
def get_form_initial(self, step):
"""
Returns a dictionary which will be passed to the form for `step`
        as `initial`. If no initial data was provided while initializing the
        form wizard, an empty dictionary will be returned.
"""
return self.initial_dict.get(step, {})
def get_form_instance(self, step):
"""
        Returns an object which will be passed to the form for `step`
        as `instance`. If no instance object was provided while initializing
the form wizard, None will be returned.
"""
return self.instance_dict.get(step, None)
def get_form_kwargs(self, step=None):
"""
Returns the keyword arguments for instantiating the form
(or formset) on the given step.
"""
return {}
def get_form(self, step=None, data=None, files=None):
"""
Constructs the form for a given `step`. If no `step` is defined, the
current step will be determined automatically.
The form will be initialized using the `data` argument to prefill the
new form. If needed, instance or queryset (for `ModelForm` or
`ModelFormSet`) will be added too.
"""
if step is None:
step = self.steps.current
form_class = self.form_list[step]
# prepare the kwargs for the form instance.
kwargs = self.get_form_kwargs(step)
kwargs.update({
'data': data,
'files': files,
'prefix': self.get_form_prefix(step, form_class),
'initial': self.get_form_initial(step),
})
if issubclass(form_class, (forms.ModelForm, forms.models.BaseInlineFormSet)):
# If the form is based on ModelForm or InlineFormSet,
# add instance if available and not previously set.
kwargs.setdefault('instance', self.get_form_instance(step))
elif issubclass(form_class, forms.models.BaseModelFormSet):
# If the form is based on ModelFormSet, add queryset if available
            # and not previously set.
kwargs.setdefault('queryset', self.get_form_instance(step))
return form_class(**kwargs)
def process_step(self, form):
"""
This method is used to postprocess the form data. By default, it
returns the raw `form.data` dictionary.
"""
return self.get_form_step_data(form)
def process_step_files(self, form):
"""
This method is used to postprocess the form files. By default, it
returns the raw `form.files` dictionary.
"""
return self.get_form_step_files(form)
def render_revalidation_failure(self, step, form, **kwargs):
"""
Gets called when a form doesn't validate when rendering the done
        view. By default, it changes the current step to the failing form's step
and renders the form.
"""
self.storage.current_step = step
return self.render(form, **kwargs)
def get_form_step_data(self, form):
"""
Is used to return the raw form data. You may use this method to
manipulate the data.
"""
return form.data
def get_form_step_files(self, form):
"""
Is used to return the raw form files. You may use this method to
manipulate the data.
"""
return form.files
def get_all_cleaned_data(self):
"""
Returns a merged dictionary of all step cleaned_data dictionaries.
If a step contains a `FormSet`, the key will be prefixed with
'formset-' and contain a list of the formset cleaned_data dictionaries.
"""
cleaned_data = {}
for form_key in self.get_form_list():
form_obj = self.get_form(
step=form_key,
data=self.storage.get_step_data(form_key),
files=self.storage.get_step_files(form_key)
)
if form_obj.is_valid():
if isinstance(form_obj.cleaned_data, (tuple, list)):
cleaned_data.update({
'formset-%s' % form_key: form_obj.cleaned_data
})
else:
cleaned_data.update(form_obj.cleaned_data)
return cleaned_data
def get_cleaned_data_for_step(self, step):
"""
Returns the cleaned data for a given `step`. Before returning the
cleaned data, the stored values are revalidated through the form.
If the data doesn't validate, None will be returned.
"""
if step in self.form_list:
form_obj = self.get_form(step=step,
data=self.storage.get_step_data(step),
files=self.storage.get_step_files(step))
if form_obj.is_valid():
return form_obj.cleaned_data
return None
def get_next_step(self, step=None):
"""
Returns the next step after the given `step`. If no more steps are
available, None will be returned. If the `step` argument is None, the
current step will be determined automatically.
"""
if step is None:
step = self.steps.current
form_list = self.get_form_list()
keys = list(form_list.keys())
key = keys.index(step) + 1
if len(keys) > key:
return keys[key]
return None
def get_prev_step(self, step=None):
"""
Returns the previous step before the given `step`. If there are no
steps available, None will be returned. If the `step` argument is
None, the current step will be determined automatically.
"""
if step is None:
step = self.steps.current
form_list = self.get_form_list()
keys = list(form_list.keys())
key = keys.index(step) - 1
if key >= 0:
return keys[key]
return None
def get_step_index(self, step=None):
"""
Returns the index for the given `step` name. If no step is given,
the current step will be used to get the index.
"""
if step is None:
step = self.steps.current
return list(self.get_form_list().keys()).index(step)
def get_context_data(self, form, **kwargs):
"""
Returns the template context for a step. You can overwrite this method
to add more data for all or some steps. This method returns a
dictionary containing the rendered form step. Available template
context variables are:
* all extra data stored in the storage backend
* `form` - form instance of the current step
* `wizard` - the wizard instance itself
Example:
.. code-block:: python
class MyWizard(WizardView):
def get_context_data(self, form, **kwargs):
context = super(MyWizard, self).get_context_data(form=form, **kwargs)
if self.steps.current == 'my_step_name':
context.update({'another_var': True})
return context
"""
context = super(WizardView, self).get_context_data(form=form, **kwargs)
context.update(self.storage.extra_data)
context['wizard'] = {
'form': form,
'steps': self.steps,
'management_form': ManagementForm(prefix=self.prefix, initial={
'current_step': self.steps.current,
}),
}
return context
def render(self, form=None, **kwargs):
"""
Returns a ``HttpResponse`` containing all needed context data.
"""
form = form or self.get_form()
context = self.get_context_data(form=form, **kwargs)
return self.render_to_response(context)
def done(self, form_list, **kwargs):
"""
        This method must be overridden by a subclass to process the form data
        after all steps have been completed.
"""
raise NotImplementedError("Your %s class has not defined a done() "
"method, which is required." % self.__class__.__name__)
class SessionWizardView(WizardView):
"""
A WizardView with pre-configured SessionStorage backend.
"""
storage_name = 'django.contrib.formtools.wizard.storage.session.SessionStorage'
class CookieWizardView(WizardView):
"""
A WizardView with pre-configured CookieStorage backend.
"""
storage_name = 'django.contrib.formtools.wizard.storage.cookie.CookieStorage'
class NamedUrlWizardView(WizardView):
"""
A WizardView with URL named steps support.
"""
url_name = None
done_step_name = None
@classmethod
def get_initkwargs(cls, *args, **kwargs):
"""
        We require a url_name to reverse URLs later. Additionally, users can
pass a done_step_name to change the URL name of the "done" view.
"""
assert 'url_name' in kwargs, 'URL name is needed to resolve correct wizard URLs'
extra_kwargs = {
'done_step_name': kwargs.pop('done_step_name', 'done'),
'url_name': kwargs.pop('url_name'),
}
initkwargs = super(NamedUrlWizardView, cls).get_initkwargs(*args, **kwargs)
initkwargs.update(extra_kwargs)
assert initkwargs['done_step_name'] not in initkwargs['form_list'], \
'step name "%s" is reserved for "done" view' % initkwargs['done_step_name']
return initkwargs
def get_step_url(self, step):
return reverse(self.url_name, kwargs={'step': step})
def get(self, *args, **kwargs):
"""
        This renders the form or, if needed, performs the HTTP redirects.
"""
step_url = kwargs.get('step', None)
if step_url is None:
if 'reset' in self.request.GET:
self.storage.reset()
self.storage.current_step = self.steps.first
if self.request.GET:
query_string = "?%s" % self.request.GET.urlencode()
else:
query_string = ""
return redirect(self.get_step_url(self.steps.current)
+ query_string)
# is the current step the "done" name/view?
elif step_url == self.done_step_name:
last_step = self.steps.last
return self.render_done(self.get_form(step=last_step,
data=self.storage.get_step_data(last_step),
files=self.storage.get_step_files(last_step)
), **kwargs)
        elif step_url == self.steps.current:
            # URL step name and storage step name are equal, render!
return self.render(self.get_form(
data=self.storage.current_step_data,
files=self.storage.current_step_files,
), **kwargs)
        # The URL step name is not the step in the storage; change the
        # stored step, since the name exists in the form list.
        elif step_url in self.get_form_list():
self.storage.current_step = step_url
return self.render(self.get_form(
data=self.storage.current_step_data,
files=self.storage.current_step_files,
), **kwargs)
# invalid step name, reset to first and redirect.
else:
self.storage.current_step = self.steps.first
return redirect(self.get_step_url(self.steps.first))
def post(self, *args, **kwargs):
"""
        Do a redirect if the user presses the previous-step button. The rest
        of this is super'd from WizardView.
"""
wizard_goto_step = self.request.POST.get('wizard_goto_step', None)
if wizard_goto_step and wizard_goto_step in self.get_form_list():
return self.render_goto_step(wizard_goto_step)
return super(NamedUrlWizardView, self).post(*args, **kwargs)
def get_context_data(self, form, **kwargs):
"""
NamedUrlWizardView provides the url_name of this wizard in the context
dict `wizard`.
"""
context = super(NamedUrlWizardView, self).get_context_data(form=form, **kwargs)
context['wizard']['url_name'] = self.url_name
return context
def render_next_step(self, form, **kwargs):
"""
When using the NamedUrlWizardView, we have to redirect to update the
browser's URL to match the shown step.
"""
next_step = self.get_next_step()
self.storage.current_step = next_step
return redirect(self.get_step_url(next_step))
def render_goto_step(self, goto_step, **kwargs):
"""
This method gets called when the current step has to be changed.
`goto_step` contains the requested step to go to.
"""
self.storage.current_step = goto_step
return redirect(self.get_step_url(goto_step))
def render_revalidation_failure(self, failed_step, form, **kwargs):
"""
When a step fails, we have to redirect the user to the first failing
step.
"""
self.storage.current_step = failed_step
return redirect(self.get_step_url(failed_step))
def render_done(self, form, **kwargs):
"""
When rendering the done view, we have to redirect first (if the URL
name doesn't fit).
"""
if kwargs.get('step', None) != self.done_step_name:
return redirect(self.get_step_url(self.done_step_name))
return super(NamedUrlWizardView, self).render_done(form, **kwargs)
class NamedUrlSessionWizardView(NamedUrlWizardView):
"""
A NamedUrlWizardView with pre-configured SessionStorage backend.
"""
storage_name = 'django.contrib.formtools.wizard.storage.session.SessionStorage'
class NamedUrlCookieWizardView(NamedUrlWizardView):
"""
    A NamedUrlWizardView with pre-configured CookieStorage backend.
"""
storage_name = 'django.contrib.formtools.wizard.storage.cookie.CookieStorage'
avg_line_length: 38.783967 | max_line_length: 90 | alphanum_fraction: 0.625293
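Record 1's file defines the formtools wizard API. As a reading aid, here is a hedged usage sketch of that API: a two-step `SessionWizardView` with a `done()` override and a `condition_dict` callable, as described in the docstrings above. The form classes, `save_contact` helper, and URL line are illustrative assumptions, not part of the record.

```python
# Illustrative use of the WizardView API from the file above.
# ContactForm1/ContactForm2 and save_contact() are hypothetical.
from django import forms
from django.http import HttpResponseRedirect
from django.contrib.formtools.wizard.views import SessionWizardView

class ContactForm1(forms.Form):
    subject = forms.CharField(max_length=100)

class ContactForm2(forms.Form):
    message = forms.CharField(widget=forms.Textarea)

def show_message_step(wizard):
    # condition_dict callable: receives the wizard instance, returns a bool.
    cleaned = wizard.get_cleaned_data_for_step('0') or {}
    return cleaned.get('subject') != 'skip'

class ContactWizard(SessionWizardView):
    def done(self, form_list, **kwargs):
        # Called once every step has re-validated successfully.
        save_contact(form_list)  # hypothetical helper
        return HttpResponseRedirect('/contact/done/')

# In urls.py (illustrative):
# url(r'^contact/$', ContactWizard.as_view(
#     [ContactForm1, ContactForm2],
#     condition_dict={'1': show_message_step})),
```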
Record 2
hexsha: e3326ca8d7b7f34946af23656578298378330c65 | size: 1,627 | ext: py | lang: Python
path: intro/part03-08_string_multiplied/test/test_string_multiplied.py | repo: Hannah-Abi/python-pro-21 | head: 2ce32c4bf118054329d19afdf83c50561be1ada8 | licenses: ["MIT"] (identical across the stars/issues/forks field groups)
stars: null | issues: null | forks: null
content:
import unittest
from unittest.mock import patch
from tmc import points
from tmc.utils import load_module, reload_module, get_stdout
from functools import reduce
from random import randint
exercise = 'src.string_multiplied'
def format_tuple(d : tuple):
return str(d).replace("'","")
@points('2.string_multiplied')
class StringMultipliedTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
with patch('builtins.input', side_effect =['a', '1']):
cls.module = load_module(exercise, 'en')
def test_strings(self):
values = [("hiya","1"),("abc",4),("xyx",7),("hello",2),("test",6)]
for test_case in values:
with patch('builtins.input', side_effect = test_case):
try:
reload_module(self.module)
except:
self.assertTrue(False, f"Make sure that your program works correctly with the input {test_case}")
out = get_stdout()
output = out.split("\n")
corr = test_case[0] * int(test_case[1])
self.assertTrue(len(out) > 0, "Your program does not print out anything with the inputs {}".format(test_case))
self.assertTrue(len(output) == 1, f"Instead of printing out only one row in addition to asking for the inputs from the user, your program's print out is now in {len(output)} rows.")
self.assertEqual(out.strip(), corr, f"The print out is incorrect with the inputs {test_case}: your program's print out is\n{out}\nwhen correct print out is\n{corr}")
if __name__ == '__main__':
unittest.main()
avg_line_length: 43.972973 | max_line_length: 197 | alphanum_fraction: 0.634911
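The test in Record 2 loads `src.string_multiplied`, feeds it a string and a count through `input()`, and asserts that exactly one line is printed, equal to the string repeated that many times. A minimal solution consistent with those checks might look like this; the prompt wording is an assumption, since the test ignores it:

```python
# src/string_multiplied.py — a minimal sketch that satisfies the test above.
# Prompt texts are assumptions; the test only checks the printed result.
text = input("Please type in a string: ")
amount = int(input("Please type in an amount: "))
print(text * amount)
```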
Record 3
hexsha: e53d22761ea0dcabde1eb28e4745d446ff7920ed | size: 1,508 | ext: py | lang: Python
path: airflow/example_dags/example_postgres_to_gcs.py | repo: Densol92/airflow | head: ce3635d64f2eba9616709fb9d998676aa34b0875 | licenses: ["Apache-2.0", "BSD-2-Clause", "MIT", "ECL-2.0", "BSD-3-Clause"] (identical across the stars/issues/forks field groups)
stars: 5 (2020-07-17T07:33:58.000Z to 2022-03-02T06:23:47.000Z) | issues: 7 (2020-06-03T14:55:17.000Z to 2021-12-30T00:01:50.000Z) | forks: 12 (2020-01-09T14:02:39.000Z to 2022-01-24T07:18:51.000Z)
content:
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example DAG using PostgresToGCSOperator.
"""
from airflow import models
from airflow.operators.postgres_to_gcs import PostgresToGCSOperator
from airflow.utils.dates import days_ago
GCS_BUCKET = "postgres_to_gcs_example"
FILENAME = "test_file"
SQL_QUERY = "select * from test_table;"
default_args = {"start_date": days_ago(1)}
with models.DAG(
dag_id='example_postgres_to_gcs',
default_args=default_args,
schedule_interval=None, # Override to match your needs
tags=['example'],
) as dag:
upload_data = PostgresToGCSOperator(
task_id="get_data",
sql=SQL_QUERY,
bucket=GCS_BUCKET,
filename=FILENAME,
gzip=False
)
avg_line_length: 33.511111 | max_line_length: 67 | alphanum_fraction: 0.746021
Record 4
hexsha: 77ba81b9edf7456afc0753fe3a16556e3d93addf | size: 570 | ext: py | lang: Python
path: projects/migrations/0012_auto_20160421_0921.py | repo: kilinger/marathon-rocketchat-hubot | head: 682454b90265eb2c66ea222cf0c970370816a9e1 | licenses: ["BSD-3-Clause"] (identical across the stars/issues/forks field groups)
stars: 1 (2018-07-10T07:03:12.000Z to 2018-07-10T07:03:12.000Z) | issues: null | forks: null
content:
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('projects', '0011_auto_20160317_0943'),
]
operations = [
migrations.AddField(
model_name='project',
name='use_hsts',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='project',
name='redirect_https',
field=models.BooleanField(default=False),
),
]
avg_line_length: 22.8 | max_line_length: 53 | alphanum_fraction: 0.589474
Record 5
hexsha: 7acce6419f3657aac7df0add668c521fe24e4b51 | size: 2,862 | ext: py | lang: Python
path: tensorflow_probability/python/internal/backend/numpy/internal/utils.py | repo: nxdao2000/probability | head: 33d2bc1cb0e7b6284579ea7f3692b9d056e0d700 | licenses: ["Apache-2.0"] (identical across the stars/issues/forks field groups)
stars: null | issues: null | forks: null
content:
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Helper functions for numpy backend."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import importlib
import types
import numpy as np
import tensorflow as tf
__all__ = [
'common_dtype',
'copy_docstring',
'numpy_dtype',
'try_import',
]
# TODO(jvdillon): Get decoration working. Eg,
# # Dependency imports
# import decorator
def copy_docstring(original_fn, new_fn): # pylint: disable=unused-argument
return new_fn
# TODO(jvdillon): Get decoration working. Eg,
# @decorator.decorator
# def wrap(wrapped_fn, *args, **kwargs):
# del wrapped_fn
# return new_fn(*args, **kwargs)
# return wrap(original_fn)
def numpy_dtype(dtype):
if dtype is None:
return None
if hasattr(dtype, 'as_numpy_dtype'):
return dtype.as_numpy_dtype
return dtype
def common_dtype(args_list, dtype_hint=None):
"""Returns explict dtype from `args_list` if exists, else dtype_hint."""
dtype = None
dtype_hint = None if dtype_hint is None else tf.as_dtype(dtype_hint)
for a in tf.nest.flatten(args_list):
if hasattr(a, 'dtype'):
dt = tf.as_dtype(a.dtype)
else:
continue
if dtype is None:
dtype = dt
elif dtype != dt:
raise TypeError('Found incompatible dtypes, {} and {}.'.format(dtype, dt))
if dtype is None and dtype_hint is None:
return None
return (dtype_hint if dtype is None else dtype).as_numpy_dtype
def is_complex(dtype):
"""Returns whether this is a complex floating point type."""
  return np.issubdtype(np.dtype(dtype), np.complexfloating)
class _FakeModule(types.ModuleType):
"""Dummy module which raises `NotImplementedError` on `getattr` access."""
def __init__(self, name, doc):
self._name = name
self._doc = doc
types.ModuleType.__init__(self, name, doc) # pylint: disable=non-parent-init-called
def __dir__(self):
return []
def __getattr__(self, attr):
raise NotImplementedError(self._doc)
def try_import(name): # pylint: disable=invalid-name
try:
return importlib.import_module(name)
except ImportError:
return _FakeModule(name, 'Error loading module "{}".'.format(name))
avg_line_length: 28.058824 | max_line_length: 88 | alphanum_fraction: 0.701258
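To illustrate the dtype-resolution logic of `common_dtype` above, a small hedged example; the inputs are made up for demonstration:

```python
# Demonstrates common_dtype from the file above (inputs are made up).
import numpy as np
import tensorflow as tf

x = np.zeros([3], dtype=np.float64)
y = tf.constant([1., 2., 3.], dtype=tf.float64)

# Both args carry an explicit float64 dtype, so float64 beats the hint.
assert common_dtype([x, y], dtype_hint=tf.float32) == np.float64

# With no dtype-carrying args, the hint is returned as a numpy dtype.
assert common_dtype([None], dtype_hint=tf.float32) == np.float32
```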
Record 6
hexsha: 91e8c3dea059de23d9a228d72dab429e31a7904e | size: 773 | ext: py | lang: Python
path: Codes/Subscriber_Publisher/Tutorial/I_Hear.py | repo: Diwij/ros-tutorials | head: 5f372c5f15035d982ef3dffde08b330f38d8d4fb | licenses: ["MIT"] (identical across the stars/issues/forks field groups)
stars: null | issues: null | forks: null
content:
#!/usr/bin/python
import rospy
from std_msgs.msg import String
def chatter_callback(message):
#get_caller_id(): Get fully resolved name of local node
rospy.loginfo(rospy.get_caller_id() + " I heard %s", message.data)
def listener():
    # In ROS, nodes are uniquely named. If two nodes with the same
    # name are launched, the previous one is kicked off. The
# anonymous=True flag means that rospy will choose a unique
# name for our 'listener' node so that multiple listeners can
# run simultaneously.
rospy.init_node('listener', anonymous=True)
rospy.Subscriber("chatter", String, chatter_callback)
# spin() simply keeps python from exiting until this node is stopped
rospy.spin()
if __name__ == '__main__':
listener()
avg_line_length: 30.92 | max_line_length: 72 | alphanum_fraction: 0.711514
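The listener in Record 6 subscribes to the `chatter` topic. For context, a hedged sketch of the matching publisher node following the standard ROS pub/sub tutorial pattern; the node name and message text are assumptions:

```python
#!/usr/bin/python
# Minimal publisher counterpart to I_Hear.py (a sketch; names assumed).
import rospy
from std_msgs.msg import String

def talker():
    pub = rospy.Publisher('chatter', String, queue_size=10)
    rospy.init_node('talker', anonymous=True)
    rate = rospy.Rate(10)  # publish at 10 Hz
    while not rospy.is_shutdown():
        message = "hello world %s" % rospy.get_time()
        pub.publish(message)
        rate.sleep()

if __name__ == '__main__':
    try:
        talker()
    except rospy.ROSInterruptException:
        pass
```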
Record 7
hexsha: 159dcf5bc58c5ff5bb1d88027b29f045cb4e128a | size: 713 | ext: py | lang: Python
path: scripts/test_SBVAT.py | licenses: ["MIT"] (all field groups)
stars: 1 (2020-11-22T10:14:58.000Z to 2020-11-22T10:14:58.000Z) in repo Sharpiless/GraphGallery @ 5e8895cc2ca2fc06a31bfc58bc3b7a52e1ceddd0
issues: null in repo mengliu1998/GraphGallery @ 025ac09e883f3e1e1b02000e086830c935884a6e
forks: 1 (2020-11-22T10:14:59.000Z to 2020-11-22T10:14:59.000Z) in repo mengliu1998/GraphGallery @ 025ac09e883f3e1e1b02000e086830c935884a6e
content:
import tensorflow as tf
import numpy as np
import networkx as nx
import scipy.sparse as sp
import graphgallery
from graphgallery.data import Planetoid
data = Planetoid('cora', root="~/GraphData/datasets/", verbose=False)
graph = data.graph
idx_train, idx_val, idx_test = data.split()
print(graph)
from graphgallery.nn.models import SBVAT
model = SBVAT(graph, attr_transform="normalize_attr", device="CPU", seed=123)
model.build()
his = model.train(idx_train, idx_val, verbose=1, epochs=200)
loss, accuracy = model.test(idx_test)
print(f'Test loss {loss:.5}, Test accuracy {accuracy:.2%}')
# for testing the predict method
print(f'Predict accuracy {model._test_predict(idx_test):.2%}')
avg_line_length: 29.708333 | max_line_length: 78 | alphanum_fraction: 0.73913
Record 8
hexsha: 2121495a8f471a5f315af2bdbc5cf3fb3223d689 | size: 5,542 | ext: py | lang: Python
path: dCC_Python_SodaMachine/user_interface.py | repo: dmartinez91/SodaMachineDebug | head: 7cd47a52106422b10632fa208b9ef7d372c3f735 | licenses: ["MIT"] (identical across the stars/issues/forks field groups)
stars: null | issues: null | forks: null
content:
import os
def simulation_main_menu():
"""Main menu prompting user to choose an option"""
validate_user_selection = (False, None)
while validate_user_selection[0] is False:
print("\t\t-Simulation menu-")
print("\tPress -1- to begin transaction")
print("\tPress -2- to check wallet for coins")
print("\tPress -3- to check backpack for cans")
print("\tPress -4- to terminate simulation")
user_input = try_parse_int(input())
validate_user_selection = validate_main_menu(user_input)
return validate_user_selection[1]
def validate_main_menu(user_input):
"""Validation function that checks if 'user_input' argument is an int 1-4. No errors."""
switcher = {
1: (True, 1),
2: (True, 2),
3: (True, 3),
4: (True, 4),
}
return switcher.get(user_input, (False, None))
def display_customer_wallet_info(coins_list, total_value):
"""Takes in a list of ints to display number of coins along with total value of coins."""
print(f'You have {coins_list[0]} Quarters')
print(f'You have {coins_list[1]} Dimes')
print(f'You have {coins_list[2]} Nickels')
print(f'You have {coins_list[3]} Pennies')
print(f'Your wallet\'s total value is {total_value}')
def display_welcome():
"""Initial method asking user if they'll make a purchase. No errors."""
print("\nWelcome to the soda machine. We only take coins as payment. \n")
user_response = continue_prompt("Would you like to make a purchase? (y/n):")
if user_response:
return True
else:
print("Please step aside to allow another customer to make a selection")
return False
def output_text(text):
"""User input method that will print to console any string passed in as an argument"""
print(text)
def clear_console():
"""Used for clearing out the console. No errors."""
os.system('cls' if os.name == 'nt' else "clear")
def continue_prompt(text):
"""Validates a 'y' or 'yes' string and returns a True value. No errors."""
switcher = {
"y": True,
"yes": True
}
user_input = input(text).lower()
return switcher.get(user_input, False)
def soda_selection(inventory):
"""Displays purchasable soda inventory and """
validated_user_selection = (False, None)
soda_options = get_unique_can_names(inventory)
while validated_user_selection[0] is False:
print("Please choose from the following options:")
i = 1
for can in soda_options:
print(f"\n\tEnter -{i}- for {can.name} : ${can.price}")
i += 1
user_selection = try_parse_int(input("Selection:"))
validated_user_selection = validate_coin_choice(user_selection, soda_options)
return validated_user_selection[1]
def validate_coin_choice(selection, unique_cans):
"""Translates user menu selection into the name of can that was chosen. No errors."""
if 0 < selection <= len(unique_cans):
return True, unique_cans[selection - 1].name
else:
print("Not a valid selection\n")
return False, None
def try_parse_int(value):
"""Attempts to parse a string into an integer, returns 0 if unable to parse. No errors."""
try:
return int(value)
    except (ValueError, TypeError):
return 0
def get_unique_can_names(inventory):
"""Loops through inventory to create a list of all distinct types of sodas available. No errors."""
unique_cans = []
previous_names = []
for can in inventory:
if can.name in previous_names:
continue
else:
unique_cans.append(can)
previous_names.append(can.name)
return unique_cans
def display_can_cost(selected_can):
"""Displays the name of a can and its price"""
print(f'The price of a {selected_can.name} is ${selected_can.price}')
def display_payment_value(customer_payment):
"""Displays the value of selected coins as customer is choosing coins to deposit"""
total_payment_value = 0
for coin in customer_payment:
total_payment_value += coin.value
total_payment_value = round(total_payment_value, 2)
print(f'You currently have ${total_payment_value} in hand')
def coin_selection():
"""Prompts user to choose which coins to deposit and passes their selection in validate_coin_selection"""
validated_user_selection = (False, None)
while validated_user_selection[0] is False:
print("\n\tEnter -1- for Quarter")
print("\tEnter -2- for Dime")
print("\tEnter -3- for Nickel")
print("\tEnter -4- for Penny")
print("\tEnter -5- for when finished to deposit payment into machine")
user_input = try_parse_int(input())
validated_user_selection = validate_coin_selection(user_input)
if validated_user_selection[0] is False:
print("Not a valid selection try again")
return validated_user_selection[1]
def validate_coin_selection(selection):
"""Validation function that checks if 'selection' arugment is an int 1-5"""
switcher = {
1: (True, "Quarter"),
2: (True, "Dime"),
3: (True, "Nickel"),
4: (True, "Penny"),
5: (True, "Done")
}
return switcher.get(selection, (False, None))
def end_message(soda_name, change_amount):
"""Closing message displaying name of soda purchased and amount of change returned"""
print(f'Enjoy your {soda_name.name}')
if change_amount >= 0:
print(f'Dispensing ${change_amount}')
avg_line_length: 34 | max_line_length: 109 | alphanum_fraction: 0.662035
Record 9
hexsha: 2285f6cba370d9c312925c60c4f86c376956cd9e | size: 2,997 | ext: py | lang: Python
path: examples/demo/basic/draw_layers.py | repo: burnpanck/chaco | head: 6457cdd28625991ba69fbbee105051cab237aa51 | licenses: ["BSD-3-Clause"] (identical across the stars/issues/forks field groups)
stars: 3 (2017-09-17T17:32:06.000Z to 2022-03-15T13:04:43.000Z) | issues: null | forks: 5 (2015-05-17T16:08:11.000Z to 2021-02-23T09:23:42.000Z)
content:
#!/usr/bin/env python
"""
Demonstrates the use of drawing layers in Chaco.
Three classes of functions are plotted: bessels, sines,
and Struve functions.
"""
# Major library imports
from numpy import linspace, sin
from scipy.special import jn, struve
# Enthought library imports
from enable.api import Component, ComponentEditor
from traits.api import HasTraits, Instance
from traitsui.api import Item, Group, View
# Chaco imports
from chaco.api import ArrayPlotData, Plot
from chaco.tools.api import PanTool, ZoomTool
#===============================================================================
# # Create the Chaco plot.
#===============================================================================
def _create_plot_component():
x = linspace(-2.0, 10.0, 100)
pd = ArrayPlotData(x = x)
# Create some line plots of some of the data
plot = Plot(pd, padding=50, border_visible=True, overlay_border=True)
plot.legend.visible = True
# Extend the plot's list of drawing layers
ndx = plot.draw_order.index("plot")
plot.draw_order[ndx:ndx] = ["bessel", "sine", "struve"]
# Draw struve
for i in range(3):
y_name = "struve" + str(i)
pd.set_data(y_name, struve(i, x))
renderer = plot.plot(("x", y_name), color="blue", name=y_name, line_width=2)[0]
renderer.set(draw_layer = "struve", unified_draw=True)
# Draw bessels
for i in range(3):
y_name = "bessel" + str(i)
pd.set_data(y_name, jn(i,x))
renderer = plot.plot(("x", y_name), color="green", name=y_name, line_width=2)[0]
renderer.set(draw_layer = "bessel", unified_draw=True)
# Draw sines
for i in range(3):
y_name = "sine" + str(i)
pd.set_data(y_name, sin(x * (i+1) / 1.5))
renderer = plot.plot(("x", y_name), color="red", name=y_name, line_width=2)[0]
renderer.set(draw_layer="sine", unified_draw=True)
# Attach some tools to the plot
plot.tools.append(PanTool(plot))
zoom = ZoomTool(component=plot, tool_mode="box", always_on=False)
plot.overlays.append(zoom)
return plot
#===============================================================================
# Attributes to use for the plot view.
size=(900,500)
title="Draw order demonstration"
#===============================================================================
# # Demo class that is used by the demo.py application.
#===============================================================================
class Demo(HasTraits):
plot = Instance(Component)
traits_view = View(
Group(
Item('plot', editor=ComponentEditor(size=size),
show_label=False),
orientation = "vertical"),
resizable=True, title=title
)
def _plot_default(self):
return _create_plot_component()
demo = Demo()
if __name__ == "__main__":
demo.configure_traits()
#--EOF---
avg_line_length: 31.882979 | max_line_length: 88 | alphanum_fraction: 0.552219
Record 10
hexsha: 7b625001d98ee53d25976120ce54f146e6753baa | size: 2,323 | ext: py | lang: Python
path: FSRNN.py | repo: amujika/Fast-Slow-LSTM | head: f63a5998d0a3f347f6a4587893e57baa4bd15320 | licenses: ["Apache-2.0"] (identical across the stars/issues/forks field groups)
stars: 110 (2017-05-25T05:28:49.000Z to 2021-06-07T12:08:40.000Z) | issues: 1 (2019-10-17T19:56:35.000Z to 2019-10-22T13:03:46.000Z) | forks: 21 (2017-05-25T23:42:05.000Z to 2020-12-17T03:32:58.000Z)
content:
import tensorflow as tf
class FSRNNCell(tf.contrib.rnn.RNNCell):
def __init__(self, fast_cells, slow_cell, keep_prob=1.0, training=True):
"""Initialize the basic Fast-Slow RNN.
Args:
fast_cells: A list of RNN cells that will be used for the fast RNN.
The cells must be callable, implement zero_state() and all have the
same hidden size, like for example tf.contrib.rnn.BasicLSTMCell.
slow_cell: A single RNN cell for the slow RNN.
keep_prob: Keep probability for the non recurrent dropout. Any kind of
recurrent dropout should be implemented in the RNN cells.
training: If False, no dropout is applied.
"""
self.fast_layers = len(fast_cells)
assert self.fast_layers >= 2, 'At least two fast layers are needed'
self.fast_cells = fast_cells
self.slow_cell = slow_cell
self.keep_prob = keep_prob
if not training: self.keep_prob = 1.0
def __call__(self, inputs, state, scope='FS-RNN'):
F_state = state[0]
S_state = state[1]
with tf.variable_scope(scope):
inputs = tf.nn.dropout(inputs, self.keep_prob)
with tf.variable_scope('Fast_0'):
F_output, F_state = self.fast_cells[0](inputs, F_state)
F_output_drop = tf.nn.dropout(F_output, self.keep_prob)
with tf.variable_scope('Slow'):
S_output, S_state = self.slow_cell(F_output_drop, S_state)
S_output_drop = tf.nn.dropout(S_output, self.keep_prob)
with tf.variable_scope('Fast_1'):
F_output, F_state = self.fast_cells[1](S_output_drop, F_state)
for i in range(2, self.fast_layers):
with tf.variable_scope('Fast_' + str(i)):
# Input cannot be empty for many RNN cells
F_output, F_state = self.fast_cells[i](F_output[:, 0:1] * 0.0, F_state)
F_output_drop = tf.nn.dropout(F_output, self.keep_prob)
return F_output_drop, (F_state, S_state)
def zero_state(self, batch_size, dtype):
F_state = self.fast_cells[0].zero_state(batch_size, dtype)
S_state = self.slow_cell.zero_state(batch_size, dtype)
return (F_state, S_state)
avg_line_length: 43.018519 | max_line_length: 91 | alphanum_fraction: 0.620749
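Per the `FSRNNCell` docstring in Record 10 (at least two fast cells, all with the same hidden size, plus one slow cell), a hedged construction sketch for TF1 graph mode; the sizes and batch shape are arbitrary choices:

```python
# Sketch of building and stepping the FSRNNCell above (TF1 graph mode).
# Hidden sizes and batch shape are arbitrary illustration choices.
import tensorflow as tf

fast_cells = [tf.contrib.rnn.BasicLSTMCell(128) for _ in range(2)]
slow_cell = tf.contrib.rnn.BasicLSTMCell(256)
cell = FSRNNCell(fast_cells, slow_cell, keep_prob=0.9, training=True)

inputs = tf.placeholder(tf.float32, [32, 64])  # (batch, input_dim)
state = cell.zero_state(32, tf.float32)
output, state = cell(inputs, state)            # one timestep
```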
Record 11
hexsha: b3b1262583131a3c0a8b4cfc3d98ff7e00be7005 | size: 3,696 | ext: py | lang: Python
path: sygma/models.py | repo: tenth-speed-writer/sygma | head: f0cc33a2b7c22dc754979ffc871244995126d257 | licenses: ["bzip2-1.0.6"] (identical across the stars/issues/forks field groups)
stars: null | issues: null | forks: null
content:
from django.db import models as m
class Grantmaker(m.Model):
name = m.CharField(max_length=250)
kind = m.CharField(max_length=20,
choices=[("OPEN", "Open"),
("PRIVATE", "Private"),
("GOVT", "Government")])
description = m.CharField(max_length=2500, null=True)
mission = m.CharField(max_length=5000, null=True)
address = m.CharField(max_length=250, null=True)
address2 = m.CharField(max_length=250, null=True)
city = m.CharField(max_length=250, null=True)
state = m.CharField(max_length=20, null=True)
zip_code = m.CharField(max_length=20, null=True)
country = m.CharField(max_length=250, null=True)
email = m.CharField(max_length=250, null=True)
url = m.CharField(max_length=250, null=True)
phone = m.CharField(max_length=20, null=True)
extension = m.CharField(max_length=20, null=True)
class Grant(m.Model):
grantmaker = m.ForeignKey('Grantmaker', on_delete=m.CASCADE)
name = m.CharField(max_length=250)
description = m.CharField(max_length=2500, null=True)
deadline = m.DateTimeField(null=True)
restricted = m.CharField(max_length=20,
choices=[("YES", "Restricted"),
("NO", "Unrestricted"),
("UNK", "Unknown")])
restrictions = m.CharField(max_length=5000, null=True)
# Valid status strings include: "not applied", "in progress," "loi submitted",
# "submitted", "rejected", "offered", "accepted", "received in part", and
# "received in full." Each might have an amount of money, and each might
# have details.
#
# I think it'll be more elegant to let the abstraction layer enforce what must
# and may not have what values, but here's the list for documentation purposes.
#
# Statuses
#
# not applied: none
# in progress: amount (desired ask), details (notes)
# loi submitted: details (notes)
# submitted: amount (ask made), details (notes)
# rejected: details (reason)
# offered: amount (give offered), details (notes)
# accepted: details (notes)
# received: amount (give received), details (notes)
# There may be multiple received statuses between an accepted status and the
# next cycle's in progress status. Each represents one check or other form of
# disbursement submitted by the grantor. A discrepancy here between amount
# offered and the sum of amounts received can indicate an accounting concern.
class Status(m.Model):
grant = m.ForeignKey('Grant', on_delete=m.CASCADE)
status = m.CharField(max_length=50,
choices=[("LOISENT", "Letter of Intent Sent"),
("LOIACCEPTED", "Letter of Intent Accepted"),
("INPROGRESS", "Application in progress"),
("SUBMITTED", "Submitted"),
("REJECTED", "Rejected"),
("OFFERED", "Offered"),
("ACCEPTED", "Accepted"),
("RECEIVED", "Received")])
amount = m.DecimalField(max_digits=18,
decimal_places=2,
null=True)
details = m.CharField(max_length=5000,
null=True)
updated_on = m.DateTimeField(auto_now_add=True)
class Obligation(m.Model):
grant = m.ForeignKey('Grant', on_delete=m.CASCADE)
due = m.DateTimeField(null=True)
title = m.CharField(max_length=250)
details = m.CharField(max_length=5000,
null=True)
fulfilled = m.BooleanField(default=False)
avg_line_length: 44.53012 | max_line_length: 81 | alphanum_fraction: 0.603626
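A hedged sketch of how Record 11's models might be used through the Django ORM; the field values and workflow are assumed examples, and only the model and field names come from models.py above:

```python
# Illustrative ORM usage for the models above (values are made up).
from sygma.models import Grantmaker, Grant, Status

gm = Grantmaker.objects.create(name="Example Foundation", kind="PRIVATE")
grant = Grant.objects.create(grantmaker=gm,
                             name="Community Outreach",
                             restricted="UNK")
# Record an application milestone; updated_on is set automatically.
Status.objects.create(grant=grant, status="SUBMITTED",
                      amount=5000, details="Asked for general support.")
```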
Record 12
hexsha: 1244486c749bbc0b8fbad70022ed8e7f15f38d43 | size: 8,045 | ext: py | lang: Python
path: TimeCockpit.Tasks.Jira/TimeCockpit.Tasks.Jira.py | repo: software-architects/TimeCockpit.Scripts | head: 06877bccdaf89dc4867f528205323d3dab233850 | licenses: ["MIT"] (identical across the stars/issues/forks field groups)
stars: 1 (2020-04-21T04:16:17.000Z to 2020-04-21T04:16:17.000Z) | issues: null | forks: 3 (2019-02-15T04:50:36.000Z to 2020-06-19T07:19:54.000Z)
content:
def importJIRATasks(actionContext):
from TimeCockpit.Data import EntityObject, DataContextType
dc = actionContext.DataContext
if dc.DataContextType != DataContextType.Server:
raise ArgumentException("Action can only be executed on a server data context.")
clr.AddReference("TimeCockpit.Common")
from System.Collections.Generic import List
from System.Globalization import CultureInfo
from TimeCockpit.Common import Logger, LogLevel
from System import DateTime, String, Array, Convert
from System.Text import Encoding
clr.AddReference("System.Core")
import System
clr.ImportExtensions(System.Linq)
# JIRA API
class Issue(object):
def __init__(self, key=None, type=None, summary=None, link=None, status=None, updated=None, timeOriginalEstimate=None, subTaskKeys=None):
self.Key = key
self.Type = type
self.Summary = summary
self.Link = link
self.Status = status
self.Updated = updated
self.TimeOriginalEstimate = timeOriginalEstimate
self.SubTaskKeys = subTaskKeys
class Jira(object):
def __init__(self, repository, username, password):
from System import Uri
self.repository = Uri(repository)
self.username = username
self.password = password
self.requestedFields = [ "summary", "issuetype", "status", "updated", "timeoriginalestimate", "subtasks" ]
def search(self, jql):
clr.AddReference("System.Web")
from System.Web import HttpUtility
from System.Net import HttpWebRequest
from System.IO import StreamReader
clr.AddReference("Newtonsoft.Json")
from Newtonsoft.Json import JsonTextReader
from Newtonsoft.Json.Linq import JObject
from System import Decimal
import Newtonsoft.Json
clr.ImportExtensions(Newtonsoft.Json.Linq)
usernamepw = Convert.ToBase64String(Encoding.UTF8.GetBytes(String.Format("{0}:{1}", self.username, self.password)))
fieldsparam = String.Join(",", self.requestedFields)
requestUri = String.Format("{0}rest/api/2/search?jql={1}&fields={2}", self.repository.AbsoluteUri, HttpUtility.UrlEncode(jql), fieldsparam)
Logger.Write(LogLevel.Verbose, "Jira.Search: {0}", requestUri)
request = HttpWebRequest.Create(requestUri)
request.ContentType = "application/json"
request.Headers.Add("Authorization", "Basic " + usernamepw)
request.Method = "GET"
with request.GetResponse() as response:
with StreamReader(response.GetResponseStream()) as sr:
with JsonTextReader(sr) as jr:
result = JObject.Load(jr)
issues = result["issues"]
items = list()
for issue in issues:
item = Issue()
item.Key = Newtonsoft.Json.Linq.Extensions.Value[String](issue["key"])
fields = issue["fields"]
item.Updated = Newtonsoft.Json.Linq.Extensions.Value[DateTime](fields["updated"])
# transform seconds to hours
estimate = Newtonsoft.Json.Linq.Extensions.Value[System.Object](fields["timeoriginalestimate"])
if estimate is not None:
estimate = Newtonsoft.Json.Linq.Extensions.Value[Decimal](fields["timeoriginalestimate"])
estimate = estimate / (60.0 * 60.0)
item.TimeOriginalEstimate = estimate
status = fields["status"]
item.Status = Newtonsoft.Json.Linq.Extensions.Value[String](status["name"])
item.Summary = Newtonsoft.Json.Linq.Extensions.Value[String](fields["summary"])
type = fields["issuetype"]
item.Type = Newtonsoft.Json.Linq.Extensions.Value[String](type["name"])
item.Link = self.repository.ToString() + "browse/" + item.Key
subTasks = fields["subtasks"]
item.SubTaskKeys = System.Linq.Enumerable.Cast[JObject](subTasks).Select(lambda t: Newtonsoft.Json.Linq.Extensions.Value[String](t["key"])).ToArray[String]()
items.Add(item)
return items;
commit = True
timeDelta = 0.01
jira = Jira("https://....atlassian.net/", "...", "...")
jiraProjects = dc.Select("From P In Project Where :IsNullOrEmpty(P.JiraProject) = False Select P")
for jiraProject in jiraProjects:
dc.BeginTransaction()
try:
jiraName = jiraProject.JiraProject
Logger.Write(LogLevel.Information, "JiraImport: Handling project '{0}'", jiraName)
projectUuid = jiraProject.ProjectUuid
lastUpdated = dc.SelectSingleWithParams({ "Query": "From T In Task Where T.Project = @ProjectUuid Select New With { .LastUpdated = Max(T.JiraUpdated) }", "@ProjectUuid": projectUuid }).LastUpdated
if lastUpdated is None:
lastUpdated = DateTime(1970, 1, 1)
jqlAdditionalCondition = String.Format(" and updated >= '{0}' order by updated asc", lastUpdated.ToString("yyyy-MM-dd HH:mm", CultureInfo.InvariantCulture))
jql = String.Format("project='{0}'{1}", jiraName, jqlAdditionalCondition)
issues = jira.search(jql).ToDictionary(lambda i: i.Key)
if issues.Any():
query = String.Format("From T In Task.Include(*) Where T.Project = @ProjectUuid And T.Code In ({0}) Select T", String.Join(", ", issues.Select(lambda i: String.Format('"{0}"', i.Key)).ToArray()))
tasks = dc.SelectWithParams({ "Query": query, "@ProjectUuid": projectUuid }).GroupBy(lambda t: t.Code).ToDictionary(lambda g: g.Key, lambda g: g.Single())
newIssues = issues.Keys.Except(tasks.Keys).ToArray()
updatedIssues = issues.Keys.Except(newIssues).ToArray()
Logger.Write(LogLevel.Information, "JiraImport: {0} new issues, {1} updated issues for query {2}", newIssues.Length, updatedIssues.Length, jql)
for key in newIssues:
issue = issues[key]
task = dc.CreateTask()
task.APP_BudgetInHours = issue.TimeOriginalEstimate
task.APP_Code = issue.Key
task.APP_Project = jiraProject
task.USR_JiraLink = issue.Link
task.USR_JiraStatus = issue.Status
task.USR_JiraType = issue.Type
task.USR_JiraUpdated = issue.Updated
task.APP_Description = issue.Summary
Logger.Write(LogLevel.Information, "JiraImport: Adding task {0}", key)
dc.SaveObject(task)
for key in updatedIssues:
changed = False
task = tasks[key]
issue = issues[key]
if task.APP_BudgetInHours <> issue.TimeOriginalEstimate:
if (task.APP_BudgetInHours is None and issue.TimeOriginalEstimate is not None) or (task.APP_BudgetInHours is not None and issue.TimeOriginalEstimate is None) or (abs(task.APP_BudgetInHours - issue.TimeOriginalEstimate) > timeDelta):
Logger.Write(LogLevel.Verbose, "JiraImport: Changed property for task {0}: {1}", key, "TimeOriginalEstimate")
task.APP_BudgetInHours = issue.TimeOriginalEstimate
changed = True
if task.USR_JiraLink <> issue.Link:
Logger.Write(LogLevel.Verbose, "JiraImport: Changed property for task {0}: {1}", key, "Link")
task.USR_JiraLink = issue.Link
changed = True
if task.USR_JiraStatus <> issue.Status:
Logger.Write(LogLevel.Verbose, "JiraImport: Changed property for task {0}: {1}", key, "Status")
task.USR_JiraStatus = issue.Status
changed = True
if task.USR_JiraType <> issue.Type:
Logger.Write(LogLevel.Verbose, "JiraImport: Changed property for task {0}: {1}", key, "Type")
task.USR_JiraType = issue.Type
changed = True
if task.USR_JiraUpdated <> issue.Updated:
Logger.Write(LogLevel.Verbose, "JiraImport: Changed property for task {0}: {1}", key, "Updated")
task.USR_JiraUpdated = issue.Updated
changed = True
if task.APP_Description <> issue.Summary:
Logger.Write(LogLevel.Verbose, "JiraImport: Changed property for task {0}: {1}", key, "Summary")
task.APP_Description = issue.Summary
changed = True
if changed:
Logger.Write(LogLevel.Information, "JiraImport: Updating task {0}", key)
dc.SaveObject(task)
else:
Logger.Write(LogLevel.Information, "JiraImport: Skipping unchanged task {0}", key)
if commit:
dc.TryCommitTransaction()
else:
dc.TryRollbackTransaction()
except System.Exception, e:
dc.TryRollbackTransaction()
Logger.Write(LogLevel.Warning, "JiraImport: Exception while handling {0}: {1}\r\n{2}", jiraProject.JiraProject, e.Message, e.StackTrace)
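# A minimal pure-Python sketch (not part of the original script) of the
# new/updated partition that the LINQ Except() calls above compute; the
# sample keys in the comment are illustrative.
def partition_issue_keys(fetched_keys, existing_keys):
    fetched = set(fetched_keys)
    existing = set(existing_keys)
    new_keys = fetched - existing      # issues with no matching task yet
    updated_keys = fetched & existing  # issues whose task must be re-checked
    return sorted(new_keys), sorted(updated_keys)

# Example: partition_issue_keys(["PRJ-1", "PRJ-2"], ["PRJ-2"])
# returns (["PRJ-1"], ["PRJ-2"])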
avg_line_length: 43.722826 | max_line_length: 238 | alphanum_fraction: 0.709633

hexsha: ce69489cd87e15874a2a68b6197327de5dd967d7 | size: 20,478 | ext: py | lang: Python
max_stars_repo: tools/mo/openvino/tools/mo/utils/custom_replacement_config.py in pazamelin/openvino @ b7e8ef910d7ed8e52326d14dc6fd53b71d16ed48, licenses: ["Apache-2.0"], stars: 1 (2021-04-20T08:14:51.000Z to 2021-04-20T08:14:51.000Z)
max_issues_repo: tools/mo/openvino/tools/mo/utils/custom_replacement_config.py in pazamelin/openvino @ b7e8ef910d7ed8e52326d14dc6fd53b71d16ed48, licenses: ["Apache-2.0"], issues: 58 (2020-11-06T12:13:45.000Z to 2022-03-28T13:20:11.000Z)
max_forks_repo: tools/mo/openvino/tools/mo/utils/custom_replacement_config.py in pazamelin/openvino @ b7e8ef910d7ed8e52326d14dc6fd53b71d16ed48, licenses: ["Apache-2.0"], forks: 2 (2019-09-20T01:33:37.000Z to 2019-09-20T08:42:11.000Z)
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import json
import logging as log
import os
from re import compile, match
from openvino.tools.mo.graph.graph import Node, Graph
from openvino.tools.mo.utils.error import Error
from openvino.tools.mo.utils.graph import nodes_matching_name_pattern, sub_graph_between_nodes
from openvino.tools.mo.utils.json_schema import schema_dict
from openvino.tools.mo.utils.utils import get_mo_root_dir, refer_to_faq_msg
class CustomReplacementDescriptor(object):
registered_types = dict()
def __init__(self, replacement_id: str, attrs: dict = None):
"""
Create class instance based on attrs dictionary which is read from the configuration file.
:param attrs:
"""
super(CustomReplacementDescriptor, self).__setattr__('replacement_id', replacement_id)
if attrs is not None:
super(CustomReplacementDescriptor, self).__setattr__('custom_attributes',
attrs.setdefault('custom_attributes', {}))
super(CustomReplacementDescriptor, self).__setattr__('_replacement_desc', attrs.copy())
def __getattr__(self, k):
return self._replacement_desc[k]
def __setattr__(self, k, v):
# you can assign only existing attributes
if k not in self._replacement_desc:
raise AttributeError
self._replacement_desc[k] = v
def has(self, attr):
"""
Check that attribute 'attr' is defined for the CustomReplacementDescriptor.
:param attr: attribute to check.
:return: True if the attribute exists and False otherwise.
"""
return attr in self._replacement_desc
@classmethod
def register_type(cls, match_kind: str, class_type: object):
if match_kind in cls.registered_types:
log.warning('Class for match kind "{}" is already registered'.format(match_kind))
else:
cls.registered_types[match_kind] = class_type
@classmethod
def create_instance(cls, match_kind: str, replacement_id: str, attrs: dict = None):
"""
        Factory method to create the proper object based on match_kind.
        :param match_kind: match kind.
        :param replacement_id: id of the replacement.
        :param attrs: optional attributes to be set.
        :return: object of a sub-class of the CustomReplacementDescriptor class; raises an Error if the match kind is
        not registered.
"""
if attrs is None:
attrs = dict()
if match_kind in cls.registered_types:
return cls.registered_types[match_kind](replacement_id, attrs)
else:
raise Error('No class registered for match kind "{}". Supported match kinds are "{}". '.format(
match_kind, list(cls.registered_types.keys())) +
refer_to_faq_msg(65))
def sub_graph_instances(self):
raise Exception("The function 'get_sub_graph_instances' must be implemented in the sub-class.")
def get_config_file_representation(self):
result = {
'match_kind': self.match_kind, 'instances': self.instances,
'inputs': self.inputs, 'outputs': self.outputs,
'custom_attributes': self.custom_attributes, 'id': self.id
}
if self.has('op'):
result.update({'op': self.op})
return result
def get_inputs_description(self):
"""
Returns description of inputs of the layer with id 'layer_id'. The format of inputs is the following: list of
lists where each list contains information about nodes consuming the same tensor from outside of the graph. Each
        element of the list is a pair whose first element is a regular expression for the name of the node in the
sub-graph and the second is the input port of this node.
:return: description of inputs or None if layer with such id is not registered or information about inputs is
not available.
"""
if 'inputs' not in self._replacement_desc:
log.error("Information about inputs of layer with id '{}' is not available".format(self.replacement_id))
return None
result = list()
for index, input_desc in enumerate(self._replacement_desc['inputs']):
result.append([(inp['node'], inp['port']) for inp in input_desc])
return result
def get_outputs_description(self):
"""
Returns description of outputs of the layer with id 'layer_id'. The format of outputs is the following: list of
pairs where the first element of the pair is a regular expression for the name of the node that produces output
of the sub-graph and the second is the output port of this node.
:return: description of outputs or None if layer with such id is not registered or information about outputs is
not available.
"""
if 'outputs' not in self._replacement_desc:
log.error("Information about outputs of layer with id '{}' is not available")
return None
return [(out['node'], out['port']) for out in self._replacement_desc['outputs']]
def update_custom_replacement_attributes(self, graph: Graph):
"""
        The function runs specific functions to update attributes of the custom replacement description. Currently it
updates information about input/output nodes.
:param graph: graph to operate on.
:return: True if the update process completed successfully.
"""
raise Exception("The function 'update_custom_layer_attributes' must be implemented in the sub-class.")
def validate_data(self):
"""
Validates layer description dictionary.
:return: list of errors identified.
"""
errors = list()
if not self.has('id'):
errors.append("Replacement id is not specified for custom replacement '{}'".format(self.replacement_id))
if not self.has('instances') or self.instances == '':
errors.append("Attribute 'instances' is not specified for replacement '{}'".format(self.replacement_id))
if not self.has('match_kind'):
errors.append("Replacement match type is not specified for replacement '{}'".format(self.replacement_id))
return errors
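# A minimal sketch (not part of the original module) of the registry round
# trip above: a match kind is registered once, then instances are produced
# through the factory method. 'demo' and DemoDescriptor are illustrative
# names, not real match kinds.
def _registry_demo():
    class DemoDescriptor(CustomReplacementDescriptor):
        pass
    CustomReplacementDescriptor.register_type('demo', DemoDescriptor)
    desc = CustomReplacementDescriptor.create_instance('demo', 'my_replacement',
                                                       {'custom_attributes': {}})
    return isinstance(desc, DemoDescriptor)  # True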
class CustomReplacementDescriptorPoints(CustomReplacementDescriptor):
"""
Class that is used to describe custom replacement which is a sub-graph specified by start and end points.
"""
def __init__(self, replacement_id: str, attrs: dict = None):
super().__init__(replacement_id, attrs)
if not self.has('include_inputs_to_sub_graph'):
super(CustomReplacementDescriptorPoints, self).__setattr__('include_inputs_to_sub_graph', True)
if not self.has('include_outputs_to_sub_graph'):
super(CustomReplacementDescriptorPoints, self).__setattr__('include_outputs_to_sub_graph', True)
def get_config_file_representation(self):
result = {
'match_kind': self.match_kind, 'instances': self.instances,
'custom_attributes': self.custom_attributes, 'id': self.id,
'include_inputs_to_sub_graph': bool(self.include_inputs_to_sub_graph),
'include_outputs_to_sub_graph': bool(self.include_outputs_to_sub_graph)
}
if self.has('op'):
result.update({'op': self.op})
return result
def get_inputs_description(self):
return [[('^' + node_name + '$', 0)] for node_name in self.instances['start_points']]
def get_outputs_description(self):
return [('^' + node_name + '$', 0) for node_name in self.instances['end_points']]
def get_internal_input_nodes(self, graph: Graph):
"""
Gets list of node names getting input from outside of the sub-graph. This function checks whether input nodes
specified in the configuration file should be added to the sub-graph or not. If they should not be added to the
sub-graph then input nodes of the sub-graph are children of these nodes.
:param graph: graph to operate on.
:return: list of input node names.
"""
if not self.include_inputs_to_sub_graph:
log.debug('Do not include inputs to sub-graph for replacement with id {}'.format(self.replacement_id))
new_start_nodes = set()
for start_node in self.instances['start_points']:
for _, out_node_name in graph.out_edges(start_node):
new_start_nodes.add(out_node_name)
start_nodes = list(new_start_nodes)
log.debug('New inputs are: {}'.format(start_nodes))
return start_nodes
else:
return self.instances['start_points']
def get_internal_output_nodes(self, graph: Graph):
"""
Gets list of node names producing output outside of the sub-graph. This function checks whether output nodes
specified in the configuration file should be added to the sub-graph or not. If they should not be added to the
sub-graph then output nodes of the sub-graph are parents of these nodes.
:param graph: graph to operate on.
:return: list of output node names.
"""
if not self.include_outputs_to_sub_graph:
log.debug('Do not include outputs of sub-graph for replacement with id {}'.format(self.replacement_id))
new_end_nodes = set()
for end_node in self.instances['end_points']:
for in_node_name, _ in graph.in_edges(end_node):
new_end_nodes.add(in_node_name)
end_nodes = list(new_end_nodes)
log.debug('New outputs are: {}'.format(end_nodes))
return end_nodes
else:
return self.instances['end_points']
def update_custom_replacement_attributes(self, graph: Graph):
if not self.has('instances'):
raise Error("No instance(s) is(are) defined for the custom replacement '{}'. ".format(self.replacement_id) +
refer_to_faq_msg(66))
if not isinstance(self.instances, dict):
raise Error("The instance must be a single dictionary for the custom replacement with id '{}'. ".format(
self.replacement_id) +
refer_to_faq_msg(67))
start_points = self.get_internal_input_nodes(graph)
end_points = self.get_internal_output_nodes(graph)
matched_nodes = sub_graph_between_nodes(graph, start_points, end_points, include_control_flow=False)
output_tensors = set()
input_nodes_mapping = dict() # key is the input tensor name, value is the pair: (input_port, output_node_name)
for src_node_name, dst_node_name, edge_attrs in graph.edges(data=True):
dst_node = graph.node[dst_node_name]
# edge outside sub-graph into sub-graph
if (src_node_name not in matched_nodes) and (dst_node_name in matched_nodes):
tensor_name = src_node_name + ":" + str(edge_attrs['out'])
if tensor_name not in input_nodes_mapping:
input_nodes_mapping[tensor_name] = list()
input_nodes_mapping[tensor_name].append(('^' + dst_node_name + '$', edge_attrs['in']))
# edge from inside sub-graph to outside sub-graph
if (src_node_name in matched_nodes) and (dst_node_name not in matched_nodes):
output_tensors.add(('^' + dst_node['pb'].input[edge_attrs['in']] + '$', edge_attrs['out']))
for node_name in graph.nodes():
node = Node(graph, node_name)
if node_name in matched_nodes and len(node.out_nodes()) == 0 and node['pb'].op != 'Const':
log.debug("Node {} doesn't have output edges. Consider it output".format(node_name))
output_tensors.add(('^' + node_name + '$', 0))
if not self.has('inputs'):
self._replacement_desc['inputs'] = [[{'node': desc[0], 'port': desc[1]} for desc in inp]
for inp in sorted(input_nodes_mapping.values())]
log.debug('Updated inputs of sub-graph for instance "{}"'.format(self.instances))
if not self.has('outputs'):
self._replacement_desc['outputs'] = [{'node': node, 'port': port} for node, port in sorted(output_tensors)]
log.debug('Updated outputs of sub-graph for instance "{}"'.format(self.instances))
def sub_graph_instances(self):
return [self.instances]
CustomReplacementDescriptor.register_type('points', CustomReplacementDescriptorPoints)
class CustomReplacementDescriptorScope(CustomReplacementDescriptor):
"""
Class that is used to describe custom layer which is a sub-graph specified by scope name.
"""
def __init__(self, replacement_id: str, attrs: dict = None):
super().__init__(replacement_id, attrs)
def update_custom_replacement_attributes(self, graph: Graph):
if not self.has('instances') or len(self.instances) == 0:
raise Error("No instances are defined for replacement with id '{}'. ".format(self.replacement_id) +
refer_to_faq_msg(68))
pattern = self.instances[0] # use the first instance pattern to find input/output nodes patterns
# TODO verify that all instances will produce the same sub-graph
matched_nodes = nodes_matching_name_pattern(graph, pattern)
output_tensors = set()
input_nodes_mapping = dict() # key is the input tensor name, value is the pair: (input_port, output_node_name)
for src_node_name, dst_node_name, edge_attrs in graph.edges(data=True):
dst_node = graph.node[dst_node_name]
# edge outside sub-graph into sub-graph
if (src_node_name not in matched_nodes) and (dst_node_name in matched_nodes):
tensor_name = src_node_name + ":" + str(edge_attrs['out'])
if tensor_name not in input_nodes_mapping:
input_nodes_mapping[tensor_name] = list()
input_nodes_mapping[tensor_name].append((generate_pattern_for_node(graph, pattern, dst_node_name),
edge_attrs['in']))
# edge from inside sub-graph to outside sub-graph
if (src_node_name in matched_nodes) and (dst_node_name not in matched_nodes):
output_tensors.add(
(generate_pattern_for_node(graph, pattern, dst_node['pb'].input[edge_attrs['in']]),
edge_attrs['out']))
for node_name in graph.nodes():
node = Node(graph, node_name)
if node_name in matched_nodes and len(node.out_nodes()) == 0 and node['pb'].op != 'Const':
log.debug("Node {} doesn't have output edges. Consider it output".format(node_name))
output_tensors.add((generate_pattern_for_node(graph, pattern, node_name), 0))
if not self.has('inputs') or len(self._replacement_desc['inputs']) == 0:
self._replacement_desc['inputs'] = [[{'node': desc[0], 'port': desc[1]} for desc in inp]
for inp in sorted(input_nodes_mapping.values())]
log.debug('Updated inputs of sub-graph for instance "{}"'.format(self.instances))
if not self.has('outputs') or len(self._replacement_desc['outputs']) == 0:
self._replacement_desc['outputs'] = [{'node': node, 'port': port} for node, port in sorted(output_tensors)]
log.debug('Updated outputs of sub-graph for instance "{}"'.format(self.instances))
def sub_graph_instances(self):
return self.instances
CustomReplacementDescriptor.register_type('scope', CustomReplacementDescriptorScope)
class CustomReplacementDescriptorGeneral(CustomReplacementDescriptor):
def __init__(self, replacement_id: str, attrs: dict = None):
super().__init__(replacement_id, attrs)
def validate_data(self):
"""
Validates layer description dictionary.
:return: list of errors identified.
"""
errors = list()
if not self.has('id'):
errors.append("Replacement id is not specified for custom replacement '{}'".format(self.replacement_id))
if not self.has('match_kind'):
errors.append("Replacement match type is not specified for replacement '{}'".format(self.replacement_id))
return errors
CustomReplacementDescriptor.register_type('general', CustomReplacementDescriptorGeneral)
def parse_custom_replacement_config_file(file_name: str):
"""
Reads custom replacement configuration file file_name.
:param file_name: name of the file to read from.
    :return: list of CustomReplacementDescriptor objects, one per replacement defined in the file.
"""
if not os.path.exists(file_name):
raise Error("Custom replacements configuration file '{}' does not exist. ".format(file_name) +
refer_to_faq_msg(69))
data = load_and_validate_json_config(file_name)
result = list()
validation_errors = list()
for attrs in data:
if 'id' not in attrs:
raise Error('One of the custom replacements in the configuration file "{}" does not contain attribute '
'"id". '.format(file_name) +
refer_to_faq_msg(71))
if 'match_kind' not in attrs:
raise Error('One of the custom replacements in the configuration file "{}" does not contain attribute '
'"match_kind". Possible values are "points", "scope" and "general". '.format(file_name) +
refer_to_faq_msg(71))
desc = CustomReplacementDescriptor.create_instance(attrs['match_kind'], attrs['id'], attrs)
validation_errors.extend(desc.validate_data())
result.append(desc)
if len(validation_errors) > 0:
raise Error("File '{}' validation failed:\n{}. ".format(file_name, "\n".join(validation_errors)) +
refer_to_faq_msg(72))
return result
def generate_pattern_for_node(graph: Graph, sub_graph_pattern: str, node_name: str):
if sub_graph_pattern == '':
return node_name
node_name_components = node_name.split("/")
cur_name = ''
matched_index = None # index of the node name component to start new pattern from
compiled_pattern = compile(sub_graph_pattern)
for index in range(0, len(node_name_components)):
cur_name += node_name_components[index] + "/"
if match(compiled_pattern, cur_name):
matched_index = index
break
if matched_index is None:
raise RuntimeError('Node name "{}" does not match pattern "{}"'.format(node_name, sub_graph_pattern))
if sub_graph_pattern == '' or sub_graph_pattern[-1] != '/':
sub_graph_pattern += '/'
sub_graph_nodes = nodes_matching_name_pattern(graph, sub_graph_pattern)
name_suffix = '/'.join(node_name_components[matched_index + 1:]) + '$'
if len([node for node in sub_graph_nodes if match(sub_graph_pattern + name_suffix, node)]) == 1:
return name_suffix
raise RuntimeError('The pattern that uniquely identifies node "{}" using sub-graph pattern "{}" has not been found'.
format(node_name, sub_graph_pattern))
def load_and_validate_json_config(config_file_name: str):
"""
Reads and validate custom replacement configuration file config_file_name.
:param config_file_name: name of the file to read from.
:return: A dictionary serialized from json config file.
"""
try:
with open(config_file_name, 'r') as f:
json_config = json.load(f)
try:
import fastjsonschema as json_validate
validator = json_validate.compile(schema_dict)
validator(json_config)
except ModuleNotFoundError as e:
log.error("Module 'fastjsonschema' for json validation not installed. Please update requirements.",
extra={'is_warning': True})
except Exception as e:
raise Error("Failed to parse custom replacements configuration file '{}': {}. ".format(config_file_name, e) +
refer_to_faq_msg(70)) from e
return json_config
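# A minimal sketch (not part of the original module) of the JSON layout that
# parse_custom_replacement_config_file() accepts for the 'points' match kind.
# The node names and the temporary file path are illustrative assumptions;
# real configs may carry more fields than the minimum shown here.
_EXAMPLE_POINTS_CONFIG = """
[
    {
        "id": "MyReplacement",
        "match_kind": "points",
        "instances": {
            "start_points": ["input_node"],
            "end_points": ["output_node"]
        },
        "custom_attributes": {}
    }
]
"""

def _parse_demo(path='/tmp/custom_replacements.json'):
    with open(path, 'w') as f:
        f.write(_EXAMPLE_POINTS_CONFIG)
    # returns a list with one CustomReplacementDescriptorPoints instance
    return parse_custom_replacement_config_file(path)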
avg_line_length: 48.990431 | max_line_length: 120 | alphanum_fraction: 0.653286

hexsha: 0c79558344c939752aa45c34d9b7d682db9580dd | size: 697 | ext: py | lang: Python
max_stars_repo: server/src/administer/urls.py in ekbanasolutions/aditas @ 2e8291202cb6781a1a0855a1cccb86348b710b71, licenses: ["MIT"], stars: 2 (2019-01-03T07:42:45.000Z to 2019-03-15T09:06:12.000Z)
max_issues_repo: server/src/administer/urls.py in ekbanatechnology/aditas @ 2e8291202cb6781a1a0855a1cccb86348b710b71, licenses: ["MIT"], issues: null
max_forks_repo: server/src/administer/urls.py in ekbanatechnology/aditas @ 2e8291202cb6781a1a0855a1cccb86348b710b71, licenses: ["MIT"], forks: 2 (2019-01-21T10:34:44.000Z to 2021-07-06T08:49:41.000Z)
from django.conf.urls import url
from nodes import nodes
from . import administer
# from django.conf.urls import include, url
from django.views.generic import TemplateView
urlpatterns = [
url(r'^$', nodes.Nodes_status.as_view(), name="index"),
url(r'^add_cluster$', administer.AddCluster.as_view(), name="add_cluster"),
url(r'^delete_cluster$', administer.DeleteCluster.as_view(), name="delete_cluster"),
url(r'^edit_cluster$', administer.EditCluster.as_view(), name="edit_cluster"),
url(r'^select_cluster/(?P<id>\d+)$', administer.SelectCluster.as_view(), name="select_cluster"),
url(r'^install_service/$', administer.InstallServices.as_view(), name="install_service")
]
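# A minimal usage sketch (not part of the original file): resolving one of the
# named routes above with reverse(). The id value is illustrative, and the
# returned path depends on where this URLconf is mounted; a namespace prefix
# may be needed if the project includes it under an application namespace.
def _reverse_demo():
    from django.urls import reverse
    return reverse('select_cluster', kwargs={'id': 1})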
avg_line_length: 46.466667 | max_line_length: 100 | alphanum_fraction: 0.734577

hexsha: 7dc87e8ffd8c73ead27df67bc0a18f11cbc4a7f0 | size: 341 | ext: py | lang: Python
max_stars_repo: Assignment1/svd.py in njucjc/machine-learning @ 1adcbad8d1e9f9a187036bec01d1a5ce798e44e0, licenses: ["MIT"], stars: null
max_issues_repo: Assignment1/svd.py in njucjc/machine-learning @ 1adcbad8d1e9f9a187036bec01d1a5ce798e44e0, licenses: ["MIT"], issues: null
max_forks_repo: Assignment1/svd.py in njucjc/machine-learning @ 1adcbad8d1e9f9a187036bec01d1a5ce798e44e0, licenses: ["MIT"], forks: null
import numpy as np
def svd_train(raw_data, k):
"""
输入:原始数据矩阵raw_data、降维后的维度k
输出:投影矩阵
参考:https://blog.csdn.net/u012421852/article/details/80439403
"""
u, s, vt = np.linalg.svd(raw_data)
return vt[:k, :len(vt)].T
def svd_test(new_data, v):
"""
输入:新数据和投影矩阵
输出:降维后的数据
"""
return new_data.dot(v)
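# A minimal usage sketch (not part of the original file): project 5-dimensional
# random sample data down to k=2 dimensions. The data is illustrative.
if __name__ == '__main__':
    data = np.random.rand(100, 5)   # 100 samples, 5 features
    v = svd_train(data, 2)          # 5 x 2 projection matrix
    reduced = svd_test(data, v)     # 100 x 2 reduced data
    print(reduced.shape)            # (100, 2)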
avg_line_length: 18.944444 | max_line_length: 64 | alphanum_fraction: 0.612903

hexsha: ea1daa9a7ec8fd959900f68b41adaf0120d44a68 | size: 9,909 | ext: py | lang: Python
max_stars_repo: aea/cli/core.py in lrahmani/agents-aea @ 9bd1d51530fc21bf41b5adea031cda19a94b048b, licenses: ["Apache-2.0"], stars: null
max_issues_repo: aea/cli/core.py in lrahmani/agents-aea @ 9bd1d51530fc21bf41b5adea031cda19a94b048b, licenses: ["Apache-2.0"], issues: null
max_forks_repo: aea/cli/core.py in lrahmani/agents-aea @ 9bd1d51530fc21bf41b5adea031cda19a94b048b, licenses: ["Apache-2.0"], forks: null
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2018-2019 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""Core definitions for the AEA command-line tool."""
import os
import shutil
import sys
import time
from pathlib import Path
from typing import cast
import click
import aea
from aea.cli.add import add
from aea.cli.common import (
AgentDirectory,
Context,
_verify_or_create_private_keys,
check_aea_project,
logger,
)
from aea.cli.config import config
from aea.cli.create import create
from aea.cli.fetch import fetch
from aea.cli.fingerprint import fingerprint
from aea.cli.generate import generate
from aea.cli.init import init
from aea.cli.install import install
from aea.cli.launch import launch
from aea.cli.list import list as _list
from aea.cli.loggers import simple_verbosity_option
from aea.cli.login import login
from aea.cli.logout import logout
from aea.cli.publish import publish
from aea.cli.push import push
from aea.cli.register import register
from aea.cli.remove import remove
from aea.cli.run import run
from aea.cli.scaffold import scaffold
from aea.cli.search import search
from aea.configurations.base import DEFAULT_AEA_CONFIG_FILE
from aea.crypto.ethereum import EthereumCrypto
from aea.crypto.fetchai import FetchAICrypto
from aea.crypto.helpers import (
ETHEREUM_PRIVATE_KEY_FILE,
FETCHAI_PRIVATE_KEY_FILE,
TESTNETS,
_try_generate_testnet_wealth,
_validate_private_key_path,
)
from aea.crypto.ledger_apis import LedgerApis
from aea.crypto.wallet import Wallet
FUNDS_RELEASE_TIMEOUT = 10
@click.group(name="aea")
@click.version_option(aea.__version__, prog_name="aea")
@simple_verbosity_option(logger, default="INFO")
@click.option(
"--skip-consistency-check",
"skip_consistency_check",
is_flag=True,
required=False,
default=False,
help="Skip consistency check.",
)
@click.pass_context
def cli(click_context, skip_consistency_check: bool) -> None:
"""Command-line tool for setting up an Autonomous Economic Agent."""
verbosity_option = click_context.meta.pop("verbosity")
click_context.obj = Context(cwd=".", verbosity=verbosity_option)
click_context.obj.set_config("skip_consistency_check", skip_consistency_check)
@cli.command()
@click.argument(
"agent_name", type=AgentDirectory(), required=True,
)
@click.pass_context
def delete(click_context, agent_name):
"""Delete an agent."""
click.echo("Deleting AEA project directory './{}'...".format(agent_name))
# delete the agent's directory
try:
shutil.rmtree(agent_name, ignore_errors=False)
except OSError:
logger.error(
"An error occurred while deleting the agent directory. Aborting..."
)
sys.exit(1)
@cli.command()
@click.pass_context
@check_aea_project
def freeze(click_context):
"""Get the dependencies."""
ctx = cast(Context, click_context.obj)
for dependency_name, dependency_data in sorted(
ctx.get_dependencies().items(), key=lambda x: x[0]
):
print(dependency_name + dependency_data.get("version", ""))
@cli.command()
@click.option("-p", "--port", default=8080)
@click.pass_context
def gui(click_context, port):
"""Run the CLI GUI."""
import aea.cli_gui # pragma: no cover
click.echo("Running the GUI.....(press Ctrl+C to exit)") # pragma: no cover
aea.cli_gui.run(port) # pragma: no cover
@cli.command()
@click.argument(
"type_",
metavar="TYPE",
type=click.Choice([FetchAICrypto.identifier, EthereumCrypto.identifier, "all"]),
required=True,
)
@click.pass_context
def generate_key(click_context, type_):
"""Generate private keys."""
def _can_write(path) -> bool:
if Path(path).exists():
value = click.confirm(
"The file {} already exists. Do you want to overwrite it?".format(path),
default=False,
)
return value
else:
return True
if type_ in (FetchAICrypto.identifier, "all"):
if _can_write(FETCHAI_PRIVATE_KEY_FILE):
FetchAICrypto().dump(open(FETCHAI_PRIVATE_KEY_FILE, "wb"))
if type_ in (EthereumCrypto.identifier, "all"):
if _can_write(ETHEREUM_PRIVATE_KEY_FILE):
EthereumCrypto().dump(open(ETHEREUM_PRIVATE_KEY_FILE, "wb"))
def _try_add_key(ctx, type_, filepath):
try:
ctx.agent_config.private_key_paths.create(type_, filepath)
except ValueError as e: # pragma: no cover
logger.error(str(e))
sys.exit(1)
ctx.agent_loader.dump(
ctx.agent_config, open(os.path.join(ctx.cwd, DEFAULT_AEA_CONFIG_FILE), "w")
)
@cli.command()
@click.argument(
"type_",
metavar="TYPE",
type=click.Choice([FetchAICrypto.identifier, EthereumCrypto.identifier]),
required=True,
)
@click.argument(
"file",
metavar="FILE",
type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
required=True,
)
@click.pass_context
@check_aea_project
def add_key(click_context, type_, file):
"""Add a private key to the wallet."""
ctx = cast(Context, click_context.obj)
_validate_private_key_path(file, type_)
_try_add_key(ctx, type_, file)
def _try_get_address(ctx, type_):
private_key_paths = {
config_pair[0]: config_pair[1]
for config_pair in ctx.agent_config.private_key_paths.read_all()
}
try:
wallet = Wallet(private_key_paths)
address = wallet.addresses[type_]
return address
except ValueError as e: # pragma: no cover
logger.error(str(e))
sys.exit(1)
@cli.command()
@click.argument(
"type_",
metavar="TYPE",
type=click.Choice([FetchAICrypto.identifier, EthereumCrypto.identifier]),
required=True,
)
@click.pass_context
@check_aea_project
def get_address(click_context, type_):
"""Get the address associated with the private key."""
ctx = cast(Context, click_context.obj)
_verify_or_create_private_keys(ctx)
address = _try_get_address(ctx, type_)
click.echo(address)
def _try_get_balance(agent_config, wallet, type_):
try:
ledger_apis = LedgerApis(
agent_config.ledger_apis_dict, agent_config.default_ledger
)
address = wallet.addresses[type_]
return ledger_apis.token_balance(type_, address)
except (AssertionError, ValueError) as e: # pragma: no cover
logger.error(str(e))
sys.exit(1)
def _try_get_wealth(ctx, type_):
private_key_paths = {
config_pair[0]: config_pair[1]
for config_pair in ctx.agent_config.private_key_paths.read_all()
}
wallet = Wallet(private_key_paths)
return _try_get_balance(ctx.agent_config, wallet, type_)
@cli.command()
@click.argument(
"type_",
metavar="TYPE",
type=click.Choice([FetchAICrypto.identifier, EthereumCrypto.identifier]),
required=True,
)
@click.pass_context
@check_aea_project
def get_wealth(click_context, type_):
    """Get the wealth associated with the private key."""
    ctx = cast(Context, click_context.obj)
    _verify_or_create_private_keys(ctx)
wealth = _try_get_wealth(ctx, type_)
click.echo(wealth)
def _wait_funds_release(agent_config, wallet, type_):
start_balance = _try_get_balance(agent_config, wallet, type_)
end_time = time.time() + FUNDS_RELEASE_TIMEOUT
while time.time() < end_time:
if start_balance != _try_get_balance(agent_config, wallet, type_):
break # pragma: no cover
else:
time.sleep(1)
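# A minimal sketch (not part of the original module) of the poll-until-changed
# pattern that _wait_funds_release() uses above, with the balance lookup
# abstracted into a callable; the 1-second interval mirrors the code above.
def _poll_until_changed(read_value, timeout=FUNDS_RELEASE_TIMEOUT):
    start = read_value()
    end_time = time.time() + timeout
    while time.time() < end_time:
        if read_value() != start:
            return True  # the observed value changed within the timeout
        time.sleep(1)
    return False  # timed out without observing a change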
def _try_generate_wealth(ctx, type_, sync):
private_key_paths = {
config_pair[0]: config_pair[1]
for config_pair in ctx.agent_config.private_key_paths.read_all()
}
wallet = Wallet(private_key_paths)
try:
address = wallet.addresses[type_]
testnet = TESTNETS[type_]
click.echo(
"Requesting funds for address {} on test network '{}'".format(
address, testnet
)
)
_try_generate_testnet_wealth(type_, address)
if sync:
_wait_funds_release(ctx.agent_config, wallet, type_)
except (AssertionError, ValueError) as e: # pragma: no cover
logger.error(str(e))
sys.exit(1)
@cli.command()
@click.argument(
"type_",
metavar="TYPE",
type=click.Choice([FetchAICrypto.identifier, EthereumCrypto.identifier]),
required=True,
)
@click.option(
"--sync", is_flag=True, help="For waiting till the faucet has released the funds."
)
@click.pass_context
@check_aea_project
def generate_wealth(click_context, sync, type_):
"""Generate wealth for address on test network."""
ctx = cast(Context, click_context.obj)
_verify_or_create_private_keys(ctx)
_try_generate_wealth(ctx, type_, sync)
cli.add_command(_list)
cli.add_command(add)
cli.add_command(create)
cli.add_command(config)
cli.add_command(fetch)
cli.add_command(fingerprint)
cli.add_command(generate)
cli.add_command(init)
cli.add_command(install)
cli.add_command(launch)
cli.add_command(login)
cli.add_command(logout)
cli.add_command(publish)
cli.add_command(push)
cli.add_command(register)
cli.add_command(remove)
cli.add_command(run)
cli.add_command(scaffold)
cli.add_command(search)
avg_line_length: 29.144118 | max_line_length: 88 | alphanum_fraction: 0.694116

hexsha: d1f94c908b073d78742d30e220d3e99477afa69c | size: 32,419 | ext: py | lang: Python
max_stars_repo: CodeIA/venv/Lib/site-packages/mpmath/calculus/optimization.py in Finasty-lab/IA-Python @ 286113504906fec11a5aa5fd1d12e38536b1c859, licenses: ["Apache-2.0"], stars: 445 (2019-01-26T13:50:26.000Z to 2022-03-18T05:17:38.000Z)
max_issues_repo: Library/lib/python3.7/site-packages/mpmath/calculus/optimization.py in gengyong/Carnets @ 8930a14f69360d4db115a85ff9e0f6efa80fa2e7, licenses: ["BSD-3-Clause"], issues: 242 (2019-01-29T15:48:27.000Z to 2022-03-31T22:09:21.000Z)
max_forks_repo: Library/lib/python3.7/site-packages/mpmath/calculus/optimization.py in gengyong/Carnets @ 8930a14f69360d4db115a85ff9e0f6efa80fa2e7, licenses: ["BSD-3-Clause"], forks: 31 (2019-03-10T09:51:27.000Z to 2022-02-14T23:11:12.000Z)
from copy import copy
from ..libmp.backend import xrange, print_
class OptimizationMethods(object):
def __init__(ctx):
pass
##############
# 1D-SOLVERS #
##############
class Newton:
"""
1d-solver generating pairs of approximative root and error.
Needs starting points x0 close to the root.
Pro:
* converges fast
* sometimes more robust than secant with bad second starting point
Contra:
* converges slowly for multiple roots
* needs first derivative
* 2 function evaluations per iteration
"""
maxsteps = 20
def __init__(self, ctx, f, x0, **kwargs):
self.ctx = ctx
if len(x0) == 1:
self.x0 = x0[0]
else:
raise ValueError('expected 1 starting point, got %i' % len(x0))
self.f = f
if not 'df' in kwargs:
def df(x):
return self.ctx.diff(f, x)
else:
df = kwargs['df']
self.df = df
def __iter__(self):
f = self.f
df = self.df
x0 = self.x0
while True:
x1 = x0 - f(x0) / df(x0)
error = abs(x1 - x0)
x0 = x1
yield (x1, error)
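# A minimal sketch (not part of the original module): the Newton iterator above
# is normally reached through findroot(); the target function, starting point
# and precision below are illustrative.
def _newton_demo():
    from mpmath import mp, findroot, cos, sin
    mp.dps = 25
    # root of cos(x) - x near 0.739, with the derivative supplied explicitly
    return findroot(lambda x: cos(x) - x, 1, solver='newton',
                    df=lambda x: -sin(x) - 1)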
class Secant:
"""
1d-solver generating pairs of approximative root and error.
Needs starting points x0 and x1 close to the root.
x1 defaults to x0 + 0.25.
Pro:
* converges fast
Contra:
* converges slowly for multiple roots
"""
maxsteps = 30
def __init__(self, ctx, f, x0, **kwargs):
self.ctx = ctx
if len(x0) == 1:
self.x0 = x0[0]
self.x1 = self.x0 + 0.25
elif len(x0) == 2:
self.x0 = x0[0]
self.x1 = x0[1]
else:
raise ValueError('expected 1 or 2 starting points, got %i' % len(x0))
self.f = f
def __iter__(self):
f = self.f
x0 = self.x0
x1 = self.x1
f0 = f(x0)
while True:
f1 = f(x1)
l = x1 - x0
if not l:
break
s = (f1 - f0) / l
if not s:
break
x0, x1 = x1, x1 - f1/s
f0 = f1
yield x1, abs(l)
class MNewton:
"""
1d-solver generating pairs of approximative root and error.
Needs starting point x0 close to the root.
Uses modified Newton's method that converges fast regardless of the
multiplicity of the root.
Pro:
* converges fast for multiple roots
Contra:
* needs first and second derivative of f
* 3 function evaluations per iteration
"""
maxsteps = 20
def __init__(self, ctx, f, x0, **kwargs):
self.ctx = ctx
if not len(x0) == 1:
raise ValueError('expected 1 starting point, got %i' % len(x0))
self.x0 = x0[0]
self.f = f
if not 'df' in kwargs:
def df(x):
return self.ctx.diff(f, x)
else:
df = kwargs['df']
self.df = df
if not 'd2f' in kwargs:
def d2f(x):
return self.ctx.diff(df, x)
else:
            d2f = kwargs['d2f']
self.d2f = d2f
def __iter__(self):
x = self.x0
f = self.f
df = self.df
d2f = self.d2f
while True:
prevx = x
fx = f(x)
if fx == 0:
break
dfx = df(x)
d2fx = d2f(x)
# x = x - F(x)/F'(x) with F(x) = f(x)/f'(x)
x -= fx / (dfx - fx * d2fx / dfx)
error = abs(x - prevx)
yield x, error
class Halley:
"""
1d-solver generating pairs of approximative root and error.
Needs a starting point x0 close to the root.
Uses Halley's method with cubic convergence rate.
Pro:
    * converges even faster than Newton's method
* useful when computing with *many* digits
Contra:
* needs first and second derivative of f
* 3 function evaluations per iteration
* converges slowly for multiple roots
"""
maxsteps = 20
def __init__(self, ctx, f, x0, **kwargs):
self.ctx = ctx
if not len(x0) == 1:
raise ValueError('expected 1 starting point, got %i' % len(x0))
self.x0 = x0[0]
self.f = f
if not 'df' in kwargs:
def df(x):
return self.ctx.diff(f, x)
else:
df = kwargs['df']
self.df = df
if not 'd2f' in kwargs:
def d2f(x):
return self.ctx.diff(df, x)
else:
            d2f = kwargs['d2f']
self.d2f = d2f
def __iter__(self):
x = self.x0
f = self.f
df = self.df
d2f = self.d2f
while True:
prevx = x
fx = f(x)
dfx = df(x)
d2fx = d2f(x)
x -= 2*fx*dfx / (2*dfx**2 - fx*d2fx)
error = abs(x - prevx)
yield x, error
class Muller:
"""
1d-solver generating pairs of approximative root and error.
Needs starting points x0, x1 and x2 close to the root.
x1 defaults to x0 + 0.25; x2 to x1 + 0.25.
Uses Muller's method that converges towards complex roots.
Pro:
* converges fast (somewhat faster than secant)
* can find complex roots
Contra:
* converges slowly for multiple roots
* may have complex values for real starting points and real roots
http://en.wikipedia.org/wiki/Muller's_method
"""
maxsteps = 30
def __init__(self, ctx, f, x0, **kwargs):
self.ctx = ctx
if len(x0) == 1:
self.x0 = x0[0]
self.x1 = self.x0 + 0.25
self.x2 = self.x1 + 0.25
elif len(x0) == 2:
self.x0 = x0[0]
self.x1 = x0[1]
self.x2 = self.x1 + 0.25
elif len(x0) == 3:
self.x0 = x0[0]
self.x1 = x0[1]
self.x2 = x0[2]
else:
raise ValueError('expected 1, 2 or 3 starting points, got %i'
% len(x0))
self.f = f
self.verbose = kwargs['verbose']
def __iter__(self):
f = self.f
x0 = self.x0
x1 = self.x1
x2 = self.x2
fx0 = f(x0)
fx1 = f(x1)
fx2 = f(x2)
while True:
# TODO: maybe refactoring with function for divided differences
# calculate divided differences
fx2x1 = (fx1 - fx2) / (x1 - x2)
fx2x0 = (fx0 - fx2) / (x0 - x2)
fx1x0 = (fx0 - fx1) / (x0 - x1)
w = fx2x1 + fx2x0 - fx1x0
fx2x1x0 = (fx1x0 - fx2x1) / (x0 - x2)
if w == 0 and fx2x1x0 == 0:
if self.verbose:
print_('canceled with')
print_('x0 =', x0, ', x1 =', x1, 'and x2 =', x2)
break
x0 = x1
fx0 = fx1
x1 = x2
fx1 = fx2
# denominator should be as large as possible => choose sign
r = self.ctx.sqrt(w**2 - 4*fx2*fx2x1x0)
if abs(w - r) > abs(w + r):
r = -r
x2 -= 2*fx2 / (w + r)
fx2 = f(x2)
error = abs(x2 - x1)
yield x2, error
# TODO: consider raising a ValueError when there's no sign change in a and b
class Bisection:
"""
1d-solver generating pairs of approximative root and error.
Uses bisection method to find a root of f in [a, b].
Might fail for multiple roots (needs sign change).
Pro:
* robust and reliable
Contra:
* converges slowly
* needs sign change
"""
maxsteps = 100
def __init__(self, ctx, f, x0, **kwargs):
self.ctx = ctx
if len(x0) != 2:
raise ValueError('expected interval of 2 points, got %i' % len(x0))
self.f = f
self.a = x0[0]
self.b = x0[1]
def __iter__(self):
f = self.f
a = self.a
b = self.b
l = b - a
fb = f(b)
while True:
m = self.ctx.ldexp(a + b, -1)
fm = f(m)
sign = fm * fb
if sign < 0:
a = m
elif sign > 0:
b = m
fb = fm
else:
yield m, self.ctx.zero
l /= 2
yield (a + b)/2, abs(l)
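# A minimal sketch (not part of the original module): bisection needs an
# interval with a sign change; the interval below is illustrative.
def _bisect_demo():
    from mpmath import findroot, sin
    # sin changes sign on (3, 4), so bisection converges towards pi
    return findroot(sin, (3, 4), solver='bisect')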
def _getm(method):
"""
Return a function to calculate m for Illinois-like methods.
"""
if method == 'illinois':
def getm(fz, fb):
return 0.5
elif method == 'pegasus':
def getm(fz, fb):
return fb/(fb + fz)
elif method == 'anderson':
def getm(fz, fb):
m = 1 - fz/fb
if m > 0:
return m
else:
return 0.5
else:
raise ValueError("method '%s' not recognized" % method)
return getm
class Illinois:
"""
1d-solver generating pairs of approximative root and error.
Uses Illinois method or similar to find a root of f in [a, b].
Might fail for multiple roots (needs sign change).
Combines bisect with secant (improved regula falsi).
The only difference between the methods is the scaling factor m, which is
used to ensure convergence (you can choose one using the 'method' keyword):
Illinois method ('illinois'):
m = 0.5
Pegasus method ('pegasus'):
m = fb/(fb + fz)
Anderson-Bjoerk method ('anderson'):
m = 1 - fz/fb if positive else 0.5
Pro:
* converges very fast
Contra:
* has problems with multiple roots
* needs sign change
"""
maxsteps = 30
def __init__(self, ctx, f, x0, **kwargs):
self.ctx = ctx
if len(x0) != 2:
raise ValueError('expected interval of 2 points, got %i' % len(x0))
self.a = x0[0]
self.b = x0[1]
self.f = f
self.tol = kwargs['tol']
self.verbose = kwargs['verbose']
self.method = kwargs.get('method', 'illinois')
self.getm = _getm(self.method)
if self.verbose:
print_('using %s method' % self.method)
def __iter__(self):
method = self.method
f = self.f
a = self.a
b = self.b
fa = f(a)
fb = f(b)
m = None
while True:
l = b - a
if l == 0:
break
s = (fb - fa) / l
z = a - fa/s
fz = f(z)
if abs(fz) < self.tol:
# TODO: better condition (when f is very flat)
if self.verbose:
print_('canceled with z =', z)
yield z, l
break
if fz * fb < 0: # root in [z, b]
a = b
fa = fb
b = z
fb = fz
else: # root in [a, z]
m = self.getm(fz, fb)
b = z
fb = fz
fa = m*fa # scale down to ensure convergence
if self.verbose and m and not method == 'illinois':
print_('m:', m)
yield (a + b)/2, abs(l)
def Pegasus(*args, **kwargs):
"""
1d-solver generating pairs of approximative root and error.
Uses Pegasus method to find a root of f in [a, b].
Wrapper for illinois to use method='pegasus'.
"""
kwargs['method'] = 'pegasus'
return Illinois(*args, **kwargs)
def Anderson(*args, **kwargs):
"""
1d-solver generating pairs of approximative root and error.
Uses Anderson-Bjoerk method to find a root of f in [a, b].
    Wrapper for illinois to use method='anderson'.
"""
kwargs['method'] = 'anderson'
return Illinois(*args, **kwargs)
# TODO: check whether it's possible to combine it with Illinois stuff
class Ridder:
"""
1d-solver generating pairs of approximative root and error.
Ridders' method to find a root of f in [a, b].
    Is said to perform as well as Brent's method while being simpler.
Pro:
* very fast
* simpler than Brent's method
Contra:
* two function evaluations per step
* has problems with multiple roots
* needs sign change
http://en.wikipedia.org/wiki/Ridders'_method
"""
maxsteps = 30
def __init__(self, ctx, f, x0, **kwargs):
self.ctx = ctx
self.f = f
if len(x0) != 2:
raise ValueError('expected interval of 2 points, got %i' % len(x0))
self.x1 = x0[0]
self.x2 = x0[1]
self.verbose = kwargs['verbose']
self.tol = kwargs['tol']
def __iter__(self):
ctx = self.ctx
f = self.f
x1 = self.x1
fx1 = f(x1)
x2 = self.x2
fx2 = f(x2)
while True:
x3 = 0.5*(x1 + x2)
fx3 = f(x3)
x4 = x3 + (x3 - x1) * ctx.sign(fx1 - fx2) * fx3 / ctx.sqrt(fx3**2 - fx1*fx2)
fx4 = f(x4)
if abs(fx4) < self.tol:
# TODO: better condition (when f is very flat)
if self.verbose:
print_('canceled with f(x4) =', fx4)
yield x4, abs(x1 - x2)
break
if fx4 * fx2 < 0: # root in [x4, x2]
x1 = x4
fx1 = fx4
else: # root in [x1, x4]
x2 = x4
fx2 = fx4
error = abs(x1 - x2)
yield (x1 + x2)/2, error
class ANewton:
"""
EXPERIMENTAL 1d-solver generating pairs of approximative root and error.
Uses Newton's method modified to use Steffensens method when convergence is
slow. (I.e. for multiple roots.)
"""
maxsteps = 20
def __init__(self, ctx, f, x0, **kwargs):
self.ctx = ctx
if not len(x0) == 1:
raise ValueError('expected 1 starting point, got %i' % len(x0))
self.x0 = x0[0]
self.f = f
if not 'df' in kwargs:
def df(x):
return self.ctx.diff(f, x)
else:
df = kwargs['df']
self.df = df
def phi(x):
return x - f(x) / df(x)
self.phi = phi
self.verbose = kwargs['verbose']
def __iter__(self):
x0 = self.x0
f = self.f
df = self.df
phi = self.phi
error = 0
counter = 0
while True:
prevx = x0
try:
x0 = phi(x0)
except ZeroDivisionError:
if self.verbose:
print_('ZeroDivisionError: canceled with x =', x0)
break
preverror = error
error = abs(prevx - x0)
# TODO: decide not to use convergence acceleration
if error and abs(error - preverror) / error < 1:
if self.verbose:
print_('converging slowly')
counter += 1
if counter >= 3:
# accelerate convergence
phi = steffensen(phi)
counter = 0
if self.verbose:
print_('accelerating convergence')
yield x0, error
# TODO: add Brent
############################
# MULTIDIMENSIONAL SOLVERS #
############################
def jacobian(ctx, f, x):
"""
Calculate the Jacobian matrix of a function at the point x0.
This is the first derivative of a vectorial function:
f : R^m -> R^n with m >= n
"""
x = ctx.matrix(x)
h = ctx.sqrt(ctx.eps)
fx = ctx.matrix(f(*x))
m = len(fx)
n = len(x)
J = ctx.matrix(m, n)
for j in xrange(n):
xj = x.copy()
xj[j] += h
Jj = (ctx.matrix(f(*xj)) - fx) / h
for i in xrange(m):
J[i,j] = Jj[i]
return J
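# A minimal sketch (not part of the original module) of the forward-difference
# Jacobian above on a small 2-d map; the sample function and point are
# illustrative.
def _jacobian_demo():
    from mpmath import mp
    # f(x, y) = (x**2 * y, 5*x + sin(y)); the exact Jacobian at (1, 2) is
    # [[2*y, x**2], [5, cos(y)]] = [[4, 1], [5, cos(2)]]
    f = lambda x, y: (x**2 * y, 5*x + mp.sin(y))
    return mp.jacobian(f, (1, 2))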
# TODO: test with user-specified jacobian matrix, support force_type
class MDNewton:
"""
Find the root of a vector function numerically using Newton's method.
f is a vector function representing a nonlinear equation system.
x0 is the starting point close to the root.
J is a function returning the Jacobian matrix for a point.
Supports overdetermined systems.
Use the 'norm' keyword to specify which norm to use. Defaults to max-norm.
The function to calculate the Jacobian matrix can be given using the
keyword 'J'. Otherwise it will be calculated numerically.
Please note that this method converges only locally. Especially for high-
dimensional systems it is not trivial to find a good starting point being
close enough to the root.
It is recommended to use a faster, low-precision solver from SciPy [1] or
OpenOpt [2] to get an initial guess. Afterwards you can use this method for
root-polishing to any precision.
[1] http://scipy.org
[2] http://openopt.org/Welcome
"""
maxsteps = 10
def __init__(self, ctx, f, x0, **kwargs):
self.ctx = ctx
self.f = f
if isinstance(x0, (tuple, list)):
x0 = ctx.matrix(x0)
assert x0.cols == 1, 'need a vector'
self.x0 = x0
if 'J' in kwargs:
self.J = kwargs['J']
else:
def J(*x):
return ctx.jacobian(f, x)
self.J = J
self.norm = kwargs['norm']
self.verbose = kwargs['verbose']
def __iter__(self):
f = self.f
x0 = self.x0
norm = self.norm
J = self.J
fx = self.ctx.matrix(f(*x0))
fxnorm = norm(fx)
cancel = False
while not cancel:
# get direction of descent
fxn = -fx
Jx = J(*x0)
s = self.ctx.lu_solve(Jx, fxn)
if self.verbose:
print_('Jx:')
print_(Jx)
print_('s:', s)
# damping step size TODO: better strategy (hard task)
l = self.ctx.one
x1 = x0 + s
while True:
if x1 == x0:
if self.verbose:
print_("canceled, won't get more excact")
cancel = True
break
fx = self.ctx.matrix(f(*x1))
newnorm = norm(fx)
if newnorm < fxnorm:
# new x accepted
fxnorm = newnorm
x0 = x1
break
l /= 2
x1 = x0 + l*s
yield (x0, fxnorm)
#############
# UTILITIES #
#############
str2solver = {'newton':Newton, 'secant':Secant, 'mnewton':MNewton,
'halley':Halley, 'muller':Muller, 'bisect':Bisection,
'illinois':Illinois, 'pegasus':Pegasus, 'anderson':Anderson,
'ridder':Ridder, 'anewton':ANewton, 'mdnewton':MDNewton}
def findroot(ctx, f, x0, solver='secant', tol=None, verbose=False, verify=True, **kwargs):
r"""
Find a solution to `f(x) = 0`, using *x0* as starting point or
interval for *x*.
Multidimensional overdetermined systems are supported.
You can specify them using a function or a list of functions.
If the found root does not satisfy `|f(x)|^2 \leq \mathrm{tol}`,
an exception is raised (this can be disabled with *verify=False*).
**Arguments**
*f*
one dimensional function
*x0*
starting point, several starting points or interval (depends on solver)
*tol*
the returned solution has an error smaller than this
*verbose*
print additional information for each iteration if true
*verify*
verify the solution and raise a ValueError if `|f(x)|^2 > \mathrm{tol}`
*solver*
a generator for *f* and *x0* returning approximative solution and error
*maxsteps*
after how many steps the solver will cancel
*df*
first derivative of *f* (used by some solvers)
*d2f*
second derivative of *f* (used by some solvers)
*multidimensional*
force multidimensional solving
*J*
Jacobian matrix of *f* (used by multidimensional solvers)
*norm*
used vector norm (used by multidimensional solvers)
    solver has to be callable with ``(f, x0, **kwargs)`` and return a generator
yielding pairs of approximative solution and estimated error (which is
expected to be positive).
You can use the following string aliases:
    'newton', 'secant', 'mnewton', 'halley', 'muller', 'illinois', 'pegasus',
    'anderson', 'ridder', 'anewton', 'bisect', 'mdnewton'
See mpmath.calculus.optimization for their documentation.
**Examples**
The function :func:`~mpmath.findroot` locates a root of a given function using the
secant method by default. A simple example use of the secant method is to
compute `\pi` as the root of `\sin x` closest to `x_0 = 3`::
>>> from mpmath import *
>>> mp.dps = 30; mp.pretty = True
>>> findroot(sin, 3)
3.14159265358979323846264338328
The secant method can be used to find complex roots of analytic functions,
although it must in that case generally be given a nonreal starting value
(or else it will never leave the real line)::
>>> mp.dps = 15
>>> findroot(lambda x: x**3 + 2*x + 1, j)
(0.226698825758202 + 1.46771150871022j)
A nice application is to compute nontrivial roots of the Riemann zeta
function with many digits (good initial values are needed for convergence)::
>>> mp.dps = 30
>>> findroot(zeta, 0.5+14j)
(0.5 + 14.1347251417346937904572519836j)
The secant method can also be used as an optimization algorithm, by passing
it a derivative of a function. The following example locates the positive
minimum of the gamma function::
>>> mp.dps = 20
>>> findroot(lambda x: diff(gamma, x), 1)
1.4616321449683623413
Finally, a useful application is to compute inverse functions, such as the
Lambert W function which is the inverse of `w e^w`, given the first
term of the solution's asymptotic expansion as the initial value. In basic
cases, this gives identical results to mpmath's built-in ``lambertw``
function::
>>> def lambert(x):
... return findroot(lambda w: w*exp(w) - x, log(1+x))
...
>>> mp.dps = 15
>>> lambert(1); lambertw(1)
0.567143290409784
0.567143290409784
    >>> lambert(1000); lambertw(1000)
5.2496028524016
5.2496028524016
Multidimensional functions are also supported::
>>> f = [lambda x1, x2: x1**2 + x2,
... lambda x1, x2: 5*x1**2 - 3*x1 + 2*x2 - 3]
>>> findroot(f, (0, 0))
[-0.618033988749895]
[-0.381966011250105]
>>> findroot(f, (10, 10))
[ 1.61803398874989]
[-2.61803398874989]
You can verify this by solving the system manually.
Please note that the following (more general) syntax also works::
>>> def f(x1, x2):
... return x1**2 + x2, 5*x1**2 - 3*x1 + 2*x2 - 3
...
>>> findroot(f, (0, 0))
[-0.618033988749895]
[-0.381966011250105]
**Multiple roots**
For multiple roots all methods of the Newtonian family (including secant)
converge slowly. Consider this example::
>>> f = lambda x: (x - 1)**99
>>> findroot(f, 0.9, verify=False)
0.918073542444929
Even for a very close starting point the secant method converges very
slowly. Use ``verbose=True`` to illustrate this.
It is possible to modify Newton's method to make it converge regardless of
the root's multiplicity::
>>> findroot(f, -10, solver='mnewton')
1.0
This variant uses the first and second derivative of the function, which is
not very efficient.
Alternatively you can use an experimental Newtonian solver that keeps track
of the speed of convergence and accelerates it using Steffensen's method if
necessary::
>>> findroot(f, -10, solver='anewton', verbose=True)
x: -9.88888888888888888889
error: 0.111111111111111111111
converging slowly
x: -9.77890011223344556678
error: 0.10998877665544332211
converging slowly
x: -9.67002233332199662166
error: 0.108877778911448945119
converging slowly
accelerating convergence
x: -9.5622443299551077669
error: 0.107778003366888854764
converging slowly
x: 0.99999999999999999214
error: 10.562244329955107759
x: 1.0
error: 7.8598304758094664213e-18
ZeroDivisionError: canceled with x = 1.0
1.0
**Complex roots**
For complex roots it's recommended to use Muller's method as it converges
even for real starting points very fast::
>>> findroot(lambda x: x**4 + x + 1, (0, 1, 2), solver='muller')
(0.727136084491197 + 0.934099289460529j)
**Intersection methods**
When you need to find a root in a known interval, it's highly recommended to
use an intersection-based solver like ``'anderson'`` or ``'ridder'``.
    Usually they converge faster and more reliably. They do however have problems
with multiple roots and usually need a sign change to find a root::
>>> findroot(lambda x: x**3, (-1, 1), solver='anderson')
0.0
Be careful with symmetric functions::
>>> findroot(lambda x: x**2, (-1, 1), solver='anderson') #doctest:+ELLIPSIS
Traceback (most recent call last):
...
ZeroDivisionError
It fails even for better starting points, because there is no sign change::
>>> findroot(lambda x: x**2, (-1, .5), solver='anderson')
Traceback (most recent call last):
...
ValueError: Could not find root within given tolerance. (1.0 > 2.16840434497100886801e-19)
Try another starting point or tweak arguments.
"""
prec = ctx.prec
try:
ctx.prec += 20
# initialize arguments
if tol is None:
tol = ctx.eps * 2**10
kwargs['verbose'] = kwargs.get('verbose', verbose)
if 'd1f' in kwargs:
kwargs['df'] = kwargs['d1f']
kwargs['tol'] = tol
if isinstance(x0, (list, tuple)):
x0 = [ctx.convert(x) for x in x0]
else:
x0 = [ctx.convert(x0)]
if isinstance(solver, str):
try:
solver = str2solver[solver]
except KeyError:
raise ValueError('could not recognize solver')
# accept list of functions
if isinstance(f, (list, tuple)):
f2 = copy(f)
def tmp(*args):
return [fn(*args) for fn in f2]
f = tmp
# detect multidimensional functions
try:
fx = f(*x0)
multidimensional = isinstance(fx, (list, tuple, ctx.matrix))
except TypeError:
fx = f(x0[0])
multidimensional = False
if 'multidimensional' in kwargs:
multidimensional = kwargs['multidimensional']
if multidimensional:
# only one multidimensional solver available at the moment
solver = MDNewton
if not 'norm' in kwargs:
norm = lambda x: ctx.norm(x, 'inf')
kwargs['norm'] = norm
else:
norm = kwargs['norm']
else:
norm = abs
# happily return starting point if it's a root
if norm(fx) == 0:
if multidimensional:
return ctx.matrix(x0)
else:
return x0[0]
# use solver
iterations = solver(ctx, f, x0, **kwargs)
if 'maxsteps' in kwargs:
maxsteps = kwargs['maxsteps']
else:
maxsteps = iterations.maxsteps
i = 0
for x, error in iterations:
if verbose:
print_('x: ', x)
print_('error:', error)
i += 1
if error < tol * max(1, norm(x)) or i >= maxsteps:
break
else:
if not i:
raise ValueError('Could not find root using the given solver.\n'
'Try another starting point or tweak arguments.')
if not isinstance(x, (list, tuple, ctx.matrix)):
xl = [x]
else:
xl = x
if verify and norm(f(*xl))**2 > tol: # TODO: better condition?
raise ValueError('Could not find root within given tolerance. '
'(%s > %s)\n'
'Try another starting point or tweak arguments.'
% (norm(f(*xl))**2, tol))
return x
finally:
ctx.prec = prec
def multiplicity(ctx, f, root, tol=None, maxsteps=10, **kwargs):
"""
Return the multiplicity of a given root of f.
    Internally, numerical derivatives are used. This might be inefficient for
    higher order derivatives. Due to this, ``multiplicity`` cancels after
    evaluating 10 derivatives by default. You can specify the n-th derivative
    using the dnf keyword.
>>> from mpmath import *
>>> multiplicity(lambda x: sin(x) - 1, pi/2)
2
"""
if tol is None:
tol = ctx.eps ** 0.8
kwargs['d0f'] = f
for i in xrange(maxsteps):
dfstr = 'd' + str(i) + 'f'
if dfstr in kwargs:
df = kwargs[dfstr]
else:
df = lambda x: ctx.diff(f, x, i)
if not abs(df(root)) < tol:
break
return i
def steffensen(f):
"""
linear convergent function -> quadratic convergent function
Steffensen's method for quadratic convergence of a linear converging
sequence.
    Do not use it for higher rates of convergence.
It may even work for divergent sequences.
Definition:
F(x) = (x*f(f(x)) - f(x)**2) / (f(f(x)) - 2*f(x) + x)
Example
.......
You can use Steffensen's method to accelerate a fixpoint iteration of linear
(or less) convergence.
x* is a fixpoint of the iteration x_{k+1} = phi(x_k) if x* = phi(x*). For
phi(x) = x**2 there are two fixpoints: 0 and 1.
Let's try Steffensen's method:
>>> f = lambda x: x**2
>>> from mpmath.calculus.optimization import steffensen
>>> F = steffensen(f)
>>> for x in [0.5, 0.9, 2.0]:
... fx = Fx = x
... for i in xrange(9):
... try:
... fx = f(fx)
... except OverflowError:
... pass
... try:
... Fx = F(Fx)
... except ZeroDivisionError:
... pass
... print('%20g %20g' % (fx, Fx))
0.25 -0.5
0.0625 0.1
0.00390625 -0.0011236
1.52588e-05 1.41691e-09
2.32831e-10 -2.84465e-27
5.42101e-20 2.30189e-80
2.93874e-39 -1.2197e-239
8.63617e-78 0
7.45834e-155 0
0.81 1.02676
0.6561 1.00134
0.430467 1
0.185302 1
0.0343368 1
0.00117902 1
1.39008e-06 1
1.93233e-12 1
3.73392e-24 1
4 1.6
16 1.2962
256 1.10194
65536 1.01659
4.29497e+09 1.00053
1.84467e+19 1
3.40282e+38 1
1.15792e+77 1
1.34078e+154 1
    Unmodified, the iteration converges only towards 0. Modified, it not only
    converges much faster, it even converges to the repelling fixpoint 1.
"""
def F(x):
fx = f(x)
ffx = f(fx)
return (x*ffx - fx**2) / (ffx - 2*fx + x)
return F
OptimizationMethods.jacobian = jacobian
OptimizationMethods.findroot = findroot
OptimizationMethods.multiplicity = multiplicity
if __name__ == '__main__':
import doctest
doctest.testmod()
avg_line_length: 29.633455 | max_line_length: 98 | alphanum_fraction: 0.518554

hexsha: 415e116f3872bf8a85ee856f7329af97b54e0988 | size: 6,736 | ext: py | lang: Python
max_stars_repo: force_verify.py in haakonvt/LearningTensorFlow @ 6988a15af2ac916ae1a5e23b2c5bde9630cc0519, licenses: ["MIT"], stars: 5 (2018-09-06T12:52:12.000Z to 2020-05-09T01:40:12.000Z)
max_issues_repo: force_verify.py in haakonvt/LearningTensorFlow @ 6988a15af2ac916ae1a5e23b2c5bde9630cc0519, licenses: ["MIT"], issues: null
max_forks_repo: force_verify.py in haakonvt/LearningTensorFlow @ 6988a15af2ac916ae1a5e23b2c5bde9630cc0519, licenses: ["MIT"], forks: 4 (2018-02-06T08:42:06.000Z to 2019-04-16T11:23:06.000Z)
from create_train_data import generate_symmfunc_input_Si_Behler
import numpy as np
from file_management import findPathToData, readXYZ_Files
from plot_tools import plotErrorEvolutionSWvsNN, plotEvolutionSWvsNN_N_diff_epochs, plotForcesSWvsNN, plotLAMMPSforces1atomEvo
from create_train_data import PES_Stillinger_Weber
import sys
from derivatives_symm_func import force_calculation, create_neighbour_list
from nn_evaluation import neural_network
def test_structure_N_atom(neigh_cube, neural_network, plot_single=False, last_timestep=-1):
"""
Structure:
    xyz = [[0, 0, 0 ], <--- must be the origin
[x2,y2,z2],
[x3,y3,z3],
[........],
[xN,yN,zN]]
"""
# Will just be a list of numbers Ep
Ep_SW_list = []
Ep_NN_list = []
# Will contain tuples (Fx, Fy, Fz)
Fvec_SW_list = []
Fvec_NN_list = []
# Need Ep of all atoms in NN-calculation of forces:
tot_nmbr_of_atoms = neigh_cube[0].shape[0]
Ep_NN_all_atoms = [None]*tot_nmbr_of_atoms
_, nmbr_G = generate_symmfunc_input_Si_Behler()
dNNdG_matrix = np.zeros((tot_nmbr_of_atoms, nmbr_G))
# Loop through all timesteps
for t,xyz in enumerate(neigh_cube[0:last_timestep]):
# Make certain that atoms are centered around (0,0,0):
if not np.all(xyz[0,:] == 0):
print "Atoms not properly centered to origo. Exiting!"
sys.exit(0)
# Pick out neighbor atoms
xyz_only_neigh = xyz[1:,:]
# Potential and forces computed by Stillinger Weber:
Ep_SW = PES_Stillinger_Weber(xyz_only_neigh)
Fvec_SW = (0,0,0) # Only a placeholder! LAMMPS data filled in later
# Potential and forces computed by trained neural network:
# for i_atom in range(tot_nmbr_of_atoms):
# xyz_atom_centered = create_neighbour_list(xyz, i_atom, return_self=False)
# symm_vec = neural_network.create_symvec_from_xyz(xyz_atom_centered)
# Ep_NN_all_atoms[i_atom] = neural_network(symm_vec) # Evaluates the NN
# dNNdG_matrix[i_atom,:] = neural_network.derivative().reshape(nmbr_G,)
# # Now that we have all Ep of all atoms, run force calculation:
# f_tot = force_calculation(dNNdG_matrix, xyz)
# Finite difference derivative of NN:
i_a = 0 # Look at this particle only
off_value = 0.000001
num_force_atom_0 = [0,0,0]
for fdir in [0,1,2]: # Force in direction x, y, z
Ep_off = [0,0] # Reset Ep
for i_off, offset in enumerate([-off_value, off_value]):
xyz_c = np.copy(xyz)
xyz_c[i_a,fdir] -= offset # Moving the atom a tiny bit in direction "fdir"
for cur_atom in range(tot_nmbr_of_atoms):
xyz_atom_centered = create_neighbour_list(xyz_c, cur_atom, return_self=False)
symm_vec = neural_network.create_symvec_from_xyz(xyz_atom_centered)
Ep_off[i_off] += neural_network(symm_vec) # Evaluates the NN
# print i2xyz(fdir), (Ep_off[1]-Ep_off[0])/(2*off_value)
# Compute the force with central difference (Error: O(dx^2)) <-- big O-notation
num_force_atom_0[fdir] = (Ep_off[1]-Ep_off[0])/(2*off_value)
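            # Note the sign: the coordinate was shifted by "-offset" above, so
            # (Ep_off[1]-Ep_off[0])/(2*off_value) approximates -dEp/dq, i.e.
            # the force component F = -dE/dx directly.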
# Compute Ep with no offset:
xyz_atom_centered = create_neighbour_list(xyz, 0, return_self=False)
symm_vec = neural_network.create_symvec_from_xyz(xyz_atom_centered)
Ep_NN = neural_network(symm_vec) # Evaluates the NN
# Append all values to lists:
Ep_SW_list.append(Ep_SW)
Fvec_SW_list.append(Fvec_SW)
# Analytic NN:
# Ep_NN_list.append(Ep_NN_all_atoms[:]) # Pick out first atom (for comparison)
# Fvec_NN_list.append(f_tot[0])
# Finite diff. derivatives:
Ep_NN_list.append(Ep_NN)
Fvec_NN_list.append(num_force_atom_0)
# Print out progress
if t%20 == 0 and t > 50:
sys.stdout.write("\rTimestep: %d" %t)
sys.stdout.flush()
print " "
Ep_SW_list = np.array(Ep_SW_list)
Ep_NN_list = np.array(Ep_NN_list)
if plot_single:
plotErrorEvolutionSWvsNN(Ep_SW_list, Ep_NN_list, tot_nmbr_of_atoms)
# Return values for more plotting
return Ep_SW_list, Ep_NN_list, tot_nmbr_of_atoms, Fvec_SW_list, Fvec_NN_list
def i2xyz(i):
""" For easy reading of error checks """
if i == 0:
return "x"
elif i == 1:
return "y"
else:
return "z"
if __name__ == '__main__':
try:
N = int(sys.argv[1])
M = int(sys.argv[2])
last_timestep = M
except:
print "Usage:\n>>> python force_verify.py N M"
print "- N is the different NN-versions to visualize"
print "- M is the last timestep"
sys.exit(0)
n_atoms = int(raw_input("Number of atoms? "))
path_to_file = "Important_data/Test_nn/enfil_sw_%dp.xyz" %n_atoms
neigh_cube = readXYZ_Files(path_to_file, "no-save-file.txt", return_array=True)
loadPath = findPathToData(find_tf_savefile=True)
master_list = []
# Activation functions with derivatives:
sigmoid = lambda x: 1.0/(1+np.exp(-x))
ddx_sig = lambda x: sigmoid(x)*(1-sigmoid(x))
relu = lambda x: np.maximum(x, 0, x) # (in-place of x --> quick!!)
ddx_relu = lambda x: np.array((x >= 0), dtype=float)
act_tanh = lambda x: np.tanh(x)
ddx_tanh = lambda x: 1.0 - np.tanh(x)**2 #1.0/np.cosh(x)**2
# If showing single NN-version (trained to a certain epoch), then plot
if N == 1:
plot_single = True
else:
plot_single = False
# Loop over different trained versions of the NN:
for i in range(N):
nn_eval = neural_network(loadPath, sigmoid, ddx_sig)
Ep_SW, Ep_NN, N_atoms, F_SW, F_NN = test_structure_N_atom(neigh_cube,
nn_eval, plot_single=plot_single, last_timestep=last_timestep)
# diff = np.mean(np.abs(np.array([i-j for i,j in zip(Ep_SW, Ep_NN[:,0])])))
# print "Potential energy abs diff:", diff
plot_info = [Ep_SW, Ep_NN, N_atoms, nn_eval.what_epoch]
master_list.append(plot_info)
# Plot each epoch in a new subplot:
if N > 1:
plotEvolutionSWvsNN_N_diff_epochs(N, master_list)
# Pick out first atom (for comparison)
F_NN = np.array(F_NN)#[:,0]
# Grab LAMMPS force data, NB: must be perfectly consistent with XYZ-files!!!!!
F_LAMMPS = plotLAMMPSforces1atomEvo()[:len(F_NN)]
F_SW = np.array(F_LAMMPS, dtype=float)
# Plot comparison of forces
plotForcesSWvsNN(F_LAMMPS, F_NN, show=True)
| 41.073171
| 126
| 0.634056
|
ee1ed474924d4d90215aa95a07b29cf4f297b32a
| 4,367
|
py
|
Python
|
fusion_genes_compare.py
|
Hammarn/Scripts
|
eb9fb51b614d29aea425168aa16c58410d975f46
|
[
"MIT"
] | null | null | null |
fusion_genes_compare.py
|
Hammarn/Scripts
|
eb9fb51b614d29aea425168aa16c58410d975f46
|
[
"MIT"
] | null | null | null |
fusion_genes_compare.py
|
Hammarn/Scripts
|
eb9fb51b614d29aea425168aa16c58410d975f46
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import os
import re
import argparse
import sys
def read_files_store_data(input_files,output_file):
fusion_dict={}
for input_file in input_files:
if input_file.endswith("star-fusion.fusion_candidates.final.abridged"):
#We have a star fusion file
with open(input_file, 'r') as f:
for line in f:
if line.startswith("#"):
#Skip header
continue
else:
fusion=line.split("\t")[0]
# If we want to store to metadata then that can be inserted here
if fusion in fusion_dict.keys():
fusion_dict[fusion]='Both'
else:
fusion_dict[fusion]='STAR'
elif input_file.endswith("summary_candidate_fusions.txt"):
#We have a Fusion catcher file
with open(input_file, 'r') as f:
for line in f:
if line.startswith(" * "):
fusion=line.split(" ")[3]
if fusion in fusion_dict.keys():
fusion_dict[fusion]='Both'
else:
fusion_dict[fusion]='FusionCatcher'
else:
print"Found file with incorect file ending, omitting file {}".format(input_file)
make_report(fusion_dict, output_file)
def group_NGI_files(input_files,outputfile):
sample_pattern=re.compile("^(P[0-9]+_[0-9]+)")
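    # e.g. "P1234_101.star-fusion.fusion_candidates.final.abridged" -> "P1234_101"
    # (illustrative file name; the NGI sample id is the leading P<digits>_<digits>)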
matches=[]
for input_file in input_files:
try:
match=sample_pattern.search(os.path.basename(input_file)).group(1)
if match:
matches.append(match)
except AttributeError:
continue
NGI_names=matches
for NGI_name in NGI_names:
sample_files=[]
for fusion_file in input_files:
if os.path.basename(fusion_file).startswith(NGI_name):
sample_files.append(fusion_file)
outfile="{}.fusion_comparison.txt".format(NGI_name)
read_files_store_data(sample_files,outfile)
def make_report(fusion_dict, output_file):
content=str()
gene_in_both=[]
gene_star_only=[]
gene_fc_only=[]
len_fc=0
len_star=0
for fusion_gene in fusion_dict.keys():
if fusion_dict[fusion_gene] == 'Both':
gene_in_both.append(fusion_gene)
len_fc+=1
len_star+=1
elif fusion_dict[fusion_gene] == 'STAR':
gene_star_only.append(fusion_gene)
len_star+=1
elif fusion_dict[fusion_gene] == 'FusionCatcher':
gene_fc_only.append(fusion_gene)
len_fc+=1
content+="## Number of Fusion genes detected with STAR-fusion: {} \n".format(len_star)
content+="## Number of Fusion genes detected with FusionCatcher: {} \n".format(len_fc)
content +="##FUSIONCATCHER\tSTAR-FUSION\tBOTH\n"
##cleanup
gene_in_both=[item.rstrip() for item in gene_in_both]
gene_star_only=[item.rstrip() for item in gene_star_only]
gene_fc_only=[item.rstrip() for item in gene_fc_only]
maxlen = max([len(l) for l in [gene_in_both,gene_star_only,gene_fc_only]])
    for idx in range(maxlen):  # include the final row; range(0, maxlen-1) dropped it
both_str = gene_in_both[idx] if len(gene_in_both) > idx else ''
star_str = gene_star_only[idx] if len(gene_star_only) > idx else ''
fc_str = gene_fc_only[idx] if len(gene_fc_only) > idx else ''
content += "{}\t{}\t{}\n".format(fc_str, star_str, both_str)
with open(output_file, 'w') as f:
f.write(content)
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="""Compare two lists of fusion genes and report which fusions are found in both""")
parser.add_argument("input_files", metavar='Input file', nargs='+', default='.',
help="Input files from STAR fusion and Fusion catcher ")
parser.add_argument("output_file", metavar='Output file', nargs='?', default='fusion_comparison.txt',
help="File to save output to. ")
args = parser.parse_args()
#merge_files(args.input_dir, args.dest_dir)
group_NGI_files(args.input_files,args.output_file)
read_files_store_data(args.input_files,args.output_file)
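    # Illustrative invocation (file names are assumptions, not from the source):
    #   python fusion_genes_compare.py P1234_101.star-fusion.fusion_candidates.final.abridged \
    #       P1234_101.summary_candidate_fusions.txt fusion_comparison.txt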
| 38.991071
| 130
| 0.596977
|
8441ad32d286aabf26e1e99035938be2fca3af1b
| 820
|
py
|
Python
|
ferenda/sources/legal/se/__init__.py
|
eckberg/ferenda
|
c420f43c508edb4c4a41e79cb188e9760347d432
|
[
"BSD-2-Clause"
] | null | null | null |
ferenda/sources/legal/se/__init__.py
|
eckberg/ferenda
|
c420f43c508edb4c4a41e79cb188e9760347d432
|
[
"BSD-2-Clause"
] | null | null | null |
ferenda/sources/legal/se/__init__.py
|
eckberg/ferenda
|
c420f43c508edb4c4a41e79cb188e9760347d432
|
[
"BSD-2-Clause"
] | 1
|
2022-01-04T09:15:03.000Z
|
2022-01-04T09:15:03.000Z
|
# flake8: noqa
from rdflib import Namespace
RPUBL = Namespace('http://rinfo.lagrummet.se/ns/2008/11/rinfo/publ#')
URISPACE = Namespace('http://rinfo.lagrummet.se/sys/uri/space#')
RINFOEX = Namespace("http://lagen.nu/terms#")
from .swedishlegalsource import SwedishLegalSource, SwedishLegalStore, SwedishLegalHandler, SwedishCitationParser
from .fixedlayoutsource import FixedLayoutStore, FixedLayoutSource
from .offtryck import Offtryck
from .regeringen import Regeringen
from .riksdagen import Riksdagen
from .trips import Trips, NoMoreLinks
from .arn import ARN
from .direktiv import Direktiv
from .ds import Ds
from .dv import DV
from .jk import JK
from .jo import JO
from .kommitte import Kommitte
from .myndfskr import MyndFskrBase
from .propositioner import Propositioner
from .sfs import SFS
from .sou import SOU
| 35.652174
| 113
| 0.812195
|
5d4c46dc9ac537f1e72dcbae2ff665b4dfff5e06
| 14,409
|
py
|
Python
|
eZmaxApi/model/ezsignformfieldgroup_edit_object_v1_response.py
|
ezmaxinc/eZmax-SDK-python
|
6794b8001abfb7d9ae18a3b87aba164839b925a0
|
[
"MIT"
] | null | null | null |
eZmaxApi/model/ezsignformfieldgroup_edit_object_v1_response.py
|
ezmaxinc/eZmax-SDK-python
|
6794b8001abfb7d9ae18a3b87aba164839b925a0
|
[
"MIT"
] | null | null | null |
eZmaxApi/model/ezsignformfieldgroup_edit_object_v1_response.py
|
ezmaxinc/eZmax-SDK-python
|
6794b8001abfb7d9ae18a3b87aba164839b925a0
|
[
"MIT"
] | null | null | null |
"""
eZmax API Definition (Full)
    This API exposes all the functionalities for the eZmax and eZsign applications. # noqa: E501
The version of the OpenAPI document: 1.1.7
Contact: support-api@ezmax.ca
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from eZmaxApi.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from eZmaxApi.exceptions import ApiAttributeError
def lazy_import():
from eZmaxApi.model.common_response import CommonResponse
from eZmaxApi.model.common_response_obj_debug import CommonResponseObjDebug
from eZmaxApi.model.common_response_obj_debug_payload import CommonResponseObjDebugPayload
globals()['CommonResponse'] = CommonResponse
globals()['CommonResponseObjDebug'] = CommonResponseObjDebug
globals()['CommonResponseObjDebugPayload'] = CommonResponseObjDebugPayload
class EzsignformfieldgroupEditObjectV1Response(ModelComposed):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'obj_debug_payload': (CommonResponseObjDebugPayload,), # noqa: E501
'obj_debug': (CommonResponseObjDebug,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'obj_debug_payload': 'objDebugPayload', # noqa: E501
'obj_debug': 'objDebug', # noqa: E501
}
read_only_vars = {
}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""EzsignformfieldgroupEditObjectV1Response - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                          composed schema that is traveled through is added
                          to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
obj_debug_payload (CommonResponseObjDebugPayload): [optional] # noqa: E501
obj_debug (CommonResponseObjDebug): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
constant_args = {
'_check_type': _check_type,
'_path_to_item': _path_to_item,
'_spec_property_naming': _spec_property_naming,
'_configuration': _configuration,
'_visited_composed_classes': self._visited_composed_classes,
}
composed_info = validate_get_composed_info(
constant_args, kwargs, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
discarded_args = composed_info[3]
for var_name, var_value in kwargs.items():
if var_name in discarded_args and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self._additional_properties_model_instances:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
'_composed_instances',
'_var_name_to_model_instances',
'_additional_properties_model_instances',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""EzsignformfieldgroupEditObjectV1Response - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                          composed schema that is traveled through is added
                          to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
obj_debug_payload (CommonResponseObjDebugPayload): [optional] # noqa: E501
obj_debug (CommonResponseObjDebug): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
constant_args = {
'_check_type': _check_type,
'_path_to_item': _path_to_item,
'_spec_property_naming': _spec_property_naming,
'_configuration': _configuration,
'_visited_composed_classes': self._visited_composed_classes,
}
composed_info = validate_get_composed_info(
constant_args, kwargs, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
discarded_args = composed_info[3]
for var_name, var_value in kwargs.items():
if var_name in discarded_args and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self._additional_properties_model_instances:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
@cached_property
def _composed_schemas():
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
lazy_import()
return {
'anyOf': [
],
'allOf': [
CommonResponse,
],
'oneOf': [
],
}
| 43.796353
| 121
| 0.58519
|
979fef75cdf725e9306ed164013ce1a463472fb3
| 16,130
|
py
|
Python
|
Lib/test/test_fractions.py
|
weimingtom/wpython
|
d51bfe48ec4c0ade1514f1351dff700c63ca112a
|
[
"PSF-2.0"
] | 5
|
2020-06-30T05:06:40.000Z
|
2021-05-24T08:38:33.000Z
|
Lib/test/test_fractions.py
|
weimingtom/wpython
|
d51bfe48ec4c0ade1514f1351dff700c63ca112a
|
[
"PSF-2.0"
] | null | null | null |
Lib/test/test_fractions.py
|
weimingtom/wpython
|
d51bfe48ec4c0ade1514f1351dff700c63ca112a
|
[
"PSF-2.0"
] | 2
|
2015-10-01T18:28:20.000Z
|
2020-09-09T16:25:27.000Z
|
"""Tests for Lib/fractions.py."""
from decimal import Decimal
from test.test_support import run_unittest
import math
import operator
import fractions
import unittest
from copy import copy, deepcopy
from cPickle import dumps, loads
F = fractions.Fraction
gcd = fractions.gcd
class GcdTest(unittest.TestCase):
def testMisc(self):
self.assertEquals(0, gcd(0, 0))
self.assertEquals(1, gcd(1, 0))
self.assertEquals(-1, gcd(-1, 0))
self.assertEquals(1, gcd(0, 1))
self.assertEquals(-1, gcd(0, -1))
self.assertEquals(1, gcd(7, 1))
self.assertEquals(-1, gcd(7, -1))
self.assertEquals(1, gcd(-23, 15))
self.assertEquals(12, gcd(120, 84))
self.assertEquals(-12, gcd(84, -120))
def _components(r):
return (r.numerator, r.denominator)
class FractionTest(unittest.TestCase):
def assertTypedEquals(self, expected, actual):
"""Asserts that both the types and values are the same."""
self.assertEquals(type(expected), type(actual))
self.assertEquals(expected, actual)
def assertRaisesMessage(self, exc_type, message,
callable, *args, **kwargs):
"""Asserts that callable(*args, **kwargs) raises exc_type(message)."""
try:
callable(*args, **kwargs)
except exc_type, e:
self.assertEquals(message, str(e))
else:
self.fail("%s not raised" % exc_type.__name__)
def testInit(self):
self.assertEquals((0, 1), _components(F()))
self.assertEquals((7, 1), _components(F(7)))
self.assertEquals((7, 3), _components(F(F(7, 3))))
self.assertEquals((-1, 1), _components(F(-1, 1)))
self.assertEquals((-1, 1), _components(F(1, -1)))
self.assertEquals((1, 1), _components(F(-2, -2)))
self.assertEquals((1, 2), _components(F(5, 10)))
self.assertEquals((7, 15), _components(F(7, 15)))
self.assertEquals((10**23, 1), _components(F(10**23)))
self.assertRaisesMessage(ZeroDivisionError, "Fraction(12, 0)",
F, 12, 0)
self.assertRaises(TypeError, F, 1.5)
self.assertRaises(TypeError, F, 1.5 + 3j)
self.assertRaises(TypeError, F, F(1, 2), 3)
self.assertRaises(TypeError, F, "3/2", 3)
def testFromString(self):
self.assertEquals((5, 1), _components(F("5")))
self.assertEquals((3, 2), _components(F("3/2")))
self.assertEquals((3, 2), _components(F(" \n +3/2")))
self.assertEquals((-3, 2), _components(F("-3/2 ")))
self.assertEquals((13, 2), _components(F(" 013/02 \n ")))
self.assertEquals((13, 2), _components(F(u" 013/02 \n ")))
self.assertEquals((16, 5), _components(F(" 3.2 ")))
self.assertEquals((-16, 5), _components(F(u" -3.2 ")))
self.assertEquals((-3, 1), _components(F(u" -3. ")))
self.assertEquals((3, 5), _components(F(u" .6 ")))
self.assertRaisesMessage(
ZeroDivisionError, "Fraction(3, 0)",
F, "3/0")
self.assertRaisesMessage(
ValueError, "Invalid literal for Fraction: '3/'",
F, "3/")
self.assertRaisesMessage(
ValueError, "Invalid literal for Fraction: '3 /2'",
F, "3 /2")
self.assertRaisesMessage(
# Denominators don't need a sign.
ValueError, "Invalid literal for Fraction: '3/+2'",
F, "3/+2")
self.assertRaisesMessage(
# Imitate float's parsing.
ValueError, "Invalid literal for Fraction: '+ 3/2'",
F, "+ 3/2")
self.assertRaisesMessage(
# Avoid treating '.' as a regex special character.
ValueError, "Invalid literal for Fraction: '3a2'",
F, "3a2")
self.assertRaisesMessage(
# Only parse ordinary decimals, not scientific form.
ValueError, "Invalid literal for Fraction: '3.2e4'",
F, "3.2e4")
self.assertRaisesMessage(
# Don't accept combinations of decimals and fractions.
ValueError, "Invalid literal for Fraction: '3/7.2'",
F, "3/7.2")
self.assertRaisesMessage(
# Don't accept combinations of decimals and fractions.
ValueError, "Invalid literal for Fraction: '3.2/7'",
F, "3.2/7")
self.assertRaisesMessage(
# Allow 3. and .3, but not .
ValueError, "Invalid literal for Fraction: '.'",
F, ".")
def testImmutable(self):
r = F(7, 3)
r.__init__(2, 15)
self.assertEquals((7, 3), _components(r))
self.assertRaises(AttributeError, setattr, r, 'numerator', 12)
self.assertRaises(AttributeError, setattr, r, 'denominator', 6)
self.assertEquals((7, 3), _components(r))
# But if you _really_ need to:
r._numerator = 4
r._denominator = 2
self.assertEquals((4, 2), _components(r))
# Which breaks some important operations:
self.assertNotEquals(F(4, 2), r)
def testFromFloat(self):
self.assertRaises(TypeError, F.from_float, 3+4j)
self.assertEquals((10, 1), _components(F.from_float(10)))
self.assertEquals((0, 1), _components(F.from_float(-0.0)))
self.assertEquals((10, 1), _components(F.from_float(10.0)))
self.assertEquals((-5, 2), _components(F.from_float(-2.5)))
self.assertEquals((99999999999999991611392, 1),
_components(F.from_float(1e23)))
self.assertEquals(float(10**23), float(F.from_float(1e23)))
self.assertEquals((3602879701896397, 1125899906842624),
_components(F.from_float(3.2)))
self.assertEquals(3.2, float(F.from_float(3.2)))
inf = 1e1000
nan = inf - inf
self.assertRaisesMessage(
TypeError, "Cannot convert inf to Fraction.",
F.from_float, inf)
self.assertRaisesMessage(
TypeError, "Cannot convert -inf to Fraction.",
F.from_float, -inf)
self.assertRaisesMessage(
TypeError, "Cannot convert nan to Fraction.",
F.from_float, nan)
def testFromDecimal(self):
self.assertRaises(TypeError, F.from_decimal, 3+4j)
self.assertEquals(F(10, 1), F.from_decimal(10))
self.assertEquals(F(0), F.from_decimal(Decimal("-0")))
self.assertEquals(F(5, 10), F.from_decimal(Decimal("0.5")))
self.assertEquals(F(5, 1000), F.from_decimal(Decimal("5e-3")))
self.assertEquals(F(5000), F.from_decimal(Decimal("5e3")))
self.assertEquals(1 - F(1, 10**30),
F.from_decimal(Decimal("0." + "9" * 30)))
self.assertRaisesMessage(
TypeError, "Cannot convert Infinity to Fraction.",
F.from_decimal, Decimal("inf"))
self.assertRaisesMessage(
TypeError, "Cannot convert -Infinity to Fraction.",
F.from_decimal, Decimal("-inf"))
self.assertRaisesMessage(
TypeError, "Cannot convert NaN to Fraction.",
F.from_decimal, Decimal("nan"))
self.assertRaisesMessage(
TypeError, "Cannot convert sNaN to Fraction.",
F.from_decimal, Decimal("snan"))
def testLimitDenominator(self):
rpi = F('3.1415926535897932')
self.assertEqual(rpi.limit_denominator(10000), F(355, 113))
self.assertEqual(-rpi.limit_denominator(10000), F(-355, 113))
self.assertEqual(rpi.limit_denominator(113), F(355, 113))
self.assertEqual(rpi.limit_denominator(112), F(333, 106))
self.assertEqual(F(201, 200).limit_denominator(100), F(1))
self.assertEqual(F(201, 200).limit_denominator(101), F(102, 101))
self.assertEqual(F(0).limit_denominator(10000), F(0))
def testConversions(self):
self.assertTypedEquals(-1, math.trunc(F(-11, 10)))
self.assertTypedEquals(-1, int(F(-11, 10)))
self.assertEquals(False, bool(F(0, 1)))
self.assertEquals(True, bool(F(3, 2)))
self.assertTypedEquals(0.1, float(F(1, 10)))
# Check that __float__ isn't implemented by converting the
# numerator and denominator to float before dividing.
self.assertRaises(OverflowError, float, long('2'*400+'7'))
self.assertAlmostEquals(2.0/3,
float(F(long('2'*400+'7'), long('3'*400+'1'))))
self.assertTypedEquals(0.1+0j, complex(F(1,10)))
def testArithmetic(self):
self.assertEquals(F(1, 2), F(1, 10) + F(2, 5))
self.assertEquals(F(-3, 10), F(1, 10) - F(2, 5))
self.assertEquals(F(1, 25), F(1, 10) * F(2, 5))
self.assertEquals(F(1, 4), F(1, 10) / F(2, 5))
self.assertTypedEquals(2, F(9, 10) // F(2, 5))
self.assertTypedEquals(10**23, F(10**23, 1) // F(1))
self.assertEquals(F(2, 3), F(-7, 3) % F(3, 2))
self.assertEquals(F(8, 27), F(2, 3) ** F(3))
self.assertEquals(F(27, 8), F(2, 3) ** F(-3))
self.assertTypedEquals(2.0, F(4) ** F(1, 2))
# Will return 1j in 3.0:
self.assertRaises(ValueError, pow, F(-1), F(1, 2))
def testMixedArithmetic(self):
self.assertTypedEquals(F(11, 10), F(1, 10) + 1)
self.assertTypedEquals(1.1, F(1, 10) + 1.0)
self.assertTypedEquals(1.1 + 0j, F(1, 10) + (1.0 + 0j))
self.assertTypedEquals(F(11, 10), 1 + F(1, 10))
self.assertTypedEquals(1.1, 1.0 + F(1, 10))
self.assertTypedEquals(1.1 + 0j, (1.0 + 0j) + F(1, 10))
self.assertTypedEquals(F(-9, 10), F(1, 10) - 1)
self.assertTypedEquals(-0.9, F(1, 10) - 1.0)
self.assertTypedEquals(-0.9 + 0j, F(1, 10) - (1.0 + 0j))
self.assertTypedEquals(F(9, 10), 1 - F(1, 10))
self.assertTypedEquals(0.9, 1.0 - F(1, 10))
self.assertTypedEquals(0.9 + 0j, (1.0 + 0j) - F(1, 10))
self.assertTypedEquals(F(1, 10), F(1, 10) * 1)
self.assertTypedEquals(0.1, F(1, 10) * 1.0)
self.assertTypedEquals(0.1 + 0j, F(1, 10) * (1.0 + 0j))
self.assertTypedEquals(F(1, 10), 1 * F(1, 10))
self.assertTypedEquals(0.1, 1.0 * F(1, 10))
self.assertTypedEquals(0.1 + 0j, (1.0 + 0j) * F(1, 10))
self.assertTypedEquals(F(1, 10), F(1, 10) / 1)
self.assertTypedEquals(0.1, F(1, 10) / 1.0)
self.assertTypedEquals(0.1 + 0j, F(1, 10) / (1.0 + 0j))
self.assertTypedEquals(F(10, 1), 1 / F(1, 10))
self.assertTypedEquals(10.0, 1.0 / F(1, 10))
self.assertTypedEquals(10.0 + 0j, (1.0 + 0j) / F(1, 10))
self.assertTypedEquals(0, F(1, 10) // 1)
self.assertTypedEquals(0.0, F(1, 10) // 1.0)
self.assertTypedEquals(10, 1 // F(1, 10))
self.assertTypedEquals(10**23, 10**22 // F(1, 10))
self.assertTypedEquals(10.0, 1.0 // F(1, 10))
self.assertTypedEquals(F(1, 10), F(1, 10) % 1)
self.assertTypedEquals(0.1, F(1, 10) % 1.0)
self.assertTypedEquals(F(0, 1), 1 % F(1, 10))
self.assertTypedEquals(0.0, 1.0 % F(1, 10))
# No need for divmod since we don't override it.
# ** has more interesting conversion rules.
self.assertTypedEquals(F(100, 1), F(1, 10) ** -2)
self.assertTypedEquals(F(100, 1), F(10, 1) ** 2)
self.assertTypedEquals(0.1, F(1, 10) ** 1.0)
self.assertTypedEquals(0.1 + 0j, F(1, 10) ** (1.0 + 0j))
self.assertTypedEquals(4 , 2 ** F(2, 1))
# Will return 1j in 3.0:
self.assertRaises(ValueError, pow, (-1), F(1, 2))
self.assertTypedEquals(F(1, 4) , 2 ** F(-2, 1))
self.assertTypedEquals(2.0 , 4 ** F(1, 2))
self.assertTypedEquals(0.25, 2.0 ** F(-2, 1))
self.assertTypedEquals(1.0 + 0j, (1.0 + 0j) ** F(1, 10))
def testMixingWithDecimal(self):
# Decimal refuses mixed comparisons.
self.assertRaisesMessage(
TypeError,
"unsupported operand type(s) for +: 'Fraction' and 'Decimal'",
operator.add, F(3,11), Decimal('3.1415926'))
self.assertNotEquals(F(5, 2), Decimal('2.5'))
def testComparisons(self):
self.assertTrue(F(1, 2) < F(2, 3))
self.assertFalse(F(1, 2) < F(1, 2))
self.assertTrue(F(1, 2) <= F(2, 3))
self.assertTrue(F(1, 2) <= F(1, 2))
self.assertFalse(F(2, 3) <= F(1, 2))
self.assertTrue(F(1, 2) == F(1, 2))
self.assertFalse(F(1, 2) == F(1, 3))
self.assertFalse(F(1, 2) != F(1, 2))
self.assertTrue(F(1, 2) != F(1, 3))
def testMixedLess(self):
self.assertTrue(2 < F(5, 2))
self.assertFalse(2 < F(4, 2))
self.assertTrue(F(5, 2) < 3)
self.assertFalse(F(4, 2) < 2)
self.assertTrue(F(1, 2) < 0.6)
self.assertFalse(F(1, 2) < 0.4)
self.assertTrue(0.4 < F(1, 2))
self.assertFalse(0.5 < F(1, 2))
def testMixedLessEqual(self):
self.assertTrue(0.5 <= F(1, 2))
self.assertFalse(0.6 <= F(1, 2))
self.assertTrue(F(1, 2) <= 0.5)
self.assertFalse(F(1, 2) <= 0.4)
self.assertTrue(2 <= F(4, 2))
self.assertFalse(2 <= F(3, 2))
self.assertTrue(F(4, 2) <= 2)
self.assertFalse(F(5, 2) <= 2)
def testBigFloatComparisons(self):
# Because 10**23 can't be represented exactly as a float:
self.assertFalse(F(10**23) == float(10**23))
# The first test demonstrates why these are important.
self.assertFalse(1e23 < float(F(math.trunc(1e23) + 1)))
self.assertTrue(1e23 < F(math.trunc(1e23) + 1))
self.assertFalse(1e23 <= F(math.trunc(1e23) - 1))
self.assertTrue(1e23 > F(math.trunc(1e23) - 1))
self.assertFalse(1e23 >= F(math.trunc(1e23) + 1))
def testBigComplexComparisons(self):
self.assertFalse(F(10**23) == complex(10**23))
self.assertTrue(F(10**23) > complex(10**23))
self.assertFalse(F(10**23) <= complex(10**23))
def testMixedEqual(self):
self.assertTrue(0.5 == F(1, 2))
self.assertFalse(0.6 == F(1, 2))
self.assertTrue(F(1, 2) == 0.5)
self.assertFalse(F(1, 2) == 0.4)
self.assertTrue(2 == F(4, 2))
self.assertFalse(2 == F(3, 2))
self.assertTrue(F(4, 2) == 2)
self.assertFalse(F(5, 2) == 2)
def testStringification(self):
self.assertEquals("Fraction(7, 3)", repr(F(7, 3)))
self.assertEquals("Fraction(6283185307, 2000000000)",
repr(F('3.1415926535')))
self.assertEquals("Fraction(-1, 100000000000000000000)",
repr(F(1, -10**20)))
self.assertEquals("7/3", str(F(7, 3)))
self.assertEquals("7", str(F(7, 1)))
def testHash(self):
self.assertEquals(hash(2.5), hash(F(5, 2)))
self.assertEquals(hash(10**50), hash(F(10**50)))
self.assertNotEquals(hash(float(10**23)), hash(F(10**23)))
def testApproximatePi(self):
# Algorithm borrowed from
# http://docs.python.org/lib/decimal-recipes.html
three = F(3)
lasts, t, s, n, na, d, da = 0, three, 3, 1, 0, 0, 24
while abs(s - lasts) > F(1, 10**9):
lasts = s
n, na = n+na, na+8
d, da = d+da, da+32
t = (t * n) / d
s += t
self.assertAlmostEquals(math.pi, s)
def testApproximateCos1(self):
# Algorithm borrowed from
# http://docs.python.org/lib/decimal-recipes.html
x = F(1)
i, lasts, s, fact, num, sign = 0, 0, F(1), 1, 1, 1
while abs(s - lasts) > F(1, 10**9):
lasts = s
i += 2
fact *= i * (i-1)
num *= x * x
sign *= -1
s += num / fact * sign
self.assertAlmostEquals(math.cos(1), s)
def test_copy_deepcopy_pickle(self):
r = F(13, 7)
self.assertEqual(r, loads(dumps(r)))
self.assertEqual(id(r), id(copy(r)))
self.assertEqual(id(r), id(deepcopy(r)))
def test_main():
run_unittest(FractionTest, GcdTest)
if __name__ == '__main__':
test_main()
| 40.325
| 79
| 0.563422
|
0ffa3309b09bb0093e3a77784f4f1032f426e979
| 7,819
|
py
|
Python
|
eeg_modelling/eeg_viewer/similarity_test.py
|
shaun95/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | 1
|
2022-03-13T21:48:52.000Z
|
2022-03-13T21:48:52.000Z
|
eeg_modelling/eeg_viewer/similarity_test.py
|
shaun95/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | null | null | null |
eeg_modelling/eeg_viewer/similarity_test.py
|
shaun95/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | 1
|
2022-03-30T07:20:29.000Z
|
2022-03-30T07:20:29.000Z
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python2, python3
"""Tests for similarity operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
import numpy as np
from eeg_modelling.eeg_viewer import similarity
from eeg_modelling.pyprotos import similarity_pb2
sampling_freq = 120
n_leads = 18
total_seconds = 20
n_samples = total_seconds * sampling_freq
def overlaps(pattern_a, pattern_b):
"""Returns a boolean indicating if two patterns are overlapped.
Args:
pattern_a: SimilarPattern or TimeSpan.
pattern_b: SimilarPattern or TimeSpan.
Returns:
boolean indicating if the patterns are overlapped.
"""
start_a = pattern_a.start_time
start_b = pattern_b.start_time
end_a = pattern_a.start_time + pattern_a.duration
end_b = pattern_b.start_time + pattern_b.duration
a_falls_in_b = start_b < end_a and end_a < end_b
b_falls_in_a = start_a < end_b and end_b < end_a
return a_falls_in_b or b_falls_in_a
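# Editorial example (not part of the original tests): spans covering seconds
# [0, 2) and [1, 3) overlap, while disjoint spans [0, 1) and [2, 3) do not.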
def set_slice_value(base_array, new_value_array, start_seconds, samp_freq):
"""Sets a slice of a numpy array with a new_value_array.
Helper function used in the tests. It modifies the base_array in place.
Args:
base_array: numpy array of shape (n_channels, n_samples).
new_value_array: numpy array of shape (n_channels, m_samples),
where m_samples <= n_samples.
start_seconds: starting seconds to set the new_value_array into the
base_array.
samp_freq: sampling frequency used in the data.
"""
_, m_samples = new_value_array.shape
start_index = int(start_seconds * samp_freq)
end_index = start_index + m_samples
base_array[:, start_index:end_index] = new_value_array
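# Editorial example: with samp_freq=120, writing a 2-second block at
# start_seconds=5 fills columns 600:840 of the base array in place.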
class SimilarityTest(absltest.TestCase):
def setUp(self):
super(SimilarityTest, self).setUp()
self.base_data = np.zeros((n_leads, n_samples), dtype=np.float32)
def testCreateSimilarPatternsResponse(self):
settings = similarity_pb2.SimilaritySettings()
settings.top_n = 7
settings.merge_close_results = False
response = similarity.CreateSimilarPatternsResponse(self.base_data,
1,
2,
[],
sampling_freq,
settings)
self.assertIsInstance(response, similarity_pb2.SimilarPatternsResponse)
self.assertLen(response.similar_patterns, 7)
def testSearchSimilarPatterns(self):
template_start_time = 1
template_duration = 2
template = np.ones((n_leads, template_duration * sampling_freq))
set_slice_value(self.base_data, template, template_start_time,
sampling_freq)
target_start_time = 5
set_slice_value(self.base_data, template, target_start_time, sampling_freq)
patterns_found = similarity.SearchSimilarPatterns(self.base_data,
template_start_time,
template_duration,
[],
sampling_freq,
top_n=3)
self.assertLen(patterns_found, 3)
target_similar_pattern = similarity_pb2.SimilarPattern()
target_similar_pattern.score = 1
target_similar_pattern.start_time = target_start_time
target_similar_pattern.duration = template_duration
self.assertIn(target_similar_pattern, patterns_found)
def testSearchSimilarPatterns_ignoreSeen(self):
template_start_time = 1
template_duration = 1
seen_event = similarity_pb2.TimeSpan()
seen_event.start_time = 10
seen_event.duration = 2.5
patterns_found = similarity.SearchSimilarPatterns(self.base_data,
template_start_time,
template_duration,
[seen_event],
sampling_freq,
top_n=10)
for pattern in patterns_found:
end_time = pattern.start_time + pattern.duration
message = 'Overlaps with event between %s-%s' % (pattern.start_time,
end_time)
self.assertFalse(overlaps(seen_event, pattern), message)
def testSearchSimilarPatterns_merge(self):
template_start_time = 1
template_duration = 2
template = np.ones((n_leads, template_duration * sampling_freq))
template_span = similarity_pb2.TimeSpan()
template_span.start_time = template_start_time
template_span.duration = template_duration
seen_events = [template_span]
set_slice_value(self.base_data, template, template_start_time,
sampling_freq)
target_1_start_time = 5
set_slice_value(self.base_data, template, target_1_start_time,
sampling_freq)
target_2_start_time = 8.5
set_slice_value(self.base_data, template, target_2_start_time,
sampling_freq)
patterns_found = similarity.SearchSimilarPatterns(self.base_data,
template_start_time,
template_duration,
seen_events,
sampling_freq,
top_n=2,
merge_close_results=True,
merge_threshold=2)
target_2_end_time = target_2_start_time + template_duration
merged_duration = target_2_end_time - target_1_start_time
merged_targets_span = similarity_pb2.TimeSpan()
merged_targets_span.start_time = target_1_start_time
merged_targets_span.duration = merged_duration
self.assertTrue(overlaps(merged_targets_span, patterns_found[0]))
def testCreateSimilarityCurveResponse(self):
response = similarity.CreateSimilarityCurveResponse(self.base_data, 1, 2,
sampling_freq)
self.assertIsInstance(response, similarity_pb2.SimilarityCurveResponse)
self.assertLen(response.scores, self.base_data.shape[1])
if __name__ == '__main__':
absltest.main()
| 37.772947
| 79
| 0.638061
|
a26e3bb5967926f4cfa436a4b241ac15e21486b4
| 1,983
|
py
|
Python
|
demoproject/web/utils/jinja2backend.py
|
waleoyediran/django-starter
|
eb5df81ae503781746606902f24938693230dd54
|
[
"MIT"
] | null | null | null |
demoproject/web/utils/jinja2backend.py
|
waleoyediran/django-starter
|
eb5df81ae503781746606902f24938693230dd54
|
[
"MIT"
] | null | null | null |
demoproject/web/utils/jinja2backend.py
|
waleoyediran/django-starter
|
eb5df81ae503781746606902f24938693230dd54
|
[
"MIT"
] | null | null | null |
import sys
import six
from django.template.backends.jinja2 import Jinja2, Template
from django.template.backends.utils import csrf_token_lazy, csrf_input_lazy
from django.template.exceptions import TemplateDoesNotExist
from jinja2.exceptions import TemplateSyntaxError
from jinja2.utils import import_string
import jinja2
class Jinja2Backend(Jinja2):
def __init__(self, params):
self.context_processors = [
import_string(p)
for p in params['OPTIONS'].pop('context_processors', [])
]
super(Jinja2Backend, self).__init__(params)
def from_string(self, template_code):
return JTemplate(
self.env.from_string(template_code), self.context_processors)
def get_template(self, template_name):
try:
return JTemplate(
self.env.get_template(template_name), self.context_processors)
except jinja2.TemplateNotFound as exc:
six.reraise(TemplateDoesNotExist, TemplateDoesNotExist(exc.args),
sys.exc_info()[2])
except jinja2.TemplateSyntaxError as exc:
six.reraise(TemplateSyntaxError, TemplateSyntaxError(exc.args),
sys.exc_info()[2])
class JTemplate(Template):
def __init__(self, template, context_processors):
super(JTemplate, self).__init__(template)
self.template = template
self.context_processors = context_processors
def render(self, context=None, request=None):
if context is None:
context = {}
if request is not None:
context['request'] = request
lazy_csrf_input = csrf_input_lazy(request)
context['csrf'] = lambda: lazy_csrf_input
context['csrf_input'] = lazy_csrf_input
context['csrf_token'] = csrf_token_lazy(request)
for cp in self.context_processors:
context.update(cp(request))
return self.template.render(context)
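# Illustrative settings wiring (an assumption based on the project layout,
# not part of the original file):
#
# TEMPLATES = [{
#     'BACKEND': 'web.utils.jinja2backend.Jinja2Backend',
#     'DIRS': [os.path.join(BASE_DIR, 'templates')],
#     'OPTIONS': {
#         'context_processors': [
#             'django.template.context_processors.request',
#         ],
#     },
# }]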
| 36.054545
| 78
| 0.666667
|
f7ace920857a1ed8ec3e9a8cacc9d831b07aa5b5
| 7,656
|
py
|
Python
|
env/plane.py
|
mrernst/rl_robotics_research
|
0bc446cfb69591cb4ee3ce8d39815c463090a5f6
|
[
"MIT"
] | null | null | null |
env/plane.py
|
mrernst/rl_robotics_research
|
0bc446cfb69591cb4ee3ce8d39815c463090a5f6
|
[
"MIT"
] | null | null | null |
env/plane.py
|
mrernst/rl_robotics_research
|
0bc446cfb69591cb4ee3ce8d39815c463090a5f6
|
[
"MIT"
] | null | null | null |
import gym
import numpy as np
import cv2
from gym import spaces
def line_intersection(line1, line2):
# calculate the intersection point
xdiff = (line1[0][0] - line1[1][0], line2[0][0] - line2[1][0])
    ydiff = (line1[0][1] - line1[1][1], line2[0][1] - line2[1][1])
def det(a, b):
return a[0] * b[1] - a[1] * b[0]
div = det(xdiff, ydiff)
if div == 0:
raise Exception('lines do not intersect')
d = (det(*line1), det(*line2))
x = det(d, xdiff) / div
y = det(d, ydiff) / div
return x, y
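# Editorial sanity check (not in the original file): the diagonals of the
# unit square cross at its centre.
# >>> line_intersection(((0, 0), (1, 1)), ((0, 1), (1, 0)))
# (0.5, 0.5)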
def check_cross(x0, y0, x1, y1):
x0 = np.array(x0)
y0 = np.array(y0)
x1 = np.array(x1)
y1 = np.array(y1)
return np.cross(x1 - x0, y0 - x0), np.cross(y0 - x0, y1 - x0)
def check_itersection(x0, y0, x1, y1):
EPS = 1e-10
def sign(x):
if x > EPS:
return 1
if x < -EPS:
return -1
return 0
f1, f2 = check_cross(x0, y0, x1, y1)
f3, f4 = check_cross(x1, y1, x0, y0)
if sign(f1) == sign(f2) and sign(f3) == sign(f4) and sign(f1) != 0 and sign(f3) != 0:
return True
return False
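# Editorial examples (not in the original file): the diagonals
# (0,0)-(1,1) and (0,1)-(1,0) cross, so check_itersection returns True;
# the parallel segments (0,0)-(1,0) and (0,1)-(1,1) do not, so it returns False.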
class PlaneBase(gym.Env):
def __init__(self, rects, R, is_render=False, size=512):
self.rects = rects
self.n = len(self.rects)
self.size = size
self.map = np.ones((size, size, 3), dtype=np.uint8) * 255
self.R = R
self.R2 = R ** 2
self.board = np.array(
[[0, 0],
[1, 1]],
dtype='float32')
self.action_space = gym.spaces.Box(
low=-R, high=R, shape=(2,), dtype='float32')
self.observation_space = gym.spaces.Box(
low=0., high=1., shape=(2,), dtype='float32')
if is_render:
cv2.namedWindow('image', cv2.WINDOW_NORMAL)
self.image_name = 'image'
for i in range(self.n):
for j in range(i + 1, self.n):
                if check_itersection(self.rects[i][0], self.rects[i][1], self.rects[j][0], self.rects[j][1]):
                    raise Exception("Rectangles intersect with each other")
for ((x0, y0), (x1, y1)) in rects:
x0, y0 = int(x0 * size), int(y0 * size)
x1, y1 = int(x1 * size), int(y1 * size)
cv2.rectangle(self.map, (x0, y0), (x1, y1), (0, 255, 0), 1)
ps = np.array([
[x0, y0],
[x1, y0],
[x1, y1],
[x0, y1],
], dtype=np.int32)
cv2.fillConvexPoly(self.map, ps, (127, 127, 127))
self.state = (0, 0)
self.reset()
def restore(self, obs):
self.state = (float(obs[0]), float(obs[1]))
def rect_lines(self, rect):
(x0, y0), (x1, y1) = rect
yield (x0, y0), (x1, y0)
yield (x1, y0), (x1, y1)
yield (x1, y1), (x0, y1)
yield (x0, y1), (x0, y0)
def l2dist(self, x, y):
return ((y[0] - x[0]) ** 2) + ((y[1] - x[1]) ** 2)
def check_inside(self, p):
EPS = 1e-10
for i in self.rects:
if p[0] > i[0][0] + EPS and p[0] < i[1][0] - EPS and p[1] > i[0][1] + EPS and p[1] < i[1][1] - EPS:
return True
return False
def step(self, action):
dx, dy = action
l = 0.0001
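        # Probe an infinitesimal step in the desired direction first: if even
        # that point is blocked or off the board, the agent stays put.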
p = (self.state[0] + dx * l, self.state[1] + dy * l)
if self.check_inside(p) or p[0] > 1 or p[1] > 1 or p[0] < 0 or p[1] < 0:
return np.array(self.state), 0, False, {}
dest = (self.state[0] + dx, self.state[1] + dy)
md = self.l2dist(self.state, dest)
_dest = dest
line = (self.state, dest)
for i in list(self.rects) + [self.board]:
for l in self.rect_lines(i):
if check_itersection(self.state, dest, l[0], l[1]):
inter_point = line_intersection(line, l)
d = self.l2dist(self.state, inter_point)
if d < md:
md = d
_dest = inter_point
self.restore(_dest)
return np.array(self.state), -md, False, {}
def render(self, mode='human'):
image = self.map.copy()
x, y = self.state
x = int(x * self.size)
y = int(y * self.size)
cv2.circle(image, (x, y), 5, (255, 0, 255), -1)
if mode == 'human':
cv2.imshow('image', image)
cv2.waitKey(2)
else:
return image
def reset(self):
inside_rect = True
while inside_rect:
a, b = np.random.random(), np.random.random()
inside_rect = self.check_inside((a, b))
self.state = (a, b)
return np.array(self.state)
class NaivePlane(PlaneBase):
def __init__(self, is_render=True, R=300, size=512):
PlaneBase.__init__(self,
[
np.array([[128, 128], [300, 386]]) / 512,
np.array([[400, 400], [500, 500]]) / 512,
],
R, is_render=is_render, size=size),
class NaivePlane2(PlaneBase):
    # two rectangles
def __init__(self, is_render=True, R=300, size=512):
PlaneBase.__init__(self,
[
np.array([[64, 64], [256, 256]]) / 512,
np.array([[300, 128], [400, 500]]) / 512,
],
R, is_render=is_render, size=size),
class NaivePlane3(PlaneBase):
    # four rectangles
def __init__(self, is_render=True, R=300, size=512):
PlaneBase.__init__(self,
[
np.array([[64, 64], [192, 192]]) / 512,
np.array([[320, 64], [448, 192]]) / 512,
np.array([[320, 320], [448, 448]]) / 512,
np.array([[64, 320], [192, 448]]) / 512,
],
R, is_render=is_render, size=size),
class NaivePlane4(PlaneBase):
    # two rectangles
def __init__(self, is_render=True, R=300, size=512):
PlaneBase.__init__(self,
[
np.array([[64, 64], [192, 512]]) / 512,
np.array([[320, 64], [448, 512]]) / 512,
],
R, is_render=is_render, size=size),
class NaivePlane5(PlaneBase):
    # one rectangle
def __init__(self, is_render=False, R=300, size=512):
PlaneBase.__init__(self,
[
np.array([[0, 1. / 3], [2. / 3, 2. / 3]]),
],
R, is_render=is_render, size=size),
class NaivePlane6(PlaneBase):
    # no rectangles (obstacle-free plane)
def __init__(self, is_render=False, R=300, size=512):
PlaneBase.__init__(self,
[
# np.array([[0, 1. / 3], [2. / 3, 2. / 3]]),
],
R, is_render=is_render, size=size),
if __name__ == '__main__':
env = NaivePlane5()
obs = env.reset()
while True:
print(obs)
env.render()
while True:
try:
print('entering the dir (x, y)')
act = input().strip().split(' ')
act = float(act[0]) / 512, float(act[1]) / 512
break
except KeyboardInterrupt as e:
raise e
except:
continue
obs, reward, _, _ = env.step(act)
| 31.377049
| 111
| 0.455199
|
560729c6e6f6a22d84c7558bb8007335207be519
| 38,065
|
py
|
Python
|
tests/fast_tests/test_environments.py
|
remusionita/flow_rl
|
f26cd13bdcc89c00fe980cd8d7873c88af8e2744
|
[
"MIT"
] | 2
|
2020-12-03T21:13:39.000Z
|
2022-03-13T09:12:43.000Z
|
tests/fast_tests/test_environments.py
|
remusionita/flow_rl
|
f26cd13bdcc89c00fe980cd8d7873c88af8e2744
|
[
"MIT"
] | 1
|
2019-12-05T09:04:05.000Z
|
2019-12-05T21:23:49.000Z
|
tests/fast_tests/test_environments.py
|
remusionita/flow_rl
|
f26cd13bdcc89c00fe980cd8d7873c88af8e2744
|
[
"MIT"
] | 3
|
2019-12-07T11:36:21.000Z
|
2020-01-04T16:29:57.000Z
|
import random
import numpy as np
import unittest
import os
from scipy.optimize import fsolve
from copy import deepcopy
from flow.core.params import VehicleParams
from flow.core.params import NetParams, EnvParams, SumoParams, InFlows
from flow.controllers import IDMController, RLController
from flow.networks import RingNetwork, MergeNetwork, BottleneckNetwork
from flow.networks import HighwayRampsNetwork
from flow.networks.ring import ADDITIONAL_NET_PARAMS as RING_PARAMS
from flow.networks.merge import ADDITIONAL_NET_PARAMS as MERGE_PARAMS
from flow.networks.highway_ramps import ADDITIONAL_NET_PARAMS as \
HIGHWAY_PARAMS
from flow.envs import LaneChangeAccelEnv, LaneChangeAccelPOEnv, AccelEnv, \
WaveAttenuationEnv, WaveAttenuationPOEnv, MergePOEnv, \
TestEnv, BottleneckDesiredVelocityEnv, BottleneckEnv, BottleneckAccelEnv
from flow.envs.ring.wave_attenuation import v_eq_max_function
from flow.envs.multiagent import MultiAgentHighwayPOEnv
os.environ["TEST_FLAG"] = "True"
class TestLaneChangeAccelEnv(unittest.TestCase):
def setUp(self):
vehicles = VehicleParams()
vehicles.add("rl", acceleration_controller=(RLController, {}),
num_vehicles=1)
vehicles.add("human", acceleration_controller=(IDMController, {}),
num_vehicles=1)
self.sim_params = SumoParams()
self.network = RingNetwork(
name="test_merge",
vehicles=vehicles,
net_params=NetParams(additional_params=RING_PARAMS.copy()),
)
self.env_params = EnvParams(
additional_params={
"max_accel": 3,
"max_decel": 3,
"target_velocity": 10,
"lane_change_duration": 5,
"sort_vehicles": False
}
)
def tearDown(self):
self.sim_params = None
self.network = None
self.env_params = None
def test_additional_env_params(self):
"""Ensures that not returning the correct params leads to an error."""
self.assertTrue(
test_additional_params(
env_class=LaneChangeAccelEnv,
sim_params=self.sim_params,
network=self.network,
additional_params={
"max_accel": 1,
"max_decel": 1,
"lane_change_duration": 5,
"target_velocity": 10,
"sort_vehicles": False
}
)
)
def test_observation_action_space(self):
"""Tests the observation and action spaces upon initialization."""
# create the environment
env = LaneChangeAccelEnv(
sim_params=self.sim_params,
network=self.network,
env_params=self.env_params
)
# check the observation space
self.assertTrue(test_space(
env.observation_space,
expected_size=3 * env.initial_vehicles.num_vehicles,
expected_min=0,
expected_max=1)
)
# check the action space
self.assertTrue(test_space(
env.action_space,
expected_size=2 * env.initial_vehicles.num_rl_vehicles,
expected_min=np.array([
-env.env_params.additional_params["max_decel"], -1]),
expected_max=np.array([
env.env_params.additional_params["max_accel"], 1]))
)
env.terminate()
def test_observed(self):
"""Ensures that the observed ids are returning the correct vehicles."""
self.assertTrue(
test_observed(
env_class=LaneChangeAccelEnv,
sim_params=self.sim_params,
network=self.network,
env_params=self.env_params,
expected_observed=["human_0"]
)
)
class TestLaneChangeAccelPOEnv(unittest.TestCase):
def setUp(self):
vehicles = VehicleParams()
vehicles.add("rl", acceleration_controller=(RLController, {}),
num_vehicles=1)
vehicles.add("human", acceleration_controller=(IDMController, {}),
num_vehicles=1)
self.sim_params = SumoParams()
self.network = RingNetwork(
name="test_merge",
vehicles=vehicles,
net_params=NetParams(additional_params=RING_PARAMS.copy()),
)
self.env_params = EnvParams(
additional_params={
"max_accel": 3,
"max_decel": 3,
"target_velocity": 10,
"lane_change_duration": 5,
"sort_vehicles": False
}
)
def tearDown(self):
self.sim_params = None
self.network = None
self.env_params = None
def test_additional_env_params(self):
"""Ensures that not returning the correct params leads to an error."""
self.assertTrue(
test_additional_params(
env_class=LaneChangeAccelPOEnv,
sim_params=self.sim_params,
network=self.network,
additional_params={
"max_accel": 1,
"max_decel": 1,
"lane_change_duration": 5,
"target_velocity": 10,
"sort_vehicles": False
}
)
)
def test_observation_action_space(self):
"""Tests the observation and action spaces upon initialization."""
# create the environment
env = LaneChangeAccelPOEnv(
sim_params=self.sim_params,
network=self.network,
env_params=self.env_params
)
# check the observation space
self.assertTrue(test_space(
env.observation_space, expected_size=5, expected_min=0,
expected_max=1))
# check the action space
self.assertTrue(test_space(
env.action_space,
expected_size=2,
expected_min=np.array([-3, -1]),
expected_max=np.array([3, 1]))
)
env.terminate()
def test_observed(self):
"""Ensures that the observed ids are returning the correct vehicles."""
self.assertTrue(
test_observed(
env_class=LaneChangeAccelPOEnv,
sim_params=self.sim_params,
network=self.network,
env_params=self.env_params,
expected_observed=["human_0"]
)
)
class TestAccelEnv(unittest.TestCase):
def setUp(self):
vehicles = VehicleParams()
vehicles.add("rl", acceleration_controller=(RLController, {}),
num_vehicles=1)
vehicles.add("human", acceleration_controller=(IDMController, {}),
num_vehicles=1)
self.sim_params = SumoParams()
self.network = RingNetwork(
name="test_merge",
vehicles=vehicles,
net_params=NetParams(additional_params=RING_PARAMS.copy()),
)
self.env_params = EnvParams(
additional_params={
"max_accel": 3,
"max_decel": 3,
"target_velocity": 10,
"sort_vehicles": False
}
)
def tearDown(self):
self.sim_params = None
self.network = None
self.env_params = None
def test_additional_env_params(self):
"""Ensures that not returning the correct params leads to an error."""
self.assertTrue(
test_additional_params(
env_class=AccelEnv,
sim_params=self.sim_params,
network=self.network,
additional_params={
"max_accel": 1,
"max_decel": 1,
"target_velocity": 10,
"sort_vehicles": False
}
)
)
def test_observation_action_space(self):
"""Tests the observation and action spaces upon initialization."""
env = AccelEnv(
sim_params=self.sim_params,
network=self.network,
env_params=self.env_params
)
# check the observation space
self.assertTrue(test_space(
env.observation_space,
expected_size=2 * env.initial_vehicles.num_vehicles,
expected_min=0, expected_max=1))
# check the action space
self.assertTrue(test_space(
env.action_space,
expected_size=env.initial_vehicles.num_rl_vehicles,
expected_min=-abs(env.env_params.additional_params["max_decel"]),
expected_max=env.env_params.additional_params["max_accel"])
)
env.terminate()
def test_observed(self):
"""Ensures that the observed ids are returning the correct vehicles."""
self.assertTrue(
test_observed(
env_class=AccelEnv,
sim_params=self.sim_params,
network=self.network,
env_params=self.env_params,
expected_observed=["human_0"]
)
)
def test_sorting(self):
"""
Tests that the sorting method returns a list of ids sorted by the
absolute_position variable when sorting is requested, and does
nothing if it is not requested.
"""
env_params = self.env_params
env_params.additional_params['sort_vehicles'] = True
self.network.initial_config.shuffle = True
env = AccelEnv(
sim_params=self.sim_params,
network=self.network,
env_params=env_params
)
env.reset()
env.additional_command()
sorted_ids = env.sorted_ids
positions = [env.absolute_position[veh_id] for veh_id in sorted_ids]
# ensure vehicles ids are in sorted order by positions
self.assertTrue(
all(positions[i] <= positions[i + 1]
for i in range(len(positions) - 1)))
def test_no_sorting(self):
        # set up an environment with the "sort_vehicles" attribute set to False,
# and shuffling so that the vehicles are not sorted by their ids
env_params = self.env_params
env_params.additional_params['sort_vehicles'] = False
self.network.initial_config.shuffle = True
env = AccelEnv(
sim_params=self.sim_params,
network=self.network,
env_params=env_params
)
env.reset()
env.additional_command()
sorted_ids = list(env.sorted_ids)
ids = env.k.vehicle.get_ids()
# ensure that the list of ids did not change
self.assertListEqual(sorted_ids, ids)
class TestWaveAttenuationEnv(unittest.TestCase):
def setUp(self):
vehicles = VehicleParams()
vehicles.add("rl", acceleration_controller=(RLController, {}),
num_vehicles=1)
vehicles.add("human", acceleration_controller=(IDMController, {}),
num_vehicles=1)
self.sim_params = SumoParams(
restart_instance=True
)
self.network = RingNetwork(
name="test_merge",
vehicles=vehicles,
net_params=NetParams(additional_params=RING_PARAMS.copy()),
)
params = {
"max_accel": 1,
"max_decel": 1,
"ring_length": [220, 270]
}
self.env_params = EnvParams(additional_params=params)
def tearDown(self):
self.sim_params = None
self.network = None
self.env_params = None
def test_additional_env_params(self):
"""Ensures that not returning the correct params leads to an error."""
self.assertTrue(
test_additional_params(
env_class=WaveAttenuationEnv,
sim_params=self.sim_params,
network=self.network,
additional_params={
"max_accel": 1,
"max_decel": 1,
"ring_length": [220, 270],
}
)
)
def test_observation_action_space(self):
"""Tests the observation and action spaces upon initialization."""
env = WaveAttenuationEnv(
sim_params=self.sim_params,
network=self.network,
env_params=self.env_params
)
# check the observation space
self.assertTrue(test_space(
env.observation_space,
expected_size=2 * env.initial_vehicles.num_vehicles,
expected_min=0, expected_max=1))
# check the action space
self.assertTrue(test_space(
env.action_space,
expected_size=env.initial_vehicles.num_rl_vehicles,
expected_min=-abs(env.env_params.additional_params["max_decel"]),
expected_max=env.env_params.additional_params["max_accel"])
)
env.terminate()
def test_observed(self):
"""Ensures that the observed ids are returning the correct vehicles."""
self.assertTrue(
test_observed(
env_class=WaveAttenuationEnv,
sim_params=self.sim_params,
network=self.network,
env_params=self.env_params,
expected_observed=["human_0"]
)
)
def test_reset(self):
"""
        Tests that the reset method creates new ring lengths within the
requested range.
"""
# set a random seed to ensure the network lengths are always the same
# during testing
random.seed(9001)
# create the environment
env = WaveAttenuationEnv(
sim_params=self.sim_params,
network=self.network,
env_params=self.env_params
)
# reset the network several times and check its length
self.assertEqual(env.k.network.non_internal_length(), 230)
env.reset()
self.assertEqual(env.k.network.non_internal_length(), 239)
env.reset()
self.assertEqual(env.k.network.non_internal_length(), 256)
def test_v_eq_max_function(self):
"""
Tests that the v_eq_max_function returns appropriate values.
"""
# for 230 m ring roads
self.assertAlmostEqual(
float(fsolve(v_eq_max_function, np.array([4]), args=(22, 230))[0]),
3.7136148111012934)
# for 270 m ring roads
self.assertAlmostEqual(
float(fsolve(v_eq_max_function, np.array([4]), args=(22, 270))[0]),
5.6143732387852054)
def test_reset_no_same_length(self):
"""
Tests that the reset method uses the original ring length when the
range is set to None.
"""
        # set up env_params with no ring_length range
env_params = deepcopy(self.env_params)
env_params.additional_params["ring_length"] = None
# create the environment
env = WaveAttenuationEnv(
sim_params=self.sim_params,
network=self.network,
env_params=env_params
)
# reset the network several times and check its length
self.assertEqual(env.k.network.non_internal_length(), RING_PARAMS["length"])
env.reset()
self.assertEqual(env.k.network.non_internal_length(), RING_PARAMS["length"])
env.reset()
self.assertEqual(env.k.network.non_internal_length(), RING_PARAMS["length"])
class TestWaveAttenuationPOEnv(unittest.TestCase):
def setUp(self):
vehicles = VehicleParams()
vehicles.add("rl", acceleration_controller=(RLController, {}),
num_vehicles=1)
vehicles.add("human", acceleration_controller=(IDMController, {}),
num_vehicles=1)
self.sim_params = SumoParams()
self.network = RingNetwork(
name="test_merge",
vehicles=vehicles,
net_params=NetParams(additional_params=RING_PARAMS.copy()),
)
self.env_params = EnvParams(
additional_params={
"max_accel": 1,
"max_decel": 1,
"ring_length": [220, 270]
}
)
def tearDown(self):
self.sim_params = None
self.network = None
self.env_params = None
def test_additional_env_params(self):
"""Ensures that not returning the correct params leads to an error."""
self.assertTrue(
test_additional_params(
env_class=WaveAttenuationPOEnv,
sim_params=self.sim_params,
network=self.network,
additional_params={
"max_accel": 1,
"max_decel": 1,
"ring_length": [220, 270],
}
)
)
def test_observation_action_space(self):
"""Tests the observation and action spaces upon initialization."""
# create the environment
env = WaveAttenuationPOEnv(
sim_params=self.sim_params,
network=self.network,
env_params=self.env_params
)
# check the observation space
self.assertTrue(test_space(
env.observation_space,
expected_size=3,
expected_min=-float('inf'),
expected_max=float('inf')
))
# check the action space
self.assertTrue(test_space(
env.action_space,
expected_size=1, expected_min=-1, expected_max=1))
env.terminate()
def test_observed(self):
"""Ensures that the observed ids are returning the correct vehicles."""
self.assertTrue(
test_observed(
env_class=WaveAttenuationPOEnv,
sim_params=self.sim_params,
network=self.network,
env_params=self.env_params,
expected_observed=["human_0"]
)
)
def test_reward(self):
"""Check the reward function for different values.
The reward function should be a linear combination of the average speed
of all vehicles and a penalty on the requested accelerations by the
AVs.
"""
# create the environment
env = WaveAttenuationPOEnv(
sim_params=self.sim_params,
network=self.network,
env_params=self.env_params
)
env.reset()
# check the reward for no acceleration
env.k.vehicle.test_set_speed('human_0', 0)
env.k.vehicle.test_set_speed('rl_0', 0)
self.assertAlmostEqual(
env.compute_reward(rl_actions=[0], fail=False),
0
)
env.k.vehicle.test_set_speed('human_0', 0)
env.k.vehicle.test_set_speed('rl_0', 1)
self.assertAlmostEqual(
env.compute_reward(rl_actions=[0], fail=False),
0.1
)
env.k.vehicle.test_set_speed('human_0', 1)
env.k.vehicle.test_set_speed('rl_0', 1)
self.assertAlmostEqual(
env.compute_reward(rl_actions=[0], fail=False),
0.2
)
# check the fail option
env.k.vehicle.test_set_speed('human_0', 1)
env.k.vehicle.test_set_speed('rl_0', 1)
self.assertAlmostEqual(
env.compute_reward(rl_actions=[0], fail=True),
0
)
# check the effect of RL actions
env.k.vehicle.test_set_speed('human_0', 1)
env.k.vehicle.test_set_speed('rl_0', 1)
self.assertAlmostEqual(
env.compute_reward(rl_actions=None, fail=False),
0
)
env.k.vehicle.test_set_speed('human_0', 1)
env.k.vehicle.test_set_speed('rl_0', 1)
self.assertAlmostEqual(
env.compute_reward(rl_actions=[1], fail=False),
-3.8
)
class TestMergePOEnv(unittest.TestCase):
def setUp(self):
vehicles = VehicleParams()
vehicles.add("rl", acceleration_controller=(RLController, {}),
num_vehicles=1)
vehicles.add("human", acceleration_controller=(IDMController, {}),
num_vehicles=1)
self.sim_params = SumoParams()
self.network = MergeNetwork(
name="test_merge",
vehicles=vehicles,
net_params=NetParams(additional_params=MERGE_PARAMS.copy()),
)
self.env_params = EnvParams(
additional_params={
"max_accel": 3,
"max_decel": 3,
"target_velocity": 25,
"num_rl": 5,
}
)
def tearDown(self):
self.sim_params = None
self.network = None
self.env_params = None
def test_additional_env_params(self):
"""Ensures that not returning the correct params leads to an error."""
self.assertTrue(
test_additional_params(
env_class=MergePOEnv,
sim_params=self.sim_params,
network=self.network,
additional_params={
"max_accel": 1,
"max_decel": 1,
"target_velocity": 25,
"num_rl": 5
}
)
)
def test_observation_action_space(self):
"""Tests the observation and action spaces upon initialization."""
# create the environment
env = MergePOEnv(
sim_params=self.sim_params,
network=self.network,
env_params=self.env_params
)
# check the observation space
self.assertTrue(test_space(
env.observation_space,
expected_size=25, expected_min=0, expected_max=1))
# check the action space
self.assertTrue(test_space(
env.action_space,
expected_size=5, expected_min=-3, expected_max=3))
env.terminate()
def test_observed(self):
"""Ensures that the observed ids are returning the correct vehicles."""
self.assertTrue(
test_observed(
env_class=MergePOEnv,
sim_params=self.sim_params,
network=self.network,
env_params=self.env_params,
expected_observed=["human_0"]
)
)
class TestTestEnv(unittest.TestCase):
"""Tests the TestEnv environment in flow/envs/test.py"""
def setUp(self):
vehicles = VehicleParams()
vehicles.add("test", num_vehicles=1)
net_params = NetParams(additional_params=RING_PARAMS)
env_params = EnvParams()
sim_params = SumoParams()
network = RingNetwork("test_ring",
vehicles=vehicles,
net_params=net_params)
self.env = TestEnv(env_params, sim_params, network)
def tearDown(self):
self.env.terminate()
self.env = None
def test_obs_space(self):
self.assertEqual(self.env.observation_space.shape[0], 0)
self.assertEqual(len(self.env.observation_space.high), 0)
self.assertEqual(len(self.env.observation_space.low), 0)
def test_action_space(self):
self.assertEqual(self.env.action_space.shape[0], 0)
self.assertEqual(len(self.env.action_space.high), 0)
self.assertEqual(len(self.env.action_space.low), 0)
def test_get_state(self):
self.assertEqual(len(self.env.get_state()), 0)
def test_compute_reward(self):
# test the default
self.assertEqual(self.env.compute_reward([]), 0)
# test if the "reward_fn" parameter is defined
def reward_fn(*_):
return 1
self.env.env_params.additional_params["reward_fn"] = reward_fn
self.assertEqual(self.env.compute_reward([]), 1)
class TestBottleneckEnv(unittest.TestCase):
"""Tests the BottleneckEnv environment in flow/envs/bottleneck.py"""
def setUp(self):
self.sim_params = SumoParams(sim_step=0.5, restart_instance=True)
vehicles = VehicleParams()
vehicles.add(veh_id="human", num_vehicles=10)
env_params = EnvParams(
additional_params={
"max_accel": 3,
"max_decel": 3,
"lane_change_duration": 5,
"disable_tb": True,
"disable_ramp_metering": True,
}
)
net_params = NetParams(
additional_params={"scaling": 1, "speed_limit": 23})
self.network = BottleneckNetwork(
name="bay_bridge_toll",
vehicles=vehicles,
net_params=net_params)
self.env = BottleneckEnv(env_params, self.sim_params, self.network)
self.env.reset()
def tearDown(self):
self.env.terminate()
del self.env
def test_additional_env_params(self):
"""Ensures that not returning the correct params leads to an error."""
self.assertTrue(
test_additional_params(
env_class=BottleneckEnv,
sim_params=self.sim_params,
network=self.network,
additional_params={
"max_accel": 3,
"max_decel": 3,
"lane_change_duration": 5,
"disable_tb": True,
"disable_ramp_metering": True,
}
)
)
def test_get_bottleneck_density(self):
self.assertEqual(self.env.get_bottleneck_density(), 0)
def test_observation_action_space(self):
"""Tests the observation and action spaces upon initialization."""
# check the observation space
self.assertTrue(test_space(
self.env.observation_space,
expected_size=1,
expected_min=-float('inf'),
expected_max=float('inf'))
)
# check the action space
self.assertTrue(test_space(
self.env.action_space,
expected_size=1,
expected_min=-float('inf'),
expected_max=float('inf'))
)
class TestBottleneckAccelEnv(unittest.TestCase):
"""Tests BottleneckAccelEnv in flow/envs/bottleneck.py."""
def setUp(self):
self.sim_params = SumoParams(sim_step=0.5, restart_instance=True)
vehicles = VehicleParams()
vehicles.add(veh_id="human", num_vehicles=10)
env_params = EnvParams(
additional_params={
"max_accel": 3,
"max_decel": 3,
"lane_change_duration": 5,
"disable_tb": True,
"disable_ramp_metering": True,
"target_velocity": 30,
"add_rl_if_exit": True,
}
)
net_params = NetParams(
additional_params={"scaling": 1, "speed_limit": 23})
self.network = BottleneckNetwork(
name="bay_bridge_toll",
vehicles=vehicles,
net_params=net_params)
self.env = BottleneckAccelEnv(
env_params, self.sim_params, self.network)
self.env.reset()
def tearDown(self):
self.env.terminate()
del self.env
def test_additional_env_params(self):
"""Ensures that not returning the correct params leads to an error."""
self.assertTrue(
test_additional_params(
env_class=BottleneckAccelEnv,
sim_params=self.sim_params,
network=self.network,
additional_params={
"max_accel": 3,
"max_decel": 3,
"lane_change_duration": 5,
"disable_tb": True,
"disable_ramp_metering": True,
"target_velocity": 30,
"add_rl_if_exit": True,
}
)
)
def test_observation_action_space(self):
"""Tests the observation and action spaces upon initialization."""
# check the observation space
self.assertTrue(test_space(
self.env.observation_space,
expected_size=12,
expected_min=0,
expected_max=1)
)
class TestBottleneckDesiredVelocityEnv(unittest.TestCase):
"""Tests the BottleneckDesiredVelocityEnv environment in
flow/envs/bottleneck.py"""
def test_reset_inflows(self):
"""Tests that the inflow change within the expected range when calling
reset."""
# set a random seed for inflows to be the same every time
np.random.seed(seed=123)
sim_params = SumoParams(sim_step=0.5, restart_instance=True)
vehicles = VehicleParams()
vehicles.add(veh_id="human")
vehicles.add(veh_id="followerstopper")
# edge name, how many segments to observe/control, whether the segment
# is controlled
controlled_segments = [("1", 1, False), ("2", 2, True), ("3", 2, True),
("4", 2, True), ("5", 1, False)]
num_observed_segments = [("1", 1), ("2", 3), ("3", 3), ("4", 3),
("5", 1)]
env_params = EnvParams(
additional_params={
"target_velocity": 40,
"disable_tb": True,
"disable_ramp_metering": True,
"controlled_segments": controlled_segments,
"symmetric": False,
"observed_segments": num_observed_segments,
"reset_inflow": True, # this must be set to True for the test
"lane_change_duration": 5,
"max_accel": 3,
"max_decel": 3,
"inflow_range": [1000, 2000] # this is what we're testing
}
)
inflow = InFlows()
inflow.add(veh_type="human",
edge="1",
vehs_per_hour=1500, # the initial inflow we're checking for
departLane="random",
departSpeed=10)
net_params = NetParams(
inflows=inflow,
additional_params={"scaling": 1, "speed_limit": 23})
network = BottleneckNetwork(
name="bay_bridge_toll",
vehicles=vehicles,
net_params=net_params)
env = BottleneckDesiredVelocityEnv(env_params, sim_params, network)
# reset the environment and get a new inflow rate
env.reset()
expected_inflow = 1353.6 # just from checking the new inflow
# check that the first inflow rate is approximately what the seeded
# value expects it to be
for _ in range(500):
env.step(rl_actions=None)
self.assertAlmostEqual(
env.k.vehicle.get_inflow_rate(250)/expected_inflow, 1, 1)
class TestMultiAgentHighwayPOEnv(unittest.TestCase):
def setUp(self):
vehicles = VehicleParams()
vehicles.add("rl", acceleration_controller=(RLController, {}),
num_vehicles=1)
vehicles.add("human", acceleration_controller=(IDMController, {}),
num_vehicles=1)
self.sim_params = SumoParams()
self.network = HighwayRampsNetwork(
name="test_merge",
vehicles=vehicles,
net_params=NetParams(additional_params=HIGHWAY_PARAMS.copy()),
)
self.env_params = EnvParams(
additional_params={
'max_accel': 1, 'max_decel': 1, "target_velocity": 25
}
)
def tearDown(self):
self.sim_params = None
self.network = None
self.env_params = None
def test_additional_env_params(self):
"""Ensures that not returning the correct params leads to an error."""
self.assertTrue(
test_additional_params(
env_class=MultiAgentHighwayPOEnv,
sim_params=self.sim_params,
network=self.network,
additional_params={
"max_accel": 1,
"max_decel": 1,
"target_velocity": 10,
}
)
)
def test_observation_action_space(self):
"""Tests the observation and action spaces upon initialization."""
# create the environment
env = MultiAgentHighwayPOEnv(
sim_params=self.sim_params,
network=self.network,
env_params=self.env_params
)
# check the observation space
self.assertTrue(test_space(
env.observation_space,
expected_size=5,
expected_min=-float('inf'),
expected_max=float('inf')
))
# check the action space
self.assertTrue(test_space(
env.action_space,
expected_size=1, expected_min=-1, expected_max=1))
env.terminate()
def test_compute_reward(self):
# create the environment
env = MultiAgentHighwayPOEnv(
sim_params=self.sim_params,
network=self.network,
env_params=self.env_params
)
env.reset()
# test the no actions case
self.assertDictEqual(env.compute_reward(None), {})
# test the failure case
self.assertDictEqual(env.compute_reward({"rl_0": 0}, fail=True),
{"rl_0": 0})
# test the generic case
env.k.vehicle.test_set_speed("rl_0", 5)
self.assertDictEqual(env.compute_reward({"rl_0": 0}, fail=False),
{"rl_0": 0.09446148586775807})
# test the evaluate case
env.k.vehicle.test_set_speed("rl_0", 5)
env.env_params.evaluate = True
self.assertDictEqual(env.compute_reward({"rl_0": 0}, fail=False),
{"rl_0": 5})
def test_observed(self):
"""Ensures that the observed ids are returning the correct vehicles."""
self.assertTrue(
test_observed(
env_class=MultiAgentHighwayPOEnv,
sim_params=self.sim_params,
network=self.network,
env_params=self.env_params,
expected_observed=["human_0"]
)
)
###############################################################################
# Utility methods #
###############################################################################
def test_additional_params(env_class,
sim_params,
network,
additional_params):
"""Test that the environment raises an Error in any param is missing.
Parameters
----------
env_class : flow.envs.Env type
        the environment class to be tested
sim_params : flow.core.params.SumoParams
sumo-specific parameters
network : flow.networks.Network
network that works for the environment
additional_params : dict
the valid and required additional parameters for the environment in
EnvParams
Returns
-------
bool
True if the test passed, False otherwise
"""
for key in additional_params.keys():
# remove one param from the additional_params dict
new_add = additional_params.copy()
del new_add[key]
try:
env_class(
sim_params=sim_params,
network=network,
env_params=EnvParams(additional_params=new_add)
)
# if no KeyError is raised, the test has failed, so return False
return False
except KeyError:
# if a KeyError is raised, test the next param
pass
# if removing all additional params led to KeyErrors, the test has passed,
# so return True
return True
def test_space(gym_space, expected_size, expected_min, expected_max):
"""Test that an action or observation space is the correct size and bounds.
Parameters
----------
gym_space : gym.spaces.Box
gym space object to be tested
expected_size : int
expected size
expected_min : float or array_like
expected minimum value(s)
expected_max : float or array_like
expected maximum value(s)
Returns
-------
bool
True if the test passed, False otherwise
"""
return gym_space.shape[0] == expected_size \
and all(gym_space.high == expected_max) \
and all(gym_space.low == expected_min)
def test_observed(env_class,
sim_params,
network,
env_params,
expected_observed):
"""Test that the observed vehicles in the environment are as expected.
Parameters
----------
env_class : flow.envs.Env class
        the environment class to be tested
sim_params : flow.core.params.SumoParams
sumo-specific parameters
network : flow.networks.Network
network that works for the environment
env_params : flow.core.params.EnvParams
environment-specific parameters
expected_observed : array_like
expected list of observed vehicles
Returns
-------
bool
True if the test passed, False otherwise
"""
env = env_class(sim_params=sim_params,
network=network,
env_params=env_params)
env.reset()
env.step(None)
env.additional_command()
test_mask = np.all(
np.array(env.k.vehicle.get_observed_ids()) ==
np.array(expected_observed)
)
env.terminate()
return test_mask
###############################################################################
# End of utils #
###############################################################################
if __name__ == '__main__':
unittest.main()
| avg_line_length: 32.506405 | max_line_length: 84 | alphanum_fraction: 0.568606 |

| hexsha: f70f010c765e7d5b11c81cbf2a8a5f6563f97562 | size: 1657 | ext: py | lang: Python |
| repo_path: pyrasterframes/python/geomesa_pyspark/types.py | repo: jdenisgiguere/rasterframes @ 4226cde5223e67ab0e9f27c98bc1053d0a4b7a4e | licenses: ["Apache-2.0"] | stars/issues/forks: null |
"""***********************************************************************
This file was created by Astraea, Inc., 2018 from an excerpt of the
original:
Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
All rights reserved. This program and the accompanying materials
are made available under the terms of the Apache License, Version 2.0
which accompanies this distribution and is available at
http://www.opensource.org/licenses/apache2.0.php.
+ ***********************************************************************/"""
from pyspark.sql.types import UserDefinedType
from pyspark.sql import Row
from pyspark.sql.types import *
from pyrasterframes.context import RFContext
class GeometryUDT(UserDefinedType):
@classmethod
def sqlType(self):
# return StructField("wkb", BinaryType(), False)
return StructType([StructField("wkb", BinaryType(), True)])
@classmethod
def module(cls):
return 'geomesa_pyspark.types'
@classmethod
def scalaUDT(cls):
return 'org.apache.spark.sql.jts.' + cls.__name__
def serialize(self, obj):
if (obj is None): return None
return Row(obj.toBytes)
def deserialize(self, datum):
return RFContext._jvm_mirror().generate_geometry(datum[0])
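# The concrete geometry types below inherit GeometryUDT's WKB-based
# (de)serialization unchanged; only the class name matters, since scalaUDT()
# appends cls.__name__ to the JVM package to resolve the matching Spark UDT.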
class PointUDT(GeometryUDT):
pass
class LineStringUDT(GeometryUDT):
pass
class PolygonUDT(GeometryUDT):
pass
class MultiPointUDT(GeometryUDT):
pass
class MultiLineStringUDT(GeometryUDT):
pass
class MultiPolygonUDT(GeometryUDT):
pass
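# Re-binding the GeometryUDT name below looks circular but is valid Python:
# the new class inherits from the original binding above, presumably so the
# generic geometry type is registered the same way as the leaf types.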
class GeometryUDT(GeometryUDT):
pass
class GeometryCollectionUDT(GeometryUDT):
pass
| avg_line_length: 24.014493 | max_line_length: 77 | alphanum_fraction: 0.652384 |

| hexsha: 4996016e19c0a4612f6785d846a3f53d4e3c28e1 | size: 2055 | ext: py | lang: Python |
| repo_path: day_16/day_16.py | repo: niccolomarcon/AoC_2020 @ 1afe7453f7ad8c789a3f804c29159c0bb62a7499 | licenses: ["MIT"] | stars/issues/forks: null |
from functools import partial
from heap import Heap
from itertools import chain, groupby
from math import prod
from sys import argv
import re
def extract(rule):
regex = re.compile(r'([\w ]+): (\d+)-(\d+) or (\d+)-(\d+)')
match = regex.match(rule).groups()
name, (a, b, c, d) = match[0], map(int, match[1:])
return name, lambda x: a <= x <= b or c <= x <= d
def ticket_scanning_error_rate(rules, tickets):
values = chain(*tickets)
invalid = filter(lambda x: not any(r(x) for _, r in rules), values)
return sum(invalid)
def valid(rules, ticket):
return all(any(rule(value) for _, rule in rules) for value in ticket)
def sort_fields(indexs_per_field):
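    # Constraint elimination: repeatedly take the field with the fewest
    # candidate column indices (tracked by the project-local Heap helper),
    # pin it to its one remaining index (assuming a unique assignment), and
    # remove that field's indices from every other candidate set, updating
    # the heap weights as we go.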
res = [None] * len(indexs_per_field)
heap = Heap((f, len(i)) for f, i in indexs_per_field.items())
while not heap.empty:
field = heap.pop()
for other_field in heap:
indexs_per_field[other_field] -= indexs_per_field[field]
new_weight = len(indexs_per_field[other_field])
heap.update(other_field, new_weight)
index = indexs_per_field[field].pop()
res[index] = field
return res
def departure_multiplication(rules, ticket, nearby):
valid_tickets = list(filter(partial(valid, rules), nearby))
valid_tickets.append(ticket)
columns = list(map(set, zip(*valid_tickets)))
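    # zip(*valid_tickets) transposes the rows, so columns[i] is the set of
    # every value observed at ticket position i.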
possible_idxs_per_field = {
field: set(i for i, v in enumerate(columns) if all(map(rule, v)))
for field, rule in rules
}
fields = sort_fields(possible_idxs_per_field)
return prod(v for f, v in zip(fields, ticket) if f.startswith('departure'))
if __name__ == '__main__':
with open(argv[1]) as input_file:
groups = groupby(input_file, lambda x: x != '\n')
rules, ticket, nearby = (list(g) for v, g in groups if v)
rules = [extract(rule) for rule in rules]
ticket = [int(v) for v in ticket[1].split(',')]
nearby = [list(map(int, t.split(','))) for t in nearby[1:]]
print(departure_multiplication(rules, ticket, nearby))
| avg_line_length: 33.145161 | max_line_length: 79 | alphanum_fraction: 0.642336 |

| hexsha: 558c9fb2258f416970464c4afd62fb7d64159be5 | size: 14921 | ext: py | lang: Python |
| repo_path: sysmontask/disk.py | repo: bastian-src/SysMonTask @ 95868e230efa130e820f91893a3c8d5664632ac4 | licenses: ["BSD-3-Clause"] | stars/issues/forks: null |
#!/usr/bin/env python3
# import gi
# gi.require_version("Gtk", "3.24")
from gi.repository import Gtk as g
import psutil as ps,cairo
from time import time
from os import popen
try:
from gi_composites import GtkTemplate
except:
from sysmontask.gi_composites import GtkTemplate
if __name__=='sysmontask.disk':
from sysmontask.sysmontask import files_dir
from sysmontask.gproc import sorting_func,byte_to_human
else:
from sysmontask import files_dir
from gproc import sorting_func,byte_to_human
@GtkTemplate(ui=files_dir+'/disk.glade')
class diskTabWidget(g.ScrolledWindow):
# Required else you would need to specify the full module
# name in mywidget.ui (__main__+MyWidget)
__gtype_name__ = 'diskTabWidget'
disktextlabel= GtkTemplate.Child()
diskinfolabel = GtkTemplate.Child()
diskdrawarea1=GtkTemplate.Child()
diskdrawarea2=GtkTemplate.Child()
diskactivelabelvalue=GtkTemplate.Child()
diskreadlabelvalue=GtkTemplate.Child()
diskwritelabelvalue=GtkTemplate.Child()
diskcurrenspeedlabelvalue=GtkTemplate.Child()
diskUsagesTreeView=GtkTemplate.Child()
# Alternative way to specify multiple widgets
#label1, entry = GtkTemplate.Child.widgets(2)
def __init__(self):
super(g.ScrolledWindow, self).__init__()
# This must occur *after* you initialize your base
self.init_template()
self.diskmxfactor=1 #for the scaling of maximum value on the graph
def givedata(self,secondself,index):
self.diskactiveArray=secondself.diskActiveArray[index]
self.diskreadArray=secondself.diskReadArray[index]
self.diskwriteArray=secondself.diskWriteArray[index]
@GtkTemplate.Callback
def on_diskDrawArea2_draw(self,dr,cr):
cr.set_line_width(2)
w=self.diskdrawarea2.get_allocated_width()
h=self.diskdrawarea2.get_allocated_height()
speedstep=100
maximumcurrentspeed=max(max(self.diskreadArray),max(self.diskwriteArray))
currentscalespeed=self.diskmxfactor*speedstep
if(currentscalespeed<maximumcurrentspeed):
while(currentscalespeed<maximumcurrentspeed):
self.diskmxfactor+=1
currentscalespeed=self.diskmxfactor*speedstep
else:
while(currentscalespeed>maximumcurrentspeed+speedstep and self.diskmxfactor>1):
self.diskmxfactor-=1
currentscalespeed=self.diskmxfactor*speedstep
self.diskcurrenspeedlabelvalue.set_text(str(currentscalespeed)+'MB')
scalingfactor=h/currentscalespeed
#creating outer rectangle
cr.set_source_rgba(.109,.670,.0588,1)
cr.set_line_width(3)
cr.rectangle(0,0,w,h)
cr.stroke()
# creating grid lines
verticalGap=int(h/10)
horzontalGap=int(w/10)
for i in range(1,10):
cr.set_source_rgba(.109,.670,.0588,1) #for changing the outer line color
cr.set_line_width(0.5)
cr.move_to(0,i*verticalGap)
cr.line_to(w,i*verticalGap)
cr.move_to(i*horzontalGap,0)
cr.line_to(i*horzontalGap,h)
cr.stroke()
cr.stroke()
stepsize=w/99.0
#print("in draw stepsize",stepsize)
# for i in range(0,99):
        # # not efficient way to fill the bars (drawing)
# cr.set_source_rgba(.431,1,.04,0.25) #for changing the fill color
# cr.move_to(i*stepsize,scalingfactor*(currentscalespeed-self.diskreadArray[i])+2)
# cr.line_to((i+1)*stepsize,scalingfactor*(currentscalespeed-self.diskreadArray[i+1])+2)
# cr.line_to((i+1)*stepsize,h)
# cr.line_to(i*stepsize,h)
# cr.move_to(i*stepsize,scalingfactor*(currentscalespeed-self.diskreadArray[i])+2)
# cr.fill()
# cr.stroke()
# # for outer line read speed
# cr.set_line_width(1.5)
# cr.set_source_rgba(.109,.670,.0588,1) #for changing the outer line color
# cr.move_to(i*stepsize,scalingfactor*(currentscalespeed-self.diskreadArray[i])+2)
# cr.line_to((i+1)*stepsize,scalingfactor*(currentscalespeed-self.diskreadArray[i+1])+2)
# cr.stroke()
# #for write
# cr.set_source_rgba(.207,.941,.682,0.3) #for changing the fill color
# cr.move_to(i*stepsize,scalingfactor*(currentscalespeed-self.diskwriteArray[i])+2)
# cr.line_to((i+1)*stepsize,scalingfactor*(currentscalespeed-self.diskwriteArray[i+1])+2)
# cr.line_to((i+1)*stepsize,h)
# cr.line_to(i*stepsize,h)
# cr.move_to(i*stepsize,scalingfactor*(currentscalespeed-self.diskwriteArray[i])+2)
# cr.fill()
# cr.stroke()
# #cr.set_dash([1.0])
# cr.set_source_rgba(.207,.941,.682,1) #for changing the outer line color
# cr.move_to(i*stepsize,scalingfactor*(currentscalespeed-self.diskwriteArray[i])+2)
# cr.line_to((i+1)*stepsize,scalingfactor*(currentscalespeed-self.diskwriteArray[i+1])+2)
# cr.stroke()
#efficient read speed drawing
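        # (single-path variant: trace the curve once, keep it alive with
        # stroke_preserve, then close it down to the bottom edge and fill,
        # instead of filling one trapezoid per segment as in the block above)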
cr.set_source_rgba(.109,.670,.0588,1) #for changing the outer line color
cr.set_line_width(1.5)
cr.move_to(0,scalingfactor*(currentscalespeed-self.diskreadArray[0])+2)
for i in range(0,99):
cr.line_to((i+1)*stepsize,scalingfactor*(currentscalespeed-self.diskreadArray[i+1])+2)
cr.stroke_preserve()
cr.set_source_rgba(.431,1,.04,0.25) #for changing the fill color
cr.line_to(w,h)
cr.line_to(0,h)
        cr.move_to(0,scalingfactor*(currentscalespeed-self.diskreadArray[0])+2)  # [0], not [i], mirroring the write path below
cr.fill()
cr.stroke()
#efficient drawing for write
cr.set_source_rgba(.207,.941,.682,1) #for changing the outer line color
cr.set_line_width(1.5)
cr.move_to(0,scalingfactor*(currentscalespeed-self.diskwriteArray[0])+2)
for i in range(0,99):
cr.line_to((i+1)*stepsize,scalingfactor*(currentscalespeed-self.diskwriteArray[i+1])+2)
cr.stroke_preserve()
cr.set_source_rgba(.207,.941,.682,0.3) #for changing the fill color
cr.line_to(w,h)
cr.line_to(0,h)
cr.move_to(0,scalingfactor*(currentscalespeed-self.diskwriteArray[0])+2)
cr.fill()
cr.stroke()
return False
@GtkTemplate.Callback
def on_diskDrawArea1_draw(self,dr,cr):
cr.set_line_width(2)
w=self.diskdrawarea1.get_allocated_width()
h=self.diskdrawarea1.get_allocated_height()
scalingfactor=h/100.0
#creating outer rectangle
cr.set_source_rgba(.109,.670,.0588,1)
cr.set_line_width(3)
cr.rectangle(0,0,w,h)
cr.stroke()
# creating grid lines
verticalGap=int(h/10)
horzontalGap=int(w/10)
for i in range(1,10):
cr.set_source_rgba(.109,.670,.0588,1) #for changing the outer line color
cr.set_line_width(0.5)
cr.move_to(0,i*verticalGap)
cr.line_to(w,i*verticalGap)
cr.move_to(i*horzontalGap,0)
cr.line_to(i*horzontalGap,h)
cr.stroke()
cr.stroke()
stepsize=w/99.0
#print("in draw stepsize",stepsize)
# for i in range(0,99):
        # # not efficient way to fill the bars (drawing)
# cr.set_source_rgba(.431,1,.04,0.25) #for changing the fill color
# cr.move_to(i*stepsize,scalingfactor*(100-self.diskactiveArray[i])+2)
# cr.line_to((i+1)*stepsize,scalingfactor*(100-self.diskactiveArray[i+1])+2)
# cr.line_to((i+1)*stepsize,h)
# cr.line_to(i*stepsize,h)
# cr.move_to(i*stepsize,scalingfactor*(100-self.diskactiveArray[i])+2)
# cr.fill()
# cr.stroke()
# # for outer line
# cr.set_line_width(1.5)
# cr.set_source_rgba(.109,.670,.0588,1) #for changing the outer line color
# cr.move_to(i*stepsize,scalingfactor*(100-self.diskactiveArray[i])+2)
# cr.line_to((i+1)*stepsize,scalingfactor*(100-self.diskactiveArray[i+1])+2)
# cr.stroke()
cr.set_source_rgba(.109,.670,.0588,1) #for changing the outer line color
cr.set_line_width(1.5)
cr.move_to(0,scalingfactor*(100-self.diskactiveArray[0])+2)
for i in range(0,99):
cr.line_to((i+1)*stepsize,scalingfactor*(100-self.diskactiveArray[i+1])+2)
cr.stroke_preserve()
cr.set_source_rgba(.431,1,.04,0.25) #for changing the fill color
cr.line_to(w,h)
cr.line_to(0,h)
cr.move_to(0,scalingfactor*(100-self.diskactiveArray[0])+2)
cr.fill()
cr.stroke()
return False
def diskinit(self):
self.disklist=[]
self.disksize=[]
try:
p=popen('lsblk -d | grep -e ^NAME -e disk')
partitions=p.readlines()
p.close()
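            # on typical util-linux versions, lsblk's default columns are
            # NAME MAJ:MIN RM SIZE RO TYPE ..., so tempparts[0] is the device
            # name and tempparts[3] its size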
for parts in partitions:
tempparts=parts.split()
if 'NAME' not in tempparts[0] and 'zram' not in tempparts[0]:
self.disklist.append(tempparts[0])
self.disksize.append(tempparts[3])
print(tempparts[0])
except Exception as e:
print(f"Failed to get Disks: {e}")
pass
self.diskWidgetList={}
self.diskstate1=[]
self.diskActiveArray=[]
self.diskReadArray=[]
self.diskWriteArray=[]
self.numOfDisks=len(self.disklist)
# partitions
self.diskPartitions={}
self.diskListStores={}
self.diskListStoreItrs={}
partitions=ps.disk_partitions()
for i in range(0,self.numOfDisks):
self.diskWidgetList[i]=diskTabWidget()
self.performanceStack.add_titled(self.diskWidgetList[i],f'page{self.stack_counter}','Disk'+str(i))
self.stack_counter+=1
self.diskWidgetList[i].disktextlabel.set_text(self.disklist[i])
self.diskWidgetList[i].diskinfolabel.set_text(self.disksize[i])
disktemp=ps.disk_io_counters(perdisk=True)
self.diskt1=time()
for drives in disktemp:
if drives==self.disklist[i]:
self.diskstate1.append(disktemp[drives])
# partition info
self.diskPartitions[i]=[]
for part in partitions:
if self.disklist[i] in part[0]:
self.diskPartitions[i]+=[part]
## for treeview of disk usage
self.diskListStores[i]=g.ListStore(str,str,str,str,str,int,bool)
self.diskListStoreItrs[i]=[]
for part in self.diskPartitions[i]:
temp=ps.disk_usage(part[1])
itr=self.diskListStores[i].append([part[0],part[1],part[2],byte_to_human(temp[0],persec=False),byte_to_human(temp[1],persec=False),temp[3],False])
self.diskListStoreItrs[i].append(itr)
self.diskWidgetList[i].diskUsagesTreeView.set_model(self.diskListStores[i])
for k,col in enumerate(['Device','MountPoint','Type','Total','Used']):
renderer=g.CellRendererText()
if col=='Used':
column=g.TreeViewColumn(col)
progRenderer=g.CellRendererProgress()
# progRenderer.props.text='50%'
# progRenderer.props.fraction=0.5
column.pack_start(renderer,False)
column.add_attribute(renderer,"text",4)
column.pack_start(progRenderer,False)
column.add_attribute(progRenderer,"value",5)
# column=g.TreeViewColumn(col,progRenderer,value=5,inverted=6)
else:
column=g.TreeViewColumn(col,renderer,text=k)
column.set_sort_column_id(k)
column.set_resizable(True)
column.set_reorderable(True)
# column.set_expand(True)
column.set_alignment(0)
column.set_sort_indicator(True)
self.diskWidgetList[i].diskUsagesTreeView.append_column(column)
# self.processTreeStore.set_sort_func(i,sorting_func,None)
self.diskListStores[i].set_sort_func(3,sorting_func,None)
self.diskActiveArray.append([0]*100)
self.diskReadArray.append([0]*100)
self.diskWriteArray.append([0]*100)
self.diskWidgetList[i].givedata(self,i)
def diskTabUpdate(self):
disktemp=ps.disk_io_counters(perdisk=True)
self.diskt2=time()##
timediskDiff=self.diskt2-self.diskt1
self.diskstate2=[]
for i in range(0,self.numOfDisks):
try:
self.diskstate2.append(disktemp[self.disklist[i]])
for j,part in enumerate(self.diskPartitions[i]):
temp=ps.disk_usage(part[1])
self.diskListStores[i].set(self.diskListStoreItrs[i][j],3,byte_to_human(temp[0],persec=False),4,byte_to_human(temp[1],persec=False),5,temp[3])
except Exception as e:
print(f"error in diskliststore: {e}")
self.diskDiff,self.diskActiveString=[],[]
for i in range(0,self.numOfDisks):
try:
self.diskDiff.append([x2-x1 for x1,x2 in zip(self.diskstate1[i],self.diskstate2[i])])
self.diskActiveString.append(f'{int(self.diskDiff[i][8]/(10*timediskDiff))}%')
self.diskWidgetList[i].diskactivelabelvalue.set_text(self.diskActiveString[i])
self.diskWidgetList[i].diskreadlabelvalue.set_text("{:.1f} MiB/s".format(self.diskDiff[i][2]/(timediskDiff*1048576)))
self.diskWidgetList[i].diskwritelabelvalue.set_text("{:.1f} MiB/s".format(self.diskDiff[i][3]/(timediskDiff*1048576)))
if self.update_graph_direction:
self.diskActiveArray[i].pop(0)
self.diskActiveArray[i].append((self.diskDiff[i][8])/(10*timediskDiff))##
self.diskReadArray[i].pop(0)
self.diskReadArray[i].append(self.diskDiff[i][2]/(timediskDiff*1048576))
self.diskWriteArray[i].pop(0)
self.diskWriteArray[i].append(self.diskDiff[i][3]/(timediskDiff*1048576))
else:
self.diskActiveArray[i].pop()
self.diskActiveArray[i].insert(0,(self.diskDiff[i][8])/(10*timediskDiff))##
self.diskReadArray[i].pop()
self.diskReadArray[i].insert(0,self.diskDiff[i][2]/((timediskDiff)*1048576))
self.diskWriteArray[i].pop()
self.diskWriteArray[i].insert(0,self.diskDiff[i][3]/((timediskDiff)*1048576))
self.diskWidgetList[i].givedata(self,i)
except Exception as e:
print(f'error in disk update: {e}')
self.diskstate1=self.diskstate2
#print(self.diskt2-self.diskt1)
self.diskt1=self.diskt2
| avg_line_length: 38.65544 | max_line_length: 158 | alphanum_fraction: 0.622813 |

| hexsha: 5114e508a171b59b04de1f47545c20cd8ab4ca67 | size: 3182 | ext: py | lang: Python |
| repo_path: openGaussBase/testcase/SECURITY/PERMISSIONS/Opengauss_Function_Security_Schema_Case0003.py | repo: opengauss-mirror/Yat @ aef107a8304b94e5d99b4f1f36eb46755eb8919e | licenses: ["MulanPSL-1.0"] | stars/issues/forks: null |
"""
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
"""
Case Type : security-schema
Case Name   : A regular user granted the CREATE privilege on a database creates a schema
Description :
1.初始用户执行:create user wf with password '******';
create database wfdb;
GRANT CREATE ON DATABASE wfdb TO wf;
2.wf用户执行:CREATE SCHEMA schema01;
Expect :
1.CREATE SCHEMA
1.CREATE ROLE
2.PERMISSION DENIED
History :
"""
import unittest
from yat.test import Node
from yat.test import macro
from testcase.utils.CommonSH import *
from testcase.utils.Constant import Constant
from testcase.utils.Logger import Logger
logger = Logger()
class Privategrant(unittest.TestCase):
def setUp(self):
logger.info('----------------Opengauss_Function_Security_Schema_Case0003 start-----------------')
self.userNode = Node('PrimaryDbUser')
self.DB_ENV_PATH = macro.DB_ENV_PATH
self.DB_INSTANCE_PATH = macro.DB_INSTANCE_PATH
self.sh_primy = CommonSH('PrimaryDbUser')
self.Constant = Constant()
def test_schema(self):
logger.info('----------------------------create user || table-----------------------------')
sql_cmd1 = f'''create user wf with password '{macro.COMMON_PASSWD}';
create database wfdb;
GRANT CREATE ON DATABASE wfdb TO wf;
'''
sql_cmd2 = '''CREATE SCHEMA schema01;'''
excute_cmd1 = f'''
source {self.DB_ENV_PATH};
gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -c "{sql_cmd1}"
'''
        # log in to the user-defined database
excute_cmd2 = f'''
source {self.DB_ENV_PATH};
gsql -d wfdb -p {self.userNode.db_port} -U wf -W '{macro.COMMON_PASSWD}' -c "{sql_cmd2}"
'''
logger.info(excute_cmd1)
logger.info(excute_cmd2)
msg1 = self.userNode.sh(excute_cmd1).result()
logger.info(msg1)
self.assertIn(self.Constant.GRANT_SUCCESS_MSG, msg1)
msg2 = self.userNode.sh(excute_cmd2).result()
logger.info(msg2)
self.assertIn(self.Constant.CREATE_SCHEMA_SUCCESS_MSG, msg2)
def tearDown(self):
sql_cmd1 = '''drop database wfdb;
drop user if exists wf cascade;
'''
excute_cmd1 = f'''
source {self.DB_ENV_PATH};
gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -c "{sql_cmd1}"'''
logger.info(excute_cmd1)
msg1 = self.userNode.sh(excute_cmd1).result()
logger.info(msg1)
logger.info('-------------Opengauss_Function_Security_Schema_Case0003 finish------------------')
| avg_line_length: 37.880952 | max_line_length: 109 | alphanum_fraction: 0.604337 |

| hexsha: 1deffff14047ed4609d3219a59fab9709345d9f7 | size: 22856 | ext: py | lang: Python |
| repo_path: tests/inspectdb/tests.py | repo: Mr-Destructive/django @ d3a64bea51676fcf8a0ae593cf7b103939e12c87 | licenses: ["BSD-3-Clause", "0BSD"] |
| stars: 2 (2021-08-09T14:01:01.000Z to 2021-11-11T14:02:53.000Z) | issues: 3 (2020-01-21T17:58:28.000Z to 2022-03-30T14:16:15.000Z) | forks: 1 (2021-04-15T01:05:57.000Z to 2021-04-15T01:05:57.000Z) |
import os
import re
from io import StringIO
from unittest import mock, skipUnless
from django.core.management import call_command
from django.db import connection
from django.db.backends.base.introspection import TableInfo
from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature
from .models import PeopleMoreData, test_collation
def inspectdb_tables_only(table_name):
"""
Limit introspection to tables created for models of this app.
Some databases such as Oracle are extremely slow at introspection.
"""
return table_name.startswith('inspectdb_')
def inspectdb_views_only(table_name):
return (
table_name.startswith('inspectdb_') and
table_name.endswith(('_materialized', '_view'))
)
def special_table_only(table_name):
return table_name.startswith('inspectdb_special')
class InspectDBTestCase(TestCase):
unique_re = re.compile(r'.*unique_together = \((.+),\).*')
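    # Matches the tuple body that inspectdb emits; for
    # "unique_together = (('field1', 'field2'),)" the capture group is
    # "('field1', 'field2')".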
def test_stealth_table_name_filter_option(self):
out = StringIO()
call_command('inspectdb', table_name_filter=inspectdb_tables_only, stdout=out)
error_message = "inspectdb has examined a table that should have been filtered out."
# contrib.contenttypes is one of the apps always installed when running
        # the Django test suite, so check that one of its tables hasn't been
# inspected
self.assertNotIn("class DjangoContentType(models.Model):", out.getvalue(), msg=error_message)
def test_table_option(self):
"""
inspectdb can inspect a subset of tables by passing the table names as
arguments.
"""
out = StringIO()
call_command('inspectdb', 'inspectdb_people', stdout=out)
output = out.getvalue()
self.assertIn('class InspectdbPeople(models.Model):', output)
self.assertNotIn("InspectdbPeopledata", output)
def make_field_type_asserter(self):
"""Call inspectdb and return a function to validate a field type in its output"""
out = StringIO()
call_command('inspectdb', 'inspectdb_columntypes', stdout=out)
output = out.getvalue()
def assertFieldType(name, definition):
out_def = re.search(r'^\s*%s = (models.*)$' % name, output, re.MULTILINE)[1]
self.assertEqual(definition, out_def)
return assertFieldType
def test_field_types(self):
"""Test introspection of various Django field types"""
assertFieldType = self.make_field_type_asserter()
introspected_field_types = connection.features.introspected_field_types
char_field_type = introspected_field_types['CharField']
# Inspecting Oracle DB doesn't produce correct results (#19884):
# - it reports fields as blank=True when they aren't.
if not connection.features.interprets_empty_strings_as_nulls and char_field_type == 'CharField':
assertFieldType('char_field', "models.CharField(max_length=10)")
assertFieldType('null_char_field', "models.CharField(max_length=10, blank=True, null=True)")
assertFieldType('email_field', "models.CharField(max_length=254)")
assertFieldType('file_field', "models.CharField(max_length=100)")
assertFieldType('file_path_field', "models.CharField(max_length=100)")
assertFieldType('slug_field', "models.CharField(max_length=50)")
assertFieldType('text_field', "models.TextField()")
assertFieldType('url_field', "models.CharField(max_length=200)")
if char_field_type == 'TextField':
assertFieldType('char_field', 'models.TextField()')
assertFieldType('null_char_field', 'models.TextField(blank=True, null=True)')
assertFieldType('email_field', 'models.TextField()')
assertFieldType('file_field', 'models.TextField()')
assertFieldType('file_path_field', 'models.TextField()')
assertFieldType('slug_field', 'models.TextField()')
assertFieldType('text_field', 'models.TextField()')
assertFieldType('url_field', 'models.TextField()')
assertFieldType('date_field', "models.DateField()")
assertFieldType('date_time_field', "models.DateTimeField()")
if introspected_field_types['GenericIPAddressField'] == 'GenericIPAddressField':
assertFieldType('gen_ip_address_field', "models.GenericIPAddressField()")
elif not connection.features.interprets_empty_strings_as_nulls:
assertFieldType('gen_ip_address_field', "models.CharField(max_length=39)")
assertFieldType('time_field', 'models.%s()' % introspected_field_types['TimeField'])
if connection.features.has_native_uuid_field:
assertFieldType('uuid_field', "models.UUIDField()")
elif not connection.features.interprets_empty_strings_as_nulls:
assertFieldType('uuid_field', "models.CharField(max_length=32)")
@skipUnlessDBFeature('can_introspect_json_field', 'supports_json_field')
def test_json_field(self):
out = StringIO()
call_command('inspectdb', 'inspectdb_jsonfieldcolumntype', stdout=out)
output = out.getvalue()
if not connection.features.interprets_empty_strings_as_nulls:
self.assertIn('json_field = models.JSONField()', output)
self.assertIn('null_json_field = models.JSONField(blank=True, null=True)', output)
@skipUnlessDBFeature('supports_collation_on_charfield')
@skipUnless(test_collation, 'Language collations are not supported.')
def test_char_field_db_collation(self):
out = StringIO()
call_command('inspectdb', 'inspectdb_charfielddbcollation', stdout=out)
output = out.getvalue()
if not connection.features.interprets_empty_strings_as_nulls:
self.assertIn(
"char_field = models.CharField(max_length=10, "
"db_collation='%s')" % test_collation,
output,
)
else:
self.assertIn(
"char_field = models.CharField(max_length=10, "
"db_collation='%s', blank=True, null=True)" % test_collation,
output,
)
@skipUnlessDBFeature('supports_collation_on_textfield')
@skipUnless(test_collation, 'Language collations are not supported.')
def test_text_field_db_collation(self):
out = StringIO()
call_command('inspectdb', 'inspectdb_textfielddbcollation', stdout=out)
output = out.getvalue()
if not connection.features.interprets_empty_strings_as_nulls:
self.assertIn(
"text_field = models.TextField(db_collation='%s')" % test_collation,
output,
)
else:
self.assertIn(
"text_field = models.TextField(db_collation='%s, blank=True, "
"null=True)" % test_collation,
output,
)
def test_number_field_types(self):
"""Test introspection of various Django field types"""
assertFieldType = self.make_field_type_asserter()
introspected_field_types = connection.features.introspected_field_types
auto_field_type = connection.features.introspected_field_types['AutoField']
if auto_field_type != 'AutoField':
assertFieldType('id', "models.%s(primary_key=True) # AutoField?" % auto_field_type)
assertFieldType('big_int_field', 'models.%s()' % introspected_field_types['BigIntegerField'])
bool_field_type = introspected_field_types['BooleanField']
assertFieldType('bool_field', "models.{}()".format(bool_field_type))
assertFieldType('null_bool_field', 'models.{}(blank=True, null=True)'.format(bool_field_type))
if connection.vendor != 'sqlite':
assertFieldType('decimal_field', "models.DecimalField(max_digits=6, decimal_places=1)")
else: # Guessed arguments on SQLite, see #5014
assertFieldType('decimal_field', "models.DecimalField(max_digits=10, decimal_places=5) "
"# max_digits and decimal_places have been guessed, "
"as this database handles decimal fields as float")
assertFieldType('float_field', "models.FloatField()")
assertFieldType('int_field', 'models.%s()' % introspected_field_types['IntegerField'])
assertFieldType('pos_int_field', 'models.%s()' % introspected_field_types['PositiveIntegerField'])
assertFieldType('pos_big_int_field', 'models.%s()' % introspected_field_types['PositiveBigIntegerField'])
assertFieldType('pos_small_int_field', 'models.%s()' % introspected_field_types['PositiveSmallIntegerField'])
assertFieldType('small_int_field', 'models.%s()' % introspected_field_types['SmallIntegerField'])
@skipUnlessDBFeature('can_introspect_foreign_keys')
def test_attribute_name_not_python_keyword(self):
out = StringIO()
call_command('inspectdb', table_name_filter=inspectdb_tables_only, stdout=out)
output = out.getvalue()
error_message = "inspectdb generated an attribute name which is a Python keyword"
# Recursive foreign keys should be set to 'self'
self.assertIn("parent = models.ForeignKey('self', models.DO_NOTHING)", output)
self.assertNotIn(
"from = models.ForeignKey(InspectdbPeople, models.DO_NOTHING)",
output,
msg=error_message,
)
        # As the InspectdbPeople model is defined after InspectdbMessage, it should be quoted
self.assertIn(
"from_field = models.ForeignKey('InspectdbPeople', models.DO_NOTHING, db_column='from_id')",
output,
)
self.assertIn(
'people_pk = models.OneToOneField(InspectdbPeople, models.DO_NOTHING, primary_key=True)',
output,
)
self.assertIn(
'people_unique = models.OneToOneField(InspectdbPeople, models.DO_NOTHING)',
output,
)
@skipUnlessDBFeature('can_introspect_foreign_keys')
def test_foreign_key_to_field(self):
out = StringIO()
call_command('inspectdb', 'inspectdb_foreignkeytofield', stdout=out)
self.assertIn(
"to_field_fk = models.ForeignKey('InspectdbPeoplemoredata', "
"models.DO_NOTHING, to_field='people_unique_id')",
out.getvalue(),
)
def test_digits_column_name_introspection(self):
"""Introspection of column names consist/start with digits (#16536/#17676)"""
char_field_type = connection.features.introspected_field_types['CharField']
out = StringIO()
call_command('inspectdb', 'inspectdb_digitsincolumnname', stdout=out)
output = out.getvalue()
error_message = "inspectdb generated a model field name which is a number"
self.assertNotIn(' 123 = models.%s' % char_field_type, output, msg=error_message)
self.assertIn('number_123 = models.%s' % char_field_type, output)
error_message = "inspectdb generated a model field name which starts with a digit"
self.assertNotIn(' 4extra = models.%s' % char_field_type, output, msg=error_message)
self.assertIn('number_4extra = models.%s' % char_field_type, output)
self.assertNotIn(' 45extra = models.%s' % char_field_type, output, msg=error_message)
self.assertIn('number_45extra = models.%s' % char_field_type, output)
def test_special_column_name_introspection(self):
"""
Introspection of column names containing special characters,
unsuitable for Python identifiers
"""
out = StringIO()
call_command('inspectdb', table_name_filter=special_table_only, stdout=out)
output = out.getvalue()
base_name = connection.introspection.identifier_converter('Field')
integer_field_type = connection.features.introspected_field_types['IntegerField']
self.assertIn("field = models.%s()" % integer_field_type, output)
self.assertIn("field_field = models.%s(db_column='%s_')" % (integer_field_type, base_name), output)
self.assertIn("field_field_0 = models.%s(db_column='%s__')" % (integer_field_type, base_name), output)
self.assertIn("field_field_1 = models.%s(db_column='__field')" % integer_field_type, output)
self.assertIn("prc_x = models.{}(db_column='prc(%) x')".format(integer_field_type), output)
self.assertIn("tamaño = models.%s()" % integer_field_type, output)
def test_table_name_introspection(self):
"""
Introspection of table names containing special characters,
unsuitable for Python identifiers
"""
out = StringIO()
call_command('inspectdb', table_name_filter=special_table_only, stdout=out)
output = out.getvalue()
self.assertIn("class InspectdbSpecialTableName(models.Model):", output)
def test_managed_models(self):
"""By default the command generates models with `Meta.managed = False` (#14305)"""
out = StringIO()
call_command('inspectdb', 'inspectdb_columntypes', stdout=out)
output = out.getvalue()
self.longMessage = False
self.assertIn(" managed = False", output, msg='inspectdb should generate unmanaged models.')
def test_unique_together_meta(self):
out = StringIO()
call_command('inspectdb', 'inspectdb_uniquetogether', stdout=out)
output = out.getvalue()
self.assertIn(" unique_together = (('", output)
unique_together_match = self.unique_re.findall(output)
# There should be one unique_together tuple.
self.assertEqual(len(unique_together_match), 1)
fields = unique_together_match[0]
# Fields with db_column = field name.
self.assertIn("('field1', 'field2')", fields)
# Fields from columns whose names are Python keywords.
self.assertIn("('field1', 'field2')", fields)
# Fields whose names normalize to the same Python field name and hence
# are given an integer suffix.
self.assertIn("('non_unique_column', 'non_unique_column_0')", fields)
@skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific SQL')
def test_unsupported_unique_together(self):
"""Unsupported index types (COALESCE here) are skipped."""
with connection.cursor() as c:
c.execute(
'CREATE UNIQUE INDEX Findex ON %s '
'(id, people_unique_id, COALESCE(message_id, -1))' % PeopleMoreData._meta.db_table
)
try:
out = StringIO()
call_command(
'inspectdb',
table_name_filter=lambda tn: tn.startswith(PeopleMoreData._meta.db_table),
stdout=out,
)
output = out.getvalue()
self.assertIn('# A unique constraint could not be introspected.', output)
self.assertEqual(self.unique_re.findall(output), ["('id', 'people_unique')"])
finally:
with connection.cursor() as c:
c.execute('DROP INDEX Findex')
@skipUnless(connection.vendor == 'sqlite',
"Only patched sqlite's DatabaseIntrospection.data_types_reverse for this test")
def test_custom_fields(self):
"""
Introspection of columns with a custom field (#21090)
"""
out = StringIO()
orig_data_types_reverse = connection.introspection.data_types_reverse
try:
connection.introspection.data_types_reverse = {
'text': 'myfields.TextField',
'bigint': 'BigIntegerField',
}
call_command('inspectdb', 'inspectdb_columntypes', stdout=out)
output = out.getvalue()
self.assertIn("text_field = myfields.TextField()", output)
self.assertIn("big_int_field = models.BigIntegerField()", output)
finally:
connection.introspection.data_types_reverse = orig_data_types_reverse
def test_introspection_errors(self):
"""
Introspection errors should not crash the command, and the error should
be visible in the output.
"""
out = StringIO()
with mock.patch('django.db.connection.introspection.get_table_list',
return_value=[TableInfo(name='nonexistent', type='t')]):
call_command('inspectdb', stdout=out)
output = out.getvalue()
self.assertIn("# Unable to inspect table 'nonexistent'", output)
# The error message depends on the backend
self.assertIn("# The error was:", output)
class InspectDBTransactionalTests(TransactionTestCase):
available_apps = ['inspectdb']
def test_include_views(self):
"""inspectdb --include-views creates models for database views."""
with connection.cursor() as cursor:
cursor.execute(
'CREATE VIEW inspectdb_people_view AS '
'SELECT id, name FROM inspectdb_people'
)
out = StringIO()
view_model = 'class InspectdbPeopleView(models.Model):'
view_managed = 'managed = False # Created from a view.'
try:
call_command(
'inspectdb',
table_name_filter=inspectdb_views_only,
stdout=out,
)
no_views_output = out.getvalue()
self.assertNotIn(view_model, no_views_output)
self.assertNotIn(view_managed, no_views_output)
call_command(
'inspectdb',
table_name_filter=inspectdb_views_only,
include_views=True,
stdout=out,
)
with_views_output = out.getvalue()
self.assertIn(view_model, with_views_output)
self.assertIn(view_managed, with_views_output)
finally:
with connection.cursor() as cursor:
cursor.execute('DROP VIEW inspectdb_people_view')
@skipUnlessDBFeature('can_introspect_materialized_views')
def test_include_materialized_views(self):
"""inspectdb --include-views creates models for materialized views."""
with connection.cursor() as cursor:
cursor.execute(
'CREATE MATERIALIZED VIEW inspectdb_people_materialized AS '
'SELECT id, name FROM inspectdb_people'
)
out = StringIO()
view_model = 'class InspectdbPeopleMaterialized(models.Model):'
view_managed = 'managed = False # Created from a view.'
try:
call_command(
'inspectdb',
table_name_filter=inspectdb_views_only,
stdout=out,
)
no_views_output = out.getvalue()
self.assertNotIn(view_model, no_views_output)
self.assertNotIn(view_managed, no_views_output)
call_command(
'inspectdb',
table_name_filter=inspectdb_views_only,
include_views=True,
stdout=out,
)
with_views_output = out.getvalue()
self.assertIn(view_model, with_views_output)
self.assertIn(view_managed, with_views_output)
finally:
with connection.cursor() as cursor:
cursor.execute('DROP MATERIALIZED VIEW inspectdb_people_materialized')
@skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific SQL')
def test_include_partitions(self):
"""inspectdb --include-partitions creates models for partitions."""
with connection.cursor() as cursor:
cursor.execute('''\
CREATE TABLE inspectdb_partition_parent (name text not null)
PARTITION BY LIST (left(upper(name), 1))
''')
cursor.execute('''\
CREATE TABLE inspectdb_partition_child
PARTITION OF inspectdb_partition_parent
FOR VALUES IN ('A', 'B', 'C')
''')
out = StringIO()
partition_model_parent = 'class InspectdbPartitionParent(models.Model):'
partition_model_child = 'class InspectdbPartitionChild(models.Model):'
partition_managed = 'managed = False # Created from a partition.'
try:
call_command('inspectdb', table_name_filter=inspectdb_tables_only, stdout=out)
no_partitions_output = out.getvalue()
self.assertIn(partition_model_parent, no_partitions_output)
self.assertNotIn(partition_model_child, no_partitions_output)
self.assertNotIn(partition_managed, no_partitions_output)
call_command('inspectdb', table_name_filter=inspectdb_tables_only, include_partitions=True, stdout=out)
with_partitions_output = out.getvalue()
self.assertIn(partition_model_parent, with_partitions_output)
self.assertIn(partition_model_child, with_partitions_output)
self.assertIn(partition_managed, with_partitions_output)
finally:
with connection.cursor() as cursor:
cursor.execute('DROP TABLE IF EXISTS inspectdb_partition_child')
cursor.execute('DROP TABLE IF EXISTS inspectdb_partition_parent')
@skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific SQL')
def test_foreign_data_wrapper(self):
with connection.cursor() as cursor:
cursor.execute('CREATE EXTENSION IF NOT EXISTS file_fdw')
cursor.execute('CREATE SERVER inspectdb_server FOREIGN DATA WRAPPER file_fdw')
cursor.execute('''\
CREATE FOREIGN TABLE inspectdb_iris_foreign_table (
petal_length real,
petal_width real,
sepal_length real,
sepal_width real
) SERVER inspectdb_server OPTIONS (
filename %s
)
''', [os.devnull])
out = StringIO()
foreign_table_model = 'class InspectdbIrisForeignTable(models.Model):'
foreign_table_managed = 'managed = False'
try:
call_command(
'inspectdb',
table_name_filter=inspectdb_tables_only,
stdout=out,
)
output = out.getvalue()
self.assertIn(foreign_table_model, output)
self.assertIn(foreign_table_managed, output)
finally:
with connection.cursor() as cursor:
cursor.execute('DROP FOREIGN TABLE IF EXISTS inspectdb_iris_foreign_table')
cursor.execute('DROP SERVER IF EXISTS inspectdb_server')
cursor.execute('DROP EXTENSION IF EXISTS file_fdw')
| 48.016807
| 117
| 0.649632
|
1ce8132e6a9a756ae0a6c3f15f7c41c2fdbc4b29
| 2,646
|
py
|
Python
|
examples/howto/server_embed/flask_gunicorn_embed.py
|
kevin1kevin1k/bokeh
|
9f34b5b710e2748ec803c12918ec1706098a3477
|
[
"BSD-3-Clause"
] | 17
|
2020-06-14T03:47:35.000Z
|
2022-03-07T00:25:23.000Z
|
examples/howto/server_embed/flask_gunicorn_embed.py
|
kevin1kevin1k/bokeh
|
9f34b5b710e2748ec803c12918ec1706098a3477
|
[
"BSD-3-Clause"
] | 12
|
2020-07-22T22:40:09.000Z
|
2021-03-17T14:10:27.000Z
|
examples/howto/server_embed/flask_gunicorn_embed.py
|
kevin1kevin1k/bokeh
|
9f34b5b710e2748ec803c12918ec1706098a3477
|
[
"BSD-3-Clause"
] | 8
|
2020-06-14T03:47:23.000Z
|
2021-11-20T15:14:04.000Z
|
try:
import asyncio
except ImportError:
raise RuntimeError("This example requries Python3 / asyncio")
from flask import Flask, render_template
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from bokeh.application import Application
from bokeh.application.handlers import FunctionHandler
from bokeh.embed import server_document
from bokeh.layouts import column
from bokeh.models import ColumnDataSource, Slider
from bokeh.plotting import figure
from bokeh.server.server import BaseServer
from bokeh.server.tornado import BokehTornado
from bokeh.server.util import bind_sockets
from bokeh.themes import Theme
if __name__ == '__main__':
print('This script is intended to be run with gunicorn. e.g.')
print()
print(' gunicorn -w 4 flask_gunicorn_embed:app')
print()
print('will start the app on four processes')
import sys
sys.exit()
from bokeh.sampledata.sea_surface_temperature import sea_surface_temperature
app = Flask(__name__)
def modify_doc(doc):
df = sea_surface_temperature.copy()
source = ColumnDataSource(data=df)
plot = figure(x_axis_type='datetime', y_range=(0, 25), y_axis_label='Temperature (Celsius)',
title="Sea Surface Temperature at 43.18, -70.43")
plot.line('time', 'temperature', source=source)
def callback(attr, old, new):
if new == 0:
data = df
else:
data = df.rolling('{0}D'.format(new)).mean()
source.data = ColumnDataSource(data=data).data
slider = Slider(start=0, end=30, value=0, step=1, title="Smoothing by N Days")
slider.on_change('value', callback)
doc.add_root(column(slider, plot))
doc.theme = Theme(filename="theme.yaml")
# can't use shortcuts here, since we are passing to low level BokehTornado
bkapp = Application(FunctionHandler(modify_doc))
# This is so that if this app is run using something like "gunicorn -w 4" then
# each process will listen on its own port
sockets, port = bind_sockets("localhost", 0)
@app.route('/', methods=['GET'])
def bkapp_page():
script = server_document('http://localhost:%d/bkapp' % port)
return render_template("embed.html", script=script, template="Flask")
def bk_worker():
asyncio.set_event_loop(asyncio.new_event_loop())
bokeh_tornado = BokehTornado({'/bkapp': bkapp}, extra_websocket_origins=["localhost:8000"])
bokeh_http = HTTPServer(bokeh_tornado)
bokeh_http.add_sockets(sockets)
server = BaseServer(IOLoop.current(), bokeh_tornado, bokeh_http)
server.start()
server.io_loop.start()
from threading import Thread
Thread(target=bk_worker).start()
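# The render_template() call above assumes an "embed.html" Jinja2 template is
# available to Flask. A minimal sketch of what that template is expected to
# contain (an illustration, not a verbatim copy of the example's template):
#
#     <!doctype html>
#     <html lang="en">
#       <body>
#         <h1>Bokeh app embedded with {{ template }}</h1>
#         {{ script|safe }}
#       </body>
#     </html>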
| 32.268293
| 96
| 0.726757
|
74c2ababb2e4ff79c82bb2b693fd769f9893592f
| 125
|
py
|
Python
|
plugins/zmcms_cms.py
|
cflq3/getcms
|
6cf07da0ea3ec644866df715cff1f311a46ee378
|
[
"MIT"
] | 22
|
2016-09-01T08:27:07.000Z
|
2021-01-11T13:32:59.000Z
|
plugins/zmcms_cms.py
|
cflq3/getcms
|
6cf07da0ea3ec644866df715cff1f311a46ee378
|
[
"MIT"
] | null | null | null |
plugins/zmcms_cms.py
|
cflq3/getcms
|
6cf07da0ea3ec644866df715cff1f311a46ee378
|
[
"MIT"
] | 20
|
2015-11-07T19:09:48.000Z
|
2018-05-02T03:10:41.000Z
|
#!/usr/bin/env python
# encoding: utf-8
def run(whatweb, pluginname):
    # Report a zmcms match when the marker string appears in the response content.
    whatweb.recog_from_content(pluginname, "zmcms")
| 15.625
| 51
| 0.72
|
62f4b5007ef454a4a6ae90fa7b29317345e08317
| 427
|
py
|
Python
|
accounts/views.py
|
MasqueCrow/Social-Project
|
6a8c8c273208a4930acdb53f3e7265a152adc71c
|
[
"MIT"
] | null | null | null |
accounts/views.py
|
MasqueCrow/Social-Project
|
6a8c8c273208a4930acdb53f3e7265a152adc71c
|
[
"MIT"
] | 5
|
2021-03-19T03:52:05.000Z
|
2021-06-10T19:18:26.000Z
|
accounts/views.py
|
MasqueCrow/Social-Project
|
6a8c8c273208a4930acdb53f3e7265a152adc71c
|
[
"MIT"
] | 1
|
2021-08-24T18:06:27.000Z
|
2021-08-24T18:06:27.000Z
|
from django.contrib.auth import login,logout
from django.urls import reverse_lazy
from django.views.generic import CreateView
from accounts import forms
# Create your views here.
class SignUp(CreateView):
form_class = forms.UserCreateForm
    # Once the user has successfully submitted the sign-up form,
    # redirect them to the login page.
success_url = reverse_lazy("accounts:login")
template_name = 'accounts/signup.html'
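# A minimal sketch (an assumption, not this repo's actual accounts/forms.py)
# of the UserCreateForm imported above, built on Django's stock
# UserCreationForm so the CreateView can create and save a new user:
#
# from django.contrib.auth import get_user_model
# from django.contrib.auth.forms import UserCreationForm
#
# class UserCreateForm(UserCreationForm):
#     class Meta:
#         model = get_user_model()
#         fields = ('username', 'email', 'password1', 'password2')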
| 32.846154
| 57
| 0.782201
|
961b41ac7e12348d2cd9bb21a06c9a3f33d3b4af
| 4,545
|
py
|
Python
|
tests/test_message.py
|
jfkinslow/flask-mailing
|
dda99214b783b60fabc7dfad209fff4438eaf61c
|
[
"MIT"
] | null | null | null |
tests/test_message.py
|
jfkinslow/flask-mailing
|
dda99214b783b60fabc7dfad209fff4438eaf61c
|
[
"MIT"
] | null | null | null |
tests/test_message.py
|
jfkinslow/flask-mailing
|
dda99214b783b60fabc7dfad209fff4438eaf61c
|
[
"MIT"
] | null | null | null |
import pytest
from flask_mailing.schemas import Message, MultipartSubtypeEnum
from flask_mailing.msg import MailMsg
import os
CONTENT = "file test content"
def test_initialize():
message = Message(
subject="test subject",
recipients=["uzezio22@gmail.com"],
body="test",
subtype="plain"
)
assert message.subject == "test subject"
def test_recipients_properly_initialized():
message = Message(
subject="test subject",
recipients=[],
body="test",
subtype="plain"
)
assert message.recipients == []
def test_add_recipient_method():
message = Message(
subject="test subject",
recipients=[],
body="test",
subtype="plain"
)
message.add_recipient("aniketsarkar@yahoo.com")
assert message.recipients == ["aniketsarkar@yahoo.com"]
def test_sendto_properly_set():
msg = Message(subject="subject", recipients=["somebody@here.com", "somebody2@here.com"],
cc=["cc@example.com"], bcc=["bcc@example.com"], reply_to=["replyto@example.com"])
assert len(msg.recipients) == 2
assert len(msg.cc) == 1
assert len(msg.bcc) == 1
assert len(msg.reply_to) == 1
def test_plain_message():
message = Message(
subject="test subject",
recipients=["uzezio22@gmail.com"],
body="test",
subtype="plain"
)
assert message.body == "test"
def test_charset():
message = Message(
subject="test subject",
recipients=["uzezio22@gmail.com"],
body="test",
subtype="plain"
)
assert message.charset == "utf-8"
def test_message_str():
message = Message(
subject="test subject",
recipients=["uzezio22@gmail.com"],
body="test",
subtype="plain"
)
assert type(message.body) == str
def test_plain_message_with_attachments():
directory = os.getcwd()
attachement = directory + "/files/attachement.txt"
msg = Message(subject="testing",
recipients=["to@example.com"],
attachments=[attachement],
body="test mail body")
with open(attachement, "w") as file:
file.write(CONTENT)
assert len(msg.attachments) == 1
def test_plain_message_with_attach_method():
directory = os.getcwd()
attachement = directory + "/files/attachement_1.txt"
msg = Message(subject="testing",
recipients=["to@example.com"],
body="test mail body")
with open(attachement, "w") as file:
file.write(CONTENT)
with open(attachement, "rb") as fp:
msg.attach("attachement_1.txt", fp.read())
assert len(msg.attachments) == 1
def test_empty_subject_header():
message = Message(
subject="",
recipients=["uzezio22@gmail.com"],
body="test",
subtype="plain"
)
assert len(message.subject) == 0
def test_bcc():
msg = Message(subject="subject", recipients=[],
bcc=["bcc@example.com"])
assert len(msg.bcc) == 1
assert msg.bcc == ["bcc@example.com"]
def test_replyto():
msg = Message(subject="subject", recipients=[],
reply_to=["replyto@example.com"])
assert len(msg.reply_to) == 1
assert msg.reply_to == ["replyto@example.com"]
def test_cc():
msg = Message(subject="subject", recipients=[],
cc=["cc@example.com"])
assert len(msg.cc) == 1
assert msg.cc == ["cc@example.com"]
def test_multipart_subtype():
message = Message(
subject="test subject",
recipients=["to@example.com"],
body="test",
subtype="plain"
)
assert message.multipart_subtype == MultipartSubtypeEnum.mixed
@pytest.mark.asyncio
async def test_msgid_header():
message = Message(
subject="test subject",
recipients=["sp001@gmail.com"],
body="test",
subtype="plain"
)
msg = MailMsg(**message.dict())
msg_object = await msg._message('test@example.com')
assert msg_object['Message-ID'] is not None
@pytest.mark.asyncio
async def test_message_charset():
message = Message(
subject="test subject",
recipients=["uzezio22@gmail.com"],
body="test",
subtype="plain"
)
msg = MailMsg(**message.dict())
msg_object = await msg._message('test@example.com')
assert msg_object._charset is not None
assert msg_object._charset == "utf-8"
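# The attachment tests above write into a "./files" directory relative to the
# working directory and fail if it is missing. A minimal sketch (an addition
# for illustration, not part of the original suite) of an autouse fixture
# that would create it before each test:
@pytest.fixture(autouse=True)
def _ensure_files_dir():
    # pytest and os are already imported at the top of this module.
    os.makedirs(os.path.join(os.getcwd(), "files"), exist_ok=True)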
| 24.175532
| 105
| 0.59824
|
61deb4d774467746acc59872062b6cf9cde48c46
| 3,762
|
py
|
Python
|
mapclientplugins/monodomain2dstep/step.py
|
mapclient-plugins/mapclientplugins.monodomain2d
|
7329558abba85f46d8802d2efe01966c188739e1
|
[
"Apache-2.0"
] | null | null | null |
mapclientplugins/monodomain2dstep/step.py
|
mapclient-plugins/mapclientplugins.monodomain2d
|
7329558abba85f46d8802d2efe01966c188739e1
|
[
"Apache-2.0"
] | null | null | null |
mapclientplugins/monodomain2dstep/step.py
|
mapclient-plugins/mapclientplugins.monodomain2d
|
7329558abba85f46d8802d2efe01966c188739e1
|
[
"Apache-2.0"
] | 1
|
2021-06-03T02:51:18.000Z
|
2021-06-03T02:51:18.000Z
|
'''
MAP Client Plugin Step
'''
import json
from PySide import QtGui
from mapclient.mountpoints.workflowstep import WorkflowStepMountPoint
from mapclientplugins.monodomain2dstep.configuredialog import ConfigureDialog
from mapclientplugins.monodomain2dstep.mono2dwidget import Mono2DWidget
class monodomain2dStep(WorkflowStepMountPoint):
'''
Skeleton step which is intended to be a helpful starting point
for new steps.
'''
def __init__(self, location):
super(monodomain2dStep, self).__init__('monodomain2d', location)
self._configured = False # A step cannot be executed until it has been configured.
self._category = 'Fitting'
# Add any other initialisation code here:
self._icon = QtGui.QImage(':/monodomain2dstep/images/fitting.png')
# Ports:
self.addPort(('http://physiomeproject.org/workflow/1.0/rdf-schema#port',
'http://physiomeproject.org/workflow/1.0/rdf-schema#uses',
'sedml'))
# Port data:
self._data_location = None # sedml location
# Config:
self._config = {}
self._config['identifier'] = ''
self._view = None
def execute(self):
'''
Add your code here that will kick off the execution of the step.
Make sure you call the _doneExecution() method when finished. This method
may be connected up to a button in a widget for example.
'''
if self._view is None:
self._view = Mono2DWidget()
self._view.registerCallback(self._doneExecution)
self._view.initialise(self._data_location)
self._setCurrentWidget(self._view)
def setPortData(self, index, dataIn):
'''
Add your code here that will set the appropriate objects for this step.
The index is the index of the port in the port list. If there is only one
uses port for this step then the index can be ignored.
'''
        self._data_location = dataIn  # SED-ML document location from the 'sedml' uses port
def configure(self):
'''
This function will be called when the configure icon on the step is
clicked. It is appropriate to display a configuration dialog at this
time. If the conditions for the configuration of this step are complete
then set:
self._configured = True
'''
dlg = ConfigureDialog()
dlg.identifierOccursCount = self._identifierOccursCount
dlg.setConfig(self._config)
dlg.validate()
dlg.setModal(True)
if dlg.exec_():
self._config = dlg.getConfig()
self._configured = dlg.validate()
self._configuredObserver()
def getIdentifier(self):
'''
The identifier is a string that must be unique within a workflow.
'''
return self._config['identifier']
def setIdentifier(self, identifier):
'''
The framework will set the identifier for this step when it is loaded.
'''
self._config['identifier'] = identifier
def serialize(self):
'''
Add code to serialize this step to string. This method should
implement the opposite of 'deserialize'.
'''
return json.dumps(self._config, default=lambda o: o.__dict__, sort_keys=True, indent=4)
def deserialize(self, string):
'''
Add code to deserialize this step from string. This method should
implement the opposite of 'serialize'.
'''
self._config.update(json.loads(string))
d = ConfigureDialog()
d.identifierOccursCount = self._identifierOccursCount
d.setConfig(self._config)
self._configured = d.validate()
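# A hedged round-trip sketch (illustrative only; the location argument is a
# placeholder, and constructing the step requires a running Qt application):
# serialize() emits the JSON that deserialize() restores, so a step's
# configuration survives a workflow save/load cycle.
#
# step = monodomain2dStep('/tmp/workflow')
# step.setIdentifier('mono-1')
# saved = step.serialize()
# restored = monodomain2dStep('/tmp/workflow')
# restored.deserialize(saved)
# assert restored.getIdentifier() == 'mono-1'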
| 33
| 95
| 0.636098
|
6c9081be76d6dc85ae8f784e4027a8c6e4bdcf3f
| 468
|
py
|
Python
|
pdb_ipython_nose.py
|
ryneeverett/ipython_nose
|
14161dc23ac7aab123304874442044b9fe3e10ff
|
[
"Unlicense"
] | 41
|
2015-01-21T16:16:51.000Z
|
2022-01-20T08:49:49.000Z
|
pdb_ipython_nose.py
|
ryneeverett/ipython_nose
|
14161dc23ac7aab123304874442044b9fe3e10ff
|
[
"Unlicense"
] | 6
|
2017-09-01T12:51:10.000Z
|
2018-02-08T09:37:40.000Z
|
pdb_ipython_nose.py
|
ryneeverett/ipython_nose
|
14161dc23ac7aab123304874442044b9fe3e10ff
|
[
"Unlicense"
] | 8
|
2015-04-10T18:00:41.000Z
|
2020-05-31T15:17:28.000Z
|
import types
import ipython_nose
test_module = types.ModuleType('test_module')
from nose.plugins.skip import SkipTest
def test_foo():
assert True
def test_bar():
assert False
def test_baz():
raise Exception()
def test_quux():
raise SkipTest()
test_module.test_foo = test_foo
test_module.test_bar = test_bar
test_module.test_baz = test_baz
test_module.test_quux = test_quux
plugin = ipython_nose.nose('', test_module)
print(plugin._repr_html_())
| 17.333333
| 45
| 0.762821
|
c300293233ff6c2007b83884e8c2183daef17484
| 23,080
|
py
|
Python
|
US_coronavirus_map.py
|
rashecl/COVID_Inisghts
|
c6be339512c4ffa9cf94cfc567f597e160c068b2
|
[
"MIT"
] | null | null | null |
US_coronavirus_map.py
|
rashecl/COVID_Inisghts
|
c6be339512c4ffa9cf94cfc567f597e160c068b2
|
[
"MIT"
] | null | null | null |
US_coronavirus_map.py
|
rashecl/COVID_Inisghts
|
c6be339512c4ffa9cf94cfc567f597e160c068b2
|
[
"MIT"
] | null | null | null |
import matplotlib
matplotlib.use('Agg')
import numpy as np
from bokeh.io import show
from bokeh.layouts import column, row
from bokeh.io import curdoc
from bokeh.models import LogColorMapper, LinearColorMapper, ColorBar, ColumnDataSource, LogTicker, RadioGroup, Div
from bokeh.models import WheelZoomTool, TapTool, SaveTool, ResetTool, PanTool, HoverTool, Range1d, BoxZoomTool, \
FuncTickFormatter
from bokeh.models import TickFormatter
from bokeh.palettes import RdYlBu10 as palette, all_palettes
from bokeh.plotting import figure
from COVID.extract import COVID_counts
import pandas as pd
import pickle
[stateBorders, countyBorders] = pickle.load(open("./COVID/extract/regionBorders.p", "rb"))
[usPopulation, statePopulations, countyPopulations] = pickle.load(open("./COVID/extract/regionPopulations.p", "rb"))
[countyDF, stateDF_NYT, stateDF_CT, usDF_NYT, usDF_CT, lastUpdated] = pickle.load(
open("./COVID/extract/CovidCounts.p", "rb"))
print(lastUpdated)
# palette = tuple(palette)
palette = tuple([all_palettes['Turbo'][256][idx] for idx in range(50, 256)])
# color_mapper = LinearColorMapper(palette=palette)
color_mapper = LogColorMapper(palette=palette, low=1, high=200000)
us_TOOLS = [BoxZoomTool(), PanTool(), WheelZoomTool(), TapTool(), HoverTool(), ResetTool()]
state_TOOLS = [BoxZoomTool(), PanTool(), WheelZoomTool(), TapTool(), HoverTool(), ResetTool()]
cumul_TOOLS = [BoxZoomTool(), PanTool(), WheelZoomTool(), ResetTool(), SaveTool()]
daily_TOOLS = [BoxZoomTool(), PanTool(), WheelZoomTool(), ResetTool(), SaveTool()]
cumulCritical_TOOLS = [BoxZoomTool(), PanTool(), WheelZoomTool(), ResetTool(), SaveTool()]
dailyCritical_TOOLS = [BoxZoomTool(), PanTool(), WheelZoomTool(), ResetTool(), SaveTool()]
dailyDeath_TOOLS = [BoxZoomTool(), PanTool(), WheelZoomTool(), ResetTool(), SaveTool()]
colorBySelector = RadioGroup(labels=["positive", "death", "totalTestResults",
"hospitalizedCurrently", 'inIcuCurrently'], active=0)
# A) Define data and plot structures
# 1) Map of US
usData = ColumnDataSource(data=dict(x=[], y=[], cases=[], state=[]))
usPlot = figure(title="Cases of Coronavirus", tools=us_TOOLS,
x_axis_location=None, y_axis_location=None,
tooltips=[("Current cases", "@cases{(0.00 a)}"), ('State', '@state')],
width=60 * 15, height=27 * 15)
usPlot.grid.grid_line_color = None
usPlot.x_range = Range1d(-125, -65, bounds=(-145, -45))
usPlot.y_range = Range1d(23, 50, bounds=(13, 60))
usPlot.hover.point_policy = "follow_mouse"
# usPlot.image_url(url=['https://www.your-vector-maps.com/_kepek/_grey_images/USA-mercator-vector-map.jpg'], x=-126.5, y=51.2, w=61, h=30)
usPlot.patches('x', 'y', source=usData,
fill_color={'field': 'cases', 'transform': color_mapper},
fill_alpha=0.7, line_color="white", line_width=0.5)
usPlot.toolbar.active_drag = us_TOOLS[0]
tick_labels = {'0': '0', '1': '1', '10': '10',
'100': '100', '1000': '1000',
'10000': '10,000', '100000': '100,000', '1,000,000': '1,000,000'}
us_color_bar = ColorBar(color_mapper=color_mapper, ticker=LogTicker(),
label_standoff=12, border_line_color=None, orientation='horizontal', location=(0, 0),
major_label_overrides=tick_labels)
usPlot.add_layout(us_color_bar, 'below')
# usColorBar.right[0].formatter.use_scientific = False
# 2) Map of state
stateData = ColumnDataSource(data={'x': [], 'y': [], 'name': [], 'cases': [], 'state': []})
statePlot = figure(title="State map", tools=state_TOOLS,
x_axis_location=None, y_axis_location=None,
tooltips=[('Name', '@name'), ("Current cases", "@cases{(0,00)}"), ('State', '@state')],
height=405, width=405)
statePlot.toolbar.active_drag = state_TOOLS[0]
statePlot.grid.grid_line_color = None
statePlot.hover.point_policy = "follow_mouse"
statePlot.patches('x', 'y', source=stateData,
fill_color={'field': 'cases', 'transform': color_mapper},
fill_alpha=0.7, line_color="white", line_width=0.5)
# 3,4) Cumulative temporal graphs (tests, positive):
cumulativeData_CT = ColumnDataSource(data=dict(time=[], total_positive=[], total_testResults=[],
total_hospitalized=[], total_ICU=[], total_deaths=[], source=[]))
cumulativeData_NYT = ColumnDataSource(data=dict(time=[], total_positive=[], total_deaths=[], source=[]))
cumulPlot = figure(tools=cumul_TOOLS, x_axis_type='datetime', width=650, height=250)
cumulPlot.left[0].formatter.use_scientific = False
total_positive_CT = cumulPlot.line('time', 'total_positive', source=cumulativeData_CT, line_color='blue', line_width=2,
legend_label='positive_CT')
total_positive_NYT = cumulPlot.line('time', 'total_positive', source=cumulativeData_NYT, line_color='lightblue',
line_width=2,
legend_label='positive_NYT')
total_testResults = cumulPlot.line('time', 'total_testResults', source=cumulativeData_CT, line_color='green',
line_width=2,
legend_label='total_testResults')
# total_positive_NYT.visible = False
cumulPlot.yaxis.axis_label = '# of people'
# cumulPlot.yaxis.formatter = FuncTickFormatter(code="""
# parts = tick.toString().split(".");
# parts[0] = parts[0].replace(/\B(?=(\d{3})+(?!\d))/g, ",");
# return parts.join(".");
# """)
cumulPlot.xaxis.axis_label = 'Date'
cumulPlot.legend.location = "top_left"
cumulPlot.legend.click_policy = "hide"
cumulPlot.add_tools(
HoverTool(renderers=[total_positive_CT],
tooltips=[("total_positive", "@total_positive{(0,00)}"), ("date", "@time{%F}")],
formatters={'@time': 'datetime'})
)
cumulPlot.add_tools(
HoverTool(renderers=[total_positive_NYT],
tooltips=[("total_positive", "@total_positive{(0,00)}"), ("date", "@time{%F}")],
formatters={'@time': 'datetime'})
)
cumulPlot.add_tools(
HoverTool(renderers=[total_testResults],
tooltips=[("total_testResults", "@total_testResults{(0,00)}"), ("date", "@time{%F}")],
formatters={'@time': 'datetime'})
)
# 4) Cumulative critical cases (deaths for now):
cumulCriticalPlot = figure(tools=cumulCritical_TOOLS, x_axis_type='datetime', width=650, height=250,
x_range=cumulPlot.x_range)
cumulCriticalPlot.left[0].formatter.use_scientific = False
total_deaths_CT = cumulCriticalPlot.line('time', 'total_deaths', source=cumulativeData_CT, line_color='red',
line_width=2,
legend_label='totalDeaths_CT')
total_deaths_NYT = cumulCriticalPlot.line('time', 'total_deaths', source=cumulativeData_NYT, line_color='magenta',
line_width=2, legend_label='totalDeaths_NYT')
# total_deaths_NYT.visible = False
cumulCriticalPlot.yaxis.axis_label = '# of people'
# cumulCriticalPlot.yaxis.formatter= FuncTickFormatter(code="""
# parts = tick.toString().split(".");
# parts[0] = parts[0].replace(/\B(?=(\d{3})+(?!\d))/g, ",");
# return parts.join(".");
# """)
cumulCriticalPlot.xaxis.axis_label = 'Date'
cumulCriticalPlot.legend.location = "top_left"
cumulCriticalPlot.legend.click_policy = "hide"
cumulCriticalPlot.add_tools(
HoverTool(renderers=[total_deaths_CT], tooltips=[("total_deaths", "@total_deaths{(0,00)}"), ("date", "@time{%F}")],
formatters={'@time': 'datetime'})
)
cumulCriticalPlot.add_tools(
HoverTool(renderers=[total_deaths_NYT], tooltips=[("total_deaths", "@total_deaths{(0,00)}"), ("date", "@time{%F}")],
formatters={'@time': 'datetime'})
)
# 5-7) Daily temporal graphs:
dailyData_CT = ColumnDataSource(data=dict(time=[], new_positive=[], new_testResults=[],
current_hospitalized=[], current_ICU=[], new_deaths=[], source=[]))
dailyData_NYT = ColumnDataSource(data=dict(time=[], new_positive=[], new_deaths=[], source=[]))
dailyPlot = figure(tools=daily_TOOLS, x_axis_type='datetime', width=650, height=250, title="Daily statistics",
x_range=cumulPlot.x_range)
dailyPlot.left[0].formatter.use_scientific = False
new_positive_CT = dailyPlot.line('time', 'new_positive', source=dailyData_CT, line_color='blue', line_width=2,
legend_label='new_positive_CT')
new_testResults = dailyPlot.line('time', 'new_testResults', source=dailyData_CT, line_color='green', line_width=2,
legend_label='new_testResults')
new_positive_NYT = dailyPlot.line('time', 'new_positive', source=dailyData_NYT, line_color='lightblue', line_width=2,
legend_label='new_positive_NYT')
# new_positive_NYT.visible = False
dailyPlot.add_tools(
HoverTool(renderers=[new_positive_CT], tooltips=[("new_positive", "@new_positive{(0,00)}"), ("date", "@time{%F}")],
formatters={'@time': 'datetime'})
)
dailyPlot.add_tools(
HoverTool(renderers=[new_testResults], tooltips=[("new_testResults", "@new_testResults{(0,00)}"), ("date", "@time{%F}")],
formatters={'@time': 'datetime'})
)
dailyPlot.add_tools(
HoverTool(renderers=[new_positive_NYT], tooltips=[("new_positive", "@new_positive{(0,00)}"), ("date", "@time{%F}")],
formatters={'@time': 'datetime'})
)
dailyPlot.toolbar.active_drag = daily_TOOLS[1]
dailyPlot.yaxis.axis_label = '# of people'
# dailyPlot.yaxis.formatter = formatter = FuncTickFormatter(code="""
# parts = tick.toString().split(".");
# parts[0] = parts[0].replace(/\B(?=(\d{3})+(?!\d))/g, ",");
# return parts.join(".");
# """)
dailyPlot.xaxis.axis_label = 'Date'
dailyPlot.legend.location = "top_left"
dailyPlot.legend.click_policy = "hide"
# 7 Daily death graph:
dailyDeathPlot = figure(tools=dailyCritical_TOOLS, x_axis_type='datetime', width=650, height=250,
title="Daily death statistics", x_range=cumulPlot.x_range)
dailyDeathPlot.left[0].formatter.use_scientific = False
new_deaths_CT = dailyDeathPlot.line('time', 'new_deaths', source=dailyData_CT, line_color='black', line_width=2,
legend_label='new_deaths_CT')
new_deaths_NYT = dailyDeathPlot.line('time', 'new_deaths', source=dailyData_NYT, line_color='grey', line_width=2,
legend_label='new_deaths_NYT')
# new_deaths_NYT.visible = False
dailyDeathPlot.add_tools(
HoverTool(renderers=[new_deaths_CT], tooltips=[("new_deaths", "@new_deaths{(0,00)}"), ("date", "@time{%F}")],
formatters={'@time': 'datetime'})
)
dailyDeathPlot.add_tools(
HoverTool(renderers=[new_deaths_NYT], tooltips=[("new_deaths", "@new_deaths{(0,00)}"), ("date", "@time{%F}")],
formatters={'@time': 'datetime'})
)
dailyDeathPlot.toolbar.active_drag = dailyDeath_TOOLS[1]
dailyDeathPlot.yaxis.axis_label = '# of people'
# dailyDeathPlot.yaxis.formatter = FuncTickFormatter(code="""
# parts = tick.toString().split(".");
# parts[0] = parts[0].replace(/\B(?=(\d{3})+(?!\d))/g, ",");
# return parts.join(".");
# """)
dailyDeathPlot.xaxis.axis_label = 'Date'
dailyDeathPlot.legend.location = "top_left"
dailyDeathPlot.legend.click_policy = "hide"
# 7) dailyCritical plot:
dailyCriticalPlot = figure(tools=dailyCritical_TOOLS, x_axis_type='datetime', width=650, height=250,
title="*Daily hospitalization statistics", x_range=cumulPlot.x_range)
dailyCriticalPlot.left[0].formatter.use_scientific = False
current_hospitalized = dailyCriticalPlot.line('time', 'current_hospitalized', source=dailyData_CT, line_color='orange',
line_width=2, legend_label='current_hospitalized')
current_ICU = dailyCriticalPlot.line('time', 'current_ICU', source=dailyData_CT, line_color='red', line_width=2,
legend_label='current_ICU')
# new_deaths_NYT.visible = False
dailyCriticalPlot.add_tools(HoverTool(renderers=[current_hospitalized],
tooltips=[("current_hospitalized", "@current_hospitalized{(0,00)}"), ("date", "@time{%F}")],
formatters={'@time': 'datetime'})
)
dailyCriticalPlot.add_tools(
HoverTool(renderers=[current_ICU], tooltips=[("current_ICU", "@current_ICU{(0,00)}"), ("date", "@time{%F}")],
formatters={'@time': 'datetime'})
)
dailyCriticalPlot.toolbar.active_drag = dailyCritical_TOOLS[1]
dailyCriticalPlot.yaxis.axis_label = '# of people'
# dailyCriticalPlot.yaxis.formatter = FuncTickFormatter(code="""
# parts = tick.toString().split(".");
# parts[0] = parts[0].replace(/\B(?=(\d{3})+(?!\d))/g, ",");
# return parts.join(".");
# """)
dailyCriticalPlot.xaxis.axis_label = 'Date'
dailyCriticalPlot.legend.location = "top_left"
dailyCriticalPlot.legend.click_policy = "hide"
print("Completed defining plot structures")
# B) Define the actual data for the plots:
# 1) Define data for US map plot:
state_xs = [stateBorders[state]["lons"] for state in stateBorders if state]
state_ys = [stateBorders[state]["lats"] for state in stateBorders if state]
state_names = [stateBorders[state]["name"] for state in stateBorders]
state_val = []
for state in stateBorders:
if (state in list(stateDF_CT.state.unique())):
state_val.append(stateDF_CT.query("state == '" + state + "'")['positive'].iloc[-1]) # latest positive
else:
print(state + ' does not have any records of cases')
state_val.append(0)
usData.data = dict(x=state_xs, y=state_ys, cases=state_val, state=state_names)
print("Completed defining data")
# 2) Define function on selection of new state:
def updateState():
global countyDF, state, stateCountyDF, stateBorders
print(state)
stateCountyDF = countyDF.query("state == '" + state + "'")
stateCountyBorders = countyBorders[countyBorders['state'] == state]
county_xs = [stateCountyBorders.iloc[i, :]['lons'] for i in range(len(stateCountyBorders))]
county_ys = [stateCountyBorders.iloc[i, :]['lats'] for i in range(len(stateCountyBorders))]
county_names = [stateCountyBorders.iloc[i, :]['county'] for i in range(len(stateCountyBorders))]
state_names = [state for i in range(len(stateCountyBorders))]
# county_val = [rand() for i in range(len(stateCounties))]
county_vals = []
for county in county_names:
if county in list(stateCountyDF['county'].unique()):
county_vals.append(
stateCountyDF[stateCountyDF['county'] == county].positive.values[-1]) # latest positive cases
else:
county_vals.append(0)
stateData.data = dict(
x=county_xs,
y=county_ys,
name=county_names,
cases=county_vals,
state=state_names)
# Set new limits and re-title state plot:
print('Setting limits: ' + state)
yrange = [np.nanmin(stateBorders[state]['lats']), np.nanmax(stateBorders[state]['lats'])]
xrange = [np.nanmin(stateBorders[state]['lons']), np.nanmax(stateBorders[state]['lons'])]
plotRange = np.diff(xrange)[0] if np.diff(xrange) > np.diff(yrange) else np.diff(yrange)[0]
# statePlot.x_range = Range1d((xrange[0] + plotRange/2) -.55*plotRange, (xrange[0] + plotRange/2) +.55*plotRange, bounds = ((xrange[0] + plotRange/2) -.55*plotRange, (xrange[0] + plotRange/2) +.55*plotRange))
statePlot.x_range.start = np.average(xrange) - .55 * plotRange
statePlot.x_range.end = np.average(xrange) + .55 * plotRange
# statePlot.y_range = Range1d((yrange[0] + plotRange/2) -.55*plotRange, (yrange[0] + plotRange/2) +.55*plotRange, bounds = ((yrange[0] + plotRange/2) -.55*plotRange, (yrange[0] + plotRange/2) +.55*plotRange))
statePlot.y_range.start = np.average(yrange) - .55 * plotRange
statePlot.y_range.end = np.average(yrange) + .55 * plotRange
state_name = stateBorders[state]['name']
cumulPlot.title.text = state_name + ': Cumulative testing data'
statePlot.title.text = state_name
cumulCriticalPlot.title.text = state_name + ': Cumulative deaths'
dailyPlot.title.text = state_name + ': Daily testing data'
dailyDeathPlot.title.text = state_name + ': Daily deaths'
dailyCriticalPlot.title.text = state_name + ': Daily hospitalization data*'
# Update stateData:
sourceStateData()
return
# 3) Define data for temporal graphs:
def sourceUSdata():
global usDF_CT
CTdf = usDF_CT
dailyData_CT.data = dict(
time=CTdf['date'],
# date=CTdf['date'].astype(str),
new_positive=CTdf['positiveIncrease'],
new_testResults=CTdf['totalTestResultsIncrease'],
current_hospitalized=CTdf['hospitalizedCurrently'],
current_ICU=CTdf['inIcuCurrently'],
new_deaths=CTdf['deathIncrease'],
source=CTdf['source'])
cumulativeData_CT.data = dict(
time=CTdf['date'],
# date=CTdf['date'].astype(str),
total_positive=CTdf['positive'],
total_testResults=CTdf['totalTestResults'],
total_hospitalized=CTdf['hospitalizedCumulative'],
total_ICU=CTdf['inIcuCumulative'],
total_deaths=CTdf['death'],
source=CTdf['source'])
NYTdf = usDF_NYT
dailyData_NYT.data = dict(
time=NYTdf['date'],
# date=NYTdf['date'].astype(str),
new_positive=NYTdf['positiveIncrease'],
new_deaths=NYTdf['deathIncrease'],
source=NYTdf['source'])
cumulativeData_NYT.data = dict(
time=NYTdf['date'],
# date=NYTdf['date'].astype(str),
total_positive=NYTdf['positive'],
total_deaths=NYTdf['death'],
source=NYTdf['source'])
    cumulPlot.title.text = 'United States' + ': Cumulative testing data'
cumulCriticalPlot.title.text = 'United States' + ': Cumulative deaths'
dailyPlot.title.text = 'United States' + ': Daily testing data'
dailyDeathPlot.title.text = 'United States' + ': Daily deaths'
dailyCriticalPlot.title.text = 'United States' + ': Daily hospitalization data*'
return
def sourceStateData():
global stateCountyDF, county, state
# Update state level data:
CTdf = stateDF_CT.query("state == '" + state + "'")
dailyData_CT.data = dict(
time=CTdf['date'],
# date=CTdf['date'].astype(str),
new_positive=CTdf['positiveIncrease'],
new_testResults=CTdf['totalTestResultsIncrease'],
current_hospitalized=CTdf['hospitalizedCurrently'],
current_ICU=CTdf['inIcuCurrently'],
new_deaths=CTdf['deathIncrease'],
source=CTdf['source'])
cumulativeData_CT.data = dict(
time=CTdf['date'],
# date=CTdf['date'].astype(str),
total_positive=CTdf['positive'],
total_testResults=CTdf['totalTestResults'],
total_hospitalized=CTdf['hospitalizedCumulative'],
total_ICU=CTdf['inIcuCumulative'],
total_deaths=CTdf['death'],
source=CTdf['source'])
NYTdf = stateDF_NYT.query("state == '" + state + "'")
dailyData_NYT.data = dict(
time=NYTdf['date'],
# date=NYTdf['date'].astype(str),
new_positive=NYTdf['positiveIncrease'],
new_deaths=NYTdf['deathIncrease'],
source=NYTdf['source'])
cumulativeData_NYT.data = dict(
time=NYTdf['date'],
# date=NYTdf['date'].astype(str),
total_positive=NYTdf['positive'],
total_deaths=NYTdf['death'],
source=NYTdf['source'])
return
def sourceCountyData():
global stateCountyDF, county, state
NYTdf = stateCountyDF
dailyData_CT.data = dict(
time=[],
# date=[],
new_positive=[],
new_testResults=[],
current_hospitalized=[],
current_ICU=[],
new_deaths=[],
source=[])
cumulativeData_CT.data = dict(
time=[],
# date=[],
total_positive=[],
total_testResults=[],
total_hospitalized=[],
total_ICU=[],
total_deaths=[],
source=[])
dailyData_NYT.data = dict(
time=NYTdf['date'],
# date=NYTdf['date'].astype(str),
new_positive=NYTdf['positiveIncrease'],
new_deaths=NYTdf['deathIncrease'],
source=NYTdf['source'])
cumulativeData_NYT.data = dict(
time=NYTdf['date'],
# date=NYTdf['date'].astype(str),
total_positive=NYTdf['positive'],
total_deaths=NYTdf['death'],
source=NYTdf['source'])
cumulPlot.title.text = county + ': Cumulative data'
return
# C) Define interactivity functions
def us_tap_handler(attr, old, new):
global state
# index = new[0]
# print(attr)
# print([x for x in list(locals().keys()) if x[0] != '_'])
if len(new) == 0:
print('US')
sourceUSdata()
else:
state = stateBorders.columns[new[0]]
print(state)
updateState()
stateData.selected.indices = []
return
def state_tap(attr, old, new):
global state, stateCountyDF, statesDF, stateBorders, county
print(state)
print(new)
if len(new) == 0:
updateState()
else:
stateCountyBorders = countyBorders[countyBorders['state'] == state]
county = stateCountyBorders.county.iloc[new[0]]
print(stateBorders[state]['name'])
print(county)
stateCountyDF = countyDF.query("(state == '" + state + "') & (county == '" + county + "')")
if len(stateCountyDF) == 0:
print('No data for this county: ' + county)
else:
pass
sourceCountyData()
cumulPlot.title.text = county + ': Cumulative testing data'
cumulCriticalPlot.title.text = county + ': Cumulative deaths'
dailyPlot.title.text = county + ': Daily testing data'
dailyDeathPlot.title.text = county + ': Daily deaths'
dailyCriticalPlot.title.text = county + ': Daily hospitalization data*'
# state = stateBorders.columns[new[0]]
# print(state)
# statePlot.title.text = stateBorders[state]['name']
# updateState(state=state)
return
usData.selected.on_change("indices", us_tap_handler)
stateData.selected.on_change("indices", state_tap)
# D) Define and display layout
layout = column(row(usPlot, statePlot),
row(cumulPlot, dailyPlot),
row(cumulCriticalPlot, dailyDeathPlot),
                row(Div(text='*Hospitalization statistics may be confounded by the # of states reporting.\nLast updated: ' + str(lastUpdated)[0:16], width=300,
style={'font-size': '150%', 'color': 'black'}),
dailyCriticalPlot))
# Initiate with US data:
sourceUSdata()
# show(layout)
doc = curdoc()
doc.title = "US Coronavirus Map"
doc.add_root(layout)
# # Run this in the command line:
# bokeh serve --show --log-level=debug US_coronavirus_map.py
| 45.254902
| 212
| 0.648397
|
9f8b56fece9d54ceb3ea65def091258f35bd59e0
| 1,823
|
py
|
Python
|
troika/http/exceptions.py
|
troika-framework/troika-http
|
f27547e7567891db6b40cbbc8b9b03267b5e6fac
|
[
"BSD-3-Clause"
] | null | null | null |
troika/http/exceptions.py
|
troika-framework/troika-http
|
f27547e7567891db6b40cbbc8b9b03267b5e6fac
|
[
"BSD-3-Clause"
] | null | null | null |
troika/http/exceptions.py
|
troika-framework/troika-http
|
f27547e7567891db6b40cbbc8b9b03267b5e6fac
|
[
"BSD-3-Clause"
] | null | null | null |
"""Troika HTTP Exceptions"""
import http
class HTTPException(Exception):
"""Base exception for all Troika HTTP exceptions"""
class Finish(HTTPException):
"""Raise to finish processing the HTTP Request"""
class HTTPError(HTTPException):
"""HTTP Request Error
Raise to finish the HTTP Request handling, returning an HTTP error
as the response.
Status codes from the following RFCs are all observed:
* RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616
* RFC 6585: Additional HTTP Status Codes
* RFC 3229: Delta encoding in HTTP
* RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518
* RFC 5842: Binding Extensions to WebDAV
* RFC 7238: Permanent Redirect
* RFC 2295: Transparent Content Negotiation in HTTP
* RFC 2774: An HTTP Extension Framework
"""
def __init__(self,
status_code: int,
phrase: str = None,
description: str = None):
"""Raise a new HTTP Error
        If ``phrase`` or ``description`` is omitted, the default value
        will be used.
:param status_code: The HTTP status code
:param phrase: Optionally override the HTTP response phrase
:param description: Optionally override the HTTP response
description
"""
        super(HTTPError, self).__init__()
        self.status_code = status_code
        # http.HTTPStatus is an enum, so passing extra positional arguments
        # would invoke the functional enum-creation API rather than a value
        # lookup; look the status up by code and apply the optional
        # overrides separately.
        status = http.HTTPStatus(status_code)
        self._phrase = phrase or status.phrase
        self._description = description or status.description
@property
def description(self):
"""Return the HTTP response description
:rtype: str
"""
        return self._description
@property
def phrase(self):
"""Return the HTTP response phrase
:rtype: str
"""
        return self._phrase
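# A hedged usage sketch (the handler shape is an assumption; only HTTPError
# and Finish come from this module): raising either exception ends request
# processing, with HTTPError mapped onto the matching status line.
#
# def get(handler):
#     if handler.resource is None:
#         raise HTTPError(404)   # phrase/description fall back to the
#                                # http.HTTPStatus defaults
#     handler.write(handler.resource)
#     raise Finish()             # stop processing, keep the response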
| 27.208955
| 74
| 0.632474
|
0261dfe6123e598e2e98e668c5e8b71d387879a7
| 5,096
|
py
|
Python
|
ucscentralsdk/mometa/compute/ComputeIOHubEnvStats.py
|
ragupta-git/ucscentralsdk
|
2678008b5fb6b0fafafec388d0874147e95a1086
|
[
"Apache-2.0"
] | null | null | null |
ucscentralsdk/mometa/compute/ComputeIOHubEnvStats.py
|
ragupta-git/ucscentralsdk
|
2678008b5fb6b0fafafec388d0874147e95a1086
|
[
"Apache-2.0"
] | null | null | null |
ucscentralsdk/mometa/compute/ComputeIOHubEnvStats.py
|
ragupta-git/ucscentralsdk
|
2678008b5fb6b0fafafec388d0874147e95a1086
|
[
"Apache-2.0"
] | null | null | null |
"""This module contains the general information for ComputeIOHubEnvStats ManagedObject."""
from ...ucscentralmo import ManagedObject
from ...ucscentralcoremeta import UcsCentralVersion, MoPropertyMeta, MoMeta
from ...ucscentralmeta import VersionMeta
class ComputeIOHubEnvStatsConsts():
SUSPECT_FALSE = "false"
SUSPECT_NO = "no"
SUSPECT_TRUE = "true"
SUSPECT_YES = "yes"
TEMPERATURE_NOT_APPLICABLE = "not-applicable"
TEMPERATURE_AVG_NOT_APPLICABLE = "not-applicable"
TEMPERATURE_MAX_NOT_APPLICABLE = "not-applicable"
TEMPERATURE_MIN_NOT_APPLICABLE = "not-applicable"
class ComputeIOHubEnvStats(ManagedObject):
"""This is ComputeIOHubEnvStats class."""
consts = ComputeIOHubEnvStatsConsts()
naming_props = set([])
mo_meta = MoMeta("ComputeIOHubEnvStats", "computeIOHubEnvStats", "iohub-stats", VersionMeta.Version111a, "OutputOnly", 0xf, [], ["admin", "operations", "read-only"], [u'computeIOHub'], [u'computeIOHubEnvStatsHist'], [None])
prop_meta = {
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version111a, MoPropertyMeta.INTERNAL, None, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, 0x2, 0, 256, None, [], []),
"intervals": MoPropertyMeta("intervals", "intervals", "uint", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
"normalized_time_col": MoPropertyMeta("normalized_time_col", "normalizedTimeCol", "string", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, None, None, None, r"""([0-9]){4}-([0-9]){2}-([0-9]){2}T([0-9]){2}:([0-9]){2}:([0-9]){2}((\.([0-9]){3})){0,1}""", [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []),
"stats_reported": MoPropertyMeta("stats_reported", "statsReported", "int", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version111a, MoPropertyMeta.READ_WRITE, 0x8, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
"suspect": MoPropertyMeta("suspect", "suspect", "string", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["false", "no", "true", "yes"], []),
"temperature": MoPropertyMeta("temperature", "temperature", "string", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["not-applicable"], ["0-4294967295"]),
"temperature_avg": MoPropertyMeta("temperature_avg", "temperatureAvg", "string", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["not-applicable"], ["0-4294967295"]),
"temperature_max": MoPropertyMeta("temperature_max", "temperatureMax", "string", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["not-applicable"], ["0-4294967295"]),
"temperature_min": MoPropertyMeta("temperature_min", "temperatureMin", "string", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["not-applicable"], ["0-4294967295"]),
"thresholded": MoPropertyMeta("thresholded", "thresholded", "string", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
"time_collected": MoPropertyMeta("time_collected", "timeCollected", "string", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, None, None, None, r"""([0-9]){4}-([0-9]){2}-([0-9]){2}T([0-9]){2}:([0-9]){2}:([0-9]){2}((\.([0-9]){3})){0,1}""", [], []),
"update": MoPropertyMeta("update", "update", "uint", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
}
prop_map = {
"childAction": "child_action",
"dn": "dn",
"intervals": "intervals",
"normalizedTimeCol": "normalized_time_col",
"rn": "rn",
"statsReported": "stats_reported",
"status": "status",
"suspect": "suspect",
"temperature": "temperature",
"temperatureAvg": "temperature_avg",
"temperatureMax": "temperature_max",
"temperatureMin": "temperature_min",
"thresholded": "thresholded",
"timeCollected": "time_collected",
"update": "update",
}
def __init__(self, parent_mo_or_dn, **kwargs):
self._dirty_mask = 0
self.child_action = None
self.intervals = None
self.normalized_time_col = None
self.stats_reported = None
self.status = None
self.suspect = None
self.temperature = None
self.temperature_avg = None
self.temperature_max = None
self.temperature_min = None
self.thresholded = None
self.time_collected = None
self.update = None
ManagedObject.__init__(self, "ComputeIOHubEnvStats", parent_mo_or_dn, **kwargs)
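# A hedged retrieval sketch (host and credentials are placeholders, and the
# handle API shown follows the usual ucscentralsdk pattern, so treat it as
# an assumption): stats managed objects like this one are normally fetched
# by class id rather than instantiated directly.
#
# from ucscentralsdk.ucscentralhandle import UcsCentralHandle
# handle = UcsCentralHandle("192.0.2.1", "admin", "password")
# handle.login()
# for mo in handle.query_classid("computeIOHubEnvStats"):
#     print(mo.dn, mo.temperature)
# handle.logout()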
| 62.91358
| 273
| 0.664443
|
de1f69d9efc211cc958211ea0c31ce23e7eed08a
| 467
|
py
|
Python
|
src/radical/saga/adaptors/cpi/context.py
|
wjlei1990/radical.saga
|
de022ea4fb29d95e8acffff8a68aa8648de807d4
|
[
"MIT"
] | 12
|
2019-04-13T21:41:45.000Z
|
2021-08-03T09:43:25.000Z
|
src/radical/saga/adaptors/cpi/context.py
|
wjlei1990/radical.saga
|
de022ea4fb29d95e8acffff8a68aa8648de807d4
|
[
"MIT"
] | 103
|
2019-04-10T14:23:41.000Z
|
2022-03-15T19:43:56.000Z
|
src/radical/saga/adaptors/cpi/context.py
|
wjlei1990/radical.saga
|
de022ea4fb29d95e8acffff8a68aa8648de807d4
|
[
"MIT"
] | 7
|
2019-07-11T07:59:56.000Z
|
2022-02-02T22:28:24.000Z
|
__author__ = "Andre Merzky"
__copyright__ = "Copyright 2012-2013, The SAGA Project"
__license__ = "MIT"
from .base import CPIBase
from .decorators import CPI_SYNC_CALL as SYNC
from .decorators import CPI_ASYNC_CALL as ASYNC
class Context (CPIBase) :
@SYNC
def init_instance (self, type) : pass
@SYNC
def _initialize (self, session) : pass
@SYNC
def _get_default_contexts (self, session) : pass
| 17.961538
| 55
| 0.657388
|
279510d13f633f760fc90910723a1a3018f760e4
| 432
|
py
|
Python
|
tests/digital_tickler_test.py
|
geritwagner/digital-tickler
|
bdca1f1c243808cd3fb9d184beaacd51e45b8b22
|
[
"MIT"
] | null | null | null |
tests/digital_tickler_test.py
|
geritwagner/digital-tickler
|
bdca1f1c243808cd3fb9d184beaacd51e45b8b22
|
[
"MIT"
] | null | null | null |
tests/digital_tickler_test.py
|
geritwagner/digital-tickler
|
bdca1f1c243808cd3fb9d184beaacd51e45b8b22
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Tests for `digital_tickler` package."""
import unittest
from digital_tickler import digital_tickler # noqa
class TestDigital_tickler(unittest.TestCase):
"""Tests for `digital_tickler` package."""
def setUp(self):
"""Set up test fixtures, if any."""
def tearDown(self):
"""Tear down test fixtures, if any."""
def test_000_something(self):
"""Test something."""
| 22.736842
| 51
| 0.659722
|
101e71ae7c32afdaffc59a6504b6623021aca48c
| 29,240
|
py
|
Python
|
sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2020_12_01/operations/_ssh_public_keys_operations.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | 1
|
2021-09-07T18:39:05.000Z
|
2021-09-07T18:39:05.000Z
|
sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2020_12_01/operations/_ssh_public_keys_operations.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | null | null | null |
sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2020_12_01/operations/_ssh_public_keys_operations.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | 1
|
2022-03-04T06:21:56.000Z
|
2022-03-04T06:21:56.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_by_subscription_request(
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/sshPublicKeys')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_by_resource_group_request(
resource_group_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/sshPublicKeys')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_create_request(
resource_group_name: str,
ssh_public_key_name: str,
subscription_id: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/sshPublicKeys/{sshPublicKeyName}')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"sshPublicKeyName": _SERIALIZER.url("ssh_public_key_name", ssh_public_key_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_update_request(
resource_group_name: str,
ssh_public_key_name: str,
subscription_id: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/sshPublicKeys/{sshPublicKeyName}')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"sshPublicKeyName": _SERIALIZER.url("ssh_public_key_name", ssh_public_key_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PATCH",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_delete_request(
resource_group_name: str,
ssh_public_key_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-12-01"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/sshPublicKeys/{sshPublicKeyName}')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"sshPublicKeyName": _SERIALIZER.url("ssh_public_key_name", ssh_public_key_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
**kwargs
)
def build_get_request(
resource_group_name: str,
ssh_public_key_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/sshPublicKeys/{sshPublicKeyName}')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"sshPublicKeyName": _SERIALIZER.url("ssh_public_key_name", ssh_public_key_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_generate_key_pair_request(
resource_group_name: str,
ssh_public_key_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/sshPublicKeys/{sshPublicKeyName}/generateKeyPair')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"sshPublicKeyName": _SERIALIZER.url("ssh_public_key_name", ssh_public_key_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
class SshPublicKeysOperations(object):
"""SshPublicKeysOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2020_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list_by_subscription(
self,
**kwargs: Any
) -> Iterable["_models.SshPublicKeysGroupListResult"]:
"""Lists all of the SSH public keys in the subscription. Use the nextLink property in the response
to get the next page of SSH public keys.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SshPublicKeysGroupListResult or the result of
cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2020_12_01.models.SshPublicKeysGroupListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SshPublicKeysGroupListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_subscription_request(
subscription_id=self._config.subscription_id,
template_url=self.list_by_subscription.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_by_subscription_request(
subscription_id=self._config.subscription_id,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("SshPublicKeysGroupListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/sshPublicKeys'} # type: ignore
@distributed_trace
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs: Any
) -> Iterable["_models.SshPublicKeysGroupListResult"]:
"""Lists all of the SSH public keys in the specified resource group. Use the nextLink property in
the response to get the next page of SSH public keys.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SshPublicKeysGroupListResult or the result of
cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2020_12_01.models.SshPublicKeysGroupListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SshPublicKeysGroupListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
template_url=self.list_by_resource_group.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("SshPublicKeysGroupListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/sshPublicKeys'} # type: ignore
@distributed_trace
def create(
self,
resource_group_name: str,
ssh_public_key_name: str,
parameters: "_models.SshPublicKeyResource",
**kwargs: Any
) -> "_models.SshPublicKeyResource":
"""Creates a new SSH public key resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ssh_public_key_name: The name of the SSH public key.
:type ssh_public_key_name: str
:param parameters: Parameters supplied to create the SSH public key.
:type parameters: ~azure.mgmt.compute.v2020_12_01.models.SshPublicKeyResource
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SshPublicKeyResource, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2020_12_01.models.SshPublicKeyResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SshPublicKeyResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'SshPublicKeyResource')
request = build_create_request(
resource_group_name=resource_group_name,
ssh_public_key_name=ssh_public_key_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.create.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('SshPublicKeyResource', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('SshPublicKeyResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/sshPublicKeys/{sshPublicKeyName}'} # type: ignore
@distributed_trace
def update(
self,
resource_group_name: str,
ssh_public_key_name: str,
parameters: "_models.SshPublicKeyUpdateResource",
**kwargs: Any
) -> "_models.SshPublicKeyResource":
"""Updates a new SSH public key resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ssh_public_key_name: The name of the SSH public key.
:type ssh_public_key_name: str
:param parameters: Parameters supplied to update the SSH public key.
:type parameters: ~azure.mgmt.compute.v2020_12_01.models.SshPublicKeyUpdateResource
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SshPublicKeyResource, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2020_12_01.models.SshPublicKeyResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SshPublicKeyResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'SshPublicKeyUpdateResource')
request = build_update_request(
resource_group_name=resource_group_name,
ssh_public_key_name=ssh_public_key_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.update.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('SshPublicKeyResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/sshPublicKeys/{sshPublicKeyName}'} # type: ignore
@distributed_trace
def delete(
self,
resource_group_name: str,
ssh_public_key_name: str,
**kwargs: Any
) -> None:
"""Delete an SSH public key.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ssh_public_key_name: The name of the SSH public key.
:type ssh_public_key_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request(
resource_group_name=resource_group_name,
ssh_public_key_name=ssh_public_key_name,
subscription_id=self._config.subscription_id,
template_url=self.delete.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/sshPublicKeys/{sshPublicKeyName}'} # type: ignore
@distributed_trace
def get(
self,
resource_group_name: str,
ssh_public_key_name: str,
**kwargs: Any
) -> "_models.SshPublicKeyResource":
"""Retrieves information about an SSH public key.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ssh_public_key_name: The name of the SSH public key.
:type ssh_public_key_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SshPublicKeyResource, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2020_12_01.models.SshPublicKeyResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SshPublicKeyResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
resource_group_name=resource_group_name,
ssh_public_key_name=ssh_public_key_name,
subscription_id=self._config.subscription_id,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('SshPublicKeyResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/sshPublicKeys/{sshPublicKeyName}'} # type: ignore
@distributed_trace
def generate_key_pair(
self,
resource_group_name: str,
ssh_public_key_name: str,
**kwargs: Any
) -> "_models.SshPublicKeyGenerateKeyPairResult":
"""Generates and returns a public/private key pair and populates the SSH public key resource with
the public key. The length of the key will be 3072 bits. This operation can only be performed
once per SSH public key resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ssh_public_key_name: The name of the SSH public key.
:type ssh_public_key_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SshPublicKeyGenerateKeyPairResult, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2020_12_01.models.SshPublicKeyGenerateKeyPairResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SshPublicKeyGenerateKeyPairResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_generate_key_pair_request(
resource_group_name=resource_group_name,
ssh_public_key_name=ssh_public_key_name,
subscription_id=self._config.subscription_id,
template_url=self.generate_key_pair.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('SshPublicKeyGenerateKeyPairResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
generate_key_pair.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/sshPublicKeys/{sshPublicKeyName}/generateKeyPair'} # type: ignore
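# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the generated client). It assumes
# the azure-identity and azure-mgmt-compute packages and a real subscription;
# ComputeManagementClient is the documented entry point that instantiates
# SshPublicKeysOperations and attaches it as the `ssh_public_keys` attribute.
#
#     from azure.identity import DefaultAzureCredential
#     from azure.mgmt.compute import ComputeManagementClient
#
#     client = ComputeManagementClient(DefaultAzureCredential(), "<subscription-id>")
#     client.ssh_public_keys.create(
#         resource_group_name="example-rg",
#         ssh_public_key_name="example-key",
#         parameters={"location": "eastus"},
#     )
#     key_pair = client.ssh_public_keys.generate_key_pair("example-rg", "example-key")
#     print(key_pair.public_key)  # the private key is returned once and not stored
# ---------------------------------------------------------------------------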
| 41.125176
| 203
| 0.678112
|
a6188877d3b0404a75c2004ab93de24527e45b74
| 1,416
|
py
|
Python
|
liesym/groups/_sp.py
|
npapapietro/liesym
|
56bce3290e35d111b86413191516c41a09f0a07d
|
[
"MIT"
] | 2
|
2021-09-09T22:25:25.000Z
|
2022-01-22T01:15:47.000Z
|
liesym/groups/_sp.py
|
npapapietro/liesym
|
56bce3290e35d111b86413191516c41a09f0a07d
|
[
"MIT"
] | 1
|
2021-12-20T00:15:26.000Z
|
2021-12-20T01:54:07.000Z
|
liesym/groups/_sp.py
|
npapapietro/liesym
|
56bce3290e35d111b86413191516c41a09f0a07d
|
[
"MIT"
] | 1
|
2021-09-09T22:25:31.000Z
|
2021-09-09T22:25:31.000Z
|
from __future__ import annotations
from sympy import Matrix, zeros
from ._base import LieGroup
from ..algebras import C
def _iachello_basis(dim):
"""Basis noted in Francesco Iachello's text.
"""
    n = dim // 2  # integer division: dim is guaranteed even and range() needs an int
def E(i, j):
mat = zeros(dim)
mat[i, j] = 1
return mat
for k in range(n):
yield E(k, n+k)
yield E(k+n, k)
for k in range(n):
for m in range(n):
yield E(k, m) - E(n+m, n+k)
for m in range(n):
for k in range(m):
yield E(k, n+m) + E(m, n+k)
yield E(n+k, m) + E(n+m, k)
class Sp(LieGroup):
"""The Symplectic Group
"""
def __new__(cls, dim: int):
if dim % 2 != 0:
raise NotImplementedError("Sp is not defined for odd dimensions.")
return super().__new__(cls, "Sp", dim)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
        self._algebra = C(self.dimension // 2)
def generators(self) -> list['Matrix']:
"""Generators for Sp(2N). There are a lot of possible choices, so
we choose one based on existing literature.
Returns:
list[Matrix]: list of (mathematical) generators in Iachello's basis.
Sources:
- Iachello, F (2006). Lie algebras and applications. ISBN 978-3-540-36236-4.
"""
return list(_iachello_basis(self.dimension))
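# Usage sketch (illustrative; assumes Sp is re-exported from liesym.groups).
# The symplectic algebra sp(2n) has n(2n + 1) generators, so Sp(4) should
# yield 10 matrices:
#
#     from liesym.groups import Sp
#
#     sp4 = Sp(4)
#     gens = sp4.generators()
#     assert len(gens) == 10  # 2 * (2*2 + 1)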
| 24.842105
| 88
| 0.561441
|
2df6ef7871683c6d0d16bf0f8c577513a499be43
| 11,617
|
py
|
Python
|
pyscf/lo/pipek.py
|
shufay/pyscf
|
c7ea840b012a59fce5fa4114ef3274a7cf00165e
|
[
"Apache-2.0"
] | 1
|
2021-11-12T11:55:25.000Z
|
2021-11-12T11:55:25.000Z
|
pyscf/lo/pipek.py
|
shufay/pyscf
|
c7ea840b012a59fce5fa4114ef3274a7cf00165e
|
[
"Apache-2.0"
] | 2
|
2020-01-29T20:21:02.000Z
|
2020-08-03T18:28:52.000Z
|
pyscf/lo/pipek.py
|
shufay/pyscf
|
c7ea840b012a59fce5fa4114ef3274a7cf00165e
|
[
"Apache-2.0"
] | 1
|
2020-07-09T20:34:10.000Z
|
2020-07-09T20:34:10.000Z
|
#!/usr/bin/env python
# Copyright 2014-2019 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
Pipek-Mezey localization
ref. JCTC, 10, 642 (2014); DOI:10.1021/ct401016x
'''
import numpy
from functools import reduce
from pyscf import lib
from pyscf.lib import logger
from pyscf.lo import orth
from pyscf.lo import boys
from pyscf import __config__
def atomic_pops(mol, mo_coeff, method='meta_lowdin', mf=None):
'''
Kwargs:
method : string
The atomic population projection scheme. It can be mulliken,
lowdin, meta_lowdin, iao, or becke
Returns:
A 3-index tensor [A,i,j] indicates the population of any orbital-pair
density |i><j| for each species (atom in this case). This tensor is
used to construct the population and gradients etc.
    You can customize the PM localization with respect to other population metrics,
such as the charge of a site, the charge of a fragment (a group of
atoms) by overwriting this tensor. See also the example
pyscf/examples/loc_orb/40-hubbard_model_PM_localization.py for the PM
localization of site-based population for hubbard model.
'''
method = method.lower().replace('_', '-')
nmo = mo_coeff.shape[1]
proj = numpy.empty((mol.natm,nmo,nmo))
if getattr(mol, 'pbc_intor', None): # whether mol object is a cell
s = mol.pbc_intor('int1e_ovlp_sph', hermi=1)
else:
s = mol.intor_symmetric('int1e_ovlp')
if method == 'becke':
from pyscf.dft import gen_grid
if not (getattr(mf, 'grids', None) and getattr(mf, '_numint', None)):
# Call DFT to initialize grids and numint objects
mf = mol.RKS()
grids = mf.grids
ni = mf._numint
if not isinstance(grids, gen_grid.Grids):
raise NotImplementedError('PM becke scheme for PBC systems')
# The atom-wise Becke grids (without concatenated to a vector of grids)
coords, weights = grids.get_partition(mol, concat=False)
for i in range(mol.natm):
ao = ni.eval_ao(mol, coords[i], deriv=0)
aow = numpy.einsum('pi,p->pi', ao, weights[i])
charge_matrix = lib.dot(aow.conj().T, ao)
proj[i] = reduce(lib.dot, (mo_coeff.conj().T, charge_matrix, mo_coeff))
elif method == 'mulliken':
for i, (b0, b1, p0, p1) in enumerate(mol.offset_nr_by_atom()):
csc = reduce(numpy.dot, (mo_coeff[p0:p1].conj().T, s[p0:p1], mo_coeff))
proj[i] = (csc + csc.conj().T) * .5
elif method in ('lowdin', 'meta-lowdin'):
c = orth.restore_ao_character(mol, 'ANO')
#csc = reduce(lib.dot, (mo_coeff.conj().T, s, orth_local_ao_coeff))
csc = reduce(lib.dot, (mo_coeff.conj().T, s, orth.orth_ao(mol, method, c, s=s)))
for i, (b0, b1, p0, p1) in enumerate(mol.offset_nr_by_atom()):
proj[i] = numpy.dot(csc[:,p0:p1], csc[:,p0:p1].conj().T)
elif method in ('iao', 'ibo'):
from pyscf.lo import iao
assert mf is not None
# FIXME: How to handle UHF/UKS object?
orb_occ = mf.mo_coeff[:,mf.mo_occ>0]
iao_coeff = iao.iao(mol, orb_occ)
#
# IAO is generally not orthogonalized. For simplicity, we take Lowdin
    # orthogonalization here. Other orthogonalizations can be used. Results
    # should be very close to the Lowdin-orthogonalized orbitals.
#
# PM with Mulliken population of non-orth IAOs can be found in
# ibo.PipekMezey function
#
iao_coeff = orth.vec_lowdin(iao_coeff, s)
csc = reduce(lib.dot, (mo_coeff.conj().T, s, iao_coeff))
iao_mol = iao.reference_mol(mol)
for i, (b0, b1, p0, p1) in enumerate(iao_mol.offset_nr_by_atom()):
proj[i] = numpy.dot(csc[:,p0:p1], csc[:,p0:p1].conj().T)
else:
raise KeyError('method = %s' % method)
return proj
class PipekMezey(boys.Boys):
'''
The Pipek-Mezey localization optimizer that maximizes the orbital
population
Args:
mol : Mole object
Kwargs:
mo_coeff : size (N,N) np.array
The orbital space to localize for PM localization.
When initializing the localization optimizer ``bopt = PM(mo_coeff)``,
Note these orbitals ``mo_coeff`` may or may not be used as initial
guess, depending on the attribute ``.init_guess`` . If ``.init_guess``
is set to None, the ``mo_coeff`` will be used as initial guess. If
``.init_guess`` is 'atomic', a few atomic orbitals will be
constructed inside the space of the input orbitals and the atomic
orbitals will be used as initial guess.
Note when calling .kernel(orb) method with a set of orbitals as
argument, the orbitals will be used as initial guess regardless of
the value of the attributes .mo_coeff and .init_guess.
Attributes for PM class:
verbose : int
Print level. Default value equals to :class:`Mole.verbose`.
max_memory : float or int
Allowed memory in MB. Default value equals to :class:`Mole.max_memory`.
conv_tol : float
Converge threshold. Default 1e-6
conv_tol_grad : float
Converge threshold for orbital rotation gradients. Default 1e-3
max_cycle : int
The max. number of macro iterations. Default 100
max_iters : int
The max. number of iterations in each macro iteration. Default 20
max_stepsize : float
            The step size for orbital rotation. A small step (0.005 - 0.05) is preferred.
Default 0.03.
init_guess : str or None
Initial guess for optimization. If set to None, orbitals defined
by the attribute .mo_coeff will be used as initial guess. If set
to 'atomic', atomic orbitals will be used as initial guess.
Default 'atomic'
pop_method : str
How the orbital population is calculated. By default, meta-lowdin
population (JCTC, 10, 3784) is used. It can be set to 'mulliken',
or 'lowdin' for other population definition
exponent : int
The power to define norm. It can be 2 or 4. Default 2.
Saved results
mo_coeff : ndarray
Localized orbitals
'''
pop_method = getattr(__config__, 'lo_pipek_PM_pop_method', 'meta_lowdin')
conv_tol = getattr(__config__, 'lo_pipek_PM_conv_tol', 1e-6)
exponent = getattr(__config__, 'lo_pipek_PM_exponent', 2) # should be 2 or 4
def __init__(self, mol, mo_coeff=None, mf=None):
boys.Boys.__init__(self, mol, mo_coeff)
self._scf = mf
self._keys = self._keys.union(['pop_method', 'exponent', '_scf'])
def dump_flags(self, verbose=None):
boys.Boys.dump_flags(self, verbose)
logger.info(self, 'pop_method = %s',self.pop_method)
def gen_g_hop(self, u):
mo_coeff = lib.dot(self.mo_coeff, u)
pop = self.atomic_pops(self.mol, mo_coeff, self.pop_method)
if self.exponent == 2:
g0 = numpy.einsum('xii,xip->pi', pop, pop)
g = -self.pack_uniq_var(g0-g0.conj().T) * 2
elif self.exponent == 4:
pop3 = numpy.einsum('xii->xi', pop)**3
g0 = numpy.einsum('xi,xip->pi', pop3, pop)
g = -self.pack_uniq_var(g0-g0.conj().T) * 4
else:
raise NotImplementedError('exponent %s' % self.exponent)
h_diag = numpy.einsum('xii,xpp->pi', pop, pop) * 2
g_diag = g0.diagonal()
h_diag-= g_diag + g_diag.reshape(-1,1)
h_diag+= numpy.einsum('xip,xip->pi', pop, pop) * 2
h_diag+= numpy.einsum('xip,xpi->pi', pop, pop) * 2
h_diag = -self.pack_uniq_var(h_diag) * 2
g0 = g0 + g0.conj().T
if self.exponent == 2:
def h_op(x):
x = self.unpack_uniq_var(x)
norb = x.shape[0]
hx = lib.dot(x.T, g0.T).conj()
hx+= numpy.einsum('xip,xi->pi', pop, numpy.einsum('qi,xiq->xi', x, pop)) * 2
hx-= numpy.einsum('xpp,xip->pi', pop,
lib.dot(pop.reshape(-1,norb), x).reshape(-1,norb,norb)) * 2
hx-= numpy.einsum('xip,xp->pi', pop, numpy.einsum('qp,xpq->xp', x, pop)) * 2
return -self.pack_uniq_var(hx-hx.conj().T)
else:
def h_op(x):
x = self.unpack_uniq_var(x)
norb = x.shape[0]
hx = lib.dot(x.T, g0.T).conj() * 2
pop2 = numpy.einsum('xii->xi', pop)**2
pop3 = numpy.einsum('xii->xi', pop)**3
tmp = numpy.einsum('qi,xiq->xi', x, pop) * pop2
hx+= numpy.einsum('xip,xi->pi', pop, tmp) * 12
hx-= numpy.einsum('xp,xip->pi', pop3,
lib.dot(pop.reshape(-1,norb), x).reshape(-1,norb,norb)) * 4
tmp = numpy.einsum('qp,xpq->xp', x, pop) * pop2
hx-= numpy.einsum('xip,xp->pi', pop, tmp) * 12
return -self.pack_uniq_var(hx-hx.conj().T)
return g, h_op, h_diag
def get_grad(self, u=None):
if u is None: u = numpy.eye(self.mo_coeff.shape[1])
mo_coeff = lib.dot(self.mo_coeff, u)
pop = self.atomic_pops(self.mol, mo_coeff, self.pop_method)
if self.exponent == 2:
g0 = numpy.einsum('xii,xip->pi', pop, pop)
g = -self.pack_uniq_var(g0-g0.conj().T) * 2
else:
pop3 = numpy.einsum('xii->xi', pop)**3
g0 = numpy.einsum('xi,xip->pi', pop3, pop)
g = -self.pack_uniq_var(g0-g0.conj().T) * 4
return g
def cost_function(self, u=None):
if u is None: u = numpy.eye(self.mo_coeff.shape[1])
mo_coeff = lib.dot(self.mo_coeff, u)
pop = self.atomic_pops(self.mol, mo_coeff, self.pop_method)
if self.exponent == 2:
return numpy.einsum('xii,xii->', pop, pop)
else:
pop2 = numpy.einsum('xii->xi', pop)**2
return numpy.einsum('xi,xi', pop2, pop2)
@lib.with_doc(atomic_pops.__doc__)
def atomic_pops(self, mol, mo_coeff, method=None):
if method is None:
method = self.pop_method
if method.lower() in ('iao', 'ibo') and self._scf is None:
logger.error(self, 'PM with IAO scheme should include an scf '
'object when creating PM object.\n PM(mol, mf=scf_object)')
raise ValueError('PM attribute method is not valid')
return atomic_pops(mol, mo_coeff, method, self._scf)
PM = Pipek = PipekMezey
if __name__ == '__main__':
from pyscf import gto, scf
mol = gto.Mole()
mol.atom = '''
O 0. 0. 0.2
H 0. -0.5 -0.4
H 0. 0.5 -0.4
'''
mol.basis = 'ccpvdz'
mol.build()
mf = scf.RHF(mol).run()
mo = PM(mol).kernel(mf.mo_coeff[:,5:9], verbose=4)
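    # Illustrative follow-up (assumes `kernel`, inherited from the Boys base
    # class, returns the localized orbitals as used above): the population
    # scheme can be switched before localizing.
    # loc = PM(mol, mf.mo_coeff[:,5:9])
    # loc.pop_method = 'mulliken'
    # mo_mulliken = loc.kernel()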
| 39.784247
| 93
| 0.593441
|
680797bdc320d921661bac084262c4d7f7243a98
| 595
|
py
|
Python
|
ContactBook/core/database.py
|
TheDesTrucToRR/Hacktoberfest-2021
|
2757383c7432909dfbdbda61fb9ca9f6de495cfe
|
[
"MIT"
] | 14
|
2021-10-01T16:53:27.000Z
|
2021-10-17T13:15:44.000Z
|
ContactBook/core/database.py
|
TheDesTrucToRR/Hacktoberfest-2021
|
2757383c7432909dfbdbda61fb9ca9f6de495cfe
|
[
"MIT"
] | 37
|
2021-10-01T17:14:52.000Z
|
2021-10-21T17:26:14.000Z
|
ContactBook/core/database.py
|
TheDesTrucToRR/Hacktoberfest-2021
|
2757383c7432909dfbdbda61fb9ca9f6de495cfe
|
[
"MIT"
] | 38
|
2021-10-01T16:59:16.000Z
|
2021-10-30T16:05:31.000Z
|
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from core.config import config
DATABASE_URL = config.db_url
engine = create_async_engine(
DATABASE_URL,
echo=config.debug
)
Base = declarative_base()
async_session = sessionmaker(
engine,
class_=AsyncSession,
expire_on_commit=False
)
async def init_models():
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.run_sync(Base.metadata.create_all)
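# Usage sketch (illustrative): a request-scoped session in the async
# SQLAlchemy style this module sets up. `get_session` is a hypothetical
# helper, not part of this repository.
#
#     from sqlalchemy.ext.asyncio import AsyncSession
#
#     async def get_session() -> AsyncSession:
#         async with async_session() as session:
#             yield session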
| 22.884615
| 68
| 0.771429
|
7c9657579855c74aef3d80252786d64faa8ba108
| 1,146
|
py
|
Python
|
tor4/nn/modules/linear.py
|
kbrodt/tor4
|
d09740b746c534e67a72f492c7c03654f5888a46
|
[
"MIT"
] | null | null | null |
tor4/nn/modules/linear.py
|
kbrodt/tor4
|
d09740b746c534e67a72f492c7c03654f5888a46
|
[
"MIT"
] | null | null | null |
tor4/nn/modules/linear.py
|
kbrodt/tor4
|
d09740b746c534e67a72f492c7c03654f5888a46
|
[
"MIT"
] | null | null | null |
import math
import tor4
from ...tensor import Tensor
from .. import functional as F
from .. import init
from ..parameter import Parameter
from .module import Module
class Linear(Module):
    def __init__(self, in_features: int, out_features: int, bias: bool = True) -> None:
        # Initialize the Module bookkeeping first so attribute assignment is safe
        super().__init__()
        self.in_features = in_features
        self.out_features = out_features
        self.weight = Parameter(tor4.empty(out_features, in_features))
if bias:
self.bias = Parameter(tor4.zeros(out_features))
self.reset_parameters()
def reset_parameters(self) -> None:
init.kaiming_uniform_(self.weight, a=math.sqrt(5))
if hasattr(self, "bias"):
fan_in, _ = init._calculate_fan_in_and_fan_out(self.weight)
bound = 1 / math.sqrt(fan_in)
init.uniform_(self.bias, -bound, bound)
def extra_repr(self) -> str:
return f'in_features={self.in_features}, out_features={self.out_features}, bias={hasattr(self, "bias")}'
def forward(self, x: Tensor) -> Tensor:
return F.linear(x, self.weight, self.bias if hasattr(self, "bias") else None)
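# Usage sketch (illustrative; assumes tor4's Module dispatches instance calls
# to `forward`, mirroring the PyTorch convention):
#
#     layer = Linear(in_features=4, out_features=2)
#     y = layer.forward(tor4.zeros(8, 4))  # y has shape (8, 2)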
| 31.833333
| 112
| 0.659686
|
7a44aa5e5d3cdaa5f1f8d2442cc9541e03efc588
| 9,377
|
py
|
Python
|
reco_utils/recommender/lightfm/lightfm_utils.py
|
suhoy901/recommenders
|
8ec9f1950d694a5aeaa3d463ac23cad661a30a11
|
[
"MIT"
] | 28
|
2021-11-12T08:26:40.000Z
|
2022-03-27T07:21:24.000Z
|
reco_utils/recommender/lightfm/lightfm_utils.py
|
xynm/recommenders
|
839e0444fcf9f1a085de88417c61f8f938b932c9
|
[
"MIT"
] | 5
|
2021-11-10T02:58:32.000Z
|
2022-03-21T16:13:11.000Z
|
reco_utils/recommender/lightfm/lightfm_utils.py
|
xynm/recommenders
|
839e0444fcf9f1a085de88417c61f8f938b932c9
|
[
"MIT"
] | 9
|
2021-11-03T07:14:47.000Z
|
2022-02-22T13:42:04.000Z
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import lightfm
from lightfm.evaluation import precision_at_k, recall_at_k
def model_perf_plots(df):
"""Function to plot model performance metrics.
Args:
df (pd.DataFrame): Dataframe in tidy format, with ['epoch','level','value'] columns
Returns:
obj: matplotlib axes
"""
g = sns.FacetGrid(df, col="metric", hue="stage", col_wrap=2, sharey=False)
    g = g.map(sns.scatterplot, "epoch", "value").add_legend()
    return g
def compare_metric(df_list, metric="prec", stage="test"):
"""Function to combine and prepare list of dataframes into tidy format.
Args:
df_list (list): List of dataframes
        metric (str): name of the metric to be extracted, optional
stage (str): name of model fitting stage to be extracted, optional
Returns:
pd.DataFrame: Metrics
"""
colnames = ["model" + str(x) for x in list(range(1, len(df_list) + 1))]
models = [
df[(df["stage"] == stage) & (df["metric"] == metric)]["value"]
.reset_index(drop=True)
.values
for df in df_list
]
output = pd.DataFrame(zip(*models), columns=colnames).stack().reset_index()
output.columns = ["epoch", "data", "value"]
return output
def track_model_metrics(
model,
train_interactions,
test_interactions,
k=10,
no_epochs=100,
no_threads=8,
show_plot=True,
**kwargs
):
"""Function to record model's performance at each epoch, formats the performance into tidy format,
plots the performance and outputs the performance data.
Args:
model (LightFM instance): fitted LightFM model
train_interactions (scipy sparse COO matrix): train interactions set
        test_interactions (scipy sparse COO matrix): test interactions set
k (int): number of recommendations, optional
no_epochs (int): Number of epochs to run, optional
no_threads (int): Number of parallel threads to use, optional
**kwargs: other keyword arguments to be passed down
Returns:
pd.DataFrame: performance traces of the fitted model
LightFM model: fitted model
matplotlib axes: side effect of the method
"""
# initialising temp data storage
model_prec_train = [0] * no_epochs
model_prec_test = [0] * no_epochs
model_rec_train = [0] * no_epochs
model_rec_test = [0] * no_epochs
# fit model and store train/test metrics at each epoch
for epoch in range(no_epochs):
model.fit_partial(
interactions=train_interactions, epochs=1, num_threads=no_threads, **kwargs
)
model_prec_train[epoch] = precision_at_k(
model, train_interactions, k=k, **kwargs
).mean()
model_prec_test[epoch] = precision_at_k(
model, test_interactions, k=k, **kwargs
).mean()
model_rec_train[epoch] = recall_at_k(
model, train_interactions, k=k, **kwargs
).mean()
model_rec_test[epoch] = recall_at_k(
model, test_interactions, k=k, **kwargs
).mean()
# collect the performance metrics into a dataframe
fitting_metrics = pd.DataFrame(
zip(model_prec_train, model_prec_test, model_rec_train, model_rec_test),
columns=[
"model_prec_train",
"model_prec_test",
"model_rec_train",
"model_rec_test",
],
)
# convert into tidy format
fitting_metrics = fitting_metrics.stack().reset_index()
fitting_metrics.columns = ["epoch", "level", "value"]
    # extract the labels for each observation
fitting_metrics["stage"] = fitting_metrics.level.str.split("_").str[-1]
fitting_metrics["metric"] = fitting_metrics.level.str.split("_").str[1]
fitting_metrics.drop(["level"], axis=1, inplace=True)
# replace the metric keys to improve visualisation
metric_keys = {"prec": "Precision", "rec": "Recall"}
fitting_metrics.metric.replace(metric_keys, inplace=True)
# plots the performance data
    if show_plot:
model_perf_plots(fitting_metrics)
return fitting_metrics, model
def similar_users(user_id, user_features, model, N=10):
"""Function to return top N similar users based on https://github.com/lyst/lightfm/issues/244#issuecomment-355305681
Args:
user_id (int): id of user to be used as reference
        user_features (scipy sparse CSR matrix): user feature matrix
model (LightFM instance): fitted LightFM model
N (int): Number of top similar users to return
Returns:
pd.DataFrame: top N most similar users with score
"""
_, user_representations = model.get_user_representations(features=user_features)
# Cosine similarity
scores = user_representations.dot(user_representations[user_id, :])
user_norms = np.linalg.norm(user_representations, axis=1)
user_norms[user_norms == 0] = 1e-10
scores /= user_norms
best = np.argpartition(scores, -(N + 1))[-(N + 1) :]
return pd.DataFrame(
sorted(zip(best, scores[best] / user_norms[user_id]), key=lambda x: -x[1])[1:],
columns=["userID", "score"],
)
def similar_items(item_id, item_features, model, N=10):
"""Function to return top N similar items
based on https://github.com/lyst/lightfm/issues/244#issuecomment-355305681
Args:
item_id (int): id of item to be used as reference
        item_features (scipy sparse CSR matrix): item feature matrix
model (LightFM instance): fitted LightFM model
N (int): Number of top similar items to return
Returns:
pd.DataFrame: top N most similar items with score
"""
_, item_representations = model.get_item_representations(features=item_features)
# Cosine similarity
scores = item_representations.dot(item_representations[item_id, :])
item_norms = np.linalg.norm(item_representations, axis=1)
item_norms[item_norms == 0] = 1e-10
scores /= item_norms
best = np.argpartition(scores, -(N + 1))[-(N + 1) :]
return pd.DataFrame(
sorted(zip(best, scores[best] / item_norms[item_id]), key=lambda x: -x[1])[1:],
columns=["itemID", "score"],
)
def prepare_test_df(test_idx, uids, iids, uid_map, iid_map, weights):
"""Function to prepare test df for evaluation
Args:
test_idx (slice): slice of test indices
uids (np.array): Array of internal user indices
iids (np.array): Array of internal item indices
uid_map (dict): Keys to map internal user indices to external ids.
iid_map (dict): Keys to map internal item indices to external ids.
        weights (np.float32 coo_matrix): user-item interaction matrix
Returns:
pd.DataFrame: user-item selected for testing
"""
test_df = pd.DataFrame(
zip(
uids[test_idx],
iids[test_idx],
[list(uid_map.keys())[x] for x in uids[test_idx]],
[list(iid_map.keys())[x] for x in iids[test_idx]],
),
columns=["uid", "iid", "userID", "itemID"],
)
dok_weights = weights.todok()
test_df["rating"] = test_df.apply(lambda x: dok_weights[x.uid, x.iid], axis=1)
return test_df[["userID", "itemID", "rating"]]
def prepare_all_predictions(
data,
uid_map,
iid_map,
interactions,
model,
num_threads,
user_features=None,
item_features=None,
):
"""Function to prepare all predictions for evaluation.
Args:
data (pandas df): dataframe of all users, items and ratings as loaded
uid_map (dict): Keys to map internal user indices to external ids.
iid_map (dict): Keys to map internal item indices to external ids.
        interactions (np.float32 coo_matrix): user-item interaction matrix
model (LightFM instance): fitted LightFM model
num_threads (int): number of parallel computation threads
user_features (np.float32 csr_matrix): User weights over features
item_features (np.float32 csr_matrix): Item weights over features
Returns:
pd.DataFrame of all predictions
"""
users, items, preds = [], [], []
item = list(data.itemID.unique())
for user in data.userID.unique():
user = [user] * len(item)
users.extend(user)
items.extend(item)
all_predictions = pd.DataFrame(data={"userID": users, "itemID": items})
all_predictions["uid"] = all_predictions.userID.map(uid_map)
all_predictions["iid"] = all_predictions.itemID.map(iid_map)
dok_weights = interactions.todok()
all_predictions["rating"] = all_predictions.apply(
lambda x: dok_weights[x.uid, x.iid], axis=1
)
all_predictions = all_predictions[all_predictions.rating < 1].reset_index(drop=True)
all_predictions = all_predictions.drop("rating", axis=1)
all_predictions["prediction"] = all_predictions.apply(
lambda x: model.predict(
user_ids=x["uid"],
item_ids=[x["iid"]],
user_features=user_features,
item_features=item_features,
num_threads=num_threads,
)[0],
axis=1,
)
return all_predictions[["userID", "itemID", "prediction"]]
| 34.72963
| 120
| 0.65426
|
16565004e002e38ef4746e25368aeb015e499c7e
| 123
|
py
|
Python
|
controller/page/tools.py
|
supersheep/huixiang
|
7d209681ed6699ef7f28383b982f4e4a74c6eb37
|
[
"MIT"
] | 4
|
2015-10-10T08:34:37.000Z
|
2021-11-07T20:26:07.000Z
|
controller/page/tools.py
|
supersheep/huixiang
|
7d209681ed6699ef7f28383b982f4e4a74c6eb37
|
[
"MIT"
] | null | null | null |
controller/page/tools.py
|
supersheep/huixiang
|
7d209681ed6699ef7f28383b982f4e4a74c6eb37
|
[
"MIT"
] | null | null | null |
from config.setting import render
from base import base
class tools(base):
def GET(self):
return render.tools()
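# Usage sketch (illustrative): web.py-style controllers like this one are
# mounted through a URL mapping; the mapping below is an assumption, not
# taken from this repository's actual entry point.
#
#     import web
#     urls = ('/tools', 'controller.page.tools.tools')
#     app = web.application(urls, globals())
#     # app.run()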
| 24.6
| 33
| 0.715447
|
7b84c9b3d4421a1ff235ee7bf3332ef0a383103d
| 8,324
|
py
|
Python
|
ct_tests/lib/ims_test_api_helpers.py
|
Cray-HPE/ims
|
d684cb1cd04cad4ee2246049ae27d3291dd5f469
|
[
"MIT"
] | 1
|
2022-01-26T21:18:52.000Z
|
2022-01-26T21:18:52.000Z
|
ct_tests/lib/ims_test_api_helpers.py
|
Cray-HPE/ims
|
d684cb1cd04cad4ee2246049ae27d3291dd5f469
|
[
"MIT"
] | 2
|
2021-12-17T21:47:53.000Z
|
2022-02-28T15:55:34.000Z
|
ct_tests/lib/ims_test_api_helpers.py
|
Cray-HPE/ims
|
d684cb1cd04cad4ee2246049ae27d3291dd5f469
|
[
"MIT"
] | null | null | null |
#
# MIT License
#
# (C) Copyright 2020-2022 Hewlett Packard Enterprise Development LP
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
"""
Test helper functions for API calls
"""
from ims_test_helpers import add_resource, error_exit, exception_exit, get_field_from_json, get_resource
from ims_test_k8s_helpers import get_k8s_secret
from ims_test_logger import info, section, subtest, debug, warn, error
import requests
import warnings
url_base = "https://api-gw-service-nmn.local"
ims_url_base = "%s/apis/ims" % url_base
token_url="%s/keycloak/realms/shasta/protocol/openid-connect/token" % url_base
ims_images_url_base="%s/images" % ims_url_base
ims_jobs_url_base="%s/jobs" % ims_url_base
ims_public_keys_url_base="%s/public-keys" % ims_url_base
ims_recipes_url_base="%s/recipes" % ims_url_base
#
# IMS endpoint construction functions
#
def ims_url(thing_type, id=None):
url_base = "{ims_url_base}/{ims_thing_type}s".format(
ims_url_base=ims_url_base,
ims_thing_type=thing_type.replace("_","-"))
if id:
return "%s/%s" % (url_base, id)
return url_base
#
# API utility functions
#
def show_response(resp):
"""
Displays and logs the contents of an API response
"""
debug("Status code of API response: %d" % resp.status_code)
for field in ['reason','text','headers']:
val = getattr(resp, field)
if val:
debug("API response %s: %s" % (field, str(val)))
def do_request(method, url, **kwargs):
"""
Wrapper for call to requests functions. Displays, logs, and makes the request,
then displays, logs, and returns the response.
"""
req_args = { "verify": False, "timeout": 30 }
req_args.update(kwargs)
debug("Sending %s request to %s with following arguments" % (method.__name__, url))
for k in req_args:
debug("%s = %s" % (k, str(req_args[k])))
with warnings.catch_warnings():
warnings.simplefilter("ignore",
category=requests.packages.urllib3.exceptions.InsecureRequestWarning)
try:
resp = method(url=url, **req_args)
show_response(resp)
return resp
except Exception as e:
exception_exit(e, "API request")
def check_response(resp, expected_sc=200, return_json=False):
"""
Checks to make sure the response has the expected status code. If requested,
    returns the JSON object from the response.
"""
if resp.status_code != expected_sc:
error_exit("Request status code expected to be %d, but was not" % expected_sc)
if return_json:
try:
return resp.json()
except Exception as e:
exception_exit(e, "to decode JSON object in response body")
#
# Auth functions
#
def validate_auth_token_response(token_resp):
auth_token = check_response(resp=token_resp, return_json=True)
for k in [ "access_token", "refresh_token" ]:
try:
if k not in auth_token:
error_exit("%s field not found in JSON object of response" % k)
except Exception as e:
exception_exit(e, "checking %s field from JSON object in response" % k)
add_resource("auth_token", auth_token)
return auth_token
def get_auth_token():
"""
Requests and stores a new auth token
"""
auth_token = get_resource("auth_token", not_found_okay=True)
if auth_token != None:
return auth_token
info("Getting auth token")
secret = get_k8s_secret()
request_data = {
"grant_type": "client_credentials",
"client_id": "admin-client",
"client_secret": secret }
token_resp = do_request(method=requests.post, url=token_url, data=request_data)
auth_token = validate_auth_token_response(token_resp)
info("Auth token successfully obtained")
return auth_token
def refresh_auth_token(auth_token):
"""
Refreshes a previously-obtained auth token
"""
info("Refreshing auth token")
    secret = get_k8s_secret()
request_data = {
"grant_type": "refresh_token",
"refresh_token": auth_token["refresh_token"],
"client_id": "admin-client",
"client_secret": secret }
token_resp = do_request(method=requests.post, url=token_url, data=request_data)
auth_token = validate_auth_token_response(token_resp)
info("Auth token successfully refreshed")
return auth_token
def do_request_with_auth_retry(method, expected_sc, return_json=False, **kwargs):
"""
Wrapper to our earlier requests wrapper. This wrapper calls the previous wrapper,
but if the response indicates an expired token error, then the token is refreshed
and the request is re-tried with the refreshed token. A maximum of one retry will
be attempted.
"""
auth_token = get_auth_token()
try:
kwargs["headers"]["Authorization"] = "Bearer %s" % auth_token["access_token"]
except KeyError:
kwargs["headers"] = { "Authorization": "Bearer %s" % auth_token["access_token"] }
resp = do_request(method=method, **kwargs)
if resp.status_code != 401 or expected_sc == 401:
if return_json:
return check_response(resp=resp, expected_sc=expected_sc, return_json=True)
check_response(resp=resp, expected_sc=expected_sc)
return resp
else:
json_obj = check_response(resp=resp, expected_sc=401, return_json=True)
try:
if json_obj["exp"] != "token expired":
error_exit("Expected response with status code %d" % expected_sc)
except KeyError:
error_exit("Expected response with status code %d" % expected_sc)
debug("Received token expired response (status code 401). Will attempt to refresh auth token and retry request")
        auth_token = refresh_auth_token(auth_token)
kwargs["headers"]["Authorization"] = "Bearer %s" % auth_token["access_token"]
debug("Retrying request")
        resp = do_request(method=method, **kwargs)
if return_json:
return check_response(resp=resp, expected_sc=expected_sc, return_json=True)
check_response(resp=resp, expected_sc=expected_sc)
return resp
#
# Requests functions
#
def requests_get(expected_sc=200, **kwargs):
"""
Calls our above requests wrapper for a GET request, and sets the default expected status code to 200
"""
return do_request_with_auth_retry(method=requests.get, expected_sc=expected_sc, **kwargs)
def requests_post(expected_sc=201, **kwargs):
"""
Calls our above requests wrapper for a POST request, and sets the default expected status code to 201.
If a JSON object is being included in the request, the appropriate content-type field is set in the
header, if not already set.
"""
if "json" in kwargs:
try:
if "Content-Type" not in kwargs["headers"]:
kwargs["headers"]["Content-Type"] = "application/json"
except KeyError:
kwargs["headers"] = { "Content-Type": "application/json" }
return do_request_with_auth_retry(method=requests.post, expected_sc=expected_sc, **kwargs)
def requests_delete(expected_sc=204, **kwargs):
"""
Calls our above requests wrapper for a DELETE request, and sets the default expected status code to 204
"""
return do_request_with_auth_retry(method=requests.delete, expected_sc=expected_sc, **kwargs)
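# Usage sketch (illustrative): the wrappers above handle auth tokens and
# status-code checks, so a test stays one call per line. The field names in
# the POST body follow the IMS public-keys API and are assumptions here.
#
#     keys = requests_get(url=ims_url("public_key"), return_json=True)
#     new_key = requests_post(
#         url=ims_url("public_key"),
#         json={"name": "ct-test-key", "public_key": "ssh-rsa AAAA..."},
#         return_json=True,
#     )
#     requests_delete(url=ims_url("public_key", id=new_key["id"]))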
| 38.716279
| 116
| 0.697381
|
54fb0c5222404ddf746e659676a586204705dc9b
| 587
|
py
|
Python
|
tasks/migrations/0005_alter_task_user.py
|
nrblt/ToDoAppWithAuth
|
8b1b3fb9b4717e78701de808eb3b063c86c84f7a
|
[
"MIT"
] | null | null | null |
tasks/migrations/0005_alter_task_user.py
|
nrblt/ToDoAppWithAuth
|
8b1b3fb9b4717e78701de808eb3b063c86c84f7a
|
[
"MIT"
] | null | null | null |
tasks/migrations/0005_alter_task_user.py
|
nrblt/ToDoAppWithAuth
|
8b1b3fb9b4717e78701de808eb3b063c86c84f7a
|
[
"MIT"
] | null | null | null |
# Generated by Django 4.0.1 on 2022-01-16 14:58
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('tasks', '0004_task_user'),
]
operations = [
migrations.AlterField(
model_name='task',
name='user',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
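# For reference (illustrative): the model declaration this migration encodes
# would look like the following on the Task model.
#
#     user = models.ForeignKey(
#         settings.AUTH_USER_MODEL,
#         on_delete=models.CASCADE,
#         null=True,
#         blank=True,
#     )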
| 26.681818
| 133
| 0.67121
|
dbf3ad422db34702dac6459a7f0b54ab9cc445de
| 14,774
|
py
|
Python
|
sql/utils/tests.py
|
yetHandsome/Archery
|
784b6459478a9adad3dab657ab6a005964975be9
|
[
"Apache-2.0"
] | null | null | null |
sql/utils/tests.py
|
yetHandsome/Archery
|
784b6459478a9adad3dab657ab6a005964975be9
|
[
"Apache-2.0"
] | null | null | null |
sql/utils/tests.py
|
yetHandsome/Archery
|
784b6459478a9adad3dab657ab6a005964975be9
|
[
"Apache-2.0"
] | 1
|
2021-05-11T02:10:42.000Z
|
2021-05-11T02:10:42.000Z
|
# -*- coding: UTF-8 -*-
"""
@author: hhyo
@license: Apache Licence
@file: tests.py
@time: 2019/03/14
"""
import datetime
import json
from unittest.mock import patch
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission
from django.test import TestCase, Client
from common.config import SysConfig
from sql.models import SqlWorkflow, SqlWorkflowContent, Instance, ResourceGroup, ResourceGroupRelations
from sql.utils.extract_tables import TableReference
from sql.utils.sql_review import is_auto_review, can_execute, can_timingtask, can_cancel
from sql.utils.sql_utils import *
User = get_user_model()
__author__ = 'hhyo'
class TestSQLUtils(TestCase):
def test_get_syntax_type(self):
"""
        Test syntax type detection
:return:
"""
dml_sql = "select * from users;"
ddl_sql = "alter table users add id not null default 0 comment 'id' "
self.assertEqual(get_syntax_type(dml_sql), 'DML')
self.assertEqual(get_syntax_type(ddl_sql), 'DDL')
def test_extract_tables(self):
"""
        Test table extraction from SQL
:return:
"""
sql = "select * from user.users a join logs.log b on a.id=b.id;"
self.assertEqual(extract_tables(sql), [{'name': 'users', 'schema': 'user'}, {'name': 'log', 'schema': 'logs'}])
def test_generate_sql_from_sql(self):
"""
        Test parsing SQL statements from plain SQL text
:return:
"""
text = "select * from sql_user;select * from sql_workflow;"
rows = generate_sql(text)
self.assertListEqual(rows, [{'sql_id': 1, 'sql': 'select * from sql_user;'},
{'sql_id': 2, 'sql': 'select * from sql_workflow;'}]
)
def test_generate_sql_from_xml(self):
"""
        Test parsing SQL statements from XML (MyBatis mapper) text
:return:
"""
text = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="Test">
<select id="testParameters">
SELECT
name,
category,
price
FROM
fruits
WHERE
category = #{category}
AND price > ${price}
</select>
</mapper>
"""
rows = generate_sql(text)
self.assertEqual(rows, [{'sql_id': 'testParameters',
'sql': '\nSELECT name,\n category,\n price\nFROM fruits\nWHERE category = ?\n AND price > ?'}]
)
class TestSQLReview(TestCase):
"""
    Tests for the methods in sql review
"""
def setUp(self):
self.superuser = User.objects.create(username='super', is_superuser=True)
self.user = User.objects.create(username='user')
        # When running on travis.ci, the instance is kept consistent with the test service
self.master = Instance(instance_name='test_instance', type='master', db_type='mysql',
host=settings.DATABASES['default']['HOST'],
port=settings.DATABASES['default']['PORT'],
user=settings.DATABASES['default']['USER'],
password=settings.DATABASES['default']['PASSWORD'])
self.master.save()
self.sys_config = SysConfig()
self.client = Client()
self.group = ResourceGroup.objects.create(group_id=1, group_name='group_name')
self.wf1 = SqlWorkflow.objects.create(
workflow_name='workflow_name',
group_id=self.group.group_id,
group_name=self.group.group_name,
engineer=self.superuser.username,
engineer_display=self.superuser.display,
audit_auth_groups='audit_auth_groups',
create_time=datetime.datetime.now(),
status='workflow_review_pass',
is_backup=True,
instance=self.master,
db_name='db_name',
syntax_type=1,
)
self.wfc1 = SqlWorkflowContent.objects.create(
workflow=self.wf1,
sql_content='some_sql',
execute_result=''
)
def tearDown(self):
self.wf1.delete()
self.group.delete()
self.superuser.delete()
self.user.delete()
self.master.delete()
self.sys_config.replace(json.dumps({}))
@patch('sql.engines.get_engine')
def test_auto_review_hit_review_regex(self, _get_engine, ):
"""
        Test the auto-approval criteria: the SQL matches the review regex
:return:
"""
        # Enable the auto-review settings
        self.sys_config.set('auto_review', 'true')
        self.sys_config.set('auto_review_regex', '^drop')  # DROP statements require manual review
        self.sys_config.set('auto_review_max_update_rows', '50')  # updates affecting more than 50 rows require review
        self.sys_config.get_all_config()
        # Change the workflow SQL to a DROP statement
self.wfc1.sql_content = "drop table users;"
self.wfc1.save(update_fields=('sql_content',))
r = is_auto_review(self.wfc1.workflow_id)
self.assertFalse(r)
@patch('sql.engines.mysql.MysqlEngine.execute_check')
@patch('sql.engines.get_engine')
def test_auto_review_gt_max_update_rows(self, _get_engine, _execute_check):
"""
        Test the auto-approval criteria: affected rows exceed auto_review_max_update_rows
:return:
"""
        # Enable the auto-review settings
        self.sys_config.set('auto_review', 'true')
        self.sys_config.set('auto_review_regex', '^drop')  # DROP statements require manual review
        self.sys_config.set('auto_review_max_update_rows', '2')  # updates affecting more than 2 rows require review
        self.sys_config.get_all_config()
        # Change the workflow SQL to an UPDATE statement
self.wfc1.sql_content = "update table users set email='';"
self.wfc1.save(update_fields=('sql_content',))
        # Mock the check result: the update affects 3 rows
_execute_check.return_value.to_dict.return_value = [
{"id": 1, "stage": "CHECKED", "errlevel": 0, "stagestatus": "Audit completed", "errormessage": "None",
"sql": "use archer_test", "affected_rows": 0, "sequence": "'0_0_0'", "backup_dbname": "None",
"execute_time": "0", "sqlsha1": "", "actual_affected_rows": 'null'},
{"id": 2, "stage": "CHECKED", "errlevel": 0, "stagestatus": "Audit completed", "errormessage": "None",
"sql": "update table users set email=''", "affected_rows": 3, "sequence": "'0_0_1'",
"backup_dbname": "mysql_3306_archer_test", "execute_time": "0", "sqlsha1": "",
"actual_affected_rows": 'null'}]
r = is_auto_review(self.wfc1.workflow_id)
self.assertFalse(r)
@patch('sql.engines.mysql.MysqlEngine.execute_check')
@patch('sql.engines.get_engine')
def test_auto_review_true(self, _get_engine, _execute_check):
"""
        Test the auto-approval criteria: the workflow qualifies for auto approval
:return:
"""
        # Enable the auto-review settings
        self.sys_config.set('auto_review', 'true')
        self.sys_config.set('auto_review_regex', '^drop')  # DROP statements require manual review
        self.sys_config.set('auto_review_max_update_rows', '2')  # updates affecting more than 2 rows require review
        self.sys_config.get_all_config()
        # Change the workflow SQL to an UPDATE statement
self.wfc1.sql_content = "update table users set email='';"
self.wfc1.save(update_fields=('sql_content',))
        # Mock the check result: the update affects 1 row
_execute_check.return_value.to_dict.return_value = [
{"id": 1, "stage": "CHECKED", "errlevel": 0, "stagestatus": "Audit completed", "errormessage": "None",
"sql": "use archer_test", "affected_rows": 0, "sequence": "'0_0_0'", "backup_dbname": "None",
"execute_time": "0", "sqlsha1": "", "actual_affected_rows": 'null'},
{"id": 2, "stage": "CHECKED", "errlevel": 0, "stagestatus": "Audit completed", "errormessage": "None",
"sql": "update table users set email=''", "affected_rows": 1, "sequence": "'0_0_1'",
"backup_dbname": "mysql_3306_archer_test", "execute_time": "0", "sqlsha1": "",
"actual_affected_rows": 'null'}]
r = is_auto_review(self.wfc1.workflow_id)
self.assertTrue(r)
def test_can_execute_for_resource_group(self, ):
"""
        Test the can-execute criteria: the logged-in user has resource-group-level
        execute permission and is a member of the group
:return:
"""
        # Set the workflow to workflow_review_pass; the user has resource-group execute permission and is in the group
self.wf1.status = 'workflow_review_pass'
self.wf1.save(update_fields=('status',))
sql_execute_for_resource_group = Permission.objects.get(codename='sql_execute_for_resource_group')
self.user.user_permissions.add(sql_execute_for_resource_group)
ResourceGroupRelations.objects.create(object_type=0, object_id=self.user.id, group_id=self.group.group_id)
r = can_execute(user=self.user, workflow_id=self.wfc1.workflow_id)
self.assertTrue(r)
def test_can_execute_true(self, ):
"""
        Test the can-execute criteria: the current user is the submitter, has
        execute permission, and the workflow status is review-passed
:return:
"""
        # Set the workflow to workflow_review_pass; the current user is the submitter and has execute permission
self.wf1.status = 'workflow_review_pass'
self.wf1.engineer = self.user.username
self.wf1.save(update_fields=('status', 'engineer'))
sql_execute = Permission.objects.get(codename='sql_execute')
self.user.user_permissions.add(sql_execute)
r = can_execute(user=self.user, workflow_id=self.wfc1.workflow_id)
self.assertTrue(r)
def test_can_execute_workflow_timing_task(self, ):
"""
        Test the can-execute criteria: the current user is the submitter, has
        execute permission, and the workflow status is scheduled execution
:return:
"""
        # Set the workflow to workflow_timingtask; the current user is the submitter and has execute permission
self.wf1.status = 'workflow_timingtask'
self.wf1.engineer = self.user.username
self.wf1.save(update_fields=('status', 'engineer'))
sql_execute = Permission.objects.get(codename='sql_execute')
self.user.user_permissions.add(sql_execute)
r = can_execute(user=self.user, workflow_id=self.wfc1.workflow_id)
self.assertTrue(r)
def test_can_execute_false_no_permission(self, ):
"""
        The current user is the submitter but has no execute permission
:return:
"""
        # Set the workflow to workflow_timingtask; the current user is the submitter but has no execute permission
self.wf1.status = 'workflow_timingtask'
self.wf1.engineer = self.user.username
self.wf1.save(update_fields=('status', 'engineer'))
r = can_execute(user=self.user, workflow_id=self.wfc1.workflow_id)
self.assertFalse(r)
    def test_can_execute_false_not_in_group(self):
        """
        The logged-in user has the resource-group-level execute permission
        but is not a member of the group.
        :return:
        """
        # Set the workflow to workflow_review_pass; the user has resource-group execute permission but is not in the group
self.wf1.status = 'workflow_review_pass'
self.wf1.save(update_fields=('status',))
sql_execute_for_resource_group = Permission.objects.get(codename='sql_execute_for_resource_group')
self.user.user_permissions.add(sql_execute_for_resource_group)
r = can_execute(user=self.user, workflow_id=self.wfc1.workflow_id)
self.assertFalse(r)
    def test_can_execute_false_wrong_status(self):
        """
        The logged-in user is the submitter and has execute permission,
        but the workflow status is pending manual review.
        :return:
        """
        # Set the workflow to workflow_manreviewing; the user is the submitter and has execute permission, but the workflow is pending review
self.wf1.status = 'workflow_manreviewing'
self.wf1.engineer = self.user.username
self.wf1.save(update_fields=('status', 'engineer'))
sql_execute = Permission.objects.get(codename='sql_execute')
self.user.user_permissions.add(sql_execute)
r = can_execute(user=self.user, workflow_id=self.wfc1.workflow_id)
self.assertFalse(r)
    def test_can_timingtask_true(self):
        """
        Test the timed-execution permission check: the logged-in user is the
        submitter, has execute permission, and the workflow is review-passed.
        :return:
        """
        # Set the workflow to workflow_review_pass; the user is the submitter and has execute permission
self.wf1.status = 'workflow_review_pass'
self.wf1.engineer = self.user.username
self.wf1.save(update_fields=('status', 'engineer'))
sql_execute = Permission.objects.get(codename='sql_execute')
self.user.user_permissions.add(sql_execute)
r = can_timingtask(user=self.user, workflow_id=self.wfc1.workflow_id)
self.assertTrue(r)
    def test_can_timingtask_false(self):
        """
        Test the timed-execution permission check: the logged-in user has
        execute permission and the workflow is review-passed, but the user
        is not the submitter.
        :return:
        """
        # Set the workflow to workflow_review_pass; the submitter is the superuser, not the logged-in user
self.wf1.status = 'workflow_review_pass'
self.wf1.engineer = self.superuser.username
self.wf1.save(update_fields=('status', 'engineer'))
sql_execute = Permission.objects.get(codename='sql_execute')
self.user.user_permissions.add(sql_execute)
r = can_timingtask(user=self.user, workflow_id=self.wfc1.workflow_id)
self.assertFalse(r)
@patch('sql.utils.workflow_audit.Audit.can_review')
def test_can_cancel_true_for_apply_user(self, _can_review):
"""
测试是否能取消,审核中的工单,提交人可终止
:return:
"""
# 修改工单为workflow_review_pass,当前登录用户为提交人
self.wf1.status = 'workflow_manreviewing'
self.wf1.engineer = self.user.username
self.wf1.save(update_fields=('status', 'engineer'))
_can_review.return_value = False
r = can_cancel(user=self.user, workflow_id=self.wfc1.workflow_id)
self.assertTrue(r)
@patch('sql.utils.workflow_audit.Audit.can_review')
def test_can_cancel_true_for_audit_user(self, _can_review):
"""
测试是否能取消,审核中的工单,审核人可终止
:return:
"""
# 修改工单为workflow_review_pass,当前登录用户为提交人
self.wf1.status = 'workflow_manreviewing'
self.wf1.engineer = self.superuser.username
self.wf1.save(update_fields=('status', 'engineer'))
_can_review.return_value = True
r = can_cancel(user=self.user, workflow_id=self.wfc1.workflow_id)
self.assertTrue(r)
@patch('sql.utils.sql_review.can_execute')
def test_can_cancel_true_for_execute_user(self, _can_execute):
"""
测试是否能取消,审核通过但未执行的工单,有执行权限的用户终止
:return:
"""
# 修改工单为workflow_review_pass,当前登录用户为提交人
self.wf1.status = 'workflow_review_pass'
self.wf1.engineer = self.user.username
self.wf1.save(update_fields=('status', 'engineer'))
_can_execute.return_value = True
r = can_cancel(user=self.user, workflow_id=self.wfc1.workflow_id)
self.assertTrue(r)
@patch('sql.utils.sql_review.can_execute')
def test_can_cancel_false(self, _can_execute):
"""
测试是否能取消,审核通过但未执行的工单,无执行权限的用户无法终止
:return:
"""
# 修改工单为workflow_review_pass,当前登录用户为提交人
self.wf1.status = 'workflow_review_pass'
self.wf1.engineer = self.user.username
self.wf1.save(update_fields=('status', 'engineer'))
_can_execute.return_value = False
r = can_cancel(user=self.user, workflow_id=self.wfc1.workflow_id)
self.assertFalse(r)
| 40.476712
| 140
| 0.629755
|
2fab08ad9860059835e11bb6f2f414622de5bd5b
| 850
|
py
|
Python
|
045_ssd_mobilenet_v2_oid_v4/01_float32/02_weight_quantization.py
|
IgiArdiyanto/PINTO_model_zoo
|
9247b56a7dff37f28a8a7822a7ef4dd9adf7234d
|
[
"MIT"
] | 1,529
|
2019-12-11T13:36:23.000Z
|
2022-03-31T18:38:27.000Z
|
045_ssd_mobilenet_v2_oid_v4/01_float32/02_weight_quantization.py
|
IgiArdiyanto/PINTO_model_zoo
|
9247b56a7dff37f28a8a7822a7ef4dd9adf7234d
|
[
"MIT"
] | 200
|
2020-01-06T09:24:42.000Z
|
2022-03-31T17:29:08.000Z
|
045_ssd_mobilenet_v2_oid_v4/01_float32/02_weight_quantization.py
|
IgiArdiyanto/PINTO_model_zoo
|
9247b56a7dff37f28a8a7822a7ef4dd9adf7234d
|
[
"MIT"
] | 288
|
2020-02-21T14:56:02.000Z
|
2022-03-30T03:00:35.000Z
|
### tf-nightly==2.3.0-rc1
import tensorflow.compat.v1 as tf
# Weight Quantization - Input/Output=float32
graph_def_file = "export/tflite_graph.pb"
input_arrays = ["normalized_input_image_tensor"]
output_arrays = ['TFLite_Detection_PostProcess', 'TFLite_Detection_PostProcess:1',
                 'TFLite_Detection_PostProcess:2', 'TFLite_Detection_PostProcess:3']
input_tensor = {"normalized_input_image_tensor": [1, 300, 300, 3]}
converter = tf.lite.TFLiteConverter.from_frozen_graph(graph_def_file, input_arrays, output_arrays, input_tensor)
converter.allow_custom_ops=True
converter.optimizations = [tf.lite.Optimize.OPTIMIZE_FOR_SIZE]
tflite_quant_model = converter.convert()
with open('ssd_mobilenet_v2_oid_v4_300x300_weight_quant.tflite', 'wb') as w:
w.write(tflite_quant_model)
print("Weight Quantization complete! - ssd_mobilenet_v2_oid_v4_300x300_weight_quant.tflite")
| 53.125
| 145
| 0.838824
|
7445a31ba6906dd89af106a660544edf16b35063
| 3,211
|
py
|
Python
|
mmedit/models/common/linear_module.py
|
Jian137/mmediting-1
|
e1ac6c93441ec96696d0b530f040b91b809015b6
|
[
"Apache-2.0"
] | 1,884
|
2020-07-09T18:53:43.000Z
|
2022-03-31T12:06:18.000Z
|
mmedit/models/common/linear_module.py
|
Jian137/mmediting-1
|
e1ac6c93441ec96696d0b530f040b91b809015b6
|
[
"Apache-2.0"
] | 622
|
2020-07-09T18:52:27.000Z
|
2022-03-31T14:41:09.000Z
|
mmedit/models/common/linear_module.py
|
Jian137/mmediting-1
|
e1ac6c93441ec96696d0b530f040b91b809015b6
|
[
"Apache-2.0"
] | 361
|
2020-07-09T19:21:47.000Z
|
2022-03-31T09:58:27.000Z
|
# Copyright (c) OpenMMLab. All rights reserved.
import torch.nn as nn
from mmcv.cnn import build_activation_layer, kaiming_init
class LinearModule(nn.Module):
"""A linear block that contains linear/norm/activation layers.
For low level vision, we add spectral norm and padding layer.
Args:
in_features (int): Same as nn.Linear.
out_features (int): Same as nn.Linear.
bias (bool): Same as nn.Linear.
act_cfg (dict): Config dict for activation layer, "relu" by default.
inplace (bool): Whether to use inplace mode for activation.
with_spectral_norm (bool): Whether use spectral norm in linear module.
order (tuple[str]): The order of linear/activation layers. It is a
sequence of "linear", "norm" and "act". Examples are
("linear", "act") and ("act", "linear").
"""
def __init__(self,
in_features,
out_features,
bias=True,
act_cfg=dict(type='ReLU'),
inplace=True,
with_spectral_norm=False,
order=('linear', 'act')):
super().__init__()
assert act_cfg is None or isinstance(act_cfg, dict)
self.act_cfg = act_cfg
self.inplace = inplace
self.with_spectral_norm = with_spectral_norm
self.order = order
assert isinstance(self.order, tuple) and len(self.order) == 2
assert set(order) == set(['linear', 'act'])
self.with_activation = act_cfg is not None
self.with_bias = bias
# build linear layer
self.linear = nn.Linear(in_features, out_features, bias=bias)
# export the attributes of self.linear to a higher level for
# convenience
self.in_features = self.linear.in_features
self.out_features = self.linear.out_features
if self.with_spectral_norm:
self.linear = nn.utils.spectral_norm(self.linear)
# build activation layer
if self.with_activation:
act_cfg_ = act_cfg.copy()
act_cfg_.setdefault('inplace', inplace)
self.activate = build_activation_layer(act_cfg_)
# Use msra init by default
self.init_weights()
def init_weights(self):
if self.with_activation and self.act_cfg['type'] == 'LeakyReLU':
nonlinearity = 'leaky_relu'
a = self.act_cfg.get('negative_slope', 0.01)
else:
nonlinearity = 'relu'
a = 0
kaiming_init(self.linear, a=a, nonlinearity=nonlinearity)
def forward(self, x, activate=True):
"""Forward Function.
Args:
x (torch.Tensor): Input tensor with shape of (n, \*, # noqa: W605
c). Same as ``torch.nn.Linear``.
activate (bool, optional): Whether to use activation layer.
Defaults to True.
Returns:
torch.Tensor: Same as ``torch.nn.Linear``.
"""
for layer in self.order:
if layer == 'linear':
x = self.linear(x)
elif layer == 'act' and activate and self.with_activation:
x = self.activate(x)
return x
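# A minimal usage sketch (hypothetical shapes, assumes torch and mmcv are
# installed; not part of the module):
#
#   import torch
#   fc = LinearModule(in_features=16, out_features=32)
#   out = fc(torch.randn(4, 16))   # linear followed by ReLU, shape (4, 32)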
| 35.677778
| 78
| 0.591093
|
1ebcc698b6d7bda036f6e3afcccd2eafed411ce6
| 509
|
py
|
Python
|
Packs/IntegrationsAndIncidentsHealthCheck/Scripts/IncidentsCheck_Widget_NumberofErrors/IncidentsCheck_Widget_NumberofErrors_test.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 799
|
2016-08-02T06:43:14.000Z
|
2022-03-31T11:10:11.000Z
|
Packs/IntegrationsAndIncidentsHealthCheck/Scripts/IncidentsCheck_Widget_NumberofErrors/IncidentsCheck_Widget_NumberofErrors_test.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 9,317
|
2016-08-07T19:00:51.000Z
|
2022-03-31T21:56:04.000Z
|
Packs/IntegrationsAndIncidentsHealthCheck/Scripts/IncidentsCheck_Widget_NumberofErrors/IncidentsCheck_Widget_NumberofErrors_test.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 1,297
|
2016-08-04T13:59:00.000Z
|
2022-03-31T23:43:06.000Z
|
import pytest
import demistomock as demisto
from IncidentsCheck_Widget_NumberofErrors import main
@pytest.mark.parametrize('list_, expected', [
([{'Contents': '4@7,5@7,44@3,45@3,46@3,47@3,85@48,86@48'}], 8),
([{'Contents': ''}], 0),
([{}], 0)
])
def test_script(mocker, list_, expected):
mocker.patch.object(demisto, 'executeCommand', return_value=list_)
mocker.patch.object(demisto, 'results')
main()
contents = demisto.results.call_args[0][0]
assert contents == expected
| 26.789474
| 70
| 0.67387
|
861d977c7134d83756b34c8ee7dbdb0d27da6da1
| 2,763
|
py
|
Python
|
canvas_grab/config/organize_mode.py
|
Victrid/canvas_grab
|
cf03ec76e29ebe7a74b0ce99eadf920999c4e809
|
[
"MIT"
] | 173
|
2020-03-01T11:58:04.000Z
|
2022-03-08T06:38:34.000Z
|
canvas_grab/config/organize_mode.py
|
ADSWT518/canvas_grab
|
a91c9fa867319f050e547c109c071c91ff7daa63
|
[
"MIT"
] | 77
|
2020-03-03T04:25:24.000Z
|
2022-03-31T22:43:47.000Z
|
canvas_grab/config/organize_mode.py
|
ADSWT518/canvas_grab
|
a91c9fa867319f050e547c109c071c91ff7daa63
|
[
"MIT"
] | 35
|
2020-03-03T05:19:05.000Z
|
2022-03-29T08:46:14.000Z
|
import questionary
from ..configurable import Configurable, Interactable
from ..utils import find_choice
from ..snapshot import CanvasFileSnapshot, CanvasModuleSnapshot
from ..error import CanvasGrabCliError
class OrganizeMode(Configurable, Interactable):
"""OrganizeMode decides how data are stored on disk.
Currently, there are four modes: module (with link) and
as-is (with link).
"""
def __init__(self):
self.mode = 'module'
self.delete_file = False
def get_snapshots(self, course):
if self.mode == 'module_link':
canvas_snapshot_module = CanvasModuleSnapshot(
course, True)
else:
canvas_snapshot_module = CanvasModuleSnapshot(
course)
if self.mode == 'file_link':
canvas_snapshot_file = CanvasFileSnapshot(course, True)
else:
canvas_snapshot_file = CanvasFileSnapshot(course)
if self.mode == 'module' or self.mode == 'module_link':
canvas_snapshots = [canvas_snapshot_module, canvas_snapshot_file]
elif self.mode == 'file' or self.mode == 'file_link':
canvas_snapshots = [canvas_snapshot_file, canvas_snapshot_module]
else:
raise CanvasGrabCliError(f"Unsupported organize mode {self.mode}")
return self.mode, canvas_snapshots
def to_config(self):
return {
'mode': self.mode,
'delete_file': self.delete_file
}
def from_config(self, config):
self.mode = config['mode']
self.delete_file = config['delete_file']
def interact(self):
choices = [
questionary.Choice(
'Organize by module, only download files', 'module'),
questionary.Choice(
'Organize by module, download files, links and pages', 'module_link'),
questionary.Choice(
'As-is in file list', 'file'),
questionary.Choice(
'As-is in file list, plus pages', 'file_link'),
questionary.Choice('Custom', 'custom',
disabled='not supported yet')
]
self.mode = questionary.select(
'Select default file organization mode',
choices,
default=find_choice(choices, self.mode)
).unsafe_ask()
choices = [
questionary.Choice(
"Delete local files if they disappears on Canvas", True),
questionary.Choice("Always keep local files", False)
]
self.delete_file = questionary.select(
'How to handle deleted files on Canvas',
choices,
default=find_choice(choices, self.delete_file)
).unsafe_ask()
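# A minimal round-trip sketch for the config interface (hypothetical usage,
# not part of the module):
#
#   mode = OrganizeMode()
#   saved = mode.to_config()      # {'mode': 'module', 'delete_file': False}
#   restored = OrganizeMode()
#   restored.from_config(saved)   # restores both fields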
| 34.974684
| 86
| 0.601882
|
00d65368d3da628512e929b39f83f8830b21d744
| 581
|
py
|
Python
|
setup.py
|
pizzapanther/gae_boto
|
0de2d83cae8ccc951cecb6cbfd81aadb9a36dd41
|
[
"BSD-3-Clause"
] | 1
|
2016-11-20T03:36:06.000Z
|
2016-11-20T03:36:06.000Z
|
setup.py
|
pizzapanther/gae_boto
|
0de2d83cae8ccc951cecb6cbfd81aadb9a36dd41
|
[
"BSD-3-Clause"
] | 1
|
2017-03-20T03:32:37.000Z
|
2017-03-20T03:32:37.000Z
|
setup.py
|
pizzapanther/gae_boto
|
0de2d83cae8ccc951cecb6cbfd81aadb9a36dd41
|
[
"BSD-3-Clause"
] | null | null | null |
import os
from setuptools import setup, find_packages
MY_DIR = os.path.normpath(os.path.dirname(__file__))
setup(
name = "gae_boto",
version = '13.10.1',
description = "A recreation of Amazon Boto library that is is compatible with Google App Engine and easier to use.",
url = "https://github.com/pizzapanther/gae_boto",
author = "Paul Bailey",
author_email = "paul.m.bailey@gmail.com",
license = "BSD",
packages = ['gae_boto', 'gae_boto.apis', 'gae_boto.apis.templates'],
package_dir = {'gae_boto': MY_DIR},
install_requires = [
"requests>=2.0.0"
]
)
| 27.666667
| 118
| 0.693632
|
74aa7d5f3fcd9ad4e9e57b715ce16901b830cf1d
| 377
|
py
|
Python
|
app/admin/interview_scheduler.py
|
RandyDeng/InterviewScheduler
|
044d39873c4efb0d523772c42af62e8699336f63
|
[
"MIT"
] | null | null | null |
app/admin/interview_scheduler.py
|
RandyDeng/InterviewScheduler
|
044d39873c4efb0d523772c42af62e8699336f63
|
[
"MIT"
] | null | null | null |
app/admin/interview_scheduler.py
|
RandyDeng/InterviewScheduler
|
044d39873c4efb0d523772c42af62e8699336f63
|
[
"MIT"
] | null | null | null |
from flask import session
SESSION_METADATA = 'interview_scheduler_metadata'
SESSION_SCHEDULE = 'interview_scheduler_schedule'
def clean_metadata():
    # pop with a default so a missing key does not raise KeyError
    session.pop(SESSION_METADATA, None)
def clean_schedule():
    session.pop(SESSION_SCHEDULE, None)
def clean_session():
clean_metadata()
clean_schedule()
def json_to_grid():
pass
def generate_timeslots():
pass
| 13.962963
| 49
| 0.753316
|
a15d7de586a5a1b933dd5abf377f00d87de30865
| 2,345
|
py
|
Python
|
skeletonutils.py
|
1fth3n3ls3/pylib
|
889298a2fe6faacfed838c89f1421880572523ff
|
[
"MIT"
] | 1
|
2020-05-26T15:59:26.000Z
|
2020-05-26T15:59:26.000Z
|
skeletonutils.py
|
1fth3n3ls3/pylib
|
889298a2fe6faacfed838c89f1421880572523ff
|
[
"MIT"
] | null | null | null |
skeletonutils.py
|
1fth3n3ls3/pylib
|
889298a2fe6faacfed838c89f1421880572523ff
|
[
"MIT"
] | null | null | null |
import pymel.core as pmc
import utils
# Version 3
def safe_setparent(node, parent):
"""`node.setParent(parent)` if `parent` is
not the same as `node`'s existing parent.
"""
if node.getParent() != parent:
node.setParent(parent)
GREEN = 14
BLUE = 6
YELLOW = 17
def _convert_to_joint(node, parent, prefix,
jnt_size, lcol, rcol, ccol):
pmc.select(clear=True)
j = pmc.joint(name=prefix + node.name())
safe_setparent(j, parent)
j.translate.set(node.translate.get())
j.rotate.set(node.rotate.get())
j.setRadius(jnt_size)
def calc_wirecolor():
x = j.translateX.get()
if x < -0.001:
return rcol
elif x > 0.001:
return lcol
else:
return ccol
j.overrideColor.set(calc_wirecolor())
return j
def convert_to_skeleton(rootnode,
prefix='skel_',
joint_size=1.0,
lcol=BLUE,
rcol=GREEN,
ccol=YELLOW,
_parent=None):
if _parent is None:
_parent = rootnode.getParent()
j = _convert_to_joint(
rootnode, _parent, prefix, joint_size, lcol, rcol, ccol)
children = [node for node in rootnode.getChildren() if utils.isType(node, 'transform')]
for c in children:
convert_to_skeleton(c, prefix, joint_size, lcol, rcol, ccol, j)
return j
def ancestors(node):
"""Return a list of ancestors, starting with the direct parent
and ending with the top-level (root) parent."""
result = []
parent = node.getParent()
while parent is not None:
result.append(parent)
parent = parent.getParent()
return result
def uniqueroots(nodes): #(1)
"""Returns a list of the nodes in `nodes` that are not
children of any node in `nodes`."""
result = []
def handle_node(node): #(2)
"""If any of the ancestors of n are in realroots,
just return, otherwise, append n to realroots.
"""
for ancestor in ancestors(node):
if ancestor in nodes: #(4)
return
result.append(node) #(5)
for node in nodes: #(3)
handle_node(node)
return result
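# A small usage sketch (hypothetical node names, assumes an open Maya scene;
# not part of the module):
#
#   root = pmc.PyNode('skeleton_root')        # an existing transform hierarchy
#   joint_root = convert_to_skeleton(root)    # mirrors the hierarchy as joints
#   roots = uniqueroots(pmc.selected())       # top-most nodes of a selection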
| 28.950617
| 92
| 0.558635
|
aaa49b9ffcf5ed538b68fa67761454249a68b818
| 3,681
|
py
|
Python
|
tests/test_wos.py
|
lawlesst/wos2vivo
|
e0258aa5ccb3acb0924a61bf225f2e6aa439876e
|
[
"MIT"
] | 6
|
2018-04-06T00:06:27.000Z
|
2019-11-06T20:54:09.000Z
|
tests/test_wos.py
|
rpritchett/wos2vivo
|
e0258aa5ccb3acb0924a61bf225f2e6aa439876e
|
[
"MIT"
] | 5
|
2018-02-23T15:27:29.000Z
|
2019-10-30T00:19:32.000Z
|
tests/test_wos.py
|
rpritchett/wos2vivo
|
e0258aa5ccb3acb0924a61bf225f2e6aa439876e
|
[
"MIT"
] | null | null | null |
import base64
import os
from unittest import TestCase
import betamax
from betamax_serializers import pretty_json
betamax.Betamax.register_serializer(pretty_json.PrettyJSONSerializer)
# Directory where test data is stored.
TEST_PATH = os.path.join(
os.path.dirname(os.path.realpath(__file__))
)
# setup recorder
with betamax.Betamax.configure() as config:
config.define_cassette_placeholder(
'<base64 encoded WoS username:password>',
        # b64encode needs bytes on Python 3; decode back to a str placeholder
        base64.b64encode(("%s:%s" % (os.environ['WOS_USER'], os.environ['WOS_PASSWORD'])).encode()).decode()
)
config.default_cassette_options['serialize_with'] = 'prettyjson'
config.cassette_library_dir = os.path.join(TEST_PATH, 'fixtures')
from wos2vivo.client import WoSSession, QueryResponse
from wos2vivo.query import Query
from wos2vivo.record import BIBO, OBO, VIVO, VCARD
from rdflib import RDFS, Literal
class TestWos(TestCase):
def test_org_query(self):
session = WoSSession(login=False)
# Setup HTTP session recording
recorder = betamax.Betamax(session)
with recorder.use_cassette('query', record='once'):
# Login
session.authenticate()
# Run query
query = Query(
"OG=\"University of Florida\"",
span=dict(begin="2016-03-15", end="2016-03-15"),
count=1
)
rsp = session.query(query.to_string())
qrsp = QueryResponse(rsp)
assert qrsp.found == 33
assert qrsp.has_more() is True
# Check a record
rec = qrsp.records[0]
# Test returned metadata.
self.assertEqual(rec.ut(), "WOS:000371581900197")
            self.assertTrue(rec.title().find('Parotid-area Lymph Nodes') > -1)
self.assertEqual(rec.doi(), "10.1016/j.ijrobp.2015.12.247")
# Test RDF output
uri = rec.pub_uri
g = rec.to_rdf()
self.assertEqual(
g.value(subject=uri, predicate=VIVO.identifier),
Literal("WOS:000371581900197")
)
self.assertEqual(
g.value(subject=uri, predicate=RDFS.label),
Literal("Elective Neck Management for Squamous Cell Carcinoma Metastatic to the Parotid-area Lymph Nodes")
)
self.assertEqual(
g.value(subject=uri, predicate=BIBO.doi),
Literal('10.1016/j.ijrobp.2015.12.247')
)
# Test number of authorships
for row in g.query("""
select (count(?aship) as ?authors)
where {
?aship vivo:relates ?pub ;
vivo:relates ?vcard .
?vcard vcard:hasName ?name .
}""",
initBindings=dict(pub=uri),
initNs=(dict(vivo=VIVO, vcard=VCARD))
):
assert row.authors.toPython() == 6
# web link
for row in g.query("""
select ?url
where {
?pub obo:ARG_2000028 ?vci .
?vci vcard:hasURL ?vurl .
?vurl vcard:url ?url .
}""",
initBindings=dict(pub=uri),
initNs=(dict(obo=OBO, vcard=VCARD))
):
self.assertEqual(
row.url.toPython(),
"http://ws.isiknowledge.com/cps/openurl/service?url_ver=Z39.88-2004&rft_id=info:ut/WOS:{}".format(rec.ut())
)
session.close()
| 34.083333
| 127
| 0.536811
|
0e64815944dbd7b89eb0ba47704f3593cb63f9cb
| 87,828
|
py
|
Python
|
Lib/datetime.py
|
mbeacom/cpython
|
65d98d0f53f558d7c799098da0abf376068c15fd
|
[
"CNRI-Python-GPL-Compatible"
] | 854
|
2017-09-11T16:42:28.000Z
|
2022-03-27T14:17:09.000Z
|
Lib/datetime.py
|
mbeacom/cpython
|
65d98d0f53f558d7c799098da0abf376068c15fd
|
[
"CNRI-Python-GPL-Compatible"
] | 164
|
2017-09-24T20:40:32.000Z
|
2021-10-30T01:35:05.000Z
|
Lib/datetime.py
|
mbeacom/cpython
|
65d98d0f53f558d7c799098da0abf376068c15fd
|
[
"CNRI-Python-GPL-Compatible"
] | 73
|
2017-09-13T18:07:48.000Z
|
2022-03-17T13:02:29.000Z
|
"""Concrete date/time and related types.
See http://www.iana.org/time-zones/repository/tz-link.html for
time zone and DST data sources.
"""
import time as _time
import math as _math
import sys
def _cmp(x, y):
return 0 if x == y else 1 if x > y else -1
MINYEAR = 1
MAXYEAR = 9999
_MAXORDINAL = 3652059 # date.max.toordinal()
# Utility functions, adapted from Python's Demo/classes/Dates.py, which
# also assumes the current Gregorian calendar indefinitely extended in
# both directions. Difference: Dates.py calls January 1 of year 0 day
# number 1. The code here calls January 1 of year 1 day number 1. This is
# to match the definition of the "proleptic Gregorian" calendar in Dershowitz
# and Reingold's "Calendrical Calculations", where it's the base calendar
# for all computations. See the book for algorithms for converting between
# proleptic Gregorian ordinals and many other calendar systems.
# -1 is a placeholder for indexing purposes.
_DAYS_IN_MONTH = [-1, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
_DAYS_BEFORE_MONTH = [-1] # -1 is a placeholder for indexing purposes.
dbm = 0
for dim in _DAYS_IN_MONTH[1:]:
_DAYS_BEFORE_MONTH.append(dbm)
dbm += dim
del dbm, dim
def _is_leap(year):
"year -> 1 if leap year, else 0."
return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
def _days_before_year(year):
"year -> number of days before January 1st of year."
y = year - 1
return y*365 + y//4 - y//100 + y//400
def _days_in_month(year, month):
"year, month -> number of days in that month in that year."
assert 1 <= month <= 12, month
if month == 2 and _is_leap(year):
return 29
return _DAYS_IN_MONTH[month]
def _days_before_month(year, month):
"year, month -> number of days in year preceding first day of month."
assert 1 <= month <= 12, 'month must be in 1..12'
return _DAYS_BEFORE_MONTH[month] + (month > 2 and _is_leap(year))
def _ymd2ord(year, month, day):
"year, month, day -> ordinal, considering 01-Jan-0001 as day 1."
assert 1 <= month <= 12, 'month must be in 1..12'
dim = _days_in_month(year, month)
assert 1 <= day <= dim, ('day must be in 1..%d' % dim)
return (_days_before_year(year) +
_days_before_month(year, month) +
day)
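# Quick sanity examples (added for illustration):
assert _ymd2ord(1, 1, 1) == 1     # 01-Jan-0001 is day 1
assert _ymd2ord(2, 1, 1) == 366   # year 1 is not a leap year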
_DI400Y = _days_before_year(401) # number of days in 400 years
_DI100Y = _days_before_year(101) # " " " " 100 "
_DI4Y = _days_before_year(5) # " " " " 4 "
# A 4-year cycle has an extra leap day over what we'd get from pasting
# together 4 single years.
assert _DI4Y == 4 * 365 + 1
# Similarly, a 400-year cycle has an extra leap day over what we'd get from
# pasting together 4 100-year cycles.
assert _DI400Y == 4 * _DI100Y + 1
# OTOH, a 100-year cycle has one fewer leap day than we'd get from
# pasting together 25 4-year cycles.
assert _DI100Y == 25 * _DI4Y - 1
def _ord2ymd(n):
"ordinal -> (year, month, day), considering 01-Jan-0001 as day 1."
# n is a 1-based index, starting at 1-Jan-1. The pattern of leap years
# repeats exactly every 400 years. The basic strategy is to find the
# closest 400-year boundary at or before n, then work with the offset
# from that boundary to n. Life is much clearer if we subtract 1 from
# n first -- then the values of n at 400-year boundaries are exactly
# those divisible by _DI400Y:
#
# D M Y n n-1
# -- --- ---- ---------- ----------------
# 31 Dec -400 -_DI400Y -_DI400Y -1
# 1 Jan -399 -_DI400Y +1 -_DI400Y 400-year boundary
# ...
# 30 Dec 000 -1 -2
# 31 Dec 000 0 -1
# 1 Jan 001 1 0 400-year boundary
# 2 Jan 001 2 1
# 3 Jan 001 3 2
# ...
# 31 Dec 400 _DI400Y _DI400Y -1
# 1 Jan 401 _DI400Y +1 _DI400Y 400-year boundary
n -= 1
n400, n = divmod(n, _DI400Y)
year = n400 * 400 + 1 # ..., -399, 1, 401, ...
# Now n is the (non-negative) offset, in days, from January 1 of year, to
# the desired date. Now compute how many 100-year cycles precede n.
# Note that it's possible for n100 to equal 4! In that case 4 full
# 100-year cycles precede the desired day, which implies the desired
# day is December 31 at the end of a 400-year cycle.
n100, n = divmod(n, _DI100Y)
# Now compute how many 4-year cycles precede it.
n4, n = divmod(n, _DI4Y)
# And now how many single years. Again n1 can be 4, and again meaning
# that the desired day is December 31 at the end of the 4-year cycle.
n1, n = divmod(n, 365)
year += n100 * 100 + n4 * 4 + n1
if n1 == 4 or n100 == 4:
assert n == 0
return year-1, 12, 31
# Now the year is correct, and n is the offset from January 1. We find
# the month via an estimate that's either exact or one too large.
leapyear = n1 == 3 and (n4 != 24 or n100 == 3)
assert leapyear == _is_leap(year)
month = (n + 50) >> 5
preceding = _DAYS_BEFORE_MONTH[month] + (month > 2 and leapyear)
if preceding > n: # estimate is too large
month -= 1
preceding -= _DAYS_IN_MONTH[month] + (month == 2 and leapyear)
n -= preceding
assert 0 <= n < _days_in_month(year, month)
# Now the year and month are correct, and n is the offset from the
# start of that month: we're done!
return year, month, n+1
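# Round-trip sanity check (added for illustration): ordinals and dates
# convert back and forth exactly, including leap days.
assert _ord2ymd(_ymd2ord(2020, 2, 29)) == (2020, 2, 29)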
# Month and day names. For localized versions, see the calendar module.
_MONTHNAMES = [None, "Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
_DAYNAMES = [None, "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
def _build_struct_time(y, m, d, hh, mm, ss, dstflag):
wday = (_ymd2ord(y, m, d) + 6) % 7
dnum = _days_before_month(y, m) + d
return _time.struct_time((y, m, d, hh, mm, ss, wday, dnum, dstflag))
def _format_time(hh, mm, ss, us, timespec='auto'):
specs = {
'hours': '{:02d}',
'minutes': '{:02d}:{:02d}',
'seconds': '{:02d}:{:02d}:{:02d}',
'milliseconds': '{:02d}:{:02d}:{:02d}.{:03d}',
'microseconds': '{:02d}:{:02d}:{:02d}.{:06d}'
}
if timespec == 'auto':
# Skip trailing microseconds when us==0.
timespec = 'microseconds' if us else 'seconds'
elif timespec == 'milliseconds':
us //= 1000
try:
fmt = specs[timespec]
except KeyError:
raise ValueError('Unknown timespec value')
else:
return fmt.format(hh, mm, ss, us)
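# Examples (added for illustration):
assert _format_time(9, 5, 7, 0) == '09:05:07'                           # 'auto' drops zero microseconds
assert _format_time(9, 5, 7, 123000, 'milliseconds') == '09:05:07.123'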
def _format_offset(off):
s = ''
if off is not None:
if off.days < 0:
sign = "-"
off = -off
else:
sign = "+"
hh, mm = divmod(off, timedelta(hours=1))
mm, ss = divmod(mm, timedelta(minutes=1))
s += "%s%02d:%02d" % (sign, hh, mm)
if ss or ss.microseconds:
s += ":%02d" % ss.seconds
if ss.microseconds:
s += '.%06d' % ss.microseconds
return s
# Correctly substitute for %z and %Z escapes in strftime formats.
def _wrap_strftime(object, format, timetuple):
# Don't call utcoffset() or tzname() unless actually needed.
freplace = None # the string to use for %f
zreplace = None # the string to use for %z
Zreplace = None # the string to use for %Z
# Scan format for %z and %Z escapes, replacing as needed.
newformat = []
push = newformat.append
i, n = 0, len(format)
while i < n:
ch = format[i]
i += 1
if ch == '%':
if i < n:
ch = format[i]
i += 1
if ch == 'f':
if freplace is None:
freplace = '%06d' % getattr(object,
'microsecond', 0)
newformat.append(freplace)
elif ch == 'z':
if zreplace is None:
zreplace = ""
if hasattr(object, "utcoffset"):
offset = object.utcoffset()
if offset is not None:
sign = '+'
if offset.days < 0:
offset = -offset
sign = '-'
h, rest = divmod(offset, timedelta(hours=1))
m, rest = divmod(rest, timedelta(minutes=1))
s = rest.seconds
u = offset.microseconds
if u:
zreplace = '%c%02d%02d%02d.%06d' % (sign, h, m, s, u)
elif s:
zreplace = '%c%02d%02d%02d' % (sign, h, m, s)
else:
zreplace = '%c%02d%02d' % (sign, h, m)
assert '%' not in zreplace
newformat.append(zreplace)
elif ch == 'Z':
if Zreplace is None:
Zreplace = ""
if hasattr(object, "tzname"):
s = object.tzname()
if s is not None:
# strftime is going to have at this: escape %
Zreplace = s.replace('%', '%%')
newformat.append(Zreplace)
else:
push('%')
push(ch)
else:
push('%')
else:
push(ch)
newformat = "".join(newformat)
return _time.strftime(newformat, timetuple)
# Helpers for parsing the result of isoformat()
def _parse_isoformat_date(dtstr):
# It is assumed that this function will only be called with a
# string of length exactly 10, and (though this is not used) ASCII-only
year = int(dtstr[0:4])
if dtstr[4] != '-':
raise ValueError('Invalid date separator: %s' % dtstr[4])
month = int(dtstr[5:7])
if dtstr[7] != '-':
raise ValueError('Invalid date separator')
day = int(dtstr[8:10])
return [year, month, day]
def _parse_hh_mm_ss_ff(tstr):
# Parses things of the form HH[:MM[:SS[.fff[fff]]]]
len_str = len(tstr)
time_comps = [0, 0, 0, 0]
pos = 0
for comp in range(0, 3):
if (len_str - pos) < 2:
raise ValueError('Incomplete time component')
time_comps[comp] = int(tstr[pos:pos+2])
pos += 2
next_char = tstr[pos:pos+1]
if not next_char or comp >= 2:
break
if next_char != ':':
raise ValueError('Invalid time separator: %c' % next_char)
pos += 1
if pos < len_str:
if tstr[pos] != '.':
raise ValueError('Invalid microsecond component')
else:
pos += 1
len_remainder = len_str - pos
if len_remainder not in (3, 6):
raise ValueError('Invalid microsecond component')
time_comps[3] = int(tstr[pos:])
if len_remainder == 3:
time_comps[3] *= 1000
return time_comps
def _parse_isoformat_time(tstr):
# Format supported is HH[:MM[:SS[.fff[fff]]]][+HH:MM[:SS[.ffffff]]]
len_str = len(tstr)
if len_str < 2:
raise ValueError('Isoformat time too short')
# This is equivalent to re.search('[+-]', tstr), but faster
tz_pos = (tstr.find('-') + 1 or tstr.find('+') + 1)
timestr = tstr[:tz_pos-1] if tz_pos > 0 else tstr
time_comps = _parse_hh_mm_ss_ff(timestr)
tzi = None
if tz_pos > 0:
tzstr = tstr[tz_pos:]
# Valid time zone strings are:
# HH:MM len: 5
# HH:MM:SS len: 8
# HH:MM:SS.ffffff len: 15
if len(tzstr) not in (5, 8, 15):
raise ValueError('Malformed time zone string')
tz_comps = _parse_hh_mm_ss_ff(tzstr)
if all(x == 0 for x in tz_comps):
tzi = timezone.utc
else:
tzsign = -1 if tstr[tz_pos - 1] == '-' else 1
td = timedelta(hours=tz_comps[0], minutes=tz_comps[1],
seconds=tz_comps[2], microseconds=tz_comps[3])
tzi = timezone(tzsign * td)
time_comps.append(tzi)
return time_comps
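# Example (added for illustration): a naive time parses into
# [HH, MM, SS, microseconds, tzinfo].
assert _parse_isoformat_time('04:05:01.000123') == [4, 5, 1, 123, None]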
# Just raise TypeError if the arg isn't None or a string.
def _check_tzname(name):
if name is not None and not isinstance(name, str):
raise TypeError("tzinfo.tzname() must return None or string, "
"not '%s'" % type(name))
# name is the offset-producing method, "utcoffset" or "dst".
# offset is what it returned.
# If offset isn't None or timedelta, raises TypeError.
# If offset is None, returns None.
# Else offset is checked for being in range.
# If it is, its integer value is returned. Else ValueError is raised.
def _check_utc_offset(name, offset):
assert name in ("utcoffset", "dst")
if offset is None:
return
if not isinstance(offset, timedelta):
raise TypeError("tzinfo.%s() must return None "
"or timedelta, not '%s'" % (name, type(offset)))
if not -timedelta(1) < offset < timedelta(1):
raise ValueError("%s()=%s, must be strictly between "
"-timedelta(hours=24) and timedelta(hours=24)" %
(name, offset))
def _check_int_field(value):
if isinstance(value, int):
return value
if isinstance(value, float):
raise TypeError('integer argument expected, got float')
try:
value = value.__index__()
except AttributeError:
pass
else:
if not isinstance(value, int):
raise TypeError('__index__ returned non-int (type %s)' %
type(value).__name__)
return value
orig = value
try:
value = value.__int__()
except AttributeError:
pass
else:
if not isinstance(value, int):
raise TypeError('__int__ returned non-int (type %s)' %
type(value).__name__)
import warnings
warnings.warn("an integer is required (got type %s)" %
type(orig).__name__,
DeprecationWarning,
stacklevel=2)
return value
raise TypeError('an integer is required (got type %s)' %
type(value).__name__)
def _check_date_fields(year, month, day):
year = _check_int_field(year)
month = _check_int_field(month)
day = _check_int_field(day)
if not MINYEAR <= year <= MAXYEAR:
raise ValueError('year must be in %d..%d' % (MINYEAR, MAXYEAR), year)
if not 1 <= month <= 12:
raise ValueError('month must be in 1..12', month)
dim = _days_in_month(year, month)
if not 1 <= day <= dim:
raise ValueError('day must be in 1..%d' % dim, day)
return year, month, day
def _check_time_fields(hour, minute, second, microsecond, fold):
hour = _check_int_field(hour)
minute = _check_int_field(minute)
second = _check_int_field(second)
microsecond = _check_int_field(microsecond)
if not 0 <= hour <= 23:
raise ValueError('hour must be in 0..23', hour)
if not 0 <= minute <= 59:
raise ValueError('minute must be in 0..59', minute)
if not 0 <= second <= 59:
raise ValueError('second must be in 0..59', second)
if not 0 <= microsecond <= 999999:
raise ValueError('microsecond must be in 0..999999', microsecond)
if fold not in (0, 1):
raise ValueError('fold must be either 0 or 1', fold)
return hour, minute, second, microsecond, fold
def _check_tzinfo_arg(tz):
if tz is not None and not isinstance(tz, tzinfo):
raise TypeError("tzinfo argument must be None or of a tzinfo subclass")
def _cmperror(x, y):
raise TypeError("can't compare '%s' to '%s'" % (
type(x).__name__, type(y).__name__))
def _divide_and_round(a, b):
"""divide a by b and round result to the nearest integer
When the ratio is exactly half-way between two integers,
the even integer is returned.
"""
# Based on the reference implementation for divmod_near
# in Objects/longobject.c.
q, r = divmod(a, b)
# round up if either r / b > 0.5, or r / b == 0.5 and q is odd.
# The expression r / b > 0.5 is equivalent to 2 * r > b if b is
# positive, 2 * r < b if b negative.
r *= 2
greater_than_half = r > b if b > 0 else r < b
if greater_than_half or r == b and q % 2 == 1:
q += 1
return q
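# Worked examples of round-half-to-even (added for illustration):
assert _divide_and_round(7, 2) == 4   # 3.5 rounds to the even neighbour 4
assert _divide_and_round(5, 2) == 2   # 2.5 rounds to the even neighbour 2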
class timedelta:
"""Represent the difference between two datetime objects.
Supported operators:
- add, subtract timedelta
- unary plus, minus, abs
- compare to timedelta
- multiply, divide by int
In addition, datetime supports subtraction of two datetime objects
returning a timedelta, and addition or subtraction of a datetime
and a timedelta giving a datetime.
Representation: (days, seconds, microseconds). Why? Because I
felt like it.
"""
__slots__ = '_days', '_seconds', '_microseconds', '_hashcode'
def __new__(cls, days=0, seconds=0, microseconds=0,
milliseconds=0, minutes=0, hours=0, weeks=0):
# Doing this efficiently and accurately in C is going to be difficult
# and error-prone, due to ubiquitous overflow possibilities, and that
# C double doesn't have enough bits of precision to represent
# microseconds over 10K years faithfully. The code here tries to make
# explicit where go-fast assumptions can be relied on, in order to
# guide the C implementation; it's way more convoluted than speed-
# ignoring auto-overflow-to-long idiomatic Python could be.
# XXX Check that all inputs are ints or floats.
# Final values, all integer.
# s and us fit in 32-bit signed ints; d isn't bounded.
d = s = us = 0
# Normalize everything to days, seconds, microseconds.
days += weeks*7
seconds += minutes*60 + hours*3600
microseconds += milliseconds*1000
# Get rid of all fractions, and normalize s and us.
# Take a deep breath <wink>.
if isinstance(days, float):
dayfrac, days = _math.modf(days)
daysecondsfrac, daysecondswhole = _math.modf(dayfrac * (24.*3600.))
assert daysecondswhole == int(daysecondswhole) # can't overflow
s = int(daysecondswhole)
assert days == int(days)
d = int(days)
else:
daysecondsfrac = 0.0
d = days
assert isinstance(daysecondsfrac, float)
assert abs(daysecondsfrac) <= 1.0
assert isinstance(d, int)
assert abs(s) <= 24 * 3600
# days isn't referenced again before redefinition
if isinstance(seconds, float):
secondsfrac, seconds = _math.modf(seconds)
assert seconds == int(seconds)
seconds = int(seconds)
secondsfrac += daysecondsfrac
assert abs(secondsfrac) <= 2.0
else:
secondsfrac = daysecondsfrac
# daysecondsfrac isn't referenced again
assert isinstance(secondsfrac, float)
assert abs(secondsfrac) <= 2.0
assert isinstance(seconds, int)
days, seconds = divmod(seconds, 24*3600)
d += days
s += int(seconds) # can't overflow
assert isinstance(s, int)
assert abs(s) <= 2 * 24 * 3600
# seconds isn't referenced again before redefinition
usdouble = secondsfrac * 1e6
assert abs(usdouble) < 2.1e6 # exact value not critical
# secondsfrac isn't referenced again
if isinstance(microseconds, float):
microseconds = round(microseconds + usdouble)
seconds, microseconds = divmod(microseconds, 1000000)
days, seconds = divmod(seconds, 24*3600)
d += days
s += seconds
else:
microseconds = int(microseconds)
seconds, microseconds = divmod(microseconds, 1000000)
days, seconds = divmod(seconds, 24*3600)
d += days
s += seconds
microseconds = round(microseconds + usdouble)
assert isinstance(s, int)
assert isinstance(microseconds, int)
assert abs(s) <= 3 * 24 * 3600
assert abs(microseconds) < 3.1e6
# Just a little bit of carrying possible for microseconds and seconds.
seconds, us = divmod(microseconds, 1000000)
s += seconds
days, s = divmod(s, 24*3600)
d += days
assert isinstance(d, int)
assert isinstance(s, int) and 0 <= s < 24*3600
assert isinstance(us, int) and 0 <= us < 1000000
if abs(d) > 999999999:
raise OverflowError("timedelta # of days is too large: %d" % d)
self = object.__new__(cls)
self._days = d
self._seconds = s
self._microseconds = us
self._hashcode = -1
return self
def __repr__(self):
args = []
if self._days:
args.append("days=%d" % self._days)
if self._seconds:
args.append("seconds=%d" % self._seconds)
if self._microseconds:
args.append("microseconds=%d" % self._microseconds)
if not args:
args.append('0')
return "%s.%s(%s)" % (self.__class__.__module__,
self.__class__.__qualname__,
', '.join(args))
def __str__(self):
mm, ss = divmod(self._seconds, 60)
hh, mm = divmod(mm, 60)
s = "%d:%02d:%02d" % (hh, mm, ss)
if self._days:
def plural(n):
return n, abs(n) != 1 and "s" or ""
s = ("%d day%s, " % plural(self._days)) + s
if self._microseconds:
s = s + ".%06d" % self._microseconds
return s
def total_seconds(self):
"""Total seconds in the duration."""
return ((self.days * 86400 + self.seconds) * 10**6 +
self.microseconds) / 10**6
# Read-only field accessors
@property
def days(self):
"""days"""
return self._days
@property
def seconds(self):
"""seconds"""
return self._seconds
@property
def microseconds(self):
"""microseconds"""
return self._microseconds
def __add__(self, other):
if isinstance(other, timedelta):
# for CPython compatibility, we cannot use
# our __class__ here, but need a real timedelta
return timedelta(self._days + other._days,
self._seconds + other._seconds,
self._microseconds + other._microseconds)
return NotImplemented
__radd__ = __add__
def __sub__(self, other):
if isinstance(other, timedelta):
# for CPython compatibility, we cannot use
# our __class__ here, but need a real timedelta
return timedelta(self._days - other._days,
self._seconds - other._seconds,
self._microseconds - other._microseconds)
return NotImplemented
def __rsub__(self, other):
if isinstance(other, timedelta):
return -self + other
return NotImplemented
def __neg__(self):
# for CPython compatibility, we cannot use
# our __class__ here, but need a real timedelta
return timedelta(-self._days,
-self._seconds,
-self._microseconds)
def __pos__(self):
return self
def __abs__(self):
if self._days < 0:
return -self
else:
return self
def __mul__(self, other):
if isinstance(other, int):
# for CPython compatibility, we cannot use
# our __class__ here, but need a real timedelta
return timedelta(self._days * other,
self._seconds * other,
self._microseconds * other)
if isinstance(other, float):
usec = self._to_microseconds()
a, b = other.as_integer_ratio()
return timedelta(0, 0, _divide_and_round(usec * a, b))
return NotImplemented
__rmul__ = __mul__
def _to_microseconds(self):
return ((self._days * (24*3600) + self._seconds) * 1000000 +
self._microseconds)
def __floordiv__(self, other):
if not isinstance(other, (int, timedelta)):
return NotImplemented
usec = self._to_microseconds()
if isinstance(other, timedelta):
return usec // other._to_microseconds()
if isinstance(other, int):
return timedelta(0, 0, usec // other)
def __truediv__(self, other):
if not isinstance(other, (int, float, timedelta)):
return NotImplemented
usec = self._to_microseconds()
if isinstance(other, timedelta):
return usec / other._to_microseconds()
if isinstance(other, int):
return timedelta(0, 0, _divide_and_round(usec, other))
if isinstance(other, float):
a, b = other.as_integer_ratio()
return timedelta(0, 0, _divide_and_round(b * usec, a))
def __mod__(self, other):
if isinstance(other, timedelta):
r = self._to_microseconds() % other._to_microseconds()
return timedelta(0, 0, r)
return NotImplemented
def __divmod__(self, other):
if isinstance(other, timedelta):
q, r = divmod(self._to_microseconds(),
other._to_microseconds())
return q, timedelta(0, 0, r)
return NotImplemented
# Comparisons of timedelta objects with other.
def __eq__(self, other):
if isinstance(other, timedelta):
return self._cmp(other) == 0
else:
return False
def __le__(self, other):
if isinstance(other, timedelta):
return self._cmp(other) <= 0
else:
_cmperror(self, other)
def __lt__(self, other):
if isinstance(other, timedelta):
return self._cmp(other) < 0
else:
_cmperror(self, other)
def __ge__(self, other):
if isinstance(other, timedelta):
return self._cmp(other) >= 0
else:
_cmperror(self, other)
def __gt__(self, other):
if isinstance(other, timedelta):
return self._cmp(other) > 0
else:
_cmperror(self, other)
def _cmp(self, other):
assert isinstance(other, timedelta)
return _cmp(self._getstate(), other._getstate())
def __hash__(self):
if self._hashcode == -1:
self._hashcode = hash(self._getstate())
return self._hashcode
def __bool__(self):
return (self._days != 0 or
self._seconds != 0 or
self._microseconds != 0)
# Pickle support.
def _getstate(self):
return (self._days, self._seconds, self._microseconds)
def __reduce__(self):
return (self.__class__, self._getstate())
timedelta.min = timedelta(-999999999)
timedelta.max = timedelta(days=999999999, hours=23, minutes=59, seconds=59,
microseconds=999999)
timedelta.resolution = timedelta(microseconds=1)
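# Normalization examples (added for illustration): constructor arguments are
# reduced to the canonical (days, seconds, microseconds) representation.
assert timedelta(hours=25) == timedelta(days=1, seconds=3600)
assert timedelta(milliseconds=1) == timedelta(microseconds=1000)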
class date:
"""Concrete date type.
Constructors:
__new__()
fromtimestamp()
today()
fromordinal()
Operators:
__repr__, __str__
__eq__, __le__, __lt__, __ge__, __gt__, __hash__
__add__, __radd__, __sub__ (add/radd only with timedelta arg)
Methods:
timetuple()
toordinal()
weekday()
isoweekday(), isocalendar(), isoformat()
ctime()
strftime()
Properties (readonly):
year, month, day
"""
__slots__ = '_year', '_month', '_day', '_hashcode'
def __new__(cls, year, month=None, day=None):
"""Constructor.
Arguments:
year, month, day (required, base 1)
"""
if (month is None and
isinstance(year, (bytes, str)) and len(year) == 4 and
1 <= ord(year[2:3]) <= 12):
# Pickle support
if isinstance(year, str):
try:
year = year.encode('latin1')
except UnicodeEncodeError:
# More informative error message.
raise ValueError(
"Failed to encode latin1 string when unpickling "
"a date object. "
"pickle.load(data, encoding='latin1') is assumed.")
self = object.__new__(cls)
self.__setstate(year)
self._hashcode = -1
return self
year, month, day = _check_date_fields(year, month, day)
self = object.__new__(cls)
self._year = year
self._month = month
self._day = day
self._hashcode = -1
return self
# Additional constructors
@classmethod
def fromtimestamp(cls, t):
"Construct a date from a POSIX timestamp (like time.time())."
y, m, d, hh, mm, ss, weekday, jday, dst = _time.localtime(t)
return cls(y, m, d)
@classmethod
def today(cls):
"Construct a date from time.time()."
t = _time.time()
return cls.fromtimestamp(t)
@classmethod
def fromordinal(cls, n):
"""Construct a date from a proleptic Gregorian ordinal.
January 1 of year 1 is day 1. Only the year, month and day are
non-zero in the result.
"""
y, m, d = _ord2ymd(n)
return cls(y, m, d)
@classmethod
def fromisoformat(cls, date_string):
"""Construct a date from the output of date.isoformat()."""
if not isinstance(date_string, str):
raise TypeError('fromisoformat: argument must be str')
try:
assert len(date_string) == 10
return cls(*_parse_isoformat_date(date_string))
except Exception:
raise ValueError(f'Invalid isoformat string: {date_string!r}')
@classmethod
def fromisocalendar(cls, year, week, day):
"""Construct a date from the ISO year, week number and weekday.
This is the inverse of the date.isocalendar() function"""
# Year is bounded this way because 9999-12-31 is (9999, 52, 5)
if not MINYEAR <= year <= MAXYEAR:
raise ValueError(f"Year is out of range: {year}")
if not 0 < week < 53:
out_of_range = True
if week == 53:
# ISO years have 53 weeks in them on years starting with a
# Thursday and leap years starting on a Wednesday
first_weekday = _ymd2ord(year, 1, 1) % 7
if (first_weekday == 4 or (first_weekday == 3 and
_is_leap(year))):
out_of_range = False
if out_of_range:
raise ValueError(f"Invalid week: {week}")
if not 0 < day < 8:
raise ValueError(f"Invalid weekday: {day} (range is [1, 7])")
# Now compute the offset from (Y, 1, 1) in days:
day_offset = (week - 1) * 7 + (day - 1)
# Calculate the ordinal day for monday, week 1
day_1 = _isoweek1monday(year)
ord_day = day_1 + day_offset
return cls(*_ord2ymd(ord_day))
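    # Example (added for illustration): date.fromisocalendar(2004, 1, 1)
    # is date(2003, 12, 29), since ISO week 1 of 2004 starts on Monday,
    # 29 December 2003 (2004-01-01 fell on a Thursday).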
# Conversions to string
def __repr__(self):
"""Convert to formal string, for repr().
>>> dt = datetime(2010, 1, 1)
>>> repr(dt)
'datetime.datetime(2010, 1, 1, 0, 0)'
>>> dt = datetime(2010, 1, 1, tzinfo=timezone.utc)
>>> repr(dt)
'datetime.datetime(2010, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)'
"""
return "%s.%s(%d, %d, %d)" % (self.__class__.__module__,
self.__class__.__qualname__,
self._year,
self._month,
self._day)
# XXX These shouldn't depend on time.localtime(), because that
# clips the usable dates to [1970 .. 2038). At least ctime() is
# easily done without using strftime() -- that's better too because
# strftime("%c", ...) is locale specific.
def ctime(self):
"Return ctime() style string."
weekday = self.toordinal() % 7 or 7
return "%s %s %2d 00:00:00 %04d" % (
_DAYNAMES[weekday],
_MONTHNAMES[self._month],
self._day, self._year)
def strftime(self, fmt):
"Format using strftime()."
return _wrap_strftime(self, fmt, self.timetuple())
def __format__(self, fmt):
if not isinstance(fmt, str):
raise TypeError("must be str, not %s" % type(fmt).__name__)
if len(fmt) != 0:
return self.strftime(fmt)
return str(self)
def isoformat(self):
"""Return the date formatted according to ISO.
This is 'YYYY-MM-DD'.
References:
- http://www.w3.org/TR/NOTE-datetime
- http://www.cl.cam.ac.uk/~mgk25/iso-time.html
"""
return "%04d-%02d-%02d" % (self._year, self._month, self._day)
__str__ = isoformat
# Read-only field accessors
@property
def year(self):
"""year (1-9999)"""
return self._year
@property
def month(self):
"""month (1-12)"""
return self._month
@property
def day(self):
"""day (1-31)"""
return self._day
# Standard conversions, __eq__, __le__, __lt__, __ge__, __gt__,
# __hash__ (and helpers)
def timetuple(self):
"Return local time tuple compatible with time.localtime()."
return _build_struct_time(self._year, self._month, self._day,
0, 0, 0, -1)
def toordinal(self):
"""Return proleptic Gregorian ordinal for the year, month and day.
January 1 of year 1 is day 1. Only the year, month and day values
contribute to the result.
"""
return _ymd2ord(self._year, self._month, self._day)
def replace(self, year=None, month=None, day=None):
"""Return a new date with new values for the specified fields."""
if year is None:
year = self._year
if month is None:
month = self._month
if day is None:
day = self._day
return type(self)(year, month, day)
# Comparisons of date objects with other.
def __eq__(self, other):
if isinstance(other, date):
return self._cmp(other) == 0
return NotImplemented
def __le__(self, other):
if isinstance(other, date):
return self._cmp(other) <= 0
return NotImplemented
def __lt__(self, other):
if isinstance(other, date):
return self._cmp(other) < 0
return NotImplemented
def __ge__(self, other):
if isinstance(other, date):
return self._cmp(other) >= 0
return NotImplemented
def __gt__(self, other):
if isinstance(other, date):
return self._cmp(other) > 0
return NotImplemented
def _cmp(self, other):
assert isinstance(other, date)
y, m, d = self._year, self._month, self._day
y2, m2, d2 = other._year, other._month, other._day
return _cmp((y, m, d), (y2, m2, d2))
def __hash__(self):
"Hash."
if self._hashcode == -1:
self._hashcode = hash(self._getstate())
return self._hashcode
# Computations
def __add__(self, other):
"Add a date to a timedelta."
if isinstance(other, timedelta):
o = self.toordinal() + other.days
if 0 < o <= _MAXORDINAL:
return type(self).fromordinal(o)
raise OverflowError("result out of range")
return NotImplemented
__radd__ = __add__
def __sub__(self, other):
"""Subtract two dates, or a date and a timedelta."""
if isinstance(other, timedelta):
return self + timedelta(-other.days)
if isinstance(other, date):
days1 = self.toordinal()
days2 = other.toordinal()
return timedelta(days1 - days2)
return NotImplemented
def weekday(self):
"Return day of the week, where Monday == 0 ... Sunday == 6."
return (self.toordinal() + 6) % 7
# Day-of-the-week and week-of-the-year, according to ISO
def isoweekday(self):
"Return day of the week, where Monday == 1 ... Sunday == 7."
# 1-Jan-0001 is a Monday
return self.toordinal() % 7 or 7
def isocalendar(self):
"""Return a 3-tuple containing ISO year, week number, and weekday.
The first ISO week of the year is the (Mon-Sun) week
containing the year's first Thursday; everything else derives
from that.
The first week is 1; Monday is 1 ... Sunday is 7.
ISO calendar algorithm taken from
http://www.phys.uu.nl/~vgent/calendar/isocalendar.htm
(used with permission)
"""
year = self._year
week1monday = _isoweek1monday(year)
today = _ymd2ord(self._year, self._month, self._day)
# Internally, week and day have origin 0
week, day = divmod(today - week1monday, 7)
if week < 0:
year -= 1
week1monday = _isoweek1monday(year)
week, day = divmod(today - week1monday, 7)
elif week >= 52:
if today >= _isoweek1monday(year+1):
year += 1
week = 0
return year, week+1, day+1
# Pickle support.
def _getstate(self):
yhi, ylo = divmod(self._year, 256)
return bytes([yhi, ylo, self._month, self._day]),
def __setstate(self, string):
yhi, ylo, self._month, self._day = string
self._year = yhi * 256 + ylo
def __reduce__(self):
return (self.__class__, self._getstate())
_date_class = date # so functions w/ args named "date" can get at the class
date.min = date(1, 1, 1)
date.max = date(9999, 12, 31)
date.resolution = timedelta(days=1)
class tzinfo:
"""Abstract base class for time zone info classes.
    Subclasses must override the tzname(), utcoffset() and dst() methods.
"""
__slots__ = ()
def tzname(self, dt):
"datetime -> string name of time zone."
raise NotImplementedError("tzinfo subclass must override tzname()")
def utcoffset(self, dt):
"datetime -> timedelta, positive for east of UTC, negative for west of UTC"
raise NotImplementedError("tzinfo subclass must override utcoffset()")
def dst(self, dt):
"""datetime -> DST offset as timedelta, positive for east of UTC.
Return 0 if DST not in effect. utcoffset() must include the DST
offset.
"""
raise NotImplementedError("tzinfo subclass must override dst()")
def fromutc(self, dt):
"datetime in UTC -> datetime in local time."
if not isinstance(dt, datetime):
raise TypeError("fromutc() requires a datetime argument")
if dt.tzinfo is not self:
raise ValueError("dt.tzinfo is not self")
dtoff = dt.utcoffset()
if dtoff is None:
raise ValueError("fromutc() requires a non-None utcoffset() "
"result")
# See the long comment block at the end of this file for an
# explanation of this algorithm.
dtdst = dt.dst()
if dtdst is None:
raise ValueError("fromutc() requires a non-None dst() result")
delta = dtoff - dtdst
if delta:
dt += delta
dtdst = dt.dst()
if dtdst is None:
raise ValueError("fromutc(): dt.dst gave inconsistent "
"results; cannot convert")
return dt + dtdst
# Pickle support.
def __reduce__(self):
getinitargs = getattr(self, "__getinitargs__", None)
if getinitargs:
args = getinitargs()
else:
args = ()
getstate = getattr(self, "__getstate__", None)
if getstate:
state = getstate()
else:
state = getattr(self, "__dict__", None) or None
if state is None:
return (self.__class__, args)
else:
return (self.__class__, args, state)
_tzinfo_class = tzinfo
class time:
"""Time with time zone.
Constructors:
__new__()
Operators:
__repr__, __str__
__eq__, __le__, __lt__, __ge__, __gt__, __hash__
Methods:
strftime()
isoformat()
utcoffset()
tzname()
dst()
Properties (readonly):
hour, minute, second, microsecond, tzinfo, fold
"""
__slots__ = '_hour', '_minute', '_second', '_microsecond', '_tzinfo', '_hashcode', '_fold'
def __new__(cls, hour=0, minute=0, second=0, microsecond=0, tzinfo=None, *, fold=0):
"""Constructor.
Arguments:
hour, minute (required)
second, microsecond (default to zero)
tzinfo (default to None)
fold (keyword only, default to zero)
"""
if (isinstance(hour, (bytes, str)) and len(hour) == 6 and
ord(hour[0:1])&0x7F < 24):
# Pickle support
if isinstance(hour, str):
try:
hour = hour.encode('latin1')
except UnicodeEncodeError:
# More informative error message.
raise ValueError(
"Failed to encode latin1 string when unpickling "
"a time object. "
"pickle.load(data, encoding='latin1') is assumed.")
self = object.__new__(cls)
self.__setstate(hour, minute or None)
self._hashcode = -1
return self
hour, minute, second, microsecond, fold = _check_time_fields(
hour, minute, second, microsecond, fold)
_check_tzinfo_arg(tzinfo)
self = object.__new__(cls)
self._hour = hour
self._minute = minute
self._second = second
self._microsecond = microsecond
self._tzinfo = tzinfo
self._hashcode = -1
self._fold = fold
return self
# Read-only field accessors
@property
def hour(self):
"""hour (0-23)"""
return self._hour
@property
def minute(self):
"""minute (0-59)"""
return self._minute
@property
def second(self):
"""second (0-59)"""
return self._second
@property
def microsecond(self):
"""microsecond (0-999999)"""
return self._microsecond
@property
def tzinfo(self):
"""timezone info object"""
return self._tzinfo
@property
def fold(self):
return self._fold
# Standard conversions, __hash__ (and helpers)
# Comparisons of time objects with other.
def __eq__(self, other):
if isinstance(other, time):
return self._cmp(other, allow_mixed=True) == 0
else:
return False
def __le__(self, other):
if isinstance(other, time):
return self._cmp(other) <= 0
else:
_cmperror(self, other)
def __lt__(self, other):
if isinstance(other, time):
return self._cmp(other) < 0
else:
_cmperror(self, other)
def __ge__(self, other):
if isinstance(other, time):
return self._cmp(other) >= 0
else:
_cmperror(self, other)
def __gt__(self, other):
if isinstance(other, time):
return self._cmp(other) > 0
else:
_cmperror(self, other)
def _cmp(self, other, allow_mixed=False):
assert isinstance(other, time)
mytz = self._tzinfo
ottz = other._tzinfo
myoff = otoff = None
if mytz is ottz:
base_compare = True
else:
myoff = self.utcoffset()
otoff = other.utcoffset()
base_compare = myoff == otoff
if base_compare:
return _cmp((self._hour, self._minute, self._second,
self._microsecond),
(other._hour, other._minute, other._second,
other._microsecond))
if myoff is None or otoff is None:
if allow_mixed:
return 2 # arbitrary non-zero value
else:
raise TypeError("cannot compare naive and aware times")
myhhmm = self._hour * 60 + self._minute - myoff//timedelta(minutes=1)
othhmm = other._hour * 60 + other._minute - otoff//timedelta(minutes=1)
return _cmp((myhhmm, self._second, self._microsecond),
(othhmm, other._second, other._microsecond))
def __hash__(self):
"""Hash."""
if self._hashcode == -1:
if self.fold:
t = self.replace(fold=0)
else:
t = self
tzoff = t.utcoffset()
if not tzoff: # zero or None
self._hashcode = hash(t._getstate()[0])
else:
h, m = divmod(timedelta(hours=self.hour, minutes=self.minute) - tzoff,
timedelta(hours=1))
assert not m % timedelta(minutes=1), "whole minute"
m //= timedelta(minutes=1)
if 0 <= h < 24:
self._hashcode = hash(time(h, m, self.second, self.microsecond))
else:
self._hashcode = hash((h, m, self.second, self.microsecond))
return self._hashcode
# Conversion to string
def _tzstr(self):
"""Return formatted timezone offset (+xx:xx) or an empty string."""
off = self.utcoffset()
return _format_offset(off)
def __repr__(self):
"""Convert to formal string, for repr()."""
if self._microsecond != 0:
s = ", %d, %d" % (self._second, self._microsecond)
elif self._second != 0:
s = ", %d" % self._second
else:
s = ""
s= "%s.%s(%d, %d%s)" % (self.__class__.__module__,
self.__class__.__qualname__,
self._hour, self._minute, s)
if self._tzinfo is not None:
assert s[-1:] == ")"
s = s[:-1] + ", tzinfo=%r" % self._tzinfo + ")"
if self._fold:
assert s[-1:] == ")"
s = s[:-1] + ", fold=1)"
return s
def isoformat(self, timespec='auto'):
"""Return the time formatted according to ISO.
The full format is 'HH:MM:SS.mmmmmm+zz:zz'. By default, the fractional
part is omitted if self.microsecond == 0.
The optional argument timespec specifies the number of additional
terms of the time to include.
"""
s = _format_time(self._hour, self._minute, self._second,
self._microsecond, timespec)
tz = self._tzstr()
if tz:
s += tz
return s
__str__ = isoformat
@classmethod
def fromisoformat(cls, time_string):
"""Construct a time from the output of isoformat()."""
if not isinstance(time_string, str):
raise TypeError('fromisoformat: argument must be str')
try:
return cls(*_parse_isoformat_time(time_string))
except Exception:
raise ValueError(f'Invalid isoformat string: {time_string!r}')
def strftime(self, fmt):
"""Format using strftime(). The date part of the timestamp passed
to underlying strftime should not be used.
"""
# The year must be >= 1000 else Python's strftime implementation
# can raise a bogus exception.
timetuple = (1900, 1, 1,
self._hour, self._minute, self._second,
0, 1, -1)
return _wrap_strftime(self, fmt, timetuple)
def __format__(self, fmt):
if not isinstance(fmt, str):
raise TypeError("must be str, not %s" % type(fmt).__name__)
if len(fmt) != 0:
return self.strftime(fmt)
return str(self)
# Timezone functions
def utcoffset(self):
"""Return the timezone offset as timedelta, positive east of UTC
(negative west of UTC)."""
if self._tzinfo is None:
return None
offset = self._tzinfo.utcoffset(None)
_check_utc_offset("utcoffset", offset)
return offset
def tzname(self):
"""Return the timezone name.
Note that the name is 100% informational -- there's no requirement that
it mean anything in particular. For example, "GMT", "UTC", "-500",
"-5:00", "EDT", "US/Eastern", "America/New York" are all valid replies.
"""
if self._tzinfo is None:
return None
name = self._tzinfo.tzname(None)
_check_tzname(name)
return name
def dst(self):
"""Return 0 if DST is not in effect, or the DST offset (as timedelta
positive eastward) if DST is in effect.
This is purely informational; the DST offset has already been added to
the UTC offset returned by utcoffset() if applicable, so there's no
need to consult dst() unless you're interested in displaying the DST
info.
"""
if self._tzinfo is None:
return None
offset = self._tzinfo.dst(None)
_check_utc_offset("dst", offset)
return offset
def replace(self, hour=None, minute=None, second=None, microsecond=None,
tzinfo=True, *, fold=None):
"""Return a new time with new values for the specified fields."""
if hour is None:
hour = self.hour
if minute is None:
minute = self.minute
if second is None:
second = self.second
if microsecond is None:
microsecond = self.microsecond
if tzinfo is True:
tzinfo = self.tzinfo
if fold is None:
fold = self._fold
return type(self)(hour, minute, second, microsecond, tzinfo, fold=fold)
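    # Note on the tzinfo=True sentinel above: True can never be a real tzinfo,
    # so it distinguishes "keep the current tzinfo" from an explicit
    # tzinfo=None, which deliberately strips the timezone; e.g.
    # t.replace(tzinfo=None) turns an aware time into a naive one.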
# Pickle support.
def _getstate(self, protocol=3):
us2, us3 = divmod(self._microsecond, 256)
us1, us2 = divmod(us2, 256)
h = self._hour
if self._fold and protocol > 3:
h += 128
basestate = bytes([h, self._minute, self._second,
us1, us2, us3])
if self._tzinfo is None:
return (basestate,)
else:
return (basestate, self._tzinfo)
def __setstate(self, string, tzinfo):
if tzinfo is not None and not isinstance(tzinfo, _tzinfo_class):
raise TypeError("bad tzinfo state arg")
h, self._minute, self._second, us1, us2, us3 = string
if h > 127:
self._fold = 1
self._hour = h - 128
else:
self._fold = 0
self._hour = h
self._microsecond = (((us1 << 8) | us2) << 8) | us3
self._tzinfo = tzinfo
def __reduce_ex__(self, protocol):
return (time, self._getstate(protocol))
def __reduce__(self):
return self.__reduce_ex__(2)
_time_class = time # so functions w/ args named "time" can get at the class
time.min = time(0, 0, 0)
time.max = time(23, 59, 59, 999999)
time.resolution = timedelta(microseconds=1)
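# Illustrative sketch (not part of the module): how the pickle payload built
# by time._getstate() packs the four fields into six bytes, with microseconds
# split across three bytes exactly as __setstate reassembles them.
def _demo_time_state_roundtrip():
    t = time(13, 37, 59, 123456)
    h, m, s, us1, us2, us3 = t._getstate()[0]
    us = (((us1 << 8) | us2) << 8) | us3  # reassemble the 24-bit microseconds
    assert (h, m, s, us) == (13, 37, 59, 123456)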
class datetime(date):
"""datetime(year, month, day[, hour[, minute[, second[, microsecond[,tzinfo]]]]])
The year, month and day arguments are required. tzinfo may be None, or an
instance of a tzinfo subclass. The remaining arguments may be ints.
"""
__slots__ = date.__slots__ + time.__slots__
def __new__(cls, year, month=None, day=None, hour=0, minute=0, second=0,
microsecond=0, tzinfo=None, *, fold=0):
if (isinstance(year, (bytes, str)) and len(year) == 10 and
1 <= ord(year[2:3])&0x7F <= 12):
# Pickle support
if isinstance(year, str):
try:
year = bytes(year, 'latin1')
except UnicodeEncodeError:
# More informative error message.
raise ValueError(
"Failed to encode latin1 string when unpickling "
"a datetime object. "
"pickle.load(data, encoding='latin1') is assumed.")
self = object.__new__(cls)
self.__setstate(year, month)
self._hashcode = -1
return self
year, month, day = _check_date_fields(year, month, day)
hour, minute, second, microsecond, fold = _check_time_fields(
hour, minute, second, microsecond, fold)
_check_tzinfo_arg(tzinfo)
self = object.__new__(cls)
self._year = year
self._month = month
self._day = day
self._hour = hour
self._minute = minute
self._second = second
self._microsecond = microsecond
self._tzinfo = tzinfo
self._hashcode = -1
self._fold = fold
return self
# Read-only field accessors
@property
def hour(self):
"""hour (0-23)"""
return self._hour
@property
def minute(self):
"""minute (0-59)"""
return self._minute
@property
def second(self):
"""second (0-59)"""
return self._second
@property
def microsecond(self):
"""microsecond (0-999999)"""
return self._microsecond
@property
def tzinfo(self):
"""timezone info object"""
return self._tzinfo
@property
def fold(self):
return self._fold
@classmethod
def _fromtimestamp(cls, t, utc, tz):
"""Construct a datetime from a POSIX timestamp (like time.time()).
A timezone info object may be passed in as well.
"""
frac, t = _math.modf(t)
us = round(frac * 1e6)
if us >= 1000000:
t += 1
us -= 1000000
elif us < 0:
t -= 1
us += 1000000
converter = _time.gmtime if utc else _time.localtime
y, m, d, hh, mm, ss, weekday, jday, dst = converter(t)
ss = min(ss, 59) # clamp out leap seconds if the platform has them
result = cls(y, m, d, hh, mm, ss, us, tz)
if tz is None:
# As of version 2015f max fold in IANA database is
# 23 hours at 1969-09-30 13:00:00 in Kwajalein.
# Let's probe 24 hours in the past to detect a transition:
max_fold_seconds = 24 * 3600
# On Windows localtime_s throws an OSError for negative values,
# thus we can't perform fold detection for values of time less
# than the max time fold. See comments in _datetimemodule's
# version of this method for more details.
if t < max_fold_seconds and sys.platform.startswith("win"):
return result
y, m, d, hh, mm, ss = converter(t - max_fold_seconds)[:6]
probe1 = cls(y, m, d, hh, mm, ss, us, tz)
trans = result - probe1 - timedelta(0, max_fold_seconds)
if trans.days < 0:
y, m, d, hh, mm, ss = converter(t + trans // timedelta(0, 1))[:6]
probe2 = cls(y, m, d, hh, mm, ss, us, tz)
if probe2 == result:
result._fold = 1
else:
result = tz.fromutc(result)
return result
@classmethod
def fromtimestamp(cls, t, tz=None):
"""Construct a datetime from a POSIX timestamp (like time.time()).
A timezone info object may be passed in as well.
"""
_check_tzinfo_arg(tz)
return cls._fromtimestamp(t, tz is not None, tz)
@classmethod
def utcfromtimestamp(cls, t):
"""Construct a naive UTC datetime from a POSIX timestamp."""
return cls._fromtimestamp(t, True, None)
@classmethod
def now(cls, tz=None):
"Construct a datetime from time.time() and optional time zone info."
t = _time.time()
return cls.fromtimestamp(t, tz)
@classmethod
def utcnow(cls):
"Construct a UTC datetime from time.time()."
t = _time.time()
return cls.utcfromtimestamp(t)
@classmethod
def combine(cls, date, time, tzinfo=True):
"Construct a datetime from a given date and a given time."
if not isinstance(date, _date_class):
raise TypeError("date argument must be a date instance")
if not isinstance(time, _time_class):
raise TypeError("time argument must be a time instance")
if tzinfo is True:
tzinfo = time.tzinfo
return cls(date.year, date.month, date.day,
time.hour, time.minute, time.second, time.microsecond,
tzinfo, fold=time.fold)
@classmethod
def fromisoformat(cls, date_string):
"""Construct a datetime from the output of datetime.isoformat()."""
if not isinstance(date_string, str):
raise TypeError('fromisoformat: argument must be str')
# Split this at the separator
dstr = date_string[0:10]
tstr = date_string[11:]
try:
date_components = _parse_isoformat_date(dstr)
except ValueError:
raise ValueError(f'Invalid isoformat string: {date_string!r}')
if tstr:
try:
time_components = _parse_isoformat_time(tstr)
except ValueError:
raise ValueError(f'Invalid isoformat string: {date_string!r}')
else:
time_components = [0, 0, 0, 0, None]
return cls(*(date_components + time_components))
def timetuple(self):
"Return local time tuple compatible with time.localtime()."
dst = self.dst()
if dst is None:
dst = -1
elif dst:
dst = 1
else:
dst = 0
return _build_struct_time(self.year, self.month, self.day,
self.hour, self.minute, self.second,
dst)
def _mktime(self):
"""Return integer POSIX timestamp."""
epoch = datetime(1970, 1, 1)
max_fold_seconds = 24 * 3600
t = (self - epoch) // timedelta(0, 1)
def local(u):
y, m, d, hh, mm, ss = _time.localtime(u)[:6]
return (datetime(y, m, d, hh, mm, ss) - epoch) // timedelta(0, 1)
# Our goal is to solve t = local(u) for u.
a = local(t) - t
u1 = t - a
t1 = local(u1)
if t1 == t:
# We found one solution, but it may not be the one we need.
# Look for an earlier solution (if `fold` is 0), or a
# later one (if `fold` is 1).
u2 = u1 + (-max_fold_seconds, max_fold_seconds)[self.fold]
b = local(u2) - u2
if a == b:
return u1
else:
b = t1 - u1
assert a != b
u2 = t - b
t2 = local(u2)
if t2 == t:
return u2
if t1 == t:
return u1
# We have found both offsets a and b, but neither t - a nor t - b is
# a solution. This means t is in the gap.
return (max, min)[self.fold](u1, u2)
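    # Illustrative note: in the gap case, (max, min)[self.fold] selects
    # max(u1, u2) when fold=0 and min(u1, u2) when fold=1, i.e. the fold
    # attribute decides which side of a spring-forward gap the returned
    # timestamp lands on.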
def timestamp(self):
"Return POSIX timestamp as float"
if self._tzinfo is None:
s = self._mktime()
return s + self.microsecond / 1e6
else:
return (self - _EPOCH).total_seconds()
def utctimetuple(self):
"Return UTC time tuple compatible with time.gmtime()."
offset = self.utcoffset()
if offset:
self -= offset
y, m, d = self.year, self.month, self.day
hh, mm, ss = self.hour, self.minute, self.second
return _build_struct_time(y, m, d, hh, mm, ss, 0)
def date(self):
"Return the date part."
return date(self._year, self._month, self._day)
def time(self):
"Return the time part, with tzinfo None."
return time(self.hour, self.minute, self.second, self.microsecond, fold=self.fold)
def timetz(self):
"Return the time part, with same tzinfo."
return time(self.hour, self.minute, self.second, self.microsecond,
self._tzinfo, fold=self.fold)
def replace(self, year=None, month=None, day=None, hour=None,
minute=None, second=None, microsecond=None, tzinfo=True,
*, fold=None):
"""Return a new datetime with new values for the specified fields."""
if year is None:
year = self.year
if month is None:
month = self.month
if day is None:
day = self.day
if hour is None:
hour = self.hour
if minute is None:
minute = self.minute
if second is None:
second = self.second
if microsecond is None:
microsecond = self.microsecond
if tzinfo is True:
tzinfo = self.tzinfo
if fold is None:
fold = self.fold
return type(self)(year, month, day, hour, minute, second,
microsecond, tzinfo, fold=fold)
def _local_timezone(self):
if self.tzinfo is None:
ts = self._mktime()
else:
ts = (self - _EPOCH) // timedelta(seconds=1)
localtm = _time.localtime(ts)
local = datetime(*localtm[:6])
# Extract TZ data
gmtoff = localtm.tm_gmtoff
zone = localtm.tm_zone
return timezone(timedelta(seconds=gmtoff), zone)
def astimezone(self, tz=None):
if tz is None:
tz = self._local_timezone()
elif not isinstance(tz, tzinfo):
raise TypeError("tz argument must be an instance of tzinfo")
mytz = self.tzinfo
if mytz is None:
mytz = self._local_timezone()
myoffset = mytz.utcoffset(self)
else:
myoffset = mytz.utcoffset(self)
if myoffset is None:
mytz = self.replace(tzinfo=None)._local_timezone()
myoffset = mytz.utcoffset(self)
if tz is mytz:
return self
# Convert self to UTC, and attach the new time zone object.
utc = (self - myoffset).replace(tzinfo=tz)
# Convert from UTC to tz's local time.
return tz.fromutc(utc)
# Ways to produce a string.
def ctime(self):
"Return ctime() style string."
weekday = self.toordinal() % 7 or 7
return "%s %s %2d %02d:%02d:%02d %04d" % (
_DAYNAMES[weekday],
_MONTHNAMES[self._month],
self._day,
self._hour, self._minute, self._second,
self._year)
def isoformat(self, sep='T', timespec='auto'):
"""Return the time formatted according to ISO.
The full format looks like 'YYYY-MM-DD HH:MM:SS.mmmmmm'.
By default, the fractional part is omitted if self.microsecond == 0.
        If self.tzinfo is not None, the UTC offset is also attached, giving
        a full format of 'YYYY-MM-DD HH:MM:SS.mmmmmm+HH:MM'.
Optional argument sep specifies the separator between date and
time, default 'T'.
The optional argument timespec specifies the number of additional
terms of the time to include.
"""
s = ("%04d-%02d-%02d%c" % (self._year, self._month, self._day, sep) +
_format_time(self._hour, self._minute, self._second,
self._microsecond, timespec))
off = self.utcoffset()
tz = _format_offset(off)
if tz:
s += tz
return s
def __repr__(self):
"""Convert to formal string, for repr()."""
L = [self._year, self._month, self._day, # These are never zero
self._hour, self._minute, self._second, self._microsecond]
if L[-1] == 0:
del L[-1]
if L[-1] == 0:
del L[-1]
s = "%s.%s(%s)" % (self.__class__.__module__,
self.__class__.__qualname__,
", ".join(map(str, L)))
if self._tzinfo is not None:
assert s[-1:] == ")"
s = s[:-1] + ", tzinfo=%r" % self._tzinfo + ")"
if self._fold:
assert s[-1:] == ")"
s = s[:-1] + ", fold=1)"
return s
def __str__(self):
"Convert to string, for str()."
return self.isoformat(sep=' ')
@classmethod
def strptime(cls, date_string, format):
'string, format -> new datetime parsed from a string (like time.strptime()).'
import _strptime
return _strptime._strptime_datetime(cls, date_string, format)
def utcoffset(self):
"""Return the timezone offset as timedelta positive east of UTC (negative west of
UTC)."""
if self._tzinfo is None:
return None
offset = self._tzinfo.utcoffset(self)
_check_utc_offset("utcoffset", offset)
return offset
def tzname(self):
"""Return the timezone name.
Note that the name is 100% informational -- there's no requirement that
it mean anything in particular. For example, "GMT", "UTC", "-500",
"-5:00", "EDT", "US/Eastern", "America/New York" are all valid replies.
"""
if self._tzinfo is None:
return None
name = self._tzinfo.tzname(self)
_check_tzname(name)
return name
def dst(self):
"""Return 0 if DST is not in effect, or the DST offset (as timedelta
positive eastward) if DST is in effect.
This is purely informational; the DST offset has already been added to
the UTC offset returned by utcoffset() if applicable, so there's no
need to consult dst() unless you're interested in displaying the DST
info.
"""
if self._tzinfo is None:
return None
offset = self._tzinfo.dst(self)
_check_utc_offset("dst", offset)
return offset
# Comparisons of datetime objects with other.
def __eq__(self, other):
if isinstance(other, datetime):
return self._cmp(other, allow_mixed=True) == 0
elif not isinstance(other, date):
return NotImplemented
else:
return False
def __le__(self, other):
if isinstance(other, datetime):
return self._cmp(other) <= 0
elif not isinstance(other, date):
return NotImplemented
else:
_cmperror(self, other)
def __lt__(self, other):
if isinstance(other, datetime):
return self._cmp(other) < 0
elif not isinstance(other, date):
return NotImplemented
else:
_cmperror(self, other)
def __ge__(self, other):
if isinstance(other, datetime):
return self._cmp(other) >= 0
elif not isinstance(other, date):
return NotImplemented
else:
_cmperror(self, other)
def __gt__(self, other):
if isinstance(other, datetime):
return self._cmp(other) > 0
elif not isinstance(other, date):
return NotImplemented
else:
_cmperror(self, other)
def _cmp(self, other, allow_mixed=False):
assert isinstance(other, datetime)
mytz = self._tzinfo
ottz = other._tzinfo
myoff = otoff = None
if mytz is ottz:
base_compare = True
else:
myoff = self.utcoffset()
otoff = other.utcoffset()
# Assume that allow_mixed means that we are called from __eq__
if allow_mixed:
if myoff != self.replace(fold=not self.fold).utcoffset():
return 2
if otoff != other.replace(fold=not other.fold).utcoffset():
return 2
base_compare = myoff == otoff
if base_compare:
return _cmp((self._year, self._month, self._day,
self._hour, self._minute, self._second,
self._microsecond),
(other._year, other._month, other._day,
other._hour, other._minute, other._second,
other._microsecond))
if myoff is None or otoff is None:
if allow_mixed:
return 2 # arbitrary non-zero value
else:
raise TypeError("cannot compare naive and aware datetimes")
# XXX What follows could be done more efficiently...
diff = self - other # this will take offsets into account
if diff.days < 0:
return -1
return diff and 1 or 0
def __add__(self, other):
"Add a datetime and a timedelta."
if not isinstance(other, timedelta):
return NotImplemented
delta = timedelta(self.toordinal(),
hours=self._hour,
minutes=self._minute,
seconds=self._second,
microseconds=self._microsecond)
delta += other
hour, rem = divmod(delta.seconds, 3600)
minute, second = divmod(rem, 60)
if 0 < delta.days <= _MAXORDINAL:
return type(self).combine(date.fromordinal(delta.days),
time(hour, minute, second,
delta.microseconds,
tzinfo=self._tzinfo))
raise OverflowError("result out of range")
__radd__ = __add__
def __sub__(self, other):
"Subtract two datetimes, or a datetime and a timedelta."
if not isinstance(other, datetime):
if isinstance(other, timedelta):
return self + -other
return NotImplemented
days1 = self.toordinal()
days2 = other.toordinal()
secs1 = self._second + self._minute * 60 + self._hour * 3600
secs2 = other._second + other._minute * 60 + other._hour * 3600
base = timedelta(days1 - days2,
secs1 - secs2,
self._microsecond - other._microsecond)
if self._tzinfo is other._tzinfo:
return base
myoff = self.utcoffset()
otoff = other.utcoffset()
if myoff == otoff:
return base
if myoff is None or otoff is None:
raise TypeError("cannot mix naive and timezone-aware time")
return base + otoff - myoff
def __hash__(self):
if self._hashcode == -1:
if self.fold:
t = self.replace(fold=0)
else:
t = self
tzoff = t.utcoffset()
if tzoff is None:
self._hashcode = hash(t._getstate()[0])
else:
days = _ymd2ord(self.year, self.month, self.day)
seconds = self.hour * 3600 + self.minute * 60 + self.second
self._hashcode = hash(timedelta(days, seconds, self.microsecond) - tzoff)
return self._hashcode
# Pickle support.
def _getstate(self, protocol=3):
yhi, ylo = divmod(self._year, 256)
us2, us3 = divmod(self._microsecond, 256)
us1, us2 = divmod(us2, 256)
m = self._month
if self._fold and protocol > 3:
m += 128
basestate = bytes([yhi, ylo, m, self._day,
self._hour, self._minute, self._second,
us1, us2, us3])
if self._tzinfo is None:
return (basestate,)
else:
return (basestate, self._tzinfo)
def __setstate(self, string, tzinfo):
if tzinfo is not None and not isinstance(tzinfo, _tzinfo_class):
raise TypeError("bad tzinfo state arg")
(yhi, ylo, m, self._day, self._hour,
self._minute, self._second, us1, us2, us3) = string
if m > 127:
self._fold = 1
self._month = m - 128
else:
self._fold = 0
self._month = m
self._year = yhi * 256 + ylo
self._microsecond = (((us1 << 8) | us2) << 8) | us3
self._tzinfo = tzinfo
def __reduce_ex__(self, protocol):
return (self.__class__, self._getstate(protocol))
def __reduce__(self):
return self.__reduce_ex__(2)
datetime.min = datetime(1, 1, 1)
datetime.max = datetime(9999, 12, 31, 23, 59, 59, 999999)
datetime.resolution = timedelta(microseconds=1)
def _isoweek1monday(year):
# Helper to calculate the day number of the Monday starting week 1
# XXX This could be done more efficiently
THURSDAY = 3
firstday = _ymd2ord(year, 1, 1)
firstweekday = (firstday + 6) % 7 # See weekday() above
week1monday = firstday - firstweekday
if firstweekday > THURSDAY:
week1monday += 7
return week1monday
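# Quick check of _isoweek1monday (illustrative, not part of the module): ISO
# week 1 is the week containing the year's first Thursday, so its Monday can
# fall in the previous year or a few days into January.
def _demo_isoweek1monday():
    # 2021-01-01 was a Friday (firstweekday 4 > THURSDAY), so week 1 of the
    # ISO year 2021 starts on Monday 2021-01-04.
    assert _isoweek1monday(2021) == _ymd2ord(2021, 1, 4)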
class timezone(tzinfo):
__slots__ = '_offset', '_name'
# Sentinel value to disallow None
_Omitted = object()
def __new__(cls, offset, name=_Omitted):
if not isinstance(offset, timedelta):
raise TypeError("offset must be a timedelta")
if name is cls._Omitted:
if not offset:
return cls.utc
name = None
elif not isinstance(name, str):
raise TypeError("name must be a string")
if not cls._minoffset <= offset <= cls._maxoffset:
raise ValueError("offset must be a timedelta "
"strictly between -timedelta(hours=24) and "
"timedelta(hours=24).")
return cls._create(offset, name)
@classmethod
def _create(cls, offset, name=None):
self = tzinfo.__new__(cls)
self._offset = offset
self._name = name
return self
def __getinitargs__(self):
"""pickle support"""
if self._name is None:
return (self._offset,)
return (self._offset, self._name)
def __eq__(self, other):
if type(other) != timezone:
return False
return self._offset == other._offset
def __hash__(self):
return hash(self._offset)
def __repr__(self):
"""Convert to formal string, for repr().
>>> tz = timezone.utc
>>> repr(tz)
'datetime.timezone.utc'
>>> tz = timezone(timedelta(hours=-5), 'EST')
>>> repr(tz)
"datetime.timezone(datetime.timedelta(-1, 68400), 'EST')"
"""
if self is self.utc:
return 'datetime.timezone.utc'
if self._name is None:
return "%s.%s(%r)" % (self.__class__.__module__,
self.__class__.__qualname__,
self._offset)
return "%s.%s(%r, %r)" % (self.__class__.__module__,
self.__class__.__qualname__,
self._offset, self._name)
def __str__(self):
return self.tzname(None)
def utcoffset(self, dt):
if isinstance(dt, datetime) or dt is None:
return self._offset
raise TypeError("utcoffset() argument must be a datetime instance"
" or None")
def tzname(self, dt):
if isinstance(dt, datetime) or dt is None:
if self._name is None:
return self._name_from_offset(self._offset)
return self._name
raise TypeError("tzname() argument must be a datetime instance"
" or None")
def dst(self, dt):
if isinstance(dt, datetime) or dt is None:
return None
raise TypeError("dst() argument must be a datetime instance"
" or None")
def fromutc(self, dt):
if isinstance(dt, datetime):
if dt.tzinfo is not self:
raise ValueError("fromutc: dt.tzinfo "
"is not self")
return dt + self._offset
raise TypeError("fromutc() argument must be a datetime instance"
" or None")
_maxoffset = timedelta(hours=23, minutes=59)
_minoffset = -_maxoffset
@staticmethod
def _name_from_offset(delta):
if not delta:
return 'UTC'
if delta < timedelta(0):
sign = '-'
delta = -delta
else:
sign = '+'
hours, rest = divmod(delta, timedelta(hours=1))
minutes, rest = divmod(rest, timedelta(minutes=1))
seconds = rest.seconds
microseconds = rest.microseconds
if microseconds:
return (f'UTC{sign}{hours:02d}:{minutes:02d}:{seconds:02d}'
f'.{microseconds:06d}')
if seconds:
return f'UTC{sign}{hours:02d}:{minutes:02d}:{seconds:02d}'
return f'UTC{sign}{hours:02d}:{minutes:02d}'
timezone.utc = timezone._create(timedelta(0))
timezone.min = timezone._create(timezone._minoffset)
timezone.max = timezone._create(timezone._maxoffset)
_EPOCH = datetime(1970, 1, 1, tzinfo=timezone.utc)
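# Minimal usage sketch for the fixed-offset timezone class above
# (illustrative, not part of the module):
def _demo_timezone_usage():
    est = timezone(timedelta(hours=-5), 'EST')
    dt = datetime(2021, 1, 1, 17, 0, tzinfo=timezone.utc).astimezone(est)
    assert (dt.hour, dt.tzname()) == (12, 'EST')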
# Some time zone algebra. For a datetime x, let
# x.n = x stripped of its timezone -- its naive time.
# x.o = x.utcoffset(), and assuming that doesn't raise an exception or
# return None
# x.d = x.dst(), and assuming that doesn't raise an exception or
# return None
# x.s = x's standard offset, x.o - x.d
#
# Now some derived rules, where k is a duration (timedelta).
#
# 1. x.o = x.s + x.d
# This follows from the definition of x.s.
#
# 2. If x and y have the same tzinfo member, x.s = y.s.
# This is actually a requirement, an assumption we need to make about
# sane tzinfo classes.
#
# 3. The naive UTC time corresponding to x is x.n - x.o.
# This is again a requirement for a sane tzinfo class.
#
# 4. (x+k).s = x.s
# This follows from #2, and that datetime+timedelta preserves tzinfo.
#
# 5. (x+k).n = x.n + k
# Again follows from how arithmetic is defined.
#
# Now we can explain tz.fromutc(x). Let's assume it's an interesting case
# (meaning that the various tzinfo methods exist, and don't blow up or return
# None when called).
#
# The function wants to return a datetime y with timezone tz, equivalent to x.
# x is already in UTC.
#
# By #3, we want
#
# y.n - y.o = x.n [1]
#
# The algorithm starts by attaching tz to x.n, and calling that y. So
# x.n = y.n at the start. Then it wants to add a duration k to y, so that [1]
# becomes true; in effect, we want to solve [2] for k:
#
# (y+k).n - (y+k).o = x.n [2]
#
# By #1, this is the same as
#
# (y+k).n - ((y+k).s + (y+k).d) = x.n [3]
#
# By #5, (y+k).n = y.n + k, which equals x.n + k because x.n=y.n at the start.
# Substituting that into [3],
#
# x.n + k - (y+k).s - (y+k).d = x.n; the x.n terms cancel, leaving
# k - (y+k).s - (y+k).d = 0; rearranging,
# k = (y+k).s - (y+k).d; by #4, (y+k).s == y.s, so
# k = y.s - (y+k).d
#
# On the RHS, (y+k).d can't be computed directly, but y.s can be, and we
# approximate k by ignoring the (y+k).d term at first. Note that k can't be
# very large, since all offset-returning methods return a duration of magnitude
# less than 24 hours. For that reason, if y is firmly in std time, (y+k).d must
# be 0, so ignoring it has no consequence then.
#
# In any case, the new value is
#
# z = y + y.s [4]
#
# It's helpful to step back at look at [4] from a higher level: it's simply
# mapping from UTC to tz's standard time.
#
# At this point, if
#
# z.n - z.o = x.n [5]
#
# we have an equivalent time, and are almost done. The insecurity here is
# at the start of daylight time. Picture US Eastern for concreteness. The wall
# time jumps from 1:59 to 3:00, and wall hours of the form 2:MM don't make good
# sense then. The docs ask that an Eastern tzinfo class consider such a time to
# be EDT (because it's "after 2"), which is a redundant spelling of 1:MM EST
# on the day DST starts. We want to return the 1:MM EST spelling because that's
# the only spelling that makes sense on the local wall clock.
#
# In fact, if [5] holds at this point, we do have the standard-time spelling,
# but that takes a bit of proof. We first prove a stronger result. What's the
# difference between the LHS and RHS of [5]? Let
#
# diff = x.n - (z.n - z.o) [6]
#
# Now
# z.n = by [4]
# (y + y.s).n = by #5
# y.n + y.s = since y.n = x.n
# x.n + y.s = since z and y are have the same tzinfo member,
# y.s = z.s by #2
# x.n + z.s
#
# Plugging that back into [6] gives
#
# diff =
# x.n - ((x.n + z.s) - z.o) = expanding
# x.n - x.n - z.s + z.o = cancelling
# - z.s + z.o = by #2
# z.d
#
# So diff = z.d.
#
# If [5] is true now, diff = 0, so z.d = 0 too, and we have the standard-time
# spelling we wanted in the endcase described above. We're done. Contrarily,
# if z.d = 0, then we have a UTC equivalent, and are also done.
#
# If [5] is not true now, diff = z.d != 0, and z.d is the offset we need to
# add to z (in effect, z is in tz's standard time, and we need to shift the
# local clock into tz's daylight time).
#
# Let
#
# z' = z + z.d = z + diff [7]
#
# and we can again ask whether
#
# z'.n - z'.o = x.n [8]
#
# If so, we're done. If not, the tzinfo class is insane, according to the
# assumptions we've made. This also requires a bit of proof. As before, let's
# compute the difference between the LHS and RHS of [8] (and skipping some of
# the justifications for the kinds of substitutions we've done several times
# already):
#
# diff' = x.n - (z'.n - z'.o) = replacing z'.n via [7]
# x.n - (z.n + diff - z'.o) = replacing diff via [6]
# x.n - (z.n + x.n - (z.n - z.o) - z'.o) =
# x.n - z.n - x.n + z.n - z.o + z'.o = cancel x.n
# - z.n + z.n - z.o + z'.o = cancel z.n
# - z.o + z'.o = #1 twice
# -z.s - z.d + z'.s + z'.d = z and z' have same tzinfo
# z'.d - z.d
#
# So z' is UTC-equivalent to x iff z'.d = z.d at this point. If they are equal,
# we've found the UTC-equivalent so are done. In fact, we stop with [7] and
# return z', not bothering to compute z'.d.
#
# How could z.d and z'.d differ? z' = z + z.d [7], so merely moving z' by
# a dst() offset, and starting *from* a time already in DST (we know z.d != 0),
# would have to change the result dst() returns: we start in DST, and moving
# a little further into it takes us out of DST.
#
# There isn't a sane case where this can happen. The closest it gets is at
# the end of DST, where there's an hour in UTC with no spelling in a hybrid
# tzinfo class. In US Eastern, that's 5:MM UTC = 0:MM EST = 1:MM EDT. During
# that hour, on an Eastern clock 1:MM is taken as being in standard time (6:MM
# UTC) because the docs insist on that, but 0:MM is taken as being in daylight
# time (4:MM UTC). There is no local time mapping to 5:MM UTC. The local
# clock jumps from 1:59 back to 1:00 again, and repeats the 1:MM hour in
# standard time. Since that's what the local clock *does*, we want to map both
# UTC hours 5:MM and 6:MM to 1:MM Eastern. The result is ambiguous
# in local time, but so it goes -- it's the way the local clock works.
#
# When x = 5:MM UTC is the input to this algorithm, x.o=0, y.o=-5 and y.d=0,
# so z=0:MM. z.d=60 (minutes) then, so [5] doesn't hold and we keep going.
# z' = z + z.d = 1:MM then, and z'.d=0, and z'.d - z.d = -60 != 0 so [8]
# (correctly) concludes that z' is not UTC-equivalent to x.
#
# Because we know z.d said z was in daylight time (else [5] would have held and
# we would have stopped then), and we know z.d != z'.d (else [8] would have held
# and we have stopped then), and there are only 2 possible values dst() can
# return in Eastern, it follows that z'.d must be 0 (which it is in the example,
# but the reasoning doesn't depend on the example -- it depends on there being
# two possible dst() outcomes, one zero and the other non-zero). Therefore
# z' must be in standard time, and is the spelling we want in this case.
#
# Note again that z' is not UTC-equivalent as far as the hybrid tzinfo class is
# concerned (because it takes z' as being in standard time rather than the
# daylight time we intend here), but returning it gives the real-life "local
# clock repeats an hour" behavior when mapping the "unspellable" UTC hour into
# tz.
#
# When the input is 6:MM, z=1:MM and z.d=0, and we stop at once, again with
# the 1:MM standard time spelling we want.
#
# So how can this break? One of the assumptions must be violated. Two
# possibilities:
#
# 1) [2] effectively says that y.s is invariant across all y belong to a given
# time zone. This isn't true if, for political reasons or continental drift,
# a region decides to change its base offset from UTC.
#
# 2) There may be versions of "double daylight" time where the tail end of
# the analysis gives up a step too early. I haven't thought about that
# enough to say.
#
# In any case, it's clear that the default fromutc() is strong enough to handle
# "almost all" time zones: so long as the standard offset is invariant, it
# doesn't matter if daylight time transition points change from year to year, or
# if daylight time is skipped in some years; it doesn't matter how large or
# small dst() may get within its bounds; and it doesn't even matter if some
# perverse time zone returns a negative dst(). So a breaking case must be
# pretty bizarre, and a tzinfo subclass can override fromutc() if it is.
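# A condensed sketch of the default tzinfo.fromutc() that the derivation above
# justifies (illustrative only; assumes a hybrid tzinfo whose utcoffset() and
# dst() both return timedeltas; dtoff/dtdst play the roles of y.o/y.d):
def _demo_default_fromutc(dt):
    # dt is aware and holds a UTC time (the x of the proof).
    dtoff = dt.utcoffset()
    dtdst = dt.dst()
    delta = dtoff - dtdst                # y.s, the standard offset
    if delta:
        dt += delta                      # z = y + y.s, step [4]
        dtdst = dt.dst()
    return dt + dtdst if dtdst else dt   # z' = z + z.d, step [7]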
try:
from _datetime import *
except ImportError:
pass
else:
# Clean up unused names
del (_DAYNAMES, _DAYS_BEFORE_MONTH, _DAYS_IN_MONTH, _DI100Y, _DI400Y,
_DI4Y, _EPOCH, _MAXORDINAL, _MONTHNAMES, _build_struct_time,
_check_date_fields, _check_int_field, _check_time_fields,
_check_tzinfo_arg, _check_tzname, _check_utc_offset, _cmp, _cmperror,
_date_class, _days_before_month, _days_before_year, _days_in_month,
_format_time, _format_offset, _is_leap, _isoweek1monday, _math,
_ord2ymd, _time, _time_class, _tzinfo_class, _wrap_strftime, _ymd2ord,
_divide_and_round, _parse_isoformat_date, _parse_isoformat_time,
_parse_hh_mm_ss_ff)
# XXX Since import * above excludes names that start with _,
# docstring does not get overwritten. In the future, it may be
# appropriate to maintain a single module level docstring and
# remove the following line.
from _datetime import __doc__
# ---- train_USCL/loss/nt_xent.py (repo: mrzhu666/USCL, MIT license) ----
import torch
import numpy as np
class NTXentLoss(torch.nn.Module):
    def __init__(self, device, batch_size, temperature, use_cosine_similarity):
        super(NTXentLoss, self).__init__()
        self.batch_size = batch_size
        self.temperature = temperature
        self.device = device
        self.softmax = torch.nn.Softmax(dim=-1)
        # build the negative-pair mask up front so forward() also works when the
        # very first batch already has exactly `batch_size` samples
        self.mask_samples_from_same_repr = self._get_correlated_mask().type(torch.bool)
        self.similarity_function = self._get_similarity_function(use_cosine_similarity)
        self.criterion = torch.nn.CrossEntropyLoss(reduction="sum")  # sum all 2N terms of loss instead of getting mean val
def _get_similarity_function(self, use_cosine_similarity):
''' Cosine similarity or dot similarity for computing loss '''
if use_cosine_similarity:
self._cosine_similarity = torch.nn.CosineSimilarity(dim=-1)
return self._cosine_simililarity
else:
return self._dot_simililarity
def _get_correlated_mask(self):
diag = np.eye(2 * self.batch_size) # I(2Nx2N), identity matrix
l1 = np.eye((2 * self.batch_size), 2 * self.batch_size, k=-self.batch_size) # lower diagonal matrix, N non-zero elements
l2 = np.eye((2 * self.batch_size), 2 * self.batch_size, k=self.batch_size) # upper diagonal matrix, N non-zero elements
        mask = torch.from_numpy((diag + l1 + l2))  # [2N, 2N], with 4N non-zero entries
        mask = (1 - mask).type(torch.bool)  # [2N, 2N], with 4(N^2 - N) entries set to True
return mask.to(self.device)
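    # e.g. for batch_size=2 the mask is 4x4: False on the main diagonal and on
    # the +/-2 off-diagonals (self-similarities and positive pairs), True on
    # the remaining 4(2^2 - 2) = 8 entries that index the negatives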
@staticmethod
def _dot_simililarity(x, y):
        v = torch.tensordot(x.unsqueeze(1), y.T.unsqueeze(0), dims=2)  # extend the dimensions before calculating similarity
        # x shape after unsqueeze: (2N, 1, C)
        # y shape after transpose + unsqueeze: (1, C, 2N)
        # v shape: (2N, 2N)
return v
def _cosine_simililarity(self, x, y):
        # x shape after unsqueeze: (2N, 1, C), the 2N representations
        # y shape after unsqueeze: (1, 2N, C), the same 2N representations
        # v shape: (2N, 2N)
v = self._cosine_similarity(x.unsqueeze(1), y.unsqueeze(0)) # extend the dimensions before calculating similarity
return v
def forward(self, zis, zjs):
if self.batch_size != zis.shape[0]:
self.batch_size = zis.shape[0] # the last batch may not have the same batch size
self.mask_samples_from_same_repr = self._get_correlated_mask().type(torch.bool)
representations = torch.cat([zjs, zis], dim=0) # [N, C] => [2N, C]
similarity_matrix = self.similarity_function(representations, representations) # [2N, 2N]
# filter out the scores from the positive samples
l_pos = torch.diag(similarity_matrix, self.batch_size) # upper diagonal, N x [left, right] positive sample pairs
r_pos = torch.diag(similarity_matrix, -self.batch_size) # lower diagonal, N x [right, left] positive sample pairs
positives = torch.cat([l_pos, r_pos]).view(2 * self.batch_size, 1) # similarity of positive pairs, [2N, 1]
        negatives = similarity_matrix[self.mask_samples_from_same_repr].view(2 * self.batch_size, -1)  # [2N, 2N-2]
        logits = torch.cat((positives, negatives), dim=1)  # [2N, 2N-1], each row feeds one cross-entropy term
        logits /= self.temperature
        # labels are all 0, meaning the first value of each row is the numerator term of CELoss
        # each denominator contains 2N-1 terms: the positive pair plus the 2N-2 similarities with all other samples.
labels = torch.zeros(2 * self.batch_size).to(self.device).long()
loss = self.criterion(logits, labels)
        return loss / (2 * self.batch_size)  # dividing the summed loss by 2N is equivalent to passing reduction='mean' to the criterion
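# Minimal usage sketch (illustrative, not from the original file): zis/zjs are
# L2-normalized projections of two augmented views of the same N images, as in
# SimCLR-style training.
if __name__ == "__main__":
    N, C = 8, 128
    zis = torch.nn.functional.normalize(torch.randn(N, C), dim=1)
    zjs = torch.nn.functional.normalize(torch.randn(N, C), dim=1)
    loss_fn = NTXentLoss(device='cpu', batch_size=N, temperature=0.5,
                         use_cosine_similarity=True)
    print(loss_fn(zis, zjs))  # scalar tensor: NT-Xent loss averaged over 2N terms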
# ---- script/module_load.py (repo: ScarletAI/Arch-Apple-Installer, MIT license) ----
import sys, struct, usb.core
data = open(sys.argv[1], 'rb').read()
dev = usb.core.find(idVendor=0x05ac, idProduct=0x4141)
if dev is None:
raise ValueError("Device not found")
dev.set_configuration()
dev.ctrl_transfer(0x21, 2, 0, 0, 0)
dev.ctrl_transfer(0x21, 1, 0, 0, struct.pack("I", len(data)))
dev.write(2, data, 1000000)
if len(data) % 512 == 0:
    # the payload is an exact multiple of the 512-byte packet size, so send a
    # zero-length packet to signal the end of the transfer
    dev.write(0, b"")
dev.ctrl_transfer(0x21, 3, 0, 0, b"modload\n")
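# Usage sketch (assumes the target device is attached and enumerates with the
# vendor/product IDs above):
#   python module_load.py payload.bin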
# ---- cmd/lit-qt/rpcui.py (repo: tnakagawa/lit, MIT license) ----
#!/usr/bin/env python
from PyQt4 import QtCore, QtGui
#Extend the library search path to our `qt_files` directory
import sys
sys.path.append("qt_files")
import socket
import json
#ui file import
import rpcui_ui
#Handles rpc communications and conjugate response handler functions
class rpcCom():
def __init__(self, addr, port):
#Open up the socket connection
self.conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.conn.connect((addr, port))
def getBal(self):
rpcCmd = {
"method": "LitRPC.Bal",
"params": [{
}]
}
#TODO: What is the purpose of this `id`
rpcCmd.update({"jsonrpc": "2.0", "id": "93"})
#print json.dumps(rpcCmd)
self.conn.sendall(json.dumps(rpcCmd))
r = json.loads(self.conn.recv(8000000))
#print r
return r["result"]["TxoTotal"]
def getAdr(self):
rpcCmd = {
"method": "LitRPC.Address",
"params": [{"NumToMake": 0}]
}
rpcCmd.update({"jsonrpc": "2.0", "id": "94"})
#print json.dumps(rpcCmd)
self.conn.sendall(json.dumps(rpcCmd))
r = json.loads(self.conn.recv(8000000))
#print r
return r["result"]["Addresses"][-1]
def prSend(self, adr, amt):
rpcCmd = {
"method": "LitRPC.Send",
"params": [{"DestAddrs": [adr], "Amts": [amt]}]
}
rpcCmd.update({"jsonrpc": "2.0", "id": "95"})
#print json.dumps(rpcCmd)
self.conn.sendall(json.dumps(rpcCmd))
r = json.loads(self.conn.recv(8000000))
#print r
if r["error"] != None:
raise RuntimeError(r["error"])
return "Sent. TXID: " + r["result"]["Txids"][0]
class mainWindow(QtGui.QMainWindow, rpcui_ui.Ui_MainWindow):
def __init__(self, parent):
#Set up calls to get QT working
QtGui.QMainWindow.__init__(self, parent)
self.setupUi(self)
#There is no need for a hint button
self.setWindowFlags(self.windowFlags() & ~QtCore.Qt.WindowContextHelpButtonHint)
#Set up the RPC communication object
self.rpcCom = rpcCom("127.0.0.1", 9750)
#Setup the connections to their triggers
self.setup_connections()
#Sets the text value for the balance label. Make this its own function to
# be used as a callback for the "Refresh" button
def set_bal_label(self):
bal = self.rpcCom.getBal()
self.bal_label.setText(str(bal))
#The trigger for the send button being clicked
def send_button_clicked(self):
#TODO: Implement address validity verification
to_addr = str(self.send_addr_line_edit.text())
amt = self.send_amt_spin_box.value()
try:
#TODO: Make this display something to the user that their input is poor
if amt == 0:
raise RuntimeError("Invalid input send amount")
self.rpcCom.prSend(to_addr, amt)
except RuntimeError as rterror:
print "Error: " + str(rterror)
def setup_connections(self):
#Populate the address label
addr = self.rpcCom.getAdr()
        self.addr_label.setText(addr)
#Populate the balance label
self.set_bal_label()
#Connect the trigger for the "Refresh" button
self.bal_refresh_button.clicked.connect(self.set_bal_label)
#Connect the trigger for the "Send" button
self.send_button.clicked.connect(self.send_button_clicked)
def main(args):
app = QtGui.QApplication(args)
window = mainWindow(None)
window.show()
sys.exit(app.exec_())
if __name__ == '__main__':
main(sys.argv)
# ---- hyper_resource/resources/FeatureCollectionResource.py (repo: IDEHCO3/kanban-backend, MIT license) ----
import json
from operator import itemgetter
import requests
from django.core import cache
from django.contrib.gis.db.models import Extent, Union, MakeLine
from django.contrib.gis.geos import GeometryCollection, GEOSGeometry
from rest_framework.response import Response
from hyper_resource.resources.AbstractResource import *
from hyper_resource.resources.AbstractResource import RequiredObject
from hyper_resource.resources.FeatureResource import FeatureResource
from hyper_resource.resources.SpatialCollectionResource import SpatialCollectionResource
from hyper_resource.resources.AbstractCollectionResource import AbstractCollectionResource, COLLECTION_TYPE
from hyper_resource.models import SpatialCollectionOperationController, BaseOperationController, FactoryComplexQuery, \
ConverterType, FeatureModel, FeatureCollection
from copy import deepcopy
from image_generator.img_generator import BuilderPNG
class FeatureCollectionResource(SpatialCollectionResource):
def __init__(self):
super(FeatureCollectionResource, self).__init__()
self.operation_controller = SpatialCollectionOperationController()
#self.operation_controller.initialize()
def default_resource_representation(self):
return FeatureCollection
def geometry_operations(self):
return self.operation_controller.feature_collection_operations_dict()
def geometry_field_name(self):
return self.serializer_class.Meta.geo_field
def is_spatial_attribute(self, attribute_name):
return attribute_name == self.geometry_field_name()
def is_spatial_operation(self, operation_name):
return operation_name in self.geometry_operations()
def dict_list_as_feature_collection(self, dict_list):
return {'type': 'FeatureCollection', 'features': dict_list}
def dict_list_as_geometry_collection(self, dict_list):
return {'type': 'GeometryCollection', 'geometries': dict_list}
def default_content_type(self):
return self.temporary_content_type if self.temporary_content_type is not None else CONTENT_TYPE_GEOJSON
def define_content_type_by_only_attributes(self, request, attributes_functions_str):
content_type_by_accept = self.content_type_or_default_content_type(request)
attrs_arr = self.remove_last_slash(attributes_functions_str).split(',')
if self.geometry_field_name() in attrs_arr:
return content_type_by_accept
if content_type_by_accept != self.default_content_type():
return content_type_by_accept
return CONTENT_TYPE_JSON
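    # e.g. requesting only non-spatial attributes under the default Accept
    # header falls through to plain JSON above, while including the geometry
    # attribute in the list keeps the (Geo)JSON default content type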
def define_content_type_by_operation(self, request, operation_name):
content_type_by_accept = self.content_type_or_default_content_type(request)
oper_ret_type = self._dict_all_operation_dict()[operation_name].return_type
if content_type_by_accept != self.default_content_type():
return content_type_by_accept
if issubclass(oper_ret_type, GEOSGeometry):
return self.default_content_type()
return CONTENT_TYPE_JSON
def dict_by_accept_resource_representation(self):
dict = {
CONTENT_TYPE_OCTET_STREAM: 'GeobufCollection'
}
return dict
def define_resource_representation_by_operation(self, request, attributes_functions_str):
resource_representation_by_accept = self.resource_representation_or_default_resource_representation(request)
resource_representation_by_return_type = self.execute_method_to_get_return_type_from_operation(attributes_functions_str)
accept_is_binary = resource_representation_by_accept == self.dict_by_accept_resource_representation()[CONTENT_TYPE_OCTET_STREAM]
if not issubclass(resource_representation_by_return_type, GEOSGeometry):
if accept_is_binary:
return bytes
if resource_representation_by_accept != self.default_resource_representation():
if accept_is_binary:
if resource_representation_by_return_type not in [FeatureCollection, GeometryCollection]:
return "Geobuf"
return resource_representation_by_accept
return resource_representation_by_return_type
    #todo: needs to be prioritized in unit tests
def define_resource_representation_from_collect_operation(self, request, attributes_functions_str):
collected_attrs = self.extract_collect_operation_attributes(attributes_functions_str)
res_type_by_accept = self.resource_representation_or_default_resource_representation(request)
oper_in_collect_ret_type = self.get_operation_in_collect_return_type(attributes_functions_str)
if res_type_by_accept != self.default_resource_representation():
if self.geometry_field_name() not in collected_attrs:
return bytes
# the operated attribute isn't the geometric attribute
if self.geometry_field_name() != collected_attrs[-1]:
return res_type_by_accept
if issubclass(oper_in_collect_ret_type, GEOSGeometry):
return res_type_by_accept
return bytes
# at this point 'res_type_by_accept' current value is 'FeatureCollection'
if self.geometry_field_name() not in collected_attrs:
return COLLECTION_TYPE
# at this point collect operation has geometric attribute
if len(collected_attrs) == 1:
if issubclass(oper_in_collect_ret_type, GEOSGeometry):
return GeometryCollection
return COLLECTION_TYPE
return res_type_by_accept
def define_resource_representation_by_only_attributes(self, request, attributes_functions_str):
attr_arr = self.remove_last_slash(attributes_functions_str).split(",")
resource_type_by_accept = self.resource_representation_or_default_resource_representation(request)
accept_content_type = request.META.get(HTTP_ACCEPT, '')
alpha_dict_by_accept = super(FeatureCollectionResource, self).dict_by_accept_resource_representation()
if resource_type_by_accept != self.default_resource_representation():
if self.geometry_field_name() in attr_arr:
return resource_type_by_accept
            return alpha_dict_by_accept[accept_content_type] if accept_content_type in alpha_dict_by_accept else "Thing"
if self.geometry_field_name() in attr_arr:
if len(attr_arr) > 1:
return self.default_resource_representation()
return GeometryCollection
return COLLECTION_TYPE
#todo
def path_request_is_ok(self, attributes_functions_str):
return True
def path_has_only_spatial_operation(self, attributes_functions_str):
att_funcs = attributes_functions_str.split('/')
spatial_operation_names = self.geometry_operations().keys()
if len(att_funcs) > 1 and (att_funcs[0].lower() in spatial_operation_names):
return True
return att_funcs[0].lower() in spatial_operation_names
def get_operation_name_from_path(self, attributes_functions_str):
first_part_name = super(FeatureCollectionResource, self).get_operation_name_from_path(attributes_functions_str)
# join operation has priority
if self.path_has_join_operation(attributes_functions_str):
return self.operation_controller.join_operation_name
if first_part_name not in self.array_of_operation_name():
return None
if (first_part_name == self.operation_controller.filter_collection_operation_name or
first_part_name == self.operation_controller.offset_limit_collection_operation_name) and '/*extent' in attributes_functions_str:
return 'extent'
if (first_part_name == self.operation_controller.filter_collection_operation_name or
first_part_name == self.operation_controller.offset_limit_collection_operation_name) and '/*union' in attributes_functions_str:
return 'union'
if (first_part_name == self.operation_controller.filter_collection_operation_name or
first_part_name == self.operation_controller.offset_limit_collection_operation_name) and '/*make_line' in attributes_functions_str:
return 'make-line'
return first_part_name
def is_filter_with_spatial_operation(self, attributes_functions_str):
att_funcs = attributes_functions_str.split('/')
return (len(att_funcs) > 1 and (att_funcs[0].lower() in self.geometry_operations().keys())) \
or self.attributes_functions_str_is_filter_with_spatial_operation(attributes_functions_str)
def operations_with_parameters_type(self):
return self.operation_controller.feature_collection_operations_dict()
def get_objects_from_spatial_operation(self, array_of_terms):
full_oper_snippet = "/".join(array_of_terms)
first_oper_snippet, second_oper_snippet = self.split_combined_operation(full_oper_snippet)
arr_to_q_object = array_of_terms
#defining array to Q object
if self.path_has_projection(full_oper_snippet):
arr_to_q_object = array_of_terms[2:]
if second_oper_snippet is not None:
second_oper_init = [k for k, v in enumerate(arr_to_q_object) if v.startswith('*collect') or v.startswith('*' + self.operation_controller.count_resource_collection_operation_name)]
arr_to_q_object = arr_to_q_object if len(second_oper_snippet) == 0 else arr_to_q_object[:second_oper_init[0]]
q_object = self.q_object_for_filter_array_of_terms(arr_to_q_object)
if second_oper_snippet is not None:
if second_oper_snippet.startswith('collect'):
collect_attrs = self.extract_collect_operation_attributes(second_oper_snippet)
queryset = self.model_class().objects.filter(q_object).values(*collect_attrs)
return self.get_objects_from_collect_operation(second_oper_snippet, queryset)
            else:  # the only other option is 'count_resource'
return self.model_class().objects.filter(q_object).count()
if self.path_has_projection(full_oper_snippet):
projection_attrs = self.extract_projection_attributes(full_oper_snippet)
return self.model_class().objects.filter(q_object).values(*projection_attrs)
return self.model_class().objects.filter(q_object)
def q_object_for_filter_array_of_terms(self, array_of_terms):
fcq = FactoryComplexQuery()
return fcq.q_object_for_spatial_expression(None, self.model_class(), array_of_terms)
# Responds a path(string) normalized for spatial operation in IRI. Ex.: within/... => geom/within/...
def inject_geometry_attribute_in_spatial_operation_for_path(self, arr_of_term):
indexes = []
projection_snippet_arr = None
if arr_of_term[0] == self.operation_controller.projection_operation_name:
projection_snippet_arr, arr_of_term_without_projection = arr_of_term[:2], arr_of_term[2:]
else:
arr_of_term_without_projection = arr_of_term
for idx, term in enumerate(arr_of_term_without_projection):
array_django_name_operation = [type_called.name for type_called in self.operation_controller.feature_collection_operations_dict().values()]
if term in array_django_name_operation:
indexes.append(idx)
count = 0
for i in indexes:
arr_of_term_without_projection.insert(i + count, self.geometry_field_name())
count += 1
if projection_snippet_arr is not None and arr_of_term_without_projection is not None:
projection_snippet_arr.extend(arr_of_term_without_projection)
return projection_snippet_arr
return arr_of_term_without_projection
def path_has_geometry_attribute(self, term_of_path):
return term_of_path.lower() == self.geometry_field_name()
def execute_complex_request(self, request):
# using request.build_absolute_uri() will cause problems in the case use of GeoJson in request
absolute_uri = request.scheme + '://' + request.get_host() + request.path
absolute_uri = self.remove_last_slash(absolute_uri)
request_tuple = self.split_complex_uri(absolute_uri)
operation = request_tuple[1]
ct = ConverterType()
        # a request for FeatureCollectionResource means that the first url, request_tuple[0],
        # corresponds to a FeatureCollection/GeometryCollection
geom_left = ct.get_geos_geometry_from_request(request_tuple[0])
if self.path_has_url(request_tuple[2]):
response = requests.get(request_tuple[2])
response_right = json.dumps(response.json())
        else:  # if request_tuple[2] is a GeometryCollection (GeoJSON) or WKT ...
response_right = request_tuple[2]
result = self._execute_attribute_or_method(geom_left, operation, [response_right])
return result
def operation_name_method_dic(self):
dicti = super(FeatureCollectionResource, self).operation_name_method_dic()
dicti.update({
self.operation_controller.bbcontaining_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.contained_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.containing_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.containing_properly_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.covering_by_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.covering_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.crossing_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.disjointing_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.intersecting_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.isvalid_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.overlaping_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.relating_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.touching_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.within_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.on_left_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.on_right_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.overlaping_left_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.overlaping_right_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.overlaping_above_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.overlaping_below_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.strictly_above_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.strictly_below_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.distance_gt_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.distance_gte_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.distance_lt_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.distance_lte_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.dwithin_operation_name: self.required_object_for_specialized_operation,
self.operation_controller.union_collection_operation_name: self.required_object_for_union_operation,
self.operation_controller.extent_collection_operation_name: self.required_object_for_extent_operation,
self.operation_controller.make_line_collection_operation_name: self.required_object_for_make_line_operation,
})
return dicti
def operation_name_context_dic(self):
dicti = super(FeatureCollectionResource, self).operation_name_context_dic()
dicti.update({
self.operation_controller.bbcontaining_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.contained_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.containing_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.containing_properly_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.covering_by_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.covering_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.crossing_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.disjointing_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.intersecting_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.isvalid_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.overlaping_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.relating_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.touching_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.within_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.on_left_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.on_right_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.overlaping_left_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.overlaping_right_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.overlaping_above_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.overlaping_below_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.strictly_above_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.strictly_below_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.distance_gt_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.distance_gte_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.distance_lt_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.distance_lte_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.dwithin_operation_name: self.required_context_for_specialized_operation,
self.operation_controller.union_collection_operation_name: self.required_context_for_union_operation,
self.operation_controller.extent_collection_operation_name: self.required_context_for_extent_operation,
self.operation_controller.make_line_collection_operation_name: self.required_context_for_make_line_operation,
self.operation_controller.join_operation_name: self.required_context_for_specialized_operation,
})
return dicti
def operation_name_return_type_dic(self):
dicti = super(FeatureCollectionResource, self).operation_name_return_type_dic()
dicti.update({
self.operation_controller.bbcontaining_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.bboverlaping_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.contained_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.containing_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.containing_properly_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.covering_by_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.covering_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.crossing_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.disjointing_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.intersecting_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.isvalid_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.overlaping_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.relating_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.touching_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.within_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.on_left_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.on_right_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.overlaping_left_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.overlaping_right_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.overlaping_above_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.overlaping_below_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.strictly_above_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.strictly_below_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.distance_gt_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.distance_gte_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.distance_lt_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.distance_lte_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.dwithin_operation_name: self.return_type_for_specialized_operation,
self.operation_controller.union_collection_operation_name: self.return_type_for_union_operation,
self.operation_controller.extent_collection_operation_name: self.return_type_for_extent_operation,
self.operation_controller.make_line_collection_operation_name: self.return_type_for_make_line_operation
})
return dicti
def operation_name_resource_representation_dic(self):
dicti = super(FeatureCollectionResource, self).operation_name_resource_representation_dic()
dicti.update({
self.operation_controller.bbcontaining_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.bboverlaping_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.contained_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.containing_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.containing_properly_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.covering_by_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.covering_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.crossing_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.disjointing_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.intersecting_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.isvalid_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.overlaping_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.relating_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.touching_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.within_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.on_left_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.on_right_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.overlaping_left_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.overlaping_right_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.overlaping_above_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.overlaping_below_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.strictly_above_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.strictly_below_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.distance_gt_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.distance_gte_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.distance_lt_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.distance_lte_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.dwithin_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.union_collection_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.extent_collection_operation_name: self.define_resource_representation_by_operation,
self.operation_controller.make_line_collection_operation_name: self.define_resource_representation_by_operation
})
return dicti
    # Returns an array of operation names.
def array_of_operation_name(self):
collection_operations_array = super(FeatureCollectionResource, self).array_of_operation_name()
collection_operations_array.extend(self.operation_controller.feature_collection_operations_dict().keys())
return collection_operations_array
def return_type_by_only_attributes(self, attributes_functions_str):
attrs = self.remove_last_slash(attributes_functions_str).split(",")
if self.geometry_field_name() not in attrs:
return super(FeatureCollectionResource, self).return_type_by_only_attributes(attributes_functions_str)
if len(attrs) > 1:
return FeatureCollection
return GeometryCollection
def return_type_for_filter_operation(self, attributes_functions_str):
return FeatureCollection
def return_type_for_offset_limit_operation(self, attributes_functions_str):
return FeatureCollection
def return_type_for_distinct_operation(self, attributes_functions_str):
return FeatureCollection
def return_type_for_group_by_count_operation(self, attributes_functions_str):
grouped_attribute = self.remove_last_slash(attributes_functions_str).split("/")[-1]
if grouped_attribute != self.geometry_field_name():
return super(FeatureCollectionResource, self).return_type_for_group_by_count_operation(attributes_functions_str)
return FeatureCollection
def return_type_for_collect_operation(self, attributes_functions_str):
attributes_in_collect_arr = self.extract_collect_operation_attributes(attributes_functions_str)
if self.geometry_field_name() not in attributes_in_collect_arr:
return super(FeatureCollectionResource, self).return_type_for_collect_operation(attributes_functions_str)
operated_attribute = attributes_in_collect_arr[-1]
if not self.geometry_field_name() == operated_attribute:
return FeatureCollection
operation_in_collect_return_type = self.get_operation_in_collect_return_type(attributes_functions_str)
if not issubclass(operation_in_collect_return_type, GEOSGeometry):
return COLLECTION_TYPE
if len(attributes_in_collect_arr) > 1:
return FeatureCollection
return GeometryCollection
def return_type_for_specialized_operation(self, attributes_functions_str):
return self.default_resource_representation()
def return_type_for_union_operation(self, attributes_functions_str):
geometry_field_type = type(self.field_for(self.geometry_field_name()))
        # Map the model's geometry field class to the GEOS multi-geometry
        # class that a union over the collection produces.
        multi_object_class_for_geom_type = {
            PointField: MultiPoint,
            LineStringField: MultiLineString,
            PolygonField: MultiPolygon,
            MultiPointField: MultiPoint,
            MultiLineStringField: MultiLineString,
            MultiPolygonField: MultiPolygon,
            GEOSGeometry: MultiPolygon
        }
return multi_object_class_for_geom_type[geometry_field_type]
def return_type_for_make_line_operation(self, attributes_functions_str):
return LineString
def return_type_for_extent_operation(self, attributes_functions_str):
return list
def required_object_for_specialized_operation(self, request, attributes_functions_str):
first_oper_snippet, second_oper_snippet = self.split_combined_operation(attributes_functions_str)
if second_oper_snippet is not None:
if second_oper_snippet.startswith('collect'):
if self.path_has_projection(first_oper_snippet) and not self.projection_attrs_equals_collect_attrs(attributes_functions_str):
message = "Projection attribute list must be the same as collect operation attribute list"
return self.required_object_for_invalid_sintax(attributes_functions_str, message)
spatial_objects = self.get_objects_from_specialized_operation(attributes_functions_str)
serialized_data = self.get_objects_serialized_by_collect_operation(second_oper_snippet, spatial_objects)
            # otherwise, the second operation is count_resource
            else:
spatial_objects = self.get_objects_from_specialized_operation(attributes_functions_str)
serialized_data = {'count_resource': spatial_objects}
return RequiredObject(serialized_data, self.content_type_or_default_content_type(request), spatial_objects, 200)
spatial_objects = self.get_objects_from_specialized_operation(attributes_functions_str)
if self.path_has_projection(attributes_functions_str):
attrs_str = self.extract_projection_attributes(attributes_functions_str, as_string=True)
serialized_data = self.get_object_serialized_by_only_attributes(attrs_str, spatial_objects)
            return RequiredObject(serialized_data, self.content_type_or_default_content_type(request), spatial_objects, 200)
else:
return self.required_object(request, spatial_objects)
def required_context_for_specialized_operation(self, request, attributes_functions_str):
context = self.get_context_for_specialized_operation(request, attributes_functions_str)
return RequiredObject(context, CONTENT_TYPE_LD_JSON, self.object_model, 200)
def required_context_for_union_operation(self, request, attributes_functions_str):
context = self.get_context_for_union_operation(request, attributes_functions_str)
return RequiredObject(context, CONTENT_TYPE_LD_JSON, self.object_model, 200)
def required_context_for_extent_operation(self, request, attributes_functions_str):
context = self.get_context_for_extent_operation(request, attributes_functions_str)
return RequiredObject(context, CONTENT_TYPE_LD_JSON, self.object_model, 200)
def required_context_for_make_line_operation(self, request, attributes_functions_str):
context = self.get_context_for_make_line_operation(request, attributes_functions_str)
return RequiredObject(context, CONTENT_TYPE_LD_JSON, self.object_model, 200)
def required_object_for_extent_operation(self, request, attributes_functions_str):
extent_dict = self.get_objects_from_extent_spatial_operation(attributes_functions_str)
extent_dict['extent'] = extent_dict.pop(self.geometry_field_name() + '__extent')
        return self.required_object_for_aggregation_operation(request, extent_dict)
    def required_object_for_union_operation(self, request, attributes_functions_str):
        object_ = self.get_object_from_union_spatial_operation(attributes_functions_str)
        a_dictionary = json.loads(object_[self.geometry_field_name() + '__union'].geojson)
        return self.required_object_for_aggregation_operation(request, a_dictionary)
    def required_object_for_make_line_operation(self, request, attributes_functions_str):
        line = self.get_object_from_make_line_spatial_operation(attributes_functions_str)
        a_dictionary = json.loads(line[self.geometry_field_name() + '__makeline'].geojson)
        return self.required_object_for_aggregation_operation(request, a_dictionary)
    #todo: define the Content-Type header according to the type returned (FeatureCollection, buffer, dict, etc.)
def required_object_for_collect_operation(self, request, attributes_functions_str):
collect_operation_snippet = self.remove_last_slash(attributes_functions_str)
if self.path_has_projection(collect_operation_snippet):
if self.projection_attrs_equals_collect_attrs(collect_operation_snippet):
collect_operation_snippet = self.remove_projection_from_path(attributes_functions_str)
else:
message = 'Projection attributes list must be the same as collect operation attributes list'
return self.required_object_for_invalid_sintax(attributes_functions_str, message)
business_objects = self.get_objects_from_collect_operation(collect_operation_snippet)
serialized_data = self.get_objects_serialized_by_collect_operation(collect_operation_snippet, business_objects)
return RequiredObject(serialized_data, self.content_type_or_default_content_type(request), business_objects, 200)
def get_objects_from_join_operation(self, request, attributes_functions_str):
join_operation = self.build_join_operation(request, attributes_functions_str)
return self.join_feature_collection_on_dict_list(join_operation)
def join_feature_collection_on_dict_list(self, join_operation):
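        # Plain-Python left join: for each GeoJSON feature on the left side,
        # append every right-side dict whose join attribute matches under
        # properties['__joined__']; features without any match are dropped.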
joined_data_list = []
for original_feature in join_operation.left_join_data['features']:
updated_feature = deepcopy(original_feature)
updated_feature['properties']['__joined__'] = []
for dict_to_join in join_operation.right_join_data:
if updated_feature['properties'][join_operation.left_join_attr] == dict_to_join[join_operation.right_join_attr]:
updated_feature['properties']['__joined__'].append( deepcopy(dict_to_join) )
            # keep only the features that actually received joined data
if len(updated_feature['properties']['__joined__']) > 0:
joined_data_list.append(updated_feature)
return {'type': 'FeatureCollection', 'features': joined_data_list}
def get_objects_from_specialized_operation(self, attributes_functions_str):
if self.path_has_url(attributes_functions_str):
arr = self.attribute_functions_str_with_url_splitted_by_slash(attributes_functions_str)
else:
arr = attributes_functions_str.split('/')
if not self.path_has_geometry_attribute(arr[0]):
arr = self.inject_geometry_attribute_in_spatial_operation_for_path(arr)
return self.get_objects_from_spatial_operation(arr)
def get_objects_from_extent_spatial_operation(self, attributes_functions_str):
first_part_name = super(FeatureCollectionResource, self).get_operation_name_from_path(attributes_functions_str)
if first_part_name == self.operation_controller.filter_collection_operation_name:
filter_snippet = attributes_functions_str[:attributes_functions_str.index('/*')]
queryset_or_model_class = self.get_objects_from_filter_operation(filter_snippet)
elif first_part_name == self.operation_controller.offset_limit_collection_operation_name:
offset_limit_snippet = attributes_functions_str[:attributes_functions_str.index('/*')]
queryset_or_model_class = self.get_objects_from_offset_limit_operation(offset_limit_snippet)
else:
queryset_or_model_class = self.model_class().objects
return queryset_or_model_class.aggregate(Extent(self.geometry_field_name()))
def get_object_from_union_spatial_operation(self, attributes_functions_str):
first_part_name = super(FeatureCollectionResource, self).get_operation_name_from_path(attributes_functions_str)
if first_part_name == self.operation_controller.filter_collection_operation_name:
filter_snippet = attributes_functions_str[:attributes_functions_str.index('/*')]
queryset_or_model_class = self.get_objects_from_filter_operation(filter_snippet)
elif first_part_name == self.operation_controller.offset_limit_collection_operation_name:
offset_limit_snippet = attributes_functions_str[:attributes_functions_str.index('/*')]
queryset_or_model_class = self.get_objects_from_offset_limit_operation(offset_limit_snippet)
else:
queryset_or_model_class = self.model_class().objects
return queryset_or_model_class.aggregate(Union(self.geometry_field_name()))
def get_object_from_make_line_spatial_operation(self, attributes_functions_str):
first_part_name = super(FeatureCollectionResource, self).get_operation_name_from_path(attributes_functions_str)
if first_part_name == self.operation_controller.filter_collection_operation_name:
filter_snippet = attributes_functions_str[:attributes_functions_str.index('/*')]
queryset_or_model_class = self.get_objects_from_filter_operation(filter_snippet)
elif first_part_name == self.operation_controller.offset_limit_collection_operation_name:
offset_limit_snippet = attributes_functions_str[:attributes_functions_str.index('/*')]
queryset_or_model_class = self.get_objects_from_offset_limit_operation(offset_limit_snippet)
else:
queryset_or_model_class = self.model_class().objects
return queryset_or_model_class.aggregate(MakeLine(self.geometry_field_name()))
def get_objects_from_collect_operation(self, attributes_functions_str, queryset=None):
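        # The collect operation projects the listed attributes and applies one
        # operation (arr[2], with parameters arr[3:]) to the last listed
        # attribute of every object in the queryset.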
attrs_funcs_arr = self.remove_last_slash(attributes_functions_str).split("/")
objects = self.model_class().objects.all() if queryset is None else queryset
obj_model_list_or_queryset = self.transform_queryset_in_object_model_list(objects)
collected_objects_list = []
collected_attrs = self.extract_collect_operation_attributes(attributes_functions_str)
        attrs_out_of_operation = collected_attrs[:-1]  # all but the last attribute; only the last one has the operation applied
operated_attr = collected_attrs[-1]
operation_name = attrs_funcs_arr[2]
operation_params = attrs_funcs_arr[3:]
for obj in obj_model_list_or_queryset:
collected_object = {}
for attr in attrs_out_of_operation:
collected_object[attr] = getattr(obj, attr)
            # apply the operation to the selected (last) attribute
if operated_attr == self.geometry_field_name():
operated_value = self._execute_attribute_or_method(obj, operation_name, operation_params)
else:
operated_attr_val = getattr(obj, operated_attr)
if operated_attr_val is not None:
operated_value = self._execute_attribute_or_method(operated_attr_val, operation_name, operation_params)
else:
operated_value = None
if isinstance(operated_value, GEOSGeometry):
collected_object[operated_attr] = operated_value
else:
collected_object[operation_name] = operated_value
collected_objects_list.append(collected_object)
return collected_objects_list
    def get_object_serialized_by_only_attributes(self, attribute_names_str, objects):
arr = []
attribute_names_str_as_array = self.remove_last_slash(attribute_names_str).split(',')
has_geo_field = self.geometry_field_name() in attribute_names_str_as_array
        for dic in objects:
a_dic = {}
for att_name in attribute_names_str_as_array:
if has_geo_field and att_name == self.geometry_field_name():
a_dic[att_name] = json.loads(dic[att_name].json)
else:
a_dic[att_name] = dic[att_name]
            # per-item shaping: wrap each dict as a GeoJSON Feature when other attributes accompany the geometry, or emit the bare geometry otherwise
if has_geo_field:
if len(attribute_names_str_as_array) > 1:
a_dic = self.dict_as_geojson(a_dic)
else:
a_dic = a_dic[self.geometry_field_name()]
arr.append(a_dic)
        # whole-collection shaping: FeatureCollection when other attributes are present, GeometryCollection when only geometries were requested
if has_geo_field:
if len(attribute_names_str_as_array) > 1:
arr = self.dict_list_as_feature_collection(arr)
else:
arr = self.dict_list_as_geometry_collection(arr)
else:
self.temporary_content_type = CONTENT_TYPE_JSON
return arr
def get_objects_from_within_operation(self, attributes_functions_str):
return self.get_objects_from_filter_operation(attributes_functions_str)
def get_objects_by_functions(self, attributes_functions_str):
objects = []
if self.path_has_filter_operation(attributes_functions_str):
objects = self.get_objects_from_filter_operation(attributes_functions_str)
return objects
def get_context_by_only_attributes(self, request, attributes_functions_str):
context = super(FeatureCollectionResource, self).get_context_by_only_attributes(request, attributes_functions_str)
if self.geometry_field_name() in context["@context"].keys():
context["@context"].pop(self.geometry_field_name())
return context
def get_context_for_specialized_operation(self, request, attributes_functions_str):
return self.get_context_for_filter_operation(request, attributes_functions_str)
#operation_name = self.get_operation_name_from_path(attributes_functions_str)
#resource_type = self.define_resource_representation_by_operation(request, operation_name)
#context = self.get_context_for_operation_resource_type(attributes_functions_str, resource_type)
#context["@context"].update(self.context_resource.attributes_contextualized_dict())
#return context
def get_context_for_union_operation(self, request, attributes_functions_str):
return self.get_context_for_operation(request, attributes_functions_str)
#resource_type_by_accept = self.resource_representation_or_default_resource_representation(request)
#resource_type = resource_type_by_accept if resource_type_by_accept != self.default_resource_representation() else 'Feature'
#return self.get_context_for_operation_resource_type(attributes_functions_str, resource_type)
def get_context_for_extent_operation(self, request, attributes_functions_str):
context = self.get_context_for_operation(request, attributes_functions_str)
operation_name = self.get_operation_name_from_path(attributes_functions_str)
context["@context"].update(self.context_resource.get_operation_return_type_term_definition(operation_name))
return context
def get_context_for_make_line_operation(self, request, attributes_functions_str):
return self.get_context_for_operation(request, attributes_functions_str)
#resource_type_by_accept = self.resource_representation_or_default_resource_representation(request)
#resource_type = resource_type_by_accept if resource_type_by_accept != self.default_resource_representation() else LineString
#return self.get_context_for_operation_resource_type(attributes_functions_str, resource_type)
def get_context_for_collect_operation(self, request, attributes_functions_str):
context = super(FeatureCollectionResource, self).get_context_for_collect_operation(request, attributes_functions_str)
attrs = self.extract_collect_operation_attributes(attributes_functions_str)
if self.geometry_field_name() not in attrs:
return context
operated_attribute = attrs[-1]
if self.geometry_field_name() != operated_attribute:
context["@context"].pop(self.geometry_field_name())
context["@id"] = self.context_resource.get_vocabulary_for("Feature")
return context
oper_in_collect_return_type = self.get_operation_in_collect_return_type(attributes_functions_str)
if not issubclass(oper_in_collect_return_type, GEOSGeometry):
return context
if len(attrs) > 1:
context["@id"] = self.context_resource.get_vocabulary_for("Feature")
else:
context["@id"] = self.context_resource.get_vocabulary_for(GEOSGeometry)
return context
def get_context_for_attributes_in_collect_operation(self, request, attributes_functions_str):
context = super(FeatureCollectionResource, self).get_context_for_attributes_in_collect_operation(request, attributes_functions_str)
operation_in_collect_name = self.extract_collect_operation_snippet(attributes_functions_str).split('/')[2]
oper_return_type = BaseOperationController().dict_all_operation_dict()[operation_in_collect_name].return_type
if issubclass(oper_return_type, GEOSGeometry):
context.pop(operation_in_collect_name)
return context
def get_png(self, queryset, request):
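        # Renders the queryset as a PNG: all geometries are concatenated into
        # one WKT GEOMETRYCOLLECTION string, which BuilderPNG rasterizes,
        # optionally with a style file taken from the request.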
geom_type = None
wkt = 'GEOMETRYCOLLECTION('
for i, e in enumerate(queryset):
if isinstance(e, FeatureModel):
                wkt += e.get_spatial_object().wkt  # TODO: handle the case where the geometry attribute is not named 'geom'
else:
geome = GEOSGeometry(json.dumps(e['geometry']))
wkt += geome.wkt
geom_type = geome.geom_type
wkt += ',' if i != len(queryset) - 1 else ')'
        if len(queryset):
            if isinstance(queryset[0], FeatureModel):
                geom_type = queryset[0].get_spatial_object().geom_type
            else:
                geom_type = ''  # NOTE: overrides the geom_type captured in the loop above
config = {'wkt': wkt, 'type': geom_type}
style = self.get_style_file(request)
if style is not None:
config.update({
'style': style,
'deleteStyle': True
})
builder_png = BuilderPNG(config)
return builder_png.generate()
def get(self, request, format=None, *args, **kwargs):
self.change_request_if_image_png_into_IRI(request)
        return super(FeatureCollectionResource, self).get(request, *args, **self.kwargs)
[prev record stats] avg_line_length: 60.51146 | max_line_length: 191 | alphanum_fraction: 0.757396

[record] hexsha: c4c35d4a65748711b557793c10a11059ae57b4d8 | size: 2,401 | ext: py | lang: Python
[stars]  path: alipay/aop/api/domain/AlipayFundTransCommonQueryModel.py | repo: snowxmas/alipay-sdk-python-all | head: 96870ced60facd96c5bce18d19371720cbda3317 | licenses: ["Apache-2.0"] | count: 1 | 2022-03-07T06:11:10.000Z .. 2022-03-07T06:11:10.000Z
[issues] path: alipay/aop/api/domain/AlipayFundTransCommonQueryModel.py | repo: snowxmas/alipay-sdk-python-all | head: 96870ced60facd96c5bce18d19371720cbda3317 | licenses: ["Apache-2.0"] | count: null
[forks]  path: alipay/aop/api/domain/AlipayFundTransCommonQueryModel.py | repo: snowxmas/alipay-sdk-python-all | head: 96870ced60facd96c5bce18d19371720cbda3317 | licenses: ["Apache-2.0"] | count: 1 | 2021-10-05T03:01:09.000Z .. 2021-10-05T03:01:09.000Z
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AlipayFundTransCommonQueryModel(object):
def __init__(self):
self._biz_scene = None
self._order_id = None
self._out_biz_no = None
self._product_code = None
@property
def biz_scene(self):
return self._biz_scene
@biz_scene.setter
def biz_scene(self, value):
self._biz_scene = value
@property
def order_id(self):
return self._order_id
@order_id.setter
def order_id(self, value):
self._order_id = value
@property
def out_biz_no(self):
return self._out_biz_no
@out_biz_no.setter
def out_biz_no(self, value):
self._out_biz_no = value
@property
def product_code(self):
return self._product_code
@product_code.setter
def product_code(self, value):
self._product_code = value
def to_alipay_dict(self):
params = dict()
if self.biz_scene:
if hasattr(self.biz_scene, 'to_alipay_dict'):
params['biz_scene'] = self.biz_scene.to_alipay_dict()
else:
params['biz_scene'] = self.biz_scene
if self.order_id:
if hasattr(self.order_id, 'to_alipay_dict'):
params['order_id'] = self.order_id.to_alipay_dict()
else:
params['order_id'] = self.order_id
if self.out_biz_no:
if hasattr(self.out_biz_no, 'to_alipay_dict'):
params['out_biz_no'] = self.out_biz_no.to_alipay_dict()
else:
params['out_biz_no'] = self.out_biz_no
if self.product_code:
if hasattr(self.product_code, 'to_alipay_dict'):
params['product_code'] = self.product_code.to_alipay_dict()
else:
params['product_code'] = self.product_code
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayFundTransCommonQueryModel()
if 'biz_scene' in d:
o.biz_scene = d['biz_scene']
if 'order_id' in d:
o.order_id = d['order_id']
if 'out_biz_no' in d:
o.out_biz_no = d['out_biz_no']
if 'product_code' in d:
o.product_code = d['product_code']
return o
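# Illustrative usage (a sketch; the field values below are placeholders,
# not real Alipay data):
#     model = AlipayFundTransCommonQueryModel()
#     model.out_biz_no = '201806300001'
#     model.product_code = 'TRANS_ACCOUNT_NO_PWD'
#     params = model.to_alipay_dict()
#     restored = AlipayFundTransCommonQueryModel.from_alipay_dict(params)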
[prev record stats] avg_line_length: 27.918605 | max_line_length: 75 | alphanum_fraction: 0.589338

[record] hexsha: 75ed0e666fe64c1a95867afc54edfeda70f37860 | size: 30,324 | ext: py | lang: Python
[stars]  path: src/practice_problem3.py | repo: craannj/13-Exam2Practice | head: 1717a3db485efb0adfecf46f0f154b18fe0e2f75 | licenses: ["MIT"] | count: null
[issues] path: src/practice_problem3.py | repo: craannj/13-Exam2Practice | head: 1717a3db485efb0adfecf46f0f154b18fe0e2f75 | licenses: ["MIT"] | count: null
[forks]  path: src/practice_problem3.py | repo: craannj/13-Exam2Practice | head: 1717a3db485efb0adfecf46f0f154b18fe0e2f75 | licenses: ["MIT"] | count: null
"""
PRACTICE Exam 2, practice_problem 3.
Authors: David Mutchler, Vibha Alangar, Matt Boutell, Dave Fisher,
Mark Hays, Amanda Stouder, Aaron Wilkin, their colleagues,
and Nathaniel Craan.
""" # DONE: 1. PUT YOUR NAME IN THE ABOVE LINE.
###############################################################################
# Students:
#
# These problems have DIFFICULTY and TIME ratings:
# DIFFICULTY rating: 1 to 10, where:
# 1 is very easy
# 3 is an "easy" Exam 2 question.
# 5 is a "typical" Exam 2 question.
# 7 is a "hard" Exam 2 question.
# 10 is an EXTREMELY hard problem (too hard for an Exam 2 question)
#
# TIME ratings: A ROUGH estimate of the number of minutes that we
# would expect a well-prepared student to take on the problem.
#
# IMPORTANT: For ALL the problems in this module,
# if you reach the time estimate and are NOT close to a solution,
# STOP working on that problem and ASK YOUR INSTRUCTOR FOR HELP
# on it, in class or via Piazza.
###############################################################################
import testing_helper
import time
import math
import rosegraphics as rg
def main():
""" Calls the TEST functions in this module. """
run_test_practice_problem3a()
run_test_practice_problem3b()
run_test_practice_problem3c()
run_test_practice_problem3d()
run_test_practice_problem3e()
def is_prime(n):
"""
What comes in: An integer.
What goes out: Returns True if the given integer is prime.
Returns False if the given integer is NOT prime.
Side effects: None.
Examples:
This function returns True or False, depending on whether
the given integer is prime or not. Since the smallest prime is 2,
this function returns False on all integers < 2.
It returns True on 2, 3, 5, 7, and other primes.
Note: The algorithm used here is simple and clear but slow.
Type hints:
:type n: int
"""
if n < 2:
return False
for k in range(2, int(math.sqrt(n) + 0.1) + 1):
if n % k == 0:
return False
return True
# -------------------------------------------------------------------------
# Students:
# Do NOT touch the above is_prime function - it has no _TODO_.
# Do NOT copy code from this function.
#
# Instead, ** CALL ** this function as needed in the problems below.
# -------------------------------------------------------------------------
###############################################################################
# Students: Some of the testing code below uses a simple testing framework.
# Ask for help if the tests that we supply are not clear to you.
###############################################################################
def run_test_practice_problem3a():
""" Tests the practice_problem3a function. """
print()
print('--------------------------------------------------')
print('Testing the practice_problem3a function:')
print('--------------------------------------------------')
format_string = ' practice_problem3a( {} )'
test_results = [0, 0] # Number of tests passed, failed.
# Test 1:
expected = 5 * 2 * 7 * 10 * 2 # which is 1400
circles = (rg.Circle(rg.Point(5, 10), 20),
rg.Circle(rg.Point(2, 20), 20),
rg.Circle(rg.Point(7, 30), 10),
rg.Circle(rg.Point(10, 40), 20),
rg.Circle(rg.Point(2, 50), 10))
print_expected_result_of_test([circles], expected, test_results,
format_string)
actual = practice_problem3a(circles)
print_actual_result_of_test(expected, actual, test_results)
# Test 2:
expected = 58
circles = (rg.Circle(rg.Point(58, 10), 20),)
print_expected_result_of_test([circles], expected, test_results,
format_string)
actual = practice_problem3a(circles)
print_actual_result_of_test(expected, actual, test_results)
# Test 3:
expected = 84 * 28 * 10005 # which is 23531760
circles = (rg.Circle(rg.Point(84, 100), 200),
rg.Circle(rg.Point(28, 200), 200),
rg.Circle(rg.Point(10005, 300), 100))
print_expected_result_of_test([circles], expected, test_results,
format_string)
actual = practice_problem3a(circles)
print_actual_result_of_test(expected, actual, test_results)
# Test 4:
expected = 1
circles = ()
print_expected_result_of_test([circles], expected, test_results,
format_string)
actual = practice_problem3a(circles)
print_actual_result_of_test(expected, actual, test_results)
# Test 5:
expected = 5 * 0 * 7 * 10 * 2 # which is 0
circles = (rg.Circle(rg.Point(5, 10), 20),
rg.Circle(rg.Point(0, 20), 20),
rg.Circle(rg.Point(7, 30), 10),
rg.Circle(rg.Point(10, 40), 20),
rg.Circle(rg.Point(2, 50), 10))
print_expected_result_of_test([circles], expected, test_results,
format_string)
actual = practice_problem3a(circles)
print_actual_result_of_test(expected, actual, test_results)
# Test 6:
circles = []
for k in range(1, 101):
circles.append(rg.Circle(rg.Point(k, k + 20), 5 * k))
expected = math.factorial(100)
print_expected_result_of_test([circles], expected, test_results,
format_string)
actual = practice_problem3a(circles)
print_actual_result_of_test(expected, actual, test_results)
# SUMMARY of test results:
print_summary_of_test_results(test_results)
def practice_problem3a(circles):
"""
What comes in: A sequence of rg.Circles.
What goes out: Returns the product of the x-coordinates
of the centers of the rg.Circles.
Returns 1 if the given sequence is empty.
Side effects: None.
Examples:
If the sequence is a list containing these 5 rg.Circles:
rg.Circle(rg.Point(5, 10), 20)
rg.Circle(rg.Point(2, 20), 20)
rg.Circle(rg.Point(7, 30), 10)
rg.Circle(rg.Point(10, 40), 20)
rg.Circle(rg.Point(2, 50), 10)
then this function returns:
5 x 2 x 7 x 10 x 2, which is 1400.
Type hints:
      :type circles: [rg.Circle]
"""
###########################################################################
# TODO: 2. Implement and test this function.
# The testing code is already written for you (above).
###########################################################################
# DIFFICULTY AND TIME RATINGS (see top of this file for explanation)
# DIFFICULTY: 7
# TIME ESTIMATE: 10 minutes.
###########################################################################
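    # One possible solution (a minimal reference sketch; any equivalent
    # accumulator loop is fine):
    product = 1
    for circle in circles:
        product = product * circle.center.x
    return product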
def run_test_practice_problem3b():
""" Tests the practice_problem3b function. """
print()
print('--------------------------------------------------')
print('Testing the practice_problem3b function:')
print('--------------------------------------------------')
format_string = ' practice_problem3b( {} )'
test_results = [0, 0] # Number of tests passed, failed.
# Test 1:
expected = True
sequence = [12, 33, 18, 'hello', 9, 13, 3, 9]
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3b(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 2:
expected = False
sequence = [12, 12, 33, 'hello', 5, 33, 5, 9]
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3b(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 3:
expected = True
sequence = (77, 112, 33, 'hello', 0, 43, 5, 77)
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3b(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 4:
expected = False
sequence = [1, 1, 1, 1, 1, 1, 2]
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3b(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 5:
expected = False
sequence = ['aa', 'a']
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3b(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 6:
expected = True
sequence = 'aaa'
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3b(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 7:
expected = True
sequence = ['aa', 'a', 'b', 'a', 'b', 'a']
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3b(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 8:
expected = False
sequence = [9]
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3b(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 9:
expected = True
sequence = [12, 33, 18, 'hello', 9, 13, 3, 9]
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3b(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 10:
expected = False
sequence = ['hello there', 'he', 'lo', 'hello']
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3b(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 11:
expected = False
sequence = ((8,), '8', (4 + 4, 4 + 4), [8, 8], 7, 8)
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3b(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 12:
expected = True
sequence = [(8,), '8', [4 + 4, 4 + 4], (8, 8), 7, [8, 8]]
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3b(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 13:
expected = False
sequence = [(8,), '8', [4 + 4, 4 + 4], [8, 8], 7, (8, 8)]
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3b(sequence)
print_actual_result_of_test(expected, actual, test_results)
# SUMMARY of test results:
print_summary_of_test_results(test_results)
def practice_problem3b(sequence):
"""
What comes in: A non-empty sequence.
What goes out: Returns True if the last item of the sequence
appears again somewhere else in the sequence. Returns False
if the last item of the sequence does NOT appear somewhere
else in the sequence.
Side effects: None.
Examples:
If the sequence is [12, 33, 18, 'hello', 9, 13, 3, 9],
this function returns True because the last item (9)
DOES appear elsewhere in the sequence (namely, at index 4).
If the sequence is [12, 12, 33, 'hello', 5, 33, 5, 9],
this function returns False because the last item (9)
does NOT appear elsewhere in the sequence.
If the sequence is (77, 112, 33, 'hello', 0, 43, 5, 77),
this function returns True because the last item (77)
DOES appear elsewhere in the sequence (namely, at index 0).
If the sequence is [9], this function returns False
because the last item (9) does NOT appear elsewhere
in the sequence.
If the sequence is [12, 33, 8, 'hello', 99, 'hello'],
this function returns True since the last item ('hello')
DOES appear elsewhere in the sequence
(namely, at indices 3 and 5).
If the sequence is ['hello there', 'he', 'lo', 'hello'],
this function returns False because the last item ('hello')
does NOT appear elsewhere in the sequence.
If the sequence is 'hello there',
this function returns True since the last item ('e') DOES
appear elsewhere in the sequence (namely, at indices 1 and 8).
Type hints:
      :type sequence: list or tuple or string
"""
###########################################################################
# TODO: 3. Implement and test this function.
# The testing code is already written for you (above).
#
# IMPLEMENTATION REQUIREMENT: You are NOT allowed to use the
# 'count' or 'index' methods for sequences in this problem
# (because here we want you to demonstrate your ability
# to use explicit looping).
###########################################################################
# DIFFICULTY AND TIME RATINGS (see top of this file for explanation)
# DIFFICULTY: 5
# TIME ESTIMATE: 8 minutes.
###########################################################################
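    # One possible solution (explicit loop, honoring the requirement
    # above that 'count' and 'index' not be used):
    last_item = sequence[-1]
    for k in range(len(sequence) - 1):
        if sequence[k] == last_item:
            return True
    return False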
def run_test_practice_problem3c():
""" Tests the practice_problem3c function. """
print()
print('--------------------------------------------------')
print('Testing the practice_problem3c function:')
print('--------------------------------------------------')
format_string = ' practice_problem3c( {} )'
test_results = [0, 0] # Number of tests passed, failed.
# Test 1:
expected = [1, 3, 4, 7]
sequence = (9, 0, 8, 0, 0, 4, 4, 0)
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3c(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 2:
expected = [4]
sequence = (9, 9, 9, 9, 0, 9, 9, 9)
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3c(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 3:
expected = []
sequence = (4, 5, 4, 5, 4, 5, 4)
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3c(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 4:
expected = [0, 1, 2]
sequence = [0, 0, 0]
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3c(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 5:
expected = [0, 1]
sequence = [0, 0]
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3c(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 6:
expected = [0]
sequence = [0, 77]
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3c(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 7:
expected = [1]
sequence = [-40, 0]
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3c(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 8:
expected = []
sequence = [-40, 67]
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3c(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 9:
expected = [1, 3, 4, 5, 6, 9, 10]
sequence = (1, 0, 2, 0, 0, 0, 0, 6, 9, 0, 0, 12)
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3c(sequence)
print_actual_result_of_test(expected, actual, test_results)
# SUMMARY of test results:
print_summary_of_test_results(test_results)
def practice_problem3c(sequence):
"""
What comes in: A non-empty sequence of integers.
What goes out: Returns a list of integers,
where the integers are the places (indices)
for which the item at that place equals 0.
Side effects: None.
Examples:
Given sequence (9, 0, 8, 0, 0, 4, 4, 0)
-- this function returns [1, 3, 4, 7]
since 0 appears at indices 1, 3, 4, and 7.
Given sequence [9, 9, 9, 9, 0, 9, 9, 9]
-- this function returns [4]
since 0 appears only at index 4.
Given sequence (4, 5, 4, 5, 4, 5, 4)
-- this function returns []
since none of the items are 0.
Given sequence [0, 0, 0]
-- this function returns [0, 1, 2]
since 0 appears at indices 0, 1 and 2.
Type hints:
      :type sequence: list or tuple or string
"""
###########################################################################
# TODO: 4. Implement and test this function.
# The testing code is already written for you (above).
###########################################################################
# DIFFICULTY AND TIME RATINGS (see top of this file for explanation)
# DIFFICULTY: 5
# TIME ESTIMATE: 8 minutes.
###########################################################################
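    # One possible solution (a minimal reference sketch):
    indices_of_zeros = []
    for k in range(len(sequence)):
        if sequence[k] == 0:
            indices_of_zeros.append(k)
    return indices_of_zeros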
def run_test_practice_problem3d():
""" Tests the practice_problem3d function. """
print()
print('--------------------------------------------------')
print('Testing the practice_problem3d function:')
print('--------------------------------------------------')
format_string = ' practice_problem3d( {} )'
test_results = [0, 0] # Number of tests passed, failed.
# Test 1:
expected = 1
sequence = (9, 0, 8, 0, 0, 4, 4, 0)
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3d(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 2:
expected = 4
sequence = (9, 9, 9, 9, 0, 9, 9, 9)
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3d(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 3:
expected = -1
sequence = (4, 5, 4, 5, 4, 5, 4)
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3d(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 4:
expected = 0
sequence = [0, 0, 0]
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3d(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 5:
expected = 0
sequence = [0, 0]
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3d(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 6:
expected = 0
sequence = [0, 77]
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3d(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 7:
expected = 1
sequence = [-40, 0]
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3d(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 8:
expected = -1
sequence = [-40, 67]
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3d(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 9:
expected = 1
sequence = (1, 0, 2, 0, 0, 0, 0, 6, 9, 0, 0, 12)
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3d(sequence)
print_actual_result_of_test(expected, actual, test_results)
# SUMMARY of test results:
print_summary_of_test_results(test_results)
def practice_problem3d(sequence):
"""
What comes in: A sequence of integers.
What goes out: Returns the first (leftmost) place (index)
for which the item at that place equals 0.
Returns -1 if the sequence contains no items equal to 0.
Side effects: None.
Examples:
Given sequence (9, 0, 8, 0, 0, 4, 4, 0)
-- this function returns 1
since 0 first appears at index 1
Given sequence [9, 9, 9, 9, 0, 9, 9, 9]
-- this function returns 4
since 0 first appears at index 4
Given sequence (4, 5, 4, 5, 4, 5, 4)
-- this function returns -1
since none of the items are 0.
Given sequence [0, 0, 0]
-- this function returns 0
since 0 first appears at index 0
Type hints:
      :type sequence: list or tuple or string
"""
###########################################################################
# TODO: 5. Implement and test this function.
# The testing code is already written for you (above).
###########################################################################
# DIFFICULTY AND TIME RATINGS (see top of this file for explanation)
# DIFFICULTY: 5
# TIME ESTIMATE: 8 minutes for each part of this problem.
###########################################################################
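    # FIRST solution (a reference sketch that ignores practice_problem3c):
    for k in range(len(sequence)):
        if sequence[k] == 0:
            return k
    return -1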
###########################################################################
# TODO: 6. Just ABOVE this _TODO_, you should have implemented
# a solution for the practice_problem3d function.
# Here, put ANOTHER solution, as follows:
#
# -- Your FIRST solution (ABOVE this _TODO_)
# should be a solution that IGNORES
# practice_problem3c (the previous problem).
#
# -- Your SECOND solution (BELOW this _TODO_)
# should be a solution that USES (calls)
# practice_problem3c.
#
# This solution should *** HAVE NO LOOP (no FOR). ***
###########################################################################
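    # SECOND solution (uses practice_problem3c, with no loop). It is left
    # commented out so the FIRST solution above remains the one that runs:
    #   zeros = practice_problem3c(sequence)
    #   if len(zeros) == 0:
    #       return -1
    #   return zeros[0]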
def run_test_practice_problem3e():
""" Tests the practice_problem3e function. """
print()
print('--------------------------------------------------')
print('Testing the practice_problem3e function:')
print('--------------------------------------------------')
format_string = ' practice_problem3e( {} )'
test_results = [0, 0] # Number of tests passed, failed.
# Test 1:
expected = 161
sequence = (12, 33, 18, 9, 13, 3, 99, 20, 19, 20)
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3e(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 2:
expected = 29
sequence = (3, 12, 10, 8, 8, 9, 8, 11)
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3e(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 3:
expected = -9999999999
sequence = (-9999999999, 8888888888)
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3e(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 4:
expected = 8888888888
sequence = (8888888888, -9999999999)
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3e(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 5:
expected = -176
sequence = (-77, 20000, -33, 40000, -55, 60000, -11)
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3e(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 6:
expected = 0
sequence = ()
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3e(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 7:
expected = 0
sequence = []
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3e(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 8:
expected = 8
sequence = [8]
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3e(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 9:
expected = -77
sequence = (-77, 8)
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3e(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 10:
expected = 0
sequence = (-77, 8, 77)
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3e(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 11:
expected = 1
sequence = (-77, 8, 78)
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3e(sequence)
print_actual_result_of_test(expected, actual, test_results)
# Test 12:
expected = 1
sequence = (-77, 8, 78, 100)
print_expected_result_of_test([sequence], expected, test_results,
format_string)
actual = practice_problem3e(sequence)
print_actual_result_of_test(expected, actual, test_results)
# SUMMARY of test results:
print_summary_of_test_results(test_results)
def practice_problem3e(sequence):
"""
What comes in:
A sequence of numbers.
What goes out:
Returns the sum of the numbers at EVEN INDICES of the sequence.
Side effects: None.
Examples:
If the sequence is:
(12, 33, 18, 9, 13, 3, 99, 20, 19, 20)
then this function returns
12 + 18 + 13 + 99 + 19, which is 161.
Type hints:
:type sequence: list(float) or tuple(float)
"""
# -------------------------------------------------------------------------
# TODO: 7. Implement and test this function.
# The testing code is already written for you (above).
###########################################################################
# DIFFICULTY AND TIME RATINGS (see top of this file for explanation)
# DIFFICULTY: 5
# TIME ESTIMATE: 8 minutes.
###########################################################################
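    # One possible solution (a minimal reference sketch): range's step
    # argument visits the even indices 0, 2, 4, ... directly.
    total = 0
    for k in range(0, len(sequence), 2):
        total = total + sequence[k]
    return total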
###############################################################################
# Our tests use the following to print error messages in red.
# Do NOT change it. You do NOT have to do anything with it.
###############################################################################
def print_expected_result_of_test(arguments, expected,
test_results, format_string, suffix=''):
testing_helper.print_expected_result_of_test(arguments, expected,
test_results, format_string,
suffix)
def print_actual_result_of_test(expected, actual, test_results,
precision=None):
testing_helper.print_actual_result_of_test(expected, actual,
test_results, precision)
def print_summary_of_test_results(test_results):
testing_helper.print_summary_of_test_results(test_results)
# To allow color-coding the output to the console:
USE_COLORING = True # Change to False to revert to OLD style coloring
testing_helper.USE_COLORING = USE_COLORING
if USE_COLORING:
# noinspection PyShadowingBuiltins
print = testing_helper.print_colored
else:
# noinspection PyShadowingBuiltins
print = testing_helper.print_uncolored
# -----------------------------------------------------------------------------
# Calls main to start the ball rolling.
# The try .. except prevents error messages on the console from being
# intermingled with ordinary output to the console.
# -----------------------------------------------------------------------------
try:
main()
except Exception:
print('ERROR - While running this test,', color='red')
print('your code raised the following exception:', color='red')
print()
time.sleep(1)
raise
[prev record stats] avg_line_length: 37.810474 | max_line_length: 79 | alphanum_fraction: 0.574627

[record] hexsha: 4680eafea5288fee85a15e2d61446c6cb75b5dee | size: 328 | ext: py | lang: Python
[stars]  path: neo/io/spikeglxio.py | repo: Mario-Kart-Felix/python-neo | head: 951c97cf9eb56f5489da88940de920329e0f4c1b | licenses: ["BSD-3-Clause"] | count: 199 | 2015-01-20T13:49:13.000Z .. 2022-03-21T18:35:29.000Z
[issues] path: neo/io/spikeglxio.py | repo: Mario-Kart-Felix/python-neo | head: 951c97cf9eb56f5489da88940de920329e0f4c1b | licenses: ["BSD-3-Clause"] | count: 905 | 2015-01-07T09:21:15.000Z .. 2022-03-31T16:29:44.000Z
[forks]  path: neo/io/spikeglxio.py | repo: Mario-Kart-Felix/python-neo | head: 951c97cf9eb56f5489da88940de920329e0f4c1b | licenses: ["BSD-3-Clause"] | count: 178 | 2015-01-05T12:34:39.000Z .. 2022-02-20T23:06:52.000Z
from neo.io.basefromrawio import BaseFromRaw
from neo.rawio.spikeglxrawio import SpikeGLXRawIO
class SpikeGLXIO(SpikeGLXRawIO, BaseFromRaw):
__doc__ = SpikeGLXRawIO.__doc__
mode = 'dir'
def __init__(self, dirname):
SpikeGLXRawIO.__init__(self, dirname=dirname)
BaseFromRaw.__init__(self, dirname)
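# Illustrative usage (a sketch; the directory path is a placeholder):
#     io = SpikeGLXIO(dirname='/path/to/spikeglx_run_folder')
#     block = io.read_block()  # read_block() comes from BaseFromRaw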
[prev record stats] avg_line_length: 27.333333 | max_line_length: 53 | alphanum_fraction: 0.753049

[record] hexsha: 760944f9c4c5f3f888722bcdd52cf188b30d29c9 | size: 6,365 | ext: py | lang: Python
[stars]  path: tests/unit/extractors/title/url_title_extractor_pt.py | repo: victormartinez/ferret | head: 2cadfa47d0842656c7c56136c004cf9ba488df9f | licenses: ["Apache-2.0"] | count: 11 | 2017-03-26T15:36:50.000Z .. 2020-03-05T01:42:07.000Z
[issues] path: tests/unit/extractors/title/url_title_extractor_pt.py | repo: victormartinez/ferret | head: 2cadfa47d0842656c7c56136c004cf9ba488df9f | licenses: ["Apache-2.0"] | count: null
[forks]  path: tests/unit/extractors/title/url_title_extractor_pt.py | repo: victormartinez/ferret | head: 2cadfa47d0842656c7c56136c004cf9ba488df9f | licenses: ["Apache-2.0"] | count: null
# -*- coding: utf-8 -*-
import pytest
from ferret.extractors.title_extractor import UrlTitleExtractor
def _get_url_title_extractor(url, html):
return UrlTitleExtractor(url, html)
def _get_contents_of_file(path):
with open(path, 'r') as content_file:
return content_file.read()
@pytest.mark.parametrize("test_file_path,url_page,expected_title", [
('pt/abdc-novas-regras-da-ans-obrigam-planos-de-saude-a-qualificar-atendimento.html', 'http://abradecont.org.br/noticias/novas-regras-da-ans-obrigam-planos-de-saude-a-qualificar-atendimento/', 'Novas regras da ANS obrigam planos de saúde a qualificar atendimento'),
("pt/age-mg-rejeitado-pedido-de-danos-morais-coletivos-decorrentes-das-condicoes-do-sistema-carcerario.html", "http://www.age.mg.gov.br/comunicacao/banco-de-noticias/2407-rejeitado-pedido-de-danos-morais-coletivos-decorrentes-das-condicoes-do-sistema-carcerario", "Rejeitado pedido de danos morais coletivos decorrentes das condições do sistema carcerário"),
("pt/agencia-brasil-ministros-do-stf-veem-condicoes-para-reforma-politica-apos-eleicoes-municipais.html", "http://agenciabrasil.ebc.com.br/politica/noticia/2016-09/ministros-do-stf-veem-condicoes-para-reforma-politica-apos-eleicoes", "Ministros do STF veem condições para reforma política após eleições municipais"),
("pt/agencia-brasil-ministros-do-stf-veem-condicoes-para-reforma-politica-apos-eleicoes-municipais.html", "http://agenciabrasil.ebc.com.br/politica/noticia/2016-09/ministros-do-stf-veem-condicoes-para-reforma-politica-apos-eleicoes", "Ministros do STF veem condições para reforma política após eleições municipais"),
("pt/ajufe-5a-edicao-da-expedicao-da-cidadania-atende-ribeirinhos-do-delta-do-parnaiba-pi-a-partir-desta-segunda-feira.html", "http://www.ajufe.org/imprensa/noticias/5-edicao-da-expedicao-da-cidadania-atende-ribeirinhos-do-delta-do-parnaiba-pi-a-partir-desta-segunda-feira/", "5ª edição da Expedição da Cidadania atende ribeirinhos do Delta do Parnaíba/PI a partir desta segunda-feira"),
("pt/al-ce-servidores-voluntarios-da-al-realizam-trabalho-no-lar-amigos-de-jesus.html", "http://www.al.ce.gov.br/index.php/ultimas-noticias/item/56930-1909-ci-voluntarios-prosa", "Servidores voluntários da AL realizam trabalho no Lar Amigos de Jesus"),
("pt/anamatra-cnj-institui-politica-nacional-de-seguranca-do-poder-judiciario.html", "http://www.anamatra.org.br/index.php/noticias/cnj-institui-politica-nacional-de-seguranca-do-poder-judiciario", "CNJ institui Política Nacional de Segurança do Poder Judiciário"),
("pt/apeminas-medalha-juscelino-kubitschek.html", "http://apeminas.org.br/medalha-juscelino-kubitschek/", "Medalha Juscelino Kubitschek"),
("pt/cd_-comissao-amplia-prazo-para-quitar-credito-rural-obtido-por-meio-de-fundos-de-financiamento.html", "http://www2.camara.leg.br/camaranoticias/noticias/AGROPECUARIA/516481-COMISSAO-AMPLIA-PRAZO-PARA-QUITAR-CREDITO-RURAL-OBTIDO-POR-MEIO-DE-FUNDOS-DE-FINANCIAMENTO.html", "Comissão amplia prazo para quitar crédito rural obtido por meio de fundos de financiamento"),
("pt/cgu-fiscalizacao-do-ministerio-da-transparencia-resulta-em-punicoes-no-mato-grosso.html", "http://www.cgu.gov.br/noticias/2016/09/fiscalizacao-do-ministerio-da-transparencia-resulta-em-punicoes-no-mato-grosso", "Fiscalização do Ministério da Transparência resulta em punições no Mato Grosso"),
("pt/cnj-semana-da-execucao-mobiliza-justica-trabalhista-no-rio-grande-do-norte.html", "http://www.cnj.jus.br/noticias/judiciario/83463-semana-da-execucao-mobiliza-justica-trabalhista-no-rio-grande-do-norte", "Semana da Execução mobiliza Justiça Trabalhista no Rio Grande do Norte"),
("pt/cnmp-cddf-realiza-reuniao-para-desenvolvimento-da-politica-de-atuacao-resolutiva-do-mp.html", "http://www.cnmp.mp.br/portal_2015/todas-as-noticias/9684-cddf-realiza-reuniao-para-desenvolvimento-da-politica-de-atuacao-resolutiva-do-mp", "CDDF realiza reunião para desenvolvimento da Política de Atuação Resolutiva do MP"),
("pt/consultor-juridico-servidor-que-pede-remocao-nao-tem-direito-a-ajuda-de-custo-diz-tnu.html", "http://www.conjur.com.br/2016-set-19/servidor-remocao-nao-direito-ajuda-custo-tnu", "Servidor que pede remoção não tem direito a ajuda de custo, diz TNU"),
("pt/controle-publico-tribunais-de-contas-elegem-controle-social-e-educacao-como-instrumentos-indispensaveis-no-combate-a-corrupcao.html", "http://www.controlepublico.org.br/institucional/noticias/4213-tribunais-de-contas-elegem-controle-social-e-educacao-como-instrumentos-indispensaveis-no-combate-a-corrupcao", "Tribunais de Contas elegem Controle Social e Educação como instrumentos indispensáveis no combate à corrupção"),
("pt/direito-do-estado-governador-de-alagoas-encaminha-a-assembleia-projeto-da-loa-para-2017.html", "http://www.direitodoestado.com.br/noticias/governador-de-alagoas-encaminha-a-assembleia-projeto-da-loa-para-2017", "Governador de Alagoas encaminha à Assembleia projeto da LOA para 2017"),
("pt/dp-al-defensor-publico-participa-do-64o-conselho-nacional-de-entidades-gerais.html", "http://www.defensoria.al.gov.br/sala-de-imprensa/noticias2/defensor-publico-participa-do-64o-conselho-nacional-de-entidades-gerais", "Defensor Público participa do 64º Conselho Nacional de Entidades Gerais"),
("pt/direito-domestico-sai-a-nova-versao-do-manual-do-esocial.html", "http://direitodomestico.jornaldaparaiba.com.br/noticias/sai-nova-versao-do-manual-do-esocial/", "Sai a nova versão do Manual do eSocial"),
("pt/direito-legal-impeachment-como-devera-agir-o-supremo-tribunal-federal.html", "http://www.direitolegal.org/artigos/impeachment-como-devera-agir-o-supremo-tribunal-federal/", "Impeachment: como deverá agir o Supremo Tribunal Federal?"),
# TODO: Normalize space
("pt/carta-forense-vigilantes-do-peso-nao-indenizarao-orientadora-por-exigir-manutencao-do-peso.html", "http://www.cartaforense.com.br/conteudo/noticias/vigilantes-do-peso-nao-indenizarao-orientadora-por-exigir-manutencao-do-peso/16962", "TRABALHO Vigilantes do Peso não indenizarão orientadora por exigir manutenção do peso"),
])
def test_title_extractor(test_file_path, url_page, expected_title):
html = _get_contents_of_file("test/resources/{}".format(test_file_path))
extractor = _get_url_title_extractor(url_page, html)
assert extractor.extract() == expected_title
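# A minimal self-contained illustration of the @pytest.mark.parametrize
# pattern used above (synthetic, assumed cases; it does not read the HTML
# test resources):
@pytest.mark.parametrize("slug,expected_words", [
    ("medalha-juscelino-kubitschek", "medalha juscelino kubitschek"),
    ("novas-regras-da-ans", "novas regras da ans"),
])
def test_slug_words_sketch(slug, expected_words):
    assert slug.replace("-", " ") == expected_words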
| 148.023256
| 431
| 0.79183
|
400287125754d9e7d5c535b97d4dfc755711da2a
| 3,776
|
py
|
Python
|
contrib/macdeploy/custom_dsstore.py
|
HostXcoin/HSX
|
ce7dc6e88d70b615126d67d222a0c673ed9e581d
|
[
"MIT"
] | null | null | null |
contrib/macdeploy/custom_dsstore.py
|
HostXcoin/HSX
|
ce7dc6e88d70b615126d67d222a0c673ed9e581d
|
[
"MIT"
] | null | null | null |
contrib/macdeploy/custom_dsstore.py
|
HostXcoin/HSX
|
ce7dc6e88d70b615126d67d222a0c673ed9e581d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# Copyright (c) 2013-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from __future__ import division,print_function,unicode_literals
import biplist
from ds_store import DSStore
from mac_alias import Alias
import sys
output_file = sys.argv[1]
package_name_ns = sys.argv[2]
ds = DSStore.open(output_file, 'w+')
ds['.']['bwsp'] = {
'ShowStatusBar': False,
'WindowBounds': b'{{300, 280}, {500, 343}}',
'ContainerShowSidebar': False,
'SidebarWidth': 0,
'ShowTabView': False,
'PreviewPaneVisibility': False,
'ShowToolbar': False,
'ShowSidebar': False,
'ShowPathbar': True
}
icvp = {
'gridOffsetX': 0.0,
'textSize': 12.0,
'viewOptionsVersion': 1,
'backgroundImageAlias': b'\x00\x00\x00\x00\x02\x1e\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd1\x94\\\xb0H+\x00\x05\x00\x00\x00\x98\x0fbackground.tiff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x99\xd19\xb0\xf8\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\r\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0b.background\x00\x00\x10\x00\x08\x00\x00\xd1\x94\\\xb0\x00\x00\x00\x11\x00\x08\x00\x00\xd19\xb0\xf8\x00\x00\x00\x01\x00\x04\x00\x00\x00\x98\x00\x0e\x00 \x00\x0f\x00b\x00a\x00c\x00k\x00g\x00r\x00o\x00u\x00n\x00d\x00.\x00t\x00i\x00f\x00f\x00\x0f\x00\x02\x00\x00\x00\x12\x00\x1c/.background/background.tiff\x00\x14\x01\x06\x00\x00\x00\x00\x01\x06\x00\x02\x00\x00\x0cMacintosh HD\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xce\x97\xab\xc3H+\x00\x00\x01\x88[\x88\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02u\xab\x8d\xd1\x94\\\xb0devrddsk\xff\xff\xff\xff\x00\x00\t \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07bitcoin\x00\x00\x10\x00\x08\x00\x00\xce\x97\xab\xc3\x00\x00\x00\x11\x00\x08\x00\x00\xd1\x94\\\xb0\x00\x00\x00\x01\x00\x14\x01\x88[\x88\x00\x16\xa9\t\x00\x08\xfaR\x00\x08\xfaQ\x00\x02d\x8e\x00\x0e\x00\x02\x00\x00\x00\x0f\x00\x1a\x00\x0c\x00M\x00a\x00c\x00i\x00n\x00t\x00o\x00s\x00h\x00 \x00H\x00D\x00\x13\x00\x01/\x00\x00\x15\x00\x02\x00\x14\xff\xff\x00\x00\xff\xff\x00\x00',
'backgroundColorBlue': 1.0,
'iconSize': 96.0,
'backgroundColorGreen': 1.0,
'arrangeBy': 'none',
'showIconPreview': True,
'gridSpacing': 100.0,
'gridOffsetY': 0.0,
'showItemInfo': False,
'labelOnBottom': True,
'backgroundType': 2,
'backgroundColorRed': 1.0
}
alias = Alias.from_bytes(icvp['backgroundImageAlias'])
alias.volume.name = package_name_ns
alias.volume.posix_path = '/Volumes/' + package_name_ns
alias.volume.disk_image_alias.target.filename = package_name_ns + '.temp.dmg'
alias.volume.disk_image_alias.target.carbon_path = 'Macintosh HD:Users:\x00bitcoinuser:\x00Documents:\x00bitcoin:\x00bitcoin:\x00' + package_name_ns + '.temp.dmg'
alias.volume.disk_image_alias.target.posix_path = 'Users/bitcoinuser/Documents/bitcoin/bitcoin/' + package_name_ns + '.temp.dmg'
alias.target.carbon_path = package_name_ns + ':.background:\x00background.tiff'
icvp['backgroundImageAlias'] = biplist.Data(alias.to_bytes())
ds['.']['icvp'] = icvp
ds['.']['vSrn'] = ('long', 1)
ds['Applications']['Iloc'] = (370, 156)
ds['HostxCoin-Qt.app']['Iloc'] = (128, 156)
ds.flush()
ds.close()
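# A minimal read-back sketch (assumed ds_store usage) listing the entries
# that were just written, as a quick verification step:
verify = DSStore.open(output_file, 'r')
for entry in verify:
    print(entry)
verify.close()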
| 61.901639
| 1,817
| 0.727489
|
53b9fac96ef86119fa06eb910b71330d5ce62c7f
| 709
|
py
|
Python
|
Collaboration/__init__.py
|
EnjoyLifeFund/macHighSierra-py36-pkgs
|
5668b5785296b314ea1321057420bcd077dba9ea
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 4
|
2019-03-11T18:05:49.000Z
|
2021-05-22T21:09:09.000Z
|
Collaboration/__init__.py
|
EnjoyLifeFund/macHighSierra-py36-pkgs
|
5668b5785296b314ea1321057420bcd077dba9ea
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null |
Collaboration/__init__.py
|
EnjoyLifeFund/macHighSierra-py36-pkgs
|
5668b5785296b314ea1321057420bcd077dba9ea
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 1
|
2019-03-18T18:53:36.000Z
|
2019-03-18T18:53:36.000Z
|
'''
Python mapping for the Collaboration framework.
This module does not contain docstrings for the wrapped code, check Apple's
documentation for details on how to use these functions and classes.
'''
import sys
import objc
import Foundation
from Collaboration import _metadata
sys.modules['Collaboration'] = objc.ObjCLazyModule(
"Collaboration", "com.apple.Collaboration",
objc.pathForFramework("/System/Library/Frameworks/Collaboration.framework"),
_metadata.__dict__, None, {
'__doc__': __doc__,
'__path__': __path__,
'__loader__': globals().get('__loader__', None),
'objc': objc,
}, (Foundation,))
del sys.modules['Collaboration._metadata']
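# A minimal usage sketch (assumed; requires macOS with PyObjC installed):
#     import Collaboration
#     authority = Collaboration.CBIdentityAuthority.defaultIdentityAuthority()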
| 27.269231
| 80
| 0.724965
|
3879721df5d958f5d19eb9e5dd65e9fa56c2504b
| 2,359
|
py
|
Python
|
mamba/component/gui/qt/open_project_qt/__init__.py
|
ismaelJimenez/mamba_server
|
e6e2343291a0df24f226bde0d13e5bfa74cc3650
|
[
"MIT"
] | null | null | null |
mamba/component/gui/qt/open_project_qt/__init__.py
|
ismaelJimenez/mamba_server
|
e6e2343291a0df24f226bde0d13e5bfa74cc3650
|
[
"MIT"
] | null | null | null |
mamba/component/gui/qt/open_project_qt/__init__.py
|
ismaelJimenez/mamba_server
|
e6e2343291a0df24f226bde0d13e5bfa74cc3650
|
[
"MIT"
] | null | null | null |
############################################################################
#
# Copyright (c) Mamba Developers. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
############################################################################
""" Open Project component """
import os
import sys
from os.path import dirname
from typing import Optional, Dict, Any
from PySide2.QtWidgets import QApplication, QWidget, QFileDialog
from PySide2.QtCore import QCoreApplication
from mamba.core.component_base import GuiPlugin
from mamba.component.gui.msg import RunAction
from mamba.core.context import Context
from mamba.core.composer_parser import compose_parser
class OpenProjectComponent(GuiPlugin):
""" Open Project component in Qt5 """
def __init__(self,
context: Context,
local_config: Optional[Dict[str, dict]] = None) -> None:
super().__init__(os.path.dirname(__file__), context, local_config)
# Define custom variables
self._app: Optional[QCoreApplication] = None
def initialize(self):
super().initialize()
# Initialize custom variables
        self._app = QApplication([]) \
            if QApplication.instance() is None else QApplication.instance()
def publish_views(self, profiles: Dict[str, Any]) -> None:
for profile in profiles:
self._context.rx['run_plugin'].on_next(
RunAction(menu_title=profile['menu_title'],
action_name=profile['action_name'],
perspective=profile['data']))
def run(self, rx_value: RunAction) -> None:
""" Entry point for running the plugin
Args:
rx_value (RunAction): The value published by the subject.
"""
options = QFileDialog.Options()
options |= QFileDialog.DontUseNativeDialog
file_name, _ = QFileDialog.getOpenFileName(
QWidget(),
"Load Mamba Composer File",
"",
"View Files (*-compose.yml)",
options=options)
if file_name:
sys.path.insert(0, dirname(dirname(file_name)))
compose_parser(file_name, self._context.get('mamba_dir'),
dirname(dirname(file_name)))
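# A small sketch of the path handling in run() above (assumed example
# path): the project root of a composer file is resolved with a double
# dirname.
if __name__ == '__main__':
    example = '/home/user/project/composer/demo-compose.yml'  # hypothetical
    print(dirname(dirname(example)))  # -> /home/user/project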
| 34.188406
| 78
| 0.593048
|
595501118a7fc1f5c68ced5cf388344db4fdb8ad
| 8,943
|
py
|
Python
|
setup_wizard.py
|
hunterunger/emailAutoBlock
|
d058ebc4170ac727571f5d59607ed6ec5ba0d8a3
|
[
"MIT"
] | null | null | null |
setup_wizard.py
|
hunterunger/emailAutoBlock
|
d058ebc4170ac727571f5d59607ed6ec5ba0d8a3
|
[
"MIT"
] | null | null | null |
setup_wizard.py
|
hunterunger/emailAutoBlock
|
d058ebc4170ac727571f5d59607ed6ec5ba0d8a3
|
[
"MIT"
] | null | null | null |
import getpass
import os
from widgets import Style, clear_console, wait_indicator, config
class SetupWizard:
def __init__(self):
clear_console()
self.smtp_port = ''
self.username = ''
self.password = ''
self.imap_address = ''
self.smtp_address = ''
self.blacklist = []
self.default_config = {
'search_mail_folder': 'inbox',
'max_search_results': 20,
'update_interval': 10,
'also_reply_to_email': True,
'save_archive': True,
'block_emails': True,
}
self.config_explain = {
'search_mail_folder': 'Where blacklist emails will arrive for this program to search.\n'
'I recommend filtering blocked emails to a folder other than your inbox, such as the '
'"Archive" folder.\n'
'That way it will mark it as read and not give you notifications BEFORE it reaches '
                                  'your inbox.\n'
                                  'This setting can usually be found in your email account settings.',
'max_search_results': 'How many of the most recent emails will be checked for blacklist emails.\n',
'update_interval': 'The interval in minutes between each update of the search results.\n'
'Must be less than an hour.',
'also_reply_to_email': 'If enabled, the email will be replied to with the template provided in the config '
'files.\n'
'Feel free to edit this template.',
'save_archive': 'If enabled, the email will be saved in the emails archive after mailbox deletion as plain '
'text.',
            'block_emails': 'Any emails on the blacklist will be permanently and unrecoverably deleted from your '
                            'mail account.',
}
# check if in the correct directory
current_folder = os.getcwd().split('/')[-1]
if current_folder != 'emailBlockBot':
print("It looks like your not running this from the correct directory!")
print(
'Make sure you "cd" into this file. It would look something like this but with the folder this program '
'is'
' in:\n')
print(Style.green + 'cd Users/<username>/blabla/setup_wizard.py' + Style.reset)
print("\nThen run this command from this folder:\n")
print(Style.green + 'python3 setup_wizard.py' + Style.reset)
print('\n\n')
exit()
# check if the config directory exists
if not os.path.isdir('config files'):
os.mkdir('config files')
# check if the config file exists
if not os.path.isfile('config files/config.txt'):
print('It looks like you\'re running this for the first time. Let\'s get started!')
else:
            print('It looks like you\'re already set up. You can always run this program again to reset your '
                  'settings, or you can simply delete the "config files" directory.')
input('\n\nPress "enter" to continue.')
def page_1(self):
clear_console()
# login
print(Style.blue + Style.inverted + ' Step 1/3 - Login' + Style.reset)
        print('\nFirst you\'ll log in to your account\n')
self.username = input(Style.blue + Style.inverted + ' Email/username: ' + Style.reset)
self.password = getpass.getpass(Style.blue + Style.inverted + ' Password: ' + Style.reset)
print("\033[A \033[A")
print(Style.blue + Style.inverted + ' Password: ' + Style.reset + ''.join(['*'] * len(self.password)))
print('\n')
self.imap_address = input(Style.blue + Style.inverted + ' Incoming Mail Server (IMAP): ' + Style.reset)
self.smtp_address = input(Style.blue + Style.inverted + ' Outgoing Mail Server (SMTP): ' + Style.reset)
self.smtp_port = input(Style.blue + Style.inverted + ' SMTP Port: ' + Style.reset)
while not self.smtp_port.isdigit():
print("\033[A \033[A")
print('It has to be a number.')
self.smtp_port = input(Style.red + Style.inverted + ' SMTP Port: ' + Style.reset)
input('\n\nPress "enter" to go to the next step.')
def page_2(self):
# second page
clear_console()
print(Style.blue + Style.inverted + ' Step 2/3 - Block list' + Style.reset)
print('Now you\'ll enter the addresses you want to block.')
print('They can either be full addresses (johnny@gmail.com) or words in the address (@gmail, or johnny)')
print('Just press enter when you\'re done.\n')
self.blacklist = []
blocked = 1
while True:
blacklist_addition = input(
Style.blue + Style.inverted + ' Block #' + str(blocked) + ': ' + Style.reset)
if blacklist_addition == '':
print('\033[A \033[A')
break
else:
blocked += 1
self.blacklist.append(blacklist_addition)
def page_3(self):
last_input = ' '
while last_input != '':
clear_console()
print(Style.blue + Style.inverted + ' Step 3/3 - Settings' + Style.reset)
print(
'\nHere are the default settings. Type the number you\'d like to view or press enter to apply settings.'
)
config_keys = list(self.default_config.keys())
for i in range(len(self.default_config)):
print('\n#' + str(i + 1) + ': ' + config_keys[i] + ': ' + str(list(self.default_config.values())[i]))
print('\n')
last_input = input(Style.blue + Style.inverted + ' Set item #:' + Style.reset + ' ')
# check if last input was a number
if last_input.isdigit():
last_input = int(last_input)
if 0 < last_input <= len(self.default_config):
clear_console()
print(Style.blue + Style.inverted + ' Setting "' + config_keys[
last_input - 1] + '" ' + Style.reset + '\n\n')
print(Style.yellow + 'Help:' + Style.reset)
print(self.config_explain[config_keys[last_input - 1]])
print('\n(press enter to go back or enter a new value)')
new_value = input('-> ')
if new_value != '':
# format the value
if new_value.isdigit():
new_value = int(new_value)
elif new_value.lower() == 'true':
new_value = True
elif new_value.lower() == 'false':
new_value = False
self.default_config[config_keys[last_input - 1]] = new_value
else:
print('\033[A \033[A')
print('It has to be a number between 1 and ' + str(len(self.default_config)) + '.')
last_input = ' '
print('\n\nGreat! Saving everything now...')
        print('Note: you can always change these settings from "config files/config.txt", '
              'or simply delete "config files" to reset them.')
wait_indicator(3)
# save config
self.default_config['username'] = self.username
self.default_config['password'] = self.password
self.default_config['imap_address'] = self.imap_address
self.default_config['smtp_address'] = self.smtp_address
self.default_config['smtp_port'] = self.smtp_port
self.default_config['blacklist'] = self.blacklist
config(self.default_config)
def setup(self):
clear_console()
self.page_1()
self.page_2()
self.page_3()
        print('Great! You are all set up now!')
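def _coerce_value(new_value):
    """
    A standalone sketch of the input-coercion rule used in page_3 above
    (an assumed helper, not wired into the wizard): digit strings become
    ints and 'true'/'false' become booleans.
    """
    if new_value.isdigit():
        return int(new_value)
    if new_value.lower() == 'true':
        return True
    if new_value.lower() == 'false':
        return False
    return new_value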
if __name__ == '__main__':
try:
wizard = SetupWizard()
wizard.setup()
except KeyboardInterrupt:
print('\nIt looks like you exited early. Everything cancelled.')
print('You can now start the server by entering:')
print(Style.green + 'python3 main.py' + Style.reset)
print('\n\n')
| 43.202899
| 120
| 0.518059
|
eb6049b1929eda2618dd25a4be6ac4f47adda1cc
| 2,219
|
py
|
Python
|
y-stream/y-stream-trackers.py
|
sfowl/elliott
|
31df959bc286bbe55e1d97a18747d904ea8fb150
|
[
"Apache-2.0"
] | null | null | null |
y-stream/y-stream-trackers.py
|
sfowl/elliott
|
31df959bc286bbe55e1d97a18747d904ea8fb150
|
[
"Apache-2.0"
] | null | null | null |
y-stream/y-stream-trackers.py
|
sfowl/elliott
|
31df959bc286bbe55e1d97a18747d904ea8fb150
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
from __future__ import print_function
import bugzilla
import os
import sys
api_key = None
try:
api_key = open(os.path.expanduser('~/.bugzilla_apikey')).read().strip()
except FileNotFoundError:
# api key in ~/.config/python-bugzilla/bugzillarc will be used
pass
bz = bugzilla.RHBugzilla(url=None, api_key=api_key)
bz.connect('https://bugzilla.redhat.com')
PRODUCT_NAME = "OpenShift Container Platform"
try:
TARGET_RELEASE = sys.argv[1] # e.g. "4.5.0"
except IndexError:
sys.exit("Target Release required, e.g. '4.5.0'")
query = bz.build_query(
product=PRODUCT_NAME,
status=["MODIFIED", "ON_QA", "VERIFIED"],
target_release=TARGET_RELEASE,
keywords=["Security", "SecurityTracking"],
)
def _get_flaw_cve(flaw):
    # lists have no .get(), so guard the missing-alias case with try/except
try:
return flaw.alias[0]
except IndexError:
return None
y_stream_trackers = bz.query(query)
for y_stream_tracker in y_stream_trackers:
component = y_stream_tracker.summary.split(":")[0].split(" ")[-1]
blocking_bugs = bz.getbugs(y_stream_tracker.blocks)
flaw_bugs = list(filter(lambda x: x.product == "Security Response", blocking_bugs))
tracker_cves = list(filter(lambda x: x is not None, (_get_flaw_cve(flaw) for flaw in flaw_bugs)))
if len(tracker_cves) == 0:
continue
tracker_flaws = list(filter(lambda x: _get_flaw_cve(x) in tracker_cves, flaw_bugs))
flaw_tracker_ids = set([t for f in tracker_flaws for t in f.depends_on])
trackers = bz.getbugs(flaw_tracker_ids)
def filter_tracker(tracker):
if all([
tracker.product == PRODUCT_NAME,
component + ":" in tracker.summary,
tracker.target_release[0] <= TARGET_RELEASE,
tracker.target_release[0] > "4.0.0",
]):
return True
return False
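    # Note on filter_tracker: target_release strings compare lexicographically,
    # e.g. "4.4.0" <= "4.5.0" is True but "4.9.0" <= "4.10.0" is False, so
    # double-digit minor releases need care with this string comparison.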
sorted_trackers = sorted(trackers, key=lambda x: x.target_release, reverse=True)
filtered_trackers = filter(filter_tracker, sorted_trackers)
print("{} {}".format(" ".join(tracker_cves), component))
for tracker in filtered_trackers:
print("\t{} {} {} {}".format(tracker.id, tracker.target_release[0], tracker.status, tracker.resolution))
| 31.7
| 112
| 0.681839
|
059f8543104a70d9cb90e0be23aca41dae929463
| 15,787
|
py
|
Python
|
pandas/tests/indexing/multiindex/test_setitem.py
|
trevransom/pandas
|
a6311fb7197e20c5cad703d32e350787ac3430b2
|
[
"BSD-3-Clause"
] | 1
|
2020-10-30T15:34:46.000Z
|
2020-10-30T15:34:46.000Z
|
pandas/tests/indexing/multiindex/test_setitem.py
|
trevransom/pandas
|
a6311fb7197e20c5cad703d32e350787ac3430b2
|
[
"BSD-3-Clause"
] | null | null | null |
pandas/tests/indexing/multiindex/test_setitem.py
|
trevransom/pandas
|
a6311fb7197e20c5cad703d32e350787ac3430b2
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
import pytest
import pandas as pd
from pandas import DataFrame, MultiIndex, Series, Timestamp, date_range, isna, notna
import pandas._testing as tm
import pandas.core.common as com
class TestMultiIndexSetItem:
def test_setitem_multiindex(self):
for index_fn in ("loc",):
def assert_equal(a, b):
assert a == b
def check(target, indexers, value, compare_fn, expected=None):
fn = getattr(target, index_fn)
fn.__setitem__(indexers, value)
result = fn.__getitem__(indexers)
if expected is None:
expected = value
compare_fn(result, expected)
# GH7190
index = MultiIndex.from_product(
[np.arange(0, 100), np.arange(0, 80)], names=["time", "firm"]
)
t, n = 0, 2
df = DataFrame(
np.nan,
columns=["A", "w", "l", "a", "x", "X", "d", "profit"],
index=index,
)
check(target=df, indexers=((t, n), "X"), value=0, compare_fn=assert_equal)
df = DataFrame(
-999, columns=["A", "w", "l", "a", "x", "X", "d", "profit"], index=index
)
check(target=df, indexers=((t, n), "X"), value=1, compare_fn=assert_equal)
df = DataFrame(
columns=["A", "w", "l", "a", "x", "X", "d", "profit"], index=index
)
check(target=df, indexers=((t, n), "X"), value=2, compare_fn=assert_equal)
# gh-7218: assigning with 0-dim arrays
df = DataFrame(
-999, columns=["A", "w", "l", "a", "x", "X", "d", "profit"], index=index
)
check(
target=df,
indexers=((t, n), "X"),
value=np.array(3),
compare_fn=assert_equal,
expected=3,
)
# GH5206
df = DataFrame(
np.arange(25).reshape(5, 5), columns="A,B,C,D,E".split(","), dtype=float
)
df["F"] = 99
row_selection = df["A"] % 2 == 0
col_selection = ["B", "C"]
df.loc[row_selection, col_selection] = df["F"]
output = DataFrame(99.0, index=[0, 2, 4], columns=["B", "C"])
tm.assert_frame_equal(df.loc[row_selection, col_selection], output)
check(
target=df,
indexers=(row_selection, col_selection),
value=df["F"],
compare_fn=tm.assert_frame_equal,
expected=output,
)
# GH11372
idx = MultiIndex.from_product(
[["A", "B", "C"], date_range("2015-01-01", "2015-04-01", freq="MS")]
)
cols = MultiIndex.from_product(
[["foo", "bar"], date_range("2016-01-01", "2016-02-01", freq="MS")]
)
df = DataFrame(np.random.random((12, 4)), index=idx, columns=cols)
subidx = MultiIndex.from_tuples(
[("A", Timestamp("2015-01-01")), ("A", Timestamp("2015-02-01"))]
)
subcols = MultiIndex.from_tuples(
[("foo", Timestamp("2016-01-01")), ("foo", Timestamp("2016-02-01"))]
)
vals = DataFrame(np.random.random((2, 2)), index=subidx, columns=subcols)
check(
target=df,
indexers=(subidx, subcols),
value=vals,
compare_fn=tm.assert_frame_equal,
)
# set all columns
vals = DataFrame(np.random.random((2, 4)), index=subidx, columns=cols)
check(
target=df,
indexers=(subidx, slice(None, None, None)),
value=vals,
compare_fn=tm.assert_frame_equal,
)
# identity
copy = df.copy()
check(
target=df,
indexers=(df.index, df.columns),
value=df,
compare_fn=tm.assert_frame_equal,
expected=copy,
)
def test_multiindex_setitem(self):
# GH 3738
# setting with a multi-index right hand side
arrays = [
np.array(["bar", "bar", "baz", "qux", "qux", "bar"]),
np.array(["one", "two", "one", "one", "two", "one"]),
np.arange(0, 6, 1),
]
df_orig = DataFrame(
np.random.randn(6, 3), index=arrays, columns=["A", "B", "C"]
).sort_index()
expected = df_orig.loc[["bar"]] * 2
df = df_orig.copy()
df.loc[["bar"]] *= 2
tm.assert_frame_equal(df.loc[["bar"]], expected)
# raise because these have differing levels
msg = "cannot align on a multi-index with out specifying the join levels"
with pytest.raises(TypeError, match=msg):
df.loc["bar"] *= 2
# from SO
# https://stackoverflow.com/questions/24572040/pandas-access-the-level-of-multiindex-for-inplace-operation
df_orig = DataFrame.from_dict(
{
"price": {
("DE", "Coal", "Stock"): 2,
("DE", "Gas", "Stock"): 4,
("DE", "Elec", "Demand"): 1,
("FR", "Gas", "Stock"): 5,
("FR", "Solar", "SupIm"): 0,
("FR", "Wind", "SupIm"): 0,
}
}
)
df_orig.index = MultiIndex.from_tuples(
df_orig.index, names=["Sit", "Com", "Type"]
)
expected = df_orig.copy()
expected.iloc[[0, 2, 3]] *= 2
idx = pd.IndexSlice
df = df_orig.copy()
df.loc[idx[:, :, "Stock"], :] *= 2
tm.assert_frame_equal(df, expected)
df = df_orig.copy()
df.loc[idx[:, :, "Stock"], "price"] *= 2
tm.assert_frame_equal(df, expected)
def test_multiindex_assignment(self):
# GH3777 part 2
# mixed dtype
df = DataFrame(
np.random.randint(5, 10, size=9).reshape(3, 3),
columns=list("abc"),
index=[[4, 4, 8], [8, 10, 12]],
)
df["d"] = np.nan
arr = np.array([0.0, 1.0])
df.loc[4, "d"] = arr
tm.assert_series_equal(df.loc[4, "d"], Series(arr, index=[8, 10], name="d"))
# single dtype
df = DataFrame(
np.random.randint(5, 10, size=9).reshape(3, 3),
columns=list("abc"),
index=[[4, 4, 8], [8, 10, 12]],
)
df.loc[4, "c"] = arr
exp = Series(arr, index=[8, 10], name="c", dtype="float64")
tm.assert_series_equal(df.loc[4, "c"], exp)
# scalar ok
df.loc[4, "c"] = 10
exp = Series(10, index=[8, 10], name="c", dtype="float64")
tm.assert_series_equal(df.loc[4, "c"], exp)
# invalid assignments
msg = (
"cannot set using a multi-index selection indexer "
"with a different length than the value"
)
with pytest.raises(ValueError, match=msg):
df.loc[4, "c"] = [0, 1, 2, 3]
with pytest.raises(ValueError, match=msg):
df.loc[4, "c"] = [0]
# groupby example
NUM_ROWS = 100
NUM_COLS = 10
col_names = ["A" + num for num in map(str, np.arange(NUM_COLS).tolist())]
index_cols = col_names[:5]
df = DataFrame(
np.random.randint(5, size=(NUM_ROWS, NUM_COLS)),
dtype=np.int64,
columns=col_names,
)
df = df.set_index(index_cols).sort_index()
grp = df.groupby(level=index_cols[:4])
df["new_col"] = np.nan
f_index = np.arange(5)
def f(name, df2):
return Series(np.arange(df2.shape[0]), name=df2.index.values[0]).reindex(
f_index
)
        # FIXME: don't leave commented-out
        # TODO(wesm): unused?
# new_df = pd.concat([f(name, df2) for name, df2 in grp], axis=1).T
# we are actually operating on a copy here
# but in this case, that's ok
for name, df2 in grp:
new_vals = np.arange(df2.shape[0])
df.loc[name, "new_col"] = new_vals
def test_series_setitem(self, multiindex_year_month_day_dataframe_random_data):
ymd = multiindex_year_month_day_dataframe_random_data
s = ymd["A"]
s[2000, 3] = np.nan
assert isna(s.values[42:65]).all()
assert notna(s.values[:42]).all()
assert notna(s.values[65:]).all()
s[2000, 3, 10] = np.nan
assert isna(s.iloc[49])
with pytest.raises(KeyError, match="49"):
            # GH#33355 don't fall back to positional when leading level is int
s[49]
def test_frame_getitem_setitem_boolean(self, multiindex_dataframe_random_data):
frame = multiindex_dataframe_random_data
df = frame.T.copy()
values = df.values
result = df[df > 0]
expected = df.where(df > 0)
tm.assert_frame_equal(result, expected)
df[df > 0] = 5
values[values > 0] = 5
tm.assert_almost_equal(df.values, values)
df[df == 5] = 0
values[values == 5] = 0
tm.assert_almost_equal(df.values, values)
# a df that needs alignment first
df[df[:-1] < 0] = 2
np.putmask(values[:-1], values[:-1] < 0, 2)
tm.assert_almost_equal(df.values, values)
with pytest.raises(TypeError, match="boolean values only"):
df[df * 0] = 2
def test_frame_getitem_setitem_multislice(self):
levels = [["t1", "t2"], ["a", "b", "c"]]
codes = [[0, 0, 0, 1, 1], [0, 1, 2, 0, 1]]
midx = MultiIndex(codes=codes, levels=levels, names=[None, "id"])
df = DataFrame({"value": [1, 2, 3, 7, 8]}, index=midx)
result = df.loc[:, "value"]
tm.assert_series_equal(df["value"], result)
result = df.loc[df.index[1:3], "value"]
tm.assert_series_equal(df["value"][1:3], result)
result = df.loc[:, :]
tm.assert_frame_equal(df, result)
result = df
df.loc[:, "value"] = 10
result["value"] = 10
tm.assert_frame_equal(df, result)
df.loc[:, :] = 10
tm.assert_frame_equal(df, result)
def test_frame_setitem_multi_column(self):
df = DataFrame(
np.random.randn(10, 4), columns=[["a", "a", "b", "b"], [0, 1, 0, 1]]
)
cp = df.copy()
cp["a"] = cp["b"]
tm.assert_frame_equal(cp["a"], cp["b"])
# set with ndarray
cp = df.copy()
cp["a"] = cp["b"].values
tm.assert_frame_equal(cp["a"], cp["b"])
# ---------------------------------------
# #1803
columns = MultiIndex.from_tuples([("A", "1"), ("A", "2"), ("B", "1")])
df = DataFrame(index=[1, 3, 5], columns=columns)
        # Setting the top-level key broadcasts the scalar into both
        # existing "A" columns, as the assertion below verifies
        df["A"] = 0.0
        assert (df["A"].values == 0).all()
# it broadcasts
df["B", "1"] = [1, 2, 3]
df["A"] = df["B", "1"]
sliced_a1 = df["A", "1"]
sliced_a2 = df["A", "2"]
sliced_b1 = df["B", "1"]
tm.assert_series_equal(sliced_a1, sliced_b1, check_names=False)
tm.assert_series_equal(sliced_a2, sliced_b1, check_names=False)
assert sliced_a1.name == ("A", "1")
assert sliced_a2.name == ("A", "2")
assert sliced_b1.name == ("B", "1")
def test_getitem_setitem_tuple_plus_columns(
self, multiindex_year_month_day_dataframe_random_data
):
# GH #1013
ymd = multiindex_year_month_day_dataframe_random_data
df = ymd[:5]
result = df.loc[(2000, 1, 6), ["A", "B", "C"]]
expected = df.loc[2000, 1, 6][["A", "B", "C"]]
tm.assert_series_equal(result, expected)
def test_getitem_setitem_slice_integers(self):
index = MultiIndex(
levels=[[0, 1, 2], [0, 2]], codes=[[0, 0, 1, 1, 2, 2], [0, 1, 0, 1, 0, 1]]
)
frame = DataFrame(
np.random.randn(len(index), 4), index=index, columns=["a", "b", "c", "d"]
)
res = frame.loc[1:2]
exp = frame.reindex(frame.index[2:])
tm.assert_frame_equal(res, exp)
frame.loc[1:2] = 7
assert (frame.loc[1:2] == 7).values.all()
series = Series(np.random.randn(len(index)), index=index)
res = series.loc[1:2]
exp = series.reindex(series.index[2:])
tm.assert_series_equal(res, exp)
series.loc[1:2] = 7
assert (series.loc[1:2] == 7).values.all()
def test_setitem_change_dtype(self, multiindex_dataframe_random_data):
frame = multiindex_dataframe_random_data
dft = frame.T
s = dft["foo", "two"]
dft["foo", "two"] = s > s.median()
tm.assert_series_equal(dft["foo", "two"], s > s.median())
# assert isinstance(dft._data.blocks[1].items, MultiIndex)
reindexed = dft.reindex(columns=[("foo", "two")])
tm.assert_series_equal(reindexed["foo", "two"], s > s.median())
def test_set_column_scalar_with_loc(self, multiindex_dataframe_random_data):
frame = multiindex_dataframe_random_data
subset = frame.index[[1, 4, 5]]
frame.loc[subset] = 99
assert (frame.loc[subset].values == 99).all()
col = frame["B"]
col[subset] = 97
assert (frame.loc[subset, "B"] == 97).all()
def test_nonunique_assignment_1750(self):
df = DataFrame(
[[1, 1, "x", "X"], [1, 1, "y", "Y"], [1, 2, "z", "Z"]], columns=list("ABCD")
)
df = df.set_index(["A", "B"])
ix = MultiIndex.from_tuples([(1, 1)])
df.loc[ix, "C"] = "_"
assert (df.xs((1, 1))["C"] == "_").all()
def test_astype_assignment_with_dups(self):
# GH 4686
# assignment with dups that has a dtype change
cols = MultiIndex.from_tuples([("A", "1"), ("B", "1"), ("A", "2")])
df = DataFrame(np.arange(3).reshape((1, 3)), columns=cols, dtype=object)
index = df.index.copy()
df["A"] = df["A"].astype(np.float64)
tm.assert_index_equal(df.index, index)
def test_setitem_nonmonotonic(self):
# https://github.com/pandas-dev/pandas/issues/31449
index = pd.MultiIndex.from_tuples(
[("a", "c"), ("b", "x"), ("a", "d")], names=["l1", "l2"]
)
df = DataFrame(data=[0, 1, 2], index=index, columns=["e"])
df.loc["a", "e"] = np.arange(99, 101, dtype="int64")
expected = DataFrame({"e": [99, 1, 100]}, index=index)
tm.assert_frame_equal(df, expected)
def test_frame_setitem_view_direct(multiindex_dataframe_random_data):
# this works because we are modifying the underlying array
# really a no-no
df = multiindex_dataframe_random_data.T
df["foo"].values[:] = 0
assert (df["foo"].values == 0).all()
def test_frame_setitem_copy_raises(multiindex_dataframe_random_data):
# will raise/warn as its chained assignment
df = multiindex_dataframe_random_data.T
msg = "A value is trying to be set on a copy of a slice from a DataFrame"
with pytest.raises(com.SettingWithCopyError, match=msg):
df["foo"]["one"] = 2
def test_frame_setitem_copy_no_write(multiindex_dataframe_random_data):
frame = multiindex_dataframe_random_data.T
expected = frame
df = frame.copy()
msg = "A value is trying to be set on a copy of a slice from a DataFrame"
with pytest.raises(com.SettingWithCopyError, match=msg):
df["foo"]["one"] = 2
result = df
tm.assert_frame_equal(result, expected)
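# A minimal standalone sketch of the pd.IndexSlice pattern exercised in
# test_multiindex_setitem above (assumed, synthetic frame):
def test_indexslice_sketch():
    idx = pd.IndexSlice
    mi = MultiIndex.from_product([["a", "b"], ["x", "y"]])
    df = DataFrame({"v": [1, 2, 3, 4]}, index=mi)
    df.loc[idx[:, "x"], "v"] *= 10
    expected = Series([10, 2, 30, 4], index=mi, name="v")
    tm.assert_series_equal(df["v"], expected)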
| 33.950538
| 114
| 0.519161
|
bea4e9f0c5fa87dc6bf79ca729287ec6d3fffe64
| 1,493
|
py
|
Python
|
Colloquiums/2020-2021/Colloquium_3/Exercise_3_tests.py
|
Szymon-Budziak/ASD_exercises_solutions
|
36ccbdae03a6c7e4ad141a2b7b01bef9353574ee
|
[
"MIT"
] | 7
|
2021-12-28T23:38:42.000Z
|
2022-03-29T16:36:16.000Z
|
Colloquiums/2020-2021/Colloquium_3/Exercise_3_tests.py
|
Szymon-Budziak/ASD_exercises_solutions
|
36ccbdae03a6c7e4ad141a2b7b01bef9353574ee
|
[
"MIT"
] | null | null | null |
Colloquiums/2020-2021/Colloquium_3/Exercise_3_tests.py
|
Szymon-Budziak/ASD_exercises_solutions
|
36ccbdae03a6c7e4ad141a2b7b01bef9353574ee
|
[
"MIT"
] | 4
|
2021-06-29T20:21:52.000Z
|
2022-03-12T10:04:17.000Z
|
# ------------------------------------
G1 = [
[0, 1, 1, 0, 1],
[1, 0, 0, 1, 0],
[1, 0, 0, 0, 1],
[0, 1, 0, 0, 1],
[1, 0, 1, 1, 0],
]
K1 = ['B', 'B', 'G', 'G', 'B']
D1 = 2
R1 = 2
T1 = [G1, K1, D1, R1]
# ------------------------------------
G2 = [
[0, 1, 1, 1],
[1, 0, 1, 1],
[1, 1, 0, 1],
[1, 1, 1, 0],
]
K2 = ['B', 'G', 'G', 'B']
D2 = 1
R2 = 2
T2 = [G2, K2, D2, R2]
# ------------------------------------
G3 = [
[0, 1, 1, 1],
[1, 0, 1, 1],
[1, 1, 0, 1],
[1, 1, 1, 0],
]
K3 = ['B', 'G', 'G', 'B']
D3 = 2
R3 = 0
T3 = [G3, K3, D3, R3]
# ------------------------------------
G4 = [
[0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0, 0],
[1, 1, 1, 0, 1, 0, 0, 0],
[0, 0, 0, 1, 0, 1, 1, 1],
[0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0],
]
K4 = ['G', 'B', 'G', 'G', 'G', 'G', 'B', 'G']
D4 = 3
R4 = 2
T4 = [G4, K4, D4, R4]
# ------------------------------------
TESTS = [T1, T2, T3, T4]
def runtests(f):
OK = True
for T in TESTS:
print("-----------------------------------------")
print("Dane:")
res = f(T[0], T[1], T[2])
print("Oczekiwany rezultat: {}, Wynik: {}".format(T[3], res))
if res != T[3]:
print("Blad!")
OK = False
else:
print("OK")
print("-----------------------------------------")
if OK:
print("OK!")
else:
print("Bledy!")
| 20.175676
| 69
| 0.261889
|
b149e23b2c523dbb0034c7fcb5a096e9a89ac545
| 1,951
|
py
|
Python
|
test/test_ezsignsignature_response_compound.py
|
ezmaxinc/eZmax-SDK-python
|
6794b8001abfb7d9ae18a3b87aba164839b925a0
|
[
"MIT"
] | null | null | null |
test/test_ezsignsignature_response_compound.py
|
ezmaxinc/eZmax-SDK-python
|
6794b8001abfb7d9ae18a3b87aba164839b925a0
|
[
"MIT"
] | null | null | null |
test/test_ezsignsignature_response_compound.py
|
ezmaxinc/eZmax-SDK-python
|
6794b8001abfb7d9ae18a3b87aba164839b925a0
|
[
"MIT"
] | null | null | null |
"""
eZmax API Definition (Full)
    This API exposes all the functionalities for the eZmax and eZsign applications.  # noqa: E501
The version of the OpenAPI document: 1.1.7
Contact: support-api@ezmax.ca
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import eZmaxApi
from eZmaxApi.model.ezsignsignature_response import EzsignsignatureResponse
from eZmaxApi.model.ezsignsignature_response_compound_all_of import EzsignsignatureResponseCompoundAllOf
from eZmaxApi.model.ezsignsignaturecustomdate_response_compound import EzsignsignaturecustomdateResponseCompound
from eZmaxApi.model.field_e_ezsignsignature_font import FieldEEzsignsignatureFont
from eZmaxApi.model.field_e_ezsignsignature_tooltipposition import FieldEEzsignsignatureTooltipposition
from eZmaxApi.model.field_e_ezsignsignature_type import FieldEEzsignsignatureType
globals()['EzsignsignatureResponse'] = EzsignsignatureResponse
globals()['EzsignsignatureResponseCompoundAllOf'] = EzsignsignatureResponseCompoundAllOf
globals()['EzsignsignaturecustomdateResponseCompound'] = EzsignsignaturecustomdateResponseCompound
globals()['FieldEEzsignsignatureFont'] = FieldEEzsignsignatureFont
globals()['FieldEEzsignsignatureTooltipposition'] = FieldEEzsignsignatureTooltipposition
globals()['FieldEEzsignsignatureType'] = FieldEEzsignsignatureType
from eZmaxApi.model.ezsignsignature_response_compound import EzsignsignatureResponseCompound
class TestEzsignsignatureResponseCompound(unittest.TestCase):
"""EzsignsignatureResponseCompound unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testEzsignsignatureResponseCompound(self):
"""Test EzsignsignatureResponseCompound"""
# FIXME: construct object with mandatory attributes with example values
# model = EzsignsignatureResponseCompound() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 39.816327
| 112
| 0.822142
|
308642c3ca3a6eb7f015f2a9e8edd349f3858413
| 1,151
|
py
|
Python
|
datajob/__init__.py
|
LorenzoCevolani/datajob
|
dbb0775c63df2cabcbff77b0df2015eac429a126
|
[
"Apache-2.0"
] | 90
|
2021-01-04T20:08:20.000Z
|
2022-03-14T11:20:24.000Z
|
datajob/__init__.py
|
LorenzoCevolani/datajob
|
dbb0775c63df2cabcbff77b0df2015eac429a126
|
[
"Apache-2.0"
] | 93
|
2020-12-12T22:10:33.000Z
|
2021-11-21T16:12:24.000Z
|
datajob/__init__.py
|
LorenzoCevolani/datajob
|
dbb0775c63df2cabcbff77b0df2015eac429a126
|
[
"Apache-2.0"
] | 13
|
2020-12-12T22:11:01.000Z
|
2021-09-22T14:37:09.000Z
|
import logging
import os
import pathlib
import shlex
import subprocess
from pathlib import Path
from rich.console import Console
ROOT_DIR = pathlib.Path(__file__).parent.absolute()
DEFAULT_STACK_STAGE = "dev"
# if someone tried to log something before basicConfig is called, Python creates a default handler that
# goes to the console and will ignore further basicConfig calls. Remove the handler if there is one.
# https://stackoverflow.com/a/45624044/1771155
root = logging.getLogger()
if root.handlers:
for handler in root.handlers:
root.removeHandler(handler)
log_level = os.environ.get("LOG_LEVEL", "INFO")
logging.basicConfig(level=logging.getLevelName(log_level))
project_name = Path(__file__).parent.stem
logger = logging.getLogger(project_name)
def call_subprocess(cmd: str) -> None:
"""
    Call a command as a subprocess in a secure way (arguments are tokenized
    with shlex rather than passed through a shell).
https://stackoverflow.com/a/59090212/1771155
:param cmd: the command to execute
:return: None
"""
print(f"datajob subprocess command: " f"{cmd}")
subprocess.check_call(shlex.split(cmd))
console = Console(style="bold green", soft_wrap=True, log_path=False)
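if __name__ == '__main__':
    # A minimal usage sketch (assumed command; requires a `python`
    # executable on PATH): shlex.split keeps the quoted -c argument intact
    # while avoiding shell=True.
    call_subprocess('python -c "print(42)"')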
| 31.108108
| 103
| 0.755864
|
4821c49de4801307b168bb63e9957499a7ca167b
| 21,691
|
py
|
Python
|
src/dataset/trans/jaad_trans.py
|
DongxuGuo1997/TransNet
|
a720c0b1ac18db19796409b51e1cab96b744a4f0
|
[
"MIT"
] | 6
|
2021-01-12T08:41:01.000Z
|
2021-05-01T16:26:29.000Z
|
src/dataset/trans/jaad_trans.py
|
DongxuGuo1997/VITA_stop_go
|
a720c0b1ac18db19796409b51e1cab96b744a4f0
|
[
"MIT"
] | null | null | null |
src/dataset/trans/jaad_trans.py
|
DongxuGuo1997/VITA_stop_go
|
a720c0b1ac18db19796409b51e1cab96b744a4f0
|
[
"MIT"
] | null | null | null |
import os
import numpy as np
import pickle
import copy
# --------------------------------------------------------------------
def get_split_vids(split_vids_path, image_set, subset='default') -> list:
"""
Returns a list of video ids for a given data split
:param: split_vids_path: path of JAAD split
            image_set: data split: train, test, val or all
subset: "all", "default" or "high_resolution"
:return: The list of video ids
"""
assert image_set in ["train", "test", "val", "all"]
vid_ids = []
sets = [image_set] if image_set != 'all' else ['train', 'test', 'val']
for s in sets:
vid_id_file = os.path.join(split_vids_path, subset, s + '.txt')
with open(vid_id_file, 'rt') as fid:
vid_ids.extend([x.strip() for x in fid.readlines()])
return vid_ids
def get_pedb_ids_jaad(annotations, vid):
"""
    Get the ids of pedestrians (with behavior tags) in a specific video.
:param: dataset: JAAD raw data in dictionary form
vid : video id (str)
:return: pedestrians' ids
"""
pedb_ids = []
ped_keys = list(annotations[vid]['ped_annotations'].keys())
for key in ped_keys:
if 'b' in key:
pedb_ids.append(key)
return pedb_ids
def get_pedb_info_jaad(annotations, vid):
"""
    Get pedb information, i.e. frames, bbox, occlusion, actions (walking or not), cross behavior.
:param: annotations: JAAD annotations in dictionary form
vid : single video id (str)
:return: information of all pedestrians in one video
"""
ids = get_pedb_ids_jaad(annotations, vid)
dataset = annotations
pedb_info = {}
for idx in ids:
pedb_info[idx] = {}
pedb_info[idx]['frames'] = []
pedb_info[idx]['bbox'] = []
pedb_info[idx]['occlusion'] = []
pedb_info[idx]['action'] = []
pedb_info[idx]['cross'] = []
# process atomic behavior label
pedb_info[idx]['behavior'] = []
pedb_info[idx]['traffic_light'] = []
frames = copy.deepcopy(dataset[vid]['ped_annotations'][idx]['frames'])
bbox = copy.deepcopy(dataset[vid]['ped_annotations'][idx]['bbox'])
occlusion = copy.deepcopy(dataset[vid]['ped_annotations'][idx]['occlusion'])
action = copy.deepcopy(dataset[vid]['ped_annotations'][idx]['behavior']['action'])
cross = copy.deepcopy(dataset[vid]['ped_annotations'][idx]['behavior']['cross'])
nod = copy.deepcopy(dataset[vid]['ped_annotations'][idx]['behavior']['nod'])
look = copy.deepcopy(dataset[vid]['ped_annotations'][idx]['behavior']['look'])
hand_gesture = copy.deepcopy(dataset[vid]['ped_annotations'][idx]['behavior']['hand_gesture'])
for i in range(len(frames)):
if action[i] in [0, 1]: # sanity check if behavior label exists
pedb_info[idx]['action'].append(action[i])
pedb_info[idx]['frames'].append(frames[i])
pedb_info[idx]['bbox'].append(bbox[i])
pedb_info[idx]['occlusion'].append(occlusion[i])
pedb_info[idx]['cross'].append(cross[i])
beh_vec = [0, 0, 0, 0]
beh_vec[0] = action[i]
beh_vec[1] = look[i]
beh_vec[2] = nod[i]
hg = hand_gesture[i]
if hg > 0:
beh_vec[3] = 1
pedb_info[idx]['behavior'].append(beh_vec)
# traffic light
pedb_info[idx]['traffic_light'].append(dataset[vid]['traffic_annotations'][frames[i]]['traffic_light'])
# attribute vector
atr_vec = [0, 0, 0, 0, 0, 0]
atr_vec[0] = dataset[vid]['ped_annotations'][idx]['attributes']['num_lanes']
atr_vec[1] = dataset[vid]['ped_annotations'][idx]['attributes']['intersection']
atr_vec[2] = dataset[vid]['ped_annotations'][idx]['attributes']['designated']
if dataset[vid]['ped_annotations'][idx]['attributes']['signalized'] > 0:
atr_vec[3] = 1
atr_vec[4] = dataset[vid]['ped_annotations'][idx]['attributes']['traffic_direction']
atr_vec[5] = dataset[vid]['ped_annotations'][idx]['attributes']['motion_direction']
pedb_info[idx]['attributes'] = copy.deepcopy(atr_vec)
return pedb_info
def filter_None(x):
    # Small helper function that filters None values out of a list
if x is None:
return False
else:
return True
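# Equivalent one-liner, for reference (behavior-preserving):
#   list(filter(filter_None, xs)) == [x for x in xs if x is not None]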
def pedb_info_clean_jaad(annotations, vid) -> dict:
"""
    Remove all frames that have occlusion tag = 2 (fully occluded), then
    get pedb information, i.e. frames, bbox, occlusion, actions (walking or not), cross behavior.
:param: annotations: JAAD annotations in dictionary form
vid : single video id (str)
:return: cleaned information of all pedestrians in one video
"""
pedb_info = get_pedb_info_jaad(annotations, vid)
pids = list(pedb_info.keys())
# remove all frames with occlusion tag=2
for idx in pids:
occ = np.array(pedb_info[idx]['occlusion'])
full_occ = np.flatnonzero(occ == 2)
# set fully occluded frames to None
for i in range(len(full_occ)):
pedb_info[idx]['frames'][full_occ[i]] = None
pedb_info[idx]['bbox'][full_occ[i]] = None
pedb_info[idx]['action'][full_occ[i]] = None
pedb_info[idx]['occlusion'][full_occ[i]] = None
pedb_info[idx]['cross'][full_occ[i]] = None
pedb_info[idx]['behavior'][full_occ[i]] = None
pedb_info[idx]['traffic_light'][full_occ[i]] = None
# filter all None values
pedb_info[idx]['frames'] = list(filter(filter_None, pedb_info[idx]['frames']))
pedb_info[idx]['bbox'] = list(filter(filter_None, pedb_info[idx]['bbox']))
pedb_info[idx]['action'] = list(filter(filter_None, pedb_info[idx]['action']))
pedb_info[idx]['occlusion'] = list(filter(filter_None, pedb_info[idx]['occlusion']))
pedb_info[idx]['cross'] = list(filter(filter_None, pedb_info[idx]['cross']))
pedb_info[idx]['behavior'] = list(filter(filter_None, pedb_info[idx]['behavior']))
pedb_info[idx]['traffic_light'] = list(filter(filter_None, pedb_info[idx]['traffic_light']))
return pedb_info
def add_trans_label_jaad(dataset, verbose=False) -> None:
"""
Add stop & go transition labels for every frame
"""
all_wts = 0 # walking to standing(Stop)
all_stw = 0 # standing to walking (Go)
pids = list(dataset.keys())
for idx in pids:
action = dataset[idx]['action']
frames = dataset[idx]['frames']
n_frames = len(frames)
dataset[idx]['next_transition'] = []
stw_time = []
wts_time = []
for j in range(len(action) - 1):
# stop and go transition
if action[j] == 0 and action[j + 1] == 1:
all_stw += 1
stw_time.append(frames[j + 1])
elif action[j] == 1 and action[j + 1] == 0:
all_wts += 1
wts_time.append(frames[j + 1])
# merge
trans_time = np.array(sorted(stw_time + wts_time))
# set transition tag
for i in range(n_frames):
t_frame = frames[i]
future_trans = trans_time[trans_time >= t_frame]
if future_trans.size > 0:
next_trans = future_trans[0]
dataset[idx]['next_transition'].append(next_trans - t_frame)
else:
dataset[idx]['next_transition'].append(None)
if verbose:
print('----------------------------------------------------------------')
print("JAAD:")
print(f'Total number of standing to walking transitions(raw): {all_stw}')
print(f'Total number of walking to standing transitions(raw): {all_wts}')
return None
def build_pedb_dataset_jaad(jaad_anns_path, split_vids_path, image_set="all", subset='default', verbose=False) -> dict:
"""
Build pedestrian dataset from jaad annotations
"""
jaad_anns = pickle.load(open(jaad_anns_path, 'rb'))
pedb_dataset = {}
vids = get_split_vids(split_vids_path, image_set, subset)
for vid in vids:
pedb_info = pedb_info_clean_jaad(jaad_anns, vid)
pids = list(pedb_info.keys())
for idx in pids:
if len(pedb_info[idx]['action']) > 0:
pedb_dataset[idx] = {}
pedb_dataset[idx]['video_number'] = vid
pedb_dataset[idx]['frames'] = pedb_info[idx]['frames']
pedb_dataset[idx]['bbox'] = pedb_info[idx]['bbox']
pedb_dataset[idx]['action'] = pedb_info[idx]['action']
pedb_dataset[idx]['occlusion'] = pedb_info[idx]['occlusion']
pedb_dataset[idx]["cross"] = pedb_info[idx]["cross"]
pedb_dataset[idx]["behavior"] = pedb_info[idx]["behavior"]
pedb_dataset[idx]["attributes"] = pedb_info[idx]["attributes"]
pedb_dataset[idx]["traffic_light"] = pedb_info[idx]["traffic_light"]
add_trans_label_jaad(pedb_dataset, verbose)
return pedb_dataset
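# A minimal usage sketch (assumed local paths to the JAAD annotation pickle
# and the split-id folders; adjust to your checkout):
#     pedb_dataset = build_pedb_dataset_jaad('JAAD/anns.pkl', 'JAAD/split_ids',
#                                            image_set='train', verbose=True)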
class JaadTransDataset:
"""
dataset class for transition-related pedestrian samples in JAAD
"""
def __init__(self, jaad_anns_path, split_vids_path, image_set="all", subset="default", verbose=False):
assert image_set in ['train', 'test', 'val', "all"], " Name should be train, test, val or all"
self.dataset = build_pedb_dataset_jaad(jaad_anns_path, split_vids_path, image_set, subset, verbose)
self.name = image_set
self.subset = subset
def __repr__(self):
return f"JaadTransDataset(image_set={self.name}, subset={self.subset})"
def extract_trans_frame(self, mode="GO", frame_ahead=0, fps=30, verbose=False) -> dict:
dataset = self.dataset
assert mode in ["GO", "STOP"], "Transition type should be STOP or GO"
ids = list(dataset.keys())
samples = {}
j = 0
step = 30 // fps
t_ahead = step * frame_ahead
for idx in ids:
vid_id = copy.deepcopy(dataset[idx]['video_number'])
frames = copy.deepcopy(dataset[idx]['frames'])
bbox = copy.deepcopy(dataset[idx]['bbox'])
action = copy.deepcopy(dataset[idx]['action'])
cross = copy.deepcopy(dataset[idx]['cross'])
behavior = copy.deepcopy(dataset[idx]['behavior'])
traffic_light = copy.deepcopy(dataset[idx]['traffic_light'])
attributes = copy.deepcopy(dataset[idx]['attributes'])
next_transition = copy.deepcopy(dataset[idx]["next_transition"])
for i in range(len(frames)):
key = None
old_id = None
d1 = min(i, 5)
d2 = min(len(frames) - i - 1, 5)
if mode == "GO":
if next_transition[i] == 0 and action[i] == 1 and action[i - d1] == 0 and action[i + d2] == 1:
j += 1
new_id = "{:04d}".format(j) + "_" + self.name
key = "JG_" + new_id
old_id = f'{idx}/{vid_id}/' + '{:03d}'.format(frames[i])
if mode == "STOP":
if next_transition[i] == 0 and action[i] == 0 and action[i - d1] == 1 and action[i + d2] == 0:
j += 1
new_id = "{:04d}".format(j) + "_" + self.name
key = "JS_" + new_id
old_id = f'{idx}/{vid_id}/' + '{:03d}'.format(frames[i])
if key is not None and i - t_ahead * step >= 0:
samples[key] = {}
samples[key]["source"] = "JAAD"
samples[key]["old_id"] = old_id
samples[key]['video_number'] = vid_id
samples[key]['frame'] = frames[i - t_ahead]
samples[key]['bbox'] = bbox[i - t_ahead]
samples[key]['action'] = action[i - t_ahead]
samples[key]['cross'] = cross[i - t_ahead]
samples[key]['behavior'] = behavior[i - t_ahead]
samples[key]['traffic_light'] = traffic_light[i - t_ahead]
samples[key]['attributes'] = attributes
samples[key]['frame_ahead'] = frame_ahead
samples[key]['type'] = mode
samples[key]['fps'] = fps
if verbose:
print(f"Extract {len(samples.keys())} {mode} sample frames from JAAD {self.name} set")
return samples
def extract_trans_history(self, mode="GO", fps=30, max_frames=None, post_frames=0, verbose=False) -> dict:
"""
Extract the whole history of pedestrian up to the frame when transition happens
:params: mode: target transition type, "GO" or "STOP"
                 fps: frames per second, the sampling rate of extracted sequences, default 30
verbose: optional printing of sample statistics
"""
dataset = self.dataset
assert mode in ["GO", "STOP"], "Transition type should be STOP or GO"
ids = list(dataset.keys())
samples = {}
j = 0
step = 30 // fps
assert isinstance(step, int)
for idx in ids:
vid_id = copy.deepcopy(dataset[idx]['video_number'])
frames = copy.deepcopy(dataset[idx]['frames'])
bbox = copy.deepcopy(dataset[idx]['bbox'])
action = copy.deepcopy(dataset[idx]['action'])
cross = copy.deepcopy(dataset[idx]['cross'])
behavior = copy.deepcopy(dataset[idx]['behavior'])
traffic_light = copy.deepcopy(dataset[idx]['traffic_light'])
attributes = copy.deepcopy(dataset[idx]['attributes'])
next_transition = copy.deepcopy(dataset[idx]["next_transition"])
for i in range(len(frames)):
key = None
old_id = None
d1 = min(i, 5)
d2 = min(len(frames) - i - 1, 5)
if mode == "GO":
if next_transition[i] == 0 and action[i] == 1 and action[i - d1] == 0 and action[i + d2] == 1:
j += 1
new_id = "{:04d}".format(j) + "_" + self.name
key = "JG_" + new_id
old_id = idx
ae = np.array(action[i::-step])
ce = np.array(np.nonzero(ae == 1))
d_pre = ce[0][1] - 1 if ce.size > 1 else len(ae) - 1
ap = np.array(action[i::step])
cp = np.array(np.nonzero(ap == 0))
d_pos = cp[0][0] if cp.size > 0 else len(ap)
if mode == "STOP":
if next_transition[i] == 0 and action[i] == 0 and action[i - d1] == 1 and action[i + d2] == 0:
j += 1
new_id = "{:04d}".format(j) + "_" + self.name
key = "JS_" + new_id
old_id = idx
ae = np.array(action[i::-step])
ce = np.array(np.nonzero(ae == 0))
d_pre = ce[0][1] - 1 if ce.size > 1 else len(ae) - 1
ap = np.array(action[i::step])
cp = np.array(np.nonzero(ap == 1))
d_pos = cp[0][0] if cp.size > 0 else len(ap)
if key is not None:
if max_frames is None:
t = None
else:
t = i - max_frames * step if (i - max_frames * step >= 0) else None
i = i + min(post_frames, d_pos) * step
samples[key] = {}
samples[key]["source"] = "JAAD"
samples[key]["old_id"] = old_id
samples[key]['video_number'] = vid_id
samples[key]['frame'] = frames[i:t:-step]
samples[key]['frame'].reverse()
samples[key]['bbox'] = bbox[i:t:-step]
samples[key]['bbox'].reverse()
samples[key]['action'] = action[i:t:-step]
samples[key]['action'].reverse()
samples[key]['cross'] = cross[i:t:-step]
samples[key]['cross'].reverse()
samples[key]['behavior'] = behavior[i:t:-step]
samples[key]['behavior'].reverse()
samples[key]['traffic_light'] = traffic_light[i:t:-step]
samples[key]['traffic_light'].reverse()
samples[key]['attributes'] = attributes
samples[key]['pre_state'] = d_pre
samples[key]['post_state'] = d_pos
samples[key]['type'] = mode
samples[key]['fps'] = fps
if verbose:
keys = list(samples.keys())
pids = []
num_frames = 0
for k in keys:
pids.append(samples[k]['old_id'])
num_frames += len(samples[k]['frame'])
print(f"Extract {len(pids)} {mode} history samples from {self.name} dataset in JAAD ,")
print(f"samples contain {len(set(pids))} unique pedestrians and {num_frames} frames.")
return samples
def extract_non_trans(self, fps=30, max_frames=None, max_samples=None, verbose=False):
dataset = self.dataset
ids = list(dataset.keys())
samples = {'walking': {}, 'standing': {}}
step = 30 // fps
assert isinstance(step, int)
jw = 0
js = 0
for idx in ids:
vid_id = copy.deepcopy(dataset[idx]['video_number'])
frames = copy.deepcopy(dataset[idx]['frames'])
bbox = copy.deepcopy(dataset[idx]['bbox'])
action = copy.deepcopy(dataset[idx]['action'])
cross = copy.deepcopy(dataset[idx]['cross'])
behavior = copy.deepcopy(dataset[idx]['behavior'])
attributes = copy.deepcopy(dataset[idx]['attributes'])
traffic_light = copy.deepcopy(dataset[idx]['traffic_light'])
a = np.array(action) # action array
key = None
action_type = None
old_id = None
if a[a < 0.5].size == 0: # all walking
jw += 1
new_id = "{:04d}".format(jw) + "_" + self.name
key = "JW_" + new_id
old_id = idx
action_type = 'walking'
elif a[a > 0.5].size == 0: # all standing
js += 1
new_id = "{:04d}".format(js) + "_" + self.name
key = "JN_" + new_id
old_id = idx
action_type = 'standing'
if max_frames is None:
t = None
else:
t = len(frames) - max_frames * step if (len(frames) - max_frames * step >= 0) else None
if key is not None:
samples[action_type][key] = {}
samples[action_type][key]["source"] = "JAAD"
samples[action_type][key]["old_id"] = old_id
samples[action_type][key]['video_number'] = vid_id
samples[action_type][key]['frame'] = frames[-1:t:-step]
samples[action_type][key]['frame'].reverse()
samples[action_type][key]['bbox'] = bbox[-1:t:-step]
samples[action_type][key]['bbox'].reverse()
samples[action_type][key]['action'] = action[-1:t:-step]
samples[action_type][key]['action'].reverse()
samples[action_type][key]['cross'] = cross[-1:t:-step]
samples[action_type][key]['cross'].reverse()
samples[action_type][key]['behavior'] = behavior[-1:t:-step]
samples[action_type][key]['behavior'].reverse()
samples[action_type][key]['traffic_light'] = traffic_light[-1:t:-step]
samples[action_type][key]['traffic_light'].reverse()
samples[action_type][key]['attributes'] = attributes
samples[action_type][key]['action_type'] = action_type
samples[action_type][key]['fps'] = fps
samples_new = {'walking': {}, 'standing': {}}
if max_samples is not None:
keys_w = list(samples['walking'].keys())[:max_samples]
keys_s = list(samples['standing'].keys())[:max_samples]
for kw in keys_w:
samples_new['walking'][kw] = samples['walking'][kw]
for ks in keys_s:
samples_new['standing'][ks] = samples['standing'][ks]
else:
samples_new = samples
if verbose:
keys_w = list(samples_new['walking'].keys())
keys_s = list(samples_new['standing'].keys())
pid_w = []
pid_s = []
n_w = 0
n_s = 0
for kw in keys_w:
pid_w.append(samples_new['walking'][kw]['old_id'])
n_w += len(samples_new['walking'][kw]['frame'])
for ks in keys_s:
pid_s.append(samples_new['standing'][ks]['old_id'])
n_s += len(samples_new['standing'][ks]['frame'])
print(f"Extract Non-transition samples from {self.name} dataset in JAAD :")
print(f"Walking: {len(pid_w)} samples, {len(set(pid_w))} unique pedestrians and {n_w} frames.")
print(f"Standing: {len(pid_s)} samples, {len(set(pid_s))} unique pedestrians and {n_s} frames.")
return samples_new
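    # Hedged usage sketch (same hypothetical `jaad` instance as above):
    #     non_trans = jaad.extract_non_trans(fps=30, max_samples=100, verbose=True)
    #     walking, standing = non_trans['walking'], non_trans['standing']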
avg_line_length: 46.151064 | max_line_length: 119 | alphanum_fraction: 0.533447

hexsha: 5a92951a5bf91063b94691a38c939cef4b7b0c16 | size: 1,830 | ext: py | lang: Python
max_stars_repo_path: km_api/functional_tests/know_me/journal/serialization_helpers.py
max_stars_repo_name: knowmetools/km-api | max_stars_repo_head_hexsha: e4b72484c42e88a6c0087c9b1d5fef240e66cbb0
max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: 4
max_stars_repo_stars_event_min_datetime: 2017-08-03T00:46:31.000Z | max_stars_repo_stars_event_max_datetime: 2018-11-06T03:32:32.000Z
max_issues_repo_path: km_api/functional_tests/know_me/journal/serialization_helpers.py
max_issues_repo_name: knowmetools/km-api | max_issues_repo_head_hexsha: e4b72484c42e88a6c0087c9b1d5fef240e66cbb0
max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: 526
max_issues_repo_issues_event_min_datetime: 2017-06-27T18:13:59.000Z | max_issues_repo_issues_event_max_datetime: 2021-06-10T18:00:21.000Z
max_forks_repo_path: km_api/functional_tests/know_me/journal/serialization_helpers.py
max_forks_repo_name: knowmetools/km-api | max_forks_repo_head_hexsha: e4b72484c42e88a6c0087c9b1d5fef240e66cbb0
max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: 1
max_forks_repo_forks_event_min_datetime: 2017-07-10T19:46:27.000Z | max_forks_repo_forks_event_max_datetime: 2017-07-10T19:46:27.000Z
content:
from functional_tests.serialization_helpers import (
user_info,
build_full_file_url,
)
from test_utils import serialized_time
def serialize_comment(comment, build_full_url, is_list=False):
"""
Serialize a comment on a journal entry.
"""
def serialize(value):
return {
"id": value.pk,
"url": build_full_url(f"/know-me/journal/comments/{value.pk}/"),
"created_at": serialized_time(value.created_at),
"updated_at": serialized_time(value.updated_at),
"permissions": {"destroy": True, "read": True, "write": False},
"text": value.text,
"user": user_info(value.user),
}
if is_list:
return list(map(serialize, comment))
return serialize(comment)
def serialize_entry(entry, build_full_url):
"""
Serialize a journal entry.
Args:
entry:
The entry to serialize.
build_full_url:
The method to use to convert an absolute URL into a full
URI.
Returns:
        The serialized version of the provided entry.
"""
return {
"id": entry.pk,
"url": build_full_url(f"/know-me/journal/entries/{entry.pk}/"),
"created_at": serialized_time(entry.created_at),
"updated_at": serialized_time(entry.updated_at),
"attachment": build_full_file_url(entry.attachment, build_full_url),
"comment_count": entry.comments.count(),
"comments": serialize_comment(
entry.comments.all(), build_full_url, is_list=True
),
"comments_url": build_full_url(
f"/know-me/journal/entries/{entry.pk}/comments/"
),
"km_user_id": entry.km_user.pk,
"permissions": {"read": True, "write": True},
"text": entry.text,
}
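# Hedged usage sketch -- `entry` is a hypothetical journal-entry model instance
# and `build_full_url` a URL builder supplied by the functional-test client:
#     payload = serialize_entry(entry, build_full_url)
#     assert payload["comment_count"] == entry.comments.count()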
avg_line_length: 30 | max_line_length: 76 | alphanum_fraction: 0.608197

hexsha: bfd9a1e280908acc6eb074e8c9f3e14466f5b4ec | size: 3,807 | ext: py | lang: Python
max_stars_repo_path: allennlp/training/metrics/auc.py
max_stars_repo_name: prateekkolhar/allennlp | max_stars_repo_head_hexsha: b0aab6689e04271c8478cb662fd64384b1ab0298
max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: allennlp/training/metrics/auc.py
max_issues_repo_name: prateekkolhar/allennlp | max_issues_repo_head_hexsha: b0aab6689e04271c8478cb662fd64384b1ab0298
max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: allennlp/training/metrics/auc.py
max_forks_repo_name: prateekkolhar/allennlp | max_forks_repo_head_hexsha: b0aab6689e04271c8478cb662fd64384b1ab0298
max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
from typing import Optional
from overrides import overrides
import torch
from sklearn import metrics
from allennlp.common.checks import ConfigurationError
from allennlp.training.metrics.metric import Metric
@Metric.register("auc")
class Auc(Metric):
"""
The AUC Metric measures the area under the receiver-operating characteristic
(ROC) curve for binary classification problems.
"""
def __init__(self, positive_label=1):
super(Auc, self).__init__()
self._positive_label = positive_label
self._all_predictions = torch.FloatTensor()
self._all_gold_labels = torch.LongTensor()
def __call__(self,
predictions: torch.Tensor,
gold_labels: torch.Tensor,
mask: Optional[torch.Tensor] = None):
"""
Parameters
----------
predictions : ``torch.Tensor``, required.
A one-dimensional tensor of prediction scores of shape (batch_size).
gold_labels : ``torch.Tensor``, required.
A one-dimensional label tensor of shape (batch_size), with {1, 0}
entries for positive and negative class. If it's not binary,
`positive_label` should be passed in the initialization.
mask: ``torch.Tensor``, optional (default = None).
A one-dimensional label tensor of shape (batch_size).
"""
predictions, gold_labels, mask = self.unwrap_to_tensors(predictions, gold_labels, mask)
# Sanity checks.
if gold_labels.dim() != 1:
raise ConfigurationError("gold_labels must be one-dimensional, "
"but found tensor of shape: {}".format(gold_labels.size()))
if predictions.dim() != 1:
raise ConfigurationError("predictions must be one-dimensional, "
"but found tensor of shape: {}".format(predictions.size()))
unique_gold_labels = torch.unique(gold_labels)
if unique_gold_labels.numel() > 2:
raise ConfigurationError("AUC can be used for binary tasks only. gold_labels has {} unique labels, "
"expected at maximum 2.".format(unique_gold_labels.numel()))
gold_labels_is_binary = list(torch.sort(unique_gold_labels)[0].numpy()) == [0, 1]
if not gold_labels_is_binary and self._positive_label not in unique_gold_labels:
raise ConfigurationError("gold_labels should be binary with 0 and 1 or initialized positive_label "
"{} should be present in gold_labels".format(self._positive_label))
if mask is None:
batch_size = gold_labels.shape[0]
mask = torch.ones(batch_size)
mask = mask.byte()
self._all_predictions = torch.cat([self._all_predictions,
torch.masked_select(predictions, mask).float()], dim=0)
self._all_gold_labels = torch.cat([self._all_gold_labels,
torch.masked_select(gold_labels, mask).long()], dim=0)
def get_metric(self, reset: bool = False):
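        # No observations accumulated yet: fall back to chance-level AUC.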
if self._all_gold_labels.shape[0] == 0:
return 0.5
false_positive_rates, true_positive_rates, _ = metrics.roc_curve(self._all_gold_labels.numpy(),
self._all_predictions.numpy(),
pos_label=self._positive_label)
auc = metrics.auc(false_positive_rates, true_positive_rates)
if reset:
self.reset()
return auc
@overrides
def reset(self):
self._all_predictions = torch.FloatTensor()
self._all_gold_labels = torch.LongTensor()
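# Hedged usage sketch (toy tensors, illustrative only):
#     auc = Auc()
#     auc(torch.tensor([0.1, 0.9, 0.8]), torch.tensor([0, 1, 1]))
#     auc.get_metric(reset=True)   # -> 1.0 for this perfectly separable case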
avg_line_length: 44.788235 | max_line_length: 112 | alphanum_fraction: 0.603625

hexsha: 25b10133191788cd85085fd4612ae7cea0f122f3 | size: 14,125 | ext: py | lang: Python
max_stars_repo_path: python/paddle/distributed/fleet/launch.py
max_stars_repo_name: TochkaAI/Paddle | max_stars_repo_head_hexsha: 481ee79fc92304f33165f7ed0679f16c36862cea
max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: 3
max_stars_repo_stars_event_min_datetime: 2021-06-08T14:24:36.000Z | max_stars_repo_stars_event_max_datetime: 2021-06-08T14:24:38.000Z
max_issues_repo_path: python/paddle/distributed/fleet/launch.py
max_issues_repo_name: chenyanlei1/Paddle | max_issues_repo_head_hexsha: f249a5f05f0f5832279244d88c8cb4eaaad1fbd4
max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: python/paddle/distributed/fleet/launch.py
max_forks_repo_name: chenyanlei1/Paddle | max_forks_repo_head_hexsha: f249a5f05f0f5832279244d88c8cb4eaaad1fbd4
max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: 1
max_forks_repo_forks_event_min_datetime: 2021-08-04T14:28:58.000Z | max_forks_repo_forks_event_max_datetime: 2021-08-04T14:28:58.000Z
content:
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""
fleetrun is a module that spawns multiple distributed
process on each training node for gpu training and cpu training.
Usage:
In both of single node training or multiple node training, this module
launch a process on each of the given gpu card or cpu machine.
GPU training:
1. for single node training with all visible gpu cards:
fleetrun your_training_py (arg1 arg2 and all others)
2. for single node training with [0,4) cards
fleetrun --gpus="0,1,2,3" your_training_py (arg1 arg2 and all others)
3. for multiple node training such as two node:192.168.0.16, 192.168.0.17
on 192.168.0.16:
fleetrun --ips="192.168.0.16,192.168.0.17" \
your_training_py (arg1 arg2 and all others)
on 192.168.0.17:
fleetrun --ips="192.168.0.16,192.168.0.17" \
your_training_py (arg1 arg2 and all others)
CPU training:
1. for single node training with multi servers and workers:
fleetrun --server_num=2 --worker_num=2 your_training_py (arg1 arg2 and all others)
2. for multiple node training such as two node:192.168.0.16, 192.168.0.17 \
with 2 servers and 4 workers.
on 192.168.0.16:
fleetrun --servers="192.168.0.16:6170,192.168.0.17:6170" \
--workers="192.168.0.16,192.168.0.17,192.168.0.16,192.168.0.17" \
your_training_py (arg1 arg2 and all others)
on 192.168.0.17:
fleetrun --servers="192.168.0.16:6170,192.168.0.17:6171" \
--workers="192.168.0.16,192.168.0.17,192.168.0.16,192.168.0.17" \
your_training_py (arg1 arg2 and all others)
3. use gloo backend for multiple node training such as two node:192.168.0.16, 192.168.0.17 \
with 2 servers and 4 workers. (workers should set port)
on 192.168.0.16:
fleetrun --servers="192.168.0.16:6170,192.168.0.17:6170" \
--workers="192.168.0.16:6171,192.168.0.17:6171,192.168.0.16:6172,192.168.0.17:6172" \
your_training_py (arg1 arg2 and all others)
on 192.168.0.17:
fleetrun --servers="192.168.0.16:6170,192.168.0.17:6170" \
--workers="192.168.0.16:6171,192.168.0.17:6171,192.168.0.16:6172,192.168.0.17:6172" \
your_training_py (arg1 arg2 and all others)
"""
from __future__ import print_function
import shutil
import sys
import tempfile
from sys import version
import subprocess
import os
import time
import six
import copy
from argparse import ArgumentParser, REMAINDER
import paddle
import paddle.fluid as fluid
from paddle.distributed.fleet import launch_utils
# TODO(danleifeng): Don't import * from a module
from paddle.distributed.fleet.launch_utils import *
import paddle.distributed.fleet.cloud_utils as cloud_utils
import paddle.distributed.fleet.ascend_utils as ascend_utils
__all__ = []
def _print_arguments(args):
print("----------- Configuration Arguments -----------")
for arg, value in sorted(six.iteritems(vars(args))):
print("%s: %s" % (arg, value))
print("------------------------------------------------")
def _parse_args():
"""
Helper function parsing the command line options
@retval ArgumentParser
"""
parser = ArgumentParser(
description='''start paddle training using multi-process mode.
see: http://www.paddlepaddle.org/documentation/docs/zh/1.6/user_guides/howto/training/cluster_howto.html#permalink-8--nccl2-
''')
base_group = parser.add_argument_group("Base Parameters")
base_group.add_argument(
"--log_dir",
type=str,
default="log",
help="The path for each process's log.If it's not set, the log will printed to default pipe."
)
base_group.add_argument(
"--nproc_per_node",
type=int,
default=None,
help="The number of processes to launch on a node."
"In gpu training, it should be less or equal to the gpus number of you system(or you set by --gpus). And so each process can"
" bound to one or average number of gpus.")
base_group.add_argument(
"--run_mode",
type=str,
default=None,
help="run mode of job, can be:collective/ps/ps-heter")
if fluid.core.is_compiled_with_cuda():
base_group.add_argument(
"--gpus",
type=str,
default=None,
help="It's for gpu training."
"For example:"
"--gpus=\"0,1,2,3\" will launch four training processes each bound to one gpu."
)
base_group.add_argument("--selected_gpus", dest="gpus")
if fluid.core.is_compiled_with_xpu():
base_group.add_argument(
"--xpus",
type=str,
default=None,
help="It's for xpu training. For example: "
"--xpus=\"0,1,2,3\" will launch four training processes each bound to one xpu."
)
base_group.add_argument("--selected_xpus", dest="xpus")
base_group.add_argument(
"training_script",
type=str,
help="The full path to the single GPU training "
"program/script to be launched in parallel, "
"followed by all the arguments for the "
"training script")
base_group.add_argument('training_script_args', nargs=REMAINDER)
# Optional arguments for the launch helper
# for collective
collective_group = parser.add_argument_group("Collective Parameters")
collective_group.add_argument(
"--ips",
type=str,
default="127.0.0.1",
help="Paddle cluster nodes ips, such as 192.168.0.16,192.168.0.17..")
ps_group = parser.add_argument_group("Parameter-Server Parameters")
# for parameter server
ps_group.add_argument(
"--servers", type=str, default="", help="User defined servers ip:port")
ps_group.add_argument(
"--workers", type=str, default="", help="User defined workers ip:port")
ps_group.add_argument(
"--heter_workers",
type=str,
default="",
help="User defined heter workers ip:port")
ps_group.add_argument("--worker_num", type=int, help="number of workers")
ps_group.add_argument("--server_num", type=int, help="number of servers")
ps_group.add_argument(
"--heter_worker_num", type=int, help="number of heter_workers")
ps_group.add_argument("--http_port", type=int, help="Gloo http Port")
return parser.parse_args()
def get_cluster_from_args(args, device_mode, devices_per_proc):
node_ips = [x.strip() for x in args.ips.split(',')]
if len(node_ips) == 1:
node_ip = node_ips[0]
else:
_, node_ip = get_host_name_ip()
assert node_ip in node_ips, "Can't find your local ip {%s} in node_ips: {%s}" \
% (node_ip, node_ips)
node_rank = node_ips.index(node_ip)
logger.debug("parsed from args: node_ips:{} node_ip:{} node_rank:{}".format(
node_ips, node_ip, node_rank))
free_ports = None
if not cloud_utils.use_paddlecloud() and len(
node_ips) <= 1 and os.environ.get('FLAGS_START_PORT') is None:
free_ports = find_free_ports(len(devices_per_proc))
if free_ports is not None:
free_ports = list(free_ports)
else:
start_port = 6070
if os.environ.get('FLAGS_START_PORT') is not None:
start_port = int(os.environ.get('FLAGS_START_PORT'))
free_ports = [
x for x in range(start_port, start_port + len(devices_per_proc))
]
trainer_endpoints = []
for ip in node_ips:
trainer_endpoints.append(["%s:%d" % (ip, port) for port in free_ports])
return get_cluster(node_ips, node_ip, trainer_endpoints, device_mode,
devices_per_proc)
def launch_collective(args):
# parse arguments, used for cloud-single-machine and local
(device_mode, devices_per_proc) = launch_utils.get_device_proc_info(args)
trainers_num = cloud_utils.get_trainers_num()
logger.debug("parsed from args trainerss_num:{} mode:{} devices:{}".format(
trainers_num, device_mode, devices_per_proc))
cluster = None
pod = None
start_port = 6170
if os.environ.get('FLAGS_START_PORT') is not None:
        start_port = int(os.environ.get('FLAGS_START_PORT'))
if cloud_utils.use_paddlecloud() and trainers_num != 1:
cluster, pod = cloud_utils.get_cloud_cluster(
args.ips, device_mode, devices_per_proc, start_port)
logger.debug("get cluster from cloud:{}".format(cluster))
elif device_mode == DeviceMode.ASCEND_NPU:
# for ascend
cluster, pod = ascend_utils.get_cloud_cluster(
rank_table_file=os.getenv("RANK_TABLE_FILE", None),
device_mode=device_mode,
start_port=start_port)
else:
# trainers_num = 1 or not use paddlecloud ips="a,b"
cluster, pod = get_cluster_from_args(args, device_mode,
devices_per_proc)
logger.debug("get cluster from args:{}".format(cluster))
global_envs = copy.copy(os.environ.copy())
gloo_rendezvous_dir = tempfile.mkdtemp()
# add gloo env
global_envs["PADDLE_WITH_GLOO"] = str(os.getenv("PADDLE_WITH_GLOO", "0"))
global_envs["PADDLE_GLOO_RENDEZVOUS"] = "3"
global_envs["PADDLE_GLOO_FS_PATH"] = gloo_rendezvous_dir
procs = start_local_trainers(
cluster,
pod,
training_script=args.training_script,
training_script_args=args.training_script_args,
log_dir=args.log_dir,
envs=global_envs)
for idx, proc in enumerate(procs):
print("launch proc_id:{} idx:{}".format(proc.proc.pid, idx))
while True:
alive = watch_local_trainers(procs, cluster.trainers_nranks())
if not alive:
logger.info("Local processes completed.")
logger.debug("POD info:{}".format(pod))
break
time.sleep(3)
if os.path.exists(gloo_rendezvous_dir):
shutil.rmtree(gloo_rendezvous_dir)
def launch_ps(args, distribute_mode):
cloud_flag = cloud_utils.use_paddlecloud()
# for ps-cpu on paddlecloud
if cloud_flag and distribute_mode == DistributeMode.PS:
direct_start(args)
return
elif cloud_flag and distribute_mode == DistributeMode.PS_HETER:
cloud_ps_heter_env_set(args)
args.workers = os.getenv("PADDLE_TRAINER_ENDPOINTS")
args.servers = os.getenv("PADDLE_PSERVERS_IP_PORT_LIST")
args.heter_workers = os.getenv("PADDLE_HETER_TRAINER_IP_PORT_LIST")
ps_launcher = ParameterServerLauncher(args, distribute_mode)
ps_launcher.start_ps()
return
def which_distributed_mode(args):
if args.run_mode is not None:
assert args.run_mode in ["collective", "ps", "ps-heter"]
if args.run_mode == "collective":
return DistributeMode.COLLECTIVE
elif args.run_mode == "ps":
return DistributeMode.PS
elif args.run_mode == "ps-heter":
return DistributeMode.PS_HETER
ps_args = [
'--worker_num', '--server_num', '--heter_worker_num', '--servers',
'--workers', '--heter_workers', '--http_port'
]
collective_args = ['--ips']
ps_heter_args = ["--heter_worker_num", "--heter_workers"]
has_ps_args = [
ps_arg for ps_arg in ps_args if ps_arg in " ".join(sys.argv[1:-1])
]
has_collective_args = [
co_arg for co_arg in collective_args
if co_arg in " ".join(sys.argv[1:-1])
]
    if len(has_ps_args) > 0 and len(has_collective_args) > 0:
raise ValueError(
"Only one mode(Collective or Parameter-Server) can be selected at the same time, but more than one configuration was received."
)
if fluid.core.is_compiled_with_cuda():
accelerators = fluid.core.get_cuda_device_count()
elif fluid.core.is_compiled_with_npu():
accelerators = fluid.core.get_npu_device_count()
elif fluid.core.is_compiled_with_xpu():
accelerators = fluid.core.get_xpu_device_count()
else:
accelerators = 0
if len(has_ps_args) > 0:
logger.info(
"Run parameter-sever mode. pserver arguments:{}, accelerators count:{}".
format(has_ps_args, accelerators))
has_ps_heter_args = list(set(has_ps_args) & set(ps_heter_args))
if len(has_ps_heter_args) > 0:
return DistributeMode.PS_HETER
else:
return DistributeMode.PS
elif len(has_collective_args) > 0:
logger.info("Run collective mode. gpu arguments:{}, cuda count:{}".
format(has_collective_args, accelerators))
return DistributeMode.COLLECTIVE
else:
if not fluid.core.is_compiled_with_cuda(
) and not fluid.core.is_compiled_with_xpu():
logger.warning(
"Not found distinct arguments and not compiled with cuda or xpu. Default use ps mode"
)
return DistributeMode.PS
else:
logger.warning(
"Not found distinct arguments and compiled with cuda or xpu. Default use collective mode"
)
return DistributeMode.COLLECTIVE
def launch():
args = _parse_args()
logger = get_logger()
_print_arguments(args)
distribute_mode = which_distributed_mode(args)
if distribute_mode == DistributeMode.COLLECTIVE:
launch_collective(args)
else:
launch_ps(args, distribute_mode)
if __name__ == "__main__":
launch()
avg_line_length: 37.367725 | max_line_length: 139 | alphanum_fraction: 0.650478

hexsha: 6732283fc99c5437c07d5401b7fc0e82b5da0db5 | size: 23,222 | ext: py | lang: Python
max_stars_repo_path: tamil/utf8.py
max_stars_repo_name: subramani95/open-tamil | max_stars_repo_head_hexsha: eadb7192b685a5cdd6b1c86e7157b92a94a52e4d
max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: tamil/utf8.py
max_issues_repo_name: subramani95/open-tamil | max_issues_repo_head_hexsha: eadb7192b685a5cdd6b1c86e7157b92a94a52e4d
max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: tamil/utf8.py
max_forks_repo_name: subramani95/open-tamil | max_forks_repo_head_hexsha: eadb7192b685a5cdd6b1c86e7157b92a94a52e4d
max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
## This Python file uses the following encoding: utf-8
##
## (C) 2007, 2008, 2013, 2015, 2016 Muthiah Annamalai <ezhillang@gmail.com>
## (C) 2013 msathia <msathia@gmail.com>
##
## This file is dual licensed - originally GPL v3 from Ezhil, and
## then as part of open-tamil package in MIT license.
##
## Licensed under GPL Version 3
from sys import version
from copy import copy
import re
PYTHON3 = version > '3'
del version
## constants
TA_ACCENT_LEN = 13 #12 + 1
TA_AYUDHA_LEN = 1
TA_UYIR_LEN = 12
TA_MEI_LEN = 18
TA_AGARAM_LEN = 18
TA_SANSKRIT_LEN = 6
TA_UYIRMEI_LEN = 216
TA_GRANTHA_UYIRMEI_LEN = 24*12
TA_LETTERS_LEN = 247 + 6*12 + 22 + 4 - TA_AGARAM_LEN - 4 #323
def to_unicode_repr( _letter ):
""" helpful in situations where browser/app may recognize Unicode encoding
in the \u0b8e type syntax but not actual unicode glyph/code-point"""
# Python 2-3 compatible
return u"u'"+ u"".join( [ u"\\u%04x"%ord(l) for l in _letter ] ) + u"'"
def letters_to_py( _letters ):
""" return list of letters e.g. uyir_letters as a Python list """
return u"[u'"+u"',u'".join( _letters )+u"']"
# List of letters you can use
uyir_letters = [u"அ",u"ஆ",u"இ",
u"ஈ",u"உ",u"ஊ",u"எ",u"ஏ",u"ஐ",u"ஒ",u"ஓ",u"ஔ"]
ayudha_letter = u"ஃ"
kuril_letters = [u"அ", u"இ", u"உ", u"எ", u"ஒ"]
nedil_letters = [u"ஆ", u"ஈ", u"ஊ", u"ஏ", u"ஓ"]
vallinam_letters = [u"க்", u"ச்", u"ட்", u"த்", u"ப்", u"ற்"]
mellinam_letters = [u"ங்", u"ஞ்", u"ண்", u"ந்", u"ம்", u"ன்"]
idayinam_letters = [u"ய்", u"ர்", u"ல்", u"வ்", u"ழ்", u"ள்"]
mei_letters = [u"க்",u"ச்",u"ட்",u"த்",u"ப்",u"ற்",
u"ஞ்",u"ங்",u"ண்",u"ந்",u"ம்",u"ன்",
u"ய்",u"ர்",u"ல்",u"வ்",u"ழ்",u"ள்" ]
accent_symbols = [u"",u"ா",u"ி",u"ீ",u"ு",u"ூ",
u"ெ",u"ே",u"ை",u"ொ",u"ோ",u"ௌ",u"ஃ"]
pulli_symbols = [u"்"]
agaram_letters = [u"க",u"ச",u"ட",u"த",u"ப",u"ற",
u"ஞ",u"ங",u"ண",u"ந",u"ம",u"ன",
u"ய",u"ர",u"ல",u"வ",u"ழ",u"ள"]
sanskrit_letters = [u"ஶ",u"ஜ",u"ஷ", u"ஸ",u"ஹ",u"க்ஷ"]
sanskrit_mei_letters =[u"ஶ்",u"ஜ்",u"ஷ்", u"ஸ்",u"ஹ்",u"க்ஷ்"]
grantha_mei_letters = copy(mei_letters)
grantha_mei_letters.extend(sanskrit_mei_letters)
grantha_agaram_letters = copy(agaram_letters)
grantha_agaram_letters.extend(sanskrit_letters)
uyirmei_letters = [
u"க" ,u"கா" ,u"கி" ,u"கீ" ,u"கு" ,u"கூ" ,u"கெ" ,u"கே" ,u"கை" ,u"கொ" ,u"கோ" ,u"கௌ" ,
u"ச" ,u"சா" ,u"சி" ,u"சீ" ,u"சு" ,u"சூ" ,u"செ" ,u"சே" ,u"சை" ,u"சொ" ,u"சோ" ,u"சௌ" ,
u"ட" ,u"டா" ,u"டி" ,u"டீ" ,u"டு" ,u"டூ" ,u"டெ" ,u"டே" ,u"டை" ,u"டொ" ,u"டோ" ,u"டௌ",
u"த" ,u"தா" ,u"தி" ,u"தீ" ,u"து" ,u"தூ" ,u"தெ" ,u"தே" ,u"தை" ,u"தொ" ,u"தோ" ,u"தௌ",
u"ப" ,u"பா" ,u"பி" ,u"பீ" ,u"பு" ,u"பூ" ,u"பெ" ,u"பே" ,u"பை" ,u"பொ" ,u"போ" ,u"பௌ" ,
u"ற" ,u"றா" ,u"றி" ,u"றீ" ,u"று" ,u"றூ" ,u"றெ" ,u"றே" ,u"றை" ,u"றொ" ,u"றோ" ,u"றௌ",
u"ஞ" ,u"ஞா" ,u"ஞி" ,u"ஞீ" ,u"ஞு" ,u"ஞூ" ,u"ஞெ" ,u"ஞே" ,u"ஞை" ,u"ஞொ" ,u"ஞோ" ,u"ஞௌ" ,
u"ங" ,u"ஙா" ,u"ஙி" ,u"ஙீ" ,u"ஙு" ,u"ஙூ" ,u"ஙெ" ,u"ஙே" ,u"ஙை" ,u"ஙொ" ,u"ஙோ" ,u"ஙௌ" ,
u"ண" ,u"ணா" ,u"ணி" ,u"ணீ" ,u"ணு" ,u"ணூ" ,u"ணெ" ,u"ணே" ,u"ணை" ,u"ணொ" ,u"ணோ" ,u"ணௌ" ,
u"ந" ,u"நா" ,u"நி" ,u"நீ" ,u"நு" ,u"நூ" ,u"நெ" ,u"நே" ,u"நை" ,u"நொ" ,u"நோ" ,u"நௌ" ,
u"ம" ,u"மா" ,u"மி" ,u"மீ" ,u"மு" ,u"மூ" ,u"மெ" ,u"மே" ,u"மை" ,u"மொ" ,u"மோ" ,u"மௌ" ,
u"ன" ,u"னா" ,u"னி" ,u"னீ" ,u"னு" ,u"னூ" ,u"னெ" ,u"னே" ,u"னை" ,u"னொ" ,u"னோ" ,u"னௌ",
u"ய" ,u"யா" ,u"யி" ,u"யீ" ,u"யு" ,u"யூ" ,u"யெ" ,u"யே" ,u"யை" ,u"யொ" ,u"யோ" ,u"யௌ",
u"ர" ,u"ரா" ,u"ரி" ,u"ரீ" ,u"ரு" ,u"ரூ" ,u"ரெ" ,u"ரே" ,u"ரை" ,u"ரொ" ,u"ரோ" ,u"ரௌ",
u"ல" ,u"லா" ,u"லி" ,u"லீ" ,u"லு" ,u"லூ" ,u"லெ" ,u"லே" ,u"லை" ,u"லொ" ,u"லோ" ,u"லௌ" ,
u"வ" ,u"வா" ,u"வி" ,u"வீ" ,u"வு" ,u"வூ" ,u"வெ" ,u"வே" ,u"வை" ,u"வொ" ,u"வோ" ,u"வௌ" ,
u"ழ" ,u"ழா" ,u"ழி" ,u"ழீ" ,u"ழு" ,u"ழூ" ,u"ழெ" ,u"ழே" ,u"ழை" ,u"ழொ" ,u"ழோ" ,u"ழௌ" ,
u"ள" ,u"ளா" ,u"ளி" ,u"ளீ" ,u"ளு" ,u"ளூ" ,u"ளெ" ,u"ளே" ,u"ளை" ,u"ளொ" ,u"ளோ" ,u"ளௌ" ]
# Ref: https://en.wikipedia.org/wiki/Tamil_numerals
# tamil digits : Apart from the numerals (0-9), Tamil also has numerals for 10, 100 and 1000.
tamil_digit_1to10 = [u"௦", u"௧", u"௨",u"௩",u"௪",u"௫",u"௬",u"௭",u"௮",u"௯",u"௰"]
tamil_digit_100 = u"௱"
tamil_digit_1000 = u"௲"
# tamil symbols
_day = u"௳"
_month = u"௴"
_year = u"௵"
_debit = u"௶"
_credit = u"௷"
_rupee = u"௹"
_numeral = u"௺"
_sri = u"\u0bb6\u0bcd\u0bb0\u0bc0" #SRI - ஶ்ரீ
_ksha = u"\u0b95\u0bcd\u0bb7" #KSHA - க்ஷ
_ksh = u"\u0b95\u0bcd\u0bb7\u0bcd" #KSH - க்ஷ்
tamil_symbols = [_day, _month, _year, _debit, _credit, _rupee, _numeral, _sri, _ksha, _ksh]
## total tamil letters in use, including sanskrit letters
tamil_letters = [
## /* Uyir */
u"அ",u"ஆ",u"இ", u"ஈ",u"உ",u"ஊ",u"எ",u"ஏ",u"ஐ",u"ஒ",u"ஓ",u"ஔ",
##/* Ayuda Ezhuthu */
u"ஃ",
## /* Mei */
u"க்",u"ச்",u"ட்",u"த்",u"ப்",u"ற்",u"ஞ்",u"ங்",u"ண்",u"ந்",u"ம்",u"ன்",u"ய்",u"ர்",u"ல்",u"வ்",u"ழ்",u"ள்",
## /* Agaram */
## u"க",u"ச",u"ட",u"த",u"ப",u"ற",u"ஞ",u"ங",u"ண",u"ந",u"ம",u"ன",u"ய",u"ர",u"ல",u"வ",u"ழ",u"ள",
## /* Sanskrit (Vada Mozhi) */
## u"ஜ",u"ஷ", u"ஸ",u"ஹ",
##/* Sanskrit (Mei) */
u"ஜ்",u"ஷ்", u"ஸ்",u"ஹ்",
## /* Uyir Mei */
u"க" ,u"கா" ,u"கி" ,u"கீ" ,u"கு" ,u"கூ" ,u"கெ" ,u"கே" ,u"கை" ,u"கொ" ,u"கோ" ,u"கௌ"
,u"ச" ,u"சா" ,u"சி" ,u"சீ" ,u"சு" ,u"சூ" ,u"செ" ,u"சே" ,u"சை" ,u"சொ" ,u"சோ" ,u"சௌ"
,u"ட" ,u"டா" ,u"டி" ,u"டீ" ,u"டு" ,u"டூ" ,u"டெ" ,u"டே" ,u"டை" ,u"டொ" ,u"டோ" ,u"டௌ"
,u"த" ,u"தா" ,u"தி" ,u"தீ" ,u"து" ,u"தூ" ,u"தெ" ,u"தே" ,u"தை" ,u"தொ" ,u"தோ" ,u"தௌ"
,u"ப" ,u"பா" ,u"பி" ,u"பீ" ,u"பு" ,u"பூ" ,u"பெ" ,u"பே" ,u"பை" ,u"பொ" ,u"போ" ,u"பௌ"
,u"ற" ,u"றா" ,u"றி" ,u"றீ" ,u"று" ,u"றூ" ,u"றெ" ,u"றே" ,u"றை" ,u"றொ" ,u"றோ" ,u"றௌ"
,u"ஞ" ,u"ஞா" ,u"ஞி" ,u"ஞீ" ,u"ஞு" ,u"ஞூ" ,u"ஞெ" ,u"ஞே" ,u"ஞை" ,u"ஞொ" ,u"ஞோ" ,u"ஞௌ"
,u"ங" ,u"ஙா" ,u"ஙி" ,u"ஙீ" ,u"ஙு" ,u"ஙூ" ,u"ஙெ" ,u"ஙே" ,u"ஙை" ,u"ஙொ" ,u"ஙோ" ,u"ஙௌ"
,u"ண" ,u"ணா" ,u"ணி" ,u"ணீ" ,u"ணு" ,u"ணூ" ,u"ணெ" ,u"ணே" ,u"ணை" ,u"ணொ" ,u"ணோ" ,u"ணௌ"
,u"ந" ,u"நா" ,u"நி" ,u"நீ" ,u"நு" ,u"நூ" ,u"நெ" ,u"நே" ,u"நை" ,u"நொ" ,u"நோ" ,u"நௌ"
,u"ம" ,u"மா" ,u"மி" ,u"மீ" ,u"மு" ,u"மூ" ,u"மெ" ,u"மே" ,u"மை" ,u"மொ" ,u"மோ" ,u"மௌ"
,u"ன" ,u"னா" ,u"னி" ,u"னீ" ,u"னு" ,u"னூ" ,u"னெ" ,u"னே" ,u"னை" ,u"னொ" ,u"னோ" ,u"னௌ"
,u"ய" ,u"யா" ,u"யி" ,u"யீ" ,u"யு" ,u"யூ" ,u"யெ" ,u"யே" ,u"யை" ,u"யொ" ,u"யோ" ,u"யௌ"
,u"ர" ,u"ரா" ,u"ரி" ,u"ரீ" ,u"ரு" ,u"ரூ" ,u"ரெ" ,u"ரே" ,u"ரை" ,u"ரொ" ,u"ரோ" ,u"ரௌ"
,u"ல" ,u"லா" ,u"லி" ,u"லீ" ,u"லு" ,u"லூ" ,u"லெ" ,u"லே" ,u"லை" ,u"லொ" ,u"லோ" ,u"லௌ"
,u"வ" ,u"வா" ,u"வி" ,u"வீ" ,u"வு" ,u"வூ" ,u"வெ" ,u"வே" ,u"வை" ,u"வொ" ,u"வோ" ,u"வௌ"
,u"ழ" ,u"ழா" ,u"ழி" ,u"ழீ" ,u"ழு" ,u"ழூ" ,u"ழெ" ,u"ழே" ,u"ழை" ,u"ழொ" ,u"ழோ" ,u"ழௌ"
,u"ள" ,u"ளா" ,u"ளி" ,u"ளீ" ,u"ளு" ,u"ளூ" ,u"ளெ" ,u"ளே" ,u"ளை" ,u"ளொ" ,u"ளோ" ,u"ளௌ"
##/* Sanskrit Uyir-Mei */
,u"ஶ", u"ஶா", u"ஶி", u"ஶீ", u"ஶு", u"ஶூ", u"ஶெ", u"ஶே", u"ஶை", u"ஶொ", u"ஶோ", u"ஶௌ"
,u"ஜ" ,u"ஜா" ,u"ஜி" ,u"ஜீ" ,u"ஜு" ,u"ஜூ" ,u"ஜெ" ,u"ஜே" ,u"ஜை" ,u"ஜொ" ,u"ஜோ" ,u"ஜௌ"
,u"ஷ" ,u"ஷா" ,u"ஷி" ,u"ஷீ" ,u"ஷு" ,u"ஷூ" ,u"ஷெ" ,u"ஷே" ,u"ஷை" ,u"ஷொ" ,u"ஷோ" ,u"ஷௌ"
,u"ஸ" ,u"ஸா" ,u"ஸி" ,u"ஸீ" ,u"ஸு" ,u"ஸூ" ,u"ஸெ" ,u"ஸே" ,u"ஸை" ,u"ஸொ" ,u"ஸோ" ,u"ஸௌ"
,u"ஹ" ,u"ஹா" ,u"ஹி" ,u"ஹீ" ,u"ஹு" ,u"ஹூ" ,u"ஹெ" ,u"ஹே" ,u"ஹை" ,u"ஹொ" ,u"ஹோ" ,u"ஹௌ"
,u"க்ஷ" ,u"க்ஷா" ,u"க்ஷி" ,u"க்ஷீ" ,u"க்ஷு" ,u"க்ஷூ" ,u"க்ஷெ" ,u"க்ஷே" ,u"க்ஷை" ,u"க்ஷொ" ,u"க்ஷோ" ,u"க்ஷௌ" ]
grantha_uyirmei_letters = copy( tamil_letters[tamil_letters.index(u"கா")-1:] )
## length of the definitions
def accent_len( ):
return TA_ACCENT_LEN ## 13 = 12 + 1
def ayudha_len( ):
return TA_AYUDHA_LEN ## 1
def uyir_len( ):
return TA_UYIR_LEN ##12
def mei_len( ):
return TA_MEI_LEN ##18
def agaram_len( ):
return TA_AGARAM_LEN ##18
def uyirmei_len( ):
return TA_UYIRMEI_LEN ##216
def tamil_len( ):
return len(tamil_letters)
## access the letters
def uyir( idx ):
assert ( idx >= 0 and idx < uyir_len() )
return uyir_letters[idx]
def agaram( idx ):
assert ( idx >= 0 and idx < agaram_len() )
return agaram_letters[idx]
def mei( idx ):
assert ( idx >= 0 and idx < mei_len() )
return mei_letters[idx]
def uyirmei( idx ):
assert( idx >= 0 and idx < uyirmei_len() )
return uyirmei_letters[idx]
def mei_to_agaram(in_syllable):
if in_syllable in grantha_mei_letters:
mei_pos = grantha_mei_letters.index(in_syllable)
agaram_a_pos = 0
syllable = uyirmei_constructed(mei_pos,agaram_a_pos)
return syllable
return in_syllable
def uyirmei_constructed( mei_idx, uyir_idx):
""" construct uyirmei letter give mei index and uyir index """
idx,idy = mei_idx,uyir_idx
assert ( idy >= 0 and idy < uyir_len() )
assert ( idx >= 0 and idx < 6+mei_len() )
return grantha_agaram_letters[mei_idx]+accent_symbols[uyir_idx]
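# Hedged example: uyirmei_constructed(0, 1) -> u"க" + u"ா" == u"கா".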
def tamil( idx ):
""" retrieve Tamil letter at canonical index from array utf8.tamil_letters """
assert ( idx >= 0 and idx < tamil_len() )
return tamil_letters[idx]
# companion function to @tamil()
def getidx(letter):
for itr in range(0,tamil_len()):
if tamil_letters[itr] == letter:
return itr
raise Exception("Cannot find letter in Tamil arichuvadi")
## useful part of the API:
def istamil_prefix( word ):
""" check if the given word has a tamil prefix. Returns
either a True/False flag """
for letter in tamil_letters:
if ( word.find(letter) == 0 ):
return True
return False
if not PYTHON3:
is_tamil_unicode_predicate = lambda x: x >= unichr(2946) and x <= unichr(3066)
else:
is_tamil_unicode_predicate = lambda x: x >= chr(2946) and x <= chr(3066)
def is_tamil_unicode( sequence ):
# Ref: languagetool-office-extension/src/main/java/org/languagetool/openoffice/TamilDetector.java
if type(sequence) is list:
return list(map( is_tamil_unicode_predicate, sequence ))
if len(sequence) > 1:
return list(map( is_tamil_unicode_predicate, get_letters(sequence) ))
return is_tamil_unicode_predicate( sequence )
def all_tamil( word_in ):
""" predicate checks if all letters of the input word are Tamil letters """
if isinstance(word_in,list):
word = word_in
else:
word = get_letters( word_in )
return all( [(letter in tamil_letters) for letter in word] )
def has_tamil( word ):
"""check if the word has any occurance of any tamil letter """
# list comprehension is not necessary - we bail at earliest
for letters in tamil_letters:
if ( word.find(letters) >= 0 ):
return True
return False
def istamil( tchar ):
""" check if the letter tchar is prefix of
any of tamil-letter. It suggests we have a tamil identifier"""
if (tchar in tamil_letters):
return True
return False
def istamil_alnum( tchar ):
""" check if the character is alphanumeric, or tamil.
This saves time from running through istamil() check. """
return ( tchar.isalnum( ) or istamil( tchar ) )
def reverse_word( word ):
""" reverse a Tamil word according to letters not unicode-points """
op = get_letters( word )
op.reverse()
return u"".join(op)
## find out if letters like "பொ" are written with the canonical "ப + ொ" graphemes, then
## return True. If they are written like "ப + ெ + ா" then return False on first occurrence
def is_normalized( text ):
#print(text[0],text[1],text[2],text[-1],text[-2])
TLEN,idx = len(text),1
kaal = u"ா"
Laa = u"ள"
sinna_kombu, periya_kombu = u"ெ", u"ே"
kombugal = [sinna_kombu, periya_kombu]
# predicate measures if the normalization is violated
def predicate( last_letter, prev_letter):
if ((kaal == last_letter) and (prev_letter in kombugal)):
return True
if ((Laa == last_letter) and (prev_letter == sinna_kombu)):
return True
return False
if TLEN < 2:
return True
elif TLEN == 2:
if predicate( text[-1], text[-2] ):
return False
return True
idx = TLEN
a = text[idx-2]
b = text[idx-1]
while (idx >= 0):
if predicate(b,a):
return False
b=a
idx = idx - 1
if idx >= 0:
a=text[idx]
return True
def _make_set(args):
if PYTHON3:
return frozenset(args)
return set(args)
grantha_agaram_set = _make_set(grantha_agaram_letters)
accent_symbol_set = _make_set(accent_symbols)
uyir_letter_set = _make_set(uyir_letters)
## Split a tamil-unicode stream into
## tamil characters (individuals).
def get_letters( word ):
""" splits the word into a character-list of tamil/english
characters present in the stream """
ta_letters = list()
not_empty = False
WLEN,idx = len(word),0
while (idx < WLEN):
c = word[idx]
#print(idx,hex(ord(c)),len(ta_letters))
if c in uyir_letter_set or c == ayudha_letter:
ta_letters.append(c)
not_empty = True
elif c in grantha_agaram_set:
ta_letters.append(c)
not_empty = True
elif c in accent_symbol_set:
if not not_empty:
# odd situation
ta_letters.append(c)
not_empty = True
else:
#print("Merge/accent")
ta_letters[-1] += c
else:
if ord(c) < 256:
ta_letters.append( c )
else:
if not_empty:
#print("Merge/??")
ta_letters[-1]+= c
else:
ta_letters.append(c)
not_empty = True
idx = idx + 1
return ta_letters
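# Hedged example: get_letters(u"தமிழ்") -> [u"த", u"மி", u"ழ்"];
# accents and pulli marks are merged into the preceding base letter.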
_all_symbols = copy( accent_symbols )
_all_symbols.extend( pulli_symbols )
all_symbol_set = _make_set(_all_symbols)
# same as get_letters but use as iterable
def get_letters_iterable( word ):
""" splits the word into a character-list of tamil/english
characters present in the stream """
WLEN,idx = len(word),0
while (idx < WLEN):
c = word[idx]
#print(idx,hex(ord(c)),len(ta_letters))
if c in uyir_letter_set or c == ayudha_letter:
idx = idx + 1
yield c
elif c in grantha_agaram_set:
if idx + 1 < WLEN and word[idx+1] in all_symbol_set:
c2 = word[idx+1]
idx = idx + 2
yield (c + c2)
else:
idx = idx + 1
yield c
else:
idx = idx + 1
yield c
return
grantha_uyirmei_splits = {}
for _uyir_idx in range(0,12):
for _mei_idx, _mei in enumerate(grantha_mei_letters):
_uyirmei = uyirmei_constructed( _mei_idx, _uyir_idx )
grantha_uyirmei_splits[_uyirmei] = [_mei,uyir_letters[_uyir_idx]]
def get_letters_elementary_iterable(word):
for letter in get_letters_iterable(word):
letter_parts = grantha_uyirmei_splits.get(letter,None)
if letter_parts:
yield letter_parts[0]
yield letter_parts[1]
else:
yield letter
return
def get_letters_elementary(word):
rval = []
for letter in get_letters(word):
letter_parts = grantha_uyirmei_splits.get(letter,None)
if letter_parts:
rval.append( letter_parts[0] )
rval.append( letter_parts[1] )
else:
rval.append( letter )
return rval
def get_words(letters,tamil_only=False):
return [ word for word in get_words_iterable(letters,tamil_only) ]
def get_words_iterable( letters, tamil_only=False ):
""" given a list of UTF-8 letters section them into words, grouping them at spaces """
# correct algorithm for get-tamil-words
buf = []
for idx,let in enumerate(letters):
if not let.isspace():
if istamil(let) or (not tamil_only):
buf.append( let )
else:
if len(buf) > 0:
yield u"".join( buf )
buf = []
if len(buf) > 0:
yield u"".join(buf)
def get_tamil_words( letters ):
""" reverse a Tamil word according to letters, not unicode-points """
if not isinstance(letters,list):
raise Exception("metehod needs to be used with list generated from 'tamil.utf8.get_letters(...)'")
return [word for word in get_words_iterable( letters, tamil_only = True )]
if PYTHON3:
def cmp( x, y):
if x == y:
return 0
if x > y:
return 1
return -1
# answer if word_a ranks ahead of, or at same level, as word_b in a Tamil dictionary order...
# for use with Python : if a > 0
def compare_words_lexicographic( word_a, word_b ):
""" compare words in Tamil lexicographic order """
# sanity check for words to be all Tamil
if ( not all_tamil(word_a) ) or (not all_tamil(word_b)) :
#print("## ")
#print(word_a)
#print(word_b)
#print("Both operands need to be Tamil words")
pass
La = len(word_a)
Lb = len(word_b)
all_TA_letters = u"".join(tamil_letters)
for itr in range(0,min(La,Lb)):
pos1 = all_TA_letters.find( word_a[itr] )
pos2 = all_TA_letters.find( word_b[itr] )
if pos1 != pos2 :
#print not( pos1 > pos2), pos1, pos2
return cmp(pos1, pos2)
# result depends on if La is shorter than Lb, or 0 if La == Lb i.e. cmp
return cmp(La,Lb)
# return a list of ordered-pairs containing positions
# that are common in word_a, and word_b; e.g.
# தேடுக x தடங்கல் -> one common letter க [(2,3)]
# சொல் x தேடுக -> no common letters []
def word_intersection( word_a, word_b ):
""" return a list of tuples where word_a, word_b intersect """
positions = []
word_a_letters = get_letters( word_a )
word_b_letters = get_letters( word_b )
for idx,wa in enumerate(word_a_letters):
for idy,wb in enumerate(word_b_letters):
if ( wa == wb ):
positions.append( (idx, idy) )
return positions
def unicode_normalize(cplxchar):
Laa = u"ள"
kaal = u"ா"
sinna_kombu_a = u"ெ"
periya_kombu_aa = u"ே"
sinna_kombu_o = u"ொ"
periya_kombu_oo = u"ோ"
kombu_ak = u"ௌ"
lcplx = len(cplxchar)
if lcplx>=3 and cplxchar[-1] == Laa:
if cplxchar[-2] == sinna_kombu_a:
return ( cplxchar[:-2] + kombu_ak )
if lcplx >= 2 and cplxchar[-1] == kaal:
if cplxchar[-2] == sinna_kombu_a:
return ( cplxchar[:-2]+sinna_kombu_o )
if cplxchar[-2] == periya_kombu_aa:
return ( cplxchar[:-2]+periya_kombu_oo )
# no-op
return cplxchar
def splitMeiUyir(uyirmei_char):
"""
    This function splits an uyirmei compound character into mei + uyir
    characters and returns them as a tuple.
Input : It must be unicode tamil char.
Written By : Arulalan.T
Date : 22.09.2014
"""
if not isinstance(uyirmei_char, PYTHON3 and str or unicode):
raise ValueError("Passed input letter '%s' must be unicode, \
not just string" % uyirmei_char)
if uyirmei_char in mei_letters or uyirmei_char in uyir_letters or uyirmei_char in ayudha_letter:
return uyirmei_char
if uyirmei_char not in grantha_uyirmei_letters:
if not is_normalized( uyirmei_char ):
norm_char = unicode_normalize(uyirmei_char)
rval = splitMeiUyir( norm_char )
return rval
raise ValueError("Passed input letter '%s' is not tamil letter" % uyirmei_char)
idx = grantha_uyirmei_letters.index(uyirmei_char)
uyiridx = idx % 12
meiidx = int((idx - uyiridx)/ 12)
return (grantha_mei_letters[meiidx], uyir_letters[uyiridx])
# end of def splitMeiUyir(uyirmei_char):
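# Hedged example: splitMeiUyir(u"கா") -> (u"க்", u"ஆ").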
def joinMeiUyir(mei_char, uyir_char):
"""
    This function joins a mei character and an uyir character, and returns
    the compound uyirmei unicode character.
Inputs:
mei_char : It must be unicode tamil mei char.
uyir_char : It must be unicode tamil uyir char.
Written By : Arulalan.T
Date : 22.09.2014
"""
if not isinstance(mei_char, PYTHON3 and str or unicode):
raise ValueError("Passed input mei character '%s' must be unicode, \
not just string" % mei_char)
if not isinstance(uyir_char, PYTHON3 and str or unicode):
raise ValueError("Passed input uyir character '%s' must be unicode, \
not just string" % uyir_char)
if mei_char not in grantha_mei_letters:
raise ValueError("Passed input character '%s' is not a"
"tamil mei character" % mei_char)
if uyir_char not in uyir_letters:
raise ValueError("Passed input character '%s' is not a"
"tamil uyir character" % uyir_char)
uyiridx = uyir_letters.index(uyir_char)
meiidx = grantha_mei_letters.index(mei_char)
# calculate uyirmei index
uyirmeiidx = meiidx*12 + uyiridx
return grantha_uyirmei_letters[uyirmeiidx]
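# Hedged example: joinMeiUyir(u"க்", u"ஆ") -> u"கா" (the inverse of splitMeiUyir).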
def classify_letter(letter):
if not isinstance(letter, PYTHON3 and str or unicode):
raise TypeError("Input'%s' must be unicode, not just string" % letter)
kinds = [u'kuril',u'nedil',u'ayudham',u'vallinam',u'mellinam',u'idayinam',u'uyirmei',u'tamil_or_grantham']
if letter in uyir_letters:
if letter in kuril_letters:
return u'kuril'
elif letter in nedil_letters:
return u'nedil'
elif letter == ayudha_letter:
return 'ayudham'
if letter in mei_letters:
if letter in mellinam_letters:
return 'mellinam'
elif letter in vallinam_letters:
return 'vallinam'
elif letter in idayinam_letters:
return 'idayinam'
if letter in uyirmei_letters:
return 'uyirmei'
if letter in tamil_letters:
return 'tamil_or_grantham'
if letter.isalpha():
return 'english'
elif letter.isdigit():
return 'digit'
raise ValueError("Unknown letter '%s' neither Tamil nor English or number"%letter)
# Tamil Letters
# அ ஆ இ ஈ உ ஊ எ ஏ ஐ ஒ ஓ ஔ ஃ
# க் ச் ட் த் ப் ற் ஞ் ங் ண் ந் ம் ன் ய் ர் ல் வ் ழ் ள் ஜ் ஷ் ஸ் ஹ்
# க ச ட த ப ற ஞ ங ண ந ம ன ய ர ல வ ழ ள ஜ ஷ ஸ ஹ
# க கா கி கீ கு கூ கெ கே கை கௌ
# ச சா சி சீ சு சூ செ சே சை சொ சோ சௌ
# ட டா டி டீ டு டூ டெ டே டை டொ டோ டௌ
# த தா தி தீ து தூ தெ தே தை தொ தோ தௌ
# ப பா பி பீ பு பூ பெ பே பை பொ போ பௌ
# ற றா றி றீ று றூ றெ றே றை றொ றோ றௌ
# ஞ ஞா ஞி ஞீ ஞு ஞூ ஞெ ஞே ஞை ஞொ ஞோ ஞௌ
# ங ஙா ஙி ஙீ ஙு ஙூ ஙெ ஙே ஙை ஙொ ஙோ ஙௌ
# ண ணா ணி ணீ ணு ணூ ணெ ணே ணை ணொ ணோ ணௌ
# ந நா நி நீ நு நூ நெ நே நை நொ நோ நௌ
# ம மா மி மீ மு மூ மெ மே மை மொ மோ மௌ
# ன னா னி னீ னு னூ னெ னே னை னொ னோ னௌ
# ய யா யி யீ யு யூ யெ யே யை யொ யோ யௌ
# ர ரா ரி ரீ ரு ரூ ரெ ரே ரை ரொ ரோ ரௌ
# ல லா லி லீ லு லூ லெ லே லை லொ லோ லௌ
# வ வா வி வீ வு வூ வெ வே வை வொ வோ வௌ
# ழ ழா ழி ழீ ழு ழூ ழெ ழே ழை ழொ ழோ ழௌ
# ள ளா ளி ளீ ளு ளூ ளெ ளே ளை ளொ ளோ ளௌ
# ஶ ஶா ஶி ஶீ ஶு ஶூ ஶெ ஶே ஶை ஶொ ஶோ ஶௌ
# ஜ ஜா ஜி ஜீ ஜு ஜூ ஜெ ஜே ஜை ஜொ ஜோ ஜௌ
# ஷ ஷா ஷி ஷீ ஷு ஷூ ஷெ ஷே ஷை ஷொ ஷோ ஷௌ
# ஸ ஸா ஸி ஸீ ஸு ஸூ ஸெ ஸே ஸை ஸொ ஸோ ஸௌ
# ஹ ஹா ஹி ஹீ ஹு ஹூ ஹெ ஹே ஹை ஹொ ஹோ ஹௌ
# க்ஷ க்ஷா க்ஷி க்ஷீ க்ஷு க்ஷூ க்ஷெ க்ஷே க்ஷை க்ஷொ க்ஷோ க்ஷௌ
avg_line_length: 36.512579 | max_line_length: 118 | alphanum_fraction: 0.538886

hexsha: 2e81c947dda8b8c2edf4d7a0c57d8dd2e4515a63 | size: 253 | ext: py | lang: Python
max_stars_repo_path: app/api/api_v1/api.py
max_stars_repo_name: OPEN-NEXT/import-export | max_stars_repo_head_hexsha: db3e720f29cdc30846667f7cd6ba3cc653146fc4
max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: app/api/api_v1/api.py
max_issues_repo_name: OPEN-NEXT/import-export | max_issues_repo_head_hexsha: db3e720f29cdc30846667f7cd6ba3cc653146fc4
max_issues_repo_licenses: ["MIT"] | max_issues_count: 25
max_issues_repo_issues_event_min_datetime: 2021-03-09T15:27:44.000Z | max_issues_repo_issues_event_max_datetime: 2021-06-09T10:09:43.000Z
max_forks_repo_path: app/api/api_v1/api.py
max_forks_repo_name: wikifactory/import-export | max_forks_repo_head_hexsha: f7775d52d23b06a47cdaad13ae48e7727bb850fd
max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
from fastapi import APIRouter
from app.api.api_v1.endpoints import job, service
api_router = APIRouter()
api_router.include_router(job.router, prefix="/job", tags=["job"])
api_router.include_router(service.router, prefix="/service", tags=["service"])
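# Hedged usage sketch (assumed application wiring, not shown in this file):
#     from fastapi import FastAPI
#     app = FastAPI()
#     app.include_router(api_router, prefix="/api/v1")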
avg_line_length: 31.625 | max_line_length: 78 | alphanum_fraction: 0.774704

hexsha: 93d3fa678ebfc36d25fb4da9d874fd5796c38e63 | size: 323,971 | ext: py | lang: Python
max_stars_repo_path: inspire_magpie/labels.py
max_stars_repo_name: jstypka/inspire-magpie | max_stars_repo_head_hexsha: 7294b9f5347197f59bf7b3f9d164f2ff35a52cef
max_stars_repo_licenses: ["MIT"] | max_stars_count: 1
max_stars_repo_stars_event_min_datetime: 2017-11-17T17:30:36.000Z | max_stars_repo_stars_event_max_datetime: 2017-11-17T17:30:36.000Z
max_issues_repo_path: inspire_magpie/labels.py
max_issues_repo_name: jstypka/inspire-magpie | max_issues_repo_head_hexsha: 7294b9f5347197f59bf7b3f9d164f2ff35a52cef
max_issues_repo_licenses: ["MIT"] | max_issues_count: 6
max_issues_repo_issues_event_min_datetime: 2016-05-03T09:25:19.000Z | max_issues_repo_issues_event_max_datetime: 2019-03-22T00:45:43.000Z
max_forks_repo_path: inspire_magpie/labels.py
max_forks_repo_name: jstypka/inspire-magpie | max_forks_repo_head_hexsha: 7294b9f5347197f59bf7b3f9d164f2ff35a52cef
max_forks_repo_licenses: ["MIT"] | max_forks_count: 2
max_forks_repo_forks_event_min_datetime: 2016-04-13T13:53:36.000Z | max_forks_repo_forks_event_max_datetime: 2016-04-28T14:51:42.000Z
content:
# -*- coding: utf-8 -*-
#
# This file is part of Inspire-Magpie.
# Copyright (c) 2016 CERN
#
# Inspire-Magpie is a free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for
# more details.
"""Labels.
.. codeauthor:: Jan Stypka <jan.stypka@cern.ch>
.. codeauthor:: Jan Aage Lavik <jan.age.lavik@cern.ch>
"""
from __future__ import absolute_import, print_function
def get_labels(n):
if n == 14:
return get_categories()
elif n == 500:
return get_experiments()
else:
return get_keywords(n)
def get_keywords(n):
""" Get a precomputed list of N most popular keywords in HEP dataset. """
if n == -1:
return []
elif n == 100:
return TOP_100_KEYWORDS
elif n == 1000:
return TOP_1000_KEYWORDS
elif n == 10000:
return TOP_10000_KEYWORDS
else:
raise ValueError("No value has been computed "
"for {} most popular keywords".format(n))
def get_categories():
return CATEGORIES
def get_experiments():
return EXPERIMENTS
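# Hedged examples of the dispatch above:
#     get_labels(14)  -> CATEGORIES (the 14 INSPIRE categories below)
#     get_labels(500) -> EXPERIMENTS
#     get_labels(100) -> TOP_100_KEYWORDS; unsupported keyword counts raise ValueError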
CATEGORIES = [
u'Astrophysics',
u'Accelerators',
u'Computing',
u'Experiment-HEP',
u'Gravitation and Cosmology',
u'Instrumentation',
u'Lattice',
u'Math and Math Physics',
u'Theory-Nucl',
u'Other',
u'Phenomenology-HEP',
u'General Physics',
u'Theory-HEP',
u'Experiment-Nucl',
]
TOP_100_KEYWORDS = [
u'numerical calculations',
u'supersymmetry',
u'CERN LHC Coll',
u'quantum chromodynamics',
u'experimental results',
u'field theory: scalar',
u'bibliography',
u'numerical calculations: interpretation of experiments',
u'numerical calculations: Monte Carlo',
u'critical phenomena',
u'string model',
u'field theory: conformal',
u'electron positron: annihilation',
u'membrane model',
u'CP: violation',
u'boundary condition',
u'dimension: 2',
u'lattice field theory',
u'duality',
u'cosmological model',
u'dimension: 5',
u'supergravity',
u'perturbation theory: higher-order',
u'inflation',
u'dimension: 3',
u'gauge field theory: Yang-Mills',
u'dark matter',
u'field equations: solution',
u'Higgs particle: mass',
u'neutrino: oscillation',
u'heavy ion: scattering',
u'neutrino: mass',
u'gravitation',
u'effective action',
u'background',
u'effective Lagrangian',
u'renormalization',
u'space-time: anti-de Sitter',
u'black hole',
u'p p: inclusive reaction',
u'correlation function',
u'cosmological constant',
u'scattering: heavy ion',
u'stability',
u'Hamiltonian formalism',
u'finite temperature',
u'quantum chromodynamics: perturbation theory',
u'Batavia TEVATRON Coll',
u'energy: density',
u'gauge field theory',
u'space-time',
u'holography',
u'new physics',
u'dimension: 4',
u'supersymmetry: symmetry breaking',
u'Chern-Simons term',
u'Brookhaven RHIC Coll',
u'general relativity',
u'thermodynamics',
u'quark gluon: plasma',
u'quantum gravity',
u'renormalization group',
u'neutrino: mixing angle',
u'tensor: energy-momentum',
u'gold',
u'scaling',
u'partition function',
u'dark energy',
u'quantization',
u'Feynman graph',
u'quark: mass',
u'potential: chemical',
u'p p: scattering',
u'analytic properties',
u'symmetry: chiral',
u'AdS/CFT correspondence',
u'cosmic background radiation',
u'spontaneous symmetry breaking',
u'p p: interaction',
u'ATLAS',
u'invariance: gauge',
u'electron positron: colliding beams',
u'coupling: Yukawa',
u'minimal supersymmetric standard model',
u'Feynman graph: higher-order',
u'CMS',
u'gravitational radiation',
u'vacuum state',
u'perturbation theory',
u'equation of state',
u'data analysis method',
u'performance',
u'entropy',
u'quantum mechanics',
u'gauge field theory: SU(3)',
u'quantum electrodynamics',
u'fermion: lattice field theory',
u'unitarity',
u'phase space',
u'supernova',
]
EXPERIMENTS = [
u'CERN-LHC-ATLAS',
u'CERN-LHC-CMS',
u'CERN-LHC-LHCb',
u'FNAL-E-0740',
u'CERN-LHC-ALICE',
u'SLAC-PEP2-BABAR',
u'BNL-RHIC-STAR',
u'FNAL-E-0823',
u'KEK-BF-BELLE',
u'FNAL-E-0830',
u'DESY-HERA-H1',
u'BNL-RHIC-PHENIX',
u'DESY-HERA-ZEUS',
u'SDSS',
u'CERN-LEP-OPAL',
u'CERN-LEP-DELPHI',
u'FERMI-LAT',
u'AUGER',
u'LATTICE-UKQCD',
u'CESR-CLEO',
u'ICECUBE',
u'CERN-LEP-ALEPH',
u'FNAL-E-0741',
u'BEPC-BES',
u'CERN-LEP-L3',
u'MAGIC',
u'DESY-HERA-HERMES',
u'CERN-NA-058',
u'CESR-CLEO-II',
u'FRASCATI-DAFNE-KLOE',
u'SUPER-KAMIOKANDE',
u'MICE',
u'Dzero',
u'LIGO',
u'HESS',
u'GSI-FAIR-PANDA',
u'FNAL-E-0875',
u'ANTARES',
u'KEK-T2K',
u'SLAC-SLC-SLD',
u'CERN-NA-049',
u'BEPC-BES-III',
u'OPERA',
u'JLAB-CLAS',
u'AMS',
u'DESY-HERA-B',
u'CERN-NA-048',
u'BNL-RHIC-PHOBOS',
u'BOREXINO',
u'LISA',
u'VERITAS',
u'FNAL-E-0898',
u'CDMS',
u'GSI-HADES',
u'MACRO',
u'LATTICE-MILC',
u'SUDBURY-SNO',
u'PAMELA',
u'CALICE',
u'FNAL-E-0831',
u'FNAL-E-0904',
u'AMANDA',
u'NEW-EXP',
u'VIRGO',
u'ARGO-YBJ',
u'NEMO',
u'COSY-11',
u'GLAST',
u'BNL-RHIC-BRAHMS',
u'KASCADE',
u'FNAL-E-0791',
u'PLANCK',
u'WASA-COSY',
u'DES',
u'CESR-CLEO-III',
u'HIRES',
u'KEK-K2K',
u'JLAB-E-89-004',
u'XENON100',
u'AGILE',
u'FNAL-E-0938',
u'CERN-NA-061',
u'CERN-NA-062',
u'GSI-SIS-FOPI',
u'2dFGRS',
u'CERN-WA-095',
u'CERN-PS-197',
u'CERN-ISOLDE',
u'FNAL-E-0799',
u'CERN-WA-096',
u'TELESCOPE-ARRAY',
u'NOVOSIBIRSK-CMD-2',
u'MAINZ-A1',
u'KATRIN',
u'GERDA',
u'CERN-NA-060',
u'FNAL-E-0781',
u'NOVOSIBIRSK-SND',
u'HEGRA',
u'KamLAND',
u'BNL-E-0821',
u'ICARUS',
u'FRASCATI-DAFNE-FINUDA',
u'FNAL-E-0832',
u'DOUBLECHOOZ',
u'FNAL-E-0973',
u'GSI-SIS-ALADIN',
u'CERN-LHC-TOTEM',
u'CERN-NA-045',
u'EDELWEISS',
u'EUCLID',
u'AMS-02',
u'FNAL-E-0815',
u'CUORE',
u'DAYA-BAY',
u'LATTICE-ALPHA',
u'CERN-WA-098',
u'BONN-ELSA-CRYSTAL-BARREL',
u'PSI-R-99-05',
u'CERN-NA-048-2',
u'DESY-DORIS-ARGUS',
u'CERN-PS-195',
u'BEPPOSAX',
u'COSY-ANKE',
u'MAINZ-A2',
u'CERN-PS-214',
u'GSI-FAIR-CBM',
u'CERN-PS-201',
u'DAMA',
u'FNAL-E-0687',
u'KARMEN',
u'CTA',
u'BAIKAL',
u'WMAP',
u'MAJORANA',
u'GSI-TAPS',
u'LEPS',
u'CERN-NA-047',
u'FNAL-E-0897',
u'ASCA',
u'CERN-NA-050',
u'CERN-PS-212',
u'EXO-200',
u'CERN-NA-044',
u'SNLS',
u'ACT',
u'CERN-NA-022',
u'CERN-CAST',
u'CERN-WA-089',
u'FNAL-E-0835',
u'CERN-WA-102',
u'CRESST',
u'FNAL-E-0770',
u'J-PET',
u'BNL-E-0787',
u'SCP',
u'GALEX',
u'LAMPF-1173',
u'MILAGRO',
u'BOOMERANG',
u'MAMI',
u'LUNA',
u'BNL-E-0852',
u'NOVOSIBIRSK-KEDR',
u'CESR-CLEO-C',
u'TEXONO',
u'SOUDAN-2',
u'CERN-NA-057',
u'LATTICE-HPQCD',
u'LVD',
u'FNAL-E-0929',
u'BNL-E-0802',
u'SUDBURY-SNO+',
u'KAMIOKANDE',
u'EGRET',
u'FNAL-P-1028',
u'NEXT',
u'FNAL-E-0954',
u'GENIUS',
u'CANGAROO',
u'FNAL-E-0871',
u'HIGH-Z',
u'LUX',
u'EUSO',
u'ZEPLIN-III',
u'EAS-TOP',
u'DEAP-3600',
u'CERN-RD-050',
u'FNAL-E-0974',
u'TIBET-ASG',
u'BNL-E-0895',
u'CODALEMA',
u'DEEP2',
u'CERN-ATHENA',
u'FREJUS-NEMO-3',
u'BNL-E-0877',
u'XMASS',
u'BESS',
u'KM3NeT',
u'KEK-TE-002',
u'GRAAL',
u'GSI-SIS-INDRA',
u'LBL-E-0987',
u'CERN-ALPHA',
u'TRIUMF-614',
u'INTEGRAL',
u'FNAL-E-0907',
u'BNL-E-0864',
u'SNAP',
u'FNAL-E-0866',
u'FNAL-E-0886',
u'COSY-TOF',
u'FNAL-P-0929',
u'BNL-E-0814',
u'CERN-NA-035',
u'FREJUS-NEMO-2',
u'ANITA',
u'CERN-LHC-LHCf',
u'KEK-246',
u'NUSTAR',
u'MACHO',
u'ADMX',
u'CERN-WA-080',
u'CERN-NA-031',
u'PICASSO',
u'FNAL-T-0962',
u'SWIFT',
u'SWIRES',
u'NOVOSIBIRSK-CMD-3',
u'ASTROD',
u'SAGE',
u'SPT',
u'CERN-UA-001',
u'BNL-E-0865',
u'FNAL-E-0691',
u'STACEE',
u'FNAL-E-0706',
u'CERN-EMU-001',
u'Cosmic Ray',
u'SLAC-SP-032',
u'NESTOR',
u'SLAC-E-158',
u'CERN-WA-092',
u'FNAL-E-0760',
u'DESY-PETRA-JADE',
u'CERN-WA-076',
u'SERPUKHOV-164',
u'FNAL-E-0665',
u'AGASA',
u'LATTICE-FERMILAB',
u'COBRA',
u'GAIA',
u'SuperB',
u'CERN-UA-002',
u'AURIGA',
u'BNL-E-0949',
u'FNAL-E-0731',
u'CUORICINO',
u'FNAL-E-0735',
u'DESY-DORIS-CRYSTAL-BALL',
u'SATURNE-213',
u'KEK-BF-BELLE-II',
u'CERN-EMU-008',
u'RENO',
u'FNAL-E-0761',
u'CERN-WA-097',
u'FNAL-LC',
u'SLAC-E-143',
u'FNAL-E-0653',
u'FNAL-LARP',
u'XENON',
u'KM3Net',
u'GALLEX',
u'CHOOZ',
u'LBNE',
u'JLAB-E-01-104',
u'EROS',
u'FRASCATI-DAFNE-SIDDHARTA',
u'CELESTE',
u'JLAB-E-00-110',
u'CERN-ISOLDE-REX',
u'WARPS',
u'JLAB-E-00-006',
u'BNL-E-0917',
u'BATES-BLAST',
u'COSMIC-RAY',
u'CERN-PS-205',
u'LOPES',
u'CELSIUS/WASA',
u'FNAL-E-0989',
u'SLAC-E-155',
u'DARKSIDE',
u'SERPUKHOV-VES',
u'FNAL-LINEAR COLLIDER',
u'JLAB-E-02-020',
u'TRIUMF',
u'TAPS',
u'RICE',
u'CHANDRA',
u'FNAL-E-0690',
u'CELSIUS-WASA',
u'CERN-LHC-F',
u'BATES-E-94-11',
u'SLAC-E-164',
u'BONN-ELSA-SAPHIR',
u'FUSE',
u'LOFAR',
u'FNAL-E-0771',
u'FNAL-E-0918',
u'CERN-PS-185',
u'CERN-ATRAP',
u'FNAL-E-0901',
u'SLAC-SLC-06',
u'CERN-NA-052',
u'FNAL-E-0872',
u'GlueX',
u'FNAL-T-0969',
u'KASKA',
u'LAMPF',
u'IMB',
u'BNL-E-0910',
u'TAMA',
u'CERN-NA-038',
u'CERN-PS-202',
u'FNAL-E-0921',
u'TESLA',
u'CERN-PS-nTOF',
u'EDELWEISS-II',
u'GSI',
u'MUNU',
u'SERPUKHOV-SPHINX',
u'SSCL-SDC',
u'KEK-TE-003',
u'FNAL-E-0769',
u'BICEP2',
u'BNL-E-0766',
u'QUBIC',
u'MuCap',
u'CDMS-II',
u'SLNS',
u'BNL-E-0866',
u'MAINZ-GDH',
u'JLAB-E-93-050',
u'WARP',
u'Sudbury-SNO',
u'SKA',
u'FNAL-E-0632',
u'CERN-BASE',
u'DUNE',
u'JLAB-E-94-010',
u'KEK-TE-001',
u'CERN-NA-037',
u'CERN-NA-032',
u'PALO-VERDE',
u'SERP',
u'PSI-LADS',
u'MERIT',
u'JLAB-E-01-001',
u'AIGO',
u'CERN-NA-059',
u'PIBETA',
u'LCFI',
u'CESR-CUSB',
u'JLAB-E-91-010',
u'KEK-VENUS',
u'DRIFT',
u'FNAL-E-0961',
u'PROJECT-8',
u'EMMA',
u'ANL-ATLAS-APEX',
u'FNAL-E-0868',
u'XMM',
u'FNAL-T-0987',
u'PVLAS',
u'BNL-E-0845',
u'GSI-KAOS',
u'ITEP-912',
u'CDEX',
u'CERN-ASACUSA',
u'OGLE',
u'GSI-FRS',
u'CERN-LEAR',
u'SSCL-GEM',
u'BNL-RHIC-PP2PP',
u'CERN-WA-021',
u'J-PARC KOTO',
u'HELLAZ',
u'NICA',
u'BNL-E-0878',
u'HETE',
u'CERN-AEGIS',
u'SLAC-E-146',
u'TOTEM',
u'CERN-NA-014-2',
u'JLAB-E-99-105',
u'CERN-NA-027',
u'MAINZ-A4',
u'KEK-PS',
u'RCNP-GRAND-RAIDEN',
u'GEO600',
u'KEK',
u'CREAM',
u'KAMLAND',
u'JLAB-E-01-011',
u'CHIMERA',
u'SLAC-PPRC',
u'FNAL-E-0704',
u'CLOUD',
u'ASTROD-GW',
u'TUNKA-13',
u'BNL-E-0810',
u'BOSS',
u'BNL',
u'CERN-PS-203',
u'CERN-PS-208',
u'PSI',
u'SERPUKHOV-145',
u'CANDELS',
u'FNAL-E-0756',
u'J-PARC TREK',
u'MAXIMA',
u'JLAB-E-95-001',
u'LUCIFER',
u'CASA-MIA',
u'GEMS',
u'LAMPF-0225',
u'DAEdALUS',
u'FNAL-T-0994',
u'HOMESTAKE-CHLORINE',
u'MiniCLEAN',
u'CERN-PS-171',
u'LOFT',
u'SIMPLE',
u'KEK-391A',
u'CERN-IS-433',
u'CERN-WA-093',
u'CERN-WA-091',
u'FNAL-E-0906',
u'MERLIN',
u'NOE',
u'FNAL-E-0882',
u'BNL-E-0913-914',
u'BNL-E-0926',
u'SERPUKHOV-161',
u'FRASCATI-DEAR',
u'Fermi-LAT',
u'CERN-WA-079',
u'LZ',
u'LAGEOS',
u'GNO',
u'PSI-Z-90-12',
u'DM-ICE',
u'COSY-EDDA',
u'Saclay',
u'QUIET',
u'NIMROD',
u'EXPLORER',
u'LARP',
u'BABAR',
u'SATURNE-237',
u'BNL-E-0913',
u'XX',
u'TRIUMF-248',
u'SHADES',
u'CERN-LHC',
u'FNAL-E-0864',
u'TRIUMF-613',
u'HAWC',
u'SLAC-E-165',
u'SLAC-E-167',
u'JLAB-E-94-107',
u'ALMA',
u'CERN-RD-039',
u'FNAL-E-0853',
u'SERPUKHOV-146',
u'KEK-470',
u'FNAL-E-0773',
u'JUNO',
u'JLAB-E-01-012',
u'BNL-IFEL',
u'FNAL-E-0672',
u'SUPER-CDMS',
u'CERN-LHC-MOEDAL',
u'FNAL-E-0789',
u'CERN-NA-048-1',
u'KEK-325',
u'BNL-E-0791',
u'JLAB-E-91-016',
u'POLARBEAR',
u'TRIUMF-1072',
u'Gran Sasso',
u'JLAB-E-99-115',
u'BONN-ELSA-GDH',
u'CERN-NA-034',
u'SERPUKHOV-GAMS',
u'SERPUKHOV-EXCHARM',
u'BNL-E-0875',
u'BNL-E-0871',
u'BIMA',
u'FNAL-E-0683',
]
TOP_1000_KEYWORDS = [
u'numerical calculations',
u'supersymmetry',
u'CERN LHC Coll',
u'quantum chromodynamics',
u'experimental results',
u'field theory: scalar',
u'bibliography',
u'numerical calculations: interpretation of experiments',
u'numerical calculations: Monte Carlo',
u'critical phenomena',
u'string model',
u'field theory: conformal',
u'electron positron: annihilation',
u'membrane model',
u'CP: violation',
u'boundary condition',
u'dimension: 2',
u'lattice field theory',
u'duality',
u'cosmological model',
u'dimension: 5',
u'supergravity',
u'perturbation theory: higher-order',
u'inflation',
u'dimension: 3',
u'gauge field theory: Yang-Mills',
u'dark matter',
u'field equations: solution',
u'Higgs particle: mass',
u'neutrino: oscillation',
u'heavy ion: scattering',
u'neutrino: mass',
u'gravitation',
u'effective action',
u'background',
u'effective Lagrangian',
u'renormalization',
u'space-time: anti-de Sitter',
u'black hole',
u'p p: inclusive reaction',
u'correlation function',
u'cosmological constant',
u'scattering: heavy ion',
u'stability',
u'Hamiltonian formalism',
u'finite temperature',
u'quantum chromodynamics: perturbation theory',
u'Batavia TEVATRON Coll',
u'energy: density',
u'gauge field theory',
u'space-time',
u'holography',
u'new physics',
u'dimension: 4',
u'supersymmetry: symmetry breaking',
u'Chern-Simons term',
u'Brookhaven RHIC Coll',
u'general relativity',
u'thermodynamics',
u'quark gluon: plasma',
u'quantum gravity',
u'renormalization group',
u'neutrino: mixing angle',
u'tensor: energy-momentum',
u'gold',
u'scaling',
u'partition function',
u'dark energy',
u'quantization',
u'Feynman graph',
u'quark: mass',
u'potential: chemical',
u'p p: scattering',
u'analytic properties',
u'symmetry: chiral',
u'AdS/CFT correspondence',
u'cosmic background radiation',
u'spontaneous symmetry breaking',
u'p p: interaction',
u'ATLAS',
u'invariance: gauge',
u'electron positron: colliding beams',
u'coupling: Yukawa',
u'minimal supersymmetric standard model',
u'Feynman graph: higher-order',
u'CMS',
u'gravitational radiation',
u'vacuum state',
u'perturbation theory',
u'equation of state',
u'data analysis method',
u'performance',
u'entropy',
u'quantum mechanics',
u'gauge field theory: SU(3)',
u'quantum electrodynamics',
u'fermion: lattice field theory',
u'unitarity',
u'phase space',
u'supernova',
u'field equations',
u'expansion 1/N',
u'perturbation theory: chiral',
u'gauge field theory: SU(2)',
u'channel cross section',
u'higher-order: 1',
u'lead',
u'moduli space',
u'gauge field theory: SU(N)',
u'differential cross section',
u'compactification',
u'time dependence',
u'factorization',
u'lattice',
u'Wilson loop',
u'effective potential',
u'Kaluza-Klein model',
u'fluctuation',
u'sensitivity',
u'hydrodynamics',
u'thesis',
u'instanton',
u'integrability',
u'space-time: higher-dimensional',
u'singularity',
u'any-dimensional',
u'gauge field theory: U(1)',
u'anomaly',
u'space-time: Robertson-Walker',
u'potential',
u'channel cross section: energy dependence',
u'M-theory',
u'statistical analysis',
u'electroweak interaction',
u'review',
u'nonperturbative',
u'energy dependence',
u'mass spectrum',
u'programming',
u'magnetic field',
u'scattering amplitude',
u'temperature',
u'geometry',
u'path integral',
u'neutrino: cosmic radiation',
u'expansion: acceleration',
u'galaxy',
u'space-time: de Sitter',
u'dispersion relation',
u'horizon',
u'top: pair production',
u'B: hadronic decay',
u'higher-dimensional',
u'electromagnetic field',
u'neutrino: solar',
u'matrix model',
u'regularization',
u'interpretation of experiments',
u'operator: algebra',
u'tree approximation',
u'Randall-Sundrum model',
u'propagator',
u'angular distribution',
u'kinematics',
u'differential geometry: noncommutative',
u'symmetry breaking',
u'dimensional reduction',
u'transverse momentum: spectrum',
u'symmetry: rotation',
u'cross section',
u'Monte Carlo',
u'D-brane',
u'anti-p p: inclusive reaction',
u'operator product expansion',
u'violation: Lorentz',
u'CDF',
u'neutral current: flavor changing',
u'talk',
u'effective field theory',
u'dimension: 6',
u'seesaw model',
u'background field',
u'parton: distribution function',
u'gravitation: model',
u'upper limit',
u'group theory: representation',
u'continuum limit',
u'dilaton',
u'asymptotic behavior',
u'Einstein equation: solution',
u'parametrization',
u'radiative correction',
u'transverse momentum dependence',
u'energy',
u'heavy quark',
u'angular distribution: asymmetry',
u'charged current',
u'BaBar',
u'DESY HERA Stor',
u'proposed experiment',
u'supersymmetry: 4',
u'algebra: Lie',
u'dimension: 10',
u'anti-de Sitter',
u'orbifold',
u'neutrino: flux',
u'amplitude analysis',
u'neutrino: right-handed',
u'Higgs particle: hadroproduction',
u'sigma model: nonlinear',
u'S-matrix',
u'quark: matter',
u'angular momentum',
u'wave function',
u'confinement',
u'neutrino: Majorana',
u'electron p: deep inelastic scattering',
u'Hamiltonian',
u'differential forms',
u'Higgs model',
u'B: branching ratio',
u'lepton: pair production',
u'deformation',
u'strong coupling',
u'correlation',
u'transport theory',
u'cohomology',
u'radiation: Hawking',
u'flavor: 2',
u'mass: hierarchy',
u'cosmic radiation: particle source',
u'LHC-B',
u'fibre bundle',
u'spectral representation',
u'electroweak interaction: symmetry breaking',
u'expansion 1/N: color',
u'B: pair production',
u'polarization',
u'superpotential',
u'flavor: violation',
u'dark matter: annihilation',
u'correction: higher-order',
u'spin: asymmetry',
u'string: open',
u'BELLE',
u'bottom',
u'ghost',
u'operator: Dirac',
u'fluid',
u'symmetry breaking: chiral',
u'gamma ray: burst',
u'flavor: 3',
u'higher-order: 2',
u'efficiency',
u'Higgs particle: multiplet',
u'invariance: Lorentz',
u'Einstein equation',
u'cosmic radiation: flux',
u'approximation: quenching',
u'cosmic radiation: energy spectrum',
u'rapidity spectrum',
u'soliton',
u'spin',
u'effective Hamiltonian',
u'pressure',
u'Born-Infeld model',
u'neutrino: mass difference',
u'temperature dependence',
u'muon: magnetic moment',
u'quintessence',
u'charge: topological',
u'CERN LEP Stor',
u'grand unified theory',
u'Drell-Yan process',
u'two-point function',
u'jet: pair production',
u'quark: pair production',
u'fermion: mass',
u'conservation law',
u'causality',
u'B: semileptonic decay',
u'black hole: entropy',
u'algebra: representation',
u'hadron: multiple production',
u'quantum chromodynamics: correction',
u'quantum chromodynamics: sum rule',
u'field theory: action',
u'constraint',
u'power spectrum',
u'CERN SPS',
u'coupling: gauge',
u'SLAC PEP Stor',
u'form factor',
u'Dirac equation',
u'diffraction',
u'topology',
u'space: Calabi-Yau',
u'domain wall',
u'top',
u'transverse momentum: momentum spectrum',
u'supersymmetry: 2',
u'Jona-Lasinio-Nambu model',
u'impact parameter: dependence',
u'nuclear matter',
u'mathematical methods',
u'bound state',
u'photon: cosmic radiation',
u'tables',
u'central charge',
u'derivative: high',
u'transformation: gauge',
u'ground state',
u'charmonium',
u'R parity: violation',
u'Hilbert space',
u'tachyon',
u'viscosity',
u'neutrino: sterile',
u'threshold',
u'hadron spectroscopy',
u'black hole: Schwarzschild',
u'Dyson-Schwinger equation',
u'final-state interaction',
u'gravitation: duality',
u'DZERO',
u'strong interaction: coupling constant',
u'Upsilon(10570): electroproduction',
u'noise',
u'anti-p p: interaction',
u'spinor',
u'mean field approximation',
u'invariance: conformal',
u'black hole: horizon',
u'neutron star',
u'Higgs particle: coupling',
u'approximation: semiclassical',
u'rapidity dependence',
u'string model: heterotic',
u'p: structure function',
u'fermion',
u'coset space',
u'scale: Planck',
u'commutation relations',
u'geometry: algebra',
u'ALICE',
u'energy spectrum',
u'zero mode',
u'STAR',
u'excited state',
u'symmetry: gauge',
u'torsion',
u'parity: violation',
u'Higgs particle: decay',
u'quark',
u'coupling constant',
u'phi**n model: 4',
u'Friedman model',
u'scattering',
u'symmetry: SU(3)',
u'baryon: asymmetry',
u'Higgs particle',
u'CKM matrix',
u'sum rule',
u'neutral current',
u'GLAST',
u'sparticle: mass',
u'Goldstone particle',
u'flux',
u'field theory: tensor',
u'Schroedinger equation',
u'Ward identity',
u'dimension: 11',
u'electron: pair production',
u'electronics: readout',
u'pulsar',
u'field theory: vector',
u'supersymmetry: algebra',
u'curvature',
u'field theory: topological',
u'spin: high',
u'pi: pair production',
u'many-body problem',
u'Hubble constant',
u'pi: mass',
u'condensation: chiral',
u'back reaction',
u'Gauss-Bonnet term',
u'elliptic flow',
u'polarization: transverse',
u'infrared problem',
u'ILC Coll',
u'small-x',
u'infrared',
u'Regge poles',
u'symmetry: flavor',
u'potential: scalar',
u'energy loss',
u'cosmic radiation',
u'helicity',
u'nucleon: structure function',
u'gamma ray: emission',
u'quantum gravity: loop space',
u'Higgs particle: decay modes',
u'anti-p p: colliding beams',
u'covariance',
u'photon: emission',
u'momentum transfer dependence',
u'statistics',
u'partial wave',
u'superconductivity',
u'charm',
u'carbon',
u'symmetry: U(1)',
u'Polyakov loop',
u'tunneling',
u'p p: colliding beams',
u'universality',
u'anti-p p: annihilation',
u'luminosity',
u'gluon gluon: fusion',
u'vacuum state: energy',
u'gluon: propagator',
u'superstring',
u'effective Lagrangian: chiral',
u'vortex',
u'dark matter: mass',
u'soliton: BPS',
u'T-duality',
u'string: closed',
u'jet: hadroproduction',
u'symmetry: discrete',
u'transverse energy: missing-energy',
u'neutrino: mixing',
u'anisotropy',
u'operator: vertex',
u'lepton number: violation',
u'vertex function',
u'string',
u'gauge field theory: U(N)',
u'rapidity',
u'algebra',
u'baryon: density',
u'dimension: 1',
u'sigma model',
u'doublet: 2',
u'black hole: charge',
u'fixed point',
u'bottom: pair production',
u'Riemann surface',
u'energy: high',
u'CERN Lab',
u'activity report',
u'torus',
u'regularization: dimensional',
u'angular dependence',
u'gauge fixing',
u'polarization: longitudinal',
u'higher-order: 0',
u'density: perturbation',
u'axion',
u'anomalous dimension',
u'field theory: string',
u'quantum number',
u'free energy',
u'matter: effect',
u'flavor',
u'partial wave analysis',
u'space-time: Schwarzschild',
u'grand unified theory: SU(5)',
u'asymmetry: CP',
u'trigger',
u'J/psi(3100)',
u'cosmic string',
u'charge: conservation law',
u'angular correlation',
u'CKM matrix: unitarity',
u'chiral',
u'deep inelastic scattering',
u'data management',
u'twist',
u'muon: pair production',
u'calibration',
u'entropy: entanglement',
u'cosmic radiation: UHE',
u'deconfinement',
u'B0: hadronic decay',
u'differential cross section: angular dependence',
u'Klein-Gordon equation',
u'Lagrangian formalism',
u'Higgs particle: branching ratio',
u'symmetry: conformal',
u'cosmic radiation: primary',
u'grand unified theory: SO(10)',
u'photon: pair production',
u'photon: energy spectrum',
u'binding energy',
u'Landau gauge',
u'momentum spectrum: parton',
u'fermion: flavor',
u'energy levels',
u'quantum chromodynamics: nonrelativistic',
u'p: cosmic radiation',
u'jet: multiple production',
u'electron: polarized beam',
u'field theory: deformation',
u'dark matter: direct detection',
u'effect: higher-twist',
u'quantum chromodynamics: critical phenomena',
u'suppression',
u'transverse momentum: high',
u'space-time: Minkowski',
u'gauge field theory: SU(3) x SU(2) x U(1)',
u'W: pair production',
u'thermal',
u'black hole: anti-de Sitter',
u'Wess-Zumino-Witten model',
u'cosmic background radiation: anisotropy',
u'differential equations',
u'dark matter: density',
u'signature',
u'energy: low',
u'gravitation: massive',
u'differential cross section: momentum transfer',
u'lectures',
u'string tension',
u'neutrino: atmosphere',
u'renormalization group: flow',
u'Fock space',
u'decoupling',
u'astrophysics',
u'Bethe-Salpeter equation',
u'algebra: Virasoro',
u'gravitation: f(R)',
u'Higgs particle: charged particle',
u'charged particle: multiplicity',
u'space-time: noncommutative',
u'nuclear matter: effect',
u'yield: ratio',
u'branching ratio',
u'perturbation: scalar',
u'charge: asymmetry',
u'jet: quenching',
u'magnetic monopole',
u'dissipation',
u'gluon',
u'space-time: Bianchi',
u'charge: electric',
u'numerical methods',
u'correction: quantum',
u'potential: Kaehler',
u'graviton',
u'mixing',
u'nonlinear',
u'beta function',
u'energy resolution',
u'geodesic',
u'surface',
u'color',
u'perturbation',
u'off-shell',
u'PHENIX',
u'gamma ray: VHE',
u'n-point function',
u'quark: valence',
u'temperature: high',
u'form factor: transition',
u'differential geometry',
u'Brans-Dicke model',
u'light cone',
u'leptogenesis',
u'top: decay',
u'nucleon: form factor',
u'scintillation counter',
u'holonomy',
u'photomultiplier',
u'defect: topological',
u'standard model',
u'non-Gaussianity',
u'quantum chromodynamics: lattice',
u'quark: sea',
u'hadron hadron: interaction',
u'p-brane: 3',
u'black hole: mass',
u'quark: propagator',
u'data acquisition',
u'photon photon: interaction',
u'W: leptonic decay',
u'black hole: rotation',
u'black hole: binary',
u'lattice field theory: action',
u'KAMIOKANDE',
u'total cross section',
u'n: matter',
u'Higgs particle: doublet',
u'gravitational radiation: emission',
u'gravitation: fundamental constant',
u'black hole: Kerr',
u'mass dependence',
u'spectral',
u'redshift',
u'DGLAP equation',
u'flow',
u'transverse momentum',
u'entanglement',
u'dark matter: relic density',
u'p nucleus: nuclear reaction',
u'galaxy: cluster',
u'orientifold',
u'electroweak interaction: validity test',
u'decay: form factor',
u'sigma model: linear',
u'magnetic field: external field',
u'supersymmetry: superfield',
u'space-time: warped',
u'quark: condensation',
u'superspace',
u'lepton: mass',
u'hierarchy',
u'AGN',
u'gaugino: mass',
u'axiomatic field theory',
u'lepton: flavor: violation',
u'WKB approximation',
u'pi: form factor',
u'Einstein-Maxwell equation',
u'cross section: energy dependence',
u'B: radiative decay',
u'generalized parton distribution',
u'conformal',
u'nonrelativistic',
u'hadron: hadroproduction',
u'coherent state',
u'IceCube',
u'Yang-Mills',
u'plasma',
u'photon: direct production',
u'relativity theory',
u'energy: Casimir',
u'symmetry: SU(2)',
u'Maxwell equation',
u'statistical mechanics',
u'color glass condensate',
u'potential: Higgs',
u'particle: relativistic',
u'temperature: Hawking',
u'Dalitz plot',
u'self-duality',
u'SU(2)',
u'ultraviolet',
u'particle: multiplet',
u'BES',
u'vacuum polarization',
u'field theory',
u'acceleration',
u'quantum cosmology',
u'coupling: minimal',
u'localization',
u'model: parton',
u'Bethe ansatz',
u'CLEO',
u'symmetry: axial',
u'ZEUS',
u'interference',
u'neutrino: particle source',
u'measurement methods',
u'slow-roll approximation',
u'parton: showers',
u'impact parameter',
u'cosmic radiation: acceleration',
u'fragmentation function',
u'black hole: thermodynamics',
u'vector: Killing',
u'pole',
u'attractor',
u'matter: density',
u'transformation: conformal',
u'decay: inclusive reaction',
u'black hole: Reissner-Nordstroem',
u'signal processing',
u'exchange: two-photon',
u'nucleus',
u'gravitational radiation detector',
u'mass: gap',
u'supersymmetry: superspace',
u'fundamental constant: fine structure',
u'holomorphic',
u'Cherenkov counter: water',
u'quantum chromodynamics: factorization',
u'photon',
u'lepton nucleon: deep inelastic scattering',
u'fundamental constant: time dependence',
u'meson: mass',
u'monopole',
u'finite size: effect',
u'pi',
u'electron p: inclusive reaction',
u'potential: Coulomb',
u'gamma ray: flux',
u'parity',
u'gravitation: action',
u'supersymmetry: 1',
u'saturation',
u'polarization: asymmetry',
u'Z0: leptonic decay',
u'condensation',
u'helicity: amplitude analysis',
u'B: rare decay',
u'algebra: Hopf',
u'twistor',
u'quark: confinement',
u'neutrino: detector',
u'color: dipole',
u'cross section: annihilation',
u'velocity',
u'B0: branching ratio',
u'vector dominance',
u'top: mass',
u'fermion: Dirac',
u'gluon: momentum spectrum',
u'dynamical symmetry breaking',
u'KEK TRISTAN Stor',
u'velocity: acoustic',
u'symmetry: mirror',
u'geometry: noncommutative',
u'nuclear physics',
u'vector meson',
u'LIGO',
u'Boltzmann equation',
u'scale: TeV',
u'lower limit',
u'hard scattering',
u'fluctuation: quantum',
u'Higgs particle: width',
u'Phi(1020)',
u'bag model',
u'transformation: Becchi-Rouet-Stora',
u'chaos',
u'W: hadroproduction',
u'isospin',
u'Ising model',
u'model: statistical',
u'neutrino: Dirac',
u'effect: nonperturbative',
u'BPS',
u'density: finite',
u'coupling: Higgs',
u'critical phenomena: confinement',
u'gravitation: lens',
u'electron positron: linear collider',
u'H1',
u'spin: chain',
u'hadron: electroproduction',
u'black hole: BTZ',
u'coupling: nonminimal',
u'symmetry: left-right',
u'asymptotic expansion',
u'family: 4',
u'photon: velocity',
u'wormhole',
u'B0 anti-B0: mixing',
u'curvature: perturbation',
u'topological',
u'meson',
u'gravitation: collapse',
u'asymmetry',
u'rho(770)',
u'diffusion',
u'charge: density',
u'strong interaction',
u'effect: Casimir',
u'uncertainty relations',
u'baryogenesis',
u'entropy: density',
u'SU(3)',
u'p p: inelastic scattering',
u'photon p: interaction',
u'critical phenomena: chiral',
u'Z0: hadronic decay',
u'gravitation: scalar tensor',
u'track data analysis',
u'supergravity: minimal',
u'event shape analysis',
u'Landau-Ginzburg model',
u'field theory: Liouville',
u'weak coupling',
u'space: anti-de Sitter',
u'laser',
u'magnetic field: high',
u'triangulation',
u'category',
u'Higgs particle: production',
u'superfield',
u'string model: boson',
u'diffeomorphism',
u'propagator: renormalization',
u'gamma ray',
u'phantom',
u'loop space',
u'gauge field theory: nonabelian',
u'technicolor',
u'condensation: Bose-Einstein',
u'Kaluza-Klein',
u'spatial resolution',
u'squark: mass',
u'Higgs particle: electroproduction',
u'electric field',
u'Cornell CESR Stor',
u'n-point function: 3',
u'tachyon: condensation',
u'semiclassical',
u'tau: pair production',
u'spin: correlation',
u'orbit',
u'gas: Chaplygin',
u'laser: interferometer',
u'algebra: Clifford',
u'neutrino: flavor',
u'superfluid',
u'spin: foam',
u'nucleon nucleon: interaction',
u'blazar',
u'plane wave',
u'mediation: gauge',
u'interpretation of experiments: Brookhaven RHIC Coll',
u'n-point function: 4',
u'decay: exclusive reaction',
u'higher-order',
u'neutrino: beam',
u'nucleus nucleus: nuclear reaction',
u'duality: holography',
u'Higgs particle: hadronic decay',
u'channel cross section: mass',
u'thermodynamical',
u'sine-Gordon model',
u'history',
u'neutralino: mass',
u'WMAP',
u'photon p: exclusive reaction',
u'calorimeter: electromagnetic',
u'space-time: singularity',
u'symmetry: Z(2)',
u'coherence',
u'Seiberg-Witten model',
u'jet: electroproduction',
u'Higgs particle: associated production',
u'moment',
u'bremsstrahlung',
u'gamma ray: energy spectrum',
u'pp-wave',
u'jet',
u'multiple scattering',
u'oscillator: harmonic',
u'information theory',
u'radiation: damage',
u'neutrino: energy spectrum',
u'light cone gauge',
u'monodromy',
u'fireball',
u'multiple production',
u'sphere',
u'deuteron',
u'matrix model: random',
u'p: decay',
u'oscillation',
u'jet: associated production',
u'K*(892)',
u'hadronization',
u'radiation: Cherenkov',
u'space-time: perturbation',
u'moduli',
u'shock waves',
u'unified field theory',
u'semiconductor detector: pixel',
u'color: superconductivity',
u'p-brane: 5',
u'four-fermion interaction',
u'neutrino: energy',
u'dynamical system',
u'jet: multiplicity',
u'freeze-out',
u'x-dependence',
u'gluon: condensation',
u'Cherenkov counter',
u'Wheeler-DeWitt equation',
u'Skyrme model',
u'time reversal: violation',
u'embedding',
u'mass',
u'coupled channel',
u'quenching',
u'vacuum state: stability',
u'temperature: low',
u'R parity',
u'sparticle: mass spectrum',
u'pomeron',
u'dyon',
u'condensed matter',
u'space: Kaehler',
u'strangeness',
u'symplectic',
u'pomeron: exchange',
u'quark: momentum spectrum',
u'pi: hadroproduction',
u'mixing angle',
u'leading logarithm approximation',
u'fermion: chiral',
u'space-time: expansion',
u'U(1)',
u'Fermilab',
u'GZK effect',
u'gravitation: Horava-Lifshitz',
u'charged particle',
u'cross section: ratio',
u'hadron',
u'pi: decay constant',
u'scintillation counter: liquid',
u'quark model',
u'nucleus nucleus: colliding beams',
u'neutrino: mass generation',
u'resummation',
u'gluon: exchange',
u'pseudoscalar meson',
u'Z0: electroproduction',
u'Compton scattering',
u'programming: Monte Carlo',
u'quantum cosmology: loop space',
u'neutralino: dark matter',
u'perturbation: linear',
u'dark matter: halo',
u'quarkonium: heavy',
u'renormalization group: transformation',
u'muon: cosmic radiation',
u'photon: polarization',
u'Jefferson Lab',
u'W: mass',
u'bottomonium',
u'perturbation: tensor',
u'multiplicity',
u'statistical',
u'p-brane',
u'new interaction',
u'baryon: production',
u'neutralino: annihilation',
u'neutrino nucleon: interaction',
u'Z0: hadroproduction',
u'nucleosynthesis: big bang',
u'K',
u'lepton: production',
u'calorimeter',
u'rotation',
u'parton: momentum spectrum',
u'J/psi(3100): hadroproduction',
u'Dirac equation: solution',
u'Z0: associated production',
u'operator: higher-dimensional',
u'observatory',
u'approximation: strong coupling',
u'density',
u'helium: nuclide',
u'dependence: gauge',
u'PAMELA',
u'parameter space',
u'density: fluctuation',
u'bosonization',
u'invariance: Becchi-Rouet-Stora',
u'decoherence',
u'Calabi-Yau',
u'neutrino/mu',
u'B: decay modes',
u'Coulomb gauge',
u'Delta(1232)',
u'color: singlet',
u'copper',
u'gravitation: coupling',
u'quantum chromodynamics: nonperturbative',
u'helium',
u'renormalization group: beta function',
u'fermion: staggered',
u'Beijing Stor',
u'positron: cosmic radiation',
u'duality: transformation',
u'accelerator',
u'model: minimal',
u'fermion: propagator',
u'color: octet',
u'density matrix',
u'quasiparticle',
u'current: conservation law',
u'oscillator',
u'power spectrum: scalar',
u'muon: radiative decay',
u'transfer matrix',
u'nucleon: mass',
u'Hamilton-Jacobi equation',
u'quiver',
u'supersymmetry: transformation',
u'trajectory',
u'mass: transverse',
u'X-ray',
u'LISA',
]
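# The top-10,000 list below appears to repeat the top-1,000 entries (in a
# slightly different order) before extending them, so a consumer that needs a
# single lookup table can presumably use TOP_10000_KEYWORDS alone. A minimal
# usage sketch (the set constant and helper are hypothetical, not part of
# this module): for repeated membership tests, prefer a frozenset over the
# raw list, since list membership is O(n) while set lookup is O(1) on
# average, e.g.
#
#     TOP_1000_KEYWORD_SET = frozenset(TOP_1000_KEYWORDS)  # hypothetical
#
#     def is_top_keyword(term):  # hypothetical helper
#         return term in TOP_1000_KEYWORD_SET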
TOP_10000_KEYWORDS = [
u"numerical calculations",
u"supersymmetry",
u"CERN LHC Coll",
u"quantum chromodynamics",
u"experimental results",
u"field theory: scalar",
u"bibliography",
u"numerical calculations: interpretation of experiments",
u"numerical calculations: Monte Carlo",
u"critical phenomena",
u"string model",
u"field theory: conformal",
u"electron positron: annihilation",
u"membrane model",
u"CP: violation",
u"boundary condition",
u"dimension: 2",
u"lattice field theory",
u"cosmological model",
u"duality",
u"dimension: 5",
u"inflation",
u"perturbation theory: higher-order",
u"supergravity",
u"dimension: 3",
u"dark matter",
u"gauge field theory: Yang-Mills",
u"field equations: solution",
u"Higgs particle: mass",
u"heavy ion: scattering",
u"neutrino: oscillation",
u"neutrino: mass",
u"gravitation",
u"effective action",
u"background",
u"renormalization",
u"effective Lagrangian",
u"black hole",
u"space-time: anti-de Sitter",
u"p p: inclusive reaction",
u"cosmological constant",
u"correlation function",
u"scattering: heavy ion",
u"stability",
u"Hamiltonian formalism",
u"finite temperature",
u"quantum chromodynamics: perturbation theory",
u"Batavia TEVATRON Coll",
u"energy: density",
u"holography",
u"gauge field theory",
u"space-time",
u"new physics",
u"dimension: 4",
u"supersymmetry: symmetry breaking",
u"Chern-Simons term",
u"Brookhaven RHIC Coll",
u"renormalization group",
u"thermodynamics",
u"quark gluon: plasma",
u"general relativity",
u"quantum gravity",
u"neutrino: mixing angle",
u"tensor: energy-momentum",
u"gold",
u"scaling",
u"partition function",
u"quantization",
u"dark energy",
u"quark: mass",
u"potential: chemical",
u"p p: scattering",
u"Feynman graph",
u"analytic properties",
u"symmetry: chiral",
u"cosmic background radiation",
u"spontaneous symmetry breaking",
u"p p: interaction",
u"AdS/CFT correspondence",
u"invariance: gauge",
u"ATLAS",
u"coupling: Yukawa",
u"minimal supersymmetric standard model",
u"electron positron: colliding beams",
u"CMS",
u"Feynman graph: higher-order",
u"vacuum state",
u"gravitational radiation",
u"entropy",
u"perturbation theory",
u"equation of state",
u"performance",
u"gauge field theory: SU(3)",
u"data analysis method",
u"fermion: lattice field theory",
u"quantum electrodynamics",
u"supernova",
u"quantum mechanics",
u"unitarity",
u"phase space",
u"field equations",
u"expansion 1/N",
u"lead",
u"perturbation theory: chiral",
u"gauge field theory: SU(2)",
u"differential cross section",
u"higher-order: 1",
u"channel cross section",
u"compactification",
u"gauge field theory: SU(N)",
u"moduli space",
u"factorization",
u"time dependence",
u"lattice",
u"effective potential",
u"Kaluza-Klein model",
u"Wilson loop",
u"fluctuation",
u"thesis",
u"hydrodynamics",
u"sensitivity",
u"any-dimensional",
u"gauge field theory: U(1)",
u"space-time: higher-dimensional",
u"anomaly",
u"instanton",
u"potential",
u"space-time: Robertson-Walker",
u"channel cross section: energy dependence",
u"mass spectrum",
u"integrability",
u"M-theory",
u"electroweak interaction",
u"review",
u"singularity",
u"energy dependence",
u"statistical analysis",
u"magnetic field",
u"programming",
u"nonperturbative",
u"temperature",
u"scattering amplitude",
u"neutrino: cosmic radiation",
u"expansion: acceleration",
u"path integral",
u"horizon",
u"space-time: de Sitter",
u"galaxy",
u"dispersion relation",
u"geometry",
u"higher-dimensional",
u"B: hadronic decay",
u"neutrino: solar",
u"propagator",
u"electromagnetic field",
u"top: pair production",
u"regularization",
u"angular distribution",
u"Randall-Sundrum model",
u"interpretation of experiments",
u"operator: algebra",
u"matrix model",
u"dimensional reduction",
u"tree approximation",
u"differential geometry: noncommutative",
u"symmetry breaking",
u"kinematics",
u"symmetry: rotation",
u"transverse momentum: spectrum",
u"operator product expansion",
u"background field",
u"Monte Carlo",
u"dimension: 6",
u"anti-p p: inclusive reaction",
u"cross section",
u"effective field theory",
u"CDF",
u"talk",
u"neutral current: flavor changing",
u"violation: Lorentz",
u"D-brane",
u"parton: distribution function",
u"gravitation: model",
u"seesaw model",
u"continuum limit",
u"Einstein equation: solution",
u"asymptotic behavior",
u"dilaton",
u"heavy quark",
u"upper limit",
u"parametrization",
u"transverse momentum dependence",
u"group theory: representation",
u"energy",
u"angular distribution: asymmetry",
u"charged current",
u"Higgs particle: hadroproduction",
u"proposed experiment",
u"BaBar",
u"anti-de Sitter",
u"sigma model: nonlinear",
u"radiative correction",
u"dimension: 10",
u"orbifold",
u"amplitude analysis",
u"wave function",
u"algebra: Lie",
u"DESY HERA Stor",
u"neutrino: flux",
u"S-matrix",
u"supersymmetry: 4",
u"angular momentum",
u"electron p: deep inelastic scattering",
u"quark: matter",
u"confinement",
u"differential forms",
u"neutrino: right-handed",
u"Hamiltonian",
u"neutrino: Majorana",
u"Higgs model",
u"cosmic radiation: particle source",
u"flavor: 2",
u"cohomology",
u"correlation",
u"lepton: pair production",
u"strong coupling",
u"mass: hierarchy",
u"polarization",
u"B: branching ratio",
u"fibre bundle",
u"transport theory",
u"spectral representation",
u"radiation: Hawking",
u"deformation",
u"expansion 1/N: color",
u"BELLE",
u"LHC-B",
u"B: pair production",
u"fluid",
u"superpotential",
u"correction: higher-order",
u"electroweak interaction: symmetry breaking",
u"dark matter: annihilation",
u"flavor: violation",
u"operator: Dirac",
u"efficiency",
u"higher-order: 2",
u"ghost",
u"symmetry breaking: chiral",
u"spin: asymmetry",
u"bottom",
u"string: open",
u"cosmic radiation: flux",
u"flavor: 3",
u"Born-Infeld model",
u"gamma ray: burst",
u"cosmic radiation: energy spectrum",
u"rapidity spectrum",
u"Higgs particle: multiplet",
u"invariance: Lorentz",
u"approximation: quenching",
u"pressure",
u"Einstein equation",
u"effective Hamiltonian",
u"fermion: mass",
u"quark: pair production",
u"temperature dependence",
u"muon: magnetic moment",
u"CERN LEP Stor",
u"CERN SPS",
u"soliton",
u"quintessence",
u"jet: pair production",
u"charge: topological",
u"Drell-Yan process",
u"neutrino: mass difference",
u"spin",
u"hadron: multiple production",
u"grand unified theory",
u"quantum chromodynamics: correction",
u"black hole: entropy",
u"B: semileptonic decay",
u"two-point function",
u"constraint",
u"conservation law",
u"quantum chromodynamics: sum rule",
u"power spectrum",
u"field theory: action",
u"supersymmetry: 2",
u"causality",
u"space: Calabi-Yau",
u"diffraction",
u"algebra: representation",
u"domain wall",
u"Dirac equation",
u"coupling: gauge",
u"form factor",
u"topology",
u"bound state",
u"transverse momentum: momentum spectrum",
u"Jona-Lasinio-Nambu model",
u"mathematical methods",
u"nuclear matter",
u"SLAC PEP Stor",
u"top",
u"impact parameter: dependence",
u"photon: cosmic radiation",
u"R parity: violation",
u"threshold",
u"charmonium",
u"derivative: high",
u"transformation: gauge",
u"black hole: Schwarzschild",
u"viscosity",
u"tables",
u"gravitation: duality",
u"hadron spectroscopy",
u"Dyson-Schwinger equation",
u"central charge",
u"tachyon",
u"mean field approximation",
u"DZERO",
u"ground state",
u"Higgs particle: coupling",
u"strong interaction: coupling constant",
u"Hilbert space",
u"neutrino: sterile",
u"invariance: conformal",
u"final-state interaction",
u"noise",
u"Upsilon(10570): electroproduction",
u"p: structure function",
u"black hole: horizon",
u"spinor",
u"anti-p p: interaction",
u"string model: heterotic",
u"zero mode",
u"scale: Planck",
u"approximation: semiclassical",
u"ALICE",
u"STAR",
u"neutron star",
u"fermion",
u"rapidity dependence",
u"geometry: algebra",
u"commutation relations",
u"Higgs particle: decay",
u"coset space",
u"quark",
u"Friedman model",
u"energy spectrum",
u"excited state",
u"parity: violation",
u"coupling constant",
u"symmetry: SU(3)",
u"symmetry: gauge",
u"phi**n model: 4",
u"Higgs particle",
u"scattering",
u"sum rule",
u"Schroedinger equation",
u"baryon: asymmetry",
u"CKM matrix",
u"flux",
u"GLAST",
u"Ward identity",
u"neutral current",
u"torsion",
u"Goldstone particle",
u"elliptic flow",
u"electronics: readout",
u"field theory: topological",
u"curvature",
u"supersymmetry: algebra",
u"dimension: 11",
u"field theory: tensor",
u"spin: high",
u"infrared problem",
u"sparticle: mass",
u"condensation: chiral",
u"Regge poles",
u"infrared",
u"many-body problem",
u"Hubble constant",
u"field theory: vector",
u"electron: pair production",
u"Higgs particle: decay modes",
u"anti-p p: colliding beams",
u"pulsar",
u"potential: scalar",
u"pi: pair production",
u"polarization: transverse",
u"pi: mass",
u"back reaction",
u"ILC Coll",
u"Gauss-Bonnet term",
u"gluon gluon: fusion",
u"cosmic radiation",
u"small-x",
u"gluon: propagator",
u"gamma ray: emission",
u"Polyakov loop",
u"momentum transfer dependence",
u"helicity",
u"symmetry: U(1)",
u"photon: emission",
u"carbon",
u"energy loss",
u"covariance",
u"charm",
u"superconductivity",
u"string: closed",
u"tunneling",
u"anti-p p: annihilation",
u"universality",
u"anisotropy",
u"vacuum state: energy",
u"partial wave",
u"nucleon: structure function",
u"p p: colliding beams",
u"effective Lagrangian: chiral",
u"gauge field theory: U(N)",
u"statistics",
u"soliton: BPS",
u"superstring",
u"luminosity",
u"dark matter: mass",
u"string",
u"quantum gravity: loop space",
u"transverse energy: missing-energy",
u"regularization: dimensional",
u"operator: vertex",
u"lepton number: violation",
u"symmetry: flavor",
u"vertex function",
u"baryon: density",
u"vortex",
u"jet: hadroproduction",
u"density: perturbation",
u"neutrino: mixing",
u"flavor",
u"Riemann surface",
u"gauge fixing",
u"dimension: 1",
u"T-duality",
u"bottom: pair production",
u"energy: high",
u"fixed point",
u"doublet: 2",
u"black hole: charge",
u"torus",
u"field theory: string",
u"free energy",
u"rapidity",
u"deep inelastic scattering",
u"activity report",
u"algebra",
u"angular dependence",
u"J/psi(3100)",
u"polarization: longitudinal",
u"trigger",
u"symmetry: discrete",
u"anomalous dimension",
u"CKM matrix: unitarity",
u"partial wave analysis",
u"charge: conservation law",
u"asymmetry: CP",
u"CERN Lab",
u"grand unified theory: SU(5)",
u"deconfinement",
u"calibration",
u"sigma model",
u"angular correlation",
u"space-time: Schwarzschild",
u"axion",
u"cosmic radiation: UHE",
u"quantum number",
u"muon: pair production",
u"matter: effect",
u"higher-order: 0",
u"cosmic string",
u"B0: hadronic decay",
u"fermion: flavor",
u"differential cross section: angular dependence",
u"binding energy",
u"entropy: entanglement",
u"data management",
u"twist",
u"Higgs particle: branching ratio",
u"Klein-Gordon equation",
u"photon: pair production",
u"chiral",
u"Landau gauge",
u"momentum spectrum: parton",
u"grand unified theory: SO(10)",
u"Lagrangian formalism",
u"cosmic radiation: primary",
u"photon: energy spectrum",
u"quantum chromodynamics: critical phenomena",
u"p: cosmic radiation",
u"suppression",
u"W: pair production",
u"field theory: deformation",
u"energy: low",
u"signature",
u"dark matter: direct detection",
u"cosmic background radiation: anisotropy",
u"quantum chromodynamics: nonrelativistic",
u"gauge field theory: SU(3) x SU(2) x U(1)",
u"space-time: Bianchi",
u"Bethe-Salpeter equation",
u"renormalization group: flow",
u"decoupling",
u"symmetry: conformal",
u"electron: polarized beam",
u"numerical methods",
u"black hole: anti-de Sitter",
u"nuclear matter: effect",
u"differential cross section: momentum transfer",
u"perturbation: scalar",
u"graviton",
u"energy levels",
u"charged particle: multiplicity",
u"differential equations",
u"transverse momentum: high",
u"jet: quenching",
u"lectures",
u"thermal",
u"effect: higher-twist",
u"algebra: Virasoro",
u"jet: multiple production",
u"non-Gaussianity",
u"gravitation: massive",
u"charge: electric",
u"beta function",
u"off-shell",
u"n-point function",
u"holonomy",
u"space-time: Minkowski",
u"gluon",
u"string tension",
u"Higgs particle: charged particle",
u"gamma ray: VHE",
u"temperature: high",
u"magnetic monopole",
u"top: decay",
u"Fock space",
u"dissipation",
u"Wess-Zumino-Witten model",
u"perturbation",
u"differential geometry",
u"surface",
u"astrophysics",
u"form factor: transition",
u"color",
u"potential: Kaehler",
u"branching ratio",
u"space-time: noncommutative",
u"scintillation counter",
u"entanglement",
u"nonlinear",
u"gravitation: fundamental constant",
u"charge: asymmetry",
u"neutrino: atmosphere",
u"light cone",
u"gravitation: f(R)",
u"leptogenesis",
u"Brans-Dicke model",
u"mixing",
u"geodesic",
u"nucleon: form factor",
u"black hole: rotation",
u"W: leptonic decay",
u"correction: quantum",
u"standard model",
u"dark matter: density",
u"energy resolution",
u"defect: topological",
u"yield: ratio",
u"photon photon: interaction",
u"magnetic field: external field",
u"lattice field theory: action",
u"Higgs particle: doublet",
u"photomultiplier",
u"PHENIX",
u"KAMIOKANDE",
u"supersymmetry: superfield",
u"quantum chromodynamics: lattice",
u"quark: sea",
u"n: matter",
u"redshift",
u"black hole: mass",
u"galaxy: cluster",
u"dark matter: relic density",
u"p-brane: 3",
u"quark: propagator",
u"quark: valence",
u"Einstein-Maxwell equation",
u"AGN",
u"flow",
u"spectral",
u"quark: condensation",
u"sigma model: linear",
u"hadron hadron: interaction",
u"lepton: flavor: violation",
u"gravitational radiation: emission",
u"data acquisition",
u"DGLAP equation",
u"cross section: energy dependence",
u"p nucleus: nuclear reaction",
u"symmetry: SU(2)",
u"mass dependence",
u"lepton: mass",
u"total cross section",
u"conformal",
u"axiomatic field theory",
u"electroweak interaction: validity test",
u"black hole: binary",
u"superspace",
u"Dalitz plot",
u"space-time: warped",
u"IceCube",
u"SU(2)",
u"hierarchy",
u"black hole: Kerr",
u"orientifold",
u"WKB approximation",
u"pi: form factor",
u"relativity theory",
u"color glass condensate",
u"nonrelativistic",
u"gaugino: mass",
u"supersymmetry: superspace",
u"transverse momentum",
u"plasma",
u"decay: form factor",
u"generalized parton distribution",
u"photon: direct production",
u"acceleration",
u"potential: Higgs",
u"particle: multiplet",
u"BES",
u"parton: showers",
u"self-duality",
u"matter: density",
u"B: radiative decay",
u"ultraviolet",
u"model: parton",
u"temperature: Hawking",
u"Maxwell equation",
u"energy: Casimir",
u"coherent state",
u"attractor",
u"localization",
u"statistical mechanics",
u"particle: relativistic",
u"vector: Killing",
u"Cherenkov counter: water",
u"symmetry: axial",
u"quantum cosmology",
u"monopole",
u"exchange: two-photon",
u"cosmic radiation: acceleration",
u"nucleus",
u"Yang-Mills",
u"interference",
u"coupling: minimal",
u"field theory",
u"neutrino: particle source",
u"transformation: conformal",
u"fragmentation function",
u"photon",
u"fundamental constant: time dependence",
u"gravitational radiation detector",
u"potential: Coulomb",
u"decay: inclusive reaction",
u"impact parameter",
u"slow-roll approximation",
u"hadron: hadroproduction",
u"parity",
u"color: dipole",
u"twistor",
u"CLEO",
u"black hole: thermodynamics",
u"pole",
u"vacuum polarization",
u"saturation",
u"quark: confinement",
u"gravitation: action",
u"measurement methods",
u"ZEUS",
u"Bethe ansatz",
u"lepton nucleon: deep inelastic scattering",
u"Phi(1020)",
u"fundamental constant: fine structure",
u"holomorphic",
u"quantum chromodynamics: factorization",
u"nuclear physics",
u"signal processing",
u"supersymmetry: 1",
u"condensation",
u"vector dominance",
u"meson: mass",
u"algebra: Hopf",
u"mass: gap",
u"fermion: Dirac",
u"symmetry: mirror",
u"finite size: effect",
u"polarization: asymmetry",
u"chaos",
u"helicity: amplitude analysis",
u"velocity",
u"electron p: inclusive reaction",
u"dynamical symmetry breaking",
u"pi",
u"neutrino: detector",
u"Z0: leptonic decay",
u"vector meson",
u"density: finite",
u"B: rare decay",
u"lower limit",
u"top: mass",
u"Ising model",
u"isospin",
u"black hole: Reissner-Nordstroem",
u"model: statistical",
u"Boltzmann equation",
u"gravitation: lens",
u"scale: TeV",
u"hadron: electroproduction",
u"bag model",
u"transformation: Becchi-Rouet-Stora",
u"KEK TRISTAN Stor",
u"B0: branching ratio",
u"B0 anti-B0: mixing",
u"cross section: annihilation",
u"hard scattering",
u"gamma ray: flux",
u"W: hadroproduction",
u"electron positron: linear collider",
u"effect: nonperturbative",
u"H1",
u"coupling: Higgs",
u"fluctuation: quantum",
u"neutrino: Dirac",
u"coupling: nonminimal",
u"weak coupling",
u"BPS",
u"gluon: momentum spectrum",
u"critical phenomena: confinement",
u"Higgs particle: width",
u"topological",
u"asymptotic expansion",
u"LIGO",
u"effect: Casimir",
u"velocity: acoustic",
u"black hole: BTZ",
u"space: anti-de Sitter",
u"family: 4",
u"baryogenesis",
u"photon: velocity",
u"field theory: Liouville",
u"laser",
u"photon p: interaction",
u"curvature: perturbation",
u"meson",
u"string model: boson",
u"propagator: renormalization",
u"wormhole",
u"entropy: density",
u"geometry: noncommutative",
u"SU(3)",
u"diffusion",
u"laser: interferometer",
u"track data analysis",
u"strong interaction",
u"superfield",
u"space-time: singularity",
u"critical phenomena: chiral",
u"asymmetry",
u"Z0: hadronic decay",
u"tachyon: condensation",
u"gravitation: scalar tensor",
u"symmetry: left-right",
u"mediation: gauge",
u"spin: chain",
u"higher-order",
u"p p: inelastic scattering",
u"Kaluza-Klein",
u"oscillation",
u"supergravity: minimal",
u"charge: density",
u"Landau-Ginzburg model",
u"event shape analysis",
u"plane wave",
u"radiation: damage",
u"gamma ray",
u"gravitation: collapse",
u"WMAP",
u"rho(770)",
u"history",
u"Higgs particle: production",
u"uncertainty relations",
u"loop space",
u"nucleon nucleon: interaction",
u"duality: holography",
u"diffeomorphism",
u"bremsstrahlung",
u"Higgs particle: electroproduction",
u"algebra: Clifford",
u"p: decay",
u"magnetic field: high",
u"condensation: Bose-Einstein",
u"blazar",
u"gauge field theory: nonabelian",
u"spatial resolution",
u"symmetry: Z(2)",
u"calorimeter: electromagnetic",
u"jet",
u"gas: Chaplygin",
u"tau: pair production",
u"thermodynamical",
u"orbit",
u"triangulation",
u"neutrino: beam",
u"pp-wave",
u"technicolor",
u"squark: mass",
u"n-point function: 4",
u"electric field",
u"neutralino: mass",
u"phantom",
u"moment",
u"decay: exclusive reaction",
u"Higgs particle: hadronic decay",
u"n-point function: 3",
u"Cornell CESR Stor",
u"channel cross section: mass",
u"nucleus nucleus: nuclear reaction",
u"fireball",
u"mass",
u"gamma ray: energy spectrum",
u"coherence",
u"superfluid",
u"monodromy",
u"neutrino: energy spectrum",
u"matrix model: random",
u"unified field theory",
u"Higgs particle: associated production",
u"sine-Gordon model",
u"jet: electroproduction",
u"oscillator: harmonic",
u"space-time: expansion",
u"shock waves",
u"photon p: exclusive reaction",
u"p-brane: 5",
u"jet: associated production",
u"space-time: perturbation",
u"semiclassical",
u"neutrino: energy",
u"light cone gauge",
u"spin: foam",
u"Seiberg-Witten model",
u"Fermilab",
u"gluon: condensation",
u"freeze-out",
u"hadron",
u"interpretation of experiments: Brookhaven RHIC Coll",
u"multiple production",
u"multiple scattering",
u"sphere",
u"embedding",
u"resummation",
u"vacuum state: stability",
u"spin: correlation",
u"K*(892)",
u"pi: hadroproduction",
u"temperature: low",
u"semiconductor detector: pixel",
u"category",
u"color: superconductivity",
u"jet: multiplicity",
u"strangeness",
u"neutrino: flavor",
u"pomeron",
u"charged particle",
u"mixing angle",
u"Wheeler-DeWitt equation",
u"four-fermion interaction",
u"Skyrme model",
u"quenching",
u"dynamical system",
u"leading logarithm approximation",
u"hadronization",
u"information theory",
u"GZK effect",
u"bottomonium",
u"R parity",
u"cross section: ratio",
u"condensed matter",
u"nucleus nucleus: colliding beams",
u"dyon",
u"space: Kaehler",
u"x-dependence",
u"neutrino: mass generation",
u"moduli",
u"radiation: Cherenkov",
u"nucleosynthesis: big bang",
u"gravitation: Horava-Lifshitz",
u"renormalization group: transformation",
u"Cherenkov counter",
u"pseudoscalar meson",
u"symplectic",
u"time reversal: violation",
u"deuteron",
u"calorimeter",
u"sparticle: mass spectrum",
u"neutrino nucleon: interaction",
u"perturbation: tensor",
u"scintillation counter: liquid",
u"dependence: gauge",
u"observatory",
u"helium: nuclide",
u"coupled channel",
u"rotation",
u"dark matter: halo",
u"pi: decay constant",
u"approximation: strong coupling",
u"statistical",
u"Jefferson Lab",
u"accelerator",
u"multiplicity",
u"quark model",
u"Dirac equation: solution",
u"operator: higher-dimensional",
u"B: decay modes",
u"quantum cosmology: loop space",
u"transfer matrix",
u"fermion: chiral",
u"quarkonium: heavy",
u"neutralino: dark matter",
u"programming: Monte Carlo",
u"new interaction",
u"Compton scattering",
u"lepton: production",
u"bosonization",
u"K",
u"power spectrum: scalar",
u"neutrino/mu",
u"nucleon: mass",
u"perturbation: linear",
u"parameter space",
u"photon: polarization",
u"U(1)",
u"Beijing Stor",
u"W: mass",
u"muon: cosmic radiation",
u"decoherence",
u"density",
u"quiver",
u"gluon: exchange",
u"flux tube",
u"Z0: hadroproduction",
u"Z0: electroproduction",
u"density: fluctuation",
u"neutralino: annihilation",
u"parton: momentum spectrum",
u"meson: exchange",
u"pomeron: exchange",
u"quark: momentum spectrum",
u"color: singlet",
u"Z0: associated production",
u"Coulomb gauge",
u"fermion: staggered",
u"p-brane",
u"trajectory",
u"J/psi(3100): hadroproduction",
u"coupling constant: energy dependence",
u"color: octet",
u"nonlocal",
u"Delta(1232)",
u"invariance: Becchi-Rouet-Stora",
u"monitoring",
u"density matrix",
u"Calabi-Yau",
u"p nucleus: inclusive reaction",
u"baryon: production",
u"PAMELA",
u"B: width",
u"omega(783)",
u"duality: transformation",
u"nonabelian",
u"Wess-Zumino term",
u"channel cross section: ratio",
u"model: minimal",
u"current: conservation law",
u"helium",
u"gravitation: coupling",
u"copper",
u"F-theory",
u"muon: radiative decay",
u"renormalization group: beta function",
u"neutrino: mass: hierarchy",
u"positron: cosmic radiation",
u"family: 3",
u"supersymmetry: transformation",
u"fermion: propagator",
u"X-ray",
u"LISA",
u"pentaquark",
u"binary",
u"membrane: interaction",
u"quark: flavor",
u"spin: 1/2",
u"resolution",
u"neutrino: production",
u"symmetry breaking: SU(3)",
u"quasiparticle",
u"Higgs particle: pair production",
u"structure",
u"KEK-B",
u"electron: energy spectrum",
u"mass: transverse",
u"gauge field theory: quiver",
u"quantum chromodynamics: nonperturbative",
u"scale dependence",
u"cosmic radiation: energy",
u"oscillator",
u"relativistic",
u"Hamilton-Jacobi equation",
u"D*(2010)",
u"spinor: Killing",
u"light nucleus: production",
u"neutralino",
u"gluino: mass",
u"parton",
u"Higgs particle: leptonic decay",
u"electron: beam",
u"polarized target",
u"potential: static",
u"glueball",
u"symmetry: O(N)",
u"eta(958)",
u"tracking detector",
u"J-PARC Lab",
u"quark: flavor: 3",
u"parton: energy loss",
u"electroweak interaction: correction",
u"Hanbury-Brown-Twiss effect",
u"momentum dependence",
u"dependence: mass number",
u"knot theory",
u"tau: semileptonic decay",
u"WIMP: dark matter",
u"WIMP: mass",
u"Higgs mechanism",
u"crystal",
u"mu-problem",
u"photon: associated production",
u"temperature: 0",
u"Lipatov equation",
u"B-factory",
u"time resolution",
u"field theory: Euclidean",
u"minisuperspace",
u"evolution equation",
u"photon: yield",
u"inflaton",
u"new physics: search for",
u"pair production",
u"diffeomorphism: invariance",
u"ABJM model",
u"matter: hadronic",
u"WIMP",
u"Cherenkov counter: imaging",
u"electron: cosmic radiation",
u"cavity",
u"anomaly: conformal",
u"BFKL equation",
u"gravitation: potential",
u"Z': mass",
u"D0: hadronic decay",
u"jet: bottom",
u"electromagnetic",
u"hidden sector",
u"fermion: Majorana",
u"total cross section: energy dependence",
u"gap",
u"algebra: conformal",
u"transverse momentum: missing-energy",
u"collective phenomena",
u"mechanics: classical",
u"error",
u"quark: polarization",
u"Born approximation",
u"gas",
u"computer",
u"mass: topological",
u"R symmetry",
u"inflaton: potential",
u"homology",
u"B+: branching ratio",
u"Gran Sasso",
u"experimental methods",
u"flux: magnetic",
u"equivalence principle",
u"jet: transverse momentum",
u"supercharge",
u"correction: nonperturbative",
u"sphere: fuzzy",
u"spin: 2",
u"weak interaction",
u"matter: coupling",
u"massless",
u"inflation: hybrid",
u"synchrotron radiation",
u"Weinberg angle",
u"particle identification",
u"leading logarithm approximation: higher-order",
u"mirror",
u"structure function",
u"group theory",
u"rapidity: gap",
u"isospin: violation",
u"gamma ray: cosmic radiation",
u"algebra: deformation",
u"meson: hadron spectroscopy",
u"iron",
u"fermion: pair production",
u"deep underground detector",
u"quark: Wilson",
u"p p: elastic scattering",
u"gluon: radiation",
u"field theory: classical",
u"fixed point: infrared",
u"Compton scattering: off-shell",
u"B: decay",
u"final state: two-particle",
u"differential equations: solution",
u"cryogenics",
u"asymptotic freedom",
u"HESS",
u"conductivity",
u"black hole: formation",
u"gauge field theory: abelian",
u"noncommutative",
u"scaling: violation",
u"electron p: colliding beams",
u"J/psi(3100): leptonic decay",
u"kinetic",
u"phase shift",
u"field theory: anti-de Sitter",
u"compactification: flux",
u"cluster",
u"gauge field theory: supersymmetry",
u"Z'",
u"charge: magnetic",
u"mass spectrum: dilepton",
u"neutrino: interaction",
u"quark: chiral",
u"D0 anti-D0: mixing",
u"master equation",
u"quark: mass dependence",
u"scale: electroweak interaction",
u"p: form factor",
u"Higgs particle: radiative decay",
u"gravitino",
u"quark: constituent",
u"regularization: zeta function",
u"enhancement",
u"hadron: production",
u"particle: massive",
u"quark antiquark: potential",
u"expansion: derivative",
u"fermion: domain wall",
u"hyperfine structure",
u"lepton: mixing angle",
u"interpretation of experiments: DESY HERA Stor",
u"hidden symmetry",
u"BF model",
u"defect",
u"gluon: fusion",
u"Newport News CEBAF Linac",
u"photon: polarized beam",
u"index theorem",
u"pi: photoproduction",
u"positron p: colliding beams",
u"electron",
u"electromagnetic interaction",
u"mean field approximation: relativistic",
u"density: high",
u"neutrino: pair production",
u"baryon: octet",
u"field theory: noncommutative",
u"charge",
u"neutrino: heavy",
u"scattering amplitude: higher-order",
u"charged particle: multiple production",
u"B/s0: hadronic decay",
u"mass generation",
u"operator: differential",
u"quark antiquark: annihilation",
u"satellite",
u"approximation: eikonal",
u"spin: network",
u"scaling: dimension",
u"PYTHIA",
u"Grid computing",
u"gravitino: mass",
u"three-body problem",
u"ghost: propagator",
u"baryon: mass",
u"inflation: model",
u"black hole: evaporation",
u"magnetic moment",
u"gauge boson: mass",
u"landscape",
u"mass: top",
u"algebra: Kac-Moody",
u"validity test",
u"group: Lie",
u"current: vector",
u"quasinormal mode",
u"energy: threshold",
u"quark: staggered",
u"differential cross section: measured",
u"gauge field theory: SU(2) x U(1)",
u"Auger",
u"graphene",
u"pi0: hadroproduction",
u"form factor: electric",
u"quark: mixing angle",
u"electroweak interaction: model",
u"interferometer",
u"gauge field theory: Yang-Mills: supersymmetry",
u"Bianchi identity",
u"quasar",
u"correlation: two-particle",
u"black hole: production",
u"Batavia TEVATRON PS",
u"scaling: finite size",
u"pi0: radiative decay",
u"correlation: length",
u"f0(600)",
u"invariance: modular",
u"electron p: exclusive reaction",
u"S-duality",
u"lepton: asymmetry",
u"cosmic radiation: anisotropy",
u"SU(N)",
u"field strength",
u"susceptibility",
u"dispersion",
u"dark matter: interaction",
u"anomaly: chiral",
u"violation: CPT",
u"strong interaction: CP",
u"scalar meson",
u"field theory: massless",
u"Higgs model: abelian",
u"effect: quantum",
u"bootstrap",
u"field theory: spinor",
u"space: S(5)",
u"space: de Sitter",
u"bounce",
u"star",
u"electroweak interaction: standard model",
u"potential: confinement",
u"symmetry: Lorentz",
u"reheating",
u"Higgs particle: neutral particle",
u"cosmological constant: negative",
u"f0(980)",
u"tensor: Weyl",
u"mass: Majorana",
u"temperature: reheating",
u"Poisson bracket",
u"quaternion",
u"spin: 1",
u"n: electric moment",
u"approximation",
u"central region",
u"detector: design",
u"neural network",
u"jet: production",
u"Wess-Zumino model",
u"space-time: Kerr",
u"potential: optical",
u"mass difference",
u"multiplet",
u"dark matter: decay",
u"supersymmetry: multiplet",
u"effect: finite size",
u"quark: flavor: 2",
u"soft collinear effective theory",
u"costs",
u"multiplet: vector",
u"pi: electroproduction",
u"spin dependence",
u"solar system",
u"space-time: dimension",
u"nucleon: spin",
u"moduli: stability",
u"standard model: supersymmetry",
u"optical",
u"fermion: massless",
u"angular resolution",
u"HERMES",
u"matter: strangeness",
u"geophysics",
u"structure function: spin",
u"background: stochastic",
u"baryon number: violation",
u"mass: texture",
u"WIMP: annihilation",
u"OPERA",
u"force: Casimir",
u"form factor: magnetic",
u"current: correlation function",
u"kink",
u"quantum chromodynamics: vacuum state",
u"pseudoscalar meson: pair production",
u"Compton scattering: inverse",
u"rho(770)0",
u"nucleon resonance",
u"differential cross section: transverse momentum",
u"p p: exclusive reaction",
u"toy model",
u"sparticle: hadroproduction",
u"initial state",
u"fermion: mixing angle",
u"coupling: scalar",
u"atom",
u"dark energy: holography",
u"B/s0 anti-B/s0: mixing",
u"tadpole",
u"dipole",
u"integrated circuit",
u"current: axial-vector",
u"baryon",
u"neutrino: mass spectrum",
u"quantum group",
u"form factor: electromagnetic",
u"eta",
u"K: pair production",
u"Higgs particle: triplet",
u"modulation",
u"quantum chromodynamics: holography",
u"COMPASS",
u"electroweak interaction: critical phenomena",
u"Yang-Mills: supersymmetry",
u"time",
u"oxygen",
u"hadron hadron: inclusive reaction",
u"beam transport",
u"matter",
u"transformation: Lorentz",
u"particle identification: flavor",
u"neutralino: LSP",
u"meson: mass spectrum",
u"hadron: gas",
u"K: semileptonic decay",
u"functional analysis",
u"curvature: scalar",
u"photon photon: exclusive reaction",
u"dimension: 7",
u"dark energy: equation of state",
u"scattering length",
u"baryon: heavy",
u"time projection chamber",
u"fermion: Wilson",
u"gauge boson",
u"nuclear reaction: model",
u"finite size",
u"field theory: Dirac",
u"integral equations",
u"distribution function",
u"decay rate",
u"K: hadronic decay",
u"Dvali-Gabadadze-Porrati model",
u"acceptance",
u"K-theory",
u"nucleus: recoil",
u"Juelich COSY PS",
u"cross section: upper limit",
u"big bang",
u"critical phenomena: deconfinement",
u"MINOS",
u"ionization",
u"meson: wave function",
u"p: polarized beam",
u"fermion: zero mode",
u"simplex",
u"Schroedinger equation: solution",
u"MSW effect",
u"nucleon",
u"baryon number",
u"renormalizable",
u"variational",
u"dilepton: final state",
u"Einstein-Maxwell equation: solution",
u"photon: flux",
u"deceleration",
u"gravitation: effect",
u"superfield: chiral",
u"transverse momentum: factorization",
u"radiative correction: higher-order",
u"anomaly: axial",
u"higher-order: 3",
u"star: compact",
u"linear collider: proposed",
u"bubble",
u"light front",
u"model: Glauber",
u"resonance",
u"diquark",
u"fermion: clover",
u"charge: fractional",
u"approximation: weak field",
u"heat kernel",
u"muon: flux",
u"deeply virtual Compton scattering",
u"fermion: determinant",
u"model: oscillator",
u"electron positron: inelastic scattering",
u"channel cross section: branching ratio",
u"energy: ground state",
u"little Higgs model",
u"angular distribution: anisotropy",
u"Langevin equation",
u"K0 anti-K0: mixing",
u"transverse",
u"semiconductor detector",
u"generalized uncertainty principle",
u"radioactivity",
u"vector meson: mass",
u"interpretation of experiments: CERN SPS",
u"XENON",
u"energy: conservation law",
u"space: noncommutative",
u"mass spectrum: transverse",
u"Unruh effect",
u"algebra: Poincare",
u"space-time: geometry",
u"design",
u"electron p: elastic scattering",
u"Thesis",
u"semiconductor detector: microstrip",
u"gravitation: Lovelock",
u"radiation: initial-state interaction",
u"length: minimal",
u"VERITAS",
u"spin: dependence",
u"energy: flux",
u"control system",
u"satellite: Planck",
u"dark energy: density",
u"GEANT",
u"neutrino: magnetic moment",
u"p: beam",
u"Seiberg-Witten map",
u"decay: amplitude analysis",
u"conifold",
u"neutron star: binary",
u"Z0: pair production",
u"cosmic radiation: spectrum",
u"particle flow",
u"p",
u"deep inelastic scattering: semi-inclusive reaction",
u"galaxy: halo",
u"model: chiral",
u"tau: branching ratio",
u"Gribov problem",
u"lepton",
u"VIRGO",
u"slope",
u"external field",
u"multiplicity: spectrum",
u"string model: topological",
u"photon p: inelastic scattering",
u"black hole: stability",
u"superconductivity: holography",
u"trace anomaly",
u"W: associated production",
u"quantum chromodynamics: matter",
u"density dependence",
u"neutrino: decay",
u"Frascati Stor",
u"silicon",
u"Poincare",
u"Lambda: hypernucleus",
u"quantization: canonical",
u"DAMA",
u"photon: absorption",
u"invariance: CPT",
u"screening",
u"neutrino: nuclear reactor",
u"fermion: overlap",
u"Brookhaven PS",
u"top: single production",
u"BooNE",
u"hard thermal loop approximation",
u"Feynman graph: penguin",
u"de Sitter",
u"operator",
u"quantum gravity: effect",
u"gravitation: induced",
u"algebra: Heisenberg",
u"missing-energy",
u"stochastic",
u"nucleosynthesis",
u"Hall effect",
u"nucleus: cosmic radiation",
u"black brane",
u"muon: storage ring",
u"universal extra dimension",
u"structure function: longitudinal",
u"neutrino nucleon: deep inelastic scattering",
u"gluon: mass",
u"preheating",
u"photon: production",
u"determinant",
u"photon: energy",
u"operator: composite",
u"Hall effect: quantum",
u"photon: electroproduction",
u"p nucleus: scattering",
u"gauge field theory: noncommutative",
u"positron p: deep inelastic scattering",
u"channel cross section: measured",
u"graph theory",
u"tetraquark",
u"shell model",
u"semiconductor detector: germanium",
u"black hole: spin",
u"nucleus: semileptonic decay",
u"scintillation counter: crystal",
u"susceptibility: topological",
u"Euclidean",
u"propagation",
u"Robertson-Walker",
u"Einstein",
u"bottom: radiative decay",
u"prepotential",
u"OPAL",
u"dependence: impact parameter",
u"Aharonov-Bohm effect",
u"quantization: constraint",
u"Ward-Takahashi identity",
u"network",
u"momentum transfer: high",
u"compactification: Calabi-Yau",
u"Higgs particle: composite",
u"multiverse",
u"fluctuation: vacuum",
u"field theory: finite temperature",
u"differential cross section: ratio",
u"Theta(1540)",
u"magnetic field: galaxy",
u"entropy: production",
u"gluon: density",
u"lepton: radiative decay",
u"Lambda",
u"collapse",
u"warped",
u"littlest Higgs model",
u"approximation: linear",
u"Ginsparg-Wilson relation",
u"neutrino: massive",
u"imaging",
u"abelian",
u"momentum spectrum",
u"electroweak interaction: radiative correction",
u"time delay",
u"lattice: anisotropy",
u"particle: massless",
u"symmetry: family",
u"homotopy",
u"positron: polarized beam",
u"binary: compact",
u"current",
u"gas electron multiplier",
u"renormalization: nonperturbative",
u"current algebra",
u"interface",
u"resistive plate chamber",
u"effect: nonlinear",
u"space: Minkowski",
u"CPT: violation",
u"gravitation: dilaton",
u"causality: violation",
u"zeta function",
u"LSP",
u"lifetime",
u"D",
u"threshold: correction",
u"differential cross section: x-dependence",
u"decay: flavor changing",
u"meson: hybrid",
u"flavor: 4",
u"Yang-Baxter equation",
u"galaxy: formation",
u"correction: relativistic",
u"upgrade",
u"family",
u"potential: thermodynamical",
u"glueball: mass",
u"neutrino/e",
u"Gross-Neveu model",
u"spin: density matrix",
u"hierarchy: gauge",
u"space-time: Reissner-Nordstroem",
u"Coulomb",
u"form factor: vector",
u"anti-p p: scattering",
u"fusion",
u"quantum molecular dynamics: relativistic",
u"charmed meson",
u"calorimeter: hadronic",
u"nuclear matter: density",
u"Borel transformation",
u"compactification: torus",
u"photon nucleus: nuclear reaction",
u"field theory: scalar: massless",
u"temperature: transition",
u"gravitational radiation: particle source",
u"quark antiquark: bound state",
u"neutrino/tau",
u"space-time: fluctuation",
u"D*(2010): hadronic decay",
u"KLOE",
u"pi: production",
u"pi pi: elastic scattering",
u"anomaly: U(1)",
u"electron: electric moment",
u"meson: heavy",
u"alignment",
u"Slavnov identity",
u"minimal supersymmetric standard model: parameter space",
u"electron p: inelastic scattering",
u"beam dynamics",
u"modular",
u"gluon: polarization",
u"pseudoscalar meson: mass",
u"fabrication",
u"B+: hadronic decay",
u"atmosphere",
u"approximation: nonrelativistic",
u"absorption",
u"field theory: relativistic",
u"spin: orbit",
u"particle: acceleration",
u"hadron: mass",
u"axion: mass",
u"electromagnetic field: nonlinear",
u"ladder approximation",
u"splitting",
u"Higgs model: composite",
u"loop integral",
u"electronics",
u"readout",
u"flavor: dependence",
u"lepton: flavor",
u"expansion: strong coupling",
u"vector meson: photoproduction",
u"R-matrix",
u"compactification: orbifold",
u"Becchi-Rouet-Stora",
u"mass: twist",
u"J/psi(3100): hadronic decay",
u"black string",
u"dilepton: mass spectrum",
u"lepton: mixing",
u"decay modes",
u"chargino",
u"hydrogen",
u"Mainz Linac",
u"galaxy: rotation",
u"constraint: Hamiltonian",
u"radiation: electromagnetic",
u"black hole: hair",
u"gauge boson: pair production",
u"approximation: classical",
u"lepton nucleus: deep inelastic scattering",
u"track data analysis: vertex",
u"top: hadroproduction",
u"interpretation of experiments: CERN LEP Stor",
u"fermion: massive",
u"symmetry: crossing",
u"phase: topological",
u"specific heat",
u"mass: correction",
u"fermion: condensation",
u"fragmentation",
u"Schwinger model",
u"Skyrmion",
u"gluon: jet",
u"turbulence",
u"radiation",
u"quantum geometry",
u"Fermi gas",
u"gravitation: scalar",
u"W: hadronic decay",
u"space-time: foam",
u"low-energy constant",
u"current: axial",
u"bispectrum",
u"model: fluid",
u"quark: jet",
u"dark matter: coupling",
u"optics",
u"tau: decay",
u"photon p: inclusive reaction",
u"muon",
u"pi: exchange",
u"hyperon",
u"boson: statistics",
u"tau: radiative decay",
u"nucleon: model",
u"photon: structure function",
u"inflation: slow-roll approximation",
u"spin: 3/2",
u"color: confinement",
u"diquark: condensation",
u"gravitation: linear",
u"operator: local",
u"p: polarized target",
u"reflection",
u"string: topological",
u"superconductivity: color",
u"CLAS",
u"neutrino: superluminal",
u"gravitation: correction",
u"liquid",
u"muon: leptonic decay",
u"coupling: anomaly",
u"PCAC model",
u"mediation",
u"cross section: elastic scattering",
u"photon photon: scattering",
u"slepton: mass",
u"black hole: static",
u"collective",
u"final state: two-photon",
u"special relativity",
u"binary: coalescence",
u"cosmic radiation: propagation",
u"hybrid",
u"scalar: Ricci",
u"psi(3685)",
u"Mellin transformation",
u"Bogolyubov transformation",
u"color flavor locked phase",
u"solution: static",
u"gauge: abelian",
u"beam emittance",
u"cross section: measured",
u"quantum chromodynamics: light cone",
u"nuclear properties",
u"sparticle: pair production",
u"cosmic radiation: diffusion",
u"air",
u"statistical analysis: Bayesian",
u"gravitation: interaction",
u"amplitude analysis: decay",
u"field theory: Toda",
u"quark model: constituent",
u"ionization: yield",
u"field theory: massive",
u"stop: mass",
u"CERN CLIC",
u"J/psi(3100): electroproduction",
u"quark: hadroproduction",
u"neutrino electron: elastic scattering",
u"mass: screening",
u"annihilation",
u"a0(980)",
u"charmed meson: hadronic decay",
u"J/psi(3100): yield",
u"beam: energy",
u"multiplet: chiral",
u"p: spin",
u"mass: scalar",
u"quark model: chiral",
u"contact interaction",
u"epsilon expansion",
u"neutrino",
u"space-time: deformation",
u"symmetry breaking: flavor",
u"string model: Type IIB",
u"DELPHI",
u"correction: finite size",
u"neutrino nucleus: interaction",
u"gravitation: teleparallel",
u"fermion: dark matter",
u"energy: kinetic",
u"scaling: invariance",
u"crossing",
u"mass ratio",
u"electron nucleus: deep inelastic scattering",
u"neutrino nucleus: nuclear reaction",
u"inflaton: decay",
u"particle: spectrum",
u"gravitational radiation: background",
u"scale: grand unified theory",
u"elastic scattering",
u"quark: potential",
u"matter: power spectrum",
u"benchmark",
u"adiabatic",
u"muon: branching ratio",
u"path integral: measure",
u"hydrogen: atom",
u"lepton: branching ratio",
u"WIMP nucleus: elastic scattering",
u"MAGIC",
u"gauge field theory: tensor",
u"singlet",
u"Bayesian",
u"RF system",
u"computer: network",
u"mass: renormalization",
u"differential forms: 3",
u"fermion: coupling",
u"Regge",
u"small-angle",
u"precision measurement",
u"dark matter: scalar",
u"differential forms: symplectic",
u"fractal",
u"Cherenkov Telescope Array",
u"inflation: chaos",
u"quantum space",
u"dark matter: detector",
u"form factor: Sudakov",
u"dimension: 8",
u"p: lifetime",
u"hadron: multiplicity",
u"group: SU(2)",
u"beam monitoring",
u"particle: spin",
u"quantization: Batalin-Vilkovisky",
u"supersymmetry: minimal",
u"cohomology: Becchi-Rouet-Stora",
u"hydrodynamics: relativistic",
u"model: spin",
u"momentum transfer",
u"star: mass",
u"analyzing power",
u"baryon resonance: exotic",
u"new particle",
u"chameleon",
u"quark: mixing",
u"X-ray: emission",
u"nuclear force",
u"pi0: photoproduction",
u"accretion",
u"mass: scale",
u"potential: vector",
u"graviton: massive",
u"xenon: liquid",
u"quantization: deformation",
u"saddle-point approximation",
u"cosmic background radiation: polarization",
u"mathematical methods: variational",
u"photon: propagator",
u"jet: energy",
u"magnetic spectrometer",
u"bottom: mass",
u"sfermion: mass",
u"graviton: mass",
u"nuclear emulsion",
u"mass: gravitation",
u"sparticle: decay",
u"energy: gap",
u"electron nucleus: nuclear reaction",
u"formation",
u"underlying event",
u"tritium: semileptonic decay",
u"invariance: Poincare",
u"isometry",
u"group theory: geometrical",
u"deuterium",
u"hadron: mass spectrum",
u"charge conjugation",
u"error: statistical",
u"singlet: scalar",
u"K: rare decay",
u"multiplicity: fluctuation",
u"potential: gauge",
u"black hole: Kerr-Newman",
u"constraint: algebra",
u"J/psi(3100): radiative decay",
u"Phi(1020): hadronic decay",
u"Immirzi parameter",
u"beryllium",
u"electron: capture",
u"L3",
u"neutrino: path length",
u"graviton: exchange",
u"fractional",
u"vector meson: electroproduction",
u"p nucleus: interaction",
u"mass number: dependence",
u"space-time: static",
u"sphaleron",
u"quark: semileptonic decay",
u"neutrino/mu: beam",
u"showers: atmosphere",
u"standard model: validity test",
u"power spectrum: tensor",
u"spectrometer",
u"algebra: fusion",
u"RICH",
u"neutrino: supernova",
u"magnetization",
u"effective field theory: chiral",
u"polarized beam",
u"charmed meson: pair production",
u"supersymmetry: split",
u"time reversal",
u"pi: decay",
u"interaction: model",
u"baryon: oscillation: acoustic",
u"gluon: distribution function",
u"Fermi liquid",
u"symmetry breaking: U(1)",
u"asymmetry: time dependence",
u"cosmic radiation: interaction",
u"renormalization group: fixed point",
u"correlation function: two-particle",
u"supersymmetry: 8",
u"heavy ion",
u"phase space: Hilbert space",
u"top: branching ratio",
u"electromagnetic field: external field",
u"Z0: width",
u"kappa symmetry",
u"nucleus: structure function",
u"tension",
u"radio wave",
u"sum rule: light cone",
u"low-energy theorem",
u"flavor: mixing",
u"B-L number",
u"metal",
u"membrane",
u"gauge field theory: SU(3) x SU(3) x U(1)",
u"density: spectral",
u"model: lattice",
u"photon: off-shell",
u"conference summary",
u"boundary condition: twist",
u"Weyl",
u"shadowing",
u"gamma ray: background",
u"cascade",
u"interaction",
u"charge: fluctuation",
u"long-range",
u"group: Lorentz",
u"anti-p: cosmic radiation",
u"pi pi: scattering amplitude",
u"quark: fragmentation",
u"unitarity: violation",
u"coupling: conformal",
u"duality: quark hadron",
u"massive",
u"sparticle: electroproduction",
u"lepton: transverse momentum",
u"metric",
u"B: decay constant",
u"current: electromagnetic",
u"quantum mechanics: relativistic",
u"random phase approximation",
u"meson: exotic",
u"gluino",
u"invariance: reparametrization",
u"algebra: affine",
u"cosmic radiation: secondary",
u"tetrad",
u"B/s: branching ratio",
u"gluon: saturation",
u"synchrotron",
u"supergravity: 2",
u"showers",
u"eta: hadroproduction",
u"spinor: Dirac",
u"holonomy: G(2)",
u"transformation: modular",
u"D-term",
u"quantum mechanics: supersymmetry",
u"metric: perturbation",
u"beam optics",
u"scalar particle",
u"polarization: tensor",
u"pseudoscalar meson: decay constant",
u"baryon: multiplet",
u"Lambda: hadroproduction",
u"mixing: kinetic",
u"model: solar",
u"showers: spatial distribution",
u"dark matter: production",
u"operator: Becchi-Rouet-Stora",
u"gap equation",
u"photon: mass",
u"electron positron: inclusive reaction",
u"gauge boson: coupling",
u"channel cross section: correction",
u"quark: radiative decay",
u"vector boson: fusion",
u"parton: density",
u"sparticle",
u"X(3872)",
u"higher-twist",
u"momentum transfer: low",
u"parton: multiple scattering",
u"Daya Bay",
u"dark energy: interaction",
u"mass: pole",
u"nuclear reactor",
u"gluon: Regge",
u"black ring",
u"mass: bottom",
u"pi: condensation",
u"operator: dimension: 6",
u"pi: multiple production",
u"J/psi(3100): photoproduction",
u"gravitational radiation detector: interferometer",
u"meson: decay",
u"group: representation",
u"quantum electrodynamics: scalar",
u"pseudoscalar",
u"potential model",
u"model: hadronization",
u"time-of-flight",
u"background: anti-de Sitter",
u"representation: nonlinear",
u"Type IIB",
u"space-time: horizon",
u"tau: leptonic decay",
u"anthropic principle",
u"Cartan",
u"Lambda: polarization",
u"form factor: ratio",
u"electron positron: elastic scattering",
u"Sivers function",
u"organic compounds",
u"master integral",
u"vacuum: stability",
u"Darmstadt SIS",
u"energy: phantom",
u"electron: mass",
u"duality: string",
u"quarkonium: hadroproduction",
u"curvaton",
u"anti-B0: hadronic decay",
u"high energy behavior",
u"impulse approximation",
u"D: semileptonic decay",
u"hadron: yield",
u"VHE",
u"scintillation counter: plastics",
u"cross section: longitudinal",
u"gluino: pair production",
u"rapidity: density",
u"algebraic geometry",
u"peripheral",
u"space-time: anisotropy",
u"K+: semileptonic decay",
u"oscillation: acoustic",
u"Z0: mass",
u"Lambda(1405)",
u"quarkonium",
u"eta: photoproduction",
u"space: fuzzy",
u"magnetic field: effect",
u"quark: massless",
u"f0(600): mass",
u"hadron: model",
u"partial wave analysis: multipole",
u"ALEPH",
u"B/s: leptonic decay",
u"longitudinal",
u"U-duality",
u"meson: width",
u"nuclear physics: effect",
u"measure",
u"quarkonium: width",
u"squark: pair production",
u"positron: energy spectrum",
u"p: pair production",
u"photon: resolved",
u"correlation: long-range",
u"xenon",
u"F-term",
u"symmetry: transformation",
u"kinematics: phase space",
u"equivalence principle: violation",
u"gravitational radiation: spectrum",
u"mass spectrum: missing-mass",
u"backscatter: laser",
u"measurement theory",
u"gravitino: LSP",
u"diffraction: dissociation",
u"Lifshitz",
u"interference: effect",
u"Hartree-Fock approximation",
u"Kaehler",
u"luminosity: high",
u"anti-p",
u"coalescence",
u"hydrodynamics: viscosity",
u"graviton: propagator",
u"phase space: Fock space",
u"particle: interaction",
u"hardware",
u"showers: electromagnetic",
u"lithium",
u"spin: parity",
u"lepton number",
u"cross section: transverse",
u"horizon: geometry",
u"quark: density",
u"dijet",
u"scaling: geometrical",
u"symmetry breaking: conformal",
u"D: hadronic decay",
u"Klein-Gordon equation: solution",
u"fluid: viscosity",
u"Hall effect: fractional",
u"isospin: asymmetry",
u"particle: model",
u"nucleon nucleon: potential",
u"transverse momentum: dependence",
u"chargino: mass",
u"photon photon: fusion",
u"cylinder",
u"transformation: canonical",
u"form factor: scalar",
u"linear collider",
u"gravitation: tensor",
u"Fourier transformation",
u"channel cross section: upper limit",
u"cross section: hadronic",
u"mass formula",
u"Higgs particle: heavy",
u"electroweak interaction: precision measurement",
u"Dyson-Schwinger equation: solution",
u"baryon resonance",
u"gaugino: condensation",
u"structure function: moment",
u"p-brane: 7",
u"particle: Dirac",
u"direct detection",
u"showers: parton",
u"quantum electrodynamics: correction",
u"string: compactification",
u"T-parity",
u"supersymmetry: parameter space",
u"capture",
u"parton: scattering",
u"messenger",
u"topological insulator",
u"scattering amplitude: forward scattering",
u"matter: accretion",
u"fermion: mass generation",
u"bottom: particle identification",
u"collinear",
u"flavor: SU(3)",
u"sparticle: decay modes",
u"correction",
u"D: pair production",
u"Upsilon(9460)",
u"up",
u"decay constant",
u"gauge boson: hadroproduction",
u"quark: star",
u"black hole: primordial",
u"perturbation: adiabatic",
u"affine",
u"potential: flat direction",
u"quark: mass: twist",
u"B/s0: branching ratio",
u"cumulant expansion",
u"supersymmetry: conformal",
u"symmetry",
u"wave function: renormalization",
u"space: sphere",
u"hadron: wave function",
u"power spectrum: primordial",
u"Higgs particle: pseudoscalar particle",
u"D0",
u"space-time: discrete",
u"neutralino: decay",
u"space-time: signature",
u"gravitation: gauge field theory",
u"symmetry: Becchi-Rouet-Stora",
u"calcium",
u"quark: energy loss",
u"supersymmetry: 6",
u"isospin: selection rule",
u"Higgs particle: singlet",
u"photon nucleus: inclusive reaction",
u"flat direction",
u"site",
u"transversity",
u"neutrino: Majorana: mass",
u"top: coupling",
u"operator: Casimir",
u"model: composite",
u"energy-momentum",
u"temperature: anisotropy",
u"Fokker-Planck equation",
u"baryon: wave function",
u"neutron star: mass",
u"charged particle: hadroproduction",
u"symmetry: Peccei-Quinn",
u"operator: Lax",
u"electron: magnetic moment",
u"scalar particle: mass",
u"Higgs particle: exchange",
u"cosmic censorship",
u"resonance: mass",
u"superluminal",
u"anomaly: gauge",
u"quark: spin",
u"photon nucleon: exclusive reaction",
u"multipole",
u"baryon: hadron spectroscopy",
u"tau: decay modes",
u"U(N)",
u"field theory: nonlocal",
u"matter: exotic",
u"Planck",
u"percolation",
u"meson: pair production",
u"spin: flavor",
u"beam profile",
u"Lorentz",
u"n: semileptonic decay",
u"gravitino: dark matter",
u"neutrino: background",
u"Micromegas",
u"Z0",
u"temperature: fluctuation",
u"neutrino: spectrum",
u"qubit",
u"potential: Yukawa",
u"cross section: absorption",
u"mechanics",
u"approximation: weak coupling",
u"lepton: multiple production",
u"black hole: quasinormal mode",
u"energy: symmetry",
u"argon",
u"star: formation",
u"Pade approximation",
u"deformation: marginal",
u"expansion: weak coupling",
u"SNO",
u"pi: wave function",
u"Taylor expansion",
u"neutrino nucleus: inelastic scattering",
u"dimuon: mass spectrum",
u"quantum chromodynamics: supersymmetry",
u"recoil: energy",
u"neutralino: density",
u"neutrino: velocity",
u"hypercharge",
u"B: leptonic decay",
u"jet: fragmentation",
u"rapidity: correlation",
u"particle flow: collective phenomena",
u"baryon: magnetic moment",
u"spin: transverse",
u"quantum chromodynamics: model",
u"overlap",
u"gauge dependence",
u"superspace: harmonic",
u"K0: hadronic decay",
u"gluon: pair production",
u"neutralino: pair production",
u"charge: 2",
u"interaction: Yukawa",
u"fibre",
u"gravitation: background",
u"gluon: production",
u"dielectric",
u"CDMS",
u"squark",
u"photon: hadroproduction",
u"numerical calculations: variational",
u"charge: U(1)",
u"bound state: wave function",
u"anyon",
u"nucleon: wave function",
u"membrane: fractional",
u"radiation: spectrum",
u"operator: dimension",
u"selection rule",
u"quark: flavor: 4",
u"symmetry: Z(3)",
u"K: hadroproduction",
u"Z': coupling",
u"K: condensation",
u"width: ratio",
u"squeezed state",
u"lepton: final state",
u"Jona-Lasinio-Nambu model: Polyakov loop",
u"electron positron: interaction",
u"deuteron nucleus: nuclear reaction",
u"resonance: decay",
u"star: hybrid",
u"cascade decay",
u"doublet",
u"radion",
u"leptoquark",
u"detector: performance",
u"symmetry: cylinder",
u"Cabibbo angle",
u"temperature: Hagedorn",
u"curvature: singularity",
u"correlation: short-range",
u"gravitino: production",
u"data compilation",
u"nuclear reaction",
u"communications",
u"positron: flux",
u"axial-vector meson",
u"gravitation: conformal",
u"null-energy condition",
u"pi0",
u"semileptonic decay",
u"pi: leptonic decay",
u"energy flow",
u"supergravity: 1",
u"star: strangeness",
u"Hubbard model",
u"fluctuation: scalar",
u"CERN LHC Coll: upgrade",
u"antiparticle",
u"jet: relativistic",
u"psi(3685): hadronic decay",
u"expansion: harmonic",
u"gravitation: surface",
u"exchange: one-pion",
u"group: conformal",
u"gravitational radiation: burst",
u"k-essence",
u"Witten index",
u"asymptotic safety",
u"dimension: conformal",
u"Casimir",
u"integral equations: nonlinear",
u"membrane: tension",
u"shielding",
u"space-time: any-dimensional",
u"spin: symmetry",
u"spatial distribution",
u"baryon: mass spectrum",
u"tau: hadronic decay",
u"charge: quantization",
u"phi**n model: 3",
u"glueball: scalar",
u"FPGA",
u"gauge field theory: SO(N)",
u"ether",
u"top: polarization",
u"Monte Carlo: Markov chain",
u"scalar meson: mass",
u"Theta parameter",
u"tracks",
u"spin: model",
u"B0: semileptonic decay",
u"vacuum polarization: hadronic",
u"photon nucleon: interaction",
u"grand unified theory: E(6)",
u"magnet: superconductivity",
u"mass: density",
u"anomaly: gravitation",
u"black hole: higher-dimensional",
u"Cherenkov counter: atmosphere",
u"tachyon: potential",
u"photon: spectrum",
u"quark: relativistic",
u"dark matter: scattering",
u"field equations: Yang-Mills",
u"antineutrino/e: flux",
u"penguin",
u"doubly special relativity",
u"p: polarization",
u"mass: Dirac",
u"introductory",
u"solids",
u"birefringence",
u"D-brane: 3",
u"neutrino/mu: secondary beam",
u"neutrino: UHE",
u"O(N)",
u"factorization: collinear",
u"cloud",
u"B/s: hadronic decay",
u"photomultiplier: silicon",
u"soliton: topological",
u"multiplicity: moment",
u"symmetry: space-time",
u"free electron laser",
u"Serpukhov PS",
u"black hole: background",
u"KamLAND",
u"energy-momentum: conservation law",
u"black hole: oscillation",
u"plasma: strong coupling",
u"model: hybrid",
u"algebra: chiral",
u"black hole: dilaton",
u"space-time: Euclidean",
u"p-brane: 6",
u"number theory",
u"isovector",
u"black hole: decay",
u"meson: decay constant",
u"interaction: Coulomb",
u"symmetry: SU(2) x SU(2)",
u"MOND",
u"electron positron",
u"symmetry breaking: gauge",
u"meson baryon: interaction",
u"multiplicity: high",
u"perturbation: spectrum",
u"random walk",
u"black hole: de Sitter",
u"integral equations: solution",
u"lepton: universality",
u"correlation: Bose-Einstein",
u"quark quark: interaction",
u"Grassmann",
u"hadron: pair production",
u"Bethe-Salpeter equation: solution",
u"Galileon",
u"polarized beam: longitudinal",
u"instanton: effect",
u"Phi(1020): radiative decay",
u"anti-p p: elastic scattering",
u"vertex detector",
u"top: production",
u"quark: transverse momentum",
u"fermion: heavy",
u"quark: flavor: violation",
u"interpretation of experiments: Batavia TEVATRON Coll",
u"tungsten",
u"isospin: symmetry breaking",
u"Higgsino: mass",
u"axion: dark matter",
u"quark: electroproduction",
u"graviton: giant",
u"conformal block",
u"symmetry: O(3)",
u"K0(S)",
u"transverse energy",
u"recombination",
u"Berry phase",
u"mass: lower limit",
u"vector meson: pair production",
u"Penrose limit",
u"SU(5)",
u"moment: multipole",
u"cavity: superconductivity",
u"Majorana",
u"fluid: relativistic",
u"space-time: AdS(5) x S(5)",
u"symmetry: custodial",
u"drift chamber",
u"quantum chromodynamics: radiative correction",
u"uranium",
u"photon: transverse momentum",
u"charge: Becchi-Rouet-Stora",
u"action: Einstein-Hilbert",
u"multiplet: tensor",
u"sneutrino: right-handed",
u"boson: gas",
u"Gauss law",
u"algebra: C*",
u"operator: chiral",
u"resonance: effect",
u"anti-B0: branching ratio",
u"fission",
u"quark: recombination",
u"model: Yukawa",
u"cosmic radiation: VHE",
u"K: decay",
u"electron: acceleration",
u"leptoquark: scalar",
u"Goldstone theorem",
u"magnetic field: chiral",
u"multiquark",
u"rho(770): mass",
u"symmetry: A(4)",
u"nucleon: magnetic moment",
u"muon: beam",
u"inverse scattering method",
u"exchange: two-pion",
u"meson: production",
u"symmetry: global",
u"symmetry: SU(6)",
u"dilepton: production",
u"mediation: gravitation",
u"time reversal: invariance",
u"K0(L): branching ratio",
u"expansion: gradient",
u"regularization: lattice",
u"photon: detector",
u"electric moment",
u"fermion: family",
u"hyperon: hadroproduction",
u"charge: axial",
u"K+: branching ratio",
u"wake field",
u"B-: branching ratio",
u"supergravity: 8",
u"coupling: vector",
u"chi mesons: charm",
u"background: radioactivity",
u"quark: branching ratio",
u"cosmic radiation: production",
u"magnet",
u"effect: magnetic",
u"bolometer",
u"superradiance",
u"operator: spectrum",
u"field theory: planar",
u"WIMP nucleon: elastic scattering",
u"dark matter: elastic scattering",
u"electron p: interaction",
u"fermion: parastatistics",
u"naturalness",
u"showers: hadronic",
u"braid group",
u"model: integrability",
u"ion",
u"lepton nucleon: inclusive reaction",
u"black hole: topological",
u"Potts model",
u"velocity: dispersion",
u"vacuum state: fluctuation",
u"gravitation: higher-dimensional",
u"flux: time dependence",
u"approximation: adiabatic",
u"dilaton: coupling",
u"hair: scalar",
u"octonion",
u"model: dipole",
u"vacuum state: decay",
u"background: low",
u"heavy quark: mass",
u"J/psi(3100): branching ratio",
u"supersymmetry: spontaneous symmetry breaking",
u"solution: stability",
u"Darmstadt GSI FAIR",
u"gauge field theory: duality",
u"baryon: decuplet",
u"angular correlation: two-particle",
u"Landau-Pomeranchuk-Migdal effect",
u"geometry: symplectic",
u"dijet: mass spectrum",
u"muon: angular distribution",
u"coupling",
u"spin: statistics",
u"symmetry: SU(4)",
u"horizon: Killing",
u"mass: strangeness",
u"perturbation: gravitation",
u"Higgs particle: photoproduction",
u"perturbation: primordial",
u"quantization: light front",
u"pi+: hadroproduction",
u"equivalent photon approximation",
u"symmetry: Z(N)",
u"force: entropy",
u"electroweak interaction: penguin",
u"charge: yield",
u"color: transparency",
u"invariance: CP",
u"lepton: angular distribution",
u"Clebsch-Gordan coefficients",
u"KEK PS",
u"ANTARES",
u"fluctuation: thermal",
u"space-time: background",
u"vacuum state: de Sitter",
u"eta/c(2980)",
u"ADD model",
u"charge: Noether",
u"Korteweg-de Vries equation",
u"quarkonium: production",
u"quark: domain wall",
u"twist: 3",
u"gravitational radiation: primordial",
u"D0: branching ratio",
u"invariance: Weyl",
u"meson meson: interaction",
u"neutrino/mu: flux",
u"boson: scalar",
u"isobar model",
u"Einstein-Yang-Mills theory",
u"particle: nonrelativistic",
u"p: hadroproduction",
u"correlation function: two-pion",
u"fluctuation: spectrum",
u"black hole: BPS",
u"constraint: Virasoro",
u"pi0: electroproduction",
u"particle physics",
u"K0(L): semileptonic decay",
u"space-time: topology",
u"space-time: model",
u"entropy: correction",
u"jet: mass",
u"singularity: infrared",
u"ghost: condensation",
u"microwaves",
u"time reversal: symmetry",
u"supergravity: solution",
u"composite",
u"Liouville",
u"eikonal",
u"black hole: temperature",
u"gauge field theory: SU(3) x SU(2) x U(1) x U(1)",
u"effect: relativistic",
u"bunching: length",
u"quantization: light cone",
u"antineutrino/e",
u"baryon number: density",
u"nucleus: heavy",
u"Iizuka-Okubo-Zweig rule",
u"fermion: composite",
u"germanium",
u"Z(2)",
u"field theory: transformation",
u"spin: effect",
u"antimatter",
u"relic density",
u"operator: primary",
u"caloron",
u"baryon: oscillation",
u"renormalon",
u"anti-p: flux",
u"potential: linear",
u"decay: width",
u"electron positron: scattering",
u"electron nucleus: inclusive reaction",
u"flavor: violation: minimal",
u"book",
u"gravitation: classical",
u"Heisenberg model",
u"tau-: semileptonic decay",
u"Fermi surface",
u"fifth force",
u"fermion: Weyl",
u"Vlasov equation",
u"mechanical engineering",
u"axion: solar",
u"jet: photoproduction",
u"electron: density",
u"magnon: giant",
u"lepton: energy spectrum",
u"white dwarf",
u"curvature: correction",
u"color: screening",
u"triplet",
u"N(1535)",
u"model: hadronic",
u"multiplicity: dependence",
u"rare decay",
u"gaugino",
u"D/s",
u"quark: angular momentum",
u"chargino: pair production",
u"finite energy",
u"charge: scalar",
u"dilepton",
u"M-theory: heterotic",
u"neutrino nucleus: deep inelastic scattering",
u"operator: nonlocal",
u"renormalization: holography",
u"Higgsless model",
u"rapidity: central region",
u"transformation",
u"SLAC SLC Linac",
u"quantum molecular dynamics",
u"covariance: Lorentz",
u"differential geometry: symplectic",
u"pi pi: scattering length",
u"power supply",
u"string: interaction",
u"scale: factorization",
u"vector meson: exchange",
u"bottom: semileptonic decay",
u"deuteron: structure function",
u"muon: detector",
u"space: Euclidean",
u"anomaly: Weyl",
u"freeze-out: temperature",
u"LSP: mass",
u"string model: Type IIA",
u"radiation: laser",
u"statistics: quantum",
u"nuclear model",
u"supernova: collapse",
u"space-time: Rindler",
u"pi+: electroproduction",
u"liquid argon",
u"supergravity: Type IIB",
u"heavy quark: production",
u"electron: energy",
u"meson quark: coupling",
u"recoil: energy spectrum",
u"hydrogen: muonic atom",
u"circle",
u"form factor: axial-vector",
u"gluino: decay",
u"eta: radiative decay",
u"fermion: mixing",
u"phase: CP",
u"quantization: 1",
u"fermion: mass spectrum",
u"symmetry: SU(N)",
u"jet: transverse energy",
u"black hole: hadroproduction",
u"susceptibility: magnetic",
u"p: acceleration",
u"antiferromagnet",
u"stop: pair production",
u"top: decay modes",
u"cosmic radiation: angular distribution",
u"meson resonance",
u"space: torus",
u"Einstein-Podolsky-Rosen paradox",
u"meson: hadroproduction",
u"mass spectrum: dimuon",
u"form factor: axial",
u"quantum cosmology: wave function",
u"hadron: resonance: gas",
u"B-: hadronic decay",
u"dipole: magnetic",
u"field theory: group",
u"pi0: pair production",
u"Z': model",
u"glueball: mass spectrum",
u"gas: Coulomb",
u"jet: energy loss",
u"expansion: chiral",
u"M-brane",
u"Isgur-Wise function",
u"boson: condensation",
u"quark: fragmentation function",
u"boron",
u"quantization: 2",
u"narrow resonance",
u"charmed meson: molecule",
u"N(1440)",
u"double-beta decay",
u"momentum",
u"force: Lorentz",
u"neutrino: left-handed",
u"radiation: emission",
u"jet: single production",
u"quark antiquark",
u"beam instability",
u"Q-ball",
u"photon: background",
u"momentum resolution",
u"recoil",
u"graviton: Kaluza-Klein",
u"polarization: vector",
u"photoelectron: yield",
u"transverse energy: dependence",
u"supersymmetry: dark matter",
u"pi nucleon: elastic scattering",
u"charm: mass",
u"F-theory: compactification",
u"electron: yield",
u"electron nucleon: deep inelastic scattering",
u"model: cascade",
u"vector boson",
u"baryon: model",
u"cosmic background radiation: temperature",
u"dark energy: phantom",
u"density matrix: reduced",
u"black hole: accretion",
u"R parity: invariance",
u"background: de Sitter",
u"dark matter: lifetime",
u"Jacobi identity",
u"muon: decay",
u"meson: photoproduction",
u"Affleck-Dine model",
u"mass: deformation",
u"Glauber",
u"neutron star: rotation",
u"quantum algebra",
u"time projection chamber: liquid argon",
u"gauge: covariance",
u"dipole: electric",
u"Thirring model",
u"cosmological constant: time dependence",
u"sneutrino: mass",
u"positron",
u"star: massive",
u"scaling: Casimir",
u"mass: operator",
u"quark: coupling",
u"scalar meson: width",
u"hair",
u"meson: hadronic decay",
u"halo",
u"transverse momentum: low",
u"excited state: energy",
u"vector meson: width",
u"gravitational radiation: polarization",
u"resonance: width",
u"stability: thermodynamical",
u"quark: width",
u"baryon resonance: width",
u"bubble: nucleation",
u"charm: production",
u"transverse energy: spectrum",
u"solution: BPS",
u"photon nucleon: inelastic scattering",
u"quality",
u"force",
u"fundamental constant: length",
u"ferromagnet",
u"Boltzmann equation: solution",
u"Higgs particle: mixing angle",
u"photon: angular distribution",
u"Cherenkov",
u"injection",
u"tau-function",
u"singlet: gauge",
u"fermion number: susceptibility",
u"wave: electromagnetic",
u"CPT",
u"flavor: model",
u"surface: minimal",
u"threshold: effect",
u"higher-order: 4",
u"PT symmetry",
u"gamma ray: detector",
u"Rarita-Schwinger equation",
u"quark gluon: string model",
u"charged particle: production",
u"numerical methods: efficiency",
u"luminosity: upgrade",
u"electrode",
u"strangeness: production",
u"down",
u"Reissner-Nordstroem",
u"fermion: interaction",
u"synchrotron radiation: emission",
u"quantization: stochastic",
u"momentum: conservation law",
u"algebra: Poisson",
u"gauge field theory: SU(2) x SU(2) x U(1)",
u"Higgs particle: mass spectrum",
u"branching ratio: ratio",
u"entanglement: quantum",
u"bifurcation",
u"neutrino: singlet",
u"space: S(3)",
u"noncompact",
u"operator: mixing",
u"Yukawa",
u"tritium",
u"decay: asymmetry",
u"strong field",
u"foliation",
u"neutrino: density",
u"Sakai-Sugimoto model",
u"fission: particle source",
u"shape analysis: jet",
u"cosmic radiation: polarization",
u"superconductivity: gap",
u"velocity dependence",
u"photon photon: colliding beams",
u"dilation",
u"quark: current",
u"tau: polarization",
u"Balitsky-Kovchegov equation",
u"baryon: matter",
u"group: finite",
u"baryon baryon: interaction",
u"technicolor: conformal",
u"electron positron: storage ring",
u"charge: symmetry breaking",
u"vortex: nonabelian",
u"gravitation: higher-order",
u"gauge field theory: chiral",
u"antifield",
u"energy: negative",
u"space-time: transformation",
u"electron: flux",
u"Schroedinger equation: nonlinear",
u"particle flow: anisotropy",
u"final state: dilepton",
u"Phi(1020): hadroproduction",
u"soliton: chiral",
u"black hole: quantum",
u"quark: photoproduction",
u"at rest",
u"force: gravitation",
u"quarkonium: hadronic decay",
u"J/psi(3100): production",
u"current: density",
u"vector boson: associated production",
u"new physics: signature",
u"symmetry: internal",
u"self-force",
u"coherent interaction",
u"photon: radiation",
u"symmetry: translation",
u"intermediate state",
u"muon: spectrometer",
u"freeze-out: chemical",
u"coherence: length",
u"nucleon: strangeness",
u"Z0: decay",
u"nucleus: wave function",
u"parametric",
u"mass spectrum: two-photon",
u"condensation: scalar",
u"quark gluon: interaction",
u"lepton: polarization",
u"black hole: radiation",
u"brick wall model",
u"symmetry: SO(3)",
u"semiconductor",
u"fermion: singlet",
u"SO(10)",
u"width: difference",
u"curvature: high",
u"expansion",
u"boson: mass",
u"forward scattering",
u"quark: cluster",
u"beam: width",
u"strange particle: hadroproduction",
u"superconductivity: duality",
u"D+: hadronic decay",
u"string model: compactification",
u"supersymmetry: nonlinear",
u"representation: unitarity",
u"supersymmetry: representation",
u"correction: vertex",
u"surface tension",
u"Type IIA",
u"rapidity: difference",
u"correction: oblique",
u"photon: exchange",
u"operator: scalar",
u"quantization: path integral",
u"flow: anisotropy",
u"magnetic field: constant",
u"water",
u"string: gas",
u"regularization: Pauli-Villars",
u"gauge field theory: strong coupling",
u"vector boson: pair production",
u"magnetic field: background",
u"space-time: Taub-NUT",
u"atom: energy levels",
u"topcolor",
u"space-time: stability",
u"PANDA",
u"model: higher-dimensional",
u"texture",
u"analog-to-digital converter",
u"quasinormal mode: frequency",
u"phase space: reduced",
u"Kerr/CFT correspondence",
u"effective potential: superpotential",
u"expansion: deceleration",
u"particle: density",
u"photon: hidden sector",
u"polymer",
u"critical phenomena: finite temperature",
u"scalar tensor",
u"energy: calibration",
u"atomic physics",
u"Borexino",
u"K: B-parameter",
u"optics: geometrical",
u"field equations: classical",
u"pi+: leptonic decay",
u"Schwarzschild",
u"structure function: ratio",
u"Z': hadroproduction",
u"lattice: optical",
u"quark antiquark: interaction",
u"accelerator: plasma",
u"triviality",
u"AMS",
u"field theory: affine",
u"cyclic",
u"W",
u"electron electron: elastic scattering",
u"beam loss",
u"muon: energy spectrum",
u"parton: transverse momentum",
u"double field theory",
u"charm: pair production",
u"gauge field theory: E(6)",
u"isoscalar",
u"B/s0: leptonic decay",
u"laser: beam",
u"symmetry: Noether",
u"AMANDA",
u"cosmological model: fluid",
u"fixed point: ultraviolet",
u"electron nucleus: inelastic scattering",
u"LSND",
u"Phi(1020): electroproduction",
u"scattering: beam-beam",
u"pair: Lax",
u"mass: effect",
u"expansion: mass",
u"Higgs particle: mixing",
u"boson: star",
u"K: branching ratio",
u"slepton",
u"matter: chiral",
u"false vacuum",
u"group: Weyl",
u"pi nucleon: interaction",
u"phantom: crossing",
u"gravitation: quantization",
u"radiative decay",
u"PHOBOS",
u"hadron: formation",
u"gluon: bremsstrahlung",
u"flux: ratio",
u"p-brane: 4",
u"quantum electrodynamics: radiative correction",
u"K+: hadroproduction",
u"stability: linear",
u"K: mass",
u"mass: time dependence",
u"tau: width",
u"polarization: linear",
u"leptoquark: mass",
u"expansion 1/N: flavor",
u"quarkonium: mass spectrum",
u"baryon: exotic",
u"near detector",
u"flavor: singlet",
u"quark antiquark: condensation",
u"psi(3685): electroproduction",
u"algebra: gauge",
u"D*(2010): electroproduction",
u"model: thermodynamical",
u"neutrino: secondary beam",
u"fluid: anisotropy",
u"photon: deflection",
u"neutrino: radiative decay",
u"quark: static",
u"quantization: geometrical",
u"black hole: coalescence",
u"structure function: charm",
u"cosmic background radiation: power spectrum",
u"gauge field theory: E(8) x E(8)",
u"flavor changing",
u"neon",
u"quantum chromodynamics: background",
u"fermion: field theory",
u"particle: heavy",
u"nucleon nucleon: elastic scattering",
u"squark: decay",
u"field theory: fluctuation",
u"inflaton: coupling",
u"deuteron: wave function",
u"cross section: mass",
u"S-matrix: unitarity",
u"quantum chromodynamics: Lambda parameter",
u"Wheeler-DeWitt equation: solution",
u"ground state: wave function",
u"neutrino: dark matter",
u"buildings",
u"mass: radiative correction",
u"inflation: multiple field",
u"statefinder",
u"vacuum state: alignment",
u"top: associated production",
u"Jordan",
u"dark matter: galaxy: halo",
u"interaction: long-range",
u"string: coupling",
u"quarkonium: radiative decay",
u"transition: chiral",
u"antineutrino p: interaction",
u"magnon",
u"axial-vector",
u"model: vector",
u"Pati-Salam model",
u"frequency: low",
u"interpretation of experiments: Batavia TEVATRON PS",
u"string: decay",
u"vector meson: radiative decay",
u"bunching",
u"polarization: recoil",
u"neutrino: particle identification",
u"Higgs particle: invisible decay",
u"X-ray: binary",
u"Brookhaven Lab",
u"flavor: oscillation",
u"emission: spectrum",
u"nucleus: lifetime",
u"invariance: scale",
u"polarized target: transverse",
u"monopole: global",
u"conifold: deformation",
u"Higgs particle: mass difference",
u"spectrum",
u"top: width",
u"f2(1270)",
u"inflaton: fluctuation",
u"Z': decay",
u"hadron: form factor",
u"boson",
u"baryon: pair production",
u"differential equations: nonlinear",
u"n: structure function",
u"expansion: multipole",
u"magnetic field: primordial",
u"correlation: two-pion",
u"symmetry: O(4)",
u"neutrino: coupling",
u"Z0: decay modes",
u"protoneutron star",
u"spin: 0",
u"two-photon",
u"resonance: production",
u"quark: wave function",
u"force: long-range",
u"pi: neutrinoproduction",
u"redshift: high",
u"meson: electroproduction",
u"collimator",
u"pulsar: binary",
u"mediation: anomaly",
u"Heisenberg",
u"spectator",
u"Hartree approximation",
u"transverse momentum: parton",
u"W: decay",
u"gauge boson: fusion",
u"nucleus: target",
u"Riemann",
u"decay: time",
u"exceptional",
u"squark: mixing angle",
u"geometry: complex",
u"glasma",
u"aluminum",
u"superselection rule",
u"solenoid",
u"operator: dilation",
u"Maxwell equation: solution",
u"upsilon mesons",
u"inflaton: mass",
u"phonon",
u"quark: massive",
u"critical phenomena: quark hadron",
u"black hole: angular momentum",
u"galaxy: radio wave",
u"neutrino: luminosity",
u"horizon: entropy",
u"space-time: black hole",
u"Z0: branching ratio",
u"scattering amplitude: factorization",
u"current: Noether",
u"Brownian motion",
u"cosmological model: bounce",
u"psi(3685): branching ratio",
u"rescaling",
u"Lamb shift",
u"tunneling: quantum",
u"algebra: von Neumann",
u"field theory: rational",
u"temperature: deconfinement",
u"gauge field theory: Poincare",
u"antineutrino",
u"space-time: Goedel",
u"pi+",
u"statistics: oscillation",
u"branching ratio: upper limit",
u"Polyakov loop: correlation function",
u"pi nucleon: scattering amplitude",
u"exchange: one-photon",
u"phase space: noncommutative",
u"exchange: two-gluon",
u"antineutrino: flux",
u"eta(958): mass",
u"electron: recoil",
u"vacuum system",
u"short-range",
u"diquark: scalar",
u"quantization: Dirac",
u"nitrogen",
u"Froissart bound",
u"lepton: polarized beam",
u"seesaw model: inverse",
u"path length",
u"linear accelerator",
u"helicity: violation",
u"gluon: scattering amplitude",
u"few-body problem",
u"Noether",
u"isospin: dependence",
u"inelastic scattering",
u"new particle: mass",
u"membrane model: solution",
u"cesium: iodine",
u"causal set",
u"charmed meson: width",
u"dilaton: potential",
u"symmetry breaking: scale",
u"nonrenormalizable",
u"lifetime: ratio",
u"vertex: primary",
u"renormalization group: nonperturbative",
u"tensor: Ricci",
u"wide-angle",
u"solar",
u"Moyal product",
u"deflection",
u"fluid: model",
u"gas: model",
u"supersymmetry: fractional",
u"W: longitudinal",
u"spectral triple",
u"meson: propagator",
u"n: energy spectrum",
u"instanton: liquid",
u"space-time: embedding",
u"LHeC",
u"interpretation of experiments: ATLAS",
u"XY model",
u"interaction: scalar",
u"Lambda/c+",
u"long-lived",
u"mass spectrum: two-pion",
u"operator: BPS",
u"cross section: dipole",
u"fundamental constant: Planck",
u"neutrino: model",
u"beam: polarization",
u"fermion: statistics",
u"hidden symmetry: local",
u"sparticle: branching ratio",
u"quark: exotic",
u"photon: particle identification",
u"sum rule: Bjorken",
u"field equations: linear",
u"pi-: hadroproduction",
u"energy: emission",
u"B",
u"hypernucleus",
u"ion: beam",
u"tau",
u"wave function: light cone",
u"hopping parameter expansion",
u"antineutrino: nuclear reactor",
u"operator: overlap",
u"elements: cosmic radiation",
u"postulated particle: decay",
u"muon: atmosphere",
u"big rip",
u"charged particle: yield",
u"quark: decay",
u"instanton: moduli space",
u"perturbation theory: linear",
u"spectrum: discrete",
u"fluctuation: tensor",
u"D+: branching ratio",
u"sulfur",
u"p: wave function",
u"W: production",
u"derivative: covariance",
u"charmed baryon",
u"quark: hadronization",
u"transparency",
u"baryon resonance: mass",
u"CoGeNT",
u"hadron: resonance",
u"quark model: relativistic",
u"meson meson: scattering amplitude",
u"anti-p: hadroproduction",
u"lepton p: deep inelastic scattering",
u"conformal gauge",
u"meson: correlation function",
u"correlation: time",
u"Einstein equation: vacuum",
u"magnetic moment: anomaly",
u"neutral particle",
u"star: collapse",
u"monopole: BPS",
u"Feynman gauge",
u"critical phenomena: temperature",
u"confinement: linear",
u"magnetar",
u"electron: particle identification",
u"phase space: density",
u"throat",
u"hadron hadron: colliding beams",
u"gauge boson: heavy",
u"quark: mass difference",
u"string: network",
u"B/s0 anti-B/s0: interference",
u"Z': leptonic decay",
u"scintillation counter: fibre",
u"pixel",
u"cavity: optical",
u"fine structure",
u"tensor",
u"proportional chamber",
u"calorimeter: liquid argon",
u"discrete",
u"space-time: torsion",
u"spontaneous symmetry breaking: chiral",
u"anti-B: semileptonic decay",
u"postulated particle: heavy",
u"SU(4)",
u"space-time: Vaidya",
u"B0 anti-B0: mixing angle",
u"J/psi(3100): polarization",
u"gravitation: self-force",
u"fermion: operator",
u"sum rule: finite energy",
u"meson: mixing",
u"nucleon: propagator",
u"B0: rare decay",
u"Double Chooz",
u"optics: interference",
u"gravitation: anti-de Sitter",
u"channel cross section: momentum transfer",
u"coupling: flavor changing",
u"fermion: model",
u"Gromov-Witten theory",
u"renormalization: on-shell",
u"electron: propagator",
u"bottom: branching ratio",
u"dark matter: velocity",
u"hadron: transverse momentum",
u"cosmic radiation: galaxy",
u"fermion: triplet",
u"electron: scattering",
u"radiation: final-state interaction",
u"n: form factor",
u"model: hydrodynamics",
u"K: leptonic decay",
u"quark: nonrelativistic",
u"far detector",
u"XXZ model",
u"galaxy: dark matter",
u"matter: production",
u"interpretation of experiments: CMS",
u"laser: pulsed",
u"Hamiltonian formalism: constraint",
u"antineutrino/e: energy spectrum",
u"pi: gas",
u"SO(3)",
u"temperature: time dependence",
u"lens",
u"particle identification: bottom",
u"symmetry: local",
u"fermion: representation",
u"screening: Debye",
u"Poisson",
u"pi: propagator",
u"quarkonium: mass",
u"dijet: production",
u"space: compact",
u"hadron: photoproduction",
u"detector: fluorescence",
u"surface: del Pezzo",
u"neutrino nucleus: inclusive reaction",
u"track data analysis: jet",
u"scattering amplitude: high energy behavior",
u"energy: interaction",
u"spectrum: perturbation",
u"pi nucleon: sigma term",
u"big bang: singularity",
u"instanton: correction",
u"transverse momentum: fluctuation",
u"coupling: derivative",
u"cross section: correction",
u"neutrino nucleon: inelastic scattering",
u"n: background",
u"electron: particle source",
u"photon photon: inclusive reaction",
u"cosmological model: cyclic",
u"quantum electrodynamics: noncommutative",
u"frequency: high",
u"axial",
u"p-brane: 1",
u"group: SU(3)",
u"entropy: perturbation",
u"interaction: nonlocal",
u"parton: cascade",
u"antigravitation",
u"W: width",
u"form factor: decay",
u"electron: storage ring",
u"algebra: SU(2)",
u"perturbation: vector",
u"differential cross section: slope",
u"accelerator: technology",
u"flux: upper limit",
u"intermediate boson: leptonic decay",
u"detector: pixel",
u"dimuon: final state",
u"gauge field theory: massive",
u"algebra: noncommutative",
u"time: asymmetry",
u"nuclear reaction: space-time",
u"photon: beam",
u"effect: nonlocal",
u"space-time: Lifshitz",
u"symmetry: SL(2,R)",
u"neutrino: propagation",
u"Lambda/b0: hadronic decay",
u"Goldstino",
u"B0 anti-B0: oscillation",
u"meson resonance: width",
u"group: Poincare",
u"string: spin",
u"unparticle",
u"dibaryon",
u"algebra: Grassmann",
u"dilaton: linear",
u"supersymmetry: flavor",
u"pressure: anisotropy",
u"hadron hadron: elastic scattering",
u"electron: relativistic",
u"resonance: parametric",
u"antiquark",
u"psi(3685): radiative decay",
u"optics: reflection",
u"matter: solar",
u"jet: thrust",
u"operator: determinant",
u"production",
u"hydrogen: liquid",
u"particle source",
u"W'",
u"Bethe ansatz: thermodynamical",
u"quantum chromodynamics: plasma",
u"transformation: unitarity",
u"black hole: information theory",
u"X(3872): hadronic decay",
u"momentum: correlation",
u"psi(3770): hadronic decay",
u"B/c: branching ratio",
u"orbit: stability",
u"invariance: topological",
u"K: form factor",
u"fundamental constant",
u"photon nucleus: interaction",
u"particle: energy",
u"flux: electric",
u"superconductivity: topological",
u"model: spectator",
u"deuteron: form factor",
u"interpretation of experiments: CDF",
u"p: semileptonic decay",
u"quark: mass ratio",
u"axial gauge",
u"field theory: finite",
u"magnetic field: production",
u"gravitation: topological",
u"baryon: form factor",
u"monopole: condensation",
u"torsion: discrete",
u"charge: color",
u"tensor: Killing",
u"Gepner model",
u"photon p: Compton scattering",
u"quark: excited state",
u"gravitation: energy",
u"differential forms: 2",
u"photon electron: Compton scattering",
u"forward spectrometer",
u"semiconductor detector: vertex",
u"photon: tagged beam",
u"vector boson: leptonic decay",
u"topology: transition",
u"horizon: topology",
u"Navier-Stokes equation",
u"gravitation: instanton",
u"current: electric",
u"gravitation: entropy",
u"nucleon hyperon: interaction",
u"coupling constant: axial-vector",
u"Einstein equation: linear",
u"irradiation",
u"operator: twist",
u"baryon: mass difference",
u"inflaton: Higgs particle",
u"algebra: Weyl",
u"BICEP",
u"graviton: production",
u"Minkowski",
u"stop: decay",
u"field theory: integrability",
u"top: mass: measured",
u"chiral ring",
u"gauge field theory: SO(3)",
u"experimental methods: proposed",
u"electron p: mass ratio",
u"space: vector",
u"symmetry: Galilei",
u"sparticle: cascade decay",
u"neutralino: relic density",
u"p: energy spectrum",
u"saturation: density",
u"Meissner effect",
u"n: flux",
u"Lambda/b0: semileptonic decay",
u"energy: asymmetry",
u"coupling: nonlinear",
u"resonance: scalar",
u"transition: quark hadron",
u"neutrino: massless",
u"gauge field theory: Z(2)",
u"boson: chiral",
u"beam tune",
u"representation",
u"vacuum state: anti-de Sitter",
u"spin: operator",
u"charmed meson: strange meson",
u"constraint: solution",
u"width",
u"carbon: fluorine",
u"photon axion: transition",
u"flux: background",
u"philosophy",
u"meson: model",
u"electron deuteron: deep inelastic scattering",
u"partial wave analysis: coupled channel",
u"K: decay constant",
u"synchrotron radiation: particle source",
u"quarkonium: wave function",
u"field equations: nonlinear",
u"B/s: semileptonic decay",
u"eta: hadronic decay",
u"algebra: vertex",
u"gauge boson: massive",
u"D: decay",
u"gauge boson: postulated particle",
u"quantum chromodynamics: massless",
u"Higgsino",
u"shower detector",
u"X-ray: flux",
u"redshift: dependence",
u"confidence limit",
u"nucleus: finite",
u"renormalization group: invariance",
u"charged particle: transverse momentum",
u"dark matter: signature",
u"symmetry: U(N)",
u"maximally helicity violating amplitude",
u"cross section: calculated",
u"spin: liquid",
u"wave function: collapse",
u"photon: multiple production",
u"quark gluon",
u"quantum gravity: correction",
u"baryon: hadroproduction",
u"beryllium: oxygen",
u"operator: penguin",
u"gauge boson: leptonic decay",
u"color: triplet",
u"quark: associated production",
u"gluon: plasma",
u"resonating group method",
u"cosmic radiation: hadronic component",
u"B/s0 anti-B/s0: oscillation",
u"field theory: Proca",
u"meson: excited state",
u"solution: vortex",
u"unitary gauge",
u"gravitation: semiclassical",
u"string: excited state",
u"supergravity: conformal",
u"computer: algebra",
u"UHE",
u"vacuum state: flux",
u"photon electron: interaction",
u"dilepton: same sign",
u"electric field: external field",
u"renormalization: dependence",
u"channel cross section: momentum dependence",
u"photon: wave function",
u"precession",
u"acceleration: shock waves",
u"jet: rapidity",
u"pi: structure function",
u"muon: transverse momentum",
u"screening: effect",
u"electron: linear accelerator",
u"entropy: statistical",
u"charmed meson: excited state",
u"quarkonium: dissociation",
u"W: polarization",
u"hadron: exotic",
u"beta beam",
u"CNGS",
u"psi mesons",
u"black brane: solution",
u"electron: production",
u"Foldy-Wouthuysen transformation",
u"quantum mechanics: nonrelativistic",
u"quark: helicity",
u"nucleon: polarization",
u"Dubna PS",
u"symmetry: abelian",
u"Omega-",
u"Verma module",
u"quintom",
u"Feynman graph: tadpole",
u"graded",
u"Breit-Wigner",
u"susceptibility: chiral",
u"pi: cloud",
u"particle: orbit",
u"action",
u"metric: fluctuation",
u"dilaton: background",
u"plasma: stability",
u"pi+: photoproduction",
u"jet: final state",
u"niobium",
u"drift chamber: drift tube",
u"correction: Coulomb",
u"gravitational radiation detector: network",
u"Bonn ELSA Stor",
u"photoproduction",
u"quarkonium: electroproduction",
u"excited nucleus",
u"bound state: mass",
u"renormalization: beta function",
u"dependence: density",
u"spectrum: BPS",
u"omega(783): radiative decay",
u"symmetry: CP",
u"compactification: warped",
u"electron: gas",
u"gravitation: emergence",
u"helium: superfluid",
u"nickel",
u"top: semileptonic decay",
u"K0(S): hadronic decay",
u"W: helicity",
u"lepton quark: symmetry",
u"muon: tracks",
u"matter: perturbation",
u"effect: thermal",
u"correction: electromagnetic",
u"potential: gravitation",
u"top: rare decay",
u"HERWIG",
u"photon: coupling",
u"charmed meson: hadroproduction",
u"charge exchange",
u"field theory: nonlinear",
u"space: internal",
u"leptoquark: pair production",
u"heavy quark: decay",
u"p-brane: 2",
u"D-brane: 7",
u"fermion: mass: hierarchy",
u"transformation: diffeomorphism",
u"Phi(1020): branching ratio",
u"scalar particle: singlet",
u"photon: final state",
u"neutrino nucleon: inclusive reaction",
u"optical theorem",
u"nonminimal",
u"effective potential: Higgs",
u"opacity",
u"accelerator: wake field",
u"engineering",
u"operator: nonrenormalizable",
u"gauge: nonabelian",
u"magnetic field: background field",
u"on-shell",
u"helicity: asymmetry",
u"meson: form factor",
u"string model: duality",
u"gravitational radiation: stochastic",
u"string: scattering amplitude",
u"neutrino/e: particle identification",
u"Crystal Ball",
u"dark matter: asymmetry",
u"correction: derivative",
u"space-time: Finsler",
u"Z': width",
u"space: deformation",
u"dispersion relation: deformation",
u"quark: interaction",
u"proton synchrotron",
u"Riemann surface: higher-order",
u"resonance: vector",
u"meson: mixing angle",
u"field theory: Higgs",
u"meson nucleon: interaction",
u"model: vertex",
u"WIMP nucleon: scattering",
u"vacuum state: bubble",
u"flow: Ricci",
u"LSP: dark matter",
u"black hole: Kaluza-Klein",
u"n anti-n: oscillation",
u"nucleus nucleus: scattering",
u"vector meson: polarization",
u"excited state: Kaluza-Klein",
u"mass: upper limit",
u"short-distance behavior",
u"D/s+: hadronic decay",
u"matter: density: high",
u"monopole: operator",
u"CALICE",
u"sparticle: spectrum",
u"stau",
u"sinh-Gordon equation",
u"electron p: scattering",
u"quark: composite",
u"gauge boson: propagator",
u"B/s0: rare decay",
u"fermion number",
u"B/s0 anti-B/s0: mass difference",
u"graviton: massless",
u"hadron hadron: scattering amplitude",
u"nucleon resonance: effect",
u"computer: communications",
u"spin: coupling",
u"moment: dipole",
u"B0: leptonic decay",
u"entropy: current",
u"quark gluon: vertex function",
u"field theory: local",
u"vector meson: hadronic decay",
u"potential: energy",
u"noise: thermal",
u"space-time: Kantowski-Sachs",
u"pi-",
u"fluctuation: primordial",
u"Weinberg",
u"bubble: production",
u"field theory: thermal",
u"computer: quantum",
u"muon: particle identification",
u"ionization: energy loss",
u"scattering: two-particle",
u"particle: composite",
u"slepton: pair production",
u"gauge boson: new particle",
u"field theory: Calabi-Yau",
u"electron: cloud",
u"antineutrino: oscillation",
u"gauge field theory: SU(4) x SU(2) x SU(2)",
u"jet: minijet",
u"photon photon",
u"trigger: efficiency",
u"spinor: Weyl",
u"power spectrum: angular dependence",
u"W: electroproduction",
u"beam cooling: ionization",
u"quantum gravity: nonperturbative",
u"quantum mechanics: noncommutative",
u"scalar particle: decay",
u"psi(3770): electroproduction",
u"space-time: emergence",
u"space-time: Kasner",
u"spin: polarization",
u"M-brane: 5",
u"drag force",
u"luminosity: monitoring",
u"WIMP: direct detection",
u"p: irradiation",
u"flux: quantization",
u"nucleon resonance: width",
u"black hole: Myers-Perry",
u"light nucleus",
u"electron positron: mass spectrum",
u"krypton",
u"electromagnetic field: classical",
u"energy: zero-point",
u"resummation: threshold",
u"transition: geometrical",
u"f0(1370)",
u"coupling: axial-vector",
u"leptonic decay",
u"spin: interaction",
u"anti-B: branching ratio",
u"B/s: decay constant",
u"model: production",
u"quark: coalescence",
u"interpretation of experiments: CLEO",
u"Lambda: hadronic decay",
u"B0: width",
u"W: exchange",
u"symmetry: sphere",
u"hadronic decay",
u"central charge: 1",
u"tau-: branching ratio",
u"semi-inclusive reaction",
u"violation: flavor",
u"string: classical",
u"algebra: W(N)",
u"mass: charm",
u"algebra: Lorentz",
u"gamma ray: energy",
u"multiplicity: ratio",
u"translation",
u"scale: string",
u"Lippmann-Schwinger equation",
u"quantum mechanics: wave function",
u"n: capture",
u"charmonium: production",
u"model: cluster",
u"ionization chamber",
u"stau: mass",
u"reparametrization",
u"gamma ray: galaxy",
u"B: form factor",
u"flavor: 1",
u"velocity: rotation",
u"interpretation of experiments: PHENIX",
u"decay: weak interaction",
u"potential: stability",
u"model: relativistic",
u"axion: decay constant",
u"tube",
u"orbifold: singularity",
u"interaction: nonlinear",
u"quarkonium: leptonic decay",
u"detector: surface",
u"anti-B: radiative decay",
u"two-particle",
u"Salam-Weinberg model",
u"stop",
u"space-time: dimension: 3",
u"space-time: dimension: 5",
u"indium",
u"gluon: transverse momentum",
u"BRAHMS",
u"diffraction: production",
u"accelerator: control system",
u"neutrino nucleus: scattering",
u"differential cross section: correction",
u"color: 2",
u"molecule",
u"n-point function: 1",
u"isospin: symmetry",
u"conductivity: electric",
u"B/c: hadronic decay",
u"time: Euclidean",
u"bending magnet",
u"model: scalar",
u"hadron: molecule",
u"electroweak interaction: vacuum state",
u"Sudakov",
u"nucleon resonance: mass",
u"quark: operator",
u"K+: associated production",
u"operator: dimension: 5",
u"positron p: inclusive reaction",
u"space: lens",
u"Z0: production",
u"matter: interaction",
u"pi: formation",
u"excited state: spectrum",
u"expansion: heat kernel",
u"model: nuclear reaction",
u"neutrino: lifetime",
u"threshold: production",
u"group: modular",
u"invariance: Galilei",
u"SO(8)",
u"cosmic coincidence",
u"baryon resonance: hadronic decay",
u"microprocessor",
u"nucleon: polarizability",
u"bottom meson",
u"category: tensor",
u"Calogero model",
u"M-brane: 2",
u"lepton: charged particle",
u"charge: screening",
u"minijet",
u"magnetic monopole: condensation",
u"gluon: Kaluza-Klein",
u"meson: mass difference",
u"coupling: tensor",
u"PSI Cycl",
u"plasma: nonabelian",
u"anti-p p: inelastic scattering",
u"group: exceptional",
u"photon: scattering",
u"pi- p: exclusive reaction",
u"geometry: warped",
u"radion: mass",
u"action: complex",
u"background: geometry",
u"hyperon: hadronic decay",
u"fractal: dimension",
u"spectrum: fluctuation",
u"SO(4)",
u"string model: landscape",
u"Yang-Mills-Higgs theory",
u"field theoretical model",
u"Goldstone particle: mass",
u"radiation: pressure",
u"field theory: algebra",
u"field equations: monopole",
u"expansion: cluster",
u"detector",
u"dimension: spectral",
u"symmetry: SU(3) x SU(3)",
u"weak field",
u"path integral: Euclidean",
u"phi**n model",
u"B: hadroproduction",
u"particle number",
u"coupling constant: gauge",
u"charge dependence",
u"interpretation of experiments: Brookhaven PS",
u"lepton: multiplicity",
u"condensation: vacuum",
u"f0(600): width",
u"Jones polynomial",
u"gauge field theory: SU(5)",
u"gauge field theory: Sp(N)",
u"antineutrino/mu",
u"bound state: energy",
u"diffraction: structure function",
u"hyperon: polarization",
u"B/c: semileptonic decay",
u"Hopf",
u"AdS(3)",
u"coupling: matter",
u"coupling: axial",
u"coupling: electromagnetic",
u"D: hadroproduction",
u"spinor: Majorana",
u"W': mass",
u"Knizhnik-Zamolodchikov equation",
u"compactification: orientifold",
u"pi: charged particle",
u"muon: trigger",
u"interpretation of experiments: STAR",
u"tensor meson",
u"charmed meson: semileptonic decay",
u"sloan digital sky survey",
u"Higgs particle: Goldstone particle",
u"meson baryon: scattering amplitude",
u"operator: Laplace",
u"energy: exchange",
u"fluctuation: statistical",
u"gauge field theory: SU(4)",
u"Soudan",
u"Z0: exchange",
u"detector: technology",
u"D/s: decay constant",
u"Wess-Zumino-Witten model: SU(2)",
u"fuzzy",
u"B0: pair production",
u"particle: exotic",
u"D/s*(2110)",
u"micro-pattern detector",
u"baryon: excited state",
u"SLAC Lab",
u"random phase approximation: quasiparticle",
u"intermediate boson: fusion",
u"temperature: effect",
u"field equations: gravitation",
u"radio wave: emission",
u"J/psi(3100): width",
u"gauge field theory: conformal",
u"electric field: spatial distribution",
u"gravitation: f(T)",
u"space-time: Kaehler",
u"quantum chromodynamics: Theta parameter",
u"gravitation: external field",
u"baryon resonance: dibaryon",
u"expansion: hydrodynamics",
u"quantum chromodynamics: finite temperature",
u"field theory: collective",
u"model: thermal",
u"D/s0*(2317)",
u"time: conformal",
u"pi pi: scattering",
u"numerical methods: variational",
u"interpretation of experiments: PAMELA",
u"electron muon: transition",
u"K+",
u"fermion: tunneling",
u"potential: model",
u"charmed baryon: mass",
u"Georgi-Glashow model",
u"charged particle: rapidity spectrum",
u"factorization: violation",
u"parity: invariance",
u"flavon",
u"Pioneer anomaly",
u"deuteron: photofission",
u"vector meson: hadroproduction",
u"SL(2)",
u"Bianchi",
u"axion: coupling",
u"dark energy: coupling",
u"quantum chromodynamics: thermodynamics",
u"fibre bundle: vector",
u"accelerator: design",
u"X-ray: energy spectrum",
u"longitudinal momentum",
u"Bagger-Lambert-Gustavsson model",
u"nucleon nucleon: inelastic scattering",
u"heavy quark: pair production",
u"null-energy condition: violation",
u"finite temperature: effect",
u"background: time dependence",
u"Theta(1540): width",
u"effect: off-shell",
u"plasma: wake field",
u"electron nucleon: inelastic scattering",
u"hadronization: model",
u"quantum chromodynamics: axion",
u"TOTEM",
u"gravitation: strong field",
u"quantum gravity: Euclidean",
u"Bjorken",
u"decay",
u"non-Gaussianity: primordial",
u"K+: rare decay",
u"parton: interaction",
u"storage ring",
u"ILD detector",
u"resonance: gas",
u"refractive index",
u"analysis: dimensional",
u"model: coupled channel",
u"AdS(5) x S(5)",
u"D: decay constant",
u"scaling: anisotropy",
u"analysis: harmonic",
u"neutrino: scattering",
u"propagator: scalar",
u"width: finite",
u"model: cyclic",
u"initial-state interaction",
u"quark: momentum",
u"charged particle: massive",
u"cascade: electromagnetic",
u"charmed meson: branching ratio",
u"Theta(1540): hadronic decay",
u"neutrino neutrino: interaction",
u"flow: Wilson",
u"chi mesons: radiative decay",
u"flavor: asymmetry",
u"neutrino: VHE",
u"model: ekpyrotic",
u"gravitation: Einstein-Cartan",
u"leptogenesis: thermal",
u"quasiparticle: model",
u"betatron oscillation",
u"Donaldson theory",
u"gauge boson: exchange",
u"cosmic radiation: cascade",
u"p: accelerator",
u"space-time: Gowdy",
u"lepton: magnetic moment",
u"potential: oscillator",
u"hyperon: semileptonic decay",
u"action: spectral",
u"magnetic moment: dipole",
u"omega(783): photoproduction",
u"form factor: parametrization",
u"supergravity: 4",
u"photon: interaction",
u"quark: distribution function",
u"cosmic radiation: spatial distribution",
u"axion-like particles",
u"critical phenomena: superconductivity",
u"pentaquark: mass",
u"string: massive",
u"black hole: acoustic",
u"interpretation of experiments: BELLE",
u"photon: on-shell",
u"gauge field theory: U(2)",
u"field theory: interaction",
u"symmetry: SL(2)",
u"eta: mass",
u"perturbation: electromagnetic",
u"D-brane: 5",
u"spin: 3",
u"attenuation",
u"string: spectrum",
u"transformation: Weyl",
u"f0(1710)",
u"scalar meson: hadronic decay",
u"intermittency",
u"temporal gauge",
u"p: linear accelerator",
u"group: SU(N)",
u"quark antiquark: pair",
u"hadron: decay",
u"soliton: mass",
u"neutrino: wave function",
u"quantum mechanics: model",
u"muon: energy",
u"phase: geometrical",
u"photon: thermal",
u"quantization: Becchi-Rouet-Stora",
u"gravitation: noncommutative",
u"screening: length",
u"quark: deconfinement",
u"noise: spectrum",
u"rho(770): hadronic decay",
u"fermion: excited state",
u"duality: invariance",
u"Homestake",
u"gravitation: nonlinear",
u"background: model",
u"K: production",
u"gauge field theory: boson",
u"a1(1260)",
u"Wilson loop: correlation function",
u"charmed meson: mass",
u"Heavy Quark Effective Theory",
u"Upsilon(10570)",
u"graviton: scattering amplitude",
u"wire",
u"perturbation theory: correction",
u"B/s0: semileptonic decay",
u"gas: pressure",
u"holonomy: Spin(7)",
u"neutrino/e: flux",
u"upgrade: proposed",
u"gluon: shadowing",
u"drift chamber: gas",
u"readout: optical",
u"Lambda Lambda: hypernucleus",
u"quantization: correction",
u"coupling constant: renormalization",
u"target: mass",
u"quadrupole lens",
u"pi: multiplicity",
u"cosmic string: network",
u"associated production",
u"vector meson: leptonic decay",
u"deuteron: binding energy",
u"Morse theory",
u"electron: polarization",
u"mass: solar",
u"p: flux",
u"isospin: density",
u"meson resonance: mass",
u"radiation: thermal",
u"p-brane: 0",
u"background: dependence",
u"Lambda: associated production",
u"photon electron: exclusive reaction",
u"matter: induced",
u"p: model",
u"f0(1500)",
u"heterotic",
u"unitarity: constraint",
u"correlation: quantum",
u"interpretation of experiments: KAMIOKANDE",
u"neutralino nucleon: elastic scattering",
u"ice",
u"oscillation: length",
u"domain wall: BPS",
u"superconductivity: model",
u"black hole: interaction",
u"model: geometrical",
u"cross section: factorization",
u"J/psi(3100): dissociation",
u"quantum chromodynamics: validity test",
u"space-time: Calabi-Yau",
u"valence",
u"expansion: semiclassical",
u"grand unified theory: SU(5) x U(1)",
u"B: wave function",
u"charge distribution",
u"pseudoscalar meson: radiative decay",
u"quantum chromodynamics: radiation",
u"meson: molecule",
u"left-right",
u"structure function: small-x",
u"upsilon mesons: hadroproduction",
u"high temperature expansion",
u"charmed meson: electroproduction",
u"curvature: tensor",
u"total cross section: calculated",
u"field theory: renormalizable",
u"scale: saturation",
u"energy eigenstate",
u"neutrino nucleus: coherent interaction",
u"star: stability",
u"Morita equivalence",
u"rho(770)0: photoproduction",
u"polarization: effect",
u"muon: production",
u"sea: Dirac",
u"quantization: nonperturbative",
u"halo: mass",
u"string model: fragmentation",
u"electron: transverse momentum",
u"K- nucleus: nuclear reaction",
u"radiation: energy",
u"WIMP: velocity",
u"photon gluon: fusion",
u"gas: admixture",
u"nuclear matter: asymmetry",
u"muon+: leptonic decay",
u"neutrino: decay modes",
u"computer: performance",
u"field theory: Kaehler",
u"star: rotation",
u"leptoquark: coupling",
u"gauge boson: decay",
u"cosmological model: anisotropy",
u"quark: family",
u"SU(2) x SU(2)",
u"space-time: dimension: 4",
u"dark matter: hidden sector",
u"B0 anti-B0: mass difference",
u"Mach principle",
u"bottom: hadroproduction",
u"gravitation: acceleration",
u"psi(3770)",
u"quantum chromodynamics: light front",
u"oscillation: frequency",
u"K0(L): leptonic decay",
u"diffusion: model",
u"Lambda/b0: branching ratio",
u"fermion: localization",
u"pi- nucleus: inclusive reaction",
u"electron: spectrum",
u"energy: fluctuation",
u"X-ray: irradiation",
u"renormalization group: effect",
u"top: electroproduction",
u"lead: tungsten",
u"atom: interferometer",
u"interference: quantum",
u"D: branching ratio",
u"space-time: Kerr-Newman",
u"mass: negative",
u"interaction: vector",
u"distribution amplitude",
u"hopping",
u"pomeron: coupling",
u"neutralino: decay modes",
u"formation: time",
u"tachyon: stability",
u"lepton: electric moment",
u"quantum chromodynamics: quenching",
u"nucleon: decay",
u"Sigma",
u"form factor: hadronic",
u"strangeness: enhancement",
u"Einstein-Hilbert",
u"new particle: hadroproduction",
u"vacuum state: Theta parameter",
u"helium: hypernucleus",
u"algebra: Frobenius",
u"structure function: polarization",
u"omega(783): hadroproduction",
u"chromomagnetic",
u"Lambda(1520)",
u"electroweak interaction: mixing angle",
u"coupling: pseudoscalar",
u"photon deuteron: exclusive reaction",
u"K0(L): rare decay",
u"quark model: nonrelativistic",
u"semiconductor: optical",
u"CRESST",
u"group: discrete",
u"baryon: width",
u"meson: multiplet",
u"approximation: static",
u"accelerator: proposed",
u"correction: thermal",
u"dissociation",
u"radiation: yield",
u"coupling: spin: orbit",
u"matter: strong interaction",
u"broadening",
u"NOvA",
u"n: irradiation",
u"nucleon nucleon: scattering amplitude",
u"torus: noncommutative",
u"regularization: heat kernel",
u"Lorentz gauge",
u"mass spectrum: Kaluza-Klein",
u"structure function: slope",
u"scattering amplitude: dipole",
u"black hole: orbit",
u"black hole: perturbation",
u"particle: stability",
u"cross section: enhancement",
u"boson: string model",
u"tau: mass",
u"D*(2010): photoproduction",
u"positron: production",
u"Palatini model",
u"universality: violation",
u"muon: energy loss",
u"quark: form factor",
u"Legendre transformation",
u"fermion: current",
u"lepton: family",
u"gravitation: stability",
u"matter: asymmetry",
u"dilepton: mass",
u"statistical analysis: error",
u"transition radiation detector",
u"string: partition function",
u"binary: mass ratio",
u"Lee-Wick model",
u"field equations: soliton",
u"tensor: Killing-Yano",
u"scale: inflation",
u"Urca process",
u"NA61",
u"neutrino: interference",
u"Lambda: pair production",
u"false vacuum: decay",
u"argon: organic compounds",
u"thrust",
u"K*(892): hadronic decay",
u"electron: wave function",
u"right-handed",
u"hypernucleus: decay",
u"lepton deuteron: deep inelastic scattering",
u"quantum electrodynamics: supersymmetry",
u"Penning trap",
u"weak interaction: coupling constant",
u"gravitation: back reaction",
u"time machine",
u"wave function: light front",
u"gravitational radiation detector: sensitivity",
u"gauge field theory: action",
u"particle identification: efficiency",
u"quantum gravity: canonical",
u"resonance: oscillation",
u"MACRO",
u"Yangian",
u"hydrogen: target",
u"meson: radiative decay",
u"mass: accretion",
u"neutrino antineutrino: annihilation",
u"freeze-out: thermal",
u"nucleus: form factor",
u"neutrino: propagator",
u"Baecklund transformation",
u"pi nucleon: inelastic scattering",
u"electron nucleus: interaction",
u"neutrino: decoupling",
u"bootstrap: conformal",
u"transverse momentum: correlation",
u"squark: decay modes",
u"horizontal symmetry",
u"squark: hadroproduction",
u"symmetry: U(1) x U(1)",
u"gluon: resummation",
u"n: density",
u"heavy quark: hadroproduction",
u"Collins",
u"neutralino: NLSP",
u"J/psi(3100): direct production",
u"deuteron: hadroproduction",
u"neutralino: branching ratio",
u"vertex",
u"bottom baryon",
u"lepton number: conservation law",
u"dark matter: capture",
u"fermion: matter",
u"photon photon: elastic scattering",
u"space-time: lattice",
u"D/s+: branching ratio",
u"evaporation",
u"quark hadron: transition",
u"photon: virtual",
u"symmetry: S(3)",
u"spinor: representation",
u"quantum chromodynamics: strong coupling",
u"neutrino: asymmetry",
u"curvature: coupling",
u"galaxy: redshift",
u"instanton: Yang-Mills",
u"space-time: Riemann-Cartan",
u"gravitational radiation: frequency",
u"supergravity: duality",
u"anti-B/s0: hadronic decay",
u"intermediate boson: hadroproduction",
u"neutrino: burst",
u"WIMP nucleon: cross section",
u"D+",
u"photon p: elastic scattering",
u"Xi-",
u"wormhole: traversable",
u"Feynman graph: planar",
u"SO(N)",
u"Markov chain",
u"polarization: rotation",
u"mathematical methods: stochastic",
u"model: impact parameter",
u"pi: radiative decay",
u"gravitation: redshift",
u"current: right-handed",
u"baryon number: asymmetry",
u"density: local",
u"LSP: decay",
u"CP(N) model",
u"KEK Lab",
u"quark: charge",
u"differential cross section: transverse energy",
u"model: left-right",
u"current: chiral",
u"operator: surface",
u"general relativity: validity test",
u"electron nucleon: elastic scattering",
u"symmetry: SO(4)",
u"equivalence theorem",
u"invariance",
u"Phi(1020): photoproduction",
u"neutrino/mu: energy spectrum",
u"rotation: invariance",
u"nuclear physics: correction",
u"spin: alignment",
u"gluon: fragmentation",
u"K0(L): hadronic decay",
u"optics: fibre",
u"radion: stability",
u"heavy quark: symmetry",
u"interpretation of experiments: WMAP",
u"decay: length",
u"heavy quark: spin",
u"quantum electrodynamics: nonrelativistic",
u"frequency",
u"SND",
u"Barrett-Crane model",
u"Sigma(1385)",
u"polarizability: electric",
u"perturbation: nonlinear",
u"gauge field theory: U(1) x U(1)",
u"Iizuka-Okubo-Zweig rule: violation",
u"energy: correction",
u"K0: pair production",
u"twist: 2",
u"differential equations: hierarchy",
u"Regge calculus",
u"fixed point: stability",
u"rho(770): width",
u"beam: injection",
u"staggered",
u"wormhole: stability",
u"density: wave",
u"gauge field theory: SU(3) x SU(2) x SU(2) x U(1)",
u"Wilson loop: BPS",
u"supergravity: Type IIA",
u"electron nucleus: colliding beams",
u"Virasoro",
u"nucleon resonance: hadronic decay",
u"electroweak interaction: rho parameter",
u"feedback",
u"mass: Debye",
u"K+: photoproduction",
u"plasma: anisotropy",
u"hadron: charged particle",
u"beam focusing",
u"gravitino: decay",
u"antineutrino nucleon: deep inelastic scattering",
u"Pauli principle",
u"moduli: Kaehler",
u"background: Robertson-Walker",
u"neutralino: production",
u"charge: tensor",
u"Regge poles: multi-Regge",
u"fast logic: time-of-flight",
u"gluon: multiple production",
u"quantum gravity: model",
u"chargino: decay",
u"baryon number: conservation law",
u"space: Moyal",
u"instanton: partition function",
u"D0: pair production",
u"titanium",
u"geometry: Weyl",
u"mechanics: conformal",
u"Lambda: electroproduction",
u"vacuum polarization: effect",
u"B0: lifetime",
u"fermion number: density",
u"symmetry: violation",
u"string model: Type II",
u"electron: injection",
u"charged particle: particle flow",
u"vector boson: hadroproduction",
u"diffeomorphism: constraint",
u"pressure: negative",
u"stability: classical",
u"estimator",
u"mass resolution",
u"pseudoscalar meson: mixing angle",
u"matter: multiplet",
u"multi-Regge",
u"K*0(1430)",
u"position dependence",
u"supersymmetry: potential",
u"muon+ muon-: annihilation",
u"current: induced",
u"anti-p p: exclusive reaction",
u"fermion: wave function",
u"sneutrino: LSP",
u"molybdenum",
u"D-particle",
u"dihadron: correlation",
u"CVC model",
u"interpretation of experiments: HERMES",
u"curvaton: decay",
u"tetraquark: mass",
u"monopole: Dirac",
u"lepton: hadroproduction",
u"B-L number: violation",
u"quantum mechanics: density matrix",
u"CBM",
u"pi: pole",
u"flow: spectral",
u"differential equations: Lax",
u"scalar meson: multiplet",
u"n-point function: 5",
u"oscillation: coherence",
u"momentum: low",
u"muon: rare decay",
u"current: operator",
u"vacuum state: supersymmetry",
u"nucleus nucleus",
u"quantization: semiclassical",
u"Wino: mass",
u"AGN: jet",
u"derivative: Lie",
u"mass: width",
u"neutrino deuteron: interaction",
u"space: conformal",
u"Feynman",
u"neutrino: exchange",
u"helicity: dependence",
u"cosmic radiation: time dependence",
u"upsilon mesons: leptonic decay",
u"Batalin-Vilkovisky",
u"CCFM equation",
u"Lie",
u"meson: cloud",
u"Verlinde",
u"model: renormalizable",
u"vertex: topological",
u"lattice: Toda",
u"dark matter: inelastic scattering",
u"Fermi gas: relativistic",
u"undulator",
u"spectrum: thermal",
u"muon: beam cooling",
u"decay: angular distribution",
u"quark: right-handed",
u"quark: overlap",
u"model: leptonic",
u"group: de Sitter",
u"star: oscillation",
u"symmetry breaking: rotation",
u"nucleon: density",
u"odderon",
u"cross section: mass dependence",
u"curvature: fluctuation",
u"excited state: collective",
u"amplifier",
u"matter: scalar",
u"model: eikonal",
u"exclusive reaction",
u"photon: bremsstrahlung",
u"showers: energy",
u"triality",
u"heavy quark: energy loss",
u"Starobinsky model",
u"background: fluctuation",
u"electron positron: plasma",
u"magnetic field: spatial distribution",
u"spectrum: scalar",
u"neutrino: energy: high",
u"gravitational radiation: plane wave",
u"fermion: multiplet",
u"rho(770)0: mass",
u"scattering amplitude: analytic properties",
u"gamma ray: irradiation",
u"charmonium: mass",
u"Higgs particle: transverse momentum",
u"vacuum state: Minkowski",
u"space-time: Lorentz",
u"atom: electric moment",
u"G(2)",
u"parity: time reversal",
u"spectrum: massless",
u"pi: transverse momentum",
u"algebra: Hecke",
u"lattice field theory: anisotropy",
u"quark model: light front",
u"electric field: constant",
u"NA49",
u"experimental equipment",
u"operator: derivative",
u"GERDA",
u"Phi(1020): width",
u"Cronin effect",
u"soliton: stability",
u"phase space: boundary condition",
u"exchange: pomeron",
u"isospin: 2",
u"Sachs-Wolfe effect",
u"charmed meson: decay modes",
u"leptoquark: decay",
u"B0: radiative decay",
u"quintessence: potential",
u"sine-Gordon equation",
u"photon: longitudinal",
u"nucleon nucleon: scattering",
u"detector: upgrade",
u"color: sextet",
u"baryon: semileptonic decay",
u"nucleation",
u"scalar particle: charged particle",
u"photon photon: inelastic scattering",
u"multiplicity: correlation",
u"fermion: right-handed",
u"formula",
u"wave: acoustic",
u"Ramond",
u"space: S(2)",
u"model: tensor",
u"black hole: dyon",
u"differential forms: modular",
u"rho(770)0: electroproduction",
u"string: rotation",
u"charmed meson: production",
u"Lambda/b0",
u"n: lifetime",
u"supersymmetry: signature",
u"gluon: helicity",
u"decay constant: ratio",
u"sneutrino",
u"quark: exchange",
u"parity: transformation",
u"rho(1450)",
u"dark energy: Ricci",
u"quark: decay modes",
u"model: pomeron",
u"scattering amplitude: interference",
u"iron: cosmic radiation",
u"electron deuteron: elastic scattering",
u"emission: model",
u"entropy: holography",
u"energy: injection",
u"chi/c0(3415)",
u"lepton: mass spectrum",
u"correction: mass",
u"SO(5)",
u"Upsilon(10020)",
u"quark: mass spectrum",
u"symmetry: scaling",
u"exchange: one-boson",
u"backscatter",
u"density: operator",
u"boson: field theory",
u"energy levels: transition",
u"hadron hadron: scattering",
u"transformation: mirror",
u"electron: energy loss",
u"multiplicity: energy dependence",
u"MICE",
u"horizon: crossing",
u"NA48",
u"nuclear reaction: peripheral",
u"stability: marginal",
u"parton: fragmentation",
u"tau: rare decay",
u"scalar particle: new particle",
u"symmetry: SU(2) x U(1)",
u"helicity: conservation law",
u"Darboux transformation",
u"n-point function: 6",
u"tau: final state",
u"neutrino nucleus: elastic scattering",
u"fermion: spectrum",
u"quark: production",
u"orbifold: Z(2)",
u"scintillation counter: sodium-iodide",
u"muon: lifetime",
u"gauge field theory: topological",
u"interaction: short-range",
u"chi mesons: hadronic decay",
u"spin: rotation",
u"calorimeter: performance",
u"vacuum polarization: tensor",
u"parton: saturation",
u"Nahm equation",
u"quark: pair",
u"information theory: quantum",
u"effective range",
u"spin: rotator",
u"perturbation theory: stochastic",
u"heavy quark: potential",
u"asymmetry: CP: measured",
u"Galilei",
u"noise: quantum",
u"optics: quantum",
u"Proca",
u"symmetry: enhancement",
u"admixture",
u"neutrino electron: interaction",
u"SU(6)",
u"HERA-B",
u"gauge field theory: SU(N) x SU(M)",
u"tin",
u"matter: mirror",
u"W W: fusion",
u"radiation: background",
u"triplet: scalar",
u"Einstein Telescope",
u"deep underground detector: proposed",
u"X-ray: detector",
u"fermion: charge",
u"dark energy: agegraphic",
u"fermion: left-handed",
u"fluctuation: effect",
u"field theory: light front",
u"parity: symmetry",
u"particle flow: elliptic flow",
u"quark: hadronic decay",
u"supergravity: action",
u"dilaton: mass",
u"geophysics: effect",
u"quarkonium: hadron spectroscopy",
u"sparticle: width",
u"quarkonium: photoproduction",
u"safety",
u"p: density",
u"gluon: coupling",
u"cosmic radiation: detector",
u"nucleon: cosmic radiation",
u"Frejus",
u"operator: dipole",
u"rho(770)0: hadronic decay",
u"fermion: doublet",
u"W: right-handed",
u"conservation law: energy-momentum",
u"p n: inelastic scattering",
u"SuperB",
u"B/s0: width",
u"target: fragmentation",
u"seesaw model: Type I",
u"resonance: Breit-Wigner",
u"transition: electromagnetic",
u"geometry: conformal",
u"spectrum: density",
u"W W: scattering",
u"Bogomolnyi equation",
u"Higgs",
u"beam: phase space",
u"spin: conformal",
u"magnetic spectrometer: satellite",
u"cosmic radiation: power spectrum",
u"compactification: heterotic",
u"gauge field theory: spin",
u"Goldstone particle: exchange",
u"brightness",
u"anti-K nucleon: interaction",
u"S-matrix: pole",
u"sigma model: Poisson",
u"neutrino: sterile: mass",
u"chemistry",
u"perturbation theory: lattice",
u"interpretation of experiments: CERN LHC Coll",
u"fermion: doubling",
u"B/s",
u"effect: space charge",
u"black hole: critical phenomena",
u"muon: background",
u"quantum chromodynamics: string",
u"rho parameter",
u"on-line",
u"vertex function: correction",
u"sigma model: supersymmetry",
u"Upsilon(10570): hadronic decay",
u"matter: background",
u"space-time: AdS(4) x CP(3)",
u"symmetry: U(3)",
u"CP(N)",
u"squark: mixing",
u"quantum electrodynamics: validity test",
u"dimension: 9",
u"magnetic field: solar",
u"gallium",
u"mass spectrum: hierarchy",
u"Glashow-Iliopoulos-Maiani model",
u"scattering amplitude: multigluon",
u"charm: decay",
u"WIMP nucleus: scattering",
u"photon: propagation",
u"accelerator: laser",
u"meson quark: model",
u"model: Breit-Wigner",
u"quark: singlet",
u"glass",
u"electron electron: interaction",
u"nucleon resonance: mass spectrum",
u"target",
u"Langevin equation: complex",
u"J/psi(3100): suppression",
u"spin: precession",
u"fibre: optical",
u"RF system: superconductivity",
u"effective potential: finite temperature",
u"group: SL(2,C)",
u"diquark: mass",
u"quantum chromodynamics: effect",
u"moduli space: Kaehler",
u"angular momentum: conservation law",
u"electromagnetic field: background field",
u"D-brane: 4",
u"B+: semileptonic decay",
u"neutrino: absorption",
u"sbottom",
u"spin: current",
u"loop integral: 4",
u"dilute gas approximation",
u"D-brane: charge",
u"indefinite metric",
u"p: mass",
u"space-time: rotation",
u"group: symplectic",
u"gauge boson: Kaluza-Klein",
u"spin: density",
u"momentum: operator",
u"thorium",
u"J/psi(3100): absorption",
u"momentum: anisotropy",
u"symmetry: SO(N)",
u"model: nonlinear",
u"effect: Hawking",
u"charm: hadroproduction",
u"gauge boson: production",
u"thermodynamics: Bethe ansatz",
u"Primakoff effect",
u"charmonium: hadron spectroscopy",
u"quantum chromodynamics: density",
u"Kerr",
u"K0: semileptonic decay",
u"slepton: decay",
u"wormhole: throat",
u"Y(4260)",
u"Delta",
u"sparticle: mass difference",
u"aerogel",
u"acoustic",
u"particle: classical",
u"coupling constant: upper limit",
u"pi K: elastic scattering",
u"gravitation: nonlocal",
u"dosimetry",
u"separable potential",
u"D*2(2460)",
u"n: particle source",
u"air: fluorescence",
u"hypernucleus: energy levels",
u"bound state: spectrum",
u"bino: mass",
u"boson: massive",
u"dimension: compactification",
u"TRIUMF Cycl",
u"symmetry: nonabelian",
u"operator: constraint",
u"hadron nucleus: nuclear reaction",
u"quantization: space-time",
u"K: width",
u"photon: decay",
u"neutrino: capture",
u"lepton: leptonic decay",
u"upsilon mesons: hadronic decay",
u"gadolinium",
u"Dubna Lab",
u"transport theory: relativistic",
u"SL(2,Z)",
u"superstring: open",
u"graviphoton",
u"binary: mass",
u"detector: network",
u"charmed meson: hadron spectroscopy",
u"proposal",
u"pentaquark: width",
u"critical phenomena: topological",
u"potential: local",
u"pi0: yield",
u"gravitation: weak field",
u"doubling",
u"horizon: Rindler",
u"positronium",
u"baryon resonance: hadroproduction",
u"neutrino: angular distribution",
u"neutrino nucleon: scattering",
u"regularization: infrared",
u"polarization: operator",
u"scattering amplitude: helicity",
u"spin: polarizability",
u"time: discrete",
u"graviton: coupling",
u"baryon: ground state",
u"B0: decay modes",
u"fixed point: conformal",
u"fermion: multiple production",
u"correlation function: vector",
u"WIMP: density",
u"moduli: decay",
u"transmutation: dimensional",
u"bottom baryon: mass",
u"photon: regeneration",
u"fluorescence",
u"hydrogen: mesic atom",
u"quarkonium: branching ratio",
u"scalar meson: mass spectrum",
u"correction: chiral",
u"technology",
u"random field",
u"infrared problem: fixed point",
u"pseudoscalar meson: form factor",
u"channeling",
u"amplitude analysis: multipole",
u"mass: threshold",
u"Higgs particle: interaction",
u"renormalization: effect",
u"Xi",
u"scattering: Coulomb",
u"nucleus: shadowing",
u"Rindler",
u"group: U(1)",
u"symmetry: U(2)",
u"meson dominance",
u"plasma: relativistic",
u"symmetry: Poincare",
u"gluon: energy spectrum",
u"string: confinement",
u"anti-D0: hadronic decay",
u"Killing",
u"avalanche",
u"track data analysis: cluster",
u"dilepton: yield",
u"droplet",
u"KATRIN",
u"lepton hadron: deep inelastic scattering",
u"SO(6)",
u"space: Rindler",
u"gravitation: torsion",
u"pi pi",
u"nuclide: lifetime",
u"correction: nonlinear",
u"nuclear reaction: temperature",
u"symmetry: Weyl",
u"color: coherence",
u"lepton: rare decay",
u"transverse momentum: broadening",
u"WIMP nucleon: interaction",
u"K0 anti-K0: oscillation",
u"bent crystal",
u"electron: spin",
u"Z0: transverse momentum",
u"symmetry: O(2)",
u"effect: electromagnetic",
u"propagator: pole",
u"operator: renormalization",
u"duality: violation",
u"charmed particle: hadroproduction",
u"pi- p: inelastic scattering",
u"DESY TESLA Linac",
u"field equations: kink",
u"Novosibirsk Stor2",
u"spurion",
u"electric field: high",
u"SL(2,R)",
u"gluon: excited state",
u"scalar particle: pair production",
u"cosmic background radiation: B-mode",
u"gamma ray: absorption",
u"chi/c1(3510)",
u"quantum gravity: discrete",
u"correlation function: Euclidean",
u"symmetry breaking: effect",
u"B/s: rare decay",
u"isospin: effect",
u"maximal abelian gauge",
u"photon deuteron: inelastic scattering",
u"Lax",
u"two-pion",
u"fermion: production",
u"gravitation: fluctuation",
u"geometry: Lyra",
u"axion: production",
u"tachyon: mass",
u"energy: dissipation",
u"gravitation: Weyl",
u"plasma: density",
u"quark: electric moment",
u"sneutrino: dark matter",
u"W: transverse momentum",
u"sum rule: Drell-Hearn-Gerasimov",
u"photon nucleon: Compton scattering",
u"scalar meson: radiative decay",
u"neutrino: geophysics",
u"n: beam",
u"neutrino: width",
u"constituent",
u"beam dump",
u"deuteron: polarized target",
u"photon nucleus: coherent interaction",
u"scalar meson: model",
u"hadron: structure function",
u"flow: transverse",
u"Wino",
u"regularization: dependence",
u"mass: sum rule",
u"calorimeter: iron",
u"operator: annihilation",
u"Higgs particle: propagator",
u"Upsilon(10870): hadronic decay",
u"meson: decay modes",
u"jet: forward production",
u"n: polarization",
u"hyperon: matter",
u"heavy ion: colliding beams",
u"trigger: performance",
u"gadolinium: admixture",
u"asymmetry: left-right",
u"coherence: effect",
u"K0(S): hadroproduction",
u"Tsallis",
u"baryon: current",
u"down: mass",
u"hadron: matter",
u"chi mesons",
u"X(3872): width",
u"gravitational radiation: signature",
u"D0: semileptonic decay",
u"conservation law: CP",
u"pi: polarizability",
u"kinematics: relativistic",
u"Upsilon(9460): radiative decay",
u"density: ratio",
u"cavity: design",
u"Theta(1540): photoproduction",
u"tachyon: matter",
u"orbifold: Z(N)",
u"momentum: missing-energy",
u"supersymmetry: constraint",
u"strangeness: mass",
u"space-time: symmetry",
u"neutrino/e: mass",
u"electron: charge",
u"D/s+",
u"p deuteron: nuclear reaction",
u"baryon number: fluctuation",
u"B/s0 anti-B/s0: mixing angle",
u"gluino: cascade decay",
u"production: small-angle",
u"liquid: quantum",
u"dependence: flavor",
u"background: supersymmetry",
u"quantum electrodynamics: massless",
u"effect: long-range",
u"kinematics: multi-Regge",
u"Veneziano",
u"fermion: scattering",
u"resonance: pole",
u"kink: solution",
u"deep inelastic scattering: structure function",
u"space-time: torus",
u"semiconductor detector: design",
u"quarkonium: decay modes",
u"field theory: model",
u"fluctuation: adiabatic",
u"gauge field theory: SU(3) x U(1)",
u"p: production",
u"photon: density",
u"Delta(1232): form factor",
u"matter: stability",
u"coupling constant: ratio",
u"operator: anomalous dimension",
u"neutrino: hierarchy",
u"frequency: dependence",
u"squark: cascade decay",
u"quark: determinant",
u"T-duality: nonabelian",
u"X(3872): mass",
u"absorption: spectrum",
u"detector: satellite",
u"isocurvature",
u"algebra: quaternion",
u"squark: branching ratio",
u"omega(783): hadronic decay",
u"momentum: saturation",
u"FLUKA",
u"Higgs particle: decay rate",
u"Theta(1540): mass",
u"p: polarizability",
u"n: production",
u"detector: acoustic",
u"effect: hadronic",
u"diamond",
u"boson: interaction",
u"orbifold: heterotic",
u"antineutrino nucleus: deep inelastic scattering",
u"charmonium: mass spectrum",
u"potential: harmonic",
u"pi: energy spectrum",
u"bound state: relativistic",
u"oscillator: model",
u"U(1) problem",
u"eta/c(2980): hadronic decay",
u"energy: surface",
u"neutrino electron: scattering",
u"vortex: BPS",
u"invariance: chiral",
u"scattering amplitude: energy dependence",
u"K: decay modes",
u"SLD",
u"gauge field theory: U(N) x U(N)",
u"unparticle: coupling",
u"p p: scattering amplitude",
u"fluid: conformal",
u"decoupling: temperature",
u"charmed meson: decay",
u"domain wall: network",
u"matrix: factorization",
u"quantization: polymer",
u"photon deuteron: nuclear reaction",
u"field theory: chiral",
u"top: hadronic decay",
u"intermediate boson: pair production",
u"gravitation: nonrelativistic",
u"interaction: effect",
u"Landau problem",
u"K0 anti-K0: mass difference",
u"Aharonov-Casher effect",
u"unparticle: scalar",
u"K0(L): radiative decay",
u"charmed baryon: hadronic decay",
u"model: nonlocal",
u"Theta(1540): hadroproduction",
u"drift tube",
u"gluon: transverse",
u"gravitation: localization",
u"f0(600): exchange",
u"gravitation: tetrad",
u"neutrino: diffusion",
u"scaling: KNO",
u"charge radius",
u"discrete light cone quantization",
u"particle: Majorana",
u"sodium-iodide",
u"mass: anomalous dimension",
u"Calogero-Sutherland model",
u"lepton: composite",
u"solution",
u"O(2)",
u"time-of-flight counter",
u"string: Dirac",
u"Horava-Lifshitz",
u"D/s: hadronic decay",
u"heat engineering",
u"curvature: Ricci",
u"singularity: formation",
u"K*2(1430)",
u"membrane model: action",
u"cross section: parametrization",
u"pi: beam",
u"HADES",
u"Einstein-Maxwell equation: dilaton",
u"Toda",
u"transition: magnetic",
u"particle: velocity",
u"bremsstrahlung: internal",
u"chi mesons: bottom",
u"J/psi(3100): pair production",
u"p: decay modes",
u"charmed meson: radiative decay",
u"Baxter",
u"supersymmetry: deformation",
u"uncertainty relations: Heisenberg",
u"baryon resonance: mass spectrum",
u"E(6)",
u"optics: absorption",
u"coupling: right-handed",
u"Higgs particle: effect",
u"scale: hierarchy",
u"charmed meson: photoproduction",
u"vacuum",
u"sigma term",
u"p: energy",
u"eta/b",
u"detector: sensitivity",
u"space-time: Einstein",
u"Z': branching ratio",
u"K nucleon: interaction",
u"K: electroproduction",
u"space-time: sphere",
u"gluino: condensation",
u"magnetic moment: transition",
u"antineutrino nucleon: interaction",
u"gauge field theory: deformation",
u"upsilon mesons: radiative decay",
u"numerical methods: performance",
u"vector meson: decay constant",
u"finite temperature: dependence",
u"beam cooling",
u"lattice: effect",
u"interaction: gauge",
u"gauge field theory: confinement",
u"neutrino nucleon: elastic scattering",
u"effect: higher-order",
u"inclusive production",
u"correlation function: scalar",
u"current: tensor",
u"SO(2)",
u"Higgs particle: coupling constant",
u"charge: nonlocal",
u"geometry: duality",
u"chromoelectric",
u"psi(3685): hadroproduction",
u"black hole: mass spectrum",
u"string: correction",
u"Born-Oppenheimer approximation",
u"jet: inclusive production",
u"coupled channel: unitarity",
u"field theory: nonrelativistic",
u"string: fragmentation",
u"DGLAP equation: solution",
u"K0(L)",
u"black hole: spectrum",
u"propagator: correction",
u"potential: external",
u"algebra: Jordan",
u"charge: renormalization",
u"action: Euclidean",
u"K0",
u"geometrodynamics",
u"photon: transverse energy",
u"field theory: torus",
u"wavelength shifter",
u"electron deuteron: inelastic scattering",
u"space: Sasaki-Einstein",
u"biology",
u"black hole: geometry",
u"odderon: exchange",
u"hadron: cosmic radiation",
u"meson: leptonic decay",
u"meson: semileptonic decay",
u"electron nucleus: scattering",
u"pi: yield",
u"vortex: magnetic",
u"polarized target: longitudinal",
u"quantum chromodynamics: equation of state",
u"n",
u"muonic atom",
u"positron: particle source",
u"vacuum state: stochastic",
u"Upsilon(10870)",
u"electron nucleon: exclusive reaction",
u"parity: conservation law",
u"lepton: decay",
u"fermion: gas",
u"symmetry breaking: Z(2)",
u"fermion: Goldstone particle",
u"algebra: Dirac",
u"M-theory: compactification",
u"D/s: semileptonic decay",
u"Higgs particle: spin",
u"hadron hadron: inelastic scattering",
u"hadron: excited state",
u"nucleus: equation of state",
u"supersymmetry: local",
u"propagator: Feynman",
u"photon: dispersion relation",
u"bottom: associated production",
u"cross section: formula",
u"multiple production: fragmentation",
u"group: SO(N)",
u"neutron star: matter",
u"pi pi: interaction",
u"distribution amplitude: light cone",
u"Baksan",
u"K0(L): secondary beam",
u"muon tau: symmetry",
u"ANKE",
u"particle: width",
u"Kadanoff-Baym equation",
u"crystal: channeling",
u"dark matter: heavy",
u"symmetry: planar",
u"jet: mass spectrum",
u"nucleon nucleon",
u"dijet: final state",
u"fermion: exotic",
u"axigluon",
u"diffeomorphism: transformation",
u"nucleon Lambda: interaction",
u"gauge boson: associated production",
u"gluon: massive",
u"isospin: 0",
u"nuclide: yield",
u"muon: multiplicity",
u"Z': decay modes",
u"space-time: conformal",
u"Eguchi-Kawai model",
u"operator: tensor",
u"K: radiative decay",
u"gluon gluon: interaction",
u"pi: distribution amplitude",
u"holography: light front",
u"gauge field theory: space-time",
u"preon",
u"model: blast wave",
u"relic density: thermal",
u"gluino: hadroproduction",
u"operator: expansion",
u"scaling: Bjorken",
u"antihydrogen",
u"fermion: mirror particle",
u"photon: showers",
u"gauge field theory: vector",
u"S(3)",
u"tellurium",
u"expansion: longitudinal",
u"fluctuation: dissipation",
u"magnetic field: transverse",
u"electroproduction",
u"trispectrum",
u"hypernucleus: hadroproduction",
u"hadron: beam",
u"beam: size",
u"Higgsino: dark matter",
u"particle flow: transverse",
u"initial state: fluctuation",
u"cyclotron",
u"transition: muon electron",
u"superpotential: nonperturbative",
u"mass: expansion",
u"n: spin",
u"differential cross section: mass",
u"transformation: chiral",
u"mass: scaling",
u"pi nucleus: nuclear reaction",
u"scale",
u"electronics: design",
u"quantum electrodynamics: nonlinear",
u"seesaw model: minimal",
u"Jona-Lasinio-Nambu model: nonlocal",
u"black hole: Lifshitz",
u"correction: shadowing",
u"isospin: amplitude analysis",
u"monopole: charge",
u"flavor: ratio",
u"fluid: dissipation",
u"Higgs particle: signature",
u"supergravity: background",
u"singularity: ultraviolet",
u"color: symmetry",
u"soliton: scattering",
u"antineutrino electron: elastic scattering",
u"current: scalar",
u"nucleon nucleon: correlation",
u"singularity: big rip",
u"amplitude analysis: helicity",
u"pseudoscalar meson: width",
u"Z0: radiative decay",
u"electrostatic",
u"lepton: charge",
u"CMD",
u"X-ray: cosmic radiation",
u"p deuteron: inclusive reaction",
u"B-mode",
u"Vainshtein",
u"Frobenius",
u"matter: mass",
u"symmetry: duality",
u"K-: hadroproduction",
u"electron: model",
u"polarization: dependence",
u"insulation",
u"coupling: chiral",
u"parity: doublet",
u"left-handed",
u"entropy: von Neumann",
u"blast wave",
u"Higgs particle: CP",
u"master equation: solution",
u"photoelectron",
u"D-brane: 2",
u"beam position",
u"neutrino: family",
u"pseudoscalar meson: leptonic decay",
u"omega(783): mass",
u"excited state: mass",
u"expansion: higher-order",
u"coupling: linear",
u"sparticle: production",
u"W': coupling",
u"Picard-Fuchs equation",
u"gravitation: relativistic",
u"up: mass",
u"postulated particle: pair production",
u"e*",
u"Yang-Mills: SU(2)",
u"propagator: massless",
u"operator: geometrical",
u"yield: temperature dependence",
u"multiplet: scalar",
u"meson resonance: hadroproduction",
u"psi(3770): branching ratio",
u"O'Raifeartaigh model",
u"operator: vector",
u"Phi(1020): yield",
u"eta(958): hadroproduction",
u"electron: angular distribution",
u"potential: Liouville",
u"W: propagator",
u"gravitation: chiral",
u"baryon: operator",
u"gluon: splitting",
u"Peccei-Quinn",
u"singularity: Calabi-Yau",
u"lepton p: inclusive reaction",
u"baryon resonance: multiplet",
u"inflation: stochastic",
u"color: correlation",
u"space-time: Milne",
u"excited state: topological",
u"muon: polarization",
u"scattering amplitude: coupled channel",
u"electromagnetic interaction: coupling constant",
u"pseudoscalar meson: photoproduction",
u"pi: mesic atom",
u"kappa meson",
u"Novosibirsk Stor4",
u"neutrino: form factor",
u"muon: final state",
u"coupling constant: vector",
u"operator: marginal",
u"gluon: operator",
u"detector: efficiency",
u"branching ratio: measured",
u"force: short-range",
u"electron: irradiation",
u"interaction: tensor",
u"f2(1525)",
u"NOMAD",
u"radiation detector",
u"Project X",
u"p: magnetic moment",
u"star: binary",
u"symmetry: Yangian",
u"NA62",
u"dimension: 0",
u"neutrino: spin",
u"K: exchange",
u"anti-B/s0: branching ratio",
u"hadron nucleus: interaction",
u"dilepton: hadroproduction",
u"Higgsino: LSP",
u"stability: nonlinear",
u"Novosibirsk VEPP-2000",
u"electron nucleus: elastic scattering",
u"energy: operator",
u"polarizability",
u"model: potential",
u"quark: mass: hierarchy",
u"energy: rotation",
u"color: flow",
u"vector meson: mass spectrum",
u"string: production",
u"eta: decay modes",
u"lifetime: difference",
u"Polyakov loop: susceptibility",
u"pi: Goldstone particle",
u"quantum mechanics: conformal",
u"D: decay modes",
u"variable speed of light",
u"mass: axial",
u"bottom meson: hadronic decay",
u"anti-kT algorithm",
u"gluino: decay modes",
u"Upsilon(10355)",
u"polarizability: magnetic",
u"cosmological constant: 0",
u"baryon: chiral",
u"omega(783): width",
u"fermion: electroproduction",
u"membrane model: stability",
u"sum rule: Weinberg",
u"potential: transition",
u"potential: electromagnetic",
u"symmetry: reflection",
u"microprocessor: graphics",
u"burst",
u"photon: particle source",
u"Lambda/c+: hadronic decay",
u"charged particle: density",
u"muon: secondary beam",
u"parity: negative",
u"ground state: stability",
u"Riemann surface: compact",
u"mirror: surface",
u"membrane: decay",
u"metric: Schwarzschild",
u"current: hadronic",
u"n: decay",
u"p p: storage ring",
u"neutralino: radiative decay",
u"K0 anti-K0",
u"neutrino/tau: cosmic radiation",
u"critical phenomena: percolation",
u"exclusive production",
u"path length: dependence",
u"multiple scattering: effect",
u"Thirring model: massive",
u"van der Waals",
u"algebra: symplectic",
u"dihadron: angular correlation",
u"pentaquark: hadronic decay",
u"neutrino: energy loss",
u"spin: triplet",
u"monopole: abelian",
u"Wess-Zumino-Witten model: SL(2,R)",
u"p nucleon: inclusive reaction",
u"Regge poles: linear",
u"cosmic background radiation: fluctuation",
u"charge correlation",
u"virial theorem",
u"Sp(N)",
u"RENO",
u"dark matter: Kaluza-Klein",
u"muon: polarized beam",
u"scalar particle: massive",
u"top: spin",
u"nucleus: density",
u"algebra: Galilei",
u"decay: path length",
u"B/c",
u"background: suppression",
u"beam: radioactivity",
u"Omega-: hadroproduction",
u"scalar",
u"meson meson: elastic scattering",
u"cosmic radiation: fluctuation",
u"deuteron: fission",
u"energy: internal",
u"electromagnetic field: multipole",
u"logic",
u"meson resonance: hadronic decay",
u"renormalization: wave function",
u"detector: proposed",
u"monopole: Kaluza-Klein",
u"p n: interaction",
u"black hole: singularity",
u"photomultiplier: avalanche",
u"p deuteron: exclusive reaction",
u"potential: electric",
u"p p: cross section",
u"charged particle: energy loss",
u"photon: multiplicity",
u"nucleon: charge",
u"constraint: Dirac",
u"X(3872): radiative decay",
u"graviton: excited state",
u"Lemaitre-Tolman-Bondi model",
u"resonance: heavy",
u"Lambda parameter",
u"electron positron: colliding beam detector",
u"resonance: hadroproduction",
u"D0: width",
u"halo: model",
u"cosmic radiation: energy loss",
u"spin: fractional",
u"hyperon: yield",
u"scalar particle: exchange",
u"potential: electrostatic",
u"B/c: decay modes",
u"tensor: embedding",
u"vector meson: production",
u"germanium: crystal",
u"space charge",
u"glueball: pseudoscalar",
u"bound state: width",
u"stau: NLSP",
u"sneutrino: decay",
u"new particle: decay",
u"quantum hadrodynamics",
u"hadronization: statistical",
u"neutralino: leptonic decay",
u"coupling constant: axial",
u"cosmological model: parameter space",
u"density: scalar",
u"Z0: coupling",
u"intranuclear cascade",
u"approximation: hydrodynamics",
u"standard model: minimal",
u"interaction: derivative",
u"n: detector",
u"momentum: high",
u"glueball: width",
u"time: dilation",
u"potential: nonrelativistic",
u"supersymmetry: 3",
u"antineutrino nucleus: inelastic scattering",
u"string: mass",
u"Born-Infeld model: nonabelian",
u"symmetry: S(4)",
u"splitting function",
u"force: tensor",
u"monopole: mass",
u"interpretation of experiments: ZEUS",
u"performance: time dependence",
u"Hadamard state",
u"star: cluster",
u"deuteron nucleus: inclusive reaction",
u"fermion: relativistic",
u"hadron: cascade",
u"quasiparticle: mass",
u"orbifold: S(1)/Z(2)",
u"family: 2",
u"transverse momentum: moment",
u"equivalence principle: validity test",
u"orbit: gauge",
u"gauge field theory: renormalizable",
u"expansion: adiabatic",
u"correction: hadronization",
u"vacuum state: nonperturbative",
u"lepton: width",
u"gauge boson: decay modes",
u"spin: resonance",
u"gauge boson: electroproduction",
u"black hole: attractor",
u"form factor: pseudoscalar",
u"interpretation of experiments: BaBar",
u"space-time: cylinder",
u"SU(8)",
u"field equations: Liouville",
u"p: multiplicity",
u"form factor: tensor",
u"B+: leptonic decay",
u"photon: mixing",
u"bottom meson: semileptonic decay",
u"silicon: oxygen",
u"ground state: mass",
u"Crystal Barrel",
u"field theoretical model: CP(1)",
u"charmed meson: bound state",
u"pi: final state",
u"spinless",
u"sigma model: chiral",
u"isocurvature: perturbation",
u"geometry: torus",
u"group: abelian",
u"silicon: crystal",
u"lepton: charge: asymmetry",
u"fluctuation: linear",
u"stau: lifetime",
u"effect: noncommutative",
u"approximation: pole",
u"pi- p: inclusive reaction",
u"gravitation: spin",
u"amplitude analysis: transition",
u"magnet: design",
u"Dubna Nuclotron",
u"coupling constant: dependence",
u"flavor: hierarchy",
u"rainbow",
u"transformation: Hopf",
u"vector meson: branching ratio",
u"force: Coulomb",
u"tantalum",
u"Zee model",
u"Lambda: photoproduction",
u"pi nucleon: scattering length",
u"star: relativistic",
u"supernova: redshift",
u"coil",
u"space: CP(3)",
u"baryon: hadronic decay",
u"Delta(1232): photoproduction",
u"K-: mesic atom",
u"U(2)",
u"electron: background",
u"background: Schwarzschild",
u"injection: spectrum",
u"interaction: flavor changing",
u"meson meson: molecule",
u"gluon: confinement",
u"taste: symmetry breaking",
u"colored particle",
u"flux: density",
u"B/s: decay",
u"nucleus: decay",
u"jet: fragmentation function",
u"same sign",
u"symmetry: deformation",
u"color: flux tube",
u"gravitation: perturbation",
u"scale: quantum chromodynamics",
u"multiple scattering: parton",
u"quality: monitoring",
u"wiggler",
u"helium: liquid",
u"fermion: nonrelativistic",
u"black hole: duality",
u"SU(3) x SU(3)",
u"amplitude analysis: CP",
u"mixing angle: flavor",
u"quantum electrodynamics: effect",
u"N(1520)",
u"meson: bound state",
u"strange particle: production",
u"intermediate boson: associated production",
u"muon: electric moment",
u"potential: quantum",
u"axino: dark matter",
u"gravitation: background field",
u"Faddeev equation",
u"Q-ball: decay",
u"velocity: superluminal",
u"charmed meson: mass spectrum",
u"W: coupling",
u"quark: postulated particle",
u"CCD",
u"quantum group: representation",
u"field equations: vortex",
u"temperature: surface",
u"algebra: spin",
u"boson: massless",
u"black hole: scattering",
u"E(8)",
u"H baryon",
u"p nucleus: inelastic scattering",
u"phase",
u"neutrino: gas",
u"Z': exchange",
u"quark gluon: condensation",
u"gauge field theory: massless",
u"K-",
u"black hole: warped",
u"anti-p: beam",
u"plasma: Yang-Mills",
u"current: topological",
u"cavity: surface",
u"field theory: nonperturbative",
u"D1(2420)",
u"space: discrete",
u"mass: finite",
u"fermion: decay",
u"scaling: Lifshitz",
u"deconstruction",
u"algebra: differential",
u"scalar meson: exchange",
u"statistics: nonabelian",
u"xenon: gas",
u"gravitation: discrete",
u"Weyl equation",
u"hadron: heavy",
u"gas: density",
u"hypernucleus: width",
u"space-time: foliation",
u"chi/c2(3555)",
u"new physics: sensitivity",
u"space-time: Weyl",
u"beam: current",
u"vector boson: mass",
u"p: charge radius",
u"membrane model: tension",
u"binary: orbit",
u"background: classical",
u"colliding beams: proposed",
u"electron deuteron: inclusive reaction",
u"tellurium: oxygen",
u"electron nucleon: inclusive reaction",
u"meson nucleus: bound state",
u"WISP",
u"random surface",
u"gaugino: Dirac",
u"flux tube: magnetic",
u"space dependence",
u"bound state: effect",
u"differential equations: stochastic",
u"p n: elastic scattering",
u"nucleon: charge: axial",
u"Doppler effect",
u"model: interaction",
u"space: cylinder",
u"algebra: W",
u"electric field: color",
u"Z': production",
u"radiation: effect",
u"pi: irradiation",
u"coupling: Yang-Mills",
u"quantum electrodynamics: plasma",
u"monopole: nonabelian",
u"beam damping",
u"coupling constant: time dependence",
u"fermion: bound state",
u"field theory: multiple",
u"final state: hadronic",
u"field theory: Regge",
u"color: symmetry breaking",
u"gauge field theory: E(8)",
u"nucleus nucleus: interaction",
u"string: semiclassical",
u"symmetry: U(3) x U(3)",
u"black string: rotation",
u"gauge boson: mass generation",
u"parton: fragmentation function",
u"gauge boson: U(1)",
u"baryon number: susceptibility",
u"gravitation: affine",
u"quark: mass generation",
u"Upsilon(9460): leptonic decay",
u"beam: lifetime",
u"p: yield",
u"strange particle",
u"Higgs particle: rare decay",
u"track data analysis: impact parameter",
u"blazar: spectrum",
u"BTZ",
u"fluid: equation of state",
u"quark: bound state",
u"stop: mixing",
u"pi: momentum spectrum",
u"renormalization group: c-function",
u"sigma model: O(N)",
u"background field: constant",
u"dilaton: scalar",
u"nucleus nucleus: inclusive reaction",
u"B/c: width",
u"effect: topological",
u"transformation: symplectic",
u"phi**n model: 6",
u"recoil: velocity",
u"N(1650)",
u"pseudoscalar meson: mass spectrum",
u"space-time: fuzzy",
u"model: coalescence",
u"D: rare decay",
u"vector meson: wave function",
u"silicon: bent crystal",
u"distorted wave impulse approximation",
u"SU(2) x U(1)",
u"nucleus: transparency",
u"B0",
u"space-time: fractal",
u"invariance: de Sitter",
u"W+: leptonic decay",
u"black hole: magnetic",
u"model: stochastic",
u"Q-ball: stability",
u"hidden symmetry: conformal",
u"strangelet",
u"beryllium: hypernucleus",
u"galaxy: power spectrum",
u"X(3872): model",
u"jet: hadronic",
u"string model: vacuum state",
u"interpretation of experiments: H1",
u"p: dissociation",
u"n nucleus: nuclear reaction",
u"nucleus: exotic",
u"eta(958): hadronic decay",
u"pi-: electroproduction",
u"gauge field theory: SU(5) x U(1)",
u"symmetry breaking: Wilson loop",
u"quark antiquark: static",
u"vacuum state: landscape",
u"scalar particle: coupling",
u"holonomy: correction",
u"fermion: density",
u"atom: excited state",
u"pi0: form factor",
u"nucleon: momentum",
u"power spectrum: perturbation",
u"form factor: slope",
u"Born-Infeld model: action",
u"postulated particle: mass",
u"oscillator: deformation",
u"bino: LSP",
u"absorption: effect",
u"entropy: quantum",
u"f0(980): hadronic decay",
u"quark gluon: matter",
u"D-brane: 6",
u"D+: semileptonic decay",
u"symmetry breaking: SU(2)",
u"J/psi(3100): associated production",
u"n: cosmic radiation",
u"flavor: 5",
u"matter: dielectric",
u"ATIC",
u"beam loading",
u"B/s0",
u"radiation: density",
u"proportional chamber: time projection",
u"moduli: mass",
u"microstrip",
u"gravitation: local",
u"carbon: target",
u"SPring-8 Stor",
u"energy-momentum: density",
u"space: Poisson",
u"yield: energy dependence",
u"tin: nuclide",
u"depolarization",
u"bottom: fragmentation",
u"S(2)",
u"model: discrete",
u"STU model",
u"galaxy: mass",
u"space-time: dimension: 2",
u"polarized beam: transverse",
u"charmonium: hadronic decay",
u"axial-vector meson: mass",
u"mass: complex",
u"gauge field theory: fluctuation",
u"Coxeter",
u"WIMP: capture",
u"transverse energy: high",
u"scattering: small-angle",
u"torus: twist",
u"wormhole: static",
u"calorimeter: design",
u"algebra: exchange",
u"positronium: radiative decay",
u"lattice field theory: supersymmetry",
u"string: length",
u"p: momentum spectrum",
u"p: charge",
u"electron nucleus",
u"B/s: width",
u"atom: exotic",
u"quark: spectator",
u"cosmic radiation: mass spectrum",
u"bottom particle: semileptonic decay",
u"magnesium",
u"B/s0: lifetime",
u"pi: semileptonic decay",
u"spin: isospin",
u"gluon: multiplicity",
u"effect: discrete",
u"vector meson: propagator",
u"transition radiation",
u"effect: screening",
u"renormalization group: solution",
u"sparticle: mixing angle",
u"plasma: magnetic",
u"background: oscillation",
u"rotation: effect",
u"photon: transverse",
u"solenoid: superconductivity",
u"charmonium: hadroproduction",
u"Majoron",
u"Delta(1232): mass",
u"J/psi(3100): transverse momentum",
u"weak interaction: model",
u"Taub-NUT",
u"threshold: expansion",
u"orientifold: Calabi-Yau",
u"scalar meson: mixing angle",
u"meson: branching ratio",
u"anti-B: width",
u"gluon: correlation function",
u"horizon: formation",
u"quantum chromodynamics: scattering amplitude",
u"cesium",
u"photon axion: coupling",
u"geon",
u"model: axion",
u"quantum chromodynamics: small-x",
u"neutrino/tau: particle identification",
u"boson: propagator",
u"photon p: scattering",
u"correction: hadronic",
u"energy loss: parton",
u"scalar particle: production",
u"charmonium: decay",
u"color: string",
u"muon: accelerator",
u"jet: energy resolution",
u"meson: spectral representation",
u"energy: correlation",
u"compactification: circle",
u"parton: polarization",
u"frequentist",
u"color: representation",
u"squark: production",
u"particle: spinless",
u"fermion: resonance",
u"muon: capture",
u"Z/b(10610)",
u"top: boosted particle",
u"unitarity: correction",
u"jet: angular distribution",
u"string: nonabelian",
u"hyperon hyperon: interaction",
u"supersymmetry: hierarchy",
u"total cross section: measured",
u"Lambda/b0: width",
u"Higgs particle: parity",
u"potential: tensor",
u"anti-B0: semileptonic decay",
u"particle",
u"effect: quenching",
u"equation of state: parametrization",
u"electromagnetic field: quantization",
u"Baxter equation",
u"sparticle: annihilation",
u"black hole: string",
u"vector meson: coupling constant",
u"hadroproduction",
u"gravitational radiation: scattering",
u"nucleon: recoil",
u"photon electron: colliding beams",
u"gravitational radiation: energy",
u"WIMP nucleus: interaction",
u"Heun equation",
u"hadron: momentum spectrum",
u"polarization: parton",
u"scalar meson: hadron spectroscopy",
u"magnetic",
u"neutrino: hadroproduction",
u"Baikal",
u"thermodynamics: potential",
u"space: affine",
u"dyon: BPS",
u"B: lifetime",
u"Schroedinger",
u"weak-energy condition",
u"quantum chromodynamics: instanton",
u"supersymmetry: correction",
u"pentaquark: mass spectrum",
u"eta/c(3590)",
u"detector: gas",
u"magnetic field: static",
u"deep underground detector: water",
u"model: local",
u"gamma ray: binary",
u"space: Krein",
u"quantum mechanics: time dependence",
u"nuclide",
u"neutrino: charge",
u"quantization: 3",
u"effective field theory: nonrelativistic",
u"Z(4430)",
u"zero-point",
u"resonance: energy",
u"space: topological",
u"Y(4140)",
u"electrode: microstrip",
u"D: width",
u"strange meson",
u"gauge boson: mixing",
u"Chern-Simons number",
u"gravitation: quantization: canonical",
u"p nucleus",
u"accelerator: magnet",
u"nucleus: energy",
u"gauge",
u"carbon: hydrogen",
u"Snyder",
u"space-time: classical",
u"impedance",
u"lattice field theory: continuum limit",
u"gauge boson: width",
u"momentum: representation",
u"Bell inequality",
u"density: low",
u"semiconductor detector: tracks",
u"effect: solar",
u"twist: topological",
u"hadron: scattering",
u"new interaction: effect",
u"eta: width",
u"hydrogen: energy levels",
u"antineutrino/e: particle source",
u"statistics: fractional",
u"current: exchange",
u"vector boson: production",
u"meson meson: bound state",
u"group theory: modular",
u"scalar particle: doublet",
u"Poisson equation",
u"finite element",
u"platinum",
u"color: condensation",
u"supersymmetry: effect",
u"gauge field theory: G(2)",
u"field theory: constructive",
u"radiation protection",
u"Fierz identity",
u"cross section: asymmetry",
u"star: size",
u"tau-: leptonic decay",
u"instanton: density",
u"symmetry: Z(4)",
u"bound state: decay",
u"quark gluon: fusion",
u"muon: showers",
u"spectrum: absorption",
u"electron: plasma",
u"cross section: polarization",
u"vector meson: coupling",
u"meson: field theory",
u"Sigma-",
u"charge: ratio",
u"moment: anapole",
u"Hagedorn",
u"antineutrino nucleus: nuclear reaction",
u"action: density",
u"constraint: quantum",
u"energy: hadronic",
u"Calogero-Moser model",
u"plastics",
u"twin Higgs model",
u"quarkonium: decay",
u"supersymmetry: duality",
u"shape analysis",
u"spin: singlet",
u"defect: conformal",
u"dynamical symmetry breaking: chiral",
u"parton: recombination",
u"inflaton: oscillation",
u"pi K: scattering amplitude",
u"neutralino nucleon: scattering",
u"nucleon resonance: photoproduction",
u"energy: time dependence",
u"field theory: twist",
u"white dwarf: binary",
u"charm: 2",
u"meson nucleon: scattering amplitude",
u"texture: Yukawa",
u"charged particle: pair production",
u"sigma model: O(3)",
u"beam: radiation",
u"fermion: pair",
u"multiprocessor",
u"group: Galilei",
u"Korteweg-de Vries equation: hierarchy",
u"Dirac-Kaehler equation",
u"bino: dark matter",
u"energy levels: density",
u"charged particle: tracks",
u"bottom: photoproduction",
u"dark matter: composite",
u"group: SO(3)",
u"Ricci",
u"distorted wave Born approximation",
u"top: radiative decay",
u"model: superconductivity",
u"scattering amplitude: elastic scattering",
u"WASA",
u"space-time: S(5)",
u"n: yield",
u"charge: induced",
u"phase space: analysis",
u"production: thermal",
u"representation: tensor",
u"interpretation of experiments: LHC-B",
u"vortex: lattice",
u"dimension: infinite",
u"K1(1270)",
u"hyperon: production",
u"scattering: parton",
u"pi0: neutrinoproduction",
u"eta(958): photoproduction",
u"bottom meson: pair production",
u"deuteron: electrofission",
u"U(3)",
u"matter: relativistic",
u"coset space: SL(2,R)/U(1)",
u"electron: bound state",
u"nucleon: momentum spectrum",
u"statistics: Bayesian",
u"isospin: singlet",
u"K0(S): branching ratio",
u"cosmic censorship: violation",
u"polarization: anisotropy",
u"mercury",
u"hydrogen: gas",
u"lepton: doublet",
u"operator: Konishi",
u"Bose-Einstein",
u"parton: correlation",
u"electron: final state",
u"detector: resolution",
u"black hole: noncommutative",
u"space-time: Melvin",
u"pi nucleon: coupling constant",
u"quark: symmetry",
u"lepton: coupling",
u"flux: gauge",
u"magnetic field: low",
u"bound state: stability",
u"Z/c(3900)",
u"gauge field theory: SU(6)",
u"pi0: production",
u"optics: transformation",
u"dark matter: pair production",
u"seesaw model: Type II",
u"deuteron: polarization",
u"propagator: massive",
u"nucleon: pair production",
u"charged particle: acceleration",
u"p: spectator",
u"electron: secondary",
u"AdS(4)",
u"free energy: density",
u"gluon: fragmentation function",
u"supernova: model",
u"eta: electroproduction",
u"duality: parton hadron",
u"n: scattering",
u"photon photon: annihilation",
u"pi+ pi-: mass spectrum",
u"deuteron: polarized beam",
u"Yang-Baxter equation: solution",
u"D-brane: instanton",
u"conductivity: thermal",
u"interpretation of experiments: ALEPH",
u"jet: interaction",
u"flavor: 8",
u"membrane: bound state",
u"charge: vector",
u"hadron hadron: correlation",
u"meson: rare decay",
u"supersymmetry: hidden symmetry",
u"tau: magnetic moment",
u"rho(770): electroproduction",
u"hyperon: associated production",
u"supersymmetry: parametrization",
u"fluid: interaction",
u"gluon gluon: scattering",
u"meron",
u"p: target",
u"beam optics: beta function",
u"Higgs particle: cascade decay",
u"quantum group: SU(2)",
u"fluid: pressure",
u"photon: trajectory",
u"form factor: charge",
u"coupling constant: Yukawa",
u"dispersion relation: nonlinear",
u"membrane: gas",
u"helium: cosmic radiation",
u"magnetic monopole: mass",
u"gauge field theory: SU(N) x SU(N)",
u"shock waves: relativistic",
u"Compton scattering: backscatter",
u"technicolor: topcolor",
u"lepton: excited state",
u"low temperature expansion",
u"electronics: background",
u"Einstein equation: semiclassical",
u"pi+ pi-",
u"upsilon mesons: width",
u"charmed meson: decay constant",
u"positronium: width",
u"gauge field theory: SU(2) x U(1) x U(1)",
u"quantum chromodynamics: hard scattering",
u"potential: exchange",
u"integrated circuit: design",
u"enhancon",
u"background: spectrum",
u"meson meson: inelastic scattering",
u"supergravity: correction",
u"muonium",
u"anti-p: hadronic atom",
u"K- p: inelastic scattering",
u"deuteron: model",
u"quasipotential",
u"charge: current",
u"D0: leptonic decay",
u"black hole: mass ratio",
u"correction: higher-twist",
u"electric field: time dependence",
u"top: transverse momentum",
u"mass: magnetic",
u"colliding beams",
u"black hole: mechanics",
u"anti-B: hadronic decay",
u"vortex: condensation",
u"hadron nucleus: inclusive reaction",
u"detector: imaging",
u"quarkonium: excited state",
u"scalar: Weyl",
u"Hitchin equation",
u"color: recombination",
u"quark: light front",
u"superfield: singlet",
u"GEO600",
u"electronics: communications",
u"booster",
u"photon: single production",
u"quantum mechanics: Bohmian",
u"pi nucleon: exclusive reaction",
u"blazar: emission",
u"differential forms: Kaehler",
u"quark: vector particle",
u"spinor: helicity",
u"expansion: vertex",
u"charged particle: elliptic flow",
u"electromagnetic field: high",
u"linear space",
u"gluon: structure function",
u"symmetry: SO(2)",
u"quasiparticle: excited state",
u"psi mesons: hadronic decay",
u"conifold: singularity",
u"nucleon: sigma term",
u"charge: operator",
u"gluino: production",
u"gas: relativistic",
u"fermion: mass: twist",
u"space: Grassmann",
u"fluid: velocity",
u"calorimeter: crystal",
u"matter: fluctuation",
u"CP(3)",
u"fermion: new particle",
u"effect: geometrical",
u"yield: enhancement",
u"space: quaternion",
u"scattering: dipole",
u"O(3)",
u"muon: density",
u"star: model",
u"upsilon mesons: electroproduction",
u"p: strangeness",
u"action: nonlocal",
u"trajectory: classical",
u"superspace: deformation",
u"excited state: decay",
u"K+: radiative decay",
u"beam: energy spectrum",
u"potential: nonlocal",
u"graphics",
u"path integral: determinant",
u"p nucleus: elastic scattering",
u"nuclear reactor: particle source",
u"operator: conformal",
u"mass: resonance",
u"invariance: supersymmetry",
u"LENA",
u"soliton: noncommutative",
u"drift chamber: tracks",
u"S-matrix: factorization",
u"dijet: angular correlation",
u"model: topological",
u"matter: density: perturbation",
u"neutrino: annihilation",
u"gravitational radiation: power spectrum",
u"bottomonium: hadron spectroscopy",
u"B0: decay",
u"impulse approximation: plane wave",
u"relativity theory: deformation",
u"photon: helicity",
u"quantization: fluctuation",
u"pseudoscalar meson: exchange",
u"bubble: scattering",
u"current: fragmentation",
u"symmetry: SO(5)",
u"potential: short-range",
u"magnetic field: multipole",
u"carbon: oxygen",
u"hadron: jet",
u"data analysis method: efficiency",
u"chlorine",
u"lepton: associated production",
u"p: particle identification",
u"phase space: deformation",
u"Stokes theorem: nonabelian",
u"deuteron: electric moment",
u"model: quark parton",
u"mass: constraint",
u"heavy lepton: pair production",
u"membrane model: fractional",
u"matter: conformal",
u"particle number: density",
u"photon: luminosity",
u"atmosphere: background",
u"helicity: scattering amplitude",
u"scintillation counter: hodoscope",
u"generalized parton distribution: parametrization",
u"string: quantization",
u"lattice: transverse",
u"photon: orbit",
u"monopole: density",
u"quantum electrodynamics: strong field",
u"muon: momentum spectrum",
u"horizon: acoustic",
u"form factor: strangeness",
u"energy: electromagnetic",
u"baryon: polarization",
u"Lambda/b0: lifetime",
u"hadronization: effect",
u"NICA",
u"scattering amplitude: pole",
u"supergravity: heterotic",
u"quarkonium: pair production",
u"axion: potential",
u"lifetime: lower limit",
u"radion: coupling",
u"charge conjugation: symmetry",
u"operator: higher-twist",
u"statistics: Poisson",
u"planarity",
u"photon: gas",
u"Higgs particle: particle source",
u"Upsilon(9460): branching ratio",
u"meson nucleon: coupling constant",
u"c-function",
u"Laurent expansion",
u"operator: kinetic",
u"anisotropy: statistical",
u"nucleus: fragmentation",
u"space-time: compact",
u"hypernucleus: electroproduction",
u"gravitational radiation: magnetic",
u"squark: right-handed",
u"bounce: quantum",
u"heavy lepton: mass",
u"singleton",
u"threshold: enhancement",
u"yield: time dependence",
u"MINERvA",
u"kinematics: Breit frame",
u"beam: heavy ion",
u"noise: low",
u"field equations: scalar",
u"representation: induced",
u"pi0: decay",
u"Bethe-Heitler",
u"p: angular distribution",
u"space-time: triangulation",
u"trapped surface",
u"lunar",
u"vector meson: mixing",
u"detector: optical",
u"antiquark: polarization",
u"KTeV",
u"antimatter: cosmic radiation",
u"gravitational radiation: direct detection",
u"photon nucleon: inclusive reaction",
u"spin: correlation function",
u"charge: diffusion",
u"plasmon",
u"direct production",
u"psi(3685): leptonic decay",
u"yield: dilepton",
u"Birkhoff theorem",
u"sum rule: Gottfried",
u"supersymmetry: dynamical symmetry breaking",
u"gluon: off-shell",
u"Cherenkov counter: aerogel",
u"RF system: power supply",
u"proportional chamber: wire",
u"charged particle: beam",
u"gluon: momentum",
u"sigma model: topological",
u"fermion antifermion: annihilation",
u"quantum chromodynamics: anomaly",
u"K: mesic atom",
u"antineutrino/mu: beam",
u"Salpeter equation",
u"B: mass",
u"quantum electrodynamics: spinor",
u"sparticle: coupling",
u"scalar particle: massless",
u"K: photoproduction",
u"group: Heisenberg",
u"gluon: spin",
u"Upsilon(9460): hadronic decay",
u"Dirac",
u"wavelet",
u"gluino: lifetime",
u"symmetry: SL(2,Z)",
u"scaling: dependence",
u"lepton: right-handed",
u"nitrogen: liquid",
u"algebra: tensor",
u"background: magnetic",
u"D/s: leptonic decay",
u"nucleon: polarized target",
u"numerical methods: stochastic",
u"lepton: rapidity",
u"Cherenkov counter: ice",
u"black ring: rotation",
u"target: internal",
u"Bell inequality: violation",
u"model: pole",
u"approximation: strong field",
u"birefringence: vacuum",
u"quantum chromodynamics: condensation",
u"Lovelock",
u"NEXT",
u"potential: random",
u"scintillation counter: sandwich",
u"eta: branching ratio",
u"antineutrino: beam",
u"gravitational radiation detector: satellite",
u"Klein-Gordon equation: nonlinear",
u"expansion: wavelet",
u"geophysics: magnetic field",
u"energy: quantization",
u"quarkonium: electromagnetic decay",
u"dependence: momentum transfer",
u"Monte Carlo: hybrid",
u"expansion: de Sitter",
u"tensor: Einstein",
u"vacuum state: wave function",
u"matter: phantom",
u"symmetry breaking: Peccei-Quinn",
u"Phi(1020): particle source",
u"SU(5) x U(1)",
u"correlation: effect",
u"charge: dilepton",
u"jet: rapidity spectrum",
u"B/s: radiative decay",
u"electrode: impedance",
u"Antilambda: polarization",
u"linear accelerator: energy recovery",
u"Horava-Witten model",
u"cosmological constant: induced",
u"differential cross section: energy",
u"energy: lower limit",
u"neutralino: width",
u"membrane: annihilation",
u"graviton: decay",
u"symmetry: SU(5)",
u"p n: exclusive reaction",
u"semiconductor detector: technology",
u"Hamilton-Jacobi equation: solution",
u"fluctuation: topological",
u"tungsten: oxygen",
u"differential forms: flux",
u"gravitation: deformation",
u"radon",
u"fermionization",
u"lepton: triplet",
u"total cross section: high energy behavior",
u"density: correlation function",
u"gluon: mass generation",
u"black hole: acceleration",
u"magnet: undulator",
u"entropy: quantization",
u"boson: symmetry",
u"p: width",
u"model: optical",
u"gauge field theory: U(1)**N",
u"pi: effect",
u"K-matrix",
u"bound state: two-particle",
u"invariance: diffeomorphism",
u"horizontal symmetry: U(1)",
u"black hole: lifetime",
u"baryon: decay",
u"electron nucleus: exclusive reaction",
u"germanium: detector",
u"instanton: expansion",
u"charged particle: interaction",
u"SL(2,C)",
u"duality: local",
u"crystal: liquid",
u"selectron: mass",
u"coloron",
u"amplitude analysis: isospin",
u"operator: linear",
u"gamma ray: angular distribution",
u"Xi-: hadroproduction",
u"DESY Lab",
u"hyperon: mass",
u"anti-D",
u"crystal: lattice",
u"symmetry: lattice",
u"X(3872): decay modes",
u"pseudoscalar meson: semileptonic decay",
u"particle antiparticle: asymmetry",
u"quark gluon: correlation function",
u"liquid: gas",
u"numerical methods: hydrodynamics",
u"fermion: dispersion relation",
u"beam: fragmentation",
u"microwaves: amplifier",
u"trigger: electronics",
u"new particle: spin",
u"isospin: 1/2",
u"gas: ionization",
u"Laplace",
u"W-: pair production",
u"Higgs particle: condensation",
u"correction: recoil",
u"quantum mechanics: measurement theory",
u"diffusion: relativistic",
u"partial wave: expansion",
u"ground state: hyperfine structure",
u"sociology",
u"superconductivity: temperature: high",
u"anti-p: production",
u"antineutrino: cosmic radiation",
u"commutation relations: deformation",
u"parton: scattering amplitude",
u"nucleon resonance: electroproduction",
u"representation: SU(2)",
u"magnetic field: confinement",
u"Boltzmann equation: relativistic",
u"vector meson: decay",
u"electric field: gradient",
u"meson: yield",
u"heavy lepton",
u"correlation: higher-order",
u"meson: multiple production",
u"axino: LSP",
u"optics: nonlinear",
u"Schwinger model: massive",
u"plasma: thermal",
u"current: U(1)",
u"muon: charge",
u"differential cross section: calculated",
u"zitterbewegung",
u"vector",
u"fluid: nonrelativistic",
u"nucleon: current",
u"baryon: yield",
u"matter: fluid",
u"gluon gluon: elastic scattering",
u"MACHO",
u"stability: rotation",
u"ion: particle source",
u"form factor: dipole",
u"pi0: width",
u"magnet: lattice",
u"string: energy",
u"background: effect",
u"amplitude analysis: partial wave",
u"p: energy loss",
u"data analysis method: proposed",
u"messenger: mass",
u"stau: annihilation",
u"effect: Coulomb",
u"review: introductory",
u"statistics: Tsallis",
u"rho(770)0: width",
u"space: Taub-NUT",
u"electron: accelerator",
u"X(3915)",
u"tau: particle identification",
u"jet: broadening",
u"B/s0: decay modes",
u"Antilambda",
u"magnetic field: geophysics",
u"Delta(1232): effect",
u"pi0: associated production",
u"flavor: SU(2)",
u"soliton: Hopf",
u"mass spectrum: recoil",
u"gauge boson: branching ratio",
u"hadron: inclusive production",
u"field theory: statistical",
u"calorimeter: forward spectrometer",
u"phase space: longitudinal",
u"mass enhancement",
u"radiation: scalar",
u"gauge boson: mass spectrum",
u"orbifold: torus",
u"one-particle",
u"positron: beam",
u"horizon: thermodynamics",
u"neutron star: surface",
u"curvature: higher-order",
u"hydrogen: ion",
u"charged particle: cosmic radiation",
u"Upsilon(9460): hadroproduction",
u"potential: screening",
u"supergravity: massive",
u"gluon gluon: scattering amplitude",
u"gas: mass",
u"gravitino: massive",
u"squark: search for",
u"electron nucleon: colliding beams",
u"boson: composite",
u"gauge: nonlinear",
u"K+: electroproduction",
u"baryon: charge",
u"n: leading particle",
u"spectator: model",
u"oscillon",
u"optics: laser",
u"gravitational radiation: flux",
u"photon hadron: interaction",
u"anti-top",
u"symmetry breaking: SU(6)",
u"electric field: effect",
u"force: van der Waals",
u"symmetry: exchange",
u"color: electric field",
u"algebra: SL(2)",
u"horizon: stability",
u"space: Einstein",
u"ekpyrotic",
u"flow: velocity",
u"charmed particle: semileptonic decay",
u"squark: heavy",
u"vortex: density",
u"photon: oscillation",
u"oscillator: coupling",
u"D: production",
u"vortex: abelian",
u"beamstrahlung",
u"approximation: plane wave",
u"resonance: magnetic",
u"gravitation: Liouville",
u"f0(600): hadronic decay",
u"neutrino/e: beam",
u"antiquark: momentum spectrum",
u"current: time dependence",
u"space-time: Kerr-Schild",
u"muon: width",
u"magnet: solenoid",
u"gravitational radiation: energy spectrum",
u"pi nucleon: scattering",
u"mass: recoil",
u"Upsilon(9460): width",
u"dijet: hadroproduction",
u"pressure: time dependence",
u"glueball: tensor",
u"bottom particle: hadronic decay",
u"quantum mechanics: entanglement",
u"fluid: quantum",
u"moment: higher-order",
u"scaling: conformal",
u"eta(958): radiative decay",
u"neutrino: opacity",
u"space-time: defect",
u"matter: antimatter",
u"positron p: elastic scattering",
u"vacuum state: condensation",
u"action: Regge",
u"background: anisotropy",
u"multiple field",
u"symmetry: SO(3,1)",
u"upsilon mesons: branching ratio",
u"black string: stability",
u"correlation function: pseudoscalar",
u"pressure: dependence",
u"Kodama state",
u"black hole: model",
u"fluctuation: operator",
u"superspace: noncommutative",
u"meson meson: scattering",
u"potential: correction",
u"deformation: noncommutative",
u"space-time: asymptotic behavior",
u"mediation: mirage",
u"electron electron: exclusive reaction",
u"mass: quantization",
u"boson: exchange",
u"transition",
u"boson: production",
u"molecule: cloud",
u"evolution equation: nonlinear",
u"curvature: effect",
u"power spectrum: temperature",
u"spin: glass",
u"neutralino: associated production",
u"O(4)",
u"gauge field theory: SO(4)",
u"quantum chromodynamics: penguin",
u"dark energy: parametrization",
u"background: curvature",
u"U(1) x U(1)",
u"transverse energy: energy flow",
u"laser: wake field",
u"instanton: noncommutative",
u"electroweak interaction: spontaneous symmetry breaking",
u"n: pair production",
u"plasma: effect",
u"sbottom: mass",
u"binary: spin",
u"quantization: collective",
u"partition function: 0",
u"atom: muonic atom",
u"fusion: photon gluon",
u"three-body problem: force",
u"classical",
u"photon electron: inelastic scattering",
u"quantum chromodynamics: nonlinear",
u"B-: semileptonic decay",
u"beam: background",
u"dark matter: parameter space",
u"graviton: interaction",
u"galaxy: model",
u"photon n: exclusive reaction",
u"K+: hadronic decay",
u"T-duality: transformation",
u"gravitation: Plebanski",
u"SASE",
u"density matrix: renormalization group",
u"expansion: character",
u"general relativity: quantum",
u"tau: electric moment",
u"bottom: decay",
u"quark: susceptibility",
u"neutralino nucleon: interaction",
u"graviton: hadroproduction",
u"multiplicity: density",
u"Kobayashi-Maskawa model",
u"positron p: exclusive reaction",
u"fermion: magnetic moment",
u"magnetic field: longitudinal",
u"bottom meson: mass",
u"nucleus: interaction",
u"color: magnetic field",
u"supernova: Type I",
u"nucleosynthesis: primordial",
u"charged particle: electroproduction",
u"photon: heavy",
u"neutrino: energy: low",
u"energy: accretion",
u"charge: symmetry",
u"model: classical",
u"Delta(1232): hadronic decay",
u"charge: static",
u"approximation: rainbow",
u"invariance: rotation",
u"string model: effective action",
u"charge conjugation: violation",
u"photon deuteron: interaction",
u"particle: yield",
u"dark matter: condensation",
u"missing-mass",
u"B/c: mass",
u"sphere: 3",
u"Higgs model: parameter space",
u"neutrino: yield",
u"algebra: SU(N)",
u"ground state: energy",
u"cross section: momentum dependence",
u"scalar particle: width",
u"baryon resonance: wave function",
u"model: semiclassical",
u"time projection chamber: xenon",
u"domain wall: stability",
u"slepton: decay modes",
u"beam: ejection",
u"electromagnetic field: coupling",
u"quark: left-handed",
u"finite temperature: correction",
u"stau: decay",
u"storage ring: heavy ion",
u"scattering amplitude: partial wave",
u"hidden symmetry: U(1)",
u"effect: Sudakov",
u"p: spectrum",
u"pi- p: charge exchange",
u"rapidity: correlation: long-range",
u"space: transverse",
u"color: interaction",
u"Telescope Array Experiment",
u"operator: Wilson",
u"radiation: coherence",
u"gravitation: holography",
u"scale: messenger",
u"space: S(4)",
u"spin: charge",
u"J/psi(3100): decay modes",
u"constructive",
u"doublet: 3",
u"wave: propagation",
u"W W: scattering amplitude",
u"W: decay modes",
u"radion: potential",
u"photon: leptonic decay",
u"unparticle: exchange",
u"synchrotron radiation: coherence",
u"unparticle: vector",
u"cosmic radiation: mass",
u"character",
u"gluon: scattering",
u"approximation: chiral",
u"photon: massive",
u"scintillation counter: readout",
u"eta/s",
u"Lambda(1520): photoproduction",
u"jet: angular correlation",
u"time-to-digital converter",
u"relativity theory: equivalence principle",
u"structure function: transverse",
u"flux: Hawking",
u"matter: collapse",
u"lepton: electroproduction",
u"chi mesons: branching ratio",
u"Ward identity: axial",
u"helium: production",
u"D/s: branching ratio",
u"muon: mass",
u"branching ratio: calculated",
u"W: branching ratio",
u"AdS(5)",
u"lepton: exotic",
u"anti-p p: bound state",
u"representation: higher-dimensional",
u"renormalization: finite",
u"quarkonium: yield",
u"conductivity: optical",
u"K-: absorption",
u"neutrino: electroproduction",
u"perturbation: axial",
u"D-brane: embedding",
u"torus: topology",
u"G parity",
u"Phi(1020): mass",
u"fluorine",
u"incoherent interaction",
u"jet: trigger",
u"moduli: string",
u"redshift: low",
u"photon: transition",
u"string: resonance",
u"carbon: hypernucleus",
u"bottom: electroproduction",
u"black hole: collapse",
u"pseudoscalar meson: electroproduction",
u"inflation: thermal",
u"bound state: BPS",
u"graviton: scalar",
u"color: dielectric",
u"Wilson loop: supersymmetry",
u"dip",
u"dipole: interaction",
u"meson meson",
u"strong-energy condition",
u"oblique",
u"metric: induced",
u"string: percolation",
u"water: heavy",
u"charge conjugation: invariance",
u"mass: induced",
u"effective Hamiltonian: nonrelativistic",
u"space-time: Kerr-NUT",
u"sbottom: pair production",
u"quark: color",
u"quark hadron",
u"hadron: spectrum",
u"integral equations: differential equations",
u"kink: mass",
u"bubble: interaction",
u"kinematics: Hilbert space",
u"hypernucleus: hadronic decay",
u"Higgs particle: model",
u"gravitation: TeVeS",
u"transverse energy: energy spectrum",
u"black hole: Vaidya",
u"K0 anti-K0: mixing angle",
u"fermion: correlation function",
u"single production",
u"algebra: cluster",
u"field equations: Proca",
u"gauge boson: longitudinal",
u"integrability: hierarchy",
u"anyon: nonabelian",
u"psi mesons: leptonic decay",
u"postulated particle",
u"Delta(1232): electroproduction",
u"string: Green-Schwarz",
u"amplitude analysis: interference",
u"baryon: electroproduction",
u"charge: chiral",
u"particle: vector",
u"boson: dark matter",
u"B+: rare decay",
u"effective Lagrangian: nonrelativistic",
u"group theory: loop space",
u"power spectrum: nonlinear",
u"energy: static",
u"cascade: hadronic",
u"p: recoil",
u"heavy ion: energy",
u"instanton: fractional",
u"thermodynamics: fluctuation",
u"fermion: split",
u"exciton",
u"halo: density",
u"muon: momentum",
u"calorimeter: readout",
u"charge: dipole",
u"electromagnetic field: duality",
u"Zeeman effect",
u"geometry: spectral",
u"nucleon: bound state",
u"unparticle: operator",
u"quantization: algebra",
u"anti-p p",
u"inflation: attractor",
u"Phi(1020): leptonic decay",
u"spinor: massive",
u"covariance: Poincare",
u"quasinormal mode: spectrum",
u"space-time: wormhole",
u"graviton: spectrum",
u"K0(S): secondary beam",
u"fragmentation function: interference",
u"temperature: correction",
u"gauge boson: mixing angle",
u"anti-p: polarized beam",
u"B-L number: symmetry breaking",
u"gravitation: renormalizable",
u"hadron: elliptic flow",
u"potential: long-range",
u"engineering: geometrical",
u"curvature: primordial",
u"pulsar: velocity",
u"selection rule: strangeness",
u"photon nucleus: exclusive reaction",
u"force: electromagnetic",
u"correlation function: higher-order",
u"cluster: percolation",
u"conifold: warped",
u"geometrothermodynamics",
u"Regge poles: slope",
u"boson: pair production",
u"dark matter: mirror particle",
u"charged particle: relativistic",
u"Goldstone particle: composite",
u"axino",
u"plastics: track sensitive",
u"soliton: energy",
u"axino: mass",
u"phase space: covariance",
u"p deuteron: interaction",
u"dark energy: perturbation",
u"fluid: mechanics",
u"dilepton: spectrum",
u"exchange: two-particle",
u"pentaquark: charm",
u"dark energy: fluid",
u"N(1710)",
u"D-brane: fractional",
u"LSP: density",
u"LHC-F",
u"hydrogen: molecule",
u"block spin transformation",
u"impulse approximation: relativistic",
u"pomeron: interaction",
u"star: surface",
u"quark: fermion number",
u"radiative capture",
u"radiation: temperature",
u"scalar particle: mass spectrum",
u"B/s0: radiative decay",
u"quantum chromodynamics: partition function",
u"mass difference: multiplet",
u"algebra: twist",
u"rho(770)0: pair production",
u"Y(3940)",
u"rho(770): exchange",
u"neutralino: lifetime",
u"n: polarizability",
u"potential: dependence",
u"D0: decay",
u"pi- nucleus: nuclear reaction",
u"membrane: embedding",
u"Dirac equation: massless",
u"jet: charm",
u"symmetry: Lie",
u"Gauss model",
u"photon axion: mixing",
u"temperature: freeze-out",
u"WIMP: relic density",
u"top: charge: asymmetry",
u"drift velocity",
u"fluid: phantom",
u"superstring: action",
u"space-time: quantum space",
u"gas: accretion",
u"diffraction: multiple",
u"decay: exotic",
u"black hole: energy",
u"meson: gas",
u"WIMP: interaction",
u"gravitational radiation: scalar",
u"electron deuteron: exclusive reaction",
u"supersymmetry: charge",
u"electrical engineering",
u"string: self-duality",
u"electroweak interaction: effect",
u"transformation: rational",
u"recoil: 0",
u"ion: acceleration",
u"grand unified theory: SU(6)",
u"minimal",
u"vector meson: electromagnetic decay",
u"lithium: nuclide",
u"resonance: coupling",
u"group: SU(4)",
u"D: mass",
u"radiation: undulator",
u"membrane model: infrared",
u"singularity: cosmological model",
u"wormhole: rotation",
u"hydrodynamics: magnetic",
u"dark matter: singlet",
u"homology: sphere",
u"string model: p-adic",
u"group: SO(4)",
u"eta/c(2980): width",
u"crystal: defect",
u"graviton: scattering",
u"oscillation: time",
u"resonance: frequency",
u"boson: coupling",
u"potential: Ernst",
u"random lattice",
u"spectrum: chiral",
u"quark quark: potential",
u"squark: exchange",
u"interaction: magnetic",
u"stability: quantum",
u"supersymmetry: superpotential",
u"symmetry: U(N) x U(N)",
u"quantum dot",
u"radiation: energy spectrum",
u"psi mesons: electroproduction",
u"meson baryon: bound state",
u"gravitation: Euclidean",
u"model: fragmentation",
u"energy: magnetic",
u"quark: energy",
u"nuclear matter: equation of state",
u"WIMP: decay",
u"top: condensation",
u"Higgs model: SU(2)",
u"pi: field theory",
u"space-time: Kundt",
u"quantum chromodynamics: duality",
u"approximation: Glauber",
u"lepton: cosmic radiation",
u"Painleve equation",
u"stability: magnetic",
u"loop integral: 3",
u"gauge field theory: SO(32)",
u"cellular automaton",
u"vortex: string",
u"rotator",
u"energy: violation",
u"soliton: background",
u"Schroedinger equation: time dependence",
u"K- p: elastic scattering",
u"effective potential: correction",
u"dilaton: stability",
u"fluid: background",
u"bottom meson: decay modes",
u"gaugino: mediation",
u"neutrino: condensation",
u"electron nucleon: interaction",
u"gluon: small-x",
u"flux: operator",
u"expansion: light cone",
u"electric field: background field",
u"baryon: antidecuplet",
u"yield",
u"current: pseudoscalar",
u"charmonium: radiative decay",
u"slepton: mixing angle",
u"lattice field theory: Euclidean",
u"Z(3)",
u"lepton: mass ratio",
u"gluon: energy",
u"vacuum state: Higgs",
u"Duffin-Kemmer-Petiau equation",
u"bubble chamber",
u"moose",
u"eta(958): width",
u"stau: pair production",
u"helicity: magnetic",
u"decay: model",
u"X(3872): branching ratio",
u"gauge field theory: SO(10)",
u"supergravity: chiral",
u"anti-p: yield",
u"Z/b(10650)",
u"fermion: vector",
u"meson baryon: scattering",
u"algebra: exceptional",
u"vertex: secondary",
u"fixed point: non-Gaussianity",
u"instanton: action",
u"S-brane",
u"energy spectrum: missing-energy",
u"scalar meson: wave function",
u"quantum number: CP",
u"WDVV equation",
u"sum rule: spectral representation",
u"constraint: violation",
u"instanton: background",
u"form factor: isovector",
u"pi: velocity",
u"p n: mass difference",
u"germanium: lifetime",
u"nucleus: binding energy",
u"density: topological",
u"pi: dispersion relation",
u"plasma: temperature",
u"Tokai J-PARC PS",
u"bottom meson: branching ratio",
u"a1(1260): hadronic decay",
u"deformation: twist",
u"glueball: production",
u"muon: particle source",
u"gravitation: asymptotic safety",
u"cosmic string: superconductivity",
u"energy: transition",
u"quantum chromodynamics: pomeron",
u"Konishi",
u"effect: dielectric",
u"Argonne Lab",
u"D0: rare decay",
u"neutron star: oscillation",
u"dimuon",
u"algebra: space-time",
u"field theory: collapse",
u"bottom: transverse momentum",
u"gravitational radiation: interaction",
u"transverse traceless gauge",
u"lepton nucleus: inclusive reaction",
u"top: particle identification",
u"quantum electrodynamics: compact",
u"absorption: correction",
u"pi- p: elastic scattering",
u"liquid: model",
u"Green-Schwarz",
u"beam: halo",
u"algebra: anti-de Sitter",
u"string: fluctuation",
u"diquark: axial-vector",
u"Moscow ITEF PS",
u"unimodular",
u"Kac-Moody",
u"field theory: de Sitter",
u"gauge boson: right-handed",
u"p-brane: 9",
u"particle antiparticle",
u"bound state: nonrelativistic",
u"dilepton: transverse momentum",
u"gauge: dependence",
u"supernova: luminosity",
u"a2(1320)",
u"neutrino: dispersion relation",
u"scattering amplitude: on-shell",
u"inflation: anisotropy",
u"scattering amplitude: correction",
u"Compton scattering: nonlinear",
u"nucleon nucleon: inclusive reaction",
u"recoil: effect",
u"susceptibility: scalar",
u"Omega-: hadronic decay",
u"mass: fluctuation",
u"ICARUS",
u"photon nucleon: elastic scattering",
u"n: polarized beam",
u"velocity: transverse",
u"electromagnetic field: constant",
u"supersymmetry: off-shell",
u"charmed baryon: excited state",
u"K-theory: twist",
u"CERN LEAR",
u"K+ K-: mass spectrum",
u"dipole: transition",
u"space: warped",
u"slepton: tachyon",
u"Wino: LSP",
u"string: BPS",
u"energy: production",
u"deuterium: target",
u"quantum chromodynamics: effective field theory",
u"hidden symmetry: gauge",
u"neutron star: stability",
u"nucleon: energy spectrum",
u"gluon: energy loss",
u"B/s0: hadroproduction",
u"Sigma0: hadroproduction",
u"beam emittance: transverse",
u"laser: backscatter",
u"Q-ball: production",
u"background: noise",
u"parity: asymmetry",
u"pi: topcolor",
u"quark antiquark: fusion",
u"energy spectrum: discrete",
u"luminosity: measurement methods",
u"THESIS",
u"energy: upper limit",
u"Q-ball: charge",
u"antineutrino nucleon: inelastic scattering",
u"flow: Yang-Mills",
u"operator: flavor changing",
u"thallium",
u"gauge field theory: USp(N)",
u"anti-p: energy spectrum",
u"nucleus: deformation",
u"sfermion",
u"differential cross section: formula",
u"quark gluon: coupling",
u"Thirring model: massless",
u"mass: pseudoscalar",
u"B-parameter",
u"baryon: parity",
u"deep inelastic scattering: inclusive reaction",
u"axion: string",
u"regularization: point splitting",
u"model",
u"model: peripheral",
u"muon-: leptonic decay",
u"coupling: renormalizable",
u"DESY FLASH",
u"pi+: neutrinoproduction",
u"representation: Lorentz",
u"quark: scalar",
u"ejection",
u"helium: ion",
u"black hole: pair",
u"field theory: oscillation",
u"baryon: exchange",
u"neutron star: accretion",
u"deconstruction: dimensional",
u"W W",
u"angular momentum: operator",
u"S(5)",
u"bottom: annihilation",
u"silver",
u"field theory: symplectic",
u"supersymmetry: current",
u"photon hadron: correlation",
u"quark: multiple production",
u"boson: decay",
u"charged particle: momentum spectrum",
u"correction: noncommutative",
u"background: deformation",
u"electron: trajectory",
u"superstring: Type IIB",
u"boson: vector",
u"potential: time dependence",
u"sparticle: signature",
u"excited state: massive",
u"momentum: external",
u"neutralino nucleus: elastic scattering",
u"neutrino: helicity",
u"symmetry: SO(6)",
u"Schwinger model: chiral",
u"form factor: multipole",
u"particle: multiplicity",
u"charmed meson: mass difference",
u"meson: coupling",
u"quark quark: correlation function",
u"gluon: hadroproduction",
u"hyperon: radiative decay",
u"NUT",
u"bottomonium: mass spectrum",
u"coupling: left-handed",
u"symmetry: nonlocal",
u"inclusive reaction",
u"Cabibbo model",
u"inflation: D-term",
u"texture: 0",
u"Wess-Zumino gauge",
u"Skyrmion: matter",
u"slepton: mass difference",
u"antineutrino nucleon: inclusive reaction",
u"meson: condensation",
u"KM3NeT",
u"parton: massive",
u"B: electroproduction",
u"teleportation",
u"bino",
u"charmed baryon: electroproduction",
u"gauge field theory: SU(2) x SU(2)",
u"KEDR",
u"Leibniz",
u"anti-p: storage ring",
u"nondiffractive",
u"boundary condition: mirror",
u"twin Higgs model: left-right",
u"cosmic background radiation: multipole",
u"neutrino: heavy: decay",
u"D/s+: semileptonic decay",
u"Skyrmion: stability",
u"cohomology: quantum",
u"singularity: collinear",
u"dihadron",
u"membrane model: warped",
u"group: U(N)",
u"representation: Lax",
u"boson: excited state",
u"Z(N)",
u"chargino: effect",
u"quantization: effect",
u"off-line",
u"squark: width",
u"K1(1400)",
u"astrophysics: magnetic field",
u"color: evaporation",
u"p: rapidity spectrum",
u"gaugino: pair production",
u"pi+: pair production",
u"Lyapunov exponent",
u"bottomonium: mass",
u"NLSP: decay",
u"polaron",
u"anomaly: Green-Schwarz",
u"loop integral: 1",
u"total cross section: ratio",
u"chiral quark soliton model",
u"D-brane: decay",
u"photon: dispersion",
u"gauge boson: massless",
u"gluino: branching ratio",
u"kinematics: effect",
u"correction: nonlocal",
u"dimuon: asymmetry",
u"angular distribution: moment",
u"neutrino: branching ratio",
u"Upsilon(10355): radiative decay",
u"gluon: pole",
u"rho(770): pair production",
u"interpretation of experiments: Auger",
u"pi+: branching ratio",
u"algebra: W(infinity)",
u"pi: model",
u"bound state: formation",
u"inflaton: perturbation",
u"correlation function: axial-vector",
u"horizon: temperature",
u"two-gluon",
u"space-time dependence",
u"calorimeter: trigger",
u"tritium: binding energy",
u"quark: gas",
u"effect: stochastic",
u"gap: spectral",
u"SciBooNE",
u"D0: hadroproduction",
u"baryon: vertex",
u"nuclear matter: strangeness",
u"statistical analysis: frequentist",
u"string: relativistic",
u"energy levels: hyperfine structure",
u"scattering amplitude: unitarity",
u"electron electron: inelastic scattering",
u"quark quark: elastic scattering",
u"gravitational radiation: propagation",
u"hadron: angular distribution",
u"transformation: harmonic",
u"hadron: showers",
u"space-time: Schroedinger",
u"algebra: Schroedinger",
u"Walecka model: nonlinear",
u"meson resonance: intermediate state",
u"Clifford",
u"parton: hadronization",
u"axino: production",
u"differential forms: harmonic",
u"CKM matrix: parametrization",
u"pseudoscalar meson: hadroproduction",
u"neutrino/mu: cosmic radiation",
u"effect: short-range",
u"beam monitoring: beam profile",
u"black body: radiation",
u"invariance: adiabatic",
u"differential cross section: longitudinal",
u"exchange: current",
u"angular momentum: flux",
u"fluid: holography",
u"charmonium: excited state",
u"category: representation",
u"cosmic radiation: deflection",
u"J/psi(3100): inclusive production",
u"lepton number: asymmetry",
u"quark: droplet",
u"Balitsky-Kovchegov equation: solution",
u"ZEPLIN",
u"pi+ p: elastic scattering",
u"n: particle identification",
u"production: asymmetry",
u"Higgs particle: multiple production",
u"beam focusing: colliding beams",
u"deuteron: target",
u"gravitation: stochastic",
u"WIMP: pair production",
u"D0: decay modes",
u"scalar particle: hadroproduction",
u"gluon: effect",
u"differential forms: 4",
u"mass: hadronic",
u"Michel parameter",
u"multiplet: mass difference",
u"cadmium",
u"showers: angular distribution",
u"magnetic field: measurement methods",
u"cross section: high energy behavior",
u"Walecka model",
u"neutrino: Dirac: mass",
u"nuclear matter: saturation",
u"Delta(1232): width",
u"damage",
u"Sp(2)",
u"neutralino: direct detection",
u"analyzing power: tensor",
u"n: interference",
u"compacton",
u"charmed meson: rare decay",
u"nucleon: interaction",
u"interference: laser",
u"exchange: one-meson",
u"spin: wave",
u"symmetry breaking: translation",
u"detector: acceleration",
u"quantization: topological",
u"hadron: final state",
u"action: local",
u"equation of state: time dependence",
u"membrane model: interaction",
u"cosmological model: ekpyrotic",
u"pi: mass difference",
u"gravitation: validity test",
u"set theory",
u"group: Mathieu",
u"tau-: width",
u"Kondo model",
u"boundary condition: anti-de Sitter",
u"antineutrino: energy spectrum",
u"glueball: hadronic decay",
u"action: topological",
u"Riccati equation",
u"intermediate boson: hadronic decay",
u"electromagnetic field: effect",
u"MIMAC",
u"muonium: hyperfine structure",
u"axion: density",
u"oscillation: spectrum",
u"vortex: moduli space",
u"sparticle: associated production",
u"mass number",
u"regeneration",
u"astrophysics: relativistic",
u"entropy: fluctuation",
u"top: angular distribution",
u"helium: density",
u"gluon: associated production",
u"electromagnetic decay",
u"quantum mechanics: deformation",
u"bottom meson: decay",
u"atomic physics: parity",
u"photon nucleus: inelastic scattering",
u"charmed baryon: width",
u"membrane: BPS",
u"nucleus: mass",
u"CREAM",
u"sulfur: fluorine",
u"energy: absorption",
u"expansion: nonrelativistic",
u"stability: topological",
u"top: multiple production",
u"Z': electroproduction",
u"geometry: deformation",
u"factorization: dependence",
u"geometry: Poisson",
u"spinor: massless",
u"quark: doublet",
u"background: warped",
u"rho(1700)",
u"effect: acoustic",
u"induction",
u"gauge field theory: quantization",
u"muon+: polarized beam",
u"omega(783): branching ratio",
u"surface: lunar",
u"string: scattering",
u"baryon antibaryon: asymmetry",
u"forward production",
u"K0: rare decay",
u"operator: transition",
u"tau: electroproduction",
u"quantum number: exotic",
u"higher-order: 5",
u"Lambda/c+: hadroproduction",
u"scattering: relativistic",
u"Tolman-Oppenheimer-Volkoff equation",
u"chi/c0(3415): hadronic decay",
u"hyperon: width",
u"scattering: W W",
u"a0(980): hadronic decay",
u"quantum gravity: simplex",
u"approximation: infrared",
u"neutral particle: long-lived",
u"detector: calibration",
u"sfermion: heavy",
u"expansion: momentum",
u"h/c(3526)",
u"approximation: dipole",
u"gap equation: solution",
u"spectrum: tensor",
u"effective potential: scalar",
u"parity: Z(2)",
u"Z': hadronic decay",
u"tritium: hypernucleus",
u"lepton: particle identification",
u"threshold: energy",
u"effect: beam-beam",
u"deuteron: beam",
u"Lambda Lambda",
u"general relativity: solution",
u"time: calibration",
u"symmetry: SU(6) x O(3)",
u"vector meson: form factor",
u"soliton: scalar",
u"magnetic monopole: Dirac",
u"D0: radiative decay",
u"M-theory: solution",
u"energy: Coulomb",
u"symmetry: Kac-Moody",
u"group theory: deformation",
u"ghost: scalar",
u"resonance: spectrum",
u"bottom: decay modes",
u"matter: wave",
u"Spin(7)",
u"gluon: interaction",
u"algebra: octonion",
u"fermion: confinement",
u"meson: neutral particle",
u"free electron laser: X-ray",
u"transformation: nonlinear",
u"resonance: model",
u"neutron star: collapse",
u"unparticle: tensor",
u"gauge boson: scattering",
u"scattering amplitude: parametrization",
u"quarkonium: mass difference",
u"domain wall: tension",
u"regularization: ultraviolet",
u"isospin: triplet",
u"jet: correlation",
u"current: conformal",
u"Sommerfeld enhancement",
u"germanium: nuclide",
u"Lipatov equation: solution",
u"electron: drift velocity",
u"string: fluid",
u"intermediate boson: mass",
u"K*(892): polarization",
u"coupling: kinetic",
u"model: confinement",
u"width: branching ratio",
u"static",
u"charged particle: long-lived",
u"positron: yield",
u"model: Dirac",
u"anisotropy: dipole",
u"ANITA",
u"CUORE",
u"jet: top",
u"K: multiplicity",
u"symmetry: nonlinear",
u"Boulby",
u"Fierz-Pauli equation",
u"B+",
u"bound state: energy spectrum",
u"energy: scaling",
u"hodoscope",
u"manual",
u"charmonium: width",
u"vector meson: spectral representation",
u"quark: energy spectrum",
u"dimuon: charge",
u"pseudoparticle",
u"plasma: frequency",
u"muon nucleus: nuclear reaction",
u"false vacuum: bubble",
u"exchange: Regge",
u"differential cross section: asymmetry",
u"topology: defect",
u"stacking",
u"fermion: valence",
u"Lambda Lambda: interaction",
u"nucleon nucleon: force",
u"muon deuteron: deep inelastic scattering",
u"polarization: power spectrum",
u"space: S(7)",
u"beam",
u"color: exchange",
u"neutrino nucleon: exclusive reaction",
u"beam dynamics: longitudinal",
u"axion: decay",
u"string: effect",
u"pi+ pi-: annihilation",
u"pi K: scattering length",
u"pi p: interaction",
u"geometry: discrete",
u"dilepton: thermal",
u"mass: sea",
u"metric: deformation",
u"string: fusion",
u"algebra: Temperley-Lieb",
u"tracking detector: performance",
u"gauge field theory: thermal",
u"nucleus: fission",
u"midisuperspace",
u"bremsstrahlung: emission",
u"p: showers",
u"Chan-Paton factor",
u"dibaryon: mass",
u"synchrotron oscillation",
u"resonance: massive",
u"charged particle: trajectory",
u"algebra: SU(3)",
u"space-time: Einstein-Cartan",
u"radiation: flux",
u"Vaidya",
u"critical phenomena: stability",
u"field equations: bounce",
u"power spectrum: oscillation",
u"p: interaction",
u"symmetry: Killing",
u"antimatter: production",
u"p: size",
u"cosmic radiation: density",
u"astrophysics: perturbation",
u"R-hadron",
u"pi pi: phase shift",
u"exchange: multiple",
u"D: excited state",
u"helium: yield",
u"color: 3",
u"neutrino: radiation",
u"pseudoscalar meson: wave function",
u"antideuteron",
u"hidden variable",
u"gauge field theory: Z(N)",
u"evolution equation: solution",
u"pi0: mass",
u"bending magnet: superconductivity",
u"eta(958): wave function",
u"correction: Yukawa",
u"Holst term",
u"bilepton: mass",
u"fluid: charge",
u"quark: correlation",
u"gravitino: lifetime",
u"toponium",
u"D0 anti-D0: oscillation",
u"operator: Virasoro",
u"n: confinement",
u"B+: lifetime",
u"space-time: S(3) x R(1)",
u"particle: charge",
u"pi0: electromagnetic decay",
u"jet: suppression",
u"quark quark: scattering amplitude",
u"isospin: 3/2",
u"radiation: length",
u"scaling: correction",
u"superfluid: relativistic",
u"form factor: momentum dependence",
u"pi: decay modes",
u"pseudoscalar meson: mixing",
u"fermion: symplectic",
u"potential: complex",
u"fragmentation: model",
u"space-time: bubble",
u"neutrino: right-handed: mass",
u"partial wave: interference",
u"slepton: hadroproduction",
u"fermion: technicolor",
u"Higgs particle: quantum number",
u"form factor: momentum transfer",
u"Hubble constant: time dependence",
u"muon: drift chamber",
u"eta: electromagnetic decay",
u"beam dynamics: nonlinear",
u"Lambda/b0: hadroproduction",
u"algebra: SO(2,1)",
u"neutrino/tau: flux",
u"pi: angular distribution",
u"pulsar: emission",
u"expansion: topological",
u"effective action: nonlocal",
u"torus: fuzzy",
u"local",
u"positron: annihilation",
u"viscosity: correction",
u"electron: coupling",
u"optics: background",
u"triplet: SU(2)",
u"quantum chromodynamics: action",
u"eta/c(2980): radiative decay",
u"aberration",
u"geometry: fluctuation",
u"conifold: transition",
u"bottom meson: rare decay",
u"baryon resonance: photoproduction",
u"vector meson: interaction",
u"pentaquark: wave function",
u"gravastar",
u"carbon: crystal",
u"lattice field theory: compact",
u"symmetry: SU(2) x SU(2) x U(1)",
u"B/c+: hadronic decay",
u"channel cross section: mass dependence",
u"color: charge",
u"detector: geometry",
u"W': hadroproduction",
u"momentum: fluctuation",
u"superfield: Higgs",
u"FINUDA",
u"p n: radiative capture",
u"Bethe-Salpeter equation: coupled channel",
u"lepton: momentum",
u"hyperon: pair production",
u"experimental methods: sensitivity",
u"field theory: messenger",
u"pi-: photoproduction",
u"superpotential: twist",
u"resistive plate chamber: glass",
u"velocity: spectrum",
u"p deuteron: inelastic scattering",
u"pi: absorption",
u"scalar particle: heavy",
u"beam: stability",
u"field equations: Toda",
u"K-: condensation",
u"quantum gravity: linear",
u"atom: gas",
u"symmetry: SU(N) x SU(N)",
u"Ward identity: conformal",
u"nucleon: mass spectrum",
u"nucleus: ground state",
u"tensor: conformal",
u"interpretation of experiments: DAMA",
u"deuteron nucleus: scattering",
u"charged particle: rapidity",
u"beam: orbit",
u"throat: warped",
u"gauge boson: multiple production",
u"nucleon nucleon: scattering length",
u"pi: associated production",
u"wormhole: Lorentz",
u"hadron: spin",
u"flux tube: electric",
u"pi pi: inelastic scattering",
u"anti-p p: ratio",
u"lepton: mass formula",
u"renormalization group: higher-order",
u"supersymmetry: vector",
u"pi: width",
u"star: energy loss",
u"Nambu bracket",
u"Ward identity: chiral",
u"quarkonium: hybrid",
u"B-L number: invariance",
u"transformation: local",
u"bottom baryon: hadronic decay",
u"potassium",
u"vorton",
u"twist: 4",
u"dual resonance model",
u"optics: communications",
u"pressure: Casimir",
u"pi nucleus: inclusive reaction",
u"model: nonrelativistic",
u"charm: semileptonic decay",
u"partition function: torus",
u"lepton: model",
u"B/s0: decay",
u"photon axion: oscillation",
u"smuon",
u"catastrophe theory",
u"membrane: production",
u"space-time: asymmetry",
u"plasma: oscillation",
u"scintillation counter: liquid argon",
u"capture: solar",
u"vacuum state: instanton",
u"Born-Infeld model: nonlinear",
u"dyon: condensation",
u"thermodynamics: critical phenomena",
u"pi+ nucleus: nuclear reaction",
u"magnet: multipole",
u"algebra: Cartan",
u"eta: pair production",
u"D/s+: leptonic decay",
u"Mathieu",
u"vacuum state: quantum",
u"Z': signature",
u"matter: rotation",
u"eta/c(2980): mass",
u"p deuteron: elastic scattering",
u"spin: fluctuation",
u"cesium: atom",
u"Chern-Simons term: induced",
u"fluctuation: stochastic",
u"field theory: monopole",
u"condensation: magnetic",
u"isospin: conservation law",
u"water: solids",
u"antineutrino: mixing angle",
u"p: relativistic",
u"radioactivity: background",
u"isospin: 1",
u"photon deuteron: inclusive reaction",
u"final focus",
u"muon+ muon-: storage ring",
u"instanton: gas",
u"final state: two-pion",
u"gradient",
u"anti-D0",
u"ruthenium",
u"vector boson: massive",
u"nucleus: spin",
u"tetraquark: hadronic decay",
u"muon+ muon-: ratio",
u"Hall effect: spin",
u"lepton: energy",
u"CERN SPS Coll",
u"Oak Ridge SNS PS",
u"lectures: introductory",
u"invisible decay",
u"quintessence: coupling",
u"quark quark: scattering",
u"p: electroproduction",
u"conservation law: Noether",
u"hadron: correlation",
u"Z0: pole",
u"superfield: vector",
u"string: pair production",
u"coupling: gravitation",
u"dissociation: electromagnetic",
u"production: strangeness",
u"field equations: relativistic",
u"D: form factor",
u"quantum number: conservation law",
u"p-adic",
u"photon: momentum",
u"p: radiation",
u"gravitino: density",
u"dilaton: coupling constant",
u"acceleration: stochastic",
u"condensation: vector",
u"black hole: semiclassical",
u"generalized parton distribution: moment",
u"model: liquid",
u"Ponzano-Regge model",
u"trigger: design",
u"zero mode: chiral",
u"programming: manual",
u"dark matter: power spectrum",
u"dark energy: decay",
u"pressure: perturbation",
u"quadrupole lens: superconductivity",
u"B/s: decay modes",
u"strange particle: yield",
u"microstate",
u"quantum gravity: renormalizable",
u"chargino: production",
u"supersymmetry: twist",
u"parity: operator",
u"magnetic field: time dependence",
u"sparticle: heavy",
u"bound state: Majorana",
u"fluctuation: vector",
u"operator: dimension: 4",
u"elements",
u"critical phenomena: conformal",
u"many-body problem: relativistic",
u"new particle: decay modes",
u"J/psi(3100): final state",
u"Yang-Baxter",
u"scale: compactification",
u"symmetry: Z(2) x Z(2)",
u"scalar particle: propagator",
u"bottom particle: hadroproduction",
u"screening: magnetic",
u"radiation: quantum",
u"scalar particle: triplet",
u"bottom: hadronic decay",
u"astrophysics: plasma",
u"mechanics: stability",
u"scattering: WIMP nucleus",
u"group: Coxeter",
u"second-class current",
u"magnetic field: color",
u"coupling: Coulomb",
u"perturbation: effect",
u"analysis",
u"efficiency: angular dependence",
u"transformation: CP",
u"quark hadron: duality",
u"baryon: propagator",
u"quantization: symplectic",
u"gauge boson: scattering amplitude",
u"Meissner effect: duality",
u"mechanics: action",
u"bottom particle: decay modes",
u"positron: spectrum",
u"loop equation",
u"tachyon: coupling",
u"gluon: angular momentum",
u"lattice field theory: finite temperature",
u"particle separator",
u"SU(3) x SU(2) x U(1)",
u"cross section: angular dependence",
u"matter: nonrelativistic",
u"jet: yield",
u"Z0: rare decay",
u"Higgs particle: off-shell",
u"symmetry: SL(2,C)",
u"photon pi: Compton scattering",
u"beam: absorption",
u"group: rotation",
u"group: SU(5)",
u"color: current",
u"charmed baryon: hadroproduction",
u"Wino: dark matter",
u"Lambda/b0: polarization",
u"charge: NUT",
u"chargino: associated production",
u"string tension: 0",
u"density: transverse",
u"gallium: arsenic",
u"geometrodynamics: quantum",
u"Kasner",
u"black ring: dipole",
u"string: magnetic",
u"knot theory: torus",
u"particle: long-lived",
u"action: deformation",
u"reheating: temperature",
u"vector meson: mixing angle",
u"asymmetry: difference",
u"bound state: energy levels",
u"LHC-B: upgrade",
u"hydrogen: polarized target",
u"meson: ground state",
u"gluon: Regge poles",
u"spin: longitudinal",
u"Sasaki-Einstein",
u"hadronic",
u"instanton: U(1)",
u"quark: neutrinoproduction",
u"B: excited state",
u"fibre bundle: U(1)",
u"fragmentation function: nonperturbative",
u"magnetic field: gravitation",
u"muon: secondary",
u"beam oscillation",
u"ghost: form factor",
u"approximation: narrow resonance",
u"heavy quark: jet",
u"space: CP(2)",
u"antineutrino/e: particle identification",
u"baryon resonance: electroproduction",
u"position sensitive",
u"spin: tensor",
u"B-L number: asymmetry",
u"cross section: spin",
u"fermion: spin",
u"soliton: model",
u"Theta(1540): parity",
u"storage ring: proposed",
u"vector boson: scattering",
u"energy: upgrade",
u"superconductivity: interference",
u"orbifold: asymmetry",
u"perturbation: time dependence",
u"charmed particle",
u"sum rule: spin",
u"stop: production",
u"ATLAS: upgrade",
u"xenon: double-beta decay",
u"K+: leptonic decay",
u"measure: 2",
u"gluon: matter",
u"transformation: wavelet",
u"sigma model: O(4)",
u"K- p: bound state",
u"nuclear reaction: particle flow",
u"bottom: coupling",
u"nucleon: mass difference",
u"black hole: embedding",
u"Morse",
u"operator: deformation",
u"charmonium: pair production",
u"antibaryon: hadroproduction",
u"quantum electrodynamics: vacuum state",
u"fermion: boundary condition",
u"transducer",
u"angular momentum: density",
u"gauge boson: excited state",
u"anti-K",
u"FOPI",
u"moduli space: stability",
u"carbon: nuclide",
u"flavor: 6",
u"Teukolsky equation",
u"pomeron: structure function",
u"MIT Linac",
u"membrane model: heterotic",
u"supergravity: off-shell",
u"f1(1285)",
u"D/s*(2110): radiative decay",
u"p: transverse momentum",
u"antineutrino/mu: secondary beam",
u"jet: multiplicity: high",
u"Z0: polarization",
u"scalar particle: decay modes",
u"photon pi: interaction",
u"fluctuation: power spectrum",
u"Polyakov action",
u"acceleration: wake field",
u"proposed experiment: satellite",
u"equation of state: linear",
u"pentaquark: multiplet",
u"W: mass: measured",
u"interaction: length",
u"quasar: spectrum",
u"Y(4660)",
u"star: production",
u"photon electron: deep inelastic scattering",
u"scaling: Weyl",
u"sparticle: lifetime",
u"parton: hard scattering",
u"Einstein-Cartan",
u"multiplet: coupling",
u"transformation: nonlocal",
u"quantum chromodynamics: effective action",
u"neutralino: hadroproduction",
u"scale: confinement",
u"p: momentum",
u"antineutrino/e: path length",
u"gauge boson: abelian",
u"delocalization",
u"jet: charge",
u"rho(770): radiative decay",
u"ion: radioactivity",
u"pi: generalized parton distribution",
u"cosmic radiation: correlation",
u"U(N) x U(N)",
u"antihydrogen: production",
u"rapidity: asymmetry",
u"form factor: semileptonic decay",
u"microwaves: absorption",
u"supersymmetry: flat direction",
u"pi: particle flow",
u"cosmological model: solution",
u"pseudovector",
u"anti-p nucleus: interaction",
u"Z': mixing",
u"momentum: diffusion",
u"charmed baryon: bottom baryon",
u"space-time: collapse",
u"neutrino: flavor: ratio",
u"neutralino: mass spectrum",
u"superstring: heterotic",
u"W': decay",
u"neutrino: new interaction",
u"field theory: higher-dimensional",
u"magnetic spectrometer: superconductivity",
u"positron: polarization",
u"scattering: gravitation",
u"momentum: density",
u"CAST",
u"resummation: transverse momentum",
u"tracking detector: upgrade",
u"pi0: energy spectrum",
u"freeze-out: kinetic",
u"antineutrino/e: nuclear reactor",
u"vector meson: excited state",
u"vacuum state: Yang-Mills",
u"squark: flavor: violation",
u"meson: dispersion relation",
u"gravitation: constraint",
u"gluon: field theory",
u"lattice field theory: Regge",
u"cyclotron: resonance",
u"anti-p nucleus: nuclear reaction",
u"correlation: error",
u"space: triangulation",
u"nucleus: charge",
u"a1(1260): width",
u"lepton: momentum spectrum",
u"gravitation: scattering amplitude",
u"accelerator: two-beam",
u"form factor: gravitation",
u"M-theory: duality",
u"Mellin transformation: moment",
u"GALLEX",
u"photon: photoproduction",
u"matrix model: deformation",
u"new interaction: four-fermion interaction",
u"cross section: x-dependence",
u"gauge field theory: SU(N) x U(1)",
u"Kadomtsev-Petviashvili equation",
u"quark: velocity",
u"sneutrino: pair production",
u"Upsilon(10570): branching ratio",
u"Pauli equation",
u"niobium: tin",
u"qubit: entanglement",
u"antineutrino: detector",
u"scintillation counter: efficiency",
u"WIMP: halo",
u"hadron: energy spectrum",
u"photon photon: scattering amplitude",
u"meson: nonet",
u"moment: sum rule",
u"nucleus: excited state",
u"redshift: transition",
u"Phi(1020): pair production",
u"Lambda(1520): hadroproduction",
u"W+: hadroproduction",
u"superfield: scalar",
u"quarkonium: suppression",
u"star: density",
u"gas: flow",
u"nucleus: many-body problem",
u"electron: Dirac",
u"time projection chamber: design",
u"mercury: electric moment",
u"tau: production",
u"doublet: 1",
u"resonance: enhancement",
u"charmed meson: direct production",
u"electric field: upper limit",
u"space: Teichmueller",
u"scintillation counter: design",
u"topology: effect",
u"symmetry: dilation",
u"teleparallel",
u"helium: primordial",
u"glueball: correlation function",
u"4/3",
u"string: coupling constant",
u"dark energy: accretion",
u"analyzing power: vector",
u"group: nonabelian",
u"quarkonium: energy levels",
u"tracking detector: design",
u"diquark: correlation",
u"pseudoscalar particle",
u"beam optics: design",
u"charge: resolution",
u"S-matrix: analytic properties",
u"time: reparametrization",
u"von Neumann",
u"beam: pulsed",
u"background: plane wave",
u"chi mesons: hadroproduction",
u"meson baryon: coupling",
u"mass: up",
u"momentum: broadening",
u"nucleon: pole",
u"grand unified theory: orbifold",
u"graviton: fluctuation",
u"background: D-brane",
u"nucleon: pair",
u"tunneling: semiclassical",
u"density: pseudoscalar",
u"helicity: angular distribution",
u"D-brane: BPS",
u"model: heterotic",
u"D: leptonic decay",
u"Dirac equation: nonlinear",
u"effect: rotation",
u"pi nucleus: interaction",
u"pulsed",
u"Chern-Simons term: nonabelian",
u"tracking detector: alignment",
u"SU(3) x SU(3) x U(1)",
u"Schwinger terms",
u"back reaction: quantum",
u"neutralino: electroproduction",
u"vortex: topological",
u"B/c: hadroproduction",
u"cross section: momentum transfer",
u"vector meson: exclusive production",
u"geometry: twist",
u"positron p: interaction",
u"photon: sphere",
u"meson: octet",
u"pi: interaction",
u"D: electroproduction",
u"field theory: induced",
u"meson: operator",
u"invariance: symplectic",
u"mass: energy dependence",
u"hadron hadron",
u"black hole: quantization",
u"form factor: expansion",
u"K: associated production",
u"current: constraint",
u"Neutrino Ettore Majorana Observatory",
u"Fokker-Planck equation: solution",
u"K: secondary beam",
u"postulated particle: massive",
u"Polyakov loop: renormalization",
u"Nicolai map",
u"WIMP: coupling",
u"central charge: Virasoro",
u"slepton: production",
u"magnetic field: turbulence",
u"chi/c1(3510): radiative decay",
u"CP",
u"antihyperon: hadroproduction",
u"bubble chamber: heavy liquid",
u"solid-state counter",
u"Boltzmann brain",
u"photon: isolated production",
u"cosmological model: dust",
u"p: storage ring",
u"minimal supersymmetric standard model: benchmark",
u"neutralino p: scattering",
u"cluster: mass",
u"quantization: Weyl",
u"graviton: condensation",
u"model: Toda",
u"dark energy: tachyon",
u"fermion: spinless",
u"chargino: decay modes",
u"S-duality: transformation",
u"expansion: twist",
u"invariance: U(1)",
u"mirror: model",
u"K0(L): width",
u"field equations: instanton",
u"cosmic radiation: scattering",
u"potential: nonperturbative",
u"correlation: nonlocal",
u"energy: dispersion",
u"gauge: linear",
u"moduli space: Calabi-Yau",
u"hydrogen: hypernucleus",
u"B+: hadroproduction",
u"bound state: binding energy",
u"space-time: BTZ",
u"electron cooling",
u"fermion: fluctuation",
u"energy spectrum: moment",
u"unitarity: operator",
u"symmetry: algebra",
u"bismuth",
u"spectrometer: acceptance",
u"Goldstone particle: decay constant",
u"oscillation: collective",
u"W+: pair production",
u"algebra: nonlinear",
u"hadron: colliding beams",
u"time: quantization",
u"current: magnetic",
u"mass: measurement methods",
u"yield: strangeness",
u"new particle: coupling",
u"transition: topological",
u"conservation law: nonlocal",
u"string: massless",
u"superspace: conformal",
u"quark quark: correlation",
u"flow: Bjorken",
u"pi: elliptic flow",
u"holonomy: SU(3)",
u"transformation: discrete",
u"soliton: static",
u"scaling: linear",
u"solution: topological",
u"scattering amplitude: hard scattering",
u"pi0: multiplicity",
u"isospin: invariance",
u"hierarchy: Toda",
u"radioactivity: chemistry",
u"background field: classical",
u"K+ K-",
u"scaling: longitudinal",
u"EDELWEISS",
u"dipole: electromagnetic",
u"string model: twistor",
u"symmetry: SO(10)",
u"space-time: simplex",
u"photon: spectrometer",
u"jet: background",
u"jet: energy flow",
u"hadron: correlation function",
u"background: hadronic",
u"wave",
u"charged particle: scattering",
u"a1(1260): mass",
u"multiple scattering: Coulomb",
u"meson: spin",
u"gluino: heavy",
u"inflation: topological",
u"quasar: absorption",
u"approximation: planar",
u"group",
u"space-time: plane wave",
u"fluctuation: thermodynamical",
u"S(4)",
u"n: magnetic moment",
u"transformation: linear",
u"b1(1235)",
u"fermion: electric moment",
u"pseudoscalar meson: production",
u"muon+ muon-: colliding beams",
u"statistics: transition",
u"electron: correlation",
u"K*(892): mass",
u"transverse momentum: ratio",
u"showers: fluctuation",
u"quantum mechanics: nonlinear",
u"expansion: effective range",
u"X(4350)",
u"gauge field theory: SU(3) x SU(3)",
u"effective action: chiral",
u"parastatistics",
u"neutron star: spin",
u"charmonium: electroproduction",
u"fermion: polarization",
u"K0(S): pair production",
u"photon n: inelastic scattering",
u"taste",
u"fermion fermion: interaction",
u"Z0: longitudinal",
u"Compton scattering: form factor",
u"positron: angular distribution",
u"group: E(7)",
u"string: topology",
u"silicon: glass",
u"rescaling: conformal",
u"positronium: lifetime",
u"phase space: parametrization",
u"vector manifestation",
u"structure function: bottom",
u"jet: blazar",
u"polarization: target",
u"psi(3685): decay modes",
u"nucleus: photofission",
u"stop: coupling",
u"Lambda/c+: branching ratio",
u"SVD",
u"meson baryon: elastic scattering",
u"pulsar: rotation",
u"string model: classical",
u"NuTeV",
u"XYZ model",
u"membrane: charge",
u"spin: torsion",
u"charged particle: velocity",
u"gluon: vertex function",
u"spectral representation: vector",
u"torsion: tensor",
u"Upsilon(10355): hadronic decay",
u"Kontsevich model",
u"dark matter: spin",
u"hyperon: form factor",
u"f0(600): radiative decay",
u"monopole: confinement",
u"quark antiquark: scattering",
u"eta(958): decay modes",
u"neutrino: time-of-flight",
u"nucleon: electric moment",
u"wave function: variational",
u"string: charge",
u"charmed baryon: mass spectrum",
u"multigluon",
u"parity: doubling",
u"beam emittance: longitudinal",
u"gravitation: rainbow",
u"approximation: variational",
u"quark: effect",
u"oscillation: model",
u"photon: diffusion",
u"Higgs particle: mediation",
u"nucleon: exchange",
u"algebra: constraint",
u"antineutrino/mu: flux",
u"coupling constant: hierarchy",
u"wavelength shifter: fibre",
u"quark: diffusion",
u"pi: scattering length",
u"strange particle: hadronic decay",
u"new interaction: fifth force",
u"observatory: proposed",
u"K nucleon: elastic scattering",
u"black hole: holography",
u"equilibrium",
u"pi: spectrum",
u"CP(1)",
u"background: Melvin",
u"baryon: spin",
u"f0(980): intermediate state",
u"charged particle: flux",
u"charge: effect",
u"dilepton: resonance",
u"jet: resolution",
u"quantum mechanics: effect",
u"fluid: turbulence",
u"top: charge",
u"matter: anisotropy",
u"new particle: production",
u"K0: radiative decay",
u"charged particle: orbit",
u"tau: trigger",
u"momentum transfer: spectrum",
u"gravitation: unimodular",
u"chargino: electroproduction",
u"kinematics: constraint",
u"symmetry: U(2) x U(2)",
u"lead: beam",
u"model: O(N)",
u"rho(770)+",
u"approximation: nonperturbative",
u"supergravity: parameter space",
u"approximation: on-shell",
u"orbifold: Z(2) x Z(2)",
u"gluon: longitudinal",
u"photon electron: scattering",
u"Finsler",
u"magnetic field: decay",
u"measure: quantum",
u"space: lattice",
u"radiation: fluid",
u"space-time: Bertotti-Robinson",
u"microwaves: emission",
u"cosmic radiation: injection",
u"bottom meson: hadroproduction",
u"plane wave: interaction",
u"detector: fabrication",
u"gravitation: spinor",
u"vector meson: decay modes",
u"velocity: expansion",
u"pi: density",
u"gauge field theory: higher-dimensional",
u"p p",
u"chargino: exchange",
u"B-: rare decay",
u"nonpolynomial",
u"n nucleus: elastic scattering",
u"space-time: complex",
u"Grenoble ESRF",
u"J/psi(3100): mass",
u"dark matter: spatial distribution",
u"Lambda/c+: electroproduction",
u"pi1(1600)",
u"perturbation theory: nonlinear",
u"black hole: deformation",
u"electromagnetic interaction: effect",
u"D*(2010): pair production",
u"star: orbit",
u"calorimeter: satellite",
u"baryon: hybrid",
u"cross section: helicity",
u"gravitation: Einstein-Hilbert",
u"fermion: quasiparticle",
u"entropy: Hawking",
u"neutrino: momentum",
u"damage: time dependence",
u"neutrino p: deep inelastic scattering",
u"superconductivity: string",
u"tau: jet",
u"star: velocity",
u"approximation: thermodynamical",
u"Delta(1232): propagator",
u"graviton: absorption",
u"pressure: transverse",
u"astrophysics: wave function",
u"electron: interaction",
u"bottom: width",
u"invariance: SU(2)",
u"fermion: Kaluza-Klein",
u"sum rule: momentum",
u"analyzing power: angular dependence",
u"sphericity",
u"alignment: error",
u"mass spectrum: discrete",
u"spinor: chiral",
u"surface: effect",
u"psi(3685): width",
u"event shape analysis: planarity",
u"dilaton: massless",
u"eta: production",
u"operator: lattice",
u"coupling constant: pseudoscalar",
u"electroweak interaction: dynamical symmetry breaking",
u"scaling: transformation",
u"meson: coupling constant",
u"gluon: constituent",
u"DIRAC",
u"geometry: Euclidean",
u"recoil: polarization",
u"parity: spontaneous symmetry breaking",
u"attractor: de Sitter",
u"fluid: collapse",
u"stau: LSP",
u"algebra: SL(2,R)",
u"dispersion relation: linear",
u"galaxy: density",
u"iodine",
u"quantum mechanics: Yang-Mills",
u"W-: hadroproduction",
u"momentum: finite",
u"baryon: mass formula",
u"Z0: final state",
u"background field: tensor",
u"quark antiquark: elastic scattering",
u"trigger: upgrade",
u"photofission",
u"fluctuation: random",
u"three-body problem: relativistic",
u"chromaticity",
u"solution: semiclassical",
u"magnetic field: dependence",
u"charged particle: capture",
u"neutrino: mass ratio",
u"algebra: Moyal",
u"WIMP nucleus: inelastic scattering",
u"photon: time delay",
u"Gross-Neveu model: chiral",
u"hypernucleus: binding energy",
u"space: S(N)",
u"charge conjugation: operator",
u"symmetry breaking: SU(2) x U(1)",
u"quasiparticle: width",
u"tau: lifetime",
u"hypernucleus: lifetime",
u"soliton: charge",
u"Beltrami",
u"emission",
u"beam transport: colliding beams",
u"muon-: pair production",
u"neodymium",
u"meson resonance: mass spectrum",
u"slepton: mixing",
u"neutrino/tau: mass",
u"J/psi(3100): decay",
u"jet: hadronization",
u"model: kinetic",
u"quantum gravity: fluctuation",
u"beam-beam: interaction",
u"gluon: gas",
u"zirconium",
u"conservation law: strangeness",
u"Drinfeld double",
u"parton: elastic scattering",
u"symmetry: SO(8)",
u"space-time: boundary condition",
u"Higgs particle: exclusive production",
u"curvature: induced",
u"pi: superfluid",
u"Skyrme model: SU(2)",
u"charmed particle: decay",
u"potential: chiral",
u"bolometer: crystal",
u"energy: gravitation",
u"deuterium: mesic atom",
u"matter: viscosity",
u"dissociation: temperature",
u"B*(5320)",
u"Klein-Gordon equation: massless",
u"Kerr-Newman",
u"nucleus: magnetic moment",
u"two-point function: scalar",
u"axion: flux",
u"tracks: resolution",
u"laser: polarization",
u"K+: width",
u"cosmological constant: decay",
u"charmonium: hybrid",
u"renormalization: algebra",
u"photon deuteron: Compton scattering",
u"LSP: production",
u"charge: correlation function",
u"energy: decay",
u"hadron: energy",
u"parton: momentum",
u"total cross section: correction",
u"atom: transition",
u"lepton: universality: violation",
u"sfermion: mixing angle",
u"multiplicity: scaling",
u"antideuteron: cosmic radiation",
u"neutrino p: inclusive reaction",
u"rho(770): form factor",
u"inflaton: axion",
u"lepton: photoproduction",
u"B0 anti-B0",
u"astrophysics: matter",
u"baryon: density: high",
u"gauge field theory: SO(5)",
u"cavity: microwaves",
u"space-time: oscillation",
u"halo: formation",
u"graviton: background",
u"hadron: particle identification",
u"charmed particle: photoproduction",
u"differential cross section: rapidity",
u"charged particle: angular distribution",
u"pi nucleon: coupling",
u"beam: energy loss",
u"rho(770): photoproduction",
u"channel cross section: transverse momentum",
u"photon: interference",
u"pi: correlation function",
u"quantum gravity: validity test",
u"Type I",
u"charged particle: heavy",
u"muon: acceleration",
u"B: B-parameter",
u"approximation: fluid",
u"symmetry breaking: SU(4)",
u"solenoid: design",
u"boson: heavy",
u"model: triplet",
u"pi0: multiple production",
u"baryon resonance: decay",
u"n: superfluid",
u"heavy quark: diffusion",
u"supersymmetry: 5",
u"resolution: impact parameter",
u"plasma: expansion",
u"field theory: Yang-Mills",
u"neutrino antineutrino: asymmetry",
u"Pauli-Villars",
u"radioactivity: induced",
u"length",
u"electron: photoproduction",
u"algebra: Yangian",
u"horizontal symmetry: SU(3)",
u"curvaton: mass",
u"antineutrino: mass difference",
u"membrane: rotation",
u"ion: capture",
u"helium: irradiation",
u"GNO",
u"foam",
u"top: right-handed",
u"invariance: relativistic",
u"X-ray: background",
u"attenuation: length",
u"f0(600): propagator",
u"model: weak interaction",
u"wormhole: mass",
u"neural network: Bayesian",
u"neutralino p: elastic scattering",
u"bound state: mass spectrum",
u"polarization: angular dependence",
u"pi: charge radius",
u"D anti-D: molecule",
u"asymmetry: Collins",
u"spectrum: x-dependence",
u"K- p: exclusive reaction",
u"interpretation of experiments: PHOBOS",
u"Goldstone particle: multiplet",
u"B/s: excited state",
u"interferometer: sensitivity",
u"calorimeter: imaging",
u"omega(783): electroproduction",
u"flux: U(1)",
u"string: bound state",
u"scattering amplitude: eikonal",
u"anti-B0: width",
u"electron: annihilation",
u"scalar particle: composite",
u"hydrodynamics: nonlinear",
u"family: 1",
u"interference: Bethe-Heitler",
u"f0(980): model",
u"muon nucleus: interaction",
u"K0(S): radiative decay",
u"mass spectrum: moment",
u"atmosphere: monitoring",
u"Hartree approximation: relativistic",
u"Darmstadt Lab",
u"Laplace transformation",
u"turbulence: hydrodynamics",
u"pi+ p: interaction",
u"p-brane: 8",
u"baryon: coupling",
u"algebra: lattice",
u"pi: charge",
u"saxion: decay",
u"anomaly: effect",
u"dilaton: massive",
u"Higgs particle: decoupling",
u"hyperon: coupling",
u"cosmic radiation: atmosphere",
u"Hamiltonian formalism: light front",
u"Uppsala CELSIUS Stor",
u"gauge field theory: nonlinear",
u"postulated particle: lifetime",
u"cosmic radiation: temperature",
u"positronium: decay",
u"scattering amplitude: singularity",
u"pi- p: interaction",
u"detector: alignment",
u"particle: mechanics",
u"tau: showers",
u"color spin locked phase",
u"symmetry breaking: SO(3)",
u"transverse energy: density",
u"nucleon nucleon: bremsstrahlung",
u"factorization: approximation",
u"invariance: SL(2,R)",
u"duality: cascade",
u"matrix model: Yang-Mills",
u"p: injection",
u"entropy: ratio",
u"Z0: penguin",
u"gluon: current",
u"cavity: rotation",
u"Antilambda: hadroproduction",
u"stop: NLSP",
u"soliton: string",
u"billiard",
u"wire: quantum",
u"inflation: tachyon",
u"J/psi(3100): elliptic flow",
u"quantum group: SL(2)",
u"dark energy: anisotropy",
u"background: thermal",
u"strong interaction: new interaction",
u"coupling: magnetic",
u"dimension: space-time",
u"plasma: model",
u"dimension: dependence",
u"flavor: universality",
u"lead: target",
u"field equations: nonlocal",
u"pressure: high",
u"gravitational radiation: effect",
u"effect: surface",
u"electric moment: multipole",
u"Sunyaev-Zel'dovich effect",
u"meson nucleon: coupling",
u"bottom meson: charmed meson",
u"K0: branching ratio",
u"particle: exchange",
u"symmetry breaking: SU(5)",
u"entropy: density: ratio",
u"rubidium",
u"Skyrmion: mass",
u"gravitation: metric",
u"defect: formation",
u"quantum mechanics: scattering",
u"strangeness: hadroproduction",
u"field theory: coupling",
u"Z': effect",
u"momentum: difference",
u"mass: position dependence",
u"CELLO",
u"fibre bundle: spin",
u"spin: 4",
u"neutrino: superfield",
u"muon nucleus: deep inelastic scattering",
u"orbit: circle",
u"gravitation: magnetic field",
u"SO(32)",
u"gravitational radiation: shock waves",
u"effect: anisotropy",
u"field theory: geometrical",
u"orientifold: torus",
u"time: operator",
u"quantum electrodynamics: perturbation theory",
u"quark: magnetic moment",
u"meson resonance: exotic",
u"potential: relativistic",
u"bound state: pole",
u"particle: energy spectrum",
u"pseudoscalar particle: mass",
u"decoherence: time",
u"space: embedding",
u"tachyon: field theory",
u"ion: cosmic radiation",
u"dimension: fractional",
u"monitoring: gas",
u"stop: decay modes",
u"Upsilon(9460): photoproduction",
u"decay: vertex",
u"bottom: production",
u"group: affine",
u"antimatter: asymmetry",
u"interaction: pseudoscalar",
u"eta/c(2980): associated production",
u"resonance: cavity",
u"algebra: E(11)",
u"quantum mechanics: validity test",
u"operator: Weinberg",
u"nucleon: three-body problem",
u"magnetic monopole: nonabelian",
u"algebra: Becchi-Rouet-Stora",
u"Higgsino: NLSP",
u"neutrino: anisotropy",
u"space-time: Nariai",
u"Sp(4)",
u"Sigma+",
u"Sigma0",
u"charge: confinement",
u"fermion: neutral particle",
u"magnetic monopole: density",
u"photon: angular momentum",
u"Kerr-Schild",
u"Klebanov-Strassler model",
u"transition: Hagedorn",
u"photon: statistics",
u"DESY XFEL",
u"pi0: leptonic decay",
u"particle: propagation",
u"symmetry: SU(1,1)",
u"quantum chromodynamics: bound state",
u"antinucleus",
u"correlation: two-photon",
u"space-time: causality",
u"SO(12)",
u"geometry: induced",
u"Nahm transformation",
u"p n",
u"renormalon: infrared",
u"operator: Weyl",
u"Upsilon(10020): radiative decay",
u"cross section: dissociation",
u"helium: energy spectrum",
u"dark matter: strong interaction",
u"B-: leptonic decay",
u"gauge field theory: discrete",
u"membrane model: higher-dimensional",
u"model: Veneziano",
u"holonomy: flux",
u"K: potential",
u"graviton: polarization",
u"symmetry: SU(3) x SU(3) x U(1)",
u"selenium",
u"resonance: exotic",
u"orientifold: Z(2) x Z(2)",
u"sneutrino: leptonic decay",
u"effect: viscosity",
u"pi: massless",
u"particle: wave function",
u"quark: scattering",
u"Z': associated production",
u"Y(2175)",
u"transition: photon axion",
u"electron: structure function",
u"saxion",
u"Pauli principle: violation",
u"electron electron: scattering",
u"neutrino: showers",
u"Sigma(1385): hadronic decay",
u"J/psi(3100): momentum spectrum",
u"zinc",
u"lepton nucleon: exclusive reaction",
u"D/s1(2536)",
u"gluino: effect",
u"neutrino nucleus",
u"p: leading particle",
u"quantum algebra: representation",
u"defect: integrability",
u"fixed point: orbifold",
u"electric field: static",
u"pi nucleon: inclusive reaction",
u"black hole: bound state",
u"meson nucleon: elastic scattering",
u"Sigma+: semileptonic decay",
u"electron: bremsstrahlung",
u"eta/c(3590): hadronic decay",
u"Z(3930)",
u"microwaves: coupling",
u"cosmological model: oscillation",
u"KASCADE",
u"proton synchrotron: booster",
u"tau: neutrinoproduction",
u"lepton: mass: hierarchy",
u"B/c: rare decay",
u"tunneling: effect",
u"charmonium: decay modes",
u"potential: D-term",
u"nucleus: scattering",
u"pi: fragmentation function",
u"quark: Regge",
u"tungsten: target",
u"invariance: SL(2,C)",
u"inflaton: trajectory",
u"K anti-K: molecule",
u"perturbation theory: string",
u"anti-K: condensation",
u"W W: elastic scattering",
u"Dirac equation: massive",
u"Los Alamos Lab",
u"interaction: topological",
u"magnetic monopole: flux",
u"K nucleon: scattering amplitude",
u"NA60",
u"W: photoproduction",
u"non-Gaussianity: scale dependence",
u"algebra: Calabi-Yau",
u"form factor: calculated",
u"horizon: mass",
u"correction: screening",
u"energy levels: width",
u"atom: ionization",
u"charmed particle: production",
u"fibre bundle: torus",
u"interpretation of experiments: Juelich COSY PS",
u"approximation: diffusion",
u"drift chamber: liquid argon",
u"meson: oscillation",
u"polarization: monitoring",
u"fermion: string model",
u"Delta(1232): magnetic moment",
u"dimuon: charge: asymmetry",
u"charmed particle: decay modes",
u"gluon: fluctuation",
u"mechanics: relativistic",
u"neutralino p: interaction",
u"gamma ray: pulsed",
u"operator: higher-order",
u"approximation: effective range",
u"space-time: Kaluza-Klein",
u"Dalitz plot: slope",
u"neutrino: associated production",
u"quantum chromodynamics: weak coupling",
u"gravitation: emission",
u"neutrino: bremsstrahlung",
u"geometry: thermodynamical",
u"radiation: absorption",
u"coupling: energy dependence",
u"flavor: correlation",
u"algebra: SU(4)",
u"scale: transformation",
u"superpotential: coupling",
u"deep inelastic scattering: neutral current",
u"O(N,N)",
u"wave function: Dirac",
u"neon: nuclide",
u"electron: synchrotron radiation",
u"correction: semiclassical",
u"rho(770)0: propagator",
u"colliding beam detector: proposed",
u"plasma: formation",
u"lepton: left-handed",
u"gluon: scalar",
u"inflation: de Sitter",
u"fermion: pole",
u"cosmic radiation: absorption",
u"nucleon: distribution amplitude",
u"Lambda(1520): hadronic decay",
u"eta/c(2980): electroproduction",
u"quark: mirror particle",
u"K+: momentum spectrum",
u"decoupling: effect",
u"nuclear matter: temperature",
u"chi mesons: width",
u"Lambda/b0: radiative decay",
u"geometry: background",
u"hadron: yield: ratio",
u"f0(980): width",
u"action: discrete",
u"p p: colliding beam detector",
u"angular momentum: orbit",
u"quarkonium: polarization",
u"fusion: W W",
u"particle: superluminal",
u"antineutrino nucleus: interaction",
u"deuteron: production",
u"K: wave function",
u"fermion number: scaling",
u"dimuon: same sign",
u"angular momentum: high",
u"group theory: orbit",
u"magnetic field: surface",
u"neutrino: elastic scattering",
u"baryon: mixing angle",
u"potential: deformation",
u"baryon: energy loss",
u"hypernucleus: production",
u"baryon baryon: potential",
u"flux: energy dependence",
u"quark: delocalization",
u"dijet: mass",
u"excited state: collective phenomena",
u"modulation: phase",
u"bottomonium: decay",
u"electron p: storage ring",
u"F(4)",
u"pi-: production",
u"group: SO(10)",
u"pi: coupling",
u"quasiparticle: energy",
u"grand unified theory: SU(5) x SU(5)",
u"pi p: elastic scattering",
u"final state",
u"eRHIC",
u"photon deuteron: elastic scattering",
u"space-time: Newton-Hooke",
u"potential: singularity",
u"scaling: infrared",
u"black ring: charge",
u"scale: Kaluza-Klein",
u"hadron hadron: hard scattering",
u"neutron star: magnetic field",
u"approximation: quasiparticle",
u"derivative",
u"positronium: energy levels",
u"Cherenkov counter: design",
u"Sigma/b",
u"spin: amplitude analysis",
u"threshold: singularity",
u"field strength: constant",
u"curvature: thermodynamical",
u"electric field: background",
u"gravitational radiation detector: correlation",
u"N(1535): width",
u"Langevin equation: solution",
u"radion: decay",
u"bottom meson: mass spectrum",
u"sextet",
u"spectrum: gap",
u"neutrino deuteron: nuclear reaction",
u"singlet: 1",
u"gravastar: stability",
u"algebra: W(3)",
u"effective Lagrangian: relativistic",
u"LSP: annihilation",
u"neutrino: tachyon",
u"Xi-: hadronic decay",
u"electron: vacuum polarization",
u"ion: negative particle",
u"solution: time dependence",
u"pseudoscalar meson: octet",
u"magnetic monopole: BPS",
u"tachyon: kink",
u"f0(980): mass",
u"defect: production",
u"energy: renormalization",
u"confinement: magnetic",
u"nucleon: semileptonic decay",
u"compact",
u"bag model: chiral",
u"membrane model: magnetic",
u"neutrino: transition",
u"Lambda/b0: decay",
u"equation of state: nonlinear",
u"texture: spin",
u"eta/c(2980): photoproduction",
u"string model: noncommutative",
u"photon: secondary",
u"pi: momentum",
u"diquark: vector",
u"renormalization group: correction",
u"lepton: polarization: asymmetry",
u"FFAG",
u"computer: cluster",
u"correlation function: ratio",
u"metric: Kerr",
u"color: spin",
u"coupling: constraint",
u"black body",
u"space-time: vacuum state",
u"GL(N)",
u"tachyon: decay",
u"carbon: ion",
u"unparticle: propagator",
u"radiation: shielding",
u"gluon: correlation",
u"general relativity: quantization",
u"cosmological model: higher-dimensional",
u"photon: spin",
u"sneutrino: decay modes",
u"coupling: time dependence",
u"instanton: deformation",
u"axion: radiative decay",
u"transport theory: semiclassical",
u"Burgers equation",
u"interpretation of experiments: DZERO",
u"duality: Poincare",
u"boson: operator",
u"baryon: correlation function",
u"Salpeter equation: spinless",
u"Hartree-Fock approximation: relativistic",
u"electron p",
u"p: particle source",
u"meson: fluctuation",
u"quark: quasiparticle",
u"electron nucleus: capture",
u"quark: mass: calculated",
u"orbifold: abelian",
u"scalar particle: branching ratio",
u"antineutrino: production",
u"gluon: recombination",
u"transformation: Poincare",
u"cosmic radiation: anomaly",
u"rho(770)0: radiative decay",
u"K0: width",
u"emission: two-photon",
u"NLSP",
u"pulsar: frequency",
u"D/s: excited state",
u"gauge field theory: external field",
u"SU(2) x SU(2) x U(1)",
u"spectrometer: time-of-flight",
u"angular momentum: complex",
u"quantum electrodynamics: effective action",
u"squark: coupling",
u"gas: nonrelativistic",
u"charm: hadronization",
u"Goldstone particle: equivalence theorem",
u"oscillator: relativistic",
u"geometry: fuzzy",
u"pomeron: resolved",
u"space-time: Snyder",
u"Higgs particle: interference",
u"charmed particle: hadronic decay",
u"neutrino/e: particle source",
u"top'",
u"approximation: relativistic",
u"vortex: multiple",
u"condensation: nonlocal",
u"nuclear medicine",
u"medicine: imaging",
u"gauge field theory: de Sitter",
u"field theory: decay",
u"scaling: local",
u"vector meson: heavy",
u"sneutrino: annihilation",
u"nucleon: sea",
u"parton: correlation function",
u"bottom: rare decay",
u"spin: susceptibility",
u"action: Yang-Mills",
u"matter: charge",
u"track data analysis: efficiency",
u"resonance: hadronic decay",
u"scaling: Feynman",
u"potential: approximation",
u"photon quark: interaction",
u"muon: pair",
u"hadron: polarization",
u"p: UHE",
u"chargino: hadroproduction",
u"dark energy: potential",
u"pi-: mesic atom",
u"differential geometry: Kaehler",
u"control system: feedback",
u"inflaton: massive",
u"crystal: dielectric",
u"muon+: radiative decay",
u"kinematics: Regge",
u"jet: cluster",
u"clover",
u"sneutrino: NLSP",
u"Higgsino: decay",
u"black hole: density",
u"approximation: Bethe-Heitler",
u"supergravity: Type II",
u"hadron: particle flow",
u"mass formula: Gell-Mann-Okubo",
u"detector: stability",
u"field equations: conformal",
u"hadron: single production",
u"scalar meson: mixing",
u"satellite: orbit",
u"strong interaction: effect",
u"color: multiple production",
u"rho(770)0: hadroproduction",
u"gravitational radiation: linear",
u"strong interaction: model",
u"photon: energy loss",
u"interpretation of experiments: CLAS",
u"solution: fluid",
u"pi: coupling constant",
u"atmosphere: effect",
u"bottom meson: molecule",
u"space-time: dimension: 6",
u"symmetry: Virasoro",
u"Q-ball: formation",
u"microwaves: energy loss",
u"dark energy: ghost",
u"solution: finite energy",
u"polarization: correlation",
u"baryon resonance: radiative decay",
u"charged particle: momentum",
u"K+: production",
u"Upsilon(10355): branching ratio",
u"flux: string",
u"multiplet: gauge",
u"D-brane: 1",
u"K0(S): final state",
u"approximation: higher-order",
u"f1(1420)",
u"constraint: scalar",
u"pi: mass spectrum",
u"plasma: coupling",
u"plane wave: electromagnetic",
u"mirror particle",
u"operator: string",
u"bottom: 2",
u"fast logic",
u"vector boson: heavy",
u"boundary condition: fluctuation",
u"leading particle",
u"USp(2N)",
u"gauge boson: hadronic decay",
u"effect: instanton",
u"pressure: longitudinal",
u"recoil: gravitation",
u"nucleon: coupling",
u"rapidity: dependence",
u"distorted wave impulse approximation: relativistic",
u"organic compounds: fluorine",
u"field theory: pseudoscalar",
u"quarkonium: model",
u"current: renormalization",
u"infrared problem: renormalon",
u"horizon: quantum",
u"strangeness: 2",
u"dimuon: mass",
u"moduli space: deformation",
u"superfluid: chiral",
u"galaxy: luminosity",
u"polarizability: electromagnetic",
u"dark matter: vector",
u"vortex: interaction",
u"extended particle",
u"charge: asymmetry: measured",
u"slepton: lifetime",
u"energy spectrum: recoil",
u"field theory: Lifshitz",
u"synchrotron radiation: energy spectrum",
u"quantum chromodynamics: potential",
u"effect: fluctuation",
u"detector: crystal",
u"algebra: de Sitter",
u"Lambda(1405): hadronic decay",
u"path integral: discrete",
u"expansion: weak field",
u"string: formation",
u"approximation: small-angle",
u"accelerator: upgrade",
u"fermion antifermion",
u"S-matrix: modular",
u"COSY-11",
u"jet: vertex",
u"hypercharge: flux",
u"pseudoscalar meson: multiplet",
u"black ring: solution",
u"Gross-Neveu model: massive",
u"electroweak interaction: sphaleron",
u"space-time: quantization",
u"n: mass",
u"photon: reflection",
u"Lippmann-Schwinger equation: solution",
u"beam: momentum",
u"meson resonance: decay",
u"gravitational radiation: recoil",
u"rho(770): propagator",
u"photon: fragmentation",
u"infrared problem: regularization",
u"horizon: fluctuation",
u"photon: Kaluza-Klein",
u"neutrino p: interaction",
u"multiplicity: angular correlation",
u"photon: momentum spectrum",
u"gauge field theory: SL(2,C)",
u"mixing angle: parametrization",
u"X(3872): quantum number",
u"algebra: SU(1,1)",
u"sigma model: conformal",
u"sfermion: mass spectrum",
u"sum rule: violation",
u"particle source: design",
u"vector meson: multiplet",
u"B0: hadroproduction",
u"gauge field theory: induced",
u"Neutrino Mediterranean Observatory",
u"hadron: leading particle",
u"n nucleus: interaction",
u"cavity: impedance",
u"leptoquark: decay modes",
u"hyperon: photoproduction",
u"superspace: anti-de Sitter",
u"chargino: branching ratio",
u"superfluid: density",
u"Sagnac effect",
u"diquark: bound state",
u"Higgs particle: boosted particle",
u"spinor: nonlinear",
u"beam: deflection",
u"mass: thermal",
u"charmed meson: leptonic decay",
u"quantum electrodynamics: bound state",
u"gamma ray: diffusion",
u"slepton: right-handed",
u"scalar meson: nonet",
u"matter: spectrum",
u"electromagnetic field: fluctuation",
u"jet: recoil",
u"charm: photoproduction",
u"symmetry: SU(3) x U(1)",
u"charmonium: photoproduction",
u"potential: nonlinear",
u"muon: neutrinoproduction",
u"grand unified theory: Yukawa",
u"Lambda(1670)",
u"color: global",
u"pi+ pi-: bound state",
u"temperature: decoupling",
u"p nucleus: coherent interaction",
u"string model: action",
u"ion: storage ring",
u"geometry: classical",
u"pi K: interaction",
u"light nucleus: yield",
u"representation: SU(3)",
u"particle: oscillation",
u"chargino: leptonic decay",
u"charge: violation",
u"pi- nucleon: inclusive reaction",
u"critical phenomena: higher-order",
u"beam profile: transverse",
u"field theory: discrete",
u"eta: decay",
u"symmetry: twist",
u"vector boson: coupling",
u"cross section: scale dependence",
u"JUNO",
u"wave function: correction",
u"coupling: suppression",
u"p n: inclusive reaction",
u"sparticle: decoupling",
u"AdS(2) x S(2)",
u"form factor: Dirac",
u"graviton: two-point function",
u"symmetry breaking: SO(10)",
u"hyperon: star",
u"superstring: closed",
u"charged particle: particle identification",
u"flux tube: confinement",
u"W-: leptonic decay",
u"p p: fusion",
u"quark model: nonlocal",
u"muon: tracking detector",
u"scalar particle: multiplet",
u"dimension: quantum",
u"fermion: delocalization",
u"temperature: scaling",
u"muon+: polarization",
u"structure function: parametrization",
u"meson baryon",
u"operator: dimension: 7",
u"Wino: NLSP",
u"nucleus: size",
u"pseudoscalar meson: branching ratio",
u"pi: scattering",
u"Lambda: binding energy",
u"energy levels: correction",
u"gauge boson: composite",
u"radion: branching ratio",
u"cross section: slope",
u"frequency: time dependence",
u"parton: radiation",
u"scattering amplitude: topological",
u"efficiency: quantum",
u"hadron hadron: exclusive reaction",
u"angular momentum: quantization",
u"neutrino: photoproduction",
u"bottom particle",
u"loop integral: 5",
u"Melvin",
u"leptoquark: production",
u"pi: radiation",
u"scalaron",
u"E(7)",
u"muon: rapidity",
u"electron: elliptic flow",
u"boson: matter",
u"chi/c2(3555): radiative decay",
u"energy: correlation function",
u"nuclide: hadroproduction",
u"Sp(2N)",
u"fermion: many-body problem",
u"category: modular",
u"particle: statistics",
u"astrophysics: density",
u"GL(2)",
u"sine-Gordon equation: solution",
u"density: correlation",
u"D: transverse momentum",
u"freeze-out: surface",
u"geometry: Cartan",
u"Delta: mass",
u"resonance: nonlinear",
u"quantum electrodynamics: model",
u"spectrum: linear",
u"beam: crossing",
u"cross section: difference",
u"resonance: transition",
u"muon- nucleus: nuclear reaction",
u"K0: decay",
u"n: bound state",
u"string: condensation",
u"calorimeter: calibration",
u"radiation: monitoring",
u"expansion: conformal",
u"gravitational radiation detector: proposed",
u"magnetic field: stability",
u"squark: flavor",
u"muon p: deep inelastic scattering",
u"resonance: scattering",
u"tetraquark: mass spectrum",
u"mixing angle: dependence",
u"multiple",
u"neutrino/e: energy spectrum",
u"heavy quark: semileptonic decay",
u"n deuteron: elastic scattering",
u"group: E(8)",
u"model: gas",
u"pi deuteron: scattering length",
u"pi: particle identification",
u"hadron: composite",
u"stau: decay modes",
u"mediation: messenger",
u"gauge field theory: SU(3) x SU(4) x U(1)",
u"quark: four-fermion interaction",
u"black hole: pair production",
u"background field: time dependence",
u"parton: massless",
u"photon: splitting",
u"orbifold: Z(3)",
u"mass: singularity",
u"meson: chiral",
u"neutrino: excited state",
u"soliton: star",
u"field strength: correlation function",
u"renormalization group: Monte Carlo",
u"fluorescence: yield",
u"proposed experiment: sensitivity",
u"chain",
u"quantum mechanics: interference",
u"model: nonminimal",
u"mass: regularization",
u"orbifold: boundary condition",
u"nucleus: spinless",
u"momentum: flux",
u"D/s0*(2317): mass",
u"dark energy: parameter space",
u"Moyal",
u"neutralino: effect",
u"fibre bundle: SU(2)",
u"polarization: spectrum",
u"chain: Toda",
u"B/s: mass",
u"unparticle: interaction",
u"baryon number: 2",
u"fermion: mass ratio",
u"velocity: anisotropy",
u"ground state: entropy",
u"efficiency: energy dependence",
u"boron: hypernucleus",
u"rho(770): intermediate state",
u"bottom: final state",
u"beam damping: feedback",
u"drift chamber: avalanche",
u"Xi/b",
u"symmetry breaking: model",
u"gravitation: charge",
u"kinematics: correlation",
u"K-: branching ratio",
u"neutrino/e: oscillation",
u"structure function: x-dependence",
u"showers: longitudinal",
u"factorization: correction",
u"gauge field theory: SO(5) x U(1)",
u"baryon: bound state",
u"equation of state: finite temperature",
u"transition: frequency",
u"cross section: inelastic scattering",
u"field theory: complex",
u"intermediate boson",
u"Delta(1700)",
u"hadron nucleon: interaction",
u"strangeness: exchange",
u"current: two-point function",
u"electron deuteron: nuclear reaction",
u"asymmetry: transverse",
u"invariance: Yangian",
u"DESY PETRA Stor",
u"lepton hadron: inclusive reaction",
u"parton: splitting",
u"W: multiple production",
u"quintessence: interaction",
u"group: Galois",
u"meson resonance: effect",
u"charmonium: dissociation",
u"vortex: mass",
u"muon-: radiative decay",
u"hadron: fragmentation function",
u"a1(1260): radiative decay",
u"scattering: operator",
u"optics: dispersion",
u"inflaton: interaction",
u"approximation: Bayesian",
u"membrane: deformation",
u"muon: multiple production",
u"category: Calabi-Yau",
u"parton: transport theory",
u"Navarro-Frenk-White profile",
u"effect: leading particle",
u"helicity: transition",
u"potential: symplectic",
u"asymmetry: flavor",
u"nucleon: production",
u"target: mass: correction",
u"synchrotron radiation: spectrum",
u"group: G(2)",
u"symmetry: Z(6)",
u"particle: absorption",
u"f0(980): radiative decay",
u"surface: impedance",
u"correlation function: two-photon",
u"D0 anti-D0: mass difference",
u"star: temperature",
u"dipole: chromomagnetic",
u"gas: target",
u"Zeno effect",
u"differential equations: hydrodynamics",
u"Laplace gauge",
u"showers: cascade",
u"inflation: non-Gaussianity",
u"Newton-Hooke",
u"gluino: Dirac",
u"Xi: hadroproduction",
u"neutron star: magnetic",
u"n: photoproduction",
u"symmetry breaking: anomaly",
u"charge: flavor",
u"n nucleus: radiative capture",
u"radiation: polarization",
u"effective Hamiltonian: light cone",
u"field theoretical model: Regge",
u"central charge: tensor",
u"velocity: perturbation",
u"particle number: conservation law",
u"detector: liquid argon",
u"deformation: geometrical",
u"quark: three-body problem",
u"metal: surface",
u"positron: particle identification",
u"string: superconductivity",
u"Lee-Yang model",
u"metric: Calabi-Yau",
u"electroweak interaction: coupling constant",
u"sparticle: exchange",
u"positronium: hyperfine structure",
u"Skyrmion: energy",
u"momentum spectrum: anisotropy",
u"potential: magnetic",
u"scalar meson: propagator",
u"information management",
u"nucleus: stability",
u"lepton: decay modes",
u"mesic atom: energy levels",
u"current: nonlocal",
u"velocity: fluctuation",
u"fermion: sea",
u"group: SL(2,R)",
u"coupling: impedance",
u"fragmentation function: parametrization",
u"new particle: effect",
u"dispersion relation: energy-momentum",
u"n: radiation",
u"instanton: model",
u"Schroedinger equation: relativistic",
u"transformation: Galilei",
u"K nucleon: scattering length",
u"Galois",
u"bottom meson: hadron spectroscopy",
u"dark matter: flux",
u"effect: decoherence",
u"production: wide-angle",
u"field theory: perturbation theory",
u"correlation function: parametrization",
u"interpretation of experiments: CERN LEAR",
u"star: capture",
u"hadron: width",
u"geometry: Riemann-Cartan",
u"scalar particle: scattering",
u"electron: form factor",
u"electronics: upgrade",
u"anti-p: leading particle",
u"yield: asymmetry",
u"correlation function: thermal",
u"optics: readout",
u"symmetry: Sp(N)",
u"string: oscillation",
u"flow: attractor",
u"magnetic monopole: abelian",
u"diffeomorphism: symmetry breaking",
u"meson: binding energy",
u"dihadron: fragmentation function",
u"fluid: rotation",
u"cosmon",
u"dark matter: mirror",
u"nuclear properties: effect",
u"negative particle: secondary beam",
u"mass: momentum dependence",
u"lepton: rapidity spectrum",
u"superconductivity: solenoid",
u"dark matter: decoupling",
u"pi pi: final-state interaction",
u"gauge boson: neutral particle",
u"data analysis method: error",
u"electron electron: colliding beams",
u"pomeron: propagator",
u"neutral particle: massive",
u"radiation: accretion",
u"radio wave: detector",
u"symmetry breaking: left-right",
u"isospin: quantum molecular dynamics",
u"control system: time",
u"Higgs particle: topcolor",
u"heavy lepton: decay",
u"dilepton: angular distribution",
u"power spectrum: linear",
u"hadron: density",
u"hair: quantum",
u"wave: velocity",
u"neutron star: model",
u"a0(1450)",
u"magnetic monopole: interaction",
u"boundary condition: cylinder",
u"dark matter: recoil",
u"spin: correction",
u"quarkonium: scalar",
u"neutralino: final state",
u"space: Spin(7)",
u"galaxy: spatial distribution",
u"EPRL model",
u"electron: momentum spectrum",
u"Delta(1232): neutrinoproduction",
u"gauge field theory: O(N)",
u"Y(4260): hadronic decay",
u"correction: off-shell",
u"string: perturbation",
u"membrane model: asymmetry",
u"density matrix: deformation",
u"mass: temperature dependence",
u"boson: spinless",
u"density: time dependence",
u"xenon: nuclide",
u"sliver",
u"spatial resolution: vertex",
u"temperature: spatial distribution",
u"xenon: organic compounds",
u"neutralino: cascade decay",
u"matrix model: supersymmetry",
u"top: photoproduction",
u"mass: factorization",
u"COSY-TOF",
u"D/s0*(2317): hadronic decay",
u"quark antiquark: mass spectrum",
u"CMS: upgrade",
u"graviton: wave function",
u"calorimeter: upgrade",
u"messenger: coupling",
u"gauge field theory: SU(4) x U(1)",
u"photon: decoupling",
u"mass: parametrization",
u"hadron: string",
u"viscosity: temperature dependence",
u"bottomonium: width",
u"K0: condensation",
u"black brane: thermodynamics",
u"neutron star: size",
u"W': leptonic decay",
u"K nucleon: potential",
u"graviton: electroproduction",
u"vortex: fractional",
u"gravitation: field theory",
u"neutrino: relic density",
u"spin: vector",
u"Jona-Lasinio-Nambu model: local",
u"neutrino: transport theory",
u"hadron: bound state",
u"electron: energy: low",
u"scale: interaction",
u"K-: semileptonic decay",
u"photon: model",
u"effect: flavor",
u"matrix model: chiral",
u"symmetry breaking: discrete",
u"tau-: pair production",
u"sum rule: Laplace",
u"nucleon nucleon: phase shift",
u"Lee model",
u"oscillator: wave function",
u"plasma: deconfinement",
u"K+: yield",
u"matrix model: action",
u"electron: tracks",
u"fermion: mass difference",
u"pi+: radiative decay",
u"glueball: model",
u"fermion antifermion: bound state",
u"decay constant: pseudoscalar",
u"pseudoscalar meson: associated production",
u"plasma: droplet",
u"photon: jet",
u"p nucleon: interaction",
u"K: transverse momentum",
u"Lambda: neutrinoproduction",
u"baryonium",
u"model: collapse",
u"fermion: liquid",
u"total cross section: momentum dependence",
u"synchrotron radiation: flux",
u"jet: momentum spectrum",
u"density: spatial distribution",
u"X(3872): hadroproduction",
u"Lambda: production",
u"neutron star: formation",
u"star: magnetic",
u"eta/b: radiative decay",
u"hadron: leptoproduction",
u"top: electric moment",
u"muon: spin",
u"photon: elliptic flow",
u"Kaluza-Klein model: monopole",
u"neutral particle: pair production",
u"meson: isoscalar",
u"effect: density",
u"K*(892): width",
u"sphaleron: energy",
u"fluid: sphere",
u"noise: stochastic",
u"R symmetry: violation",
u"baryon resonance: decay modes",
u"meson: current",
u"quarkonium: strangeness",
u"magnetization: density",
u"psi(4040)",
u"solution: nonlinear",
u"thermodynamics: relativistic",
u"meson: mass formula",
u"model: meson dominance",
u"statistics: Bose-Einstein",
u"boson: fluctuation",
u"differential equations: diffusion",
u"force: scalar",
u"supergravity: boundary condition",
u"quantum electrodynamics: vacuum polarization",
u"perturbation: anisotropy",
u"D: radiative decay",
u"psi mesons: branching ratio",
u"stochastic cooling",
u"interpretation of experiments: parametrization",
u"programming: interface",
u"electron nucleon: scattering",
u"phase space: trajectory",
u"nucleon nucleon: exclusive reaction",
u"tracks: energy loss",
u"Knizhnik-Zamolodchikov equation: solution",
u"domain wall: production",
u"transition: electric",
u"trigger: hardware",
u"electric field: induced",
u"approximation: collinear",
u"energy: current",
u"supersymmetry: D-term",
u"supergravity: deformation",
u"neutrino: relativistic",
u"asymptotic behavior: anti-de Sitter",
u"gravitational radiation: multipole",
u"D0 anti-D0: mixing angle",
u"multiquark: interaction",
u"gravitino: heavy",
u"vacuum system: impedance",
u"fermion: sterile",
u"flavor: interference",
u"effect: local",
u"WIMP: detector",
u"weak interaction: correction",
u"Higgsino: pair production",
u"Regge: trajectory",
u"B: decay rate",
u"plasma: production",
u"quarkonium: absorption",
u"gravitational radiation detector: performance",
u"resonance: saturation",
u"membrane: dielectric",
u"K*(892): exchange",
u"bottom meson: lifetime",
u"baryon: radiative decay",
u"gauge field theory: translation",
u"gluon: form factor",
u"AdS(3) x S(3) x T(4)",
u"flux tube: model",
u"partial wave: unitarity",
u"embedding: symplectic",
u"scalar meson: branching ratio",
u"string: twist",
u"field theory: vacuum state",
u"spectrum: time",
u"ghost: interaction",
u"field theory: nonrenormalizable",
u"K K",
u"psi(4160)",
u"black brane: charge",
u"pulsar: mass",
u"cross section: transverse momentum",
u"correlation function: factorization",
u"resonance: mass spectrum",
u"photoelectron: avalanche",
u"correlation function: two-point function",
u"galaxy: magnetic field",
u"eta/b: width",
u"pi nucleon",
u"membrane: de Sitter",
u"black hole: Horava-Lifshitz",
u"correlation function: chiral",
u"scalar meson: decay constant",
u"storage ring: beam damping",
u"cross section: geometrical",
u"channel cross section: enhancement",
u"Upsilon(10020): branching ratio",
u"fermion: sextet",
u"field theory: quantization",
u"graviton: resonance",
u"Delta(1600)",
u"nucleon: spin: transverse",
u"superstring: scattering amplitude",
u"radion: hadroproduction",
u"photon deuteron: coherent interaction",
u"nucleon: correlation",
u"four-pi-detector",
u"Hecke",
u"scattering: wide-angle",
u"helium: antinucleus",
u"rapidity: high",
u"scalar meson: hadroproduction",
u"magnet: technology",
u"W: single production",
u"quark gluon: model",
u"field equations: semiclassical",
u"expansion: background",
u"surface: density",
u"diffraction: effect",
u"Higgs particle: colored particle",
u"background: induced",
u"space: noncompact",
u"Lambda/c+: semileptonic decay",
u"charge: surface",
u"eta: exchange",
u"bino: NLSP",
u"ALPHA",
u"neutrino: electric moment",
u"gauge: longitudinal",
u"pi+: semileptonic decay",
u"gravitation: coupling constant",
u"nucleus: production",
u"model: soliton",
u"moduli space: approximation",
u"symmetry: affine",
]
| 27.739618
| 77
| 0.630072
|
bcfcf5fed290e0b8a57424c35bbc94da80ab0d00
| 4,514
|
py
|
Python
|
WeBlog/config.py
|
imrocky1976/web
|
3af7fbd82af8b4d2737672779d5742825b94a6fe
|
[
"MIT"
] | null | null | null |
WeBlog/config.py
|
imrocky1976/web
|
3af7fbd82af8b4d2737672779d5742825b94a6fe
|
[
"MIT"
] | 4
|
2020-03-24T17:13:37.000Z
|
2022-03-08T21:09:48.000Z
|
WeBlog/config.py
|
imrocky1976/web
|
3af7fbd82af8b4d2737672779d5742825b94a6fe
|
[
"MIT"
] | null | null | null |
# -*- coding:utf-8 -*-
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
# python -c 'import os; print(os.urandom(16))'
SECRET_KEY = os.environ.get('SECRET_KEY') or b'\xd4Y\x0bJ\x9e\x9fx+\x9bh\xba\x9a=\xee,\x08'
MAIL_SERVER = os.environ.get('MAIL_SERVER', 'smtp.126.com')
MAIL_PORT = int(os.environ.get('MAIL_PORT', '25'))
MAIL_USE_TLS = os.environ.get('MAIL_USE_TLS', 'true').lower() in \
['true', 'on', '1']
MAIL_USERNAME = os.environ.get('MAIL_USERNAME')
MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD')
WEBLOG_MAIL_SUBJECT_PREFIX = '[WeBlog]'
WEBLOG_MAIL_SENDER = 'WeBlog Admin <%s>' % MAIL_USERNAME
WEBLOG_ADMIN = os.environ.get('WEBLOG_ADMIN') or MAIL_USERNAME
SQLALCHEMY_TRACK_MODIFICATIONS = False
WEBLOG_POSTS_PER_PAGE = 30
WEBLOG_FOLLOWERS_PER_PAGE = 50
WEBLOG_COMMENTS_PER_PAGE = 30
WEBLOG_SLOW_DB_QUERY_TIME = 0.5 # second
SQLALCHEMY_RECORD_QUERIES = True
SSL_DISABLE = True
@staticmethod
def init_app(app):
pass
@classmethod
def print_config(cls, app):
app.logger.info('Use config: %s' % cls.__name__)
for attr_name in dir(cls):
if (attr_name[0:2] != '__'):
attr = getattr(cls, attr_name)
app.logger.info("%s = %s" % (attr_name, attr))
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.environ.get('DEV_DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data-dev.sqlite')
@classmethod
def init_app(cls, app):
Config.init_app(app)
# log errors to file
import logging
from logging.handlers import RotatingFileHandler
import os
basedir = os.path.abspath(os.path.dirname(__file__))
logdir = os.path.join(basedir, 'tmp/log')
if not os.path.isdir(logdir):
os.mkdir(logdir)
log_file_path = os.path.join(logdir, 'weblog.log')
file_handler = RotatingFileHandler(log_file_path, maxBytes=1024*1024*10, backupCount=3, encoding='utf-8')
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(logging.Formatter('%(asctime)s - %(pathname)s[line:%(lineno)d] - %(levelname)s: %(message)s'))
app.logger.addHandler(file_handler)
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL') or \
'sqlite://'
WTF_CSRF_ENABLED = False
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data.sqlite')
@classmethod
def init_app(cls, app):
Config.init_app(app)
# email errors to the administrators
import logging
from logging.handlers import SMTPHandler
credentials = None
secure = None
if getattr(cls, 'MAIL_USERNAME', None) is not None:
credentials = (cls.MAIL_USERNAME, cls.MAIL_PASSWORD)
if getattr(cls, 'MAIL_USE_TLS', None):
secure = ()
mail_handler = SMTPHandler(
mailhost=(cls.MAIL_SERVER, cls.MAIL_PORT),
fromaddr=cls.WEBLOG_MAIL_SENDER,
toaddrs=[cls.WEBLOG_ADMIN],
subject=cls.WEBLOG_MAIL_SUBJECT_PREFIX + ' Application Error',
credentials=credentials,
secure=secure)
mail_handler.setLevel(logging.ERROR)
mail_handler.setFormatter(logging.Formatter('%(asctime)s - %(pathname)s[line:%(lineno)d] - %(levelname)s: %(message)s'))
app.logger.addHandler(mail_handler)
class HerokuConfig(ProductionConfig):
SSL_DISABLE = bool(os.environ.get('SSL_DISABLE'))
@classmethod
def init_app(cls, app):
ProductionConfig.init_app(app)
# log to stderr
import logging
from logging import StreamHandler
app.logger.setLevel(logging.INFO)
#file_handler = StreamHandler()
#file_handler.setFormatter(logging.Formatter('%(asctime)s - %(pathname)s[line:%(lineno)d] - %(levelname)s: %(message)s'))
#file_handler.setLevel(logging.INFO)
#app.logger.addHandler(file_handler)
# handle reverse proxy server headers
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'heroku': HerokuConfig,
'default': DevelopmentConfig
}
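# --- Illustrative usage (a minimal sketch, not part of the original module).
# The `config` mapping above is typically consumed by a Flask application
# factory that looks a configuration class up by name. `create_app` below is a
# hypothetical helper (it assumes Flask is installed), not an export of this
# project.
def create_app(config_name='default'):
    from flask import Flask  # local import keeps the sketch self-contained
    app = Flask(__name__)
    app.config.from_object(config[config_name])
    config[config_name].init_app(app)
    return app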
| 35.265625
| 129
| 0.650421
|
34ac813e359d4f54d196f6b16a7e29929b0e4687
| 3,162
|
py
|
Python
|
grr/tools/export_plugins/collection_plugin_test.py
|
mikecb/grr
|
52fdd977729af2a09a147301c55b8b7f1eccfa67
|
[
"Apache-2.0"
] | 2
|
2019-06-02T13:11:16.000Z
|
2019-06-25T13:30:46.000Z
|
grr/tools/export_plugins/collection_plugin_test.py
|
mikecb/grr
|
52fdd977729af2a09a147301c55b8b7f1eccfa67
|
[
"Apache-2.0"
] | null | null | null |
grr/tools/export_plugins/collection_plugin_test.py
|
mikecb/grr
|
52fdd977729af2a09a147301c55b8b7f1eccfa67
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""Tests for the collection export tool plugin."""
import argparse
import mock
from grr.lib import access_control
from grr.lib import config_lib
from grr.lib import data_store
from grr.lib import email_alerts
from grr.lib import flags
from grr.lib import rdfvalue
from grr.lib import test_lib
from grr.lib import utils
from grr.lib.hunts import results
from grr.lib.output_plugins import email_plugin
from grr.lib.rdfvalues import client as rdf_client
from grr.lib.rdfvalues import flows as rdf_flows
from grr.lib.rdfvalues import paths as rdf_paths
from grr.tools.export_plugins import collection_plugin
class CollectionExportPluginTest(test_lib.GRRBaseTest):
def setUp(self):
super(CollectionExportPluginTest, self).setUp()
client_ids = self.SetupClients(1)
self.client_id = client_ids[0]
data_store.default_token = access_control.ACLToken(
username="user", reason="reason")
def testGetValuesForExportHuntResultCollection(self):
fd = results.HuntResultCollection("aff4:/huntcoll", token=self.token)
fd.Add(
rdf_flows.GrrMessage(
payload=rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
path="testfile", pathtype="OS")),
source=self.client_id))
plugin = collection_plugin.CollectionExportPlugin()
mock_args = mock.Mock()
mock_args.path = rdfvalue.RDFURN("aff4:/huntcoll")
mock_args.no_legacy_warning_pause = True
self.assertEqual(len(plugin.GetValuesForExport(mock_args)), 1)
def testExportCollectionWithEmailPlugin(self):
# Create a collection with URNs to some files.
fd = results.HuntResultCollection("aff4:/testcoll", token=self.token)
fd.Add(
rdf_flows.GrrMessage(
payload=rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
path="testfile", pathtype="OS")),
source=self.client_id))
plugin = collection_plugin.CollectionExportPlugin()
parser = argparse.ArgumentParser()
plugin.ConfigureArgParser(parser)
def SendEmail(address, sender, title, message, **_):
self.email_messages.append(
dict(address=address, sender=sender, title=title, message=message))
email_address = "notify@%s" % config_lib.CONFIG["Logging.domain"]
with utils.Stubber(email_alerts.EMAIL_ALERTER, "SendEmail", SendEmail):
self.email_messages = []
plugin.Run(
parser.parse_args(args=[
"--no_legacy_warning_pause",
"--path",
"aff4:/testcoll",
email_plugin.EmailOutputPlugin.name,
"--email_address",
email_address,
"--emails_limit",
"100",
]))
self.assertEqual(len(self.email_messages), 1)
for msg in self.email_messages:
self.assertEqual(msg["address"], email_address)
self.assertEqual("GRR got a new result in aff4:/testcoll.", msg["title"])
self.assertTrue(
"GRR got a new result in aff4:/testcoll" in msg["message"])
self.assertTrue("(Host-0)" in msg["message"])
def main(argv):
test_lib.main(argv)
if __name__ == "__main__":
flags.StartMain(main)
| 32.265306
| 79
| 0.692916
|
ca8899da5adb151d5164f78c77126014d6a2a524
| 1,477
|
py
|
Python
|
operant/base.py
|
williamhogman/operant
|
69fd07b5e4300ed27458b133acec5df186246172
|
[
"BSD-2-Clause"
] | 1
|
2015-01-24T10:54:47.000Z
|
2015-01-24T10:54:47.000Z
|
operant/base.py
|
williamhogman/operant
|
69fd07b5e4300ed27458b133acec5df186246172
|
[
"BSD-2-Clause"
] | null | null | null |
operant/base.py
|
williamhogman/operant
|
69fd07b5e4300ed27458b133acec5df186246172
|
[
"BSD-2-Clause"
] | null | null | null |
""" Bases shared by the different gamification components"""
from six import string_types
class Registry(object):
"""Class providing a collection of named objects.
    Components use it to register objects under
    identifiers, and it provides smart registration functions."""
def __init__(self, kind, name_prop):
self._kind = kind
self._name_prop = name_prop
self._spec = list()
self._classes = dict()
def _np_of(self, i):
return getattr(i, self._name_prop)
def set_handler(self, tp, fn):
""" Setup a special handler for special types"""
self._spec.append((tp, fn))
def set_str_handler(self, fn):
""" Setup a special handler for strings"""
self.set_handler(string_types, fn)
def _parse_with_handlers(self, obj):
fns = [fn for tps, fn in self._spec if isinstance(obj, tps)]
out = obj
for fn in fns:
out = fn(obj)
return out
def register(self, toreg):
"""Registers an object to the registry"""
toreg = self._parse_with_handlers(toreg)
if self._np_of(toreg) in self._classes:
raise RuntimeError("A {0} with the id {1} "
"has already been registered"
.format(self._kind, self._np_of(toreg)))
self._classes[self._np_of(toreg)] = toreg
def get(self, name):
return self._classes.get(name, None)
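# A minimal usage sketch (illustrative, not part of the original module). The
# `Badge` class and the `badges` registry below are hypothetical examples of
# how a component might combine Registry with a string handler.
if __name__ == "__main__":
    class Badge(object):
        def __init__(self, badge_id):
            self.badge_id = badge_id
    badges = Registry("badge", "badge_id")
    badges.set_str_handler(Badge)  # bare strings get wrapped in Badge objects
    badges.register("first-post")
    print(badges.get("first-post").badge_id)  # -> first-post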
| 32.108696
| 71
| 0.608666
|
160d793e374b8491921c490b78cbe3656f28f1f5
| 3,881
|
py
|
Python
|
ThreeBotPackages/threebot/mail/actors/mail.py
|
Pishoy/jumpscaleX_threebot
|
781e839857fecfa601a31d98d86d304e3a6b3b4e
|
[
"Apache-2.0"
] | null | null | null |
ThreeBotPackages/threebot/mail/actors/mail.py
|
Pishoy/jumpscaleX_threebot
|
781e839857fecfa601a31d98d86d304e3a6b3b4e
|
[
"Apache-2.0"
] | null | null | null |
ThreeBotPackages/threebot/mail/actors/mail.py
|
Pishoy/jumpscaleX_threebot
|
781e839857fecfa601a31d98d86d304e3a6b3b4e
|
[
"Apache-2.0"
] | 1
|
2019-10-28T08:16:11.000Z
|
2019-10-28T08:16:11.000Z
|
from Jumpscale import j
import binascii
from io import BytesIO
import json
import os
from JumpscaleLibs.servers.mail.smtp import app
from JumpscaleLibs.servers.mail.imap.bcdbmailbox import BCDBMailboxdir
class mail(j.baseclasses.threebot_actor):
def _init(self, **kwargs):
models = j.servers.imap.get_models()
self.bcdb_mailbox = BCDBMailboxdir(models)
def send(self, mail, schema_out=None, user_session=None):
"""
```in
mail = (O) !email.message.1
```
```out
success = (B)
```
"""
server = app.MailServer()
mail_stored = server.store_mail(mail._ddict, is_send=True)
self.bcdb_mailbox.create_folder(mail_stored.folder)
out = schema_out.new()
out.success = True
return out
def list(self, date_from=None, date_to=None, user_session=None):
"""
```in
date_from = (D)
date_to = (D)
```
"""
if date_from and date_to:
date_from = j.data.types.date.clean(date_from)
date_to = j.data.types.date.clean(date_to)
query = "WHERE date BETWEEN {} and {}".format(date_from, date_to)
mails = self.bcdb_mailbox.get_messages(query).fetchall()
return json.dumps([self.bcdb_mailbox.get_object(o[0])._ddict for o in mails])
mails = self.bcdb_mailbox.get_messages()
return json.dumps([o._ddict for o in mails])
def list_folders(self, user_session=None):
"""
"""
folders = self.bcdb_mailbox.list_folders()
return folders
def create_folder(self, name, schema_out=None, user_session=None):
"""
```in
name = (S)
```
```out
success = (B)
```
"""
self.bcdb_mailbox.create_folder(name)
out = schema_out.new()
out.success = True
return out
def update_folder_name(self, old_name, new_name, schema_out=None, user_session=None):
"""
```in
old_name = (S)
new_name = (S)
```
```out
success = (B)
```
"""
self.bcdb_mailbox.rename_folder(old_name, new_name)
out = schema_out.new()
out.success = True
return out
def move_message(self, mail_id, folder_name, schema_out=None, user_session=None):
"""
```in
mail_id = (I)
folder_name = (S)
```
```out
success = (B)
```
"""
model = self.bcdb_mailbox.get_object(mail_id)
model.folder = folder_name
model.save()
out = schema_out.new()
out.success = True
return out
def delete(self, mail_id, schema_out=None, user_session=None):
"""
```in
mail_id = (I)
```
```out
success = (B)
```
"""
self.bcdb_mailbox.remove(mail_id)
out = schema_out.new()
out.success = True
return out
def update_priority(self, mail_id, priority, schema_out=None, user_session=None):
"""
```in
mail_id = (I)
priority = (B)
```
```out
success = (B)
```
"""
model = self.bcdb_mailbox.get_object(mail_id)
model.priority = priority
model.save()
out = schema_out.new()
out.success = True
return out
def receive(self, mail, schema_out=None, user_session=None):
"""
```in
mail = (O) !email.message.1
```
```out
success = (B)
```
"""
server = app.MailServer()
mail_stored = server.store_mail(mail._ddict)
self.bcdb_mailbox.create_folder(mail_stored.folder)
out = schema_out.new()
out.success = True
return out
| 25.03871
| 89
| 0.536975
|
5b409b12bfbe543323196bdd201be74daf8bc0da
| 2,560
|
py
|
Python
|
src/gamesbyexample/rockpaperscissors.py
|
spp2/PythonStdioGames
|
7edc6a07ef816a44579800e773f30217541971fa
|
[
"MIT"
] | null | null | null |
src/gamesbyexample/rockpaperscissors.py
|
spp2/PythonStdioGames
|
7edc6a07ef816a44579800e773f30217541971fa
|
[
"MIT"
] | null | null | null |
src/gamesbyexample/rockpaperscissors.py
|
spp2/PythonStdioGames
|
7edc6a07ef816a44579800e773f30217541971fa
|
[
"MIT"
] | null | null | null |
"""Rock, Paper, Scissors, by Al Sweigart al@inventwithpython.com
The classic hand game of luck.
This and other games are available at https://nostarch.com/XX
Tags: short, game"""
__version__ = 0
import random, time, sys
print('''Rock, Paper, Scissors, by Al Sweigart al@inventwithpython.com
- Rock beats scissors.
- Paper beats rock.
- Scissors beats paper.
''')
# These variables keep track of the number of wins, losses, and ties.
wins = 0
losses = 0
ties = 0
while True: # Main game loop.
while True: # Keep asking until player enters R, P, S, or Q.
print('{} Wins, {} Losses, {} Ties'.format(wins, losses, ties))
print('Enter your move: (R)ock (P)aper (S)cissors or (Q)uit')
playerMove = input('> ').upper()
if playerMove == 'Q':
sys.exit()
if playerMove == 'R' or playerMove == 'P' or playerMove == 'S':
break
else:
print('Type one of R, P, S, or Q.')
# Display what the player chose:
if playerMove == 'R':
print('ROCK versus...')
playerMove = 'ROCK'
elif playerMove == 'P':
print('PAPER versus...')
playerMove = 'PAPER'
elif playerMove == 'S':
print('SCISSORS versus...')
playerMove = 'SCISSORS'
# Count to three with dramatic pauses:
time.sleep(0.5)
print('1...')
time.sleep(0.25)
print('2...')
time.sleep(0.25)
print('3...')
time.sleep(0.25)
# Display what the computer chose:
randomNumber = random.randint(1, 3)
if randomNumber == 1:
computerMove = 'ROCK'
elif randomNumber == 2:
computerMove = 'PAPER'
elif randomNumber == 3:
computerMove = 'SCISSORS'
print(computerMove)
time.sleep(0.5)
# Display and record the win/loss/tie:
if playerMove == computerMove:
print('It\'s a tie!')
ties = ties + 1
elif playerMove == 'ROCK' and computerMove == 'SCISSORS':
print('You win!')
wins = wins + 1
elif playerMove == 'PAPER' and computerMove == 'ROCK':
print('You win!')
wins = wins + 1
elif playerMove == 'SCISSORS' and computerMove == 'PAPER':
print('You win!')
wins = wins + 1
elif playerMove == 'ROCK' and computerMove == 'PAPER':
print('You lose!')
losses = losses + 1
elif playerMove == 'PAPER' and computerMove == 'SCISSORS':
print('You lose!')
losses = losses + 1
elif playerMove == 'SCISSORS' and computerMove == 'ROCK':
print('You lose!')
losses = losses + 1
| 30.117647
| 71
| 0.58125
|
74d28a79a462e9eaa7eac1ce3fdd58f8efb653e6
| 382
|
py
|
Python
|
clientmanagement/utilities.py
|
isstek/clientmanagement
|
26bd6bbd974f24211dd0ae4b1c75ee8e4b150767
|
[
"MIT"
] | null | null | null |
clientmanagement/utilities.py
|
isstek/clientmanagement
|
26bd6bbd974f24211dd0ae4b1c75ee8e4b150767
|
[
"MIT"
] | 11
|
2019-05-02T20:10:16.000Z
|
2022-02-10T07:10:25.000Z
|
clientmanagement/utilities.py
|
isstek/clientmanagement
|
26bd6bbd974f24211dd0ae4b1c75ee8e4b150767
|
[
"MIT"
] | 2
|
2020-11-04T03:05:23.000Z
|
2020-11-05T08:14:14.000Z
|
def humanize_bytes(bytes, precision=1):
abbrevs = (
(1<<50, 'PB'),
(1<<40, 'TB'),
(1<<30, 'GB'),
(1<<20, 'MB'),
(1<<10, 'kB'),
(1, 'bytes')
)
if bytes == 1:
return '1 byte'
for factor, suffix in abbrevs:
if bytes >= factor:
break
return '%.*f %s' % (precision, bytes / factor, suffix)
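# Quick sanity checks (illustrative, not part of the original module). The
# thresholds are binary, i.e. 1 kB == 1024 bytes here, so 1536 bytes comes out
# as 1.50 kB.
if __name__ == "__main__":
    assert humanize_bytes(1) == '1 byte'
    assert humanize_bytes(1024) == '1.0 kB'
    assert humanize_bytes(1536, precision=2) == '1.50 kB'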
| 22.470588
| 58
| 0.431937
|
e1dd65d679c6a83fd8851d877406a0b20ec6f6fe
| 13,049
|
py
|
Python
|
src/models/regression.py
|
oxanozaep/Restaurant_visitors_forecasting
|
987dfa3c79da060e19bea6a79d991179c15e7d74
|
[
"MIT"
] | null | null | null |
src/models/regression.py
|
oxanozaep/Restaurant_visitors_forecasting
|
987dfa3c79da060e19bea6a79d991179c15e7d74
|
[
"MIT"
] | null | null | null |
src/models/regression.py
|
oxanozaep/Restaurant_visitors_forecasting
|
987dfa3c79da060e19bea6a79d991179c15e7d74
|
[
"MIT"
] | null | null | null |
import warnings
warnings.filterwarnings('ignore')
import pandas as pd
import numpy as np
import pickle
import statsmodels.formula.api as smf
import statsmodels.api as sm
from statsmodels.tsa.seasonal import seasonal_decompose
import seaborn as sns
import pandas_profiling
import datetime
import sqlite3
import calendar
import matplotlib.pyplot as plt
import matplotlib
matplotlib.use('TkAgg')
# Definition of the RMSLE metric (root mean squared logarithmic error) used to score how well each model fits.
def RMSLE(predicted, actual):
msle = (np.log(predicted+1) - np.log(actual+1))**2
rmsle = np.sqrt(msle.sum()/msle.count())
return rmsle
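# Worked example (illustrative, not part of the original script): a perfect
# prediction yields an RMSLE of exactly 0, and because of the logs the metric
# penalizes relative rather than absolute errors.
if __name__ == "__main__":
    _actual = pd.Series([10.0, 100.0, 1000.0])
    assert RMSLE(_actual, _actual) == 0.0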
def save_model(obj, name):
with open('../models/'+ name + '.pkl', 'wb') as f:
pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL)
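# Forward stepwise selection helpers: forward() tries adding each remaining
# predictor to the current set, fits one OLS model per candidate via
# processSubset(), and returns the candidate with the lowest residual sum of
# squares (RSS).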
def forward(predictors, X, y):
remaining_predictors = [p for p in X.columns if p not in predictors]
results = []
for p in remaining_predictors:
results.append(processSubset(predictors + [p], X, y))
models = pd.DataFrame(results)
print("Processed ", models.shape[0], "models on", len(predictors)+1, "predictors.")
return models.loc[models['RSS'].argmin()]
def processSubset(feature_set, X, y):
model = sm.OLS(y, X[list(feature_set)])
regr = model.fit()
RSS = ((regr.predict(X[list(feature_set)]) - y) ** 2).sum()
return {"model":regr, "RSS":RSS}
def regression(data_train):
data_train = pd.get_dummies(data_train, columns=['genre','dow'])
    # We'll use the log of the visitors to get a more robust mean.
model_mean_pred = data_train.log_visitors.mean()
# And we'll store this value in the dataframe
data_train['visitors_mean'] = np.exp(model_mean_pred)
data_train.loc[:, ['visitors','visitors_mean']].plot(color=['#bbbbbb','r'], figsize=(16,8));
model_mean_RMSLE = RMSLE(data_train.visitors_mean, data_train.visitors)
results_df = pd.DataFrame(columns=["Model", "RMSLE"])
results_df.loc[0,"Model"] = "Mean"
results_df.loc[0,"RMSLE"] = model_mean_RMSLE
    # Let's now see how much the model improves if we always predict the mean number of visitors of the restaurant being predicted.
data_train = pd.merge(data_train, data_train[['air_store_id','visitors']].groupby(['air_store_id'], as_index=False).mean(), on='air_store_id', how='left')
data_train=data_train.rename(columns = {'visitors_y':'visitors_rest_mean','visitors_x':'visitors'})
model_mean_rest_RMSLE = RMSLE(data_train.visitors_rest_mean, data_train.visitors)
results_df.loc[1,"Model"] = "Mean_by_rest"
results_df.loc[1,"RMSLE"] = model_mean_rest_RMSLE
    # Let's start creating models with linear and polynomial regression, beginning with a multiple linear regression that has one regressor for each variable in the data.
model = sm.OLS.from_formula('visitors ~ ' + '+'.join(data_train.columns.difference(['visitors', 'log_visitors', 'air_store_id','visitors_mean'])), data_train)
result = model.fit()
print(result.summary())
data_train["linear_regr"] = result.predict()
model_lin_RMSLE = RMSLE(data_train.linear_regr, data_train.visitors)
results_df.loc[2,"Model"] = "Multiple linear regressors"
results_df.loc[2,"RMSLE"] = model_lin_RMSLE
    # We'll now try a sort of random-walk model: take the restaurant's visitor count from the previous occurrence of the same day of the week, which should capture per-restaurant weekly seasonality. For that, we'll create 7 new columns containing the previous same-day-of-week visitor counts, and then collapse them into a single "past_dow_visitors" column holding the appropriate number for each row's specific day.
dows = ['Monday','Tuesday','Wednesday','Thursday','Friday','Saturday','Sunday']
for dow in dows:
data_train['past_'+dow]= 0
data_train.sort_values(by=['air_store_id','visit_year','visit_month','visit_day'], ascending=[True,True,True,True], inplace=True)
data_train['store_change'] = (data_train.air_store_id!=data_train.air_store_id.shift())
data_train['past_dow_visitors'] = data_train['visitors_rest_mean']
data_train.reset_index(drop=True, inplace=True)
    for index, row in data_train.iterrows():
        if not row.store_change:
            for dow in dows:
                if data_train.iloc[index-1, data_train.columns.get_loc('dow_'+dow)]:
                    data_train.at[index, 'past_'+dow] = data_train.iloc[index-1, data_train.columns.get_loc('visitors')]
                else:
                    data_train.at[index, 'past_'+dow] = data_train.iloc[index-1, data_train.columns.get_loc('past_'+dow)]
    for index, row in data_train.iterrows():
        for dow in dows:
            if row['dow_'+dow] and row['past_'+dow] > 0:
                data_train.at[index, 'past_dow_visitors'] = row['past_'+dow]  # .at replaces the removed set_value API
for dow in dows:
data_train.drop(['past_'+dow], axis=1, inplace=True)
# The "random walk" model will include this new variable and the two other most powerful ones, the reserve visitors and wether if it's a holiday or not. We'll also include the intercept between the variables this time.
model = sm.OLS.from_formula('visitors ~ past_dow_visitors * reserve_visitors * holiday_flg',data_train)
result = model.fit()
print(result.summary())
    # This time every variable has strong predictive power; the newly created past day-of-week visitors column has the highest t statistic (>100).
model_pred = result.predict()
data_train['past_dow_predict'] = model_pred
model_past_dow_RMSLE = RMSLE(data_train.past_dow_predict, data_train.visitors)
results_df.loc[3,"Model"] = "Past_DoW"
results_df.loc[3,"RMSLE"] = model_past_dow_RMSLE
# Residuals:
s_residuals = pd.Series(result.resid_pearson, name="S. Residuals")
fitted_values = pd.Series(result.fittedvalues, name="Fitted Values")
    sns.regplot(x=fitted_values, y=s_residuals, fit_reg=False)
models = pd.DataFrame(columns=["RSS", "model"])
predictors = []
y=data_train.visitors
X = data_train[['visit_year', 'visit_month', 'visit_day', 'reserve_visitors','holiday_flg','latitude','longitude', 'dow_Friday','dow_Monday','dow_Tuesday','dow_Wednesday','dow_Thursday','dow_Saturday','dow_Sunday', 'visitors_rest_mean','past_dow_visitors']].astype('float64')
for i in range(1, len(X.columns) + 1):
models.loc[i] = forward(predictors, X, y)
predictors = models.loc[i]["model"].model.exog_names
    # Let's inspect the R-squared for the best model at each number of predictors.
    print(models.apply(lambda row: row[1].rsquared, axis=1))
# Let's show some graphs to see how these models compare to each other.
plt.figure(figsize=(20,10))
plt.rcParams.update({'font.size': 18, 'lines.markersize': 10})
plt.subplot(4, 1, 1)
plt.plot(models["RSS"])
plt.xlabel('# Predictors')
plt.ylabel('RSS')
rsquared_adj = models.apply(lambda row: row[1].rsquared_adj, axis=1)
plt.subplot(4, 1, 2)
plt.plot(rsquared_adj)
    plt.plot(rsquared_adj.idxmax(), rsquared_adj.max(), "ob")
plt.xlabel('# Predictors')
plt.ylabel('adjusted rsquared')
aic = models.apply(lambda row: row[1].aic, axis=1)
plt.subplot(4, 1, 3)
plt.plot(aic)
    plt.plot(aic.idxmin(), aic.min(), "ob")
plt.xlabel('# Predictors')
plt.ylabel('AIC')
bic = models.apply(lambda row: row[1].bic, axis=1)
plt.subplot(4, 1, 4)
plt.plot(bic)
    plt.plot(bic.idxmin(), bic.min(), "ob")
plt.xlabel('# Predictors')
plt.ylabel('BIC')
    # We'll choose the model with 8 predictors to keep things simple, since beyond this point the models perform only slightly better.
data_train["subset_selection"] = models.loc[8, "model"].predict()
model_subset_RMSLE = RMSLE(data_train.subset_selection, data_train.visitors)
results_df.loc[4,"Model"] = "Subset selection"
results_df.loc[4,"RMSLE"] = model_subset_RMSLE
    # Let's try polynomial regression on past_dow_visitors (the variable with the highest t statistic), up to a 5th-degree polynomial.
poly_1 = smf.ols(formula='visitors ~ 1 + past_dow_visitors', data=data_train).fit()
poly_2 = smf.ols(formula='visitors ~ 1 + past_dow_visitors + I(past_dow_visitors ** 2.0)', data=data_train).fit()
poly_3 = smf.ols(formula='visitors ~ 1 + past_dow_visitors + I(past_dow_visitors ** 2.0) + I(past_dow_visitors ** 3.0)', data=data_train).fit()
poly_4 = smf.ols(formula='visitors ~ 1 + past_dow_visitors + I(past_dow_visitors ** 2.0) + I(past_dow_visitors ** 3.0) + I(past_dow_visitors ** 4.0)', data=data_train).fit()
poly_5 = smf.ols(formula='visitors ~ 1 + past_dow_visitors + I(past_dow_visitors ** 2.0) + I(past_dow_visitors ** 3.0) + I(past_dow_visitors ** 4.0) + I(past_dow_visitors ** 5.0)', data=data_train).fit()
print(sm.stats.anova_lm(poly_1, poly_2, poly_3, poly_4, poly_5, typ=1))
plt.figure(figsize=(6 * 1.618, 6))
plt.scatter(data_train.past_dow_visitors, data_train.visitors, s=10, alpha=0.3)
plt.xlabel('past_dow_visitors')
plt.ylabel('visitors')
x = pd.DataFrame({'past_dow_visitors': np.linspace(data_train.past_dow_visitors.min(), data_train.past_dow_visitors.max(), 100)})
plt.plot(x.past_dow_visitors, poly_1.predict(x), 'b-', label='Poly n=1 $R^2$=%.2f' % poly_1.rsquared, alpha=0.9)
plt.plot(x.past_dow_visitors, poly_2.predict(x), 'g-', label='Poly n=2 $R^2$=%.2f' % poly_2.rsquared, alpha=0.9)
plt.plot(x.past_dow_visitors, poly_3.predict(x), 'r-', alpha=0.9,label='Poly n=3 $R^2$=%.2f' % poly_3.rsquared)
plt.plot(x.past_dow_visitors, poly_4.predict(x), 'y-', alpha=0.9,label='Poly n=4 $R^2$=%.2f' % poly_4.rsquared)
plt.plot(x.past_dow_visitors, poly_5.predict(x), 'k-', alpha=0.9,label='Poly n=5 $R^2$=%.2f' % poly_5.rsquared)
plt.legend()
data_train["poly_regr"] = poly_5.predict()
model_poly_RMSLE = RMSLE(data_train.poly_regr, data_train.visitors)
results_df.loc[5,"Model"] = "Polynomial Regressor"
results_df.loc[5,"RMSLE"] = model_poly_RMSLE
    print(results_df)
return data_train,results_df
def multiple_model_creation(data_train,results_df):
# We'll start by creating a multiple linear regression model for each restaurant in the train data.
    # Let's drop the columns that won't be used in the final predictions.
    data_train.drop(['air_area_name', 'latitude', 'past_dow_visitors', 'longitude', 'visitors_mean',
                     'linear_regr', 'store_change', 'past_dow_predict', 'subset_selection',
                     'poly_regr', 'log_visitors'], axis=1, inplace=True)
    data_train.drop(list(data_train.filter(regex='genre_')), axis=1, inplace=True)
restaurants = data_train.air_store_id.unique()
RMSLEs = []
models_dict = {}
for i,restaurant in enumerate(restaurants):
if i%100 == 0 or i==(len(restaurants)-1):
print("Model {} of {}".format(i+1,len(restaurants)))
        df_temp = data_train[data_train.air_store_id == restaurant].copy()  # copy to avoid SettingWithCopyWarning
        df_temp.dropna(axis=0, how='any', inplace=True)
model = sm.OLS.from_formula('visitors ~ ' + '+'.join(df_temp.columns.difference(['visitors', 'air_store_id'])), df_temp).fit()
RMSLEs.append(RMSLE(model.predict(), df_temp.visitors))
models_dict[restaurant] = model
    # Now we'll create models for the restaurants without reserve-visitor info, since that data is incomplete for the forecasted weeks.
RMSLEhalf = []
half_models_dict = {}
for i,restaurant in enumerate(restaurants):
if i%100 == 0 or i==(len(restaurants)-1):
print("Model {} of {}".format(i+1,len(restaurants)))
        df_temp = data_train[data_train.air_store_id == restaurant].copy()  # copy to avoid SettingWithCopyWarning
        df_temp.dropna(axis=0, how='any', inplace=True)
model = sm.OLS.from_formula('visitors ~ ' + '+'.join(df_temp.columns.difference(['visitors', 'air_store_id','reserve_visitors'])), df_temp).fit()
RMSLEhalf.append(RMSLE(model.predict(), df_temp.visitors))
half_models_dict[restaurant] = model
# And finally, a last model for those restaurants that are new in the test dataframe.
nodata_model = sm.OLS.from_formula('visitors ~ ' + '+'.join(data_train.columns.difference(['visitors', 'air_store_id','reserve_visitors','visitors_rest_mean'])), data_train).fit()
RMSLE_rest = RMSLE(nodata_model.predict(), data_train.visitors)
# Let's see how these newly created models compare with the ones obtained in the modeling section.
results_df.loc[6,"Model"] = "Regressor per id"
results_df.loc[6,"RMSLE"] = np.mean(RMSLEs)
results_df.loc[7,"Model"] = "Regressor per id w/o reserves"
results_df.loc[7,"RMSLE"] = np.mean(RMSLEs)
results_df.loc[8,"Model"] = "New id model"
results_df.loc[8,"RMSLE"] = RMSLE_rest
    print(results_df)
# We'll store all the created models
save_model(models_dict,'full_models')
save_model(half_models_dict,'half_models')
save_model(nodata_model,'no_data_model')
return data_train, models_dict, half_models_dict, nodata_model
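# End-to-end usage sketch (assumes a prepared data_train DataFrame with the
# columns referenced above):
#   data_train, results_df = regression(data_train)
#   data_train, models_dict, half_models_dict, nodata_model = \
#       multiple_model_creation(data_train, results_df)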
| 49.61597
| 420
| 0.699211
|
d78001c976f4b4829889bc787ebffcb1acebf123
| 130
|
py
|
Python
|
src/misc-experiments/ws-client.py
|
danja/chatterbox
|
59ebf9d65bac38854a6162bc0f6f4b9f6d43d330
|
[
"MIT"
] | 2
|
2021-02-19T22:30:59.000Z
|
2021-03-19T19:07:36.000Z
|
src/misc-experiments/ws-client.py
|
danja/chatterbox
|
59ebf9d65bac38854a6162bc0f6f4b9f6d43d330
|
[
"MIT"
] | null | null | null |
src/misc-experiments/ws-client.py
|
danja/chatterbox
|
59ebf9d65bac38854a6162bc0f6f4b9f6d43d330
|
[
"MIT"
] | null | null | null |
import websocket
ws = websocket.WebSocket()
ws.connect("ws://192.168.0.142/ws")
result = ws.recv()
print(result)
ws.close()
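# A minimal request/response sketch (assuming the server at /ws accepts text
# frames; send() is part of the same websocket-client API):
#   ws.connect("ws://192.168.0.142/ws")
#   ws.send("ping")
#   print(ws.recv())
#   ws.close()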
| 13
| 35
| 0.676923
|
1725f57eb54ba3369b0c4300defff19feee2406f
| 7,589
|
py
|
Python
|
model.py
|
jfrancis71/pixel-cnn-pp
|
150ab5dfeead19a598a49e8ecf427e519e433a68
|
[
"MIT"
] | null | null | null |
model.py
|
jfrancis71/pixel-cnn-pp
|
150ab5dfeead19a598a49e8ecf427e519e433a68
|
[
"MIT"
] | null | null | null |
model.py
|
jfrancis71/pixel-cnn-pp
|
150ab5dfeead19a598a49e8ecf427e519e433a68
|
[
"MIT"
] | null | null | null |
import pdb
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from .layers import *
from .utils import *
import numpy as np
class PixelCNNLayer_up(nn.Module):
def __init__(self, nr_resnet, nr_filters, resnet_nonlinearity, nr_conditional=None):
super(PixelCNNLayer_up, self).__init__()
self.nr_resnet = nr_resnet
# stream from pixels above
self.u_stream = nn.ModuleList([gated_resnet(nr_filters, down_shifted_conv2d,
resnet_nonlinearity, skip_connection=0, num_conditional=nr_conditional)
for _ in range(nr_resnet)])
        # stream from pixels above and to the left
self.ul_stream = nn.ModuleList([gated_resnet(nr_filters, down_right_shifted_conv2d,
resnet_nonlinearity, skip_connection=1, num_conditional=nr_conditional)
for _ in range(nr_resnet)])
def forward(self, u, ul, conditional=None):
u_list, ul_list = [], []
for i in range(self.nr_resnet):
u = self.u_stream[i](u, conditional=conditional)
ul = self.ul_stream[i](ul, a=u, conditional=conditional)
u_list += [u]
ul_list += [ul]
return u_list, ul_list
class PixelCNNLayer_down(nn.Module):
def __init__(self, nr_resnet, nr_filters, resnet_nonlinearity, nr_conditional=None):
super(PixelCNNLayer_down, self).__init__()
self.nr_resnet = nr_resnet
# stream from pixels above
self.u_stream = nn.ModuleList([gated_resnet(nr_filters, down_shifted_conv2d, resnet_nonlinearity, skip_connection=1, num_conditional=nr_conditional)
for _ in range(nr_resnet)])
        # stream from pixels above and to the left
self.ul_stream = nn.ModuleList([gated_resnet(nr_filters, down_right_shifted_conv2d,
resnet_nonlinearity, skip_connection=2, num_conditional=nr_conditional)
for _ in range(nr_resnet)])
def forward(self, u, ul, u_list, ul_list, conditional):
for i in range(self.nr_resnet):
u = self.u_stream[i](u, a=u_list.pop(), conditional=conditional)
ul = self.ul_stream[i](ul, a=torch.cat((u, ul_list.pop()), 1), conditional=conditional)
return u, ul
class PixelCNN(nn.Module):
def __init__(self, nr_resnet=5, nr_filters=80, nr_params=1,
resnet_nonlinearity='concat_elu', input_channels=3, nr_conditional=None):
super(PixelCNN, self).__init__()
if resnet_nonlinearity == 'concat_elu' :
self.resnet_nonlinearity = lambda x : concat_elu(x)
else :
            raise Exception('Right now only concat_elu is supported as the resnet nonlinearity.')
self.nr_filters = nr_filters
self.input_channels = input_channels
self.right_shift_pad = nn.ZeroPad2d((1, 0, 0, 0))
self.down_shift_pad = nn.ZeroPad2d((0, 0, 1, 0))
down_nr_resnet = [nr_resnet] + [nr_resnet + 1] * 2
self.down_layers = nn.ModuleList([PixelCNNLayer_down(down_nr_resnet[i], nr_filters,
self.resnet_nonlinearity, nr_conditional) for i in range(3)])
self.up_layers = nn.ModuleList([PixelCNNLayer_up(nr_resnet, nr_filters,
self.resnet_nonlinearity, nr_conditional) for _ in range(3)])
self.downsize_u_stream = nn.ModuleList([down_shifted_conv2d(nr_filters, nr_filters,
stride=(2,2)) for _ in range(2)])
self.downsize_ul_stream = nn.ModuleList([down_right_shifted_conv2d(nr_filters,
nr_filters, stride=(2,2)) for _ in range(2)])
self.upsize_u_stream = nn.ModuleList([down_shifted_deconv2d(nr_filters, nr_filters,
stride=(2,2)) for _ in range(2)])
self.upsize_ul_stream = nn.ModuleList([down_right_shifted_deconv2d(nr_filters,
nr_filters, stride=(2,2)) for _ in range(2)])
self.u_init = down_shifted_conv2d(input_channels + 1, nr_filters, filter_size=(2,3),
shift_output_down=True)
self.ul_init = nn.ModuleList([down_shifted_conv2d(input_channels + 1, nr_filters,
filter_size=(1,3), shift_output_down=True),
down_right_shifted_conv2d(input_channels + 1, nr_filters,
filter_size=(2,1), shift_output_right=True)])
self.nin_out = nin(nr_filters, nr_params)
self.init_padding = None
def forward(self, x, sample=False, conditional=None):
        # similar to what is done in the tf repo:
if self.init_padding is None and not sample:
xs = [int(y) for y in x.size()]
padding = Variable(torch.ones(xs[0], 1, xs[2], xs[3]), requires_grad=False)
self.init_padding = padding.cuda() if x.is_cuda else padding
if sample :
xs = [int(y) for y in x.size()]
padding = Variable(torch.ones(xs[0], 1, xs[2], xs[3]), requires_grad=False)
padding = padding.cuda() if x.is_cuda else padding
x = torch.cat((x, padding), 1)
### UP PASS ###
x = x if sample else torch.cat((x, self.init_padding), 1)
u_list = [self.u_init(x)]
ul_list = [self.ul_init[0](x) + self.ul_init[1](x)]
for i in range(3):
# resnet block
u_out, ul_out = self.up_layers[i](u_list[-1], ul_list[-1], conditional)
u_list += u_out
ul_list += ul_out
if i != 2:
# downscale (only twice)
u_list += [self.downsize_u_stream[i](u_list[-1])]
ul_list += [self.downsize_ul_stream[i](ul_list[-1])]
### DOWN PASS ###
u = u_list.pop()
ul = ul_list.pop()
for i in range(3):
# resnet block
u, ul = self.down_layers[i](u, ul, u_list, ul_list, conditional)
# upscale (only twice)
if i != 2 :
u = self.upsize_u_stream[i](u)
ul = self.upsize_ul_stream[i](ul)
x_out = self.nin_out(F.elu(ul))
        # any leftover entries would mean the skip connections were mismatched; drop into the debugger if so
        assert len(u_list) == len(ul_list) == 0, pdb.set_trace()
return x_out
if __name__ == '__main__':
''' testing loss with tf version '''
np.random.seed(1)
xx_t = (np.random.rand(15, 32, 32, 100) * 3).astype('float32')
yy_t = np.random.uniform(-1, 1, size=(15, 32, 32, 3)).astype('float32')
x_t = Variable(torch.from_numpy(xx_t)).cuda()
y_t = Variable(torch.from_numpy(yy_t)).cuda()
loss = discretized_mix_logistic_loss(y_t, x_t)
''' testing model and deconv dimensions '''
x = torch.cuda.FloatTensor(32, 3, 32, 32).uniform_(-1., 1.)
xv = Variable(x).cpu()
ds = down_shifted_deconv2d(3, 40, stride=(2,2))
x_v = Variable(x)
''' testing loss compatibility '''
model = PixelCNN(nr_resnet=3, nr_filters=100, input_channels=x.size(1))
model = model.cuda()
out = model(x_v)
loss = discretized_mix_logistic_loss(x_v, out)
    print('loss : %s' % loss.item())  # .item() replaces the deprecated loss.data[0] indexing
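    # A per-pixel sampling sketch (comments only; assumes utils exports
    # sample_from_discretized_mix_logistic, as in upstream pixel-cnn-pp):
    #   data = torch.zeros(32, 3, 32, 32).cuda()
    #   for r in range(32):
    #       for c in range(32):
    #           out = model(Variable(data), sample=True)
    #           data[:, :, r, c] = sample_from_discretized_mix_logistic(out, nr_mix)[:, :, r, c]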
| 44.380117
| 158
| 0.572671
|
1855c13c502f9f21832f1aef6b41ac62b04b3cac
| 1,620
|
py
|
Python
|
netta/get_image.py
|
zhangdafu12/web
|
64ce7db4697167215bf9ee25cd5bdc0bd15b5831
|
[
"MIT"
] | null | null | null |
netta/get_image.py
|
zhangdafu12/web
|
64ce7db4697167215bf9ee25cd5bdc0bd15b5831
|
[
"MIT"
] | 1
|
2020-03-30T09:26:59.000Z
|
2020-03-30T09:26:59.000Z
|
netta/get_image.py
|
zhangdafu12/web
|
64ce7db4697167215bf9ee25cd5bdc0bd15b5831
|
[
"MIT"
] | null | null | null |
import re
import time
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
def func(name):
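    """Search Baidu Images for `name`, open each of the top five results,
    and return a list of (image_url, page_text) tuples, where page_text is
    the Chinese text scraped from each image's source page."""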
result = []
browser = webdriver.Chrome()
browser.get('https://image.baidu.com')
time.sleep(1)
    search_box = browser.find_element_by_id('kw')  # renamed from `input` to avoid shadowing the built-in
    search_box.send_keys(name)
    search_box.send_keys(Keys.ENTER)
js = "window.scrollTo(0,1000);"
browser.execute_script(js)
time.sleep(2)
urls = browser.find_elements_by_xpath("//div[@class='imgbox']//a")
# imgs = browser.find_elements_by_xpath("//div[@class='imgbox']//a//img")
# for img in imgs[0:5]:
# data_imgurl = img.get_attribute("data-imgurl")
# print(data_imgurl)
for url in urls[0:5]:
n_url = url.get_attribute("href")
driver = webdriver.Chrome()
time.sleep(1)
driver.get(n_url)
try:
image_url = driver.find_element_by_xpath('//*[@id="currentImg"]').get_attribute("src")
        except Exception:  # the detail page may lack a #currentImg element
continue
print(image_url)
time.sleep(2)
driver.find_element_by_xpath("//div[@class='pic-title']//a").click()
time.sleep(1)
windows = driver.window_handles
driver.switch_to.window(windows[-1])
# now_url = driver.current_url
html = driver.page_source
text = ''
for n in re.findall(r'[\u4e00-\u9fff]+', html):
# print(n)
text += n
result.append((image_url,text))
driver.close()
driver.quit()
browser.quit()
print(result)
return result
if __name__ == '__main__':
    name = input('Enter a search term: ')
func(name=name)
| 28.421053
| 98
| 0.6
|
29c2f3fa1ec5835d826b9dfc247f071f0b97305d
| 1,754
|
py
|
Python
|
src/exifread/utils.py
|
vincentchevrier/sortphotos_vc
|
edfb4c88a442a3215171c8bb67fa3b1ce056f3e7
|
[
"MIT"
] | 1
|
2016-05-02T21:24:00.000Z
|
2016-05-02T21:24:00.000Z
|
OSX/exifread/utils.py
|
stuporglue/ImportMedia
|
7388798b2c7ff6c0c87aa6a9971ec05f523ff359
|
[
"BSD-3-Clause"
] | null | null | null |
OSX/exifread/utils.py
|
stuporglue/ImportMedia
|
7388798b2c7ff6c0c87aa6a9971ec05f523ff359
|
[
"BSD-3-Clause"
] | 1
|
2021-05-15T21:08:50.000Z
|
2021-05-15T21:08:50.000Z
|
"""
Misc utilities.
"""
def make_string(seq):
"""
Don't throw an exception when given an out of range character.
"""
string = ''
for c in seq:
# Screen out non-printing characters
        if 32 <= c < 256:
string += chr(c)
# If no printing chars
if not string:
return str(seq)
return string
def make_string_uc(seq):
"""
Special version to deal with the code in the first 8 bytes of a user comment.
First 8 bytes gives coding system e.g. ASCII vs. JIS vs Unicode.
"""
#code = seq[0:8]
seq = seq[8:]
# Of course, this is only correct if ASCII, and the standard explicitly
# allows JIS and Unicode.
return make_string( make_string(seq) )
def s2n_motorola(buf):
    """Extract a multibyte integer in Motorola format (big endian)."""
    x = 0
    for c in buf:
        x = (x << 8) | ord(c)
    return x
def s2n_intel(buf):
    """Extract a multibyte integer in Intel format (little endian)."""
    x = 0
    y = 0
    for c in buf:
        x = x | (ord(c) << y)
        y = y + 8
    return x
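# Byte-order sanity check (comments only):
#   s2n_motorola('\x01\x02') == 0x0102   # first byte is most significant
#   s2n_intel('\x01\x02')    == 0x0201   # first byte is least significant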
class Ratio:
"""
Ratio object that eventually will be able to reduce itself to lowest
common denominator for printing.
"""
def __init__(self, num, den):
self.num = num
self.den = den
def __repr__(self):
self.reduce()
if self.den == 1:
return str(self.num)
return '%d/%d' % (self.num, self.den)
def _gcd(self, a, b):
if b == 0:
return a
else:
return self._gcd(b, a % b)
def reduce(self):
div = self._gcd(self.num, self.den)
if div > 1:
self.num = self.num // div
self.den = self.den // div
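# Usage sketch (comments only): repr() reduces the ratio first, so
#   repr(Ratio(6, 8)) == '3/4' and repr(Ratio(4, 2)) == '2'.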
| 22.779221
| 81
| 0.551311
|
d95b7617d5ee98872c36ec9b7b461044974cddff
| 933
|
py
|
Python
|
orders/utils.py
|
JaveBychkov/dmitriy-shop
|
71856b0c9dff375e5491b001790807b3c7b01ae8
|
[
"MIT"
] | null | null | null |
orders/utils.py
|
JaveBychkov/dmitriy-shop
|
71856b0c9dff375e5491b001790807b3c7b01ae8
|
[
"MIT"
] | null | null | null |
orders/utils.py
|
JaveBychkov/dmitriy-shop
|
71856b0c9dff375e5491b001790807b3c7b01ae8
|
[
"MIT"
] | null | null | null |
from django.core.mail import EmailMultiAlternatives
from django.template import loader
from django.utils.html import strip_tags
def get_email_obj(subject, context, template, mail_to):
"""
Return the instance of EmailMultiAlternative email with attached html
template that was rendered with given context.
Parameters
----------
subject : str
The subject of a mail.
context : dict
The context with which given template should be rendered.
template : str
Path to template.
mail_to : iterable
The list of emails that should be emailed.
Returns
-------
EmailMultiAlternatives
instance of email.
"""
html_body = loader.render_to_string(template, context)
text_body = strip_tags(html_body)
email = EmailMultiAlternatives(
subject, text_body, to=mail_to,
alternatives=[(html_body, 'text/html')]
)
return email
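# Usage sketch (hypothetical subject, template path, and address):
#   email = get_email_obj('Order confirmed', {'order_id': 42},
#                         'orders/email.html', ['customer@example.com'])
#   email.send()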
| 28.272727
| 73
| 0.681672
|
95e875d2e13aedf0ca1945f0a3102f6d0b3be80d
| 2,956
|
py
|
Python
|
parlai/mturk/tasks/dealnodeal/run.py
|
ricsinaruto/ParlAI
|
733b627ae456d6b11a2fc4624088a781bc6c1d03
|
[
"MIT"
] | 258
|
2020-04-10T07:01:06.000Z
|
2022-03-26T11:49:30.000Z
|
parlai/mturk/tasks/dealnodeal/run.py
|
ricsinaruto/ParlAI
|
733b627ae456d6b11a2fc4624088a781bc6c1d03
|
[
"MIT"
] | 33
|
2020-04-10T04:28:51.000Z
|
2022-03-31T02:52:02.000Z
|
parlai/mturk/tasks/dealnodeal/run.py
|
ricsinaruto/ParlAI
|
733b627ae456d6b11a2fc4624088a781bc6c1d03
|
[
"MIT"
] | 43
|
2020-04-14T10:43:33.000Z
|
2022-03-13T02:27:54.000Z
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from parlai.core.params import ParlaiParser
from parlai.mturk.core.mturk_manager import MTurkManager
from parlai.mturk.tasks.dealnodeal.worlds import \
MTurkDealNoDealDialogWorld
from parlai.agents.local_human.local_human import LocalHumanAgent
from parlai.core.agents import create_agent
from task_config import task_config
def main():
"""This task consists of one agent, model or MTurk worker, talking to an
MTurk worker to negotiate a deal.
"""
argparser = ParlaiParser(False, False)
argparser.add_parlai_data_path()
argparser.add_mturk_args()
argparser.add_argument('--two_mturk_agents', dest='two_mturk_agents',
action='store_true', help='data collection mode '
'with converations between two MTurk agents')
opt = argparser.parse_args()
opt['task'] = 'dealnodeal'
opt['datatype'] = 'valid'
opt.update(task_config)
local_agent_1_id = 'local_1'
mturk_agent_ids = ['mturk_agent_1']
if opt['two_mturk_agents']:
mturk_agent_ids.append('mturk_agent_2')
mturk_manager = MTurkManager(
opt=opt,
mturk_agent_ids=mturk_agent_ids
)
mturk_manager.setup_server()
try:
mturk_manager.start_new_run()
mturk_manager.create_hits()
mturk_manager.set_onboard_function(onboard_function=None)
mturk_manager.ready_to_accept_workers()
def check_worker_eligibility(worker):
return True
def assign_worker_roles(workers):
for index, worker in enumerate(workers):
worker.id = mturk_agent_ids[index % len(mturk_agent_ids)]
def run_conversation(mturk_manager, opt, workers):
agents = workers[:]
# Create a local agent
if not opt['two_mturk_agents']:
if 'model' in opt:
local_agent = create_agent(opt)
else:
local_agent = LocalHumanAgent(opt=None)
local_agent.id = local_agent_1_id
agents.append(local_agent)
opt["batchindex"] = mturk_manager.started_conversations
world = MTurkDealNoDealDialogWorld(
opt=opt,
agents=agents
)
while not world.episode_done():
world.parley()
world.shutdown()
mturk_manager.start_task(
eligibility_function=check_worker_eligibility,
assign_role_function=assign_worker_roles,
task_function=run_conversation
)
except BaseException:
raise
finally:
mturk_manager.expire_all_unassigned_hits()
mturk_manager.shutdown()
if __name__ == '__main__':
main()
| 30.474227
| 76
| 0.647497
|
835348119366391f23d5fe0baa57ad1158c97ca3
| 4,067
|
py
|
Python
|
matrix/kcompress.py
|
rebryant/Cloud-BDD
|
83731533f7cd97ee8c8bd8b1d062137ee9e85f67
|
[
"BSD-3-Clause"
] | 2
|
2017-02-20T06:33:34.000Z
|
2021-05-21T13:29:43.000Z
|
matrix/kcompress.py
|
rebryant/Cloud-BDD
|
83731533f7cd97ee8c8bd8b1d062137ee9e85f67
|
[
"BSD-3-Clause"
] | 2
|
2021-01-20T23:25:08.000Z
|
2021-08-03T15:42:56.000Z
|
matrix/kcompress.py
|
rebryant/Cloud-BDD
|
83731533f7cd97ee8c8bd8b1d062137ee9e85f67
|
[
"BSD-3-Clause"
] | 1
|
2021-06-22T08:56:23.000Z
|
2021-06-22T08:56:23.000Z
|
#!/usr/bin/python
# Given existing solution, extract its kernel
# Then generate kernels that reduce number of products in matrix multiplication
import sys
import circuit
import brent
dim = (3,3,3)
sourceAuxCount = 23
targetAuxCount = 22
ckt = circuit.Circuit()
sourceKernel = None
testCount = 0
saveFiles = True
# Mapping from generated kernel signatures to unique kernels
kernelDict = {}
def load(path):
global sourceKernel
try:
s = brent.MScheme(dim, sourceAuxCount, ckt).parseFromFile(path)
except Exception as ex:
print("ERROR: Could not extract solution from file '%s' (%s)" % (path, str(ex)))
return
sc = s.canonize()
sourceKernel = sc.kernelTerms
khash = sourceKernel.sign()
print("Loaded kernel %s from scheme %s" % (khash, path))
# Given newly generated kdlist, convert to kernel
# Canonize it, and record if unique
def catalog(kdlist):
global kernelDict, testCount
testCount += 1
k = brent.KernelSet(dim, targetAuxCount, kdlist)
kc, dlist = k.listCanonize()
sig = kc.signature()
if sig not in kernelDict:
print("Created new kernel %s" % kc.sign())
kernelDict[sig] = kc
else:
print("Created kernel that duplicates %s" % kc.sign())
def mergeSingles():
levelList = sourceKernel.levelize()
# Split into levels with single kernels and ones with multiple kernels
singleList = [ls for ls in levelList if len(ls) == 1]
multiList = [ls for ls in levelList if len(ls) > 1]
# Enumerate pairs from single lists
for idx1 in range(len(singleList)):
kt1 = singleList[idx1][0]
for idx2 in range(idx1+1, len(singleList)):
kt2 = singleList[idx2][0]
newSingleList = [ls for ls in singleList if ls[0] != kt1 and ls[0] != kt2]
newList = [[kt1, kt2]]
# Now generate list of kernel terms with new levels
nextLevel = 1
kdlist = []
for ls in multiList + newList + newSingleList:
for kt in ls:
nkt = kt.clone()
nkt.level = nextLevel
kdlist.append(nkt)
nextLevel += 1
catalog(kdlist)
def addToMulti():
levelList = sourceKernel.levelize()
# Split into levels with single kernels and ones with multiple kernels
singleList = [ls for ls in levelList if len(ls) == 1]
multiList = [ls for ls in levelList if len(ls) > 1]
# Enumerate pairs from single lists
for idxm in range(len(multiList)):
ktlist = multiList[idxm]
newMultiList = [ls for ls in multiList if ls != ktlist]
for idxs in range(len(singleList)):
kt = singleList[idxs][0]
newSingleList = [ls for ls in singleList if ls[0] != kt]
newList = list(ktlist)
newList.append(kt)
# Now generate list of kernel terms with new levels
nextLevel = 1
kdlist = []
for ls in newMultiList + [newList] + newSingleList:
for kt in ls:
nkt = kt.clone()
nkt.level = nextLevel
kdlist.append(nkt)
nextLevel += 1
catalog(kdlist)
def save(k):
khash = k.sign()
outName = khash + ".exp"
try:
outf = open(outName, 'w')
except Exception as ex:
print("Couldn't open output file '%s' (%s)" % (outName, str(ex)))
return
k.printPolynomial(outf)
outf.close()
print("Wrote to file %s" % outName)
def run(path):
load(path)
if sourceKernel is None:
return
mergeSingles()
addToMulti()
print("%d kernels tested. %d unique kernels generated" % (testCount, len(kernelDict)))
print("Original Signature:")
print(" " + sourceKernel.shortString())
print("New signatures:")
    klist = sorted(kernelDict.values(), key=lambda k: k.shortString())  # dict views have no .sort() in Python 3
for k in klist:
print(k.shortString())
if saveFiles:
save(k)
if __name__ == "__main__":
    if len(sys.argv) != 2:
        print("Usage: %s <solution-file>" % sys.argv[0])
        sys.exit(1)
    run(sys.argv[1])
| 31.527132
| 91
| 0.596263
|
614ddb13a3cf960d884b7650fee762666fd6095a
| 43,241
|
py
|
Python
|
sdk/azurestack/azure-mgmt-azurestack/azure/mgmt/azurestack/models/_models.py
|
mohamedshabanofficial/azure-sdk-for-python
|
81c585f310cd2ec23d2ad145173958914a075a58
|
[
"MIT"
] | 2
|
2021-03-24T06:26:11.000Z
|
2021-04-18T15:55:59.000Z
|
sdk/azurestack/azure-mgmt-azurestack/azure/mgmt/azurestack/models/_models.py
|
mohamedshabanofficial/azure-sdk-for-python
|
81c585f310cd2ec23d2ad145173958914a075a58
|
[
"MIT"
] | 2
|
2021-11-03T06:10:36.000Z
|
2021-12-01T06:29:39.000Z
|
sdk/azurestack/azure-mgmt-azurestack/azure/mgmt/azurestack/models/_models.py
|
mohamedshabanofficial/azure-sdk-for-python
|
81c585f310cd2ec23d2ad145173958914a075a58
|
[
"MIT"
] | 1
|
2021-05-19T02:55:10.000Z
|
2021-05-19T02:55:10.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.core.exceptions import HttpResponseError
import msrest.serialization
class ActivationKeyResult(msrest.serialization.Model):
"""The resource containing the Azure Stack activation key.
:param activation_key: Azure Stack activation key.
:type activation_key: str
"""
_attribute_map = {
'activation_key': {'key': 'activationKey', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ActivationKeyResult, self).__init__(**kwargs)
self.activation_key = kwargs.get('activation_key', None)
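# Construction sketch: these generated models take their fields as keyword
# arguments, e.g. ActivationKeyResult(activation_key='<key>') (hypothetical value).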
class CloudManifestFileDeploymentData(msrest.serialization.Model):
"""Cloud specific manifest data for AzureStack deployment.
:param external_dsms_certificates: Dsms external certificates.
:type external_dsms_certificates: str
:param custom_cloud_verification_key: Signing verification public key.
:type custom_cloud_verification_key: str
:param custom_cloud_arm_endpoint: ARM endpoint.
:type custom_cloud_arm_endpoint: str
:param external_dsms_endpoint: Dsms endpoint.
:type external_dsms_endpoint: str
"""
_attribute_map = {
'external_dsms_certificates': {'key': 'externalDsmsCertificates', 'type': 'str'},
'custom_cloud_verification_key': {'key': 'customCloudVerificationKey', 'type': 'str'},
'custom_cloud_arm_endpoint': {'key': 'customEnvironmentEndpoints.customCloudArmEndpoint', 'type': 'str'},
'external_dsms_endpoint': {'key': 'customEnvironmentEndpoints.externalDsmsEndpoint', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(CloudManifestFileDeploymentData, self).__init__(**kwargs)
self.external_dsms_certificates = kwargs.get('external_dsms_certificates', None)
self.custom_cloud_verification_key = kwargs.get('custom_cloud_verification_key', None)
self.custom_cloud_arm_endpoint = kwargs.get('custom_cloud_arm_endpoint', None)
self.external_dsms_endpoint = kwargs.get('external_dsms_endpoint', None)
class CloudManifestFileProperties(msrest.serialization.Model):
"""Cloud specific manifest JSON properties.
:param deployment_data: Cloud specific manifest data.
:type deployment_data: ~azure.mgmt.azurestack.models.CloudManifestFileDeploymentData
:param signature: Signature of the cloud specific manifest data.
:type signature: str
"""
_attribute_map = {
'deployment_data': {'key': 'deploymentData', 'type': 'CloudManifestFileDeploymentData'},
'signature': {'key': 'signature', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(CloudManifestFileProperties, self).__init__(**kwargs)
self.deployment_data = kwargs.get('deployment_data', None)
self.signature = kwargs.get('signature', None)
class Resource(msrest.serialization.Model):
"""Base resource object.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: ID of the resource.
:vartype id: str
:ivar name: Name of the resource.
:vartype name: str
:ivar type: Type of Resource.
:vartype type: str
:param etag: The entity tag used for optimistic concurrency when modifying the resource.
:type etag: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Resource, self).__init__(**kwargs)
self.id = None
self.name = None
self.type = None
self.etag = kwargs.get('etag', None)
class CloudManifestFileResponse(Resource):
"""Cloud specific manifest GET response.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: ID of the resource.
:vartype id: str
:ivar name: Name of the resource.
:vartype name: str
:ivar type: Type of Resource.
:vartype type: str
:param etag: The entity tag used for optimistic concurrency when modifying the resource.
:type etag: str
:param properties: Cloud specific manifest data.
:type properties: ~azure.mgmt.azurestack.models.CloudManifestFileProperties
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'properties': {'key': 'properties', 'type': 'CloudManifestFileProperties'},
}
def __init__(
self,
**kwargs
):
super(CloudManifestFileResponse, self).__init__(**kwargs)
self.properties = kwargs.get('properties', None)
class Compatibility(msrest.serialization.Model):
"""Product compatibility.
:param is_compatible: Tells if product is compatible with current device.
:type is_compatible: bool
:param message: Short error message if any compatibility issues are found.
:type message: str
:param description: Full error message if any compatibility issues are found.
:type description: str
:param issues: List of all issues found.
:type issues: list[str or ~azure.mgmt.azurestack.models.CompatibilityIssue]
"""
_attribute_map = {
'is_compatible': {'key': 'isCompatible', 'type': 'bool'},
'message': {'key': 'message', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'issues': {'key': 'issues', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(Compatibility, self).__init__(**kwargs)
self.is_compatible = kwargs.get('is_compatible', None)
self.message = kwargs.get('message', None)
self.description = kwargs.get('description', None)
self.issues = kwargs.get('issues', None)
class CustomerSubscription(Resource):
"""Customer subscription.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: ID of the resource.
:vartype id: str
:ivar name: Name of the resource.
:vartype name: str
:ivar type: Type of Resource.
:vartype type: str
:param etag: The entity tag used for optimistic concurrency when modifying the resource.
:type etag: str
:param tenant_id: Tenant Id.
:type tenant_id: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'tenant_id': {'key': 'properties.tenantId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(CustomerSubscription, self).__init__(**kwargs)
self.tenant_id = kwargs.get('tenant_id', None)
class CustomerSubscriptionList(msrest.serialization.Model):
"""Pageable list of customer subscriptions.
:param next_link: URI to the next page.
:type next_link: str
:param value: List of customer subscriptions.
:type value: list[~azure.mgmt.azurestack.models.CustomerSubscription]
"""
_attribute_map = {
'next_link': {'key': 'nextLink', 'type': 'str'},
'value': {'key': 'value', 'type': '[CustomerSubscription]'},
}
def __init__(
self,
**kwargs
):
super(CustomerSubscriptionList, self).__init__(**kwargs)
self.next_link = kwargs.get('next_link', None)
self.value = kwargs.get('value', None)
class DataDiskImage(msrest.serialization.Model):
"""Data disk image.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar lun: The LUN.
:vartype lun: int
:ivar source_blob_sas_uri: SAS key for source blob.
:vartype source_blob_sas_uri: str
"""
_validation = {
'lun': {'readonly': True},
'source_blob_sas_uri': {'readonly': True},
}
_attribute_map = {
'lun': {'key': 'lun', 'type': 'int'},
'source_blob_sas_uri': {'key': 'sourceBlobSasUri', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(DataDiskImage, self).__init__(**kwargs)
self.lun = None
self.source_blob_sas_uri = None
class DeviceConfiguration(msrest.serialization.Model):
"""Device Configuration.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar device_version: Version of the device.
:vartype device_version: str
:ivar identity_system: Identity system of the device. Possible values include: "AzureAD",
"ADFS".
:vartype identity_system: str or ~azure.mgmt.azurestack.models.Category
"""
_validation = {
'device_version': {'readonly': True},
'identity_system': {'readonly': True},
}
_attribute_map = {
'device_version': {'key': 'deviceVersion', 'type': 'str'},
'identity_system': {'key': 'identitySystem', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(DeviceConfiguration, self).__init__(**kwargs)
self.device_version = None
self.identity_system = None
class Display(msrest.serialization.Model):
"""Contains the localized display information for this particular operation or action.
:param provider: The localized, friendly version of the resource provider name.
:type provider: str
:param resource: The localized, friendly version of the resource type related to this action or
operation; the resource type should match the public documentation for the resource provider.
:type resource: str
    :param operation: The localized, friendly name for the operation. Use the name as it will be
     displayed to the user.
:type operation: str
    :param description: The localized, friendly description for the operation. The description will
     be displayed to the user. It should be thorough and concise, suitable for use in both tooltips
     and detailed views.
:type description: str
"""
_attribute_map = {
'provider': {'key': 'provider', 'type': 'str'},
'resource': {'key': 'resource', 'type': 'str'},
'operation': {'key': 'operation', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Display, self).__init__(**kwargs)
self.provider = kwargs.get('provider', None)
self.resource = kwargs.get('resource', None)
self.operation = kwargs.get('operation', None)
self.description = kwargs.get('description', None)
class ErrorDetails(msrest.serialization.Model):
"""The details of the error.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar code: Error code.
:vartype code: str
:ivar message: Error message indicating why the operation failed.
:vartype message: str
:ivar target: The target of the particular error.
:vartype target: str
"""
_validation = {
'code': {'readonly': True},
'message': {'readonly': True},
'target': {'readonly': True},
}
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ErrorDetails, self).__init__(**kwargs)
self.code = None
self.message = None
self.target = None
class ErrorResponse(msrest.serialization.Model):
"""Error response indicates that the service is not able to process the incoming request. The reason is provided in the error message.
:param error: The details of the error.
:type error: ~azure.mgmt.azurestack.models.ErrorDetails
"""
_attribute_map = {
'error': {'key': 'error', 'type': 'ErrorDetails'},
}
def __init__(
self,
**kwargs
):
super(ErrorResponse, self).__init__(**kwargs)
self.error = kwargs.get('error', None)
class ExtendedProduct(msrest.serialization.Model):
"""Extended description about the product required for installing it into Azure Stack.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar gallery_package_blob_sas_uri: The URI to the .azpkg file that provides information
required for showing product in the gallery.
:vartype gallery_package_blob_sas_uri: str
:ivar product_kind: Specifies the kind of the product (virtualMachine or
virtualMachineExtension).
:vartype product_kind: str
:ivar compute_role: Specifies kind of compute role included in the package. Possible values
include: "None", "IaaS", "PaaS".
:vartype compute_role: str or ~azure.mgmt.azurestack.models.ComputeRole
:ivar is_system_extension: Specifies if product is a Virtual Machine Extension.
:vartype is_system_extension: bool
:ivar support_multiple_extensions: Indicates if specified product supports multiple extensions.
:vartype support_multiple_extensions: bool
:ivar version_properties_version: Specifies product version.
:vartype version_properties_version: str
:ivar vm_os_type: Specifies operating system used by the product. Possible values include:
"None", "Windows", "Linux".
:vartype vm_os_type: str or ~azure.mgmt.azurestack.models.OperatingSystem
:ivar vm_scale_set_enabled: Indicates if virtual machine Scale Set is enabled in the specified
product.
:vartype vm_scale_set_enabled: bool
:ivar uri: The URI.
:vartype uri: str
:ivar version: Specifies product version.
:vartype version: str
:ivar os_disk_image: OS disk image used by product.
:vartype os_disk_image: ~azure.mgmt.azurestack.models.OsDiskImage
:ivar data_disk_images: List of attached data disks.
:vartype data_disk_images: list[~azure.mgmt.azurestack.models.DataDiskImage]
"""
_validation = {
'gallery_package_blob_sas_uri': {'readonly': True},
'product_kind': {'readonly': True},
'compute_role': {'readonly': True},
'is_system_extension': {'readonly': True},
'support_multiple_extensions': {'readonly': True},
'version_properties_version': {'readonly': True},
'vm_os_type': {'readonly': True},
'vm_scale_set_enabled': {'readonly': True},
'uri': {'readonly': True},
'version': {'readonly': True},
'os_disk_image': {'readonly': True},
'data_disk_images': {'readonly': True},
}
_attribute_map = {
'gallery_package_blob_sas_uri': {'key': 'galleryPackageBlobSasUri', 'type': 'str'},
'product_kind': {'key': 'productKind', 'type': 'str'},
'compute_role': {'key': 'properties.computeRole', 'type': 'str'},
'is_system_extension': {'key': 'properties.isSystemExtension', 'type': 'bool'},
'support_multiple_extensions': {'key': 'properties.supportMultipleExtensions', 'type': 'bool'},
'version_properties_version': {'key': 'properties.version', 'type': 'str'},
'vm_os_type': {'key': 'properties.vmOsType', 'type': 'str'},
'vm_scale_set_enabled': {'key': 'properties.vmScaleSetEnabled', 'type': 'bool'},
'uri': {'key': 'properties.sourceBlob.uri', 'type': 'str'},
'version': {'key': 'properties.version', 'type': 'str'},
'os_disk_image': {'key': 'properties.osDiskImage', 'type': 'OsDiskImage'},
'data_disk_images': {'key': 'properties.dataDiskImages', 'type': '[DataDiskImage]'},
}
def __init__(
self,
**kwargs
):
super(ExtendedProduct, self).__init__(**kwargs)
self.gallery_package_blob_sas_uri = None
self.product_kind = None
self.compute_role = None
self.is_system_extension = None
self.support_multiple_extensions = None
self.version_properties_version = None
self.vm_os_type = None
self.vm_scale_set_enabled = None
self.uri = None
self.version = None
self.os_disk_image = None
self.data_disk_images = None
class VirtualMachineProductProperties(msrest.serialization.Model):
"""Product information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar version: Specifies product version.
:vartype version: str
:ivar os_disk_image: OS disk image used by product.
:vartype os_disk_image: ~azure.mgmt.azurestack.models.OsDiskImage
:ivar data_disk_images: List of attached data disks.
:vartype data_disk_images: list[~azure.mgmt.azurestack.models.DataDiskImage]
"""
_validation = {
'version': {'readonly': True},
'os_disk_image': {'readonly': True},
'data_disk_images': {'readonly': True},
}
_attribute_map = {
'version': {'key': 'version', 'type': 'str'},
'os_disk_image': {'key': 'osDiskImage', 'type': 'OsDiskImage'},
'data_disk_images': {'key': 'dataDiskImages', 'type': '[DataDiskImage]'},
}
def __init__(
self,
**kwargs
):
super(VirtualMachineProductProperties, self).__init__(**kwargs)
self.version = None
self.os_disk_image = None
self.data_disk_images = None
class VirtualMachineExtensionProductProperties(msrest.serialization.Model):
"""Product information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar compute_role: Specifies kind of compute role included in the package. Possible values
include: "None", "IaaS", "PaaS".
:vartype compute_role: str or ~azure.mgmt.azurestack.models.ComputeRole
:ivar is_system_extension: Specifies if product is a Virtual Machine Extension.
:vartype is_system_extension: bool
:ivar support_multiple_extensions: Indicates if specified product supports multiple extensions.
:vartype support_multiple_extensions: bool
:ivar version: Specifies product version.
:vartype version: str
:ivar vm_os_type: Specifies operating system used by the product. Possible values include:
"None", "Windows", "Linux".
:vartype vm_os_type: str or ~azure.mgmt.azurestack.models.OperatingSystem
:ivar vm_scale_set_enabled: Indicates if virtual machine Scale Set is enabled in the specified
product.
:vartype vm_scale_set_enabled: bool
:ivar uri: The URI.
:vartype uri: str
"""
_validation = {
'compute_role': {'readonly': True},
'is_system_extension': {'readonly': True},
'support_multiple_extensions': {'readonly': True},
'version': {'readonly': True},
'vm_os_type': {'readonly': True},
'vm_scale_set_enabled': {'readonly': True},
'uri': {'readonly': True},
}
_attribute_map = {
'compute_role': {'key': 'computeRole', 'type': 'str'},
'is_system_extension': {'key': 'isSystemExtension', 'type': 'bool'},
'support_multiple_extensions': {'key': 'supportMultipleExtensions', 'type': 'bool'},
'version': {'key': 'version', 'type': 'str'},
'vm_os_type': {'key': 'vmOsType', 'type': 'str'},
'vm_scale_set_enabled': {'key': 'vmScaleSetEnabled', 'type': 'bool'},
'uri': {'key': 'sourceBlob.uri', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualMachineExtensionProductProperties, self).__init__(**kwargs)
self.compute_role = None
self.is_system_extension = None
self.support_multiple_extensions = None
self.version = None
self.vm_os_type = None
self.vm_scale_set_enabled = None
self.uri = None
class ExtendedProductProperties(VirtualMachineExtensionProductProperties, VirtualMachineProductProperties):
"""Product information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar os_disk_image: OS disk image used by product.
:vartype os_disk_image: ~azure.mgmt.azurestack.models.OsDiskImage
:ivar data_disk_images: List of attached data disks.
:vartype data_disk_images: list[~azure.mgmt.azurestack.models.DataDiskImage]
:ivar compute_role: Specifies kind of compute role included in the package. Possible values
include: "None", "IaaS", "PaaS".
:vartype compute_role: str or ~azure.mgmt.azurestack.models.ComputeRole
:ivar is_system_extension: Specifies if product is a Virtual Machine Extension.
:vartype is_system_extension: bool
:ivar support_multiple_extensions: Indicates if specified product supports multiple extensions.
:vartype support_multiple_extensions: bool
:ivar version: Specifies product version.
:vartype version: str
:ivar vm_os_type: Specifies operating system used by the product. Possible values include:
"None", "Windows", "Linux".
:vartype vm_os_type: str or ~azure.mgmt.azurestack.models.OperatingSystem
:ivar vm_scale_set_enabled: Indicates if virtual machine Scale Set is enabled in the specified
product.
:vartype vm_scale_set_enabled: bool
:ivar uri: The URI.
:vartype uri: str
"""
_validation = {
'os_disk_image': {'readonly': True},
'data_disk_images': {'readonly': True},
'compute_role': {'readonly': True},
'is_system_extension': {'readonly': True},
'support_multiple_extensions': {'readonly': True},
'version': {'readonly': True},
'vm_os_type': {'readonly': True},
'vm_scale_set_enabled': {'readonly': True},
'uri': {'readonly': True},
}
_attribute_map = {
'os_disk_image': {'key': 'osDiskImage', 'type': 'OsDiskImage'},
'data_disk_images': {'key': 'dataDiskImages', 'type': '[DataDiskImage]'},
'compute_role': {'key': 'computeRole', 'type': 'str'},
'is_system_extension': {'key': 'isSystemExtension', 'type': 'bool'},
'support_multiple_extensions': {'key': 'supportMultipleExtensions', 'type': 'bool'},
'version': {'key': 'version', 'type': 'str'},
'vm_os_type': {'key': 'vmOsType', 'type': 'str'},
'vm_scale_set_enabled': {'key': 'vmScaleSetEnabled', 'type': 'bool'},
'uri': {'key': 'sourceBlob.uri', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExtendedProductProperties, self).__init__(**kwargs)
self.os_disk_image = None
self.data_disk_images = None
self.compute_role = None
self.is_system_extension = None
self.support_multiple_extensions = None
self.version = None
self.vm_os_type = None
self.vm_scale_set_enabled = None
self.uri = None
class IconUris(msrest.serialization.Model):
"""Links to product icons.
:param large: URI to large icon.
:type large: str
:param wide: URI to wide icon.
:type wide: str
:param medium: URI to medium icon.
:type medium: str
:param small: URI to small icon.
:type small: str
:param hero: URI to hero icon.
:type hero: str
"""
_attribute_map = {
'large': {'key': 'large', 'type': 'str'},
'wide': {'key': 'wide', 'type': 'str'},
'medium': {'key': 'medium', 'type': 'str'},
'small': {'key': 'small', 'type': 'str'},
'hero': {'key': 'hero', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(IconUris, self).__init__(**kwargs)
self.large = kwargs.get('large', None)
self.wide = kwargs.get('wide', None)
self.medium = kwargs.get('medium', None)
self.small = kwargs.get('small', None)
self.hero = kwargs.get('hero', None)
class MarketplaceProductLogUpdate(msrest.serialization.Model):
"""Update details for product log.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar operation: Operation to log.
:vartype operation: str
:ivar status: Operation status to log.
:vartype status: str
:ivar error: Error related to the operation.
:vartype error: str
:ivar details: Error details related to operation.
:vartype details: str
"""
_validation = {
'operation': {'readonly': True},
'status': {'readonly': True},
'error': {'readonly': True},
'details': {'readonly': True},
}
_attribute_map = {
'operation': {'key': 'operation', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'error': {'key': 'error', 'type': 'str'},
'details': {'key': 'details', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MarketplaceProductLogUpdate, self).__init__(**kwargs)
self.operation = None
self.status = None
self.error = None
self.details = None
class Operation(msrest.serialization.Model):
"""Describes the supported REST operation.
:param name: The name of the operation being performed on this particular object.
:type name: str
:param display: Contains the localized display information for this particular operation or
action.
:type display: ~azure.mgmt.azurestack.models.Display
:param origin: The intended executor of the operation.
:type origin: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display': {'key': 'display', 'type': 'Display'},
'origin': {'key': 'origin', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Operation, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.display = kwargs.get('display', None)
self.origin = kwargs.get('origin', None)
class OperationList(msrest.serialization.Model):
"""List of Operations.
:param value: Array of operations.
:type value: list[~azure.mgmt.azurestack.models.Operation]
:param next_link: URI to the next page of operations.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[Operation]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(OperationList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class OsDiskImage(msrest.serialization.Model):
"""OS disk image.
Variables are only populated by the server, and will be ignored when sending a request.
    :ivar operating_system: The operating system type of the disk image. Possible values include:
     "None", "Windows", "Linux".
:vartype operating_system: str or ~azure.mgmt.azurestack.models.OperatingSystem
:ivar source_blob_sas_uri: SAS key for source blob.
:vartype source_blob_sas_uri: str
"""
_validation = {
'operating_system': {'readonly': True},
'source_blob_sas_uri': {'readonly': True},
}
_attribute_map = {
'operating_system': {'key': 'operatingSystem', 'type': 'str'},
'source_blob_sas_uri': {'key': 'sourceBlobSasUri', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(OsDiskImage, self).__init__(**kwargs)
self.operating_system = None
self.source_blob_sas_uri = None
class Product(Resource):
"""Product information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: ID of the resource.
:vartype id: str
:ivar name: Name of the resource.
:vartype name: str
:ivar type: Type of Resource.
:vartype type: str
:param etag: The entity tag used for optimistic concurrency when modifying the resource.
:type etag: str
:param display_name: The display name of the product.
:type display_name: str
:param description: The description of the product.
:type description: str
:param publisher_display_name: The user-friendly name of the product publisher.
:type publisher_display_name: str
:param publisher_identifier: Publisher identifier.
:type publisher_identifier: str
:param offer: The offer representing the product.
:type offer: str
:param offer_version: The version of the product offer.
:type offer_version: str
:param sku: The product SKU.
:type sku: str
:param billing_part_number: The part number used for billing purposes.
:type billing_part_number: str
:param vm_extension_type: The type of the Virtual Machine Extension.
:type vm_extension_type: str
:param gallery_item_identity: The identifier of the gallery item corresponding to the product.
:type gallery_item_identity: str
:param icon_uris: Additional links available for this product.
:type icon_uris: ~azure.mgmt.azurestack.models.IconUris
:param links: Additional links available for this product.
:type links: list[~azure.mgmt.azurestack.models.ProductLink]
:param legal_terms: The legal terms.
:type legal_terms: str
:param privacy_policy: The privacy policy.
:type privacy_policy: str
:param payload_length: The length of product content.
:type payload_length: long
:param product_kind: The kind of the product (virtualMachine or virtualMachineExtension).
:type product_kind: str
:param product_properties: Additional properties for the product.
:type product_properties: ~azure.mgmt.azurestack.models.ProductProperties
:param compatibility: Product compatibility with current device.
:type compatibility: ~azure.mgmt.azurestack.models.Compatibility
"""
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'display_name': {'key': 'properties.displayName', 'type': 'str'},
        'description': {'key': 'properties.description', 'type': 'str'},
        'publisher_display_name': {'key': 'properties.publisherDisplayName', 'type': 'str'},
        'publisher_identifier': {'key': 'properties.publisherIdentifier', 'type': 'str'},
        'offer': {'key': 'properties.offer', 'type': 'str'},
        'offer_version': {'key': 'properties.offerVersion', 'type': 'str'},
        'sku': {'key': 'properties.sku', 'type': 'str'},
        'billing_part_number': {'key': 'properties.billingPartNumber', 'type': 'str'},
        'vm_extension_type': {'key': 'properties.vmExtensionType', 'type': 'str'},
        'gallery_item_identity': {'key': 'properties.galleryItemIdentity', 'type': 'str'},
        'icon_uris': {'key': 'properties.iconUris', 'type': 'IconUris'},
        'links': {'key': 'properties.links', 'type': '[ProductLink]'},
        'legal_terms': {'key': 'properties.legalTerms', 'type': 'str'},
        'privacy_policy': {'key': 'properties.privacyPolicy', 'type': 'str'},
        'payload_length': {'key': 'properties.payloadLength', 'type': 'long'},
        'product_kind': {'key': 'properties.productKind', 'type': 'str'},
        'product_properties': {'key': 'properties.productProperties', 'type': 'ProductProperties'},
        'compatibility': {'key': 'properties.compatibility', 'type': 'Compatibility'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(Product, self).__init__(**kwargs)
        self.display_name = kwargs.get('display_name', None)
        self.description = kwargs.get('description', None)
        self.publisher_display_name = kwargs.get('publisher_display_name', None)
        self.publisher_identifier = kwargs.get('publisher_identifier', None)
        self.offer = kwargs.get('offer', None)
        self.offer_version = kwargs.get('offer_version', None)
        self.sku = kwargs.get('sku', None)
        self.billing_part_number = kwargs.get('billing_part_number', None)
        self.vm_extension_type = kwargs.get('vm_extension_type', None)
        self.gallery_item_identity = kwargs.get('gallery_item_identity', None)
        self.icon_uris = kwargs.get('icon_uris', None)
        self.links = kwargs.get('links', None)
        self.legal_terms = kwargs.get('legal_terms', None)
        self.privacy_policy = kwargs.get('privacy_policy', None)
        self.payload_length = kwargs.get('payload_length', None)
        self.product_kind = kwargs.get('product_kind', None)
        self.product_properties = kwargs.get('product_properties', None)
        self.compatibility = kwargs.get('compatibility', None)
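
# Usage sketch (illustrative only): 'id', 'name' and 'type' are marked
# readonly in _validation and are populated by the service, while the
# remaining Product attributes are plain optional keyword arguments, e.g.:
#
#     product = Product(display_name="Ubuntu Server", product_kind="virtualMachine")
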
class ProductLink(msrest.serialization.Model):
    """Link with additional information about a product.

    :param display_name: The description of the link.
    :type display_name: str
    :param uri: The URI corresponding to the link.
    :type uri: str
    """

    _attribute_map = {
        'display_name': {'key': 'displayName', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(ProductLink, self).__init__(**kwargs)
        self.display_name = kwargs.get('display_name', None)
        self.uri = kwargs.get('uri', None)


class ProductList(msrest.serialization.Model):
"""Pageable list of products.
:param next_link: URI to the next page.
:type next_link: str
:param value: List of products.
:type value: list[~azure.mgmt.azurestack.models.Product]
"""
_attribute_map = {
'next_link': {'key': 'nextLink', 'type': 'str'},
'value': {'key': 'value', 'type': '[Product]'},
}
def __init__(
self,
**kwargs
):
super(ProductList, self).__init__(**kwargs)
self.next_link = kwargs.get('next_link', None)
self.value = kwargs.get('value', None)
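
# Note (assumption about typical usage, not part of this file's contract):
# ProductList is the wire format for a single page of results; the generated
# operation clients normally wrap it in a pager that follows 'next_link', so
# callers rarely need to construct it directly.
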
class ProductLog(msrest.serialization.Model):
    """Product action log.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Log ID.
    :vartype id: str
    :ivar product_id: Logged product ID.
    :vartype product_id: str
    :ivar subscription_id: Logged subscription ID.
    :vartype subscription_id: str
    :ivar registration_name: Logged registration name.
    :vartype registration_name: str
    :ivar resource_group_name: Logged resource group name.
    :vartype resource_group_name: str
    :ivar operation: Logged operation.
    :vartype operation: str
    :ivar start_date: Operation start datetime.
    :vartype start_date: str
    :ivar end_date: Operation end datetime.
    :vartype end_date: str
    :ivar status: Operation status.
    :vartype status: str
    :ivar error: Operation error data.
    :vartype error: str
    :ivar details: Operation error details.
    :vartype details: str
    """

    _validation = {
        'id': {'readonly': True},
        'product_id': {'readonly': True},
        'subscription_id': {'readonly': True},
        'registration_name': {'readonly': True},
        'resource_group_name': {'readonly': True},
        'operation': {'readonly': True},
        'start_date': {'readonly': True},
        'end_date': {'readonly': True},
        'status': {'readonly': True},
        'error': {'readonly': True},
        'details': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'product_id': {'key': 'productId', 'type': 'str'},
        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
        'registration_name': {'key': 'registrationName', 'type': 'str'},
        'resource_group_name': {'key': 'resourceGroupName', 'type': 'str'},
        'operation': {'key': 'operation', 'type': 'str'},
        'start_date': {'key': 'startDate', 'type': 'str'},
        'end_date': {'key': 'endDate', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'error': {'key': 'error', 'type': 'str'},
        'details': {'key': 'details', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(ProductLog, self).__init__(**kwargs)
        self.id = None
        self.product_id = None
        self.subscription_id = None
        self.registration_name = None
        self.resource_group_name = None
        self.operation = None
        self.start_date = None
        self.end_date = None
        self.status = None
        self.error = None
        self.details = None
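
# Every ProductLog attribute is marked readonly, so __init__ sets each field
# to None and ignores incoming keyword values; the fields are filled in only
# when a server response is deserialized onto the model.
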
class ProductProperties(msrest.serialization.Model):
    """Additional properties of the product.

    :param version: The version.
    :type version: str
    """

    _attribute_map = {
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(ProductProperties, self).__init__(**kwargs)
        self.version = kwargs.get('version', None)


class TrackedResource(msrest.serialization.Model):
"""Base resource object.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: ID of the resource.
:vartype id: str
:ivar name: Name of the resource.
:vartype name: str
:ivar type: Type of Resource.
:vartype type: str
:param location: Required. Location of the resource. Possible values include: "global".
:type location: str or ~azure.mgmt.azurestack.models.Location
:param tags: A set of tags. Custom tags for the resource.
:type tags: dict[str, str]
:param etag: The entity tag used for optimistic concurrency when modifying the resource.
:type etag: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(TrackedResource, self).__init__(**kwargs)
self.id = None
self.name = None
self.type = None
self.location = kwargs['location']
self.tags = kwargs.get('tags', None)
self.etag = kwargs.get('etag', None)
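
# Note: 'location' is required (see _validation), and __init__ indexes kwargs
# directly for it, so omitting 'location' raises KeyError at construction time.
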
class Registration(TrackedResource):
    """Registration information.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: ID of the resource.
    :vartype id: str
    :ivar name: Name of the resource.
    :vartype name: str
    :ivar type: Type of Resource.
    :vartype type: str
    :param location: Required. Location of the resource. Possible values include: "global".
    :type location: str or ~azure.mgmt.azurestack.models.Location
    :param tags: A set of tags. Custom tags for the resource.
    :type tags: dict[str, str]
    :param etag: The entity tag used for optimistic concurrency when modifying the resource.
    :type etag: str
    :param object_id: The object identifier associated with the Azure Stack connecting to Azure.
    :type object_id: str
    :param cloud_id: The identifier of the registered Azure Stack.
    :type cloud_id: str
    :param billing_model: Specifies the billing mode for the Azure Stack registration.
    :type billing_model: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'etag': {'key': 'etag', 'type': 'str'},
        'object_id': {'key': 'properties.objectId', 'type': 'str'},
        'cloud_id': {'key': 'properties.cloudId', 'type': 'str'},
        'billing_model': {'key': 'properties.billingModel', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(Registration, self).__init__(**kwargs)
        self.object_id = kwargs.get('object_id', None)
        self.cloud_id = kwargs.get('cloud_id', None)
        self.billing_model = kwargs.get('billing_model', None)
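
# Note: Registration extends TrackedResource, inheriting the required
# 'location' plus optional 'tags' and 'etag'; its registration-specific
# fields map to flattened 'properties.*' keys in the wire format (see
# _attribute_map above).
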
class RegistrationList(msrest.serialization.Model):
    """Pageable list of registrations.

    :param next_link: URI to the next page.
    :type next_link: str
    :param value: List of Registrations.
    :type value: list[~azure.mgmt.azurestack.models.Registration]
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[Registration]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(RegistrationList, self).__init__(**kwargs)
        self.next_link = kwargs.get('next_link', None)
        self.value = kwargs.get('value', None)


class RegistrationParameter(msrest.serialization.Model):
"""Registration resource.
All required parameters must be populated in order to send to Azure.
:param location: Required. Location of the resource. Possible values include: "global".
:type location: str or ~azure.mgmt.azurestack.models.Location
:param registration_token: Required. The token identifying registered Azure Stack.
:type registration_token: str
"""
_validation = {
'location': {'required': True},
'registration_token': {'required': True},
}
_attribute_map = {
'location': {'key': 'location', 'type': 'str'},
'registration_token': {'key': 'properties.registrationToken', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RegistrationParameter, self).__init__(**kwargs)
self.location = kwargs['location']
self.registration_token = kwargs['registration_token']
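
# Usage sketch (illustrative only; the token value is a placeholder):
# RegistrationParameter is the request payload for creating or updating a
# registration, while Registration above is the tracked resource the service
# returns. Both required fields must be supplied:
#
#     params = RegistrationParameter(
#         location="global",
#         registration_token="<registration-token>",
#     )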