sentence1 stringlengths 52 3.87M | sentence2 stringlengths 1 47.2k | label stringclasses 1 value |
|---|---|---|
def decode_bu64(b):
    """Decode bytes from the URL safe flavor of Base64 used by JWTs.
    - Reverse of encode_bu64().
    Args:
      b: bytes
        URL safe Base64 encoded bytes to decode.
    Returns:
      bytes: Decoded bytes.
    Raises:
      ValueError: If the input length is not valid for Base64 (remainder 1).
    """
    # Map the URL safe alphabet back to the standard Base64 alphabet.
    s = b.replace(b'-', b'+').replace(b'_', b'/')
    # Restore the padding that the URL safe flavor strips.
    p = len(s) % 4
    if p == 2:
        s += b'=='
    elif p == 3:
        s += b'='
    elif p != 0:
        raise ValueError('Illegal Base64url string')
    return base64.standard_b64decode(s)
- Reverse of encode_bu64().
Args:
b: bytes
URL safe Base64 encoded bytes to encode.
Returns:
bytes: Decoded bytes. | entailment |
def evaluate(self, data):
    """Evaluate the code needed to compute a given Data object.
    Args:
        data: Data object whose process script should be evaluated.
    Returns:
        str: The process script, with expressions evaluated if the process
        declares an expression engine.
    Raises:
        ExecutionError: If expression evaluation fails.
    """
    try:
        inputs = copy.deepcopy(data.input)
        hydrate_input_references(inputs, data.process.input_schema)
        hydrate_input_uploads(inputs, data.process.input_schema)
        # Include special 'proc' variable in the context.
        inputs['proc'] = {
            'data_id': data.id,
            'data_dir': self.manager.get_executor().resolve_data_path(),
        }
        # Include special 'requirements' variable in the context. Deep copy
        # it so that injecting the resource limits below does not mutate the
        # shared process requirements object.
        inputs['requirements'] = copy.deepcopy(data.process.requirements)
        # Inject default values and change resources according to
        # the current Django configuration.
        inputs['requirements']['resources'] = data.process.get_resource_limits()
        script_template = data.process.run.get('program', '')
        # Get the appropriate expression engine. If none is defined, do not evaluate
        # any expressions.
        expression_engine = data.process.requirements.get('expression-engine', None)
        if not expression_engine:
            return script_template
        return self.get_expression_engine(expression_engine).evaluate_block(
            script_template, inputs,
            escape=self._escape,
            safe_wrapper=SafeString,
        )
    except EvaluationError as error:
        # Chain the original exception to preserve its traceback.
        raise ExecutionError('{}'.format(error)) from error
def _escape(self, value):
    """Shell-quote ``value``, passing SafeString instances through unchanged."""
    return value if isinstance(value, SafeString) else shellescape.quote(value)
def open_sciobj_file_by_pid_ctx(pid, write=False):
    """Open the file holding the Science Object bytes of ``pid`` at its default
    location within the tree of the local SciObj store.
    If ``write`` is True, the file is opened for writing and any missing
    directories are created. Return the file handle and file_url with the file
    location in a suitable form for storing in the DB.
    If nothing was written to the file, it is deleted.
    """
    sciobj_path = get_abs_sciobj_file_path_by_pid(pid)
    # Delegate open/cleanup semantics to the path based context manager.
    with open_sciobj_file_by_path_ctx(sciobj_path, write) as sciobj_file:
        yield sciobj_file
location within the tree of the local SciObj store.
If ``write`` is True, the file is opened for writing and any missing directories are
created. Return the file handle and file_url with the file location in a suitable
form for storing in the DB.
If nothing was written to the file, it is deleted. | entailment |
def open_sciobj_file_by_path_ctx(abs_path, write=False):
    """Open the file containing the Science Object bytes at the custom location
    ``abs_path`` in the local filesystem.
    If ``write`` is True, the file is opened for writing and any missing directories
    are created. Return the file handle and file_url with the file location in a
    suitable form for storing in the DB.
    If nothing was written to the file, delete it. Files opened read-only are
    never deleted.
    """
    if write:
        d1_common.utils.filesystem.create_missing_directories_for_file(abs_path)
    try:
        with open(abs_path, 'wb' if write else 'rb') as sciobj_file:
            yield sciobj_file
    finally:
        # Only clean up files this context manager may have created. Deleting
        # on read would destroy pre-existing zero-byte objects.
        if write and os.path.exists(abs_path) and not os.path.getsize(abs_path):
            os.unlink(abs_path)
``abs_path`` in the local filesystem.
If ``write`` is True, the file is opened for writing and any missing directores are
created. Return the file handle and file_url with the file location in a suitable
form for storing in the DB.
If nothing was written to the file, delete it. | entailment |
def open_sciobj_file_by_pid(pid, write=False):
    """Open the file containing the Science Object bytes of ``pid`` at its
    default location in the local SciObj store, for read or write.
    If ``write`` is True, any missing directories are created and the file is
    opened for writing; otherwise it is opened for reading.
    """
    abs_path = get_abs_sciobj_file_path_by_pid(pid)
    if write:
        d1_common.utils.filesystem.create_missing_directories_for_file(abs_path)
    return open_sciobj_file_by_path(abs_path, write)
``abs_path`` in the local filesystem for read. | entailment |
def open_sciobj_file_by_path(abs_path, write=False):
    """Open a SciObj file for read or write, creating any missing directories
    when opening for write.
    For a SciObj stored in the default SciObj store, the path includes the PID
    hash based directory levels.
    This is the only method in GMN that opens SciObj files, so it can be
    modified to customize the SciObj storage locations and can be mocked for
    testing.
    Note that when a SciObj is created by a client via MNStorage.create(),
    Django streams the SciObj bytes to a temporary file or memory location as
    set by ``FILE_UPLOAD_TEMP_DIR`` and related settings.
    """
    mode_str = 'wb' if write else 'rb'
    if write:
        d1_common.utils.filesystem.create_missing_directories_for_file(abs_path)
    return open(abs_path, mode_str)
directories. For a SciObj stored in the default SciObj store, the path includes the
PID hash based directory levels.
This is the only method in GMN that opens SciObj files, so can be modified to
customize the SciObj storage locations and can be mocked for testing.
Note that when a SciObj is created by a client via MNStorage.create(), Django
streams the SciObj bytes to a temporary file or memory location as set by
``FILE_UPLOAD_TEMP_DIR`` and related settings. | entailment |
def get_rel_sciobj_file_path(pid):
    """Return the relative local path to the file holding an object's bytes.
    - The path is relative to settings.OBJECT_STORE_PATH
    - There is a one-to-one mapping between pid and path
    - The path is based on a SHA1 hash. It's now possible to craft SHA1 collisions, but
      it's so unlikely that we ignore it for now
    - The path may or may not exist (yet).
    """
    pid_hash = hashlib.sha1(pid.encode('utf-8')).hexdigest()
    # Two levels of fan-out directories keep directory entry counts manageable.
    level_one = pid_hash[:2]
    level_two = pid_hash[2:4]
    return os.path.join(level_one, level_two, pid_hash)
- The path is relative to settings.OBJECT_STORE_PATH
- There is a one-to-one mapping between pid and path
- The path is based on a SHA1 hash. It's now possible to craft SHA1 collisions, but
it's so unlikely that we ignore it for now
- The path may or may not exist (yet). | entailment |
def get_abs_sciobj_file_path_by_url(file_url):
    """Get the absolute path to the file holding an object's bytes.
    - ``file_url`` is an absolute or relative file:// url as stored in the DB.
    - Relative URLs use RELATIVE_PATH_MAGIC_HOST_STR as the host part and are
      resolved against the root of the local SciObj store.
    """
    assert_sciobj_store_exists()
    # Split the URL into a host part (group 1) and a path part (group 2).
    m = re.match(r'file://(.*?)/(.*)', file_url, re.IGNORECASE)
    if m.group(1) == RELATIVE_PATH_MAGIC_HOST_STR:
        return os.path.join(get_abs_sciobj_store_path(), m.group(2))
    # NOTE(review): for a URL like file:///a/b, group(2) is 'a/b', which is NOT
    # absolute, so this assert would fail. Presumably absolute URLs stored in
    # the DB carry an extra leading slash in the path part -- TODO confirm.
    assert os.path.isabs(m.group(2))
    return m.group(2)
- ``file_url`` is an absolute or relative file:// url as stored in the DB. | entailment |
def get_gmn_version(base_url):
    """Return the version currently running on a GMN instance.
    Returns:
        tuple: (is_gmn, version_or_error) where ``is_gmn`` is a bool and the
        second element is the version string on success or an error message.
    """
    home_url = d1_common.url.joinPathElements(base_url, 'home')
    try:
        response = requests.get(home_url, verify=False)
    except requests.exceptions.ConnectionError as e:
        return False, str(e)
    if not response.ok:
        return False, 'invalid /home. status={}'.format(response.status_code)
    soup = bs4.BeautifulSoup(response.content, 'html.parser')
    # Guard each step of the lookup chain; a non-GMN page will not have the
    # "GMN version:" label and the original unguarded chain raised
    # AttributeError instead of returning a parse error.
    version_label = soup.find(string='GMN version:')
    if version_label is None:
        return False, 'Parse failed'
    version_cell = version_label.find_next('td')
    if version_cell is None or version_cell.string is None:
        return False, 'Parse failed'
    return True, version_cell.string
(is_gmn, version_or_error) | entailment |
def extract_subjects(subject_info_xml, primary_str):
    """Extract a set of authenticated subjects from a DataONE SubjectInfo.
    - See subject_info_tree for details.
    Args:
      subject_info_xml : str
        A SubjectInfo XML document.
      primary_str : str
        A DataONE subject, typically a DataONE compliant serialization of the DN of
        the DataONE X.509 v3 certificate extension from which the SubjectInfo was
        extracted.
        The primary subject can be viewed as the root of a tree. Any subject in the
        SubjectInfo that is directly or indirectly connected to the root subject is
        included in the returned set of authenticated subjects.
    Returns:
      set: Set of authenticated subjects. Will always include the primary subject.
    - All subjects in the returned set are equivalent to ``primary_str`` for the
      purpose of access control for private science objects.
    - If SubjectInfo does not contain all relevant records, it is still considered
      to be valid, but the authenticated set will be incomplete.
    - Only the subject strings and relationships in SubjectInfo are used by this
      function. Other information about subjects, such as name and email address,
      is ignored.
    - No attempt should be made to infer type of subject from the content of a
      subject string. Subject strings should be handled as random Unicode
      sequences, each of which may designate a person subject, an equivalent
      subject, or a group subject.
    - To determine if an action is authorized, the returned set is checked against
      the authorized_set for a given object. If one or more subjects exist in both
      sets, the action is authorized. The check can be performed with high
      performance using a set intersection operation in Python or an inner join in
      Postgres.
    - Subject types are only known and relevant while processing the SubjectInfo
      type.
    - The type of each subject in the authenticated_subjects and allowed_subjects
      lists are unknown and irrelevant.
    Notes:
      Procedure:
      The set of authenticated subjects is generated from the SubjectInfo and primary
      subject using the following procedure:
      - Start with empty set of subjects
      - Add authenticatedUser
      - If ``subject`` is not in set of subjects:
        - Add ``subject``
        - Iterate over Person records
          - If Person.subject is ``subject``:
            - If Person.verified is present and set:
              - Add "verifiedUser"
            - Iterate over Person.equivalentIdentity:
              - Recursively add those subjects
            - Iterate over Person.isMemberOf
              - Recursively add those subjects, but ONLY check Group subjects
        - Iterate over Group records
          - If any Group.hasMember is ``subject``:
            - Recursively add Group.subject (not group members)
      Handling of various invalid SubjectInfo and corner cases:
      - SubjectInfo XML doc that is not well formed
        - Return an exception that includes a useful error message with the line number
          of the issue
      - person.isMemberOf and group.hasMember should always form pairs referencing
        each other.
        - One side of the pair is missing
          - Process the available side as normal
      - person.isMemberOf subject references a person or equivalent instead of a
        group
        - Only Group subjects are searched for isMemberOf references, so only the
          referenced Group subject is added to the list of authorized subjects
      - Multiple Person or Group records conflict by using the same subject
        - The records are handled as equivalents
      - person.isMemberOf subject does not reference a known subject
        - If the Person containing the dangling isMemberOf IS NOT connected with the
          authenticated subject, the whole record, including the isMemberOf subject is
          simply ignored
        - If it IS connected with an authenticated subject, the isMemberOf subject is
          authenticated and recursive processing of the subject is skipped
      - Circular references
        - Handled by skipping recursive add for subjects that are already added
      - See the unit tests for example SubjectInfo XML documents for each of these
        issues and the expected results.
    """
    subject_info_pyxb = deserialize_subject_info(subject_info_xml)
    subject_info_tree = gen_subject_info_tree(subject_info_pyxb, primary_str)
    return subject_info_tree.get_subject_set()
- See subject_info_tree for details.
Args:
subject_info_xml : str
A SubjectInfo XML document.
primary_str : str
A DataONE subject, typically a DataONE compliant serialization of the DN of
the DataONE X.509 v3 certificate extension from which the SubjectInfo was
extracted.
The primary subject can be viewed as the root of a tree. Any subject in the
SubjectInfo that is directly or indirectly connected to the root subject is
included in the returned set of authenticated subjects.
Returns:
set: Set of authenticated subjects. Will always include the primary subject.
- All subjects in the returned set are equivalent to ``primary_str`` for the
purpose of access control for private science objects.
- If SubjectInfo does not contain all relevant records, it is still considered
to be valid, but the authenticated set will be incomplete.
- Only the subject strings and relationships in SubjectInfo are used by this
function. Other information about subjects, such as name and email address,
is ignored.
- No attempt should be made to infer type of subject from the content of a
subject string. Subject strings should be handled as random Unicode
sequences, each of which may designate an person subject, an equivalent
subject, or a group subject.
- To determine if an action is authorized, the returned set is checked against
the authorized_set for a given object. If one or more subjects exist in both
sets, the action is authorized. The check can be performed with high
performance using a set union operation in Python or an inner join in
Postgres.
- Subject types are only known and relevant while processing the SubjectInfo
type.
- The type of each subject in the authenticated_subjects and allowed_subjects
lists are unknown and irrelevant.
Notes:
Procedure:
The set of authenticated subjects is generated from the SubjectInfo and primary
subject using the following procedure:
- Start with empty set of subjects
- Add authenticatedUser
- If ``subject`` is not in set of subjects:
- Add ``subject``
- Iterate over Person records
- If Person.subject is ``subject``:
- If Person.verified is present and set:
- Add "verifiedUser"
- Iterate over Person.equivalentIdentity:
- Recursively add those subjects
- Iterate over Person.isMemberOf
- Recursively add those subjects, but ONLY check Group subjects
- Iterate over Group records
- If any Group.hasMember is ``subject``:
- Recursively add Group.subject (not group members)
Handling of various invalid SubjectInfo and corner cases:
- SubjectInfo XML doc that is not well formed
- Return an exception that includes a useful error message with the line number
of the issue
- person.isMemberOf and group.hasMember should always form pairs referencing
each other.
- One side of the pair is missing
- Process the available side as normal
- person.isMemberOf subject references a person or equivalent instead of a
group
- Only Group subjects are searched for isMemberOf references, so only the
referenced Group subject is added to the list of authorized subjects
- Multiple Person or Group records conflict by using the same subject
- The records are handled as equivalents
- person.isMemberOf subject does not reference a known subject
- If the Person containing the dangling isMemberOf IS NOT connected with the
authenticated subject, the whole record, including the isMemberOf subject is
simply ignored
- If it IS connected with an authenticated subject, the isMemberOf subject is
authenticated and recursive processing of the subject is skipped
- Circular references
- Handled by skipping recursive add for subjects that are already added
- See the unit tests for example SubjectInfo XML documents for each of these
issues and the expected results. | entailment |
def deserialize_subject_info(subject_info_xml):
    """Deserialize SubjectInfo XML doc to native object.
    Args:
      subject_info_xml: str
        SubjectInfo XML doc
    Returns:
      SubjectInfo PyXB object
    Raises:
      d1_common.types.exceptions.InvalidToken: If the document cannot be
      deserialized.
    """
    try:
        return d1_common.xml.deserialize(subject_info_xml)
    except ValueError as e:
        # Chain the original exception so the parse failure's traceback is
        # preserved for debugging.
        raise d1_common.types.exceptions.InvalidToken(
            0,
            'Could not deserialize SubjectInfo. subject_info="{}", error="{}"'.format(
                subject_info_xml, str(e)
            ),
        ) from e
Args:
subject_info_xml: str
SubjectInfo XML doc
Returns:
SubjectInfo PyXB object | entailment |
def gen_subject_info_tree(subject_info_pyxb, authn_subj, include_duplicates=False):
    """Convert the flat, self referential lists in the SubjectInfo to a tree structure.
    Args:
      subject_info_pyxb: SubjectInfo PyXB object
      authn_subj: str
        The authenticated subject that becomes the root subject in the tree of
        subjects built from the SubjectInfo.
        Only subjects that are authenticated by a direct or indirect connection to
        this subject are included in the tree.
      include_duplicates:
        Include branches of the tree that contain subjects that have already been
        included via other branches.
        If the tree is intended for rendering, including the duplicates will
        provide a more complete view of the SubjectInfo.
    Returns:
      SubjectInfoNode : Tree of nodes holding information about subjects that are
      directly or indirectly connected to the authenticated subject in the root.
    """
    class State:
        """Mutable bag of state shared by the recursive tree builder."""
        pass
    state = State()
    state.subject_info_pyxb = subject_info_pyxb
    state.include_duplicates = include_duplicates
    # Tracks subjects already added, to break circular references.
    state.visited_set = set()
    state.tree = SubjectInfoNode("Root", TYPE_NODE_TAG)
    _add_subject(state, state.tree, authn_subj)
    # The symbolic branch holds the symbolic "authenticatedUser" subject.
    symbolic_node = state.tree.add_child("Symbolic", TYPE_NODE_TAG)
    _add_subject(state, symbolic_node, d1_common.const.SUBJECT_AUTHENTICATED)
    # Remove empty scaffolding nodes that received no connected subjects.
    _trim_tree(state)
    return state.tree
Args:
subject_info_pyxb: SubjectInfo PyXB object
authn_subj: str
The authenticated subject that becomes the root subject in the tree of
subjects built from the SubjectInfo.
Only subjects that are authenticated by a direct or indirect connection to
this subject are included in the tree.
include_duplicates:
Include branches of the tree that contain subjects that have already been
included via other branches.
If the tree is intended for rendering, including the duplicates will
provide a more complete view of the SubjectInfo.
Returns:
SubjectInfoNode : Tree of nodes holding information about subjects that are
directly or indirectly connected to the authenticated subject in the root. | entailment |
def _trim_tree(state):
    """Trim empty leaf nodes from the tree.
    - To simplify the tree conversion, empty nodes are added before it is known if they
      will contain items that connect back to the authenticated subject. If there are
      no connections, the nodes remain empty, which causes them to be removed here.
    - Removing a leaf node may cause the parent to become a new empty leaf node, so
      passes are repeated until no more empty leaf nodes remain.
    """
    # Iterate to a fixpoint instead of restarting a full recursion after each
    # single removal; the recursive version re-scanned all leaves per removal
    # and could exhaust the recursion limit on large trees.
    trimmed = True
    while trimmed:
        trimmed = False
        for node in list(state.tree.leaf_node_gen):
            if node.type_str == TYPE_NODE_TAG:
                node.parent.child_list.remove(node)
                trimmed = True
- To simplify the tree conversion, empty nodes are added before it is known if they
will contain items that connect back to the authenticated subject. If there are
no connections, the nodes remain empty, which causes them to be removed here.
- Removing a leaf node may cause the parent to become a new empty leaf node, so the
function is repeated until there are no more empty leaf nodes. | entailment |
def add_child(self, label_str, type_str):
    """Create a new child node under this node and return it."""
    node = SubjectInfoNode(label_str, type_str)
    self.child_list.append(node)
    node.parent = self
    return node
def get_path_str(self, sep=os.path.sep, type_str=None):
    """Return the path from the root node to this node as a single string.
    Args:
      sep: str
        One or more characters to insert between each element in the path.
        Defaults to the platform path separator (``os.path.sep``).
      type_str:
        SUBJECT_NODE_TAG, TYPE_NODE_TAG or None. If set, only include
        information from nodes of that type.
    Returns:
      str: String describing the path from the root to this node.
    """
    # Reuse the list form of the path and join it into a string.
    return sep.join(self.get_path_list(type_str))
Args:
sep: str
One or more characters to insert between each element in the path.
Defaults to "/" on Unix and "\" on Windows.
type_str:
SUBJECT_NODE_TAG, TYPE_NODE_TAG or None. If set, only include
information from nodes of that type.
Returns:
str: String describing the path from the root to this node. | entailment |
def get_leaf_node_path_list(self, sep=os.path.sep, type_str=None):
    """Return the path strings for all leaf nodes of the tree rooted here.
    Args:
      sep: str
        One or more characters to insert between each element in the path.
        Defaults to the platform path separator (``os.path.sep``).
      type_str:
        SUBJECT_NODE_TAG, TYPE_NODE_TAG or None. If set, only include
        information from nodes of that type.
    Returns:
      list of str: The paths to the leaf nodes for the tree rooted at this node.
    """
    path_list = []
    for leaf_node in self.leaf_node_gen:
        path_list.append(leaf_node.get_path_str(sep, type_str))
    return path_list
Args:
sep: str
One or more characters to insert between each element in the path.
Defaults to "/" on Unix and "\" on Windows.
type_str:
SUBJECT_NODE_TAG, TYPE_NODE_TAG or None. If set, only include
information from nodes of that type.
Returns:
list of str: The paths to the leaf nodes for the tree rooted at this node. | entailment |
def get_path_list(self, type_str=None):
    """Return the labels of the nodes on the path from the root to this node.
    Args:
      type_str:
        SUBJECT_NODE_TAG, TYPE_NODE_TAG or None. If set, only include
        information from nodes of that type.
    Returns:
      list of str: The labels of the nodes leading up to this node from the root.
    """
    # parent_gen walks from this node towards the root, so reverse the result.
    label_list = [
        node.label_str
        for node in self.parent_gen
        if type_str is None or node.type_str == type_str
    ]
    label_list.reverse()
    return label_list
Args:
type_str:
SUBJECT_NODE_TAG, TYPE_NODE_TAG or None. If set, only include
information from nodes of that type.
Returns:
list of str: The labels of the nodes leading up to this node from the root. | entailment |
def get_label_set(self, type_str=None):
    """Get a set of label_str for the tree rooted at this node.
    Args:
      type_str:
        SUBJECT_NODE_TAG, TYPE_NODE_TAG or None. If set, only include
        information from nodes of that type.
    Returns:
      set: The labels of all nodes in the tree rooted at this node.
    """
    # node_gen yields this node and all its descendants.
    return {v.label_str for v in self.node_gen if type_str in (None, v.type_str)}
Args:
type_str:
SUBJECT_NODE_TAG, TYPE_NODE_TAG or None. If set, only include
information from nodes of that type.
Returns:
set: The labels of the nodes leading up to this node from the root. | entailment |
def start_task_type(self, task_type_str, total_task_count):
    """Call when about to start processing a new type of task, typically just
    before entering a loop that processes many tasks of the given type.
    Args:
      task_type_str (str):
        The name of the task, used as a dict key and printed in the progress
        updates.
      total_task_count (int):
        The total number of the new type of task that will be processed.
    This starts the timer used for providing an ETA for completing all tasks of
    the given type. The task type is included in progress updates until
    end_task_type() is called.
    """
    assert (
        task_type_str not in self._task_dict
    ), "Task type has already been started"
    task_info = {
        "start_time": time.time(),
        "total_task_count": total_task_count,
        "task_idx": 0,
    }
    self._task_dict[task_type_str] = task_info
entering a loop that processes many task of the given type.
Args:
task_type_str (str):
The name of the task, used as a dict key and printed in the progress
updates.
total_task_count (int):
The total number of the new type of task that will be processed.
This starts the timer that is used for providing an ETA for completing all tasks
of the given type.
The task type is included in progress updates until end_task_type() is called. | entailment |
def end_task_type(self, task_type_str):
    """Call when processing of all tasks of the given type has completed,
    typically just after exiting a loop that processes many tasks of that type.
    Interval-based progress messages will typically miss the final entry that
    shows processing is 100% complete, so a final progress message is logged
    here.
    """
    assert (
        task_type_str in self._task_dict
    ), "Task type has not been started yet: {}".format(task_type_str)
    self._log_progress()
    self._task_dict.pop(task_type_str)
just after exiting a loop that processes many tasks of the given type.
Progress messages logged at intervals will typically not include the final entry
which shows that processing is 100% complete, so a final progress message is
logged here. | entailment |
def start_task(self, task_type_str, current_task_index=None):
    """Call when processing is about to start on a single task of the given
    task type, typically at the top inside of the loop that processes the
    tasks.
    Args:
      task_type_str (str):
        The name of the task, used as a dict key and printed in the progress
        updates.
      current_task_index (int):
        If the task processing loop may skip or repeat tasks, the index of the
        current task must be provided here. This parameter can normally be
        left unset.
    """
    assert (
        task_type_str in self._task_dict
    ), "Task type has not been started yet: {}".format(task_type_str)
    task_info = self._task_dict[task_type_str]
    if current_task_index is None:
        task_info["task_idx"] += 1
    else:
        task_info["task_idx"] = current_task_index
    self._log_progress_if_interval_elapsed()
type, typically at the top inside of the loop that processes the tasks.
Args:
task_type_str (str):
The name of the task, used as a dict key and printed in the progress
updates.
current_task_index (int):
If the task processing loop may skip or repeat tasks, the index of the
current task must be provided here. This parameter can normally be left
unset. | entailment |
def event(self, event_name):
    """Register an event that occurred during processing of a task.
    Args:
      event_name: str
        A name for a type of event. Events of the same type are displayed as a
        single entry together with a total count of occurrences.
    """
    # Single lookup instead of setdefault followed by increment.
    self._event_dict[event_name] = self._event_dict.get(event_name, 0) + 1
    self._log_progress_if_interval_elapsed()
type.
Args: event_name: str A name for a type of events. Events of the
same type are displayed as a single entry and a total count of
occurences. | entailment |
def get_permissible_wcs(classname, f):
    r"""For some classes (in particular $\Delta F=1$), only a subset of Wilson
    coefficients exist in WET-3 and WET-4. Therefore, depending on the number
    of flavours `f`, the dimensionality of the ADM has to be reduced."""
    # Sectors whose operator basis shrinks below five flavours.
    problem_classes = {'cu', 'db', 'sb', 'sd', 'mue', 'mutau', 'taue', 'dF0'}
    if f == 5 or classname not in problem_classes:
        # For 5-flavour WET, nothing to do. Neither for other classes
        # (I, II, ...) because they exist either all or not at all in WET-3
        # and WET-4 (they have specific flavours).
        return 'all'
    if f not in (3, 4):
        raise ValueError("f must be 3, 4, or 5.")
    sector = 'dF=0' if classname == 'dF0' else classname
    perm_keys = wcxf.Basis['WET-{}'.format(f), 'JMS'].sectors[sector].keys()
    return [i for i, c in enumerate(coeffs[sector]) if c in perm_keys]
coefficients exist in WET-3 and WET-4. Therefore, depending on the number
of flavours `f`, the dimensionality of the ADM has to be reduced. | entailment |
def admeig(classname, f, m_u, m_d, m_s, m_c, m_b, m_e, m_mu, m_tau):
    """Compute the eigenvalues and eigenvectors for a QCD anomalous dimension
    matrix that is defined in `adm.adm_s_X` where X is the name of the sector.
    Supports memoization. Output analogous to `np.linalg.eig`."""
    adm_args = (f, m_u, m_d, m_s, m_c, m_b, m_e, m_mu, m_tau)
    adm_matrix = getattr(adm, 'adm_s_' + classname)(*adm_args)
    keep = get_permissible_wcs(classname, f)
    if keep != 'all':
        # Restrict to the operators that exist for this number of flavours.
        adm_matrix = adm_matrix[keep][:, keep]
    w, v = np.linalg.eig(adm_matrix.T)
    return w, v
matrix that is defined in `adm.adm_s_X` where X is the name of the sector.
Supports memoization. Output analogous to `np.linalg.eig`. | entailment |
def getUs(classname, eta_s, f, alpha_s, alpha_e, m_u, m_d, m_s, m_c, m_b, m_e, m_mu, m_tau):
    """Get the QCD evolution matrix."""
    w, v = admeig(classname, f, m_u, m_d, m_s, m_c, m_b, m_e, m_mu, m_tau)
    # Leading-order QCD beta function coefficient.
    b0s = 11 - 2 * f / 3
    exponents = w / (2 * b0s)
    return v @ np.diag(eta_s ** exponents) @ np.linalg.inv(v)
def getUe(classname, eta_s, f, alpha_s, alpha_e, m_u, m_d, m_s, m_c, m_b, m_e, m_mu, m_tau):
    """Get the QED contribution to the evolution matrix.
    Computed in the eigenbasis of the QCD anomalous dimension matrix and
    proportional to alpha_e / alpha_s.
    """
    args = f, m_u, m_d, m_s, m_c, m_b, m_e, m_mu, m_tau
    A = getattr(adm, 'adm_e_' + classname)(*args)
    perm_keys = get_permissible_wcs(classname, f)
    if perm_keys != 'all':
        # remove disallowed rows & columns if necessary
        A = A[perm_keys][:, perm_keys]
    w, v = admeig(classname, *args)
    b0s = 11 - 2 * f / 3
    a = w / (2 * b0s)
    K = np.linalg.inv(v) @ A.T @ v
    for i in range(K.shape[0]):
        for j in range(K.shape[1]):
            # The generic formula has a pole at a[i] - a[j] = 1; that case is
            # handled by its logarithmic limit.
            if a[i] - a[j] != 1:
                K[i, j] *= (eta_s**(a[j] + 1) - eta_s**a[i]) / (a[i] - a[j] - 1)
            else:
                K[i, j] *= eta_s**a[i] * log(1 / eta_s)
    return -alpha_e / (2 * b0s * alpha_s) * v @ K @ np.linalg.inv(v)
def run_sector(sector, C_in, eta_s, f, p_in, p_out, qed_order=1, qcd_order=1):
    r"""Solve the WET RGE for a specific sector.
    Parameters:
    - sector: sector of interest
    - C_in: dictionary of Wilson coefficients
    - eta_s: ratio of $\alpha_s$ at input and output scale
    - f: number of active quark flavours
    - p_in: running parameters at the input scale
    - p_out: running parameters at the output scale
    - qed_order: include the QED contribution to the running (0 or 1)
    - qcd_order: include the QCD contribution to the running (0 or 1)
    """
    # Validate the orders up front; previously an unsupported value surfaced
    # as a NameError on an unbound evolution matrix below.
    if qcd_order not in (0, 1):
        raise ValueError("qcd_order must be 0 or 1.")
    if qed_order not in (0, 1):
        raise ValueError("qed_order must be 0 or 1.")
    Cdictout = OrderedDict()
    classname = sectors[sector]
    keylist = coeffs[sector]
    if sector == 'dF=0':
        perm_keys = get_permissible_wcs('dF0', f)
    else:
        perm_keys = get_permissible_wcs(sector, f)
    if perm_keys != 'all':
        # remove disallowed keys if necessary
        keylist = np.asarray(keylist)[perm_keys]
    C_input = np.array([C_in.get(key, 0) for key in keylist])
    if np.count_nonzero(C_input) == 0 or classname == 'inv':
        # nothing to do for SM-like WCs or RG invariant operators
        C_result = C_input
    else:
        C_scaled = np.asarray([C_input[i] * scale_C(key, p_in) for i, key in enumerate(keylist)])
        if qcd_order == 0:
            Us = np.eye(len(C_scaled))
        else:
            Us = getUs(classname, eta_s, f, **p_in)
        if qed_order == 0:
            Ue = np.zeros(C_scaled.shape)
        elif qcd_order == 0:
            # Without QCD running, evaluate the QED evolution at eta_s = 1.
            Ue = getUe(classname, 1, f, **p_in)
        else:
            Ue = getUe(classname, eta_s, f, **p_in)
        C_out = (Us + Ue) @ C_scaled
        C_result = [C_out[i] / scale_C(key, p_out) for i, key in enumerate(keylist)]
    for j in range(len(C_result)):
        Cdictout[keylist[j]] = C_result[j]
    return Cdictout
Parameters:
- sector: sector of interest
- C_in: dictionary of Wilson coefficients
- eta_s: ratio of $\alpha_s$ at input and output scale
- f: number of active quark flavours
- p_in: running parameters at the input scale
- p_out: running parameters at the output scale | entailment |
def _merge_region_trees(self, dst_tree, src_tree, pid):
"""Merge conflicts occur if a folder in one tree is a file in the other.
As the files are PIDs, this can only happen if a PID matches one of the
geographical areas that the dataset covers and should be very rare. In such
conflicts, the destination wins.
"""
for k, v in list(src_tree.items()):
# Prepend an underscore to the administrative area names, to make them
# sort separately from the identifiers.
# k = '_' + k
if k not in dst_tree or dst_tree[k] is None:
dst_tree[k] = {}
dst_tree[k][pid] = None
if v is not None:
self._merge_region_trees(dst_tree[k], v, pid) | Merge conflicts occur if a folder in one tree is a file in the other.
As the files are PIDs, this can only happen if a PID matches one of the
geographical areas that the dataset covers and should be very rare. In such
conflicts, the destination wins. | entailment |
def _get_region_tree_item_and_unconsumed_path(
self, region_tree, path, parent_key=''
):
"""Return the region_tree item specified by path. An item can be a a folder
(represented by a dictionary) or a PID (represented by None).
This function is also used for determining which section of a path is within
the region tree and which section should be passed to the next resolver. To
support this, the logic is as follows:
- If the path points to an item in the region tree, the item is returned and
the path, having been fully consumed, is returned as an empty list.
- If the path exits through a valid PID in the region tree, the PID is returned
for the item and the section of the path that was not consumed within the
region tree is returned.
- If the path exits through a valid folder in the region tree, an "invalid
path" PathException is raised. This is because only the PIDs are valid "exit
points" in the tree.
- If the path goes to an invalid location within the region tree, an "invalid
path" PathException is raised.
"""
# Handle valid item within region tree.
if not path:
if region_tree is None:
return parent_key, []
else:
return region_tree, []
# Handle valid exit through PID.
if region_tree is None:
return parent_key, path
# Handle next level in path.
if path[0] in list(region_tree.keys()):
return self._get_region_tree_item_and_unconsumed_path(
region_tree[path[0]], path[1:], path[0]
)
else:
raise onedrive_exceptions.PathException('Invalid path') | Return the region_tree item specified by path. An item can be a a folder
(represented by a dictionary) or a PID (represented by None).
This function is also used for determining which section of a path is within
the region tree and which section should be passed to the next resolver. To
support this, the logic is as follows:
- If the path points to an item in the region tree, the item is returned and
the path, having been fully consumed, is returned as an empty list.
- If the path exits through a valid PID in the region tree, the PID is returned
for the item and the section of the path that was not consumed within the
region tree is returned.
- If the path exits through a valid folder in the region tree, an "invalid
path" PathException is raised. This is because only the PIDs are valid "exit
points" in the tree.
- If the path goes to an invalid location within the region tree, an "invalid
path" PathException is raised. | entailment |
def save(self, **kwargs):
    """Override save method to catch handled errors and repackage them as 400 errors."""
    try:
        return super().save(**kwargs)
    except SlugError as exc:
        # Surface slug validation failures as a DRF 400 (Bad Request) response.
        raise ParseError(exc)
def get_api_major_by_base_url(base_url, *client_arg_list, **client_arg_dict):
    """Read the Node document from a node and return an int containing the latest
    D1 API version supported by the node.

    The Node document can always be reached through the v1 API and will list
    services for v1 and any later API versions supported by the node.
    """
    client = d1_client.mnclient.MemberNodeClient(
        base_url, *client_arg_list, **client_arg_dict
    )
    node_pyxb = client.getCapabilities()
    # service.version is e.g. "v1" / "v2"; the trailing character is the major
    # version digit.
    available_major_list = [
        int(service_pyxb.version[-1])
        for service_pyxb in node_pyxb.services.service
        if service_pyxb.available
    ]
    return max(available_major_list, default=0)
def delete_unused_subjects():
    """Delete any unused subjects from the database.

    This is not strictly required as any unused subjects will automatically be
    reused if needed in the future.
    """
    # The chained filters cause Django to create a single query with joins
    # (check with query.query).
    query = d1_gmn.app.models.Subject.objects.all()
    query = query.filter(scienceobject_submitter__isnull=True)
    query = query.filter(scienceobject_rights_holder__isnull=True)
    query = query.filter(eventlog__isnull=True)
    query = query.filter(permission__isnull=True)
    query = query.filter(whitelistforcreateupdatedelete__isnull=True)
    logger.debug('Deleting {} unused subjects:'.format(query.count()))
    for subject_model in query.all():
        # Bug fix: the original mixed the module logger with the root logger
        # (logging.debug()); use the module logger consistently.
        logger.debug('  {}'.format(subject_model.subject))
    query.delete()
def extract_subject_from_dn(cert_obj):
    """Serialize a DN to a DataONE subject string.

    Args:
      cert_obj: cryptography.Certificate

    Returns:
      str: Primary subject extracted from the certificate DN.

    The certificate DN (DistinguishedName) is a sequence of RDNs
    (RelativeDistinguishedName). Each RDN is a set of AVAs
    (AttributeValueAssertion / AttributeTypeAndValue). A DataONE subject is a
    plain string. As there is no single standard specifying how to create a
    string representation of a DN, DataONE selected one of the most common
    ways, which yields strings such as:

      CN=Some Name A123,O=Some Organization,C=US,DC=Some Domain,DC=org

    In particular, the sequence of RDNs is reversed. Attribute values are
    escaped, attribute type and value pairs are separated by "=", and AVAs are
    joined together with ",". If an RDN contains an unknown OID, the OID is
    serialized as a dotted string.

    As all the information in the DN is preserved, it is not possible to create
    the same subject with two different DNs, and the DN can be recreated from
    the subject.
    """
    ava_str_list = []
    for attr in reversed(list(cert_obj.subject)):
        # Map well-known OIDs to their short names; fall back to the dotted
        # string for unknown OIDs.
        type_str = OID_TO_SHORT_NAME_DICT.get(
            attr.oid.dotted_string, attr.oid.dotted_string
        )
        ava_str_list.append("{}={}".format(type_str, rdn_escape(attr.value)))
    return ",".join(ava_str_list)
def create_d1_dn_subject(common_name_str):
    """Create the DN Subject for a certificate that will be used in a DataONE
    environment.

    The DN is formatted into a DataONE subject, which is used in authentication,
    authorization and event tracking.

    Args:
      common_name_str: str
        DataONE uses simple DNs without physical location information, so only
        the ``common_name_str`` (``CommonName``) needs to be specified.

        For Member Node Client Side certificates or CSRs, ``common_name_str``
        is the ``node_id``, e.g., ``urn:node:ABCD`` for production, or
        ``urn:node:mnTestABCD`` for the test environments.

        For a local CA, something like ``localCA`` may be used.

        For a locally trusted client side certificate, something like
        ``localClient`` may be used.
    """
    # NOTE(review): the country/state/locality/organization attributes below
    # look like copy-pasted CA placeholders and contradict the "simple DN"
    # contract documented above — confirm whether they should be dropped.
    return cryptography.x509.Name(
        [
            cryptography.x509.NameAttribute(
                cryptography.x509.oid.NameOID.COUNTRY_NAME, "US"
            ),
            cryptography.x509.NameAttribute(
                cryptography.x509.oid.NameOID.STATE_OR_PROVINCE_NAME, "California"
            ),
            cryptography.x509.NameAttribute(
                cryptography.x509.oid.NameOID.LOCALITY_NAME, "San Francisco"
            ),
            cryptography.x509.NameAttribute(
                cryptography.x509.oid.NameOID.ORGANIZATION_NAME, "Root CA"
            ),
            # Bug fix: the CommonName was hard-coded to "ca.ca.com", silently
            # ignoring the common_name_str argument.
            cryptography.x509.NameAttribute(
                cryptography.x509.oid.NameOID.COMMON_NAME, common_name_str
            ),
        ]
    )
def generate_csr(private_key_bytes, subject_name, fqdn_list):
    """Generate a Certificate Signing Request (CSR).

    Args:
      private_key_bytes:
        Private key with which the CSR will be signed.
        NOTE(review): despite the name, ``.sign()`` expects a private key
        *object*, not raw bytes — confirm against the call sites.
      subject_name: str
        Certificate Subject Name
      fqdn_list:
        List of Fully Qualified Domain Names (FQDN) and/or IP addresses for
        which this certificate will provide authentication.
        E.g.: ['my.membernode.org', '1.2.3.4']
    """
    csr_builder = cryptography.x509.CertificateSigningRequestBuilder()
    csr_builder = csr_builder.subject_name(subject_name)
    # Each name is added as a DNSName Subject Alternative Name entry.
    csr_builder = csr_builder.add_extension(
        extension=cryptography.x509.SubjectAlternativeName(
            [cryptography.x509.DNSName(fqdn) for fqdn in fqdn_list]
        ),
        critical=False,
    )
    return csr_builder.sign(
        private_key=private_key_bytes,
        algorithm=cryptography.hazmat.primitives.hashes.SHA256(),
        backend=cryptography.hazmat.backends.default_backend(),
    )
def deserialize_pem(cert_pem):
    """Deserialize PEM (Base64) encoded X.509 v3 certificate.

    Args:
      cert_pem: str or bytes
        PEM (Base64) encoded X.509 v3 certificate

    Returns:
      cert_obj: cryptography.Certificate
    """
    pem_bytes = cert_pem.encode("utf-8") if isinstance(cert_pem, str) else cert_pem
    return cryptography.x509.load_pem_x509_certificate(
        data=pem_bytes, backend=cryptography.hazmat.backends.default_backend()
    )
def serialize_cert_to_pem(cert_obj):
    """Serialize a certificate to PEM.

    The certificate can also be a Certificate Signing Request (CSR).

    Args:
      cert_obj: cryptography.Certificate

    Returns:
      bytes: PEM encoded certificate
    """
    pem_encoding = cryptography.hazmat.primitives.serialization.Encoding.PEM
    return cert_obj.public_bytes(encoding=pem_encoding)
def extract_subject_info_extension(cert_obj):
    """Extract DataONE SubjectInfo XML doc from certificate.

    Certificates issued by DataONE may include an embedded XML doc containing
    additional information about the subject specified in the certificate DN.
    If present, the doc is stored as an extension with an OID specified by
    DataONE and formatted as specified in the DataONE SubjectInfo schema
    definition.

    Args:
      cert_obj: cryptography.Certificate

    Returns:
      str: SubjectInfo XML doc if present, else None
    """
    try:
        extension = cert_obj.extensions.get_extension_for_oid(
            cryptography.x509.oid.ObjectIdentifier(DATAONE_SUBJECT_INFO_OID)
        )
        decoded_tuple = pyasn1.codec.der.decoder.decode(extension.value.value)
        return str(decoded_tuple[0])
    except Exception as e:
        # Returns None implicitly when the extension is missing or undecodable.
        logging.debug('SubjectInfo not extracted. reason="{}"'.format(e))
def download_as_der(
    base_url=d1_common.const.URL_DATAONE_ROOT,
    timeout_sec=d1_common.const.DEFAULT_HTTP_TIMEOUT,
):
    """Download public certificate from a TLS/SSL web server as DER encoded ``bytes``.

    If the certificate is being downloaded in order to troubleshoot validation
    issues, the download itself may fail due to the validation issue that is
    being investigated. To work around such chicken-and-egg problems,
    temporarily wrap calls to the download_* functions with the
    ``disable_cert_validation()`` context manager (also in this module).

    Args:
      base_url : str
        A full URL to a DataONE service endpoint or a server hostname
      timeout_sec : int or float
        Timeout for the SSL socket operations

    Returns:
      bytes: The server's public certificate as DER encoded bytes.
    """
    url_obj = urllib.parse.urlparse(base_url)
    # Bug fix: a bare hostname ends up in .path (empty .netloc), and a netloc
    # with an explicit ":port" cannot be passed to connect() verbatim.
    host_port_str = url_obj.netloc or url_obj.path
    host_str, _, port_str = host_port_str.partition(":")
    port_int = int(port_str) if port_str else 443
    # Certificate validation is intentionally disabled here: this function
    # exists to retrieve certificates that may not validate (see docstring).
    # An unverified SSLContext replaces the ssl.wrap_socket() call, which was
    # deprecated and removed in Python 3.12.
    ssl_context = ssl._create_unverified_context()
    with socket.create_connection((host_str, port_int), timeout=timeout_sec) as sock:
        with ssl_context.wrap_socket(sock, server_hostname=host_str) as ssl_sock:
            return ssl_sock.getpeercert(binary_form=True)
def download_as_pem(
    base_url=d1_common.const.URL_DATAONE_ROOT,
    timeout_sec=d1_common.const.DEFAULT_HTTP_TIMEOUT,
):
    """Download public certificate from a TLS/SSL web server as PEM encoded string.

    Also see download_as_der().

    Args:
      base_url : str
        A full URL to a DataONE service endpoint or a server hostname
      timeout_sec : int or float
        Timeout for the SSL socket operations

    Returns:
      str: The certificate as a PEM encoded string.
    """
    der_bytes = download_as_der(base_url, timeout_sec)
    return ssl.DER_cert_to_PEM_cert(der_bytes)
def download_as_obj(
    base_url=d1_common.const.URL_DATAONE_ROOT,
    timeout_sec=d1_common.const.DEFAULT_HTTP_TIMEOUT,
):
    """Download public certificate from a TLS/SSL web server as Certificate object.

    Also see download_as_der().

    Args:
      base_url : str
        A full URL to a DataONE service endpoint or a server hostname
      timeout_sec : int or float
        Timeout for the SSL socket operations

    Returns:
      cryptography.Certificate
    """
    der_bytes = download_as_der(base_url, timeout_sec)
    return decode_der(der_bytes)
def decode_der(cert_der):
    """Decode cert DER string to Certificate object.

    Args:
      cert_der : Certificate as a DER encoded string

    Returns:
      cryptography.Certificate()
    """
    backend = cryptography.hazmat.backends.default_backend()
    return cryptography.x509.load_der_x509_certificate(data=cert_der, backend=backend)
import contextlib


@contextlib.contextmanager
def disable_cert_validation():
    """Context manager to temporarily disable certificate validation in the
    standard SSL library.

    Note: This should not be used in production code but is sometimes useful
    for troubleshooting certificate validation issues.

    By design, the standard SSL library does not provide a way to disable
    verification of the server side certificate. However, a patch to disable
    validation is described by the library developers. This context manager
    allows applying the patch for specific sections of code.
    """
    # Bug fix: the function was written as a generator but was missing the
    # @contextlib.contextmanager decorator, so using it in a `with` statement
    # (as documented) failed.
    current_context = ssl._create_default_https_context
    ssl._create_default_https_context = ssl._create_unverified_context
    try:
        yield
    finally:
        # Always restore the original context, even if the body raised.
        ssl._create_default_https_context = current_context
def extract_issuer_ca_cert_url(cert_obj):
    """Extract issuer CA certificate URL from certificate.

    Certificates may include a URL where the root certificate for the CA which
    was used for signing the certificate can be downloaded. This function
    returns the URL if present.

    The primary use for this is to fix validation failure due to a non-trusted
    issuer by downloading the root CA certificate from the URL and installing
    it in the local trust store.

    Args:
      cert_obj: cryptography.Certificate

    Returns:
      str: Issuer certificate URL if present, else None
    """
    for extension in cert_obj.extensions:
        if extension.oid.dotted_string != AUTHORITY_INFO_ACCESS_OID:
            continue
        for access_description in extension.value:
            if access_description.access_method.dotted_string == CA_ISSUERS_OID:
                return access_description.access_location.value
def serialize_private_key_to_pem(private_key, passphrase_bytes=None):
    """Serialize private key to PEM.

    Args:
      private_key:
      passphrase_bytes: Optional passphrase; when given, the key is encrypted.

    Returns:
      bytes: PEM encoded private key
    """
    serialization = cryptography.hazmat.primitives.serialization
    if passphrase_bytes is None:
        encryption_algorithm = serialization.NoEncryption()
    else:
        encryption_algorithm = serialization.BestAvailableEncryption(passphrase_bytes)
    return private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=encryption_algorithm,
    )
def generate_private_key(key_size=2048):
    """Generate an RSA private key.

    Args:
      key_size: int
        Key length in bits.
    """
    backend = cryptography.hazmat.backends.default_backend()
    return cryptography.hazmat.primitives.asymmetric.rsa.generate_private_key(
        # 65537 (F4) is the conventional RSA public exponent.
        public_exponent=65537,
        key_size=key_size,
        backend=backend,
    )
def get_public_key_pem(cert_obj):
    """Extract the public key from a certificate as PEM encoded PKCS#1.

    Args:
      cert_obj: cryptography.Certificate

    Returns:
      bytes: PEM encoded PKCS#1 public key.
    """
    serialization = cryptography.hazmat.primitives.serialization
    public_key = cert_obj.public_key()
    return public_key.public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.PKCS1,
    )
def load_csr(pem_path):
    """Load a CSR from a PEM encoded file."""
    with open(pem_path, "rb") as pem_file:
        pem_bytes = pem_file.read()
    return cryptography.x509.load_pem_x509_csr(
        data=pem_bytes, backend=cryptography.hazmat.backends.default_backend()
    )
def load_private_key(pem_path, passphrase_bytes=None):
    """Load a private key from a PEM encoded file.

    Args:
      pem_path: Path of the PEM file to read.
      passphrase_bytes: Optional passphrase with which the key is encrypted.
    """
    with open(pem_path, "rb") as pem_file:
        pem_bytes = pem_file.read()
    return cryptography.hazmat.primitives.serialization.load_pem_private_key(
        data=pem_bytes,
        password=passphrase_bytes,
        backend=cryptography.hazmat.backends.default_backend(),
    )
def serialize_cert_to_der(cert_obj):
    """Serialize a certificate to DER.

    Args:
      cert_obj: cryptography.Certificate

    Returns:
      bytes: DER encoded certificate
    """
    der_encoding = cryptography.hazmat.primitives.serialization.Encoding.DER
    return cert_obj.public_bytes(der_encoding)
def log_cert_info(logger, msg_str, cert_obj):
    """Dump basic certificate values to the log.

    Args:
      logger: Logger
        Logger to which to write the certificate values.
        NOTE(review): ``logger`` is *called* directly below, so it is
        presumably a logging method such as ``logging.debug`` rather than a
        Logger object — confirm at the call sites.
      msg_str: str
        A message to write to the log before the certificate values.
      cert_obj: cryptography.Certificate
        Certificate containing values to log.

    Returns:
      None
    """
    value_str_list = [
        "Subject: {}".format(
            _get_val_str(cert_obj, ["subject", "value"], reverse=True)
        ),
        "Issuer: {}".format(
            _get_val_str(cert_obj, ["issuer", "value"], reverse=True)
        ),
        "Not Valid Before: {}".format(
            cert_obj.not_valid_before.isoformat()
        ),
        "Not Valid After: {}".format(cert_obj.not_valid_after.isoformat()),
        "Subject Alt Names: {}".format(
            _get_ext_val_str(
                cert_obj, "SUBJECT_ALTERNATIVE_NAME", ["value", "value"]
            )
        ),
        "CRL Distribution Points: {}".format(
            _get_ext_val_str(
                cert_obj,
                "CRL_DISTRIBUTION_POINTS",
                ["value", "full_name", "value", "value"],
            )
        ),
        "Authority Access Location: {}".format(
            extract_issuer_ca_cert_url(cert_obj) or "<not found>"
        ),
    ]
    logger("{}:".format(msg_str))
    for value_str in value_str_list:
        logger(" {}".format(value_str))
def get_extension_by_name(cert_obj, extension_name):
    """Get a standard certificate extension by attribute name.

    Args:
      cert_obj: cryptography.Certificate
        Certificate containing a standard extension.
      extension_name : str
        Extension name. E.g., 'SUBJECT_DIRECTORY_ATTRIBUTES'.

    Returns:
      cryptography.Extension, or None if the extension is not present.
    """
    # An unknown extension_name raises AttributeError here, exactly as before.
    extension_oid = getattr(cryptography.x509.oid.ExtensionOID, extension_name)
    try:
        return cert_obj.extensions.get_extension_for_oid(extension_oid)
    except cryptography.x509.ExtensionNotFound:
        return None
def _get_val_list(obj, path_list, reverse=False):
"""Extract values from nested objects by attribute names.
Objects contain attributes which are named references to objects. This will descend
down a tree of nested objects, starting at the given object, following the given
path.
Args:
obj: object
Any type of object
path_list: list
Attribute names
reverse: bool
Reverse the list of values before concatenation.
Returns:
list of objects
"""
try:
y = getattr(obj, path_list[0])
except AttributeError:
return []
if len(path_list) == 1:
return [y]
else:
val_list = [x for a in y for x in _get_val_list(a, path_list[1:], reverse)]
if reverse:
val_list.reverse()
return val_list | Extract values from nested objects by attribute names.
Objects contain attributes which are named references to objects. This will descend
down a tree of nested objects, starting at the given object, following the given
path.
Args:
obj: object
Any type of object
path_list: list
Attribute names
reverse: bool
Reverse the list of values before concatenation.
Returns:
list of objects | entailment |
def _get_val_str(obj, path_list=None, reverse=False):
    """Extract values from nested objects by attribute names and concatenate
    their string representations.

    Args:
      obj: object
        Any type of object
      path_list: list
        Attribute names
      reverse: bool
        Reverse the list of values before concatenation.

    Returns:
      str: Concatenated extracted values.
    """
    # Note: the helper is deliberately called before the None check, matching
    # the original evaluation order.
    val_list = _get_val_list(obj, path_list or [], reverse)
    if obj is None:
        return "<not found>"
    return " / ".join(map(str, val_list))
def _JMS_to_Bern_I(C, qq):
"""From JMS to BernI basis (= traditional SUSY basis in this case)
for $\Delta F=2$ operators.
`qq` should be 'sb', 'db', 'ds' or 'cu'"""
if qq in ['sb', 'db', 'ds']:
dd = 'dd'
ij = tuple(dflav[q] for q in qq)
elif qq == 'cu':
dd = 'uu'
ij = tuple(uflav[q] for q in qq)
else:
raise ValueError("not in Bern_I: ".format(qq))
ji = (ij[1], ij[0])
d = {
'1' + 2 * qq : C["V{}LL".format(dd)][ij + ij],
'2' + 2 * qq : C["S1{}RR".format(dd)][ji + ji].conj()
- C["S8{}RR".format(dd)][ji + ji].conj() / (2 * Nc),
'3' + 2 * qq : C["S8{}RR".format(dd)][ji + ji].conj() / 2,
'4' + 2 * qq : -C["V8{}LR".format(dd)][ij + ij],
'5' + 2 * qq : -2 * C["V1{}LR".format(dd)][ij + ij]
+ C["V8{}LR".format(dd)][ij + ij] / Nc,
'1p' + 2 * qq : C["V{}RR".format(dd)][ij + ij],
'2p' + 2 * qq : C["S1{}RR".format(dd)][ij + ij]
- C["S8{}RR".format(dd)][ij + ij] / (2 * Nc),
'3p' + 2 * qq : C["S8{}RR".format(dd)][ij + ij] / 2
}
return d | From JMS to BernI basis (= traditional SUSY basis in this case)
for $\Delta F=2$ operators.
`qq` should be 'sb', 'db', 'ds' or 'cu | entailment |
def _Bern_to_JMS_I(C, qq):
"""From Bern to JMS basis for $\Delta F=2$ operators.
`qq` should be 'sb', 'db', 'ds' or 'cu'"""
if qq in ['sb', 'db', 'ds']:
dd = 'dd'
ij = '{}{}'.format(dflav[qq[0]] + 1, dflav[qq[1]] + 1)
elif qq == 'cu':
dd = 'uu'
ij = '{}{}'.format(uflav[qq[0]] + 1, uflav[qq[1]] + 1)
else:
raise ValueError("not in Bern_I: ".format(qq))
ji = ij[1] + ij[0]
d = {"V{}LL_{}{}".format(dd, ij, ij): C['1' + 2 * qq],
"S1{}RR_{}{}".format(dd, ji, ji): C['2' + 2 * qq].conjugate() + C['3' + 2 * qq].conjugate() / 3,
"S8{}RR_{}{}".format(dd, ji, ji): 2 * C['3' + 2 * qq].conjugate(),
"V1{}LR_{}{}".format(dd, ij, ij): -C['4' + 2 * qq] / 6 - C['5' + 2 * qq] / 2,
"V8{}LR_{}{}".format(dd, ij, ij): -C['4' + 2 * qq],
"V{}RR_{}{}".format(dd, ij, ij): C['1p' + 2 * qq],
"S1{}RR_{}{}".format(dd, ij, ij): C['2p' + 2 * qq] + C['3p' + 2 * qq] / 3,
"S8{}RR_{}{}".format(dd, ij, ij): 2 * C['3p' + 2 * qq],
}
if qq == 'cu':
# here we need to convert some operators that are not in the basis
for VXY in ['VuuRR', 'V1uuLR', 'V8uuLR', 'VuuLL']:
d[VXY + '_1212'] = d.pop(VXY + '_2121').conjugate()
return d | From Bern to JMS basis for $\Delta F=2$ operators.
`qq` should be 'sb', 'db', 'ds' or 'cu | entailment |
def _BernI_to_Flavio_I(C, qq):
"""From BernI to FlavioI basis for $\Delta F=2$ operators.
`qq` should be 'sb', 'db', 'ds' or 'uc'"""
qqf = qq[::-1] # flavio uses "bs" instead of "sb" etc.
if qq in ['sb', 'db', 'ds', 'cu']:
return {
'CVLL_' + 2*qqf: C["1" + 2*qq],
'CSLL_' + 2*qqf: C["2" + 2*qq] - 1 / 2 * C["3" + 2*qq],
'CTLL_' + 2*qqf: -1 / 8 * C["3" + 2*qq],
'CVLR_' + 2*qqf: -1 / 2 * C["5" + 2*qq],
'CVRR_' + 2*qqf: C["1p" + 2*qq],
'CSRR_' + 2*qqf: C["2p" + 2*qq] - 1 / 2 * C["3p" + 2*qq],
'CTRR_' + 2*qqf: -1 / 8 * C["3p" + 2*qq],
'CSLR_' + 2*qqf: C["4" + 2*qq]
}
else:
raise ValueError("not in Flavio_I: ".format(qq)) | From BernI to FlavioI basis for $\Delta F=2$ operators.
`qq` should be 'sb', 'db', 'ds' or 'uc | entailment |
def _FlavioI_to_Bern_I(C, qq):
"""From FlavioI to BernI basis for $\Delta F=2$ operators.
`qq` should be 'sb', 'db', 'ds' or 'uc'"""
qqb = qq[::-1] # flavio uses "bs" instead of "sb" etc.
if qq in ['bs', 'bd', 'sd', 'uc']:
return {
'1' + 2*qqb: C["CVLL_" + 2*qq],
'2' + 2*qqb: C["CSLL_" + 2*qq] - 4 * C["CTLL_" + 2*qq],
'3' + 2*qqb: -8 * C["CTLL_" + 2*qq],
'4' + 2*qqb: C["CSLR_" + 2*qq],
'5' + 2*qqb: -2 * C["CVLR_" + 2*qq],
'1p' + 2*qqb: C["CVRR_" + 2*qq],
'2p' + 2*qqb: C["CSRR_" + 2*qq] - 4 * C["CTRR_" + 2*qq],
'3p' + 2*qqb: -8 * C["CTRR_" + 2*qq],
}
else:
raise ValueError("not in Bern_I: ".format(qq)) | From FlavioI to BernI basis for $\Delta F=2$ operators.
`qq` should be 'sb', 'db', 'ds' or 'uc | entailment |
def _BernI_to_FormFlavor_I(C, qq):
"""From BernI to FormFlavorI basis for $\Delta F=2$ operators.
`qq` should be 'sb', 'db', 'ds' or 'uc'"""
qqf = qq[::-1] # FormFlavour uses "bs" instead of "sb" etc.
if qq in ['sb', 'db', 'ds']:
return {
'CVLL_' + 2*qqf: C["1" + 2*qq],
'CSLL_' + 2*qqf: C["2" + 2*qq] + 1 / 2 * C["3" + 2*qq],
'CTLL_' + 2*qqf: -1 / 8 * C["3" + 2*qq],
'CVLR_' + 2*qqf: -1 / 2 * C["5" + 2*qq],
'CVRR_' + 2*qqf: C["1p" + 2*qq],
'CSRR_' + 2*qqf: C["2p" + 2*qq] + 1 / 2 * C["3p" + 2*qq],
'CTRR_' + 2*qqf: -1 / 8 * C["3p" + 2*qq],
'CSLR_' + 2*qqf: C["4" + 2*qq]
}
elif qq == 'cu':
return {
'CVLL_' + 2*qq: C["1" + 2*qq].conjugate(),
'CSLL_' + 2*qq: C["2" + 2*qq] + 1 / 2 * C["3" + 2*qq].conjugate(),
'CTLL_' + 2*qq: -1 / 8 * C["3" + 2*qq].conjugate(),
'CVLR_' + 2*qq: -1 / 2 * C["5" + 2*qq].conjugate(),
'CVRR_' + 2*qq: C["1p" + 2*qq].conjugate(),
'CSRR_' + 2*qq: C["2p" + 2*qq].conjugate() + 1 / 2 * C["3p" + 2*qq].conjugate(),
'CTRR_' + 2*qq: -1 / 8 * C["3p" + 2*qq],
'CSLR_' + 2*qq: C["4" + 2*qq].conjugate()
}
else:
raise ValueError("{} not in FormFlavor_I".format(qq)) | From BernI to FormFlavorI basis for $\Delta F=2$ operators.
`qq` should be 'sb', 'db', 'ds' or 'uc | entailment |
def _JMS_to_Bern_II(C, udlnu):
    """From JMS to BernII basis for charged current process semileptonic
    operators. `udlnu` should be of the form 'udl_enu_tau', 'cbl_munu_e' etc."""
    u = uflav[udlnu[0]]
    d = dflav[udlnu[1]]
    l = lflav[udlnu[4:udlnu.find('n')]]
    lp = lflav[udlnu[udlnu.find('_', 5) + 1:]]
    # Coefficient key suffix, e.g. 'ubtaue' for 'ubl_taunu_e'.
    ind = (udlnu[0] + udlnu[1] + udlnu[4:udlnu.find('n')]
           + udlnu[udlnu.find('_', 5) + 1:])
    flav_key = (lp, l, d, u)
    return {
        '1' + ind: C["VnueduLL"][flav_key].conj(),
        '5' + ind: C["SnueduRL"][flav_key].conj(),
        '1p' + ind: C["VnueduLR"][flav_key].conj(),
        '5p' + ind: C["SnueduRR"][flav_key].conj(),
        '7p' + ind: C["TnueduRR"][flav_key].conj(),
    }
def _Bern_to_JMS_II(C, udlnu):
"""From BernII to JMS basis for charged current process semileptonic
operators. `udlnu` should be of the form 'udl_enu_tau', 'cbl_munu_e' etc."""
u = uflav[udlnu[0]]
d = dflav[udlnu[1]]
l = lflav[udlnu[4:udlnu.find('n')]]
lp = lflav[udlnu[udlnu.find('_',5)+1:len(udlnu)]]
ind = udlnu[0]+udlnu[1]+udlnu[4:udlnu.find('n')]+udlnu[udlnu.find('_',5)+1
:len(udlnu)]
return {
"VnueduLL_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['1' + ind].conjugate(),
"SnueduRL_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['5' + ind].conjugate(),
"VnueduLR_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['1p' + ind].conjugate(),
"SnueduRR_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['5p' + ind].conjugate(),
"TnueduRR_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['7p' + ind].conjugate()
} | From BernII to JMS basis for charged current process semileptonic
operators. `udlnu` should be of the form 'udl_enu_tau', 'cbl_munu_e' etc. | entailment |
def _BernII_to_Flavio_II(C, udlnu, parameters):
"""From BernII to FlavioII basis
for charged current process semileptonic operators.
`udlnu` should be of the form 'udl_enu_tau', 'cbl_munu_e' etc."""
p = parameters
u = uflav[udlnu[0]]
d = dflav[udlnu[1]]
l = lflav[udlnu[4:udlnu.find('n')]]
lp = lflav[udlnu[udlnu.find('_',5)+1:len(udlnu)]]
ind = udlnu[0]+udlnu[1]+udlnu[4:udlnu.find('n')]+udlnu[udlnu.find('_',5)+1
:len(udlnu)]
ind2 = udlnu[1]+udlnu[0]+udlnu[4:udlnu.find('n')]+'nu'+udlnu[
udlnu.find('_',5)+1:len(udlnu)]
dic = {
'CVL_' + ind2 : C['1' + ind],
'CVR_'+ ind2 : C['1p' + ind],
'CSR_'+ ind2 : C['5' + ind],
'CSL_'+ ind2 : C['5p' + ind],
'CT_'+ ind2 : C['7p' + ind]
}
V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"])
prefactor = -sqrt(2) / p['GF'] / V[u, d] / 4
return {k: prefactor * v for k, v in dic.items()} | From BernII to FlavioII basis
for charged current process semileptonic operators.
`udlnu` should be of the form 'udl_enu_tau', 'cbl_munu_e' etc. | entailment |
def _Fierz_to_JMS_III_IV_V(Fqqqq, qqqq):
"""From 4-quark Fierz to JMS basis for Classes III, IV and V.
`qqqq` should be of the form 'sbuc', 'sdcc', 'ucuu' etc."""
F = Fqqqq.copy()
#case dduu
classIII = ['sbuc', 'sbcu', 'dbuc', 'dbcu', 'dsuc', 'dscu']
classVdduu = ['sbuu' , 'dbuu', 'dsuu', 'sbcc' , 'dbcc', 'dscc']
if qqqq in classIII + classVdduu:
f1 = str(dflav[qqqq[0]] + 1)
f2 = str(dflav[qqqq[1]] + 1)
f3 = str(uflav[qqqq[2]] + 1)
f4 = str(uflav[qqqq[3]] + 1)
d = {'V1udLL_' + f3 + f4 + f1 + f2: F['F' + qqqq + '1'] + F['F' + qqqq + '2'] / Nc,
'V8udLL_' + f3 + f4 + f1 + f2: 2 * F['F' + qqqq + '2'],
'V1duLR_' + f1 + f2 + f3 + f4: F['F' + qqqq + '3'] + F['F' + qqqq + '4'] / Nc,
'V8duLR_' + f1 + f2 + f3 + f4: 2 * F['F' + qqqq + '4'],
'S1udRR_' + f3 + f4 + f1 + f2: F['F' + qqqq + '5'] + F['F' + qqqq + '6'] / Nc - 4 * F['F' + qqqq + '9'] - (4 * F['F' + qqqq + '10']) / Nc,
'S8udRR_' + f3 + f4 + f1 + f2: 2 * F['F' + qqqq + '6'] - 8 * F['F' + qqqq + '10'],
'S1udduRR_' + f3 + f2 + f1 + f4: -((8 * F['F' + qqqq + '9']) / Nc) - 8 * F['F' + qqqq + '10'],
'V8udduLR_' + f4 + f1 + f2 + f3: -F['F' + qqqq + '7'].conjugate(),
'V1udduLR_' + f4 + f1 + f2 + f3: -(F['F' + qqqq + '7'].conjugate() / (2 * Nc)) - F['F' + qqqq + '8'].conjugate() / 2,
'S8udduRR_' + f3 + f2 + f1 + f4: -16 * F['F' + qqqq + '9'],
'V1udRR_' + f3 + f4 + f1 + f2: F['F' + qqqq + '1p'] + F['F' + qqqq + '2p'] / Nc,
'V8udRR_' + f3 + f4 + f1 + f2: 2 * F['F' + qqqq + '2p'],
'V1udLR_' + f3 + f4 + f1 + f2: F['F' + qqqq + '3p'] + F['F' + qqqq + '4p'] / Nc,
'V8udLR_' + f3 + f4 + f1 + f2: 2 * F['F' + qqqq + '4p'],
'S1udRR_' + f4 + f3 + f2 + f1: F['F' + qqqq + '5p'].conjugate() + F['F' + qqqq + '6p'].conjugate() / Nc - 4 * F['F' + qqqq + '9p'].conjugate() - (4 * F['F' + qqqq + '10p'].conjugate()) / Nc,
'S8udRR_' + f4 + f3 + f2 + f1: 2 * F['F' + qqqq + '6p'].conjugate() - 8 * F['F' + qqqq + '10p'].conjugate(),
'S1udduRR_' + f4 + f1 + f2 + f3: -((8 * F['F' + qqqq + '9p'].conjugate()) / Nc) - 8 * F['F' + qqqq + '10p'].conjugate(),
'V8udduLR_' + f3 + f2 + f1 + f4: -F['F' + qqqq + '7p'],
'V1udduLR_' + f3 + f2 + f1 + f4: -(F['F' + qqqq + '7p'] / (2 * Nc)) - F['F' + qqqq + '8p'] / 2,
'S8udduRR_' + f4 + f1 + f2 + f3: -16 * F['F' + qqqq + '9p'].conjugate(),
}
return symmetrize_JMS_dict(d)
#case uudd
classVuudd = ['ucdd', 'ucss','ucbb']
if qqqq in classVuudd:
f3 = str(uflav[qqqq[0]] + 1)
f4 = str(uflav[qqqq[1]] + 1)
f1 = str(dflav[qqqq[2]] + 1)
f2 = str(dflav[qqqq[3]] + 1)
d = {'V1udLL_' + f3 + f4 + f1 + f2: F['F' + qqqq + '1'] + F['F' + qqqq + '2'] / Nc,
'V8udLL_' + f3 + f4 + f1 + f2: 2 * F['F' + qqqq + '2'],
'V1duLR_' + f1 + f2 + f3 + f4: F['F' + qqqq + '3p'] + F['F' + qqqq + '4p'] / Nc,
'V8duLR_' + f1 + f2 + f3 + f4: 2 * F['F' + qqqq + '4p'],
'S1udRR_' + f3 + f4 + f1 + f2: F['F' + qqqq + '5'] + F['F' + qqqq + '6'] / Nc - 4 * F['F' + qqqq + '9'] - (4 * F['F' + qqqq + '10']) / Nc,
'S8udRR_' + f3 + f4 + f1 + f2: 2 * F['F' + qqqq + '6'] - 8 * F['F' + qqqq + '10'],
'S1udduRR_' + f3 + f2 + f1 + f4: -((8 * F['F' + qqqq + '9']) / Nc) - 8 * F['F' + qqqq + '10'],
'V8udduLR_' + f4 + f1 + f2 + f3: -F['F' + qqqq + '7p'].conjugate(),
'V1udduLR_' + f4 + f1 + f2 + f3: -(F['F' + qqqq + '7p'].conjugate() / (2 * Nc)) - F['F' + qqqq + '8p'].conjugate() / 2,
'S8udduRR_' + f3 + f2 + f1 + f4: -16 * F['F' + qqqq + '9'],
'V1udRR_' + f3 + f4 + f1 + f2: F['F' + qqqq + '1p'] + F['F' + qqqq + '2p'] / Nc,
'V8udRR_' + f3 + f4 + f1 + f2: 2 * F['F' + qqqq + '2p'],
'V1udLR_' + f3 + f4 + f1 + f2: F['F' + qqqq + '3'] + F['F' + qqqq + '4'] / Nc,
'V8udLR_' + f3 + f4 + f1 + f2: 2 * F['F' + qqqq + '4'],
'S1udRR_' + f4 + f3 + f2 + f1: F['F' + qqqq + '5p'].conjugate() + F['F' + qqqq + '6p'].conjugate() / Nc - 4 * F['F' + qqqq + '9p'].conjugate() - (4 * F['F' + qqqq + '10p'].conjugate()) / Nc,
'S8udRR_' + f4 + f3 + f2 + f1: 2 * F['F' + qqqq + '6p'].conjugate() - 8 * F['F' + qqqq + '10p'].conjugate(),
'S1udduRR_' + f4 + f1 + f2 + f3: -((8 * F['F' + qqqq + '9p'].conjugate()) / Nc) - 8 * F['F' + qqqq + '10p'].conjugate(),
'V8udduLR_' + f3 + f2 + f1 + f4: -F['F' + qqqq + '7'],
'V1udduLR_' + f3 + f2 + f1 + f4: -(F['F' + qqqq + '7'] / (2 * Nc)) - F['F' + qqqq + '8'] / 2,
'S8udduRR_' + f4 + f1 + f2 + f3: -16 * F['F' + qqqq + '9p'].conjugate(),
}
return symmetrize_JMS_dict(d)
#case dddd
classIV = ['sbsd', 'dbds', 'bsbd']
classVdddd = ['sbss', 'dbdd', 'dsdd', 'sbbb', 'dbbb', 'dsss']
classVddddind = ['sbdd', 'dsbb', 'dbss']
classVuuuu = ['ucuu', 'cucc', 'uccc', 'cuuu']
if qqqq in classVdddd + classIV + classVuuuu:
# if 2nd and 4th or 1st and 3rd fields are the same, Fierz can be used
# to express the even coeffs in terms of the odd ones
for key in F:
# to make sure we're not screwing things up, check that none
# of the even WCs is actually present
assert int(key[5:].replace('p', '')) % 2 == 1, "Unexpected key in Fierz basis: " + key
for p in ['', 'p']:
if qqqq in ['sbbb', 'dbbb', 'dsss', 'uccc']:
F['F' + qqqq + '2' + p] = F['F' + qqqq + '1' + p]
F['F' + qqqq + '4' + p] = -1 / 2 * F['F' + qqqq + '7' + p]
F['F' + qqqq + '6' + p] = -1 / 2 * F['F' + qqqq + '5' + p] - 6 * F['F' + qqqq + '9' + p]
F['F' + qqqq + '8' + p] = -2 * F['F' + qqqq + '3' + p]
F['F' + qqqq + '10' + p] = -1 / 8 * F['F' + qqqq + '5' + p] + 1 / 2 * F['F' + qqqq + '9' + p]
elif qqqq in ['sbss', 'dbdd', 'dsdd', 'sbsd', 'dbds', 'bsbd', 'ucuu']:
notp = 'p' if p == '' else ''
F['F' + qqqq + '2' + p] = F['F' + qqqq + '1' + p]
F['F' + qqqq + '4' + p] = -1 / 2 * F['F' + qqqq + '7' + notp]
F['F' + qqqq + '6' + notp] = -1 / 2 * F['F' + qqqq + '5' + notp] - 6 * F['F' + qqqq + '9' + notp]
F['F' + qqqq + '8' + notp] = -2 * F['F' + qqqq + '3' + p]
F['F' + qqqq + '10' + notp] = -1 / 8 * F['F' + qqqq + '5' + notp] + 1 / 2 * F['F' + qqqq + '9' + notp]
if qqqq in classIV + classVdddd + classVddddind:
f1 = str(dflav[qqqq[0]] + 1)
f2 = str(dflav[qqqq[1]] + 1)
f3 = str(dflav[qqqq[2]] + 1)
f4 = str(dflav[qqqq[3]] + 1)
d = {
'VddLL_' + f3 + f4 + f1 + f2: F['F' + qqqq + '1'],
'VddLL_' + f1 + f4 + f3 + f2: F['F' + qqqq + '2'],
'V1ddLR_' + f1 + f2 + f3 + f4: F['F' + qqqq + '3'] + F['F' + qqqq + '4'] / Nc,
'V8ddLR_' + f1 + f2 + f3 + f4: 2 * F['F' + qqqq + '4'],
'S1ddRR_' + f3 + f4 + f1 + f2: F['F' + qqqq + '5'] + F['F' + qqqq + '6'] / Nc - 4 * F['F' + qqqq + '9'] - (4 * F['F' + qqqq + '10']) / Nc,
'S8ddRR_' + f3 + f4 + f1 + f2: 2 * F['F' + qqqq + '6'] - 8 * F['F' + qqqq + '10'],
'V8ddLR_' + f1 + f4 + f3 + f2: -F['F' + qqqq + '7'],
'V1ddLR_' + f1 + f4 + f3 + f2: -(F['F' + qqqq + '7'] / (2 * Nc)) - F['F' + qqqq + '8'] / 2,
'S1ddRR_' + f1 + f4 + f3 + f2: -((8 * F['F' + qqqq + '9']) / Nc) - 8 * F['F' + qqqq + '10'],
'S8ddRR_' + f3 + f2 + f1 + f4: -16 * F['F' + qqqq + '9'],
'VddRR_' + f3 + f4 + f1 + f2: F['F' + qqqq + '1p'],
'VddRR_' + f1 + f4 + f3 + f2: F['F' + qqqq + '2p'],
'V1ddLR_' + f3 + f4 + f1 + f2: F['F' + qqqq + '3p'] + F['F' + qqqq + '4p'] / Nc,
'V8ddLR_' + f3 + f4 + f1 + f2: 2 * F['F' + qqqq + '4p'],
'S1ddRR_' + f4 + f3 + f2 + f1: F['F' + qqqq + '5p'].conjugate() + F['F' + qqqq + '6p'].conjugate() / Nc - 4 * F['F' + qqqq + '9p'].conjugate() - (4 * F['F' + qqqq + '10p'].conjugate()) / Nc,
'S8ddRR_' + f4 + f3 + f2 + f1: 2 * F['F' + qqqq + '6p'].conjugate() - 8 * F['F' + qqqq + '10p'].conjugate(),
'V8ddLR_' + f3 + f2 + f1 + f4: -F['F' + qqqq + '7p'],
'V1ddLR_' + f3 + f2 + f1 + f4: -(F['F' + qqqq + '7p'] / (2 * Nc)) - F['F' + qqqq + '8p'] / 2,
'S1ddRR_' + f4 + f1 + f2 + f3: -((8 * F['F' + qqqq + '9p'].conjugate()) / Nc) - 8 * F['F' + qqqq + '10p'].conjugate(),
'S8ddRR_' + f4 + f1 + f2 + f3: -16 * F['F' + qqqq + '9p'].conjugate(),
}
return symmetrize_JMS_dict(d)
#case uuuu
if qqqq in classVuuuu:
f1 = str(uflav[qqqq[0]] + 1)
f2 = str(uflav[qqqq[1]] + 1)
f3 = str(uflav[qqqq[2]] + 1)
f4 = str(uflav[qqqq[3]] + 1)
d = {
'VuuLL_' + f3 + f4 + f1 + f2: F['F' + qqqq + '1'],
'VuuLL_' + f1 + f4 + f3 + f2: F['F' + qqqq + '2'],
'V1uuLR_' + f1 + f2 + f3 + f4: F['F' + qqqq + '3'] + F['F' + qqqq + '4'] / Nc,
'V8uuLR_' + f1 + f2 + f3 + f4: 2 * F['F' + qqqq + '4'],
'S1uuRR_' + f3 + f4 + f1 + f2: F['F' + qqqq + '5'] + F['F' + qqqq + '6'] / Nc - 4 * F['F' + qqqq + '9'] - (4 * F['F' + qqqq + '10']) / Nc,
'S8uuRR_' + f3 + f4 + f1 + f2: 2 * F['F' + qqqq + '6'] - 8 * F['F' + qqqq + '10'],
'V8uuLR_' + f1 + f4 + f3 + f2: -F['F' + qqqq + '7'],
'V1uuLR_' + f1 + f4 + f3 + f2: -(F['F' + qqqq + '7'] / (2 * Nc)) - F['F' + qqqq + '8'] / 2,
'S1uuRR_' + f1 + f4 + f3 + f2: -((8 * F['F' + qqqq + '9']) / Nc) - 8 * F['F' + qqqq + '10'],
'S8uuRR_' + f3 + f2 + f1 + f4: -16 * F['F' + qqqq + '9'],
'VuuRR_' + f3 + f4 + f1 + f2: F['F' + qqqq + '1p'],
'VuuRR_' + f1 + f4 + f3 + f2: F['F' + qqqq + '2p'],
'V1uuLR_' + f3 + f4 + f1 + f2: F['F' + qqqq + '3p'] + F['F' + qqqq + '4p'] / Nc,
'V8uuLR_' + f3 + f4 + f1 + f2: 2 * F['F' + qqqq + '4p'],
'S1uuRR_' + f4 + f3 + f2 + f1: F['F' + qqqq + '5p'].conjugate() + F['F' + qqqq + '6p'].conjugate() / Nc - 4 * F['F' + qqqq + '9p'].conjugate() - (4 * F['F' + qqqq + '10p'].conjugate()) / Nc,
'S8uuRR_' + f4 + f3 + f2 + f1: 2 * F['F' + qqqq + '6p'].conjugate() - 8 * F['F' + qqqq + '10p'].conjugate(),
'V8uuLR_' + f3 + f2 + f1 + f4: -F['F' + qqqq + '7p'],
'V1uuLR_' + f3 + f2 + f1 + f4: -(F['F' + qqqq + '7p'] / (2 * Nc)) - F['F' + qqqq + '8p'] / 2,
'S1uuRR_' + f4 + f1 + f2 + f3: -((8 * F['F' + qqqq + '9p'].conjugate()) / Nc) - 8 * F['F' + qqqq + '10p'].conjugate(),
'S8uuRR_' + f4 + f1 + f2 + f3: -16 * F['F' + qqqq + '9p'].conjugate()
}
return symmetrize_JMS_dict(d)
raise ValueError("Case not implemented: {}".format(qqqq)) | From 4-quark Fierz to JMS basis for Classes III, IV and V.
`qqqq` should be of the form 'sbuc', 'sdcc', 'ucuu' etc. | entailment |
def _JMS_to_Fierz_III_IV_V(C, qqqq):
"""From JMS to 4-quark Fierz basis for Classes III, IV and V.
`qqqq` should be of the form 'sbuc', 'sdcc', 'ucuu' etc."""
#case dduu
classIII = ['sbuc', 'sbcu', 'dbuc', 'dbcu', 'dsuc', 'dscu']
classVdduu = ['sbuu' , 'dbuu', 'dsuu', 'sbcc' , 'dbcc', 'dscc']
if qqqq in classIII + classVdduu:
f1 = dflav[qqqq[0]]
f2 = dflav[qqqq[1]]
f3 = uflav[qqqq[2]]
f4 = uflav[qqqq[3]]
return {
'F' + qqqq + '1' : C["V1udLL"][f3, f4, f1, f2]
- C["V8udLL"][f3, f4, f1, f2] / (2 * Nc),
'F' + qqqq + '2' : C["V8udLL"][f3, f4, f1, f2] / 2,
'F' + qqqq + '3' : C["V1duLR"][f1, f2, f3, f4]
- C["V8duLR"][f1, f2, f3, f4] / (2 * Nc),
'F' + qqqq + '4' : C["V8duLR"][f1, f2, f3, f4] / 2,
'F' + qqqq + '5' : C["S1udRR"][f3, f4, f1, f2]
- C["S8udduRR"][f3, f2, f1, f4] / 4
- C["S8udRR"][f3, f4, f1, f2] / (2 * Nc),
'F' + qqqq + '6' : -C["S1udduRR"][f3, f2, f1, f4] / 2
+ C["S8udduRR"][f3, f2, f1, f4] /(4 * Nc)
+ C["S8udRR"][f3, f4, f1, f2] / 2,
'F' + qqqq + '7' : -C["V8udduLR"][f4, f1, f2, f3].conj(),
'F' + qqqq + '8' : -2 * C["V1udduLR"][f4, f1, f2, f3].conj()
+ C["V8udduLR"][f4, f1, f2, f3].conj() / Nc,
'F' + qqqq + '9' : -C["S8udduRR"][f3, f2, f1, f4] / 16,
'F' + qqqq + '10' : -C["S1udduRR"][f3, f2, f1, f4] / 8
+ C["S8udduRR"][f3, f2, f1, f4] / (16 * Nc),
'F' + qqqq + '1p' : C["V1udRR"][f3, f4, f1, f2]
- C["V8udRR"][f3, f4, f1, f2] / (2 * Nc),
'F' + qqqq + '2p' : C["V8udRR"][f3, f4, f1, f2] / 2,
'F' + qqqq + '3p' : C["V1udLR"][f3, f4, f1, f2]
- C["V8udLR"][f3, f4, f1, f2] / (2 * Nc),
'F' + qqqq + '4p' : C["V8udLR"][f3, f4, f1, f2] / 2,
'F' + qqqq + '5p' : C["S1udRR"][f4, f3, f2, f1].conj() -
C["S8udduRR"][f4, f1, f2, f3].conj() / 4
- C["S8udRR"][f4, f3, f2, f1].conj() / (2 * Nc),
'F' + qqqq + '6p' : -C["S1udduRR"][f4, f1, f2, f3].conj() / 2 +
C["S8udduRR"][f4, f1, f2, f3].conj()/(4 * Nc)
+ C["S8udRR"][f4, f3, f2, f1].conj() / 2,
'F' + qqqq + '7p' : -C["V8udduLR"][f3, f2, f1, f4],
'F' + qqqq + '8p' : - 2 * C["V1udduLR"][f3, f2, f1, f4]
+ C["V8udduLR"][f3, f2, f1, f4] / Nc,
'F' + qqqq + '9p' : -C["S8udduRR"][f4, f1, f2, f3].conj() / 16,
'F' + qqqq + '10p' : -C["S1udduRR"][f4, f1, f2, f3].conj() / 8
+ C["S8udduRR"][f4, f1, f2, f3].conj() / 16 / Nc
}
classVuudd = ['ucdd', 'ucss', 'ucbb']
if qqqq in classVuudd:
f3 = uflav[qqqq[0]]
f4 = uflav[qqqq[1]]
f1 = dflav[qqqq[2]]
f2 = dflav[qqqq[3]]
return {
'F' + qqqq + '1' : C["V1udLL"][f3, f4, f1, f2]
- C["V8udLL"][f3, f4, f1, f2] / (2 * Nc),
'F' + qqqq + '2' : C["V8udLL"][f3, f4, f1, f2] / 2,
'F' + qqqq + '3p' : C["V1duLR"][f1, f2, f3, f4]
- C["V8duLR"][f1, f2, f3, f4] / (2 * Nc),
'F' + qqqq + '4p' : C["V8duLR"][f1, f2, f3, f4] / 2,
'F' + qqqq + '5' : C["S1udRR"][f3, f4, f1, f2]
- C["S8udduRR"][f3, f2, f1, f4] / 4
- C["S8udRR"][f3, f4, f1, f2] / (2 * Nc),
'F' + qqqq + '6' : -C["S1udduRR"][f3, f2, f1, f4] / 2
+ C["S8udduRR"][f3, f2, f1, f4] /(4 * Nc)
+ C["S8udRR"][f3, f4, f1, f2] / 2,
'F' + qqqq + '7p' : -C["V8udduLR"][f4, f1, f2, f3].conj(),
'F' + qqqq + '8p' : -2 * C["V1udduLR"][f4, f1, f2, f3].conj()
+ C["V8udduLR"][f4, f1, f2, f3].conj() / Nc,
'F' + qqqq + '9' : -C["S8udduRR"][f3, f2, f1, f4] / 16,
'F' + qqqq + '10' : -C["S1udduRR"][f3, f2, f1, f4] / 8
+ C["S8udduRR"][f3, f2, f1, f4] / (16 * Nc),
'F' + qqqq + '1p' : C["V1udRR"][f3, f4, f1, f2]
- C["V8udRR"][f3, f4, f1, f2] / (2 * Nc),
'F' + qqqq + '2p' : C["V8udRR"][f3, f4, f1, f2] / 2,
'F' + qqqq + '3' : C["V1udLR"][f3, f4, f1, f2]
- C["V8udLR"][f3, f4, f1, f2] / (2 * Nc),
'F' + qqqq + '4' : C["V8udLR"][f3, f4, f1, f2] / 2,
'F' + qqqq + '5p' : C["S1udRR"][f4, f3, f2, f1].conj() -
C["S8udduRR"][f4, f1, f2, f3].conj() / 4
- C["S8udRR"][f4, f3, f2, f1].conj() / (2 * Nc),
'F' + qqqq + '6p' : -C["S1udduRR"][f4, f1, f2, f3].conj() / 2 +
C["S8udduRR"][f4, f1, f2, f3].conj()/(4 * Nc)
+ C["S8udRR"][f4, f3, f2, f1].conj() / 2,
'F' + qqqq + '7' : -C["V8udduLR"][f3, f2, f1, f4],
'F' + qqqq + '8' : - 2 * C["V1udduLR"][f3, f2, f1, f4]
+ C["V8udduLR"][f3, f2, f1, f4] / Nc,
'F' + qqqq + '9p' : -C["S8udduRR"][f4, f1, f2, f3].conj() / 16,
'F' + qqqq + '10p' : -C["S1udduRR"][f4, f1, f2, f3].conj() / 8
+ C["S8udduRR"][f4, f1, f2, f3].conj() / 16 / Nc
}
#case dddd
classIV = ['sbsd', 'dbds', 'bsbd']
classVdddd = ['sbss', 'dbdd', 'dsdd', 'sbbb', 'dbbb', 'dsss']
classVddddind = ['sbdd', 'dsbb', 'dbss']
if qqqq in classIV + classVdddd + classVddddind:
f1 = dflav[qqqq[0]]
f2 = dflav[qqqq[1]]
f3 = dflav[qqqq[2]]
f4 = dflav[qqqq[3]]
return {
'F'+ qqqq +'1' : C["VddLL"][f3, f4, f1, f2],
'F'+ qqqq +'2' : C["VddLL"][f1, f4, f3, f2],
'F'+ qqqq +'3' : C["V1ddLR"][f1, f2, f3, f4]
- C["V8ddLR"][f1, f2, f3, f4]/(2 * Nc),
'F'+ qqqq +'4' : C["V8ddLR"][f1, f2, f3, f4] / 2,
'F'+ qqqq +'5' : C["S1ddRR"][f3, f4, f1, f2]
- C["S8ddRR"][f3, f2, f1,f4] / 4
- C["S8ddRR"][f3, f4, f1, f2] / (2 * Nc),
'F'+ qqqq +'6' : -C["S1ddRR"][f1, f4, f3, f2] / 2
+ C["S8ddRR"][f3, f2, f1, f4] / (4 * Nc)
+ C["S8ddRR"][f3, f4, f1, f2] / 2,
'F'+ qqqq +'7' : -C["V8ddLR"][f1, f4, f3, f2],
'F'+ qqqq +'8' : -2 * C["V1ddLR"][f1, f4, f3, f2]
+ C["V8ddLR"][f1, f4, f3, f2] / Nc,
'F'+ qqqq +'9' : -C["S8ddRR"][f3, f2, f1, f4] / 16,
'F'+ qqqq +'10' : -C["S1ddRR"][f1, f4, f3, f2] / 8
+ C["S8ddRR"][f3, f2, f1, f4] / (16 * Nc),
'F'+ qqqq +'1p' : C["VddRR"][f3, f4, f1, f2],
'F'+ qqqq +'2p' : C["VddRR"][f1, f4, f3, f2],
'F'+ qqqq +'3p' : C["V1ddLR"][f3, f4, f1, f2]
- C["V8ddLR"][f3, f4, f1,f2] / (2 * Nc),
'F'+ qqqq +'4p' : C["V8ddLR"][f3, f4, f1, f2] / 2,
'F'+ qqqq +'5p' : C["S1ddRR"][f4, f3, f2, f1].conj() -
C["S8ddRR"][f4, f1, f2, f3].conj() / 4
-C["S8ddRR"][f4, f3, f2, f1].conj() / 2 / Nc,
'F'+ qqqq +'6p' : -C["S1ddRR"][f4, f1, f2, f3].conj() / 2 +
C["S8ddRR"][f4, f1, f2, f3].conj() / 4 / Nc
+ C["S8ddRR"][f4, f3, f2, f1].conj() / 2,
'F'+ qqqq +'7p' : -C["V8ddLR"][f3, f2, f1, f4],
'F'+ qqqq +'8p' : -2 * C["V1ddLR"][f3, f2, f1, f4]
+ C["V8ddLR"][f3, f2, f1, f4] / Nc,
'F'+ qqqq +'9p' : -C["S8ddRR"][f4, f1, f2, f3].conj() / 16,
'F'+ qqqq +'10p' : -C["S1ddRR"][f4, f1, f2, f3].conj() / 8 +
C["S8ddRR"][f4, f1, f2, f3].conj() / 16 / Nc
}
#case uuuu
classVuuuu = ['ucuu', 'cucc', 'cuuu', 'uccc']
if qqqq in classVuuuu:
f1 = uflav[qqqq[0]]
f2 = uflav[qqqq[1]]
f3 = uflav[qqqq[2]]
f4 = uflav[qqqq[3]]
return {
'F' + qqqq + '1' : C["VuuLL"][f3, f4, f1, f2],
'F' + qqqq + '2' : C["VuuLL"][f1, f4, f3, f2],
'F' + qqqq + '3' : C["V1uuLR"][f1, f2, f3, f4]
- C["V8uuLR"][f1, f2, f3, f4] / (2 * Nc),
'F' + qqqq + '4' : C["V8uuLR"][f1, f2, f3, f4] / 2,
'F' + qqqq + '5' : C["S1uuRR"][f3, f4, f1, f2]
- C["S8uuRR"][f3, f2, f1, f4] / 4
- C["S8uuRR"][f3, f4, f1, f2] / (2 * Nc),
'F' + qqqq + '6' : -C["S1uuRR"][f1, f4, f3, f2] / 2
+ C["S8uuRR"][f3, f2, f1, f4] / (4 * Nc)
+ C["S8uuRR"][f3, f4, f1, f2] / 2,
'F' + qqqq + '7' : -C["V8uuLR"][f1, f4, f3, f2],
'F' + qqqq + '8' : -2 * C["V1uuLR"][f1, f4, f3, f2]
+ C["V8uuLR"][f1, f4, f3, f2] / Nc,
'F' + qqqq + '9' : -C["S8uuRR"][f3, f2, f1, f4] / 16,
'F' + qqqq + '10' : -C["S1uuRR"][f1, f4, f3, f2] / 8
+ C["S8uuRR"][f3, f2, f1, f4] / (16 * Nc),
'F'+ qqqq + '1p': C["VuuRR"][f3, f4, f1, f2],
'F' + qqqq + '2p': C["VuuRR"][f1, f3, f4, f2],
'F' + qqqq + '3p' : C["V1uuLR"][f3, f4, f1, f2]
- C["V8uuLR"][f3, f4, f1,f2] / (2 * Nc),
'F' + qqqq + '4p' : C["V8uuLR"][f3, f4, f1, f2] / 2,
'F' + qqqq + '5p' : C["S1uuRR"][f4, f3, f2, f1].conj() -
C["S8uuRR"][f4, f1, f2, f3].conj() / 4 -
C["S8uuRR"][f4, f3, f2, f1].conj() / 2 / Nc,
'F' + qqqq + '6p' : -C["S1uuRR"][f4, f1, f2, f3].conj() / 2 +
C["S8uuRR"][f4, f1, f2, f3].conj() / 4 / Nc
+ C["S8uuRR"][f4, f3, f2, f1].conj() / 2,
'F' + qqqq + '7p' : -C["V8uuLR"][f3, f2, f1, f4],
'F' + qqqq + '8p' : -2 * C["V1uuLR"][f3, f2, f1, f4]
+ C["V8uuLR"][f3, f2, f1, f4] / Nc,
'F' + qqqq + '9p' : -C["S8uuRR"][f4, f1, f2, f3].conj() / 16,
'F' + qqqq + '10p' : -C["S1uuRR"][f4, f1, f2, f3].conj() / 8 +
C["S8uuRR"][f4, f1, f2, f3].conj() / 16 / Nc
}
else:
raise ValueError("Case not implemented: {}".format(qqqq)) | From JMS to 4-quark Fierz basis for Classes III, IV and V.
`qqqq` should be of the form 'sbuc', 'sdcc', 'ucuu' etc. | entailment |
def _Fierz_to_Bern_III_IV_V(Fqqqq, qqqq):
"""From Fierz to 4-quark Bern basis for Classes III, IV and V.
`qqqq` should be of the form 'sbuc', 'sdcc', 'ucuu' etc."""
# 2nd != 4th, color-octet redundant
if qqqq in ['sbss', 'dbdd', 'dbds', 'sbsd', 'bsbd', 'dsdd']:
return {
'1' + qqqq : -Fqqqq['F' + qqqq + '1'] / 3
+ 4 * Fqqqq['F' + qqqq + '3'] / 3,
'3' + qqqq : Fqqqq['F' + qqqq + '1'] / 12 - Fqqqq['F' + qqqq + '3'] / 12,
'5' + qqqq : -Fqqqq['F' + qqqq + '5p'] / 3
+ 4 * Fqqqq['F' + qqqq + '7p'] / 3,
'7' + qqqq : Fqqqq['F' + qqqq + '5p'] / 3 - Fqqqq['F' + qqqq + '7p'] / 3
+ Fqqqq['F' + qqqq + '9p'],
'9' + qqqq : Fqqqq['F' + qqqq + '5p'] / 48
- Fqqqq['F' + qqqq + '7p'] / 48,
'1p' + qqqq : -Fqqqq['F' + qqqq + '1p'] / 3
+ 4 * Fqqqq['F' + qqqq + '3p'] / 3,
'3p' + qqqq : Fqqqq['F' + qqqq + '1p'] / 12
- Fqqqq['F' + qqqq + '3p'] / 12,
'5p' + qqqq : -Fqqqq['F' + qqqq + '5'] / 3
+ 4 * Fqqqq['F' + qqqq + '7'] / 3,
'7p' + qqqq : Fqqqq['F' + qqqq + '5'] / 3 - Fqqqq['F' + qqqq + '7'] / 3
+ Fqqqq['F' + qqqq + '9'],
'9p' + qqqq : Fqqqq['F' + qqqq + '5'] / 48
- Fqqqq['F' + qqqq + '7'] / 48
}
if qqqq in ['dbbb', 'sbbb', 'dsss']: # 2nd = 4th, color-octet redundant
return {
'1' + qqqq : -Fqqqq['F' + qqqq + '1'] / 3
+ 4 * Fqqqq['F' + qqqq + '3'] / 3,
'3' + qqqq : Fqqqq['F' + qqqq + '1'] / 12 - Fqqqq['F' + qqqq + '3'] / 12,
'5' + qqqq : -Fqqqq['F' + qqqq + '5'] / 3
+ 4 * Fqqqq['F' + qqqq + '7'] / 3,
'7' + qqqq : Fqqqq['F' + qqqq + '5'] / 3 - Fqqqq['F' + qqqq + '7'] / 3
+ Fqqqq['F' + qqqq + '9'],
'9' + qqqq : Fqqqq['F' + qqqq + '5'] / 48
- Fqqqq['F' + qqqq + '7'] / 48,
'1p' + qqqq : -Fqqqq['F' + qqqq + '1p'] / 3
+ 4 * Fqqqq['F' + qqqq + '3p'] / 3,
'3p' + qqqq : Fqqqq['F' + qqqq + '1p'] / 12
- Fqqqq['F' + qqqq + '3p'] / 12,
'5p' + qqqq : -Fqqqq['F' + qqqq + '5p'] / 3
+ 4 * Fqqqq['F' + qqqq + '7p'] / 3,
'7p' + qqqq : Fqqqq['F' + qqqq + '5p'] / 3 - Fqqqq['F' + qqqq + '7p'] / 3
+ Fqqqq['F' + qqqq + '9p'],
'9p' + qqqq : Fqqqq['F' + qqqq + '5p'] / 48
- Fqqqq['F' + qqqq + '7p'] / 48
}
# generic case
if qqqq in ['sbuu', 'sbdd', 'sbuu', 'sbuc', 'sbcu', 'sbcc',
'dbuu', 'dbss', 'dbuu', 'dbuc', 'dbcu', 'dbcc',
'dsuu', 'dsbb', 'dsuu', 'dsuc', 'dscu', 'dscc',]:
return {
'1'+qqqq : -Fqqqq['F' + qqqq + '1']/3 + 4 * Fqqqq['F' + qqqq + '3'] / 3
- Fqqqq['F' + qqqq + '2']/(3 * Nc)
+ 4 * Fqqqq['F' + qqqq + '4'] / (3 * Nc),
'2'+qqqq : -2 * Fqqqq['F' + qqqq + '2'] / 3
+ 8 * Fqqqq['F' + qqqq + '4'] / 3,
'3'+qqqq : Fqqqq['F' + qqqq + '1'] / 12
- Fqqqq['F' + qqqq + '3'] / 12
+ Fqqqq['F' + qqqq + '2'] / (12 * Nc)
- Fqqqq['F' + qqqq + '4'] / (12 * Nc),
'4'+ qqqq : Fqqqq['F' + qqqq + '2'] / 6 - Fqqqq['F' + qqqq + '4'] / 6,
'5'+ qqqq : -Fqqqq['F' + qqqq + '5'] / 3
+ 4 * Fqqqq['F' + qqqq + '7'] / 3
- Fqqqq['F' + qqqq + '6']/(3 * Nc)
+ 4 * Fqqqq['F' + qqqq + '8']/(3 * Nc),
'6'+qqqq : -2 * Fqqqq['F' + qqqq + '6'] / 3
+ 8 * Fqqqq['F' + qqqq + '8'] / 3,
'7'+qqqq : Fqqqq['F' + qqqq + '5'] / 3 - Fqqqq['F' + qqqq + '7'] / 3
+ Fqqqq['F' + qqqq + '9'] + Fqqqq['F' + qqqq + '10'] / Nc
+ Fqqqq['F' + qqqq + '6']/(3 * Nc)
- Fqqqq['F' + qqqq + '8']/(3 * Nc),
'8'+qqqq : 2*Fqqqq['F' + qqqq + '10'] + 2 * Fqqqq['F' + qqqq + '6'] / 3
-2 * Fqqqq['F' + qqqq + '8'] / 3,
'9'+qqqq : Fqqqq['F' + qqqq + '5'] / 48 - Fqqqq['F' + qqqq + '7'] / 48
+ Fqqqq['F' + qqqq + '6'] / (48 * Nc)
- Fqqqq['F' + qqqq + '8'] / (48 * Nc),
'10'+qqqq : Fqqqq['F' + qqqq + '6'] / 24 - Fqqqq['F' + qqqq + '8'] / 24,
'1p'+qqqq : -Fqqqq['F' + qqqq + '1p'] / 3
+ 4 * Fqqqq['F' + qqqq + '3p'] / 3
- Fqqqq['F' + qqqq + '2p'] / (3 * Nc)
+ 4 * Fqqqq['F' + qqqq + '4p'] / (3 * Nc),
'2p'+qqqq : -2 * Fqqqq['F' + qqqq + '2p'] / 3
+ 8 * Fqqqq['F' + qqqq + '4p'] / 3,
'3p'+qqqq : Fqqqq['F' + qqqq + '1p'] / 12
- Fqqqq['F' + qqqq + '3p'] / 12
+ Fqqqq['F' + qqqq + '2p'] / (12 * Nc)
- Fqqqq['F' + qqqq + '4p'] / (12 * Nc),
'4p'+qqqq : Fqqqq['F' + qqqq + '2p'] / 6 - Fqqqq['F' + qqqq + '4p'] / 6,
'5p'+qqqq : -Fqqqq['F' + qqqq + '5p'] / 3
+ 4 * Fqqqq['F' + qqqq + '7p'] / 3
- Fqqqq['F' + qqqq + '6p'] / (3 * Nc)
+ 4 * Fqqqq['F' + qqqq + '8p'] / (3 * Nc),
'6p'+qqqq : -2 * Fqqqq['F' + qqqq + '6p'] / 3
+ 8 * Fqqqq['F' + qqqq + '8p'] / 3,
'7p'+qqqq : Fqqqq['F' + qqqq + '5p'] / 3 - Fqqqq['F' + qqqq + '7p'] / 3
+ Fqqqq['F' + qqqq + '9p'] + Fqqqq['F' + qqqq + '10p'] / Nc
+ Fqqqq['F' + qqqq + '6p']/(3 * Nc)
- Fqqqq['F' + qqqq + '8p']/(3 * Nc),
'8p'+qqqq : 2 * Fqqqq['F' + qqqq + '10p']
+ 2 * Fqqqq['F' + qqqq + '6p'] / 3
- 2 * Fqqqq['F' + qqqq + '8p'] / 3,
'9p'+qqqq : Fqqqq['F' + qqqq + '5p'] / 48
- Fqqqq['F' + qqqq + '7p'] / 48
+ Fqqqq['F' + qqqq + '6p'] / (48 * Nc)
- Fqqqq['F' + qqqq + '8p'] / (48 * Nc),
'10p'+qqqq : Fqqqq['F' + qqqq + '6p'] / 24
- Fqqqq['F' + qqqq + '8p'] / 24
}
raise ValueError("Case not implemented: {}".format(qqqq)) | From Fierz to 4-quark Bern basis for Classes III, IV and V.
`qqqq` should be of the form 'sbuc', 'sdcc', 'ucuu' etc. | entailment |
def _Bern_to_Fierz_III_IV_V(C, qqqq):
"""From Bern to 4-quark Fierz basis for Classes III, IV and V.
`qqqq` should be of the form 'sbuc', 'sdcc', 'ucuu' etc."""
# 2nd != 4th, color-octet redundant
if qqqq in ['sbss', 'dbdd', 'dbds', 'sbsd', 'bsbd', 'dsdd']:
return {
'F' + qqqq + '1': C['1' + qqqq] + 16 * C['3' + qqqq],
'F' + qqqq + '1p': C['1p' + qqqq] + 16 * C['3p' + qqqq],
'F' + qqqq + '3': C['1' + qqqq] + 4 * C['3' + qqqq],
'F' + qqqq + '3p': C['1p' + qqqq] + 4 * C['3p' + qqqq],
'F' + qqqq + '5': C['5p' + qqqq] + 64 * C['9p' + qqqq],
'F' + qqqq + '5p': C['5' + qqqq] + 64 * C['9' + qqqq],
'F' + qqqq + '7': C['5p' + qqqq] + 16 * C['9p' + qqqq],
'F' + qqqq + '7p': C['5' + qqqq] + 16 * C['9' + qqqq],
'F' + qqqq + '9': C['7p' + qqqq] - 16 * C['9p' + qqqq],
'F' + qqqq + '9p': C['7' + qqqq] - 16 * C['9' + qqqq],
}
if qqqq in ['dbbb', 'sbbb', 'dsss']: # 2nd = 4th, color-octet redundant
return {
'F' + qqqq + '1': C['1' + qqqq] + 16 * C['3' + qqqq],
'F' + qqqq + '1p': C['1p' + qqqq] + 16 * C['3p' + qqqq],
'F' + qqqq + '3': C['1' + qqqq] + 4 * C['3' + qqqq],
'F' + qqqq + '3p': C['1p' + qqqq] + 4 * C['3p' + qqqq],
'F' + qqqq + '5': C['5' + qqqq] + 64 * C['9' + qqqq],
'F' + qqqq + '5p': C['5p' + qqqq] + 64 * C['9p' + qqqq],
'F' + qqqq + '7': C['5' + qqqq] + 16 * C['9' + qqqq],
'F' + qqqq + '7p': C['5p' + qqqq] + 16 * C['9p' + qqqq],
'F' + qqqq + '9': C['7' + qqqq] - 16 * C['9' + qqqq],
'F' + qqqq + '9p': C['7p' + qqqq] - 16 * C['9p' + qqqq],
}
# generic case
if qqqq in ['sbuu', 'sbdd', 'sbuu', 'sbuc', 'sbcu', 'sbcc',
'dbuu', 'dbss', 'dbuu', 'dbuc', 'dbcu', 'dbcc',
'dsuu', 'dsbb', 'dsuu', 'dsuc', 'dscu', 'dscc',]:
return {
'F' + qqqq + '1': C['1' + qqqq] - C['2' + qqqq] / 6 + 16 * C['3' + qqqq] - (8 * C['4' + qqqq]) / 3,
'F' + qqqq + '10': -8 * C['10' + qqqq] + C['8' + qqqq] / 2,
'F' + qqqq + '10p': -8 * C['10p' + qqqq] + C['8p' + qqqq] / 2,
'F' + qqqq + '1p': C['1p' + qqqq] - C['2p' + qqqq] / 6 + 16 * C['3p' + qqqq] - (8 * C['4p' + qqqq]) / 3,
'F' + qqqq + '2': C['2' + qqqq] / 2 + 8 * C['4' + qqqq],
'F' + qqqq + '2p': C['2p' + qqqq] / 2 + 8 * C['4p' + qqqq],
'F' + qqqq + '3': C['1' + qqqq] - C['2' + qqqq] / 6 + 4 * C['3' + qqqq] - (2 * C['4' + qqqq]) / 3,
'F' + qqqq + '3p': C['1p' + qqqq] - C['2p' + qqqq] / 6 + 4 * C['3p' + qqqq] - (2 * C['4p' + qqqq]) / 3,
'F' + qqqq + '4': C['2' + qqqq] / 2 + 2 * C['4' + qqqq],
'F' + qqqq + '4p': C['2p' + qqqq] / 2 + 2 * C['4p' + qqqq],
'F' + qqqq + '5': -((32 * C['10' + qqqq]) / 3) + C['5' + qqqq] - C['6' + qqqq] / 6 + 64 * C['9' + qqqq],
'F' + qqqq + '5p': -((32 * C['10p' + qqqq]) / 3) + C['5p' + qqqq] - C['6p' + qqqq] / 6 + 64 * C['9p' + qqqq],
'F' + qqqq + '6': 32 * C['10' + qqqq] + C['6' + qqqq] / 2,
'F' + qqqq + '6p': 32 * C['10p' + qqqq] + C['6p' + qqqq] / 2,
'F' + qqqq + '7': -((8 * C['10' + qqqq]) / 3) + C['5' + qqqq] - C['6' + qqqq] / 6 + 16 * C['9' + qqqq],
'F' + qqqq + '7p': -((8 * C['10p' + qqqq]) / 3) + C['5p' + qqqq] - C['6p' + qqqq] / 6 + 16 * C['9p' + qqqq],
'F' + qqqq + '8': 8 * C['10' + qqqq] + C['6' + qqqq] / 2,
'F' + qqqq + '8p': 8 * C['10p' + qqqq] + C['6p' + qqqq] / 2,
'F' + qqqq + '9': (8 * C['10' + qqqq]) / 3 + C['7' + qqqq] - C['8' + qqqq] / 6 - 16 * C['9' + qqqq],
'F' + qqqq + '9p': (8 * C['10p' + qqqq]) / 3 + C['7p' + qqqq] - C['8p' + qqqq] / 6 - 16 * C['9p' + qqqq],
}
raise ValueError("Case not implemented: {}".format(qqqq)) | From Bern to 4-quark Fierz basis for Classes III, IV and V.
`qqqq` should be of the form 'sbuc', 'sdcc', 'ucuu' etc. | entailment |
def _Fierz_to_EOS_V(Fsbuu,Fsbdd,Fsbcc,Fsbss,Fsbbb,parameters):
p = parameters
V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"])
Vtb = V[2,2]
Vts = V[2,1]
"""From Fierz to the EOS basis for b -> s transitions.
The arguments are dictionaries of the corresponding Fierz bases """
dic = {
'b->s::c1' : -Fsbbb['Fsbbb1']/3 + 2*Fsbcc['Fsbcc1']
- 2 * Fsbdd['Fsbdd1'] / 3 + Fsbdd['Fsbdd2']/3 -
Fsbss['Fsbss1'] / 3 - 2 * Fsbuu['Fsbuu1'] / 3
+ Fsbuu['Fsbuu2'] / 3,
'b->s::c2' : -2 * Fsbbb['Fsbbb1'] / 9 + Fsbcc['Fsbcc1'] / 3
+ Fsbcc['Fsbcc2'] + Fsbdd['Fsbdd1'] / 18
- 5 * Fsbdd['Fsbdd2'] / 18 - 2 * Fsbss['Fsbss1'] / 9
+ Fsbuu['Fsbuu1'] / 18 - 5 * Fsbuu['Fsbuu2'] / 18,
'b->s::c3' : -2 * Fsbbb['Fsbbb1'] / 27 + 4 * Fsbbb['Fsbbb3'] / 15
+ 4 * Fsbbb['Fsbbb4'] / 45 + 4 * Fsbcc['Fsbcc3'] / 15
+ 4 * Fsbcc['Fsbcc4'] / 45 - 5 * Fsbdd['Fsbdd1'] / 54
+ Fsbdd['Fsbdd2'] / 54 + 4 * Fsbdd['Fsbdd3'] / 15
+ 4 * Fsbdd['Fsbdd4'] / 45 - 2 * Fsbss['Fsbss1'] / 27
+ 4 * Fsbss['Fsbss3'] / 15 + 4 * Fsbss['Fsbss4'] / 45
- 5 * Fsbuu['Fsbuu1'] / 54 + Fsbuu['Fsbuu2'] / 54
+ 4 * Fsbuu['Fsbuu3'] / 15 + 4 * Fsbuu['Fsbuu4'] / 45,
'b->s::c4' : -Fsbbb['Fsbbb1'] / 9 + 8 * Fsbbb['Fsbbb4'] / 15
+ 8 * Fsbcc['Fsbcc4'] / 15 + Fsbdd['Fsbdd1'] / 9
- 2 * Fsbdd['Fsbdd2'] / 9 + 8 * Fsbdd['Fsbdd4'] / 15
- Fsbss['Fsbss1'] / 9 + 8 * Fsbss['Fsbss4'] / 15
+ Fsbuu['Fsbuu1'] / 9 - 2 * Fsbuu['Fsbuu2'] / 9
+ 8 * Fsbuu['Fsbuu4'] / 15,
'b->s::c5' : Fsbbb['Fsbbb1'] / 54 - Fsbbb['Fsbbb3'] / 60
- Fsbbb['Fsbbb4'] / 180 - Fsbcc['Fsbcc3'] / 60
- Fsbcc['Fsbcc4'] / 180 + 5 * Fsbdd['Fsbdd1'] / 216
- Fsbdd['Fsbdd2'] / 216 - Fsbdd['Fsbdd3'] / 60
- Fsbdd['Fsbdd4'] / 180 + Fsbss['Fsbss1'] / 54
- Fsbss['Fsbss3'] / 60 - Fsbss['Fsbss4'] / 180
+ 5 * Fsbuu['Fsbuu1'] / 216 - Fsbuu['Fsbuu2'] / 216
- Fsbuu['Fsbuu3'] / 60 - Fsbuu['Fsbuu4'] / 180,
'b->s::c6' : Fsbbb['Fsbbb1'] / 36 - Fsbbb['Fsbbb4'] / 30
- Fsbcc['Fsbcc4'] / 30 - Fsbdd['Fsbdd1'] / 36
+ Fsbdd['Fsbdd2'] / 18 - Fsbdd['Fsbdd4'] / 30
+ Fsbss['Fsbss1'] / 36 - Fsbss['Fsbss4'] / 30
- Fsbuu['Fsbuu1'] / 36 + Fsbuu['Fsbuu2'] / 18
- Fsbuu['Fsbuu4'] / 30
}
prefactor = sqrt(2)/p['GF']/Vtb/Vts.conj()/4
return {k: prefactor * v for k,v in dic.items()} | From Fierz to the EOS basis for b -> s transitions.
The arguments are dictionaries of the corresponding Fierz bases | entailment |
def JMS_to_Fierz_lep(C, ddll):
"""From JMS to semileptonic Fierz basis for Class V.
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc."""
if ddll[:2] == 'uc':
s = uflav[ddll[0]]
b = uflav[ddll[1]]
q = 'u'
else:
s = dflav[ddll[0]]
b = dflav[ddll[1]]
q = 'd'
l = lflav[ddll[4:ddll.find('n')]]
lp = lflav[ddll[ddll.find('_',5)+1:len(ddll)]]
ind = ddll.replace('l_','').replace('nu_','')
return {
'F' + ind + '9' : C["V" + q + "eLR"][s, b, l, lp] / 2
+ C["Ve" + q + "LL"][l, lp, s, b] / 2,
'F' + ind + '10' : C["V" + q + "eLR"][s, b, l, lp] / 2
- C["Ve" + q + "LL"][l, lp, s, b] / 2,
'F' + ind + 'S' : C["Se" + q + "RL"][lp, l, b, s].conj() / 2
+ C["Se" + q + "RR"][l, lp, s, b] / 2,
'F' + ind + 'P' : - C["Se" + q + "RL"][lp, l, b, s].conj() / 2
+ C["Se" + q + "RR"][l, lp, s, b] / 2,
'F' + ind + 'T' : C["Te" + q + "RR"][l, lp, s, b] / 2
+ C["Te" + q + "RR"][lp, l, b, s].conj() / 2,
'F' + ind + 'T5' : C["Te" + q + "RR"][l, lp, s, b] / 2
- C["Te" + q + "RR"][lp, l, b, s].conj() / 2,
'F' + ind + '9p' : C["Ve" + q + "LR"][l, lp, s, b] / 2
+ C["Ve" + q + "RR"][l, lp, s, b] / 2,
'F' + ind + '10p' : -C["Ve" + q + "LR"][l, lp, s, b] / 2
+ C["Ve" + q + "RR"][l, lp, s, b] / 2,
'F' + ind + 'Sp' : C["Se" + q + "RL"][l, lp, s, b] / 2
+ C["Se" + q + "RR"][lp, l, b, s].conj() / 2,
'F' + ind + 'Pp' : C["Se" + q + "RL"][l, lp, s, b] / 2
- C["Se" + q + "RR"][lp, l, b, s].conj() / 2,
} | From JMS to semileptonic Fierz basis for Class V.
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc. | entailment |
def JMS_to_Fierz_nunu(C, ddll):
"""From JMS to semileptonic Fierz basis for Class V.
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc."""
s = dflav[ddll[0]]
b = dflav[ddll[1]]
l = lflav[ddll[4:ddll.find('n')]]
lp = lflav[ddll[ddll.find('_',5)+1:len(ddll)]]
ind = ddll.replace('l_','').replace('nu_','')
return {
'F' + ind + 'nu' : C["VnudLL"][l, lp, s, b],
'F' + ind + 'nup' : C["VnudLR"][l, lp, s, b]
} | From JMS to semileptonic Fierz basis for Class V.
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc. | entailment |
def Fierz_to_JMS_lep(C, ddll):
"""From Fierz to semileptonic JMS basis for Class V.
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc."""
if ddll[:2] == 'uc':
s = str(uflav[ddll[0]] + 1)
b = str(uflav[ddll[1]] + 1)
q = 'u'
else:
s = str(dflav[ddll[0]] + 1)
b = str(dflav[ddll[1]] + 1)
q = 'd'
l = str(lflav[ddll[4:ddll.find('n')]] + 1)
lp = str(lflav[ddll[ddll.find('_',5)+1:len(ddll)]] + 1)
ind = ddll.replace('l_','').replace('nu_','')
d = {
"Ve" + q + "LL" + '_' + l + lp + s + b : -C['F' + ind + '10'] + C['F' + ind + '9'],
"V" + q + "eLR" + '_' + s + b + l + lp : C['F' + ind + '10'] + C['F' + ind + '9'],
"Se" + q + "RR" + '_' + l + lp + s + b : C['F' + ind + 'P'] + C['F' + ind + 'S'],
"Se" + q + "RL" + '_' + lp + l + b + s : -C['F' + ind + 'P'].conjugate() + C['F' + ind + 'S'].conjugate(),
"Te" + q + "RR" + '_' + lp + l + b + s : C['F' + ind + 'T'].conjugate() - C['F' + ind + 'T5'].conjugate(),
"Te" + q + "RR" + '_' + l + lp + s + b : C['F' + ind + 'T'] + C['F' + ind + 'T5'],
"Ve" + q + "LR" + '_' + l + lp + s + b : -C['F' + ind + '10p'] + C['F' + ind + '9p'],
"Ve" + q + "RR" + '_' + l + lp + s + b : C['F' + ind + '10p'] + C['F' + ind + '9p'],
"Se" + q + "RL" + '_' + l + lp + s + b : C['F' + ind + 'Pp'] + C['F' + ind + 'Sp'],
"Se" + q + "RR" + '_' + lp + l + b + s : -C['F' + ind + 'Pp'].conjugate() + C['F' + ind + 'Sp'].conjugate(),
}
return symmetrize_JMS_dict(d) | From Fierz to semileptonic JMS basis for Class V.
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc. | entailment |
def Fierz_to_JMS_nunu(C, ddll):
"""From Fierz to semileptonic JMS basis for Class V.
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc."""
s = str(dflav[ddll[0]] + 1)
b = str(dflav[ddll[1]] + 1)
l = str(lflav[ddll[4:ddll.find('n')]] + 1)
lp = str(lflav[ddll[ddll.find('_',5)+1:len(ddll)]] + 1)
ind = ddll.replace('l_','').replace('nu_','')
d = {
"VnudLL" + '_' + l + lp + s + b : C['F' + ind + 'nu'],
"VnudLR" + '_' + l + lp + s + b : C['F' + ind + 'nup']
}
return symmetrize_JMS_dict(d) | From Fierz to semileptonic JMS basis for Class V.
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc. | entailment |
def Fierz_to_Bern_lep(C, ddll):
"""From semileptonic Fierz basis to Bern semileptonic basis for Class V.
C should be the corresponding leptonic Fierz basis and
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc."""
ind = ddll.replace('l_','').replace('nu_','')
dic = {
'1' + ind : 5 * C['F'+ ind + '10'] / 3 + C['F'+ ind + '9'],
'3' + ind : -C['F' + ind + '10'] / 6,
'5' + ind : C['F' + ind + 'S'] - 5 * C['F' + ind + 'P'] / 3,
'7' + ind : 2 * C['F' + ind + 'P'] / 3 + C['F' + ind + 'T']
+ C['F' + ind + 'T5'],
'9' + ind : C['F' + ind + 'P'] / 24,
'1p' + ind : C['F' + ind + '9p'] - 5 * C['F' + ind + '10p'] / 3,
'3p' + ind : C['F' + ind + '10p'] / 6,
'5p' + ind : 5 * C['F' + ind + 'Pp'] / 3 + C['F' + ind + 'Sp'],
'7p' + ind : -2 * C['F' + ind + 'Pp'] / 3 + C['F' + ind + 'T']
- C['F' + ind + 'T5'],
'9p' + ind : -C['F' + ind + 'Pp'] / 24,
}
return dic | From semileptonic Fierz basis to Bern semileptonic basis for Class V.
C should be the corresponding leptonic Fierz basis and
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc. | entailment |
def Fierz_to_Bern_nunu(C, ddll):
"""From semileptonic Fierz basis to Bern semileptonic basis for Class V.
C should be the corresponding leptonic Fierz basis and
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc."""
ind = ddll.replace('l_','').replace('nu_','')
dic = {
'nu1' + ind : C['F' + ind + 'nu'],
'nu1p' + ind : C['F' + ind + 'nup']
}
return dic | From semileptonic Fierz basis to Bern semileptonic basis for Class V.
C should be the corresponding leptonic Fierz basis and
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc. | entailment |
def Bern_to_Fierz_lep(C,ddll):
"""From semileptonic Bern basis to Fierz semileptonic basis for Class V.
C should be the corresponding leptonic Fierz basis and
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc."""
ind = ddll.replace('l_','').replace('nu_','')
return {'F' + ind + '9': C['1' + ind] + 10 * C['3' + ind],
'F' + ind + '10': -6 * C['3' + ind],
'F' + ind + 'S': C['5' + ind] + 40 * C['9' + ind],
'F' + ind + 'P': 24 * C['9' + ind],
'F' + ind + 'T': C['7' + ind] / 2 + C['7p' + ind] / 2 - 8 * C['9' + ind] - 8 * C['9p' + ind],
'F' + ind + 'T5': C['7' + ind] / 2 - C['7p' + ind] / 2 - 8 * C['9' + ind] + 8 * C['9p' + ind],
'F' + ind + '9p': C['1p' + ind] + 10 * C['3p' + ind],
'F' + ind + '10p': 6 * C['3p' + ind],
'F' + ind + 'Sp': C['5p' + ind] + 40 * C['9p' + ind],
'F' + ind + 'Pp': -24 * C['9p' + ind],
} | From semileptonic Bern basis to Fierz semileptonic basis for Class V.
C should be the corresponding leptonic Fierz basis and
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc. | entailment |
def Bern_to_Fierz_nunu(C,ddll):
"""From semileptonic Bern basis to Fierz semileptonic basis for Class V.
C should be the corresponding leptonic Fierz basis and
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc."""
ind = ddll.replace('l_','').replace('nu_','')
return {
'F' + ind + 'nu': C['nu1' + ind],
'F' + ind + 'nup': C['nu1p' + ind],
} | From semileptonic Bern basis to Fierz semileptonic basis for Class V.
C should be the corresponding leptonic Fierz basis and
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc. | entailment |
def Fierz_to_Flavio_lep(C, ddll, parameters, norm_gf=True):
"""From semileptonic Fierz basis to Flavio semileptonic basis for Class V.
C should be the corresponding leptonic Fierz basis and
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc."""
p = parameters
V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"])
if ddll[:2] == 'sb':
xi = V[2, 2] * V[2, 1].conj()
elif ddll[:2] == 'db':
xi = V[2, 2] * V[2, 0].conj()
elif ddll[:2] == 'ds':
xi = V[2, 1] * V[2, 0].conj()
elif ddll[:2] == 'uc':
xi = V[1, 2].conj() * V[0, 2]
else:
raise ValueError("Unexpected flavours: {}".format(ddll[:2]))
q1, q2 = ddll[:2]
l1 = ddll[4:ddll.find('n')]
l2 = ddll[ddll.find('_', 5) + 1:]
ind = q1 + q2 + l1 + l2
# flavio has indices within currents inverted
indfl = q2 + q1 + l2 + l1
e = sqrt(4* pi * parameters['alpha_e'])
if ddll[:2] == 'sb' or ddll[:2] == 'db':
mq = parameters['m_b']
elif ddll[:2] == 'ds':
mq = parameters['m_s']
elif ddll[:2] == 'uc':
mq = parameters['m_c']
else:
KeyError("Not sure what to do with quark mass for flavour {}".format(ddll[:2]))
dic = {
"C9_" + indfl : (16 * pi**2) / e**2 * C['F' + ind + '9'],
"C9p_" + indfl : (16 * pi**2) / e**2 * C['F' + ind + '9p'],
"C10_" + indfl : (16 * pi**2) / e**2 * C['F' + ind + '10'],
"C10p_" + indfl : (16 * pi**2) / e**2 * C['F' + ind + '10p'],
"CS_" + indfl : (16 * pi**2) / e**2 / mq * C['F' + ind + 'S'],
"CSp_" + indfl : (16 * pi**2) / e**2 / mq * C['F' + ind + 'Sp'],
"CP_" + indfl : (16 * pi**2) / e**2 / mq * C['F' + ind + 'P'],
"CPp_" + indfl : (16 * pi**2) / e**2 / mq * C['F' + ind + 'Pp'],
}
if norm_gf:
prefactor = sqrt(2)/p['GF']/xi/4
else:
prefactor = 1 / xi
return {k: prefactor * v for k,v in dic.items()} | From semileptonic Fierz basis to Flavio semileptonic basis for Class V.
C should be the corresponding leptonic Fierz basis and
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc. | entailment |
def Flavio_to_Fierz_nunu(C, ddll, parameters, norm_gf=True):
"""From Flavio semileptonic basis to semileptonic Fierz basis for Class V.
C should be the corresponding leptonic Fierz basis and
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc."""
p = parameters
V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"])
if ddll[:2] == 'sb':
xi = V[2, 2] * V[2, 1].conj()
elif ddll[:2] == 'db':
xi = V[2, 2] * V[2, 0].conj()
elif ddll[:2] == 'ds':
xi = V[2, 1] * V[2, 0].conj()
else:
raise ValueError("Unexpected flavours: {}".format(ddll[:2]))
q1, q2 = ddll[:2]
l1 = ddll[4:ddll.find('n')]
l2 = ddll[ddll.find('_', 5) + 1:]
ind = q1 + q2 + l1 + l2
# flavio has indices within currents inverted
indnu = q2 + q1 + 'nu' + l2 + 'nu' + l1
e = sqrt(4* pi * parameters['alpha_e'])
dic = {
'F' + ind + 'nu': C["CL_" + indnu] / ((8 * pi**2) / e**2),
'F' + ind + 'nup': C["CR_" + indnu] / ((8 * pi**2) / e**2),
}
if norm_gf:
prefactor = sqrt(2)/p['GF']/xi/4
else:
prefactor = 1 / xi
return {k: v / prefactor for k, v in dic.items()} | From Flavio semileptonic basis to semileptonic Fierz basis for Class V.
C should be the corresponding leptonic Fierz basis and
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc. | entailment |
def Fierz_to_EOS_lep(C, ddll, parameters):
"""From semileptonic Fierz basis to EOS semileptonic basis for Class V.
C should be the corresponding leptonic Fierz basis and
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc."""
p = parameters
V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"])
Vtb = V[2,2]
Vts = V[2,1]
ind = ddll.replace('l_','').replace('nu_','')
ind2 = ddll.replace('l_','').replace('nu_','')[2::]
e = sqrt(4* pi * parameters['alpha_e'])
dic = {
'b->s' + ind2 + '::c9' : (16 * pi**2) / e**2 * C['F' + ind + '9'],
'b->s' + ind2 + "::c9'" : (16 * pi**2) / e**2 * C['F' + ind + '9p'],
'b->s' + ind2 + "::c10" : (16 * pi**2) / e**2 * C['F' + ind + '10'],
'b->s' + ind2 + "::c10'" : (16 * pi**2) / e**2 * C['F' + ind + '10p'],
'b->s' + ind2 + "::cS" : (16 * pi**2) / e**2 * C['F' + ind + 'S'],
'b->s' + ind2 + "::cS'" : (16 * pi**2) / e**2 * C['F' + ind + 'Sp'],
'b->s' + ind2 + "::cP" : (16 * pi**2) / e**2 * C['F' + ind + 'P'],
'b->s' + ind2 + "::cP'" : (16 * pi**2) / e**2 * C['F' + ind + 'Pp'],
'b->s' + ind2 + "::cT" : (16 * pi**2) / e**2 * C['F' + ind + 'T'],
'b->s' + ind2 + "::cT5" : (16 * pi**2) / e**2 * C['F' + ind + 'T5']
}
prefactor = sqrt(2)/p['GF']/Vtb/Vts.conj()/4
return {k: prefactor * v for k,v in dic.items()} | From semileptonic Fierz basis to EOS semileptonic basis for Class V.
C should be the corresponding leptonic Fierz basis and
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc. | entailment |
def JMS_to_FormFlavor_lep(C, dd):
"""From JMS to semileptonic Fierz basis for Classes V.
C should be the JMS basis and `ddll` should be of the
form 'sbl_eni_tau', 'dbl_munu_e' etc."""
b = dflav[dd[0]]
s = dflav[dd[1]]
return {
'CVLL_' + dd + 'mm' : C["VedLL"][1, 1, s, b],
'CVRR_' + dd + 'mm' : C["VedRR"][1, 1, s, b],
'CVLR_' + dd + 'mm' : C["VdeLR"][s, b, 1, 1],
'CVRL_' + dd + 'mm' : C["VedLR"][1, 1, s, b],
'CSLL_' + dd + 'mm' : C["SedRR"][1, 1, b, s].conj(),
'CSRR_' + dd + 'mm' : C["SedRR"][1, 1, s, b],
'CSLR_' + dd + 'mm' : C["SedRL"][1, 1, s, b],
'CSRL_' + dd + 'mm' : C["SedRL"][1, 1, b, s].conj(),
'CTLL_' + dd + 'mm' : C["TedRR"][1, 1, b, s].conj(),
'CTRR_' + dd + 'mm' : C["TedRR"][1, 1, s, b],
'CVLL_sdnn' : 1 / 3 * C["VnudLL"][0, 0, s-1, s]
+ 1 / 3 * C["VnudLL"][1, 1, s-1, s]
+ 1 / 3 * C["VnudLL"][2, 2, s-1, s],
'CVRL_sdnn' : 1 / 3 * C["VnudLR"][0, 0, s-1, s]
+ 1 / 3 * C["VnudLR"][1, 1, s-1, s]
+ 1 / 3 * C["VnudLR"][2, 2, s-1, s]
} | From JMS to semileptonic Fierz basis for Classes V.
C should be the JMS basis and `ddll` should be of the
form 'sbl_eni_tau', 'dbl_munu_e' etc. | entailment |
def JMS_to_Fierz_chrom(C, qq):
"""From JMS to chromomagnetic Fierz basis for Class V.
qq should be of the form 'sb', 'ds' etc."""
if qq[0] in dflav:
s = dflav[qq[0]]
b = dflav[qq[1]]
return {
'F7gamma' + qq : C['dgamma'][s, b],
'F8g' + qq : C['dG'][s, b],
'F7pgamma' + qq : C['dgamma'][b, s].conj(),
'F8pg' + qq : C['dG'][b, s].conj()
}
else:
u = uflav[qq[0]]
c = uflav[qq[1]]
return {
'F7gamma' + qq : C['ugamma'][u, c],
'F8g' + qq : C['uG'][u, c],
'F7pgamma' + qq : C['ugamma'][c, u].conj(),
'F8pg' + qq : C['uG'][c, u].conj()
} | From JMS to chromomagnetic Fierz basis for Class V.
qq should be of the form 'sb', 'ds' etc. | entailment |
def Fierz_to_JMS_chrom(C, qq):
"""From chromomagnetic Fierz to JMS basis for Class V.
qq should be of the form 'sb', 'ds' etc."""
if qq[0] in dflav:
s = dflav[qq[0]] + 1
b = dflav[qq[1]] + 1
return {'dgamma_{}{}'.format(s, b): C['F7gamma' + qq],
'dG_{}{}'.format(s, b): C['F8g' + qq],
'dgamma_{}{}'.format(b, s): C['F7pgamma' + qq].conjugate(),
'dG_{}{}'.format(b, s): C['F8pg' + qq].conjugate(),
}
else:
u = uflav[qq[0]] + 1
c = uflav[qq[1]] + 1
return {'ugamma_{}{}'.format(u, c): C['F7gamma' + qq],
'uG_{}{}'.format(u, c): C['F8g' + qq],
'ugamma_{}{}'.format(c, u): C['F7pgamma' + qq].conjugate(),
'uG_{}{}'.format(c, u): C['F8pg' + qq].conjugate(),
} | From chromomagnetic Fierz to JMS basis for Class V.
qq should be of the form 'sb', 'ds' etc. | entailment |
def Fierz_to_Bern_chrom(C, dd, parameters):
"""From Fierz to chromomagnetic Bern basis for Class V.
dd should be of the form 'sb', 'ds' etc."""
e = sqrt(4 * pi * parameters['alpha_e'])
gs = sqrt(4 * pi * parameters['alpha_s'])
if dd == 'sb' or dd == 'db':
mq = parameters['m_b']
elif dd == 'ds':
mq = parameters['m_s']
else:
KeyError("Not sure what to do with quark mass for flavour {}".format(dd))
return {
'7gamma' + dd : gs**2 / e / mq * C['F7gamma' + dd ],
'8g' + dd : gs / mq * C['F8g' + dd ],
'7pgamma' + dd : gs**2 / e /mq * C['F7pgamma' + dd],
'8pg' + dd : gs / mq * C['F8pg' + dd]
} | From Fierz to chromomagnetic Bern basis for Class V.
dd should be of the form 'sb', 'ds' etc. | entailment |
def Flavio_to_Fierz_chrom(C, qq, parameters):
"""From Flavio to chromomagnetic Fierz basis for Class V.
qq should be of the form 'sb', 'db' etc."""
p = parameters
V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"])
if qq == 'sb':
xi = V[2, 2] * V[2, 1].conj()
elif qq == 'db':
xi = V[2, 2] * V[2, 0].conj()
elif qq == 'ds':
xi = V[2, 1] * V[2, 0].conj()
elif qq == 'uc':
xi = V[1, 2].conj() * V[0, 2]
else:
raise ValueError("Unexpected flavours: {}".format(qq))
qqfl = qq[::-1]
e = sqrt(4 * pi * parameters['alpha_e'])
gs = sqrt(4 * pi * parameters['alpha_s'])
if qq == 'sb' or qq == 'db':
mq = parameters['m_b']
elif qq == 'ds':
mq = parameters['m_s']
elif qq == 'uc':
mq = parameters['m_c']
else:
KeyError("Not sure what to do with quark mass for flavour {}".format(qq))
dic = {
'F7gamma' + qq: C["C7_" + qqfl] / ((16 * pi**2) / e / mq),
'F8g' + qq: C["C8_" + qqfl] / ((16 * pi**2) / gs / mq),
'F7pgamma' + qq: C["C7p_" + qqfl] / ((16 * pi**2) / e / mq),
'F8pg' + qq: C["C8p_" + qqfl] / ((16 * pi**2) / gs / mq)
}
prefactor = sqrt(2)/p['GF']/xi/4
return {k: v / prefactor for k, v in dic.items()} | From Flavio to chromomagnetic Fierz basis for Class V.
qq should be of the form 'sb', 'db' etc. | entailment |
def Fierz_to_EOS_chrom(C, dd, parameters):
"""From Fierz to chromomagnetic EOS basis for Class V.
dd should be of the form 'sb', 'ds' etc."""
p = parameters
V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"])
Vtb = V[2,2]
Vts = V[2,1]
e = sqrt(4 * pi * parameters['alpha_e'])
gs = sqrt(4 * pi * parameters['alpha_s'])
mb = parameters['m_b']
dic = {"b->s::c7": 16 * pi**2 / mb / e * C["F7gamma" + dd],
"b->s::c7'": 16 * pi**2 / mb / e * C["F7pgamma" + dd],
"b->s::c8": 16 * pi**2 / mb / gs * C["F8g" + dd],
"b->s::c8'": 16 * pi**2 / mb / gs * C["F8pg" + dd]
}
prefactor = sqrt(2)/p['GF']/Vtb/Vts.conj()/4
return {k: prefactor * v for k,v in dic.items()} | From Fierz to chromomagnetic EOS basis for Class V.
dd should be of the form 'sb', 'ds' etc. | entailment |
def JMS_to_FormFlavor_chrom(C, qq, parameters):
"""From JMS to chromomagnetic FormFlavor basis for Class V.
qq should be of the form 'sb', 'ds', 'uu', mt (mu tau), em (e mu) etc."""
e = sqrt(4 * pi * parameters['alpha_e'])
gs = sqrt(4 * pi * parameters['alpha_s'])
if qq[0] in dflav.keys():
s = dflav[qq[0]]
b = dflav[qq[1]]
return {
'CAR_' + qq : C['dgamma'][s, b] / e,
'CGR_' + qq : C['dG'][s, b] / gs,
'CAL_' + qq : C['dgamma'][b, s].conj() / e,
'CGL_' + qq : C['dG'][b, s].conj() / gs,
}
if qq[0] in llflav.keys():
l1 = llflav[qq[0]]
l2 = llflav[qq[1]]
return {
'CAR_' + qq : C['egamma'][l1, l2] / e,
'CAL_' + qq : C['egamma'][l2, l1].conj() / gs,
}
if qq[0] in uflav.keys():
u = uflav[qq[0]]
c = uflav[qq[1]]
return {
'CAR_' + qq : C['ugamma'][u, c] / e,
'CGR_' + qq : C['uG'][u, c] / gs,
'CAL_' + qq : C['ugamma'][c, u].conj() / e,
'CGL_' + qq : C['uG'][c, u].conj() / gs,
}
else:
return 'not in FormFlav_chrom' | From JMS to chromomagnetic FormFlavor basis for Class V.
qq should be of the form 'sb', 'ds', 'uu', mt (mu tau), em (e mu) etc. | entailment |
def _JMS_to_Flavio_VII(C, parameters):
"""From JMS to flavio basis for class VII, i.e. flavour blind operators."""
d = {}
dtrans = json.loads(pkgutil.get_data('wilson', 'data/flavio_jms_vii.json').decode('utf8'))
for cj, cf in dtrans.items():
d[cf] = C.get(cj, 0)
gs = sqrt(4 * pi * parameters['alpha_s'])
e = sqrt(4 * pi * parameters['alpha_e'])
preC7 = 16 * pi**2 / e
preC8 = 16 * pi**2 / gs
d['C8_uu'] = preC8 / parameters['m_u'] * C.get('uG_11', 0)
d['C8_cc'] = preC8 / parameters['m_c'] * C.get('uG_22', 0)
d['C8_dd'] = preC8 / parameters['m_d'] * C.get('dG_11', 0)
d['C8_ss'] = preC8 / parameters['m_s'] * C.get('dG_22', 0)
d['C8_bb'] = preC8 / parameters['m_b'] * C.get('dG_33', 0)
d['C7_uu'] = preC7 / parameters['m_u'] * C.get('ugamma_11', 0)
d['C7_cc'] = preC7 / parameters['m_c'] * C.get('ugamma_22', 0)
d['C7_dd'] = preC7 / parameters['m_d'] * C.get('dgamma_11', 0)
d['C7_ss'] = preC7 / parameters['m_s'] * C.get('dgamma_22', 0)
d['C7_bb'] = preC7 / parameters['m_b'] * C.get('dgamma_33', 0)
d['C7_ee'] = preC7 / parameters['m_e'] * C.get('egamma_11', 0)
d['C7_mumu'] = preC7 / parameters['m_mu'] * C.get('egamma_22', 0)
d['C7_tautau'] = preC7 / parameters['m_tau'] * C.get('egamma_33', 0)
preGF = sqrt(2) / parameters['GF'] / 4
return {k: preGF * v for k,v in d.items()} | From JMS to flavio basis for class VII, i.e. flavour blind operators. | entailment |
def get_parameters(scale, f=5, input_parameters=None):
"""Get parameters (masses, coupling constants, ...) at the scale
`scale` in QCD with `f` dynamical quark flavours. Optionally takes a
dictionary of inputs (otherwise, defaults are used)."""
p = default_parameters.copy()
if input_parameters is not None:
# if parameters are passed in, overwrite the default values
p.update(input_parameters)
parameters = {}
# running quark masses and alpha_s
parameters['m_b'] = m_b(p['m_b'], scale, f, p['alpha_s'])
parameters['m_c'] = m_c(p['m_c'], scale, f, p['alpha_s'])
parameters['m_s'] = m_s(p['m_s'], scale, f, p['alpha_s'])
parameters['m_u'] = m_s(p['m_u'], scale, f, p['alpha_s'])
parameters['m_d'] = m_s(p['m_d'], scale, f, p['alpha_s'])
parameters['alpha_s'] = alpha_s(scale, f, p['alpha_s'])
# no running is performed for these parameters
for k in ['m_W', 'm_Z', 'GF',
'alpha_e',
'Vus', 'Vub', 'Vcb', 'delta',
'm_e', 'm_mu', 'm_tau', ]:
parameters[k] = p[k]
return parameters | Get parameters (masses, coupling constants, ...) at the scale
`scale` in QCD with `f` dynamical quark flavours. Optionally takes a
dictionary of inputs (otherwise, defaults are used). | entailment |
def cut_from_chain(sciobj_model):
"""Remove an object from a revision chain.
The object can be at any location in the chain, including the head or tail.
Preconditions:
- The object with the pid is verified to exist and to be a member of an
revision chain. E.g., with:
d1_gmn.app.views.asserts.is_existing_object(pid)
d1_gmn.app.views.asserts.is_in_revision_chain(pid)
Postconditions:
- The given object is a standalone object with empty obsoletes, obsoletedBy and
seriesId fields.
- The previously adjacent objects in the chain are adjusted to close any gap that
was created or remove dangling reference at the head or tail.
- If the object was the last object in the chain and the chain has a SID, the SID
reference is shifted over to the new last object in the chain.
"""
if _is_head(sciobj_model):
old_pid = sciobj_model.obsoletes.did
_cut_head_from_chain(sciobj_model)
elif _is_tail(sciobj_model):
old_pid = sciobj_model.obsoleted_by.did
_cut_tail_from_chain(sciobj_model)
else:
old_pid = sciobj_model.obsoleted_by.did
_cut_embedded_from_chain(sciobj_model)
_update_sid_to_last_existing_pid_map(old_pid) | Remove an object from a revision chain.
The object can be at any location in the chain, including the head or tail.
Preconditions:
- The object with the pid is verified to exist and to be a member of an
revision chain. E.g., with:
d1_gmn.app.views.asserts.is_existing_object(pid)
d1_gmn.app.views.asserts.is_in_revision_chain(pid)
Postconditions:
- The given object is a standalone object with empty obsoletes, obsoletedBy and
seriesId fields.
- The previously adjacent objects in the chain are adjusted to close any gap that
was created or remove dangling reference at the head or tail.
- If the object was the last object in the chain and the chain has a SID, the SID
reference is shifted over to the new last object in the chain. | entailment |
def resolve_sid(sid):
"""Get the PID to which the ``sid`` currently maps.
Preconditions:
- ``sid`` is verified to exist. E.g., with d1_gmn.app.views.asserts.is_sid().
"""
return d1_gmn.app.models.Chain.objects.get(sid__did=sid).head_pid.did | Get the PID to which the ``sid`` currently maps.
Preconditions:
- ``sid`` is verified to exist. E.g., with d1_gmn.app.views.asserts.is_sid(). | entailment |
def is_obsoletes_pid(pid):
"""Return True if ``pid`` is referenced in the obsoletes field of any object.
This will return True even if the PID is in the obsoletes field of an object that
does not exist on the local MN, such as replica that is in an incomplete chain.
"""
return d1_gmn.app.models.ScienceObject.objects.filter(obsoletes__did=pid).exists() | Return True if ``pid`` is referenced in the obsoletes field of any object.
This will return True even if the PID is in the obsoletes field of an object that
does not exist on the local MN, such as replica that is in an incomplete chain. | entailment |
def is_obsoleted_by_pid(pid):
"""Return True if ``pid`` is referenced in the obsoletedBy field of any object.
This will return True even if the PID is in the obsoletes field of an object that
does not exist on the local MN, such as replica that is in an incomplete chain.
"""
return d1_gmn.app.models.ScienceObject.objects.filter(
obsoleted_by__did=pid
).exists() | Return True if ``pid`` is referenced in the obsoletedBy field of any object.
This will return True even if the PID is in the obsoletes field of an object that
does not exist on the local MN, such as replica that is in an incomplete chain. | entailment |
def _merge_chains(chain_model_a, chain_model_b):
"""Merge two chains.
For use when it becomes known that two chains that were created separately
actually are separate sections of the same chain
E.g.:
- A obsoleted by X is created. A has no SID. X does not exist yet. A chain is
created for A.
- B obsoleting Y is created. B has SID. Y does not exist yet. A chain is created
for B.
- C obsoleting X, obsoleted by Y is created. C tells us that X and Y are in the
same chain, which means that A and B are in the same chain. At this point, the
two chains need to be merged. Merging the chains causes A to take on the SID of
B.
"""
_set_chain_sid(
chain_model_a, d1_gmn.app.did.get_did_by_foreign_key(chain_model_b.sid)
)
for member_model in _get_all_chain_member_queryset_by_chain(chain_model_b):
member_model.chain = chain_model_a
member_model.save()
chain_model_b.delete() | Merge two chains.
For use when it becomes known that two chains that were created separately
actually are separate sections of the same chain
E.g.:
- A obsoleted by X is created. A has no SID. X does not exist yet. A chain is
created for A.
- B obsoleting Y is created. B has SID. Y does not exist yet. A chain is created
for B.
- C obsoleting X, obsoleted by Y is created. C tells us that X and Y are in the
same chain, which means that A and B are in the same chain. At this point, the
two chains need to be merged. Merging the chains causes A to take on the SID of
B. | entailment |
def _set_chain_sid(chain_model, sid):
"""Set or update SID for chain.
If the chain already has a SID, ``sid`` must either be None or match the existing
SID.
"""
if not sid:
return
if chain_model.sid and chain_model.sid.did != sid:
raise d1_common.types.exceptions.ServiceFailure(
0,
'Attempted to modify existing SID. '
'existing_sid="{}", new_sid="{}"'.format(chain_model.sid.did, sid),
)
chain_model.sid = d1_gmn.app.did.get_or_create_did(sid)
chain_model.save() | Set or update SID for chain.
If the chain already has a SID, ``sid`` must either be None or match the existing
SID. | entailment |
def _get_chain_by_pid(pid):
"""Find chain by pid.
Return None if not found.
"""
try:
return d1_gmn.app.models.ChainMember.objects.get(pid__did=pid).chain
except d1_gmn.app.models.ChainMember.DoesNotExist:
pass | Find chain by pid.
Return None if not found. | entailment |
def _get_chain_by_sid(sid):
"""Return None if not found."""
try:
return d1_gmn.app.models.Chain.objects.get(sid__did=sid)
except d1_gmn.app.models.Chain.DoesNotExist:
pass | Return None if not found. | entailment |
def _update_sid_to_last_existing_pid_map(pid):
"""Set chain head PID to the last existing object in the chain to which ``pid``
belongs. If SID has been set for chain, it resolves to chain head PID.
Intended to be called in MNStorage.delete() and other chain manipulation.
Preconditions:
- ``pid`` must exist and be verified to be a PID.
d1_gmn.app.views.asserts.is_existing_object()
"""
last_pid = _find_head_or_latest_connected(pid)
chain_model = _get_chain_by_pid(last_pid)
if not chain_model:
return
chain_model.head_pid = d1_gmn.app.did.get_or_create_did(last_pid)
chain_model.save() | Set chain head PID to the last existing object in the chain to which ``pid``
belongs. If SID has been set for chain, it resolves to chain head PID.
Intended to be called in MNStorage.delete() and other chain manipulation.
Preconditions:
- ``pid`` must exist and be verified to be a PID.
d1_gmn.app.views.asserts.is_existing_object() | entailment |
def _create_chain(pid, sid):
"""Create the initial chain structure for a new standalone object. Intended to be
called in MNStorage.create().
Preconditions:
- ``sid`` must be verified to be available to be assigned to a new standalone
object. E.g., with is_valid_sid_for_new_standalone().
"""
chain_model = d1_gmn.app.models.Chain(
# sid=d1_gmn.app.models.did(sid) if sid else None,
head_pid=d1_gmn.app.did.get_or_create_did(pid)
)
chain_model.save()
_add_pid_to_chain(chain_model, pid)
_set_chain_sid(chain_model, sid)
return chain_model | Create the initial chain structure for a new standalone object. Intended to be
called in MNStorage.create().
Preconditions:
- ``sid`` must be verified to be available to be assigned to a new standalone
object. E.g., with is_valid_sid_for_new_standalone(). | entailment |
def populate_entity_type(apps, schema_editor):
"""Populate entity type from attached descriptor schema."""
Entity = apps.get_model('flow', 'Entity')
for entity in Entity.objects.all():
if entity.descriptor_schema is not None:
entity.type = entity.descriptor_schema.slug
entity.save() | Populate entity type from attached descriptor schema. | entailment |
def deserialize(doc_xml, pyxb_binding=None):
"""Deserialize DataONE XML types to PyXB.
Args:
doc_xml: UTF-8 encoded ``bytes``
pyxb_binding: PyXB binding object. If not specified, the correct one should be
selected automatically.
Returns:
PyXB object
See Also:
``deserialize_d1_exception()`` for deserializing DataONE Exception types.
"""
pyxb_binding = pyxb_binding or d1_common.types.dataoneTypes
try:
return pyxb_binding.CreateFromDocument(doc_xml)
except pyxb.ValidationError as e:
raise ValueError(
'Unable to deserialize XML to PyXB. error="{}" xml="{}"'.format(
e.details(), doc_xml
)
)
except (pyxb.PyXBException, xml.sax.SAXParseException, Exception) as e:
raise ValueError(
'Unable to deserialize XML to PyXB. error="{}" xml="{}"'.format(
str(e), doc_xml
)
) | Deserialize DataONE XML types to PyXB.
Args:
doc_xml: UTF-8 encoded ``bytes``
pyxb_binding: PyXB binding object. If not specified, the correct one should be
selected automatically.
Returns:
PyXB object
See Also:
``deserialize_d1_exception()`` for deserializing DataONE Exception types. | entailment |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.