#!/usr/bin/env python2.7
"""
Check the .lib file for an SRAM
"""
import unittest
from testutils import header,openram_test
import sys,os,re
sys.path.append(os.path.join(sys.path[0],".."))
import globals
from globals import OPTS
import debug
class lib_test(openram_test):
def runTest(self):
globals.init_openram("config_20_{0}".format(OPTS.tech_name))
OPTS.check_lvsdrc = False
import sram
from characterizer import lib
debug.info(1, "Testing timing for sample 2 bit, 16 words SRAM with 1 bank")
s = sram.sram(word_size=2,
num_words=16,
num_banks=1,
name="sram_2_16_1_{0}".format(OPTS.tech_name))
OPTS.check_lvsdrc = True
tempspice = OPTS.openram_temp + "temp.sp"
s.sp_write(tempspice)
lib.lib(out_dir=OPTS.openram_temp, sram=s, sp_file=tempspice, use_model=True)
# get all of the .lib files generated
files = os.listdir(OPTS.openram_temp)
        nametest = re.compile(r"\.lib$", re.IGNORECASE)
lib_files = filter(nametest.search, files)
# and compare them with the golden model
for filename in lib_files:
newname = filename.replace(".lib","_analytical.lib")
libname = "{0}/{1}".format(OPTS.openram_temp,filename)
golden = "{0}/golden/{1}".format(os.path.dirname(os.path.realpath(__file__)),newname)
self.isapproxdiff(libname,golden,0.15)
globals.end_openram()
# Instantiate a copy of the class to actually run the test.
if __name__ == "__main__":
(OPTS, args) = globals.parse_args()
del sys.argv[1:]
header(__file__, OPTS.tech_name)
unittest.main()
|
import pytest
# module-level parameterization
pytestmark = pytest.mark.parametrize('x', [(1,), (1.0,), (1+0j,)])
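# Module-level `pytestmark` parametrizes every test in this module; each
# single-element tuple unpacks, so `x` takes the values 1, 1.0, and 1+0j in turn.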
def test_param_13(x):
assert x == 1
class TestParamAll(object):
def test_param_13(self, x):
assert x == 1
def test_spam_13(self, x):
assert x == 1
|
from itertools import count
from math import sqrt
def counting_rectangles(target):
"""
Returns the area of the rectangular grid with the nearest to target
number of rectangles.
"""
mindiff = target
bestarea = 0
def rectangles(n, m):
return int((n**2 + n) * (m**2 + m) / 4)
for i in count(1):
        # Start just below the j where the count crosses the target, so the
        # closest count under the target is not skipped.
        for j in count(max(1, int(sqrt(4*target/(i**2 + i))) - 1)):
r = rectangles(i, j)
if abs(target - r) < mindiff:
mindiff = abs(target - r)
bestarea = i * j
if r > target:
break
if i**2 + i > target / 2:
break
return bestarea
def solve(vol=0):
return counting_rectangles(2000000)
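# Quick check (illustrative, not part of the original snippet): Project Euler
# problem 85 asks for the grid area whose rectangle count is nearest to two
# million; the expected answer is 2772.
if __name__ == "__main__":
    print(solve())  # -> 2772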
|
from abc import ABC
from requests.auth import AuthBase, HTTPBasicAuth, HTTPDigestAuth
class RegistryAuthBase(AuthBase, ABC):
"""Base class that all RegistryAuth drives from"""
class RegistryHTTPBasicAuth(RegistryAuthBase, HTTPBasicAuth):
"""Implements HTTP Basic Authentication"""
class RegistryHTTPDigestAuth(RegistryAuthBase, HTTPDigestAuth):
"""Implements HTTP Digest Authentication"""
|
import os
import numpy as np
import pytest
from config.stage import ConfigStage
from extract.stage import ExtractStage
from preprocess.stage import PreprocessStage
@pytest.mark.parametrize("action", ['train'])
def test_extract_stage(action):
path = os.path.abspath(os.path.join(__file__, "../../..", 'resources/config.json'))
config = ConfigStage().run(path)
images, labels = ExtractStage().run(config, action)
images, labels = PreprocessStage().run(images, labels, config, action)
assert isinstance(images, np.ndarray)
assert isinstance(labels, np.ndarray)
|
# Conway's game of life
import random, time, copy
WIDTH = 60
HEIGHT = 20
# Create a list of lists for the cells.
nextCells = []
for x in range(WIDTH):
column = []
for y in range(HEIGHT):
if random.randint(0,1) == 0:
column.append('#') #creates a living cell
else:
column.append(' ') #adds a dead cell
nextCells.append(column) #next cells is a list of column lists.
while True: # Main program loop
print('\n\n\n\n\n') # Separate each step with new lines.
currentCells = copy.deepcopy(nextCells)
#print currentCells on the screen.
for y in range(HEIGHT):
for x in range(WIDTH):
print(currentCells[x][y], end='') #prints the # or space
print() #print a newline at the end of the row.
#Calculate the next step's cells based on current step's cells.
for x in range(WIDTH):
for y in range(HEIGHT):
#get neighboring coordinates
            # `% WIDTH` ensures leftCoord is always between 0 and WIDTH - 1.
leftCoord = (x - 1) % WIDTH
rightCoord = (x + 1) % WIDTH
aboveCoord = (y - 1) % HEIGHT
            belowCoord = (y + 1) % HEIGHT
            # Count the number of living neighbors.
numNeighbors = 0
if currentCells[leftCoord][aboveCoord] == '#':
numNeighbors += 1 # Top-Left neighbor is alive.
if currentCells[x][aboveCoord] == '#':
numNeighbors += 1 #Top neighbor is alive.
if currentCells[rightCoord][aboveCoord] == '#':
numNeighbors += 1 # Top-Right neighbor is alive.
            if currentCells[leftCoord][y] == '#':
                numNeighbors += 1 # Left neighbor is alive.
            if currentCells[rightCoord][y] == '#':
                numNeighbors += 1 # Right neighbor is alive.
            if currentCells[leftCoord][belowCoord] == '#':
                numNeighbors += 1 # Below-left neighbor is alive.
            if currentCells[x][belowCoord] == '#':
                numNeighbors += 1 # Below neighbor is alive.
            if currentCells[rightCoord][belowCoord] == '#':
                numNeighbors += 1 # Below-right neighbor is alive.
            # Set cell based on Conway's Game of Life rules.
if currentCells[x][y] == '#' and (numNeighbors == 2 or numNeighbors == 3):
#living cells with 2 or 3 neighbors stay alive
nextCells[x][y] = '#'
            elif currentCells[x][y] == ' ' and numNeighbors == 3:
                # Dead cells with 3 neighbors become alive.
                nextCells[x][y] = '#'
            else:
                nextCells[x][y] = ' '
time.sleep(1) # Add a 1-second pause to reduce flickering.
|
#!/usr/bin/python
# smartpiano Python Client
# Copyright (c) Jeremy Collette 2020.
# This software is released under the MIT license.
# See the 'LICENSE' file for more information.
import sys
from smartpianofactory import SmartPianoFactory
if __name__ != "__main__":
sys.exit(0)
print("smartpiano Python Client v0.1.1")
print("----------------------------------------")
print("Copyright (c) 2020 Jeremy Collette.\n")
if len(sys.argv) <= 1:
print("python3 main.py <midi_file_name> [--serial <serial_device>] [--baud <baud_rate>]")
sys.exit(1)
midi_file_name = sys.argv[1]
serial_device = "/dev/ttyACM0"
baud_rate = "115200"
i = 2
while i < len(sys.argv):
arg = sys.argv[i]
if arg == "--serial":
if i == len(sys.argv)-1:
print("Expected string after \"--serial\" option. Run without arguments to see valid options.")
sys.exit(1)
serial_device = sys.argv[i+1]
i += 1 # Skip parsing option value
elif arg == "--baud":
if i == len(sys.argv)-1:
print("Expected integer after \"--baud\" option. Run without arguments to see valid options.")
sys.exit(1)
baud_rate = sys.argv[i + 1]
i += 1 # Skip parsing option value
else:
print("Unrecognised option \"" + arg + "\". Run without arguments to see valid options.")
sys.exit(1)
i += 1
print("midi_file_name = " + midi_file_name)
print("serial_device = " + serial_device)
print("baud_rate = " + baud_rate)
smart_piano_factory = SmartPianoFactory(serial_device, baud_rate)
smart_piano = smart_piano_factory.create_smart_piano_client(midi_file_name)
smart_piano.play_song()
|
from flask import Blueprint, request, jsonify
from flask_jwt_extended import jwt_required
from marshmallow import Schema, validate, fields
from jcapi.models import Contract
from jcapi import db
contract = Blueprint('contract', __name__)
# Note: the route decorator must be outermost so that the JWT-protected
# wrapper, not the bare view function, is what gets registered.
@contract.route('/', methods=['POST'])
@jwt_required
def create_contract():
rv = request.get_json()
errors = CreateContractSchema().validate(rv)
if errors:
        return jsonify(errors), 400
contract = Contract()
contract.version = 1
contract.name = rv['name']
contract.description = rv['description']
contract.legal_text = rv['legal_text']
contract.effective_date = rv['effective_date']
contract.expiration_date = rv['expiration_date']
contract.currency = rv['currency']
contract.status = rv['status']
contract.owner_id = 1
db.session.add(contract)
db.session.commit()
return jsonify(contract.to_dict()), 200
@contract.route('/<id>', methods=['GET'])
@jwt_required
def get_contract(id):
contract = Contract.query.get(int(id))
if contract is None:
return jsonify({'msg': 'Contract not found'}), 404
return jsonify(contract.to_dict())
@contract.route('/<id>', methods=['POST'])
@jwt_required
def update_contract(id: int):
# TODO finish it properly
contract = Contract.query.get(int(id))
if contract is None:
return jsonify({'msg': 'Contract not found'}), 404
return jsonify(contract.to_dict())
@contract.route('/<id>/versions', methods=['GET'])
@jwt_required
def get_contract_versions(id: int):
# TODO finish it properly
contract = Contract.query.get(int(id))
if contract is None:
return jsonify({'msg': 'Contract not found'}), 404
return jsonify(contract.to_dict())
@contract.route('/<id>/parties', methods=['GET'])
@jwt_required
def get_contract_parties(id: int):
# TODO finish it properly
contract = Contract.query.get(int(id))
if contract is None:
return jsonify({'msg': 'Contract not found'}), 404
return jsonify(contract.to_dict())
@contract.route('/<id>/template_tags', methods=['GET'])
@jwt_required
def get_template_tags(id: int):
# TODO finish it properly
contract = Contract.query.get(int(id))
if contract is None:
return jsonify({'msg': 'Contract not found'}), 404
return jsonify(contract.to_dict())
class CreateContractSchema(Schema):
name = fields.String(validate=validate.Length(min=3, max=128), required=True)
description = fields.String()
legal_text = fields.String(required=True)
effective_date = fields.Date()
expiration_date = fields.Date()
currency = fields.String(validate=validate.Length(min=3, max=3))
status = fields.String(validate=validate.Length(min=3, max=32))
class UpdateContractSchema(Schema):
version = fields.Integer(required=False)
name = fields.String(validate=validate.Length(min=3, max=128), required=True)
description = fields.String()
legal_text = fields.String(required=True)
effective_date = fields.Date()
expiration_date = fields.Date()
currency = fields.String(validate=validate.Length(min=3, max=3))
status = fields.String(validate=validate.Length(min=3, max=32))
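# Registration sketch (illustrative; the factory import and URL prefix are
# assumptions, not part of this module):
#   from jcapi import create_app
#   app = create_app()
#   app.register_blueprint(contract, url_prefix='/contracts')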
|
# Licensed under an MIT style license -- see LICENSE.md
import numpy as np
import warnings
from scipy import stats
from seaborn.distributions import (
_DistributionPlotter as SeabornDistributionPlotter, KDE as SeabornKDE,
)
from seaborn.utils import _normalize_kwargs, _check_argument
import pandas as pd
__author__ = ["Charlie Hoy <charlie.hoy@ligo.org>", "Seaborn authors"]
class KDE(SeabornKDE):
"""Extension of the `seaborn._statistics.KDE` to allow for custom
kde_kernel
Parameters
----------
*args: tuple
all args passed to the `seaborn._statistics.KDE` class
kde_kernel: func, optional
kernel you wish to use to evaluate the KDE. Default
scipy.stats.gaussian_kde
kde_kwargs: dict, optional
optional kwargs to be passed to the kde_kernel. Default {}
**kwargs: dict
all kwargs passed to the `seaborn._statistics.KDE` class
"""
    def __init__(
        self, *args, kde_kernel=stats.gaussian_kde, kde_kwargs=None, **kwargs
    ):
        super(KDE, self).__init__(*args, **kwargs)
        self._kde_kernel = kde_kernel
        # Avoid a shared mutable default argument: use a fresh dict per instance
        self._kde_kwargs = {} if kde_kwargs is None else kde_kwargs
def _fit(self, fit_data, weights=None):
"""Fit the scipy kde while adding bw_adjust logic and version check."""
        fit_kws = dict(self._kde_kwargs)  # copy so the caller's dict is not mutated
        fit_kws["bw_method"] = self.bw_method
if weights is not None:
fit_kws["weights"] = weights
kde = self._kde_kernel(fit_data, **fit_kws)
kde.set_bandwidth(kde.factor * self.bw_adjust)
return kde
class _DistributionPlotter(SeabornDistributionPlotter):
"""Extension of the `seaborn._statistics._DistributionPlotter` to allow for
the custom KDE method to be used
Parameters
----------
*args: tuple
all args passed to the `seaborn._statistics._DistributionPlotter` class
**kwargs: dict
all kwargs passed to the `seaborn._statistics._DistributionPlotter`
class
"""
def __init__(self, *args, **kwargs):
super(_DistributionPlotter, self).__init__(*args, **kwargs)
def plot_univariate_density(
self,
multiple,
common_norm,
common_grid,
fill,
legend,
estimate_kws,
variance_atol,
**plot_kws,
):
import matplotlib as mpl
# Handle conditional defaults
if fill is None:
fill = multiple in ("stack", "fill")
# Preprocess the matplotlib keyword dictionaries
if fill:
artist = mpl.collections.PolyCollection
else:
artist = mpl.lines.Line2D
plot_kws = _normalize_kwargs(plot_kws, artist)
# Input checking
_check_argument("multiple", ["layer", "stack", "fill"], multiple)
# Always share the evaluation grid when stacking
subsets = bool(set(self.variables) - {"x", "y"})
if subsets and multiple in ("stack", "fill"):
common_grid = True
# Check if the data axis is log scaled
log_scale = self._log_scaled(self.data_variable)
# Do the computation
densities = self._compute_univariate_density(
self.data_variable,
common_norm,
common_grid,
estimate_kws,
log_scale,
variance_atol,
)
# Note: raises when no hue and multiple != layer. A problem?
densities, baselines = self._resolve_multiple(densities, multiple)
# Control the interaction with autoscaling by defining sticky_edges
# i.e. we don't want autoscale margins below the density curve
sticky_density = (0, 1) if multiple == "fill" else (0, np.inf)
if multiple == "fill":
# Filled plots should not have any margins
sticky_support = densities.index.min(), densities.index.max()
else:
sticky_support = []
# Handle default visual attributes
if "hue" not in self.variables:
if self.ax is None:
color = plot_kws.pop("color", None)
default_color = "C0" if color is None else color
else:
if fill:
if self.var_types[self.data_variable] == "datetime":
# Avoid drawing empty fill_between on date axis
# https://github.com/matplotlib/matplotlib/issues/17586
scout = None
default_color = plot_kws.pop(
"color", plot_kws.pop("facecolor", None)
)
if default_color is None:
default_color = "C0"
else:
alpha_shade = plot_kws.pop("alpha_shade", 0.25)
scout = self.ax.fill_between([], [], **plot_kws)
default_color = tuple(scout.get_facecolor().squeeze())
plot_kws.pop("color", None)
else:
plot_kws.pop("alpha_shade", 0.25)
scout, = self.ax.plot([], [], **plot_kws)
default_color = scout.get_color()
if scout is not None:
scout.remove()
plot_kws.pop("color", None)
default_alpha = .25 if multiple == "layer" else .75
alpha = plot_kws.pop("alpha", default_alpha) # TODO make parameter?
# Now iterate through the subsets and draw the densities
# We go backwards so stacked densities read from top-to-bottom
for sub_vars, _ in self.iter_data("hue", reverse=True):
# Extract the support grid and density curve for this level
key = tuple(sub_vars.items())
try:
density = densities[key]
except KeyError:
continue
support = density.index
fill_from = baselines[key]
ax = self._get_axes(sub_vars)
# Modify the matplotlib attributes from semantic mapping
if "hue" in self.variables:
color = self._hue_map(sub_vars["hue"])
else:
color = default_color
artist_kws = self._artist_kws(
plot_kws, fill, False, multiple, color, alpha
)
# Either plot a curve with observation values on the x axis
if "x" in self.variables:
if fill:
artist = ax.fill_between(
support, fill_from, density, **artist_kws
)
else:
artist, = ax.plot(support, density, **artist_kws)
artist.sticky_edges.x[:] = sticky_support
artist.sticky_edges.y[:] = sticky_density
# Or plot a curve with observation values on the y axis
else:
if fill:
artist = ax.fill_betweenx(
support, fill_from, density, **artist_kws
)
else:
artist, = ax.plot(density, support, **artist_kws)
artist.sticky_edges.x[:] = sticky_density
artist.sticky_edges.y[:] = sticky_support
# --- Finalize the plot ----
ax = self.ax if self.ax is not None else self.facets.axes.flat[0]
default_x = default_y = ""
if self.data_variable == "x":
default_y = "Density"
if self.data_variable == "y":
default_x = "Density"
self._add_axis_labels(ax, default_x, default_y)
if "hue" in self.variables and legend:
from functools import partial
if fill:
artist = partial(mpl.patches.Patch)
else:
artist = partial(mpl.lines.Line2D, [], [])
ax_obj = self.ax if self.ax is not None else self.facets
self._add_legend(
ax_obj, artist, fill, False, multiple, alpha, plot_kws, {},
)
def _compute_univariate_density(
self,
data_variable,
common_norm,
common_grid,
estimate_kws,
log_scale,
variance_atol,
):
# Initialize the estimator object
estimator = KDE(**estimate_kws)
all_data = self.plot_data.dropna()
if set(self.variables) - {"x", "y"}:
if common_grid:
all_observations = self.comp_data.dropna()
estimator.define_support(all_observations[data_variable])
else:
common_norm = False
densities = {}
for sub_vars, sub_data in self.iter_data("hue", from_comp_data=True):
# Extract the data points from this sub set and remove nulls
sub_data = sub_data.dropna()
observations = sub_data[data_variable]
observation_variance = observations.var()
if np.isclose(observation_variance, 0, atol=variance_atol) or np.isnan(observation_variance):
msg = "Dataset has 0 variance; skipping density estimate."
warnings.warn(msg, UserWarning)
continue
# Extract the weights for this subset of observations
if "weights" in self.variables:
weights = sub_data["weights"]
else:
weights = None
# Estimate the density of observations at this level
density, support = estimator(observations, weights=weights)
if log_scale:
support = np.power(10, support)
# Apply a scaling factor so that the integral over all subsets is 1
if common_norm:
density *= len(sub_data) / len(all_data)
# Store the density for this level
key = tuple(sub_vars.items())
densities[key] = pd.Series(density, index=support)
return densities
def kdeplot(
x=None, # Allow positional x, because behavior will not change with reorg
*,
y=None,
shade=None, # Note "soft" deprecation, explained below
vertical=False, # Deprecated
kernel=None, # Deprecated
bw=None, # Deprecated
gridsize=200, # TODO maybe depend on uni/bivariate?
cut=3, clip=None, legend=True, cumulative=False,
shade_lowest=None, # Deprecated, controlled with levels now
cbar=False, cbar_ax=None, cbar_kws=None,
ax=None,
# New params
weights=None, # TODO note that weights is grouped with semantics
hue=None, palette=None, hue_order=None, hue_norm=None,
multiple="layer", common_norm=True, common_grid=False,
levels=10, thresh=.05,
bw_method="scott", bw_adjust=1, log_scale=None,
    color=None, fill=None, kde_kernel=stats.gaussian_kde, kde_kwargs=None,
variance_atol=1e-8,
# Renamed params
data=None, data2=None,
**kwargs,
):
    if kde_kernel is None:
        kde_kernel = stats.gaussian_kde
    if kde_kwargs is None:
        kde_kwargs = {}
# Handle deprecation of `data2` as name for y variable
if data2 is not None:
y = data2
# If `data2` is present, we need to check for the `data` kwarg being
# used to pass a vector for `x`. We'll reassign the vectors and warn.
# We need this check because just passing a vector to `data` is now
# technically valid.
x_passed_as_data = (
x is None
and data is not None
and np.ndim(data) == 1
)
if x_passed_as_data:
            msg = "Use `x` and `y` rather than `data` and `data2`"
x = data
else:
msg = "The `data2` param is now named `y`; please update your code"
warnings.warn(msg, FutureWarning)
# Handle deprecation of `vertical`
if vertical:
msg = (
"The `vertical` parameter is deprecated and will be removed in a "
"future version. Assign the data to the `y` variable instead."
)
warnings.warn(msg, FutureWarning)
x, y = y, x
# Handle deprecation of `bw`
if bw is not None:
msg = (
"The `bw` parameter is deprecated in favor of `bw_method` and "
f"`bw_adjust`. Using {bw} for `bw_method`, but please "
"see the docs for the new parameters and update your code."
)
warnings.warn(msg, FutureWarning)
bw_method = bw
# Handle deprecation of `kernel`
if kernel is not None:
msg = (
"Support for alternate kernels has been removed. "
"Using Gaussian kernel."
)
warnings.warn(msg, UserWarning)
# Handle deprecation of shade_lowest
if shade_lowest is not None:
if shade_lowest:
thresh = 0
msg = (
"`shade_lowest` is now deprecated in favor of `thresh`. "
f"Setting `thresh={thresh}`, but please update your code."
)
warnings.warn(msg, UserWarning)
# Handle `n_levels`
# This was never in the formal API but it was processed, and appeared in an
# example. We can treat as an alias for `levels` now and deprecate later.
levels = kwargs.pop("n_levels", levels)
    # Handle "soft" deprecation of `shade`. `shade` is not really the right
    # terminology here, but unlike some of the other deprecated parameters it
    # is probably very commonly used and much harder to remove. This is therefore
# going to be a longer process where, first, `fill` will be introduced and
# be used throughout the documentation. In 0.12, when kwarg-only
# enforcement hits, we can remove the shade/shade_lowest out of the
# function signature all together and pull them out of the kwargs. Then we
# can actually fire a FutureWarning, and eventually remove.
if shade is not None:
fill = shade
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
p = _DistributionPlotter(
data=data,
variables=_DistributionPlotter.get_semantics(locals()),
)
p.map_hue(palette=palette, order=hue_order, norm=hue_norm)
if ax is None:
import matplotlib.pyplot as plt
ax = plt.gca()
# Check for a specification that lacks x/y data and return early
if not p.has_xy_data:
return ax
# Pack the kwargs for statistics.KDE
estimate_kws = dict(
bw_method=bw_method,
bw_adjust=bw_adjust,
gridsize=gridsize,
cut=cut,
clip=clip,
cumulative=cumulative,
kde_kernel=kde_kernel,
kde_kwargs=kde_kwargs
)
p._attach(ax, allowed_types=["numeric", "datetime"], log_scale=log_scale)
if p.univariate:
plot_kws = kwargs.copy()
if color is not None:
plot_kws["color"] = color
p.plot_univariate_density(
multiple=multiple,
common_norm=common_norm,
common_grid=common_grid,
fill=fill,
legend=legend,
estimate_kws=estimate_kws,
variance_atol=variance_atol,
**plot_kws,
)
else:
p.plot_bivariate_density(
common_norm=common_norm,
fill=fill,
levels=levels,
thresh=thresh,
legend=legend,
color=color,
cbar=cbar,
cbar_ax=cbar_ax,
cbar_kws=cbar_kws,
estimate_kws=estimate_kws,
**kwargs,
)
return ax
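# Minimal usage sketch (not part of the original module): exercises the custom
# kdeplot with synthetic data. Assumes a working matplotlib backend; the sample
# and bandwidth choices are purely illustrative.
if __name__ == "__main__":
    import matplotlib.pyplot as plt
    rng = np.random.default_rng(0)
    sample = rng.normal(size=500)
    kdeplot(x=sample, bw_adjust=0.8, fill=True)  # kde_kernel defaults to scipy's gaussian_kde
    plt.show()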
|
from api.models import Category, Customer, Item, Restaurant
from conftest import create_app, make_auth_header
app = create_app()
### RESTAURANTS ###
def test_pass_get_restaurants(client):
response = client.get('/restaurants')
assert response.status_code == 200
restaurants = response.get_json()
assert type(restaurants) == list
with app.app_context():
for restaurant in restaurants:
assert type(restaurant) == dict
assert type(restaurant['id']) == int
assert type(restaurant['name']) == str
assert 'auth0_id' not in restaurant
assert Restaurant.query.get(restaurant['id']).auth0_id
def test_fail_get_restaurants(client):
# GET /restaurant (singular instead of plural)
response = client.get('/restaurant')
assert response.status_code == 404
assert response.get_json() is None
def test_pass_get_restaurant(client):
response = client.get('/restaurants/1')
assert response.status_code == 200
assert response.get_json() is not None
def test_fail_get_restaurant(client):
# Use incorrect ID format
response = client.get('/restaurants/somestring')
assert response.status_code == 404
def test_pass_create_restaurant(client):
new_restaurant_data = {
"name": "TEST_RESTAURANT",
"email": "test-restaurant@test.com",
"phone": "1-234-567890",
"address": "111 Test St, Test City, RE",
}
response = client.post(
'/restaurants', json=new_restaurant_data, headers=make_auth_header('restaurant')
)
assert response.status_code == 201
def test_fail_create_restaurant(client):
# Try to verify ownership of restaurant resource using customer token
response = client.post(
'/restaurants', json={}, headers=make_auth_header('customer')
)
assert response.status_code == 403
def test_pass_update_restaurant(client):
with app.app_context():
latest_id = max(r.id for r in Restaurant.query.all())
response = client.patch(
f'/restaurants/{latest_id}',
json={'name': 'Renamed Restaurant'},
headers=make_auth_header('restaurant'),
)
assert response.status_code == 200
restaurant = Restaurant.query.get_or_404(latest_id)
assert restaurant.name == 'Renamed Restaurant'
def test_fail_update_restaurant(client):
with app.app_context():
latest_id = max(r.id for r in Restaurant.query.all())
response = client.put(
f'/restaurants/{latest_id}', json={}, headers=make_auth_header('restaurant')
)
assert response.status_code == 400
def test_pass_delete_restaurant(client):
with app.app_context():
num_restaurants_before = Restaurant.query.count()
latest_id = max(r.id for r in Restaurant.query.all())
response = client.delete(
f'/restaurants/{latest_id}', headers=make_auth_header('restaurant')
)
assert response.status_code == 200
assert num_restaurants_before > Restaurant.query.count()
def test_fail_delete_restaurant(client):
with app.app_context():
# Cannot delete all restaurants at once
response = client.delete('/restaurants')
assert response.status_code == 405
### CUSTOMERS ###
def test_pass_create_customer(client):
new_customer_data = {
"name": "TEST",
"email": "test@test.com",
"phone": "1-234-567890",
"address": "999 Test St, Test City, CU",
}
response = client.post(
'/customers', json=new_customer_data, headers=make_auth_header('customer')
)
assert response.status_code == 201
def test_fail_create_customer(client):
response = client.post('/customers', json={})
assert response.status_code == 401
def test_pass_get_customer(client):
with app.app_context():
latest_id = max(c.id for c in Customer.query.all())
response = client.get(
f'/customers/{latest_id}', headers=make_auth_header('customer')
)
assert response.status_code == 200
# Make sure all the data points are present in the response
assert all(response.json[key] for key in Customer.defaults())
def test_fail_get_customers(client):
response = client.get('/customers')
assert response.status_code == 405
def test_pass_update_customer(client):
with app.app_context():
latest_id = max(c.id for c in Customer.query.all())
response = client.patch(
f'/customers/{latest_id}',
json={'phone': '987-654-321'},
headers=make_auth_header('customer'),
)
assert response.status_code == 200
customer = Customer.query.get_or_404(latest_id)
assert customer.phone == '987-654-321'
def test_fail_update_customer(client):
with app.app_context():
latest_id = max(c.id for c in Customer.query.all())
response = client.patch(
f'/customers/{latest_id}',
json={'phone': None},
headers=make_auth_header('customer'),
)
assert response.status_code == 400
def test_pass_delete_customer(client):
with app.app_context():
num_customers_before = Customer.query.count()
latest_id = max(c.id for c in Customer.query.all())
response = client.delete(
f'/customers/{latest_id}', headers=make_auth_header('customer')
)
assert response.status_code == 200
assert num_customers_before > Customer.query.count()
def test_fail_delete_customer(client):
with app.app_context():
# Cannot delete all customers at once
response = client.delete('/customers')
assert response.status_code == 405
### CATEGORIES ###
def test_pass_create_category(client):
response = client.post(
'/restaurants/1/categories',
json={'name': 'Tastiest'},
headers=make_auth_header('restaurant'),
)
assert response.status_code == 201
def test_fail_create_category(client):
response = client.post(
'/restaurants/1/categories',
json={'name': 'Yummies'},
headers=make_auth_header('customer'),
)
assert response.status_code == 403
def test_pass_update_category(client):
with app.app_context():
latest_id = max(c.id for c in Category.query.all())
response = client.patch(
f'/categories/{latest_id}',
json={'name': 'Yummies'},
headers=make_auth_header('restaurant'),
)
assert response.status_code == 200
assert Category.query.get_or_404(latest_id).name == 'Yummies'
def test_fail_update_category(client):
with app.app_context():
latest_id = max(c.id for c in Category.query.all())
response = client.put(
f'/categories/{latest_id}',
json={'blame': 'Yummies'},
headers=make_auth_header('restaurant'),
)
assert response.status_code == 400
def test_pass_delete_category(client):
with app.app_context():
latest_id = max(c.id for c in Category.query.all())
response = client.delete(
f'/categories/{latest_id}', headers=make_auth_header('restaurant')
)
assert response.status_code == 200
def test_fail_delete_category(client):
with app.app_context():
latest_id = max(c.id for c in Category.query.all())
response = client.delete(
f'/categories/{latest_id}', headers=make_auth_header('customer')
)
assert response.status_code == 403
### ITEMS AND INGREDIENTS ###
def test_pass_search_items(client):
response = client.post('/items', json={'search_term': 'soup'})
assert response.status_code == 200
assert len(response.json) >= 1
def test_fail_search_items(client):
# Invalid request format
response = client.post('/items', data={'search_term': 'soup'})
assert response.status_code == 400
def test_pass_get_item(client):
response = client.get('/items/1')
assert response.status_code == 200
assert 'name' in response.json
assert 'price' in response.json
def test_fail_get_item(client):
# Singular resource in URL
response = client.get('/item/1')
assert response.status_code == 404
def test_pass_create_item(client):
with app.app_context():
num_items_before = Item.query.count()
new_item_data = {
'name': 'Bullseye',
'description': 'Not what it sounds.',
'price': 24.42,
'ingredients': ['bull', 'duh', 'saffron', 'turmeric']
}
response = client.post('/categories/1/items', json=new_item_data, headers=make_auth_header('restaurant'))
assert response.status_code == 201
with app.app_context():
        assert num_items_before < Item.query.count()
def test_fail_create_item(client):
new_item_data = {
'name': 'Bullseye',
'ingredients': []
}
response = client.post('/categories/1/items', json=new_item_data)
assert response.status_code == 401
def test_pass_update_item(client):
put_item_data = {
'name': 'Catseye',
'description': "It was a bull, now it's a cat.",
'price': 42.24,
'ingredients': ['cat', 'nip', 'saffron', 'turmeric']
}
response = client.put('/items/1', json=put_item_data, headers=make_auth_header('restaurant'))
assert response.status_code == 200
with app.app_context():
assert Item.query.get_or_404(1).name == 'Catseye'
def test_fail_update_item(client):
with app.app_context():
latest_id = max(i.id for i in Item.query.all())
put_item_data = {
'name': 'Catseye',
'ingredients': ['cat', 'nip', 'saffron', 'turmeric']
}
response = client.put(f'/items/{latest_id}', json=put_item_data, headers=make_auth_header('restaurant'))
assert response.status_code == 400
def test_pass_delete_item(client):
with app.app_context():
latest_id = max(i.id for i in Item.query.all())
response = client.delete(f'/items/{latest_id}', headers=make_auth_header('restaurant'))
assert response.status_code == 200
def test_fail_delete_item(client):
response = client.delete('/items/3')
assert response.status_code == 401
def test_pass_get_items_by_ingredient(client):
for ingredient_id in {1, 23, 57}:
response = client.get(f'/ingredients/{ingredient_id}/items')
assert response.status_code == 200
assert response.json
def test_fail_get_items_by_ingredient(client):
for ingredient_name in {'turmeric', 'saffron', 'pepper'}:
response = client.get(f'/ingredients/{ingredient_name}/items')
assert response.status_code == 404
### ORDERS ###
def test_pass_create_order(client):
orders = [
{'items': [1, 2, 3, 4]},
{'items': [44, 45, 46, 47, 48]},
{'items': [87, 88, 89]},
]
for order in orders:
response = client.post(
'/customers/1/orders', json=order, headers=make_auth_header('customer')
)
assert response.status_code == 201
def test_fail_create_order(client):
# Mix up items from different restaurants per order
orders = [
{'items': [93, 2, 75, 4]},
{'items': [23, 45, 1, 6]},
{'items': [52, 2, 43]},
]
for order in orders:
response = client.post(
'/customers/2/orders', json=order, headers=make_auth_header('customer')
)
assert response.status_code == 400
def test_pass_get_customer_orders(client):
response = client.get('/customers/1/orders', headers=make_auth_header('customer'))
assert response.status_code == 200
assert len(response.json) >= 3
def test_fail_get_customer_orders(client):
# Mix up resources in the URL
response = client.get('/orders/1/customer', headers=make_auth_header('customer'))
assert response.status_code == 404
def test_pass_get_restaurant_orders(client):
response = client.get(
'/restaurants/1/orders', headers=make_auth_header('restaurant')
)
assert response.status_code == 200
assert len(response.json) >= 1
assert response.json[0]['customer_id'] == 1
def test_fail_get_restaurant_orders(client):
# Mix up resources in the URL
response = client.get(
'/orders/1/restaurant', headers=make_auth_header('restaurant')
)
assert response.status_code == 404
|
from django.db.models.signals import pre_save
from django.dispatch import receiver
from .models import CustomUser
@receiver(pre_save, sender=CustomUser)
def auto_rename_user(sender, instance, **kwargs):
    """Derive a name from the email when the user has none, e.g. xyz@gmail.com -> name='xyz'."""
    user = instance
    if user.name == '':
        user.name = user.email.split("@")[0]
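# Illustrative behaviour (hypothetical field values; requires a configured
# Django project):
#   user = CustomUser(email="xyz@gmail.com", name="")
#   user.save()  # pre_save fires and user.name becomes "xyz"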
|
#!/usr/bin/env python
from setuptools import setup
setup(
setup_requires=[
'pbr>=5.2',
'reno>=2.11',
'setuptools>=41.0',
],
pbr=True,
)
|
# Generated by Django 3.0.8 on 2020-08-23 11:02
from django.db import migrations
def fill_foirequests_field(apps, schema_editor):
InformationObject = apps.get_model('froide_campaign', 'InformationObject')
for information_object in InformationObject.objects.all():
request = information_object.foirequest
if request:
information_object.foirequests.add(request)
class Migration(migrations.Migration):
dependencies = [
('froide_campaign', '0022_informationobject_foirequests'),
]
operations = [
migrations.RunPython(fill_foirequests_field)
]
|
# @copyright@
# Copyright (c) 2006 - 2019 Teradata
# All rights reserved. Stacki(r) v5.x stacki.com
# https://github.com/Teradata/stacki/blob/master/LICENSE.txt
# @copyright@
#
# @rocks@
# Copyright (c) 2000 - 2010 The Regents of the University of California
# All rights reserved. Rocks(r) v5.4 www.rocksclusters.org
# https://github.com/Teradata/stacki/blob/master/LICENSE-ROCKS.txt
# @rocks@
from operator import itemgetter
import stack.commands
from stack.argument_processors.scope import ScopeArgProcessor
class Command(ScopeArgProcessor, stack.commands.list.command):
"""
List the global routes.
<example cmd='list route'>
Lists all the global routes for this cluster.
</example>
"""
def run(self, params, args):
# Get the scope and make sure the args are valid
scope, = self.fillParams([('scope', 'global')])
scope_mappings = self.getScopeMappings(args, scope)
self.beginOutput()
for scope_mapping in scope_mappings:
if scope == 'host':
# Get the host's info for the scope linking
host, appliance_id, os_id, environment_id = self.db.select("""
nodes.name, appliance, boxes.os, environment
FROM nodes, boxes
WHERE nodes.id = %s AND nodes.box = boxes.id
""", (scope_mapping.node_id,))[0]
# Get all the routes for all scopes that match the host's info
routes = self.db.select("""
routes.address, routes.netmask, routes.gateway,
subnets.name, routes.interface,
UPPER(LEFT(scope_map.scope, 1))
FROM routes
LEFT JOIN subnets
ON routes.subnet_id = subnets.id
INNER JOIN scope_map
ON routes.scope_map_id = scope_map.id
WHERE scope_map.scope = 'global'
OR (scope_map.scope = 'appliance' AND scope_map.appliance_id <=> %s)
OR (scope_map.scope = 'os' AND scope_map.os_id <=> %s)
OR (scope_map.scope = 'environment' AND scope_map.environment_id <=> %s)
OR (scope_map.scope = 'host' AND scope_map.node_id <=> %s)
ORDER BY scope_map.scope DESC, routes.address, routes.id
""", (appliance_id, os_id, environment_id, scope_mapping.node_id))
# The routes come out of the DB with the higher value scopes
# first. Surprisingly, there is no simple way in SQL to squash
# these rules down by scope value. So, we do it here instead.
seen_addresses = set()
squashed_routes = []
for route in routes:
if route[0] not in seen_addresses:
squashed_routes.append(list(route))
seen_addresses.add(route[0])
# If the route has a subnet, we need to look up if the host has an
# interface linked to it, and output that interface if it does
for route in squashed_routes:
if route[3]:
rows = self.db.select("""
device FROM subnets,networks,nodes
WHERE nodes.id = %s
AND subnets.name = %s
AND subnets.id = networks.subnet
AND networks.node = nodes.id
AND networks.device NOT LIKE 'vlan%%'
""", (scope_mapping.node_id, route[3]))
if rows:
route[4] = rows[0][0]
for route in sorted(squashed_routes, key=itemgetter(0)):
self.addOutput(host, route)
else:
# All the other scopes just list their routes
routes = self.db.select("""
COALESCE(appliances.name, oses.name, environments.name, ''),
routes.address, routes.netmask, routes.gateway,
subnets.name, routes.interface
FROM routes
LEFT JOIN subnets
ON routes.subnet_id = subnets.id
INNER JOIN scope_map
ON routes.scope_map_id = scope_map.id
LEFT JOIN appliances
ON scope_map.appliance_id = appliances.id
LEFT JOIN oses
ON scope_map.os_id = oses.id
LEFT JOIN environments
ON scope_map.environment_id = environments.id
WHERE scope_map.scope = %s
AND scope_map.appliance_id <=> %s
AND scope_map.os_id <=> %s
AND scope_map.environment_id <=> %s
AND scope_map.node_id <=> %s
ORDER BY routes.address, routes.id
""", scope_mapping)
for route in routes:
self.addOutput(route[0], route[1:])
if scope == 'host':
self.endOutput(header=[
'host', 'network', 'netmask', 'gateway', 'subnet',
'interface', 'source'
])
elif scope == 'global':
self.endOutput(header=[
'', 'network', 'netmask', 'gateway', 'subnet', 'interface'
])
else:
self.endOutput(header=[
                scope, 'network', 'netmask', 'gateway', 'subnet', 'interface'
])
|
import itertools
class Suitor:
def __init__(self, id, preference_list):
""" A Suitor consists of an integer id (between 0 and the total number
of Suitors), and a preference list implicitly defining a ranking of the
set of Suiteds.
E.g., Suitor(2, [5, 0, 3, 4, 1, 2]) says the third Suitor prefers the
Suited with index 5 the most, then the Suited with index 0, etc.
The Suitor will propose in decreasing order of preference, and maintains
the internal state index_to_propose_to to keep track of the next proposal.
"""
self.preference_list = preference_list
self.index_to_propose_to = 0
self.id = id
def preference(self):
return self.preference_list[self.index_to_propose_to]
def post_rejection(self):
self.index_to_propose_to += 1
def __eq__(self, other):
return isinstance(other, Suitor) and self.id == other.id
def __hash__(self):
return hash(self.id)
def __repr__(self):
return "Suitor({})".format(self.id)
class Suited:
def __init__(self, id, preference_list):
self.preference_list = preference_list
self.held = None
self.current_suitors = set()
self.id = id
def reject(self):
"""Return the subset of Suitors in self.current_suitors to reject,
leaving only the held Suitor in self.current_suitors.
"""
if len(self.current_suitors) == 0:
return set()
self.held = min(self.current_suitors,
key=lambda suitor: self.preference_list.index(suitor.id))
rejected = self.current_suitors - set([self.held])
self.current_suitors = set([self.held])
return rejected
def add_suitor(self, suitor):
self.current_suitors.add(suitor)
def __eq__(self, other):
return isinstance(other, Suited) and self.id == other.id
def __hash__(self):
return hash(self.id)
def __repr__(self):
return "Suited({})".format(self.id)
def stable_marriage(suitors, suiteds):
""" Construct a stable marriage between Suitors and Suiteds.
Arguments:
suitors: a list of Suitor
        suiteds: a list of Suited, which defer acceptance of Suitors.
Returns:
A dict {Suitor: Suited} matching Suitors to Suiteds.
"""
unassigned = set(suitors)
while len(unassigned) > 0:
for suitor in unassigned:
next_to_propose_to = suiteds[suitor.preference()]
next_to_propose_to.add_suitor(suitor)
unassigned = set()
for suited in suiteds:
unassigned |= suited.reject()
for suitor in unassigned:
suitor.post_rejection() # have some ice cream
return dict([(suited.held, suited) for suited in suiteds])
def verify_stable(suitors, suiteds, marriage):
""" Check that the assignment of suitors to suited is a stable marriage.
Arguments:
suitors: a list of Suitors
suiteds: a list of Suiteds
marriage: a matching {Suitor: Suited}
Returns:
True if the marriage is stable, otherwise a tuple (False, (x, y))
where x is a Suitor, y is a Suited, and (x, y) are a counterexample
to the claim that the marriage is stable.
"""
suited_to_suitor = dict((v, k) for (k, v) in marriage.items())
def precedes(L, item1, item2): return L.index(item1) < L.index(item2)
def suitor_prefers(suitor, suited):
return precedes(suitor.preference_list, suited.id, marriage[suitor].id)
def suited_prefers(suited, suitor):
return precedes(suited.preference_list, suitor.id, suited_to_suitor[suited].id)
for (suitor, suited) in itertools.product(suitors, suiteds):
if (suited != marriage[suitor]
and suitor_prefers(suitor, suited)
and suited_prefers(suited, suitor)):
return False, (suitor, suited)
return True
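# Worked example (illustrative; the preference lists below are arbitrary):
if __name__ == "__main__":
    suitors = [Suitor(0, [0, 1, 2]), Suitor(1, [1, 0, 2]), Suitor(2, [0, 2, 1])]
    suiteds = [Suited(0, [1, 0, 2]), Suited(1, [0, 1, 2]), Suited(2, [0, 1, 2])]
    marriage = stable_marriage(suitors, suiteds)
    print(marriage)  # e.g. {Suitor(0): Suited(0), Suitor(1): Suited(1), Suitor(2): Suited(2)}
    assert verify_stable(suitors, suiteds, marriage) is True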
|
import yaml
from dotmap import DotMap
def get_config(path: str) -> DotMap:
    """Load a YAML config file and return it wrapped in a DotMap."""
    with open(path) as f:
        cfg = yaml.safe_load(f)
    return DotMap(cfg)
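# Usage sketch (illustrative; the file name and keys are assumptions):
#   cfg = get_config("config.yaml")
#   print(cfg.training.learning_rate)  # DotMap allows attribute-style access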
|
#!/bin/python3
import sys
def solve(grades):
    # Round each grade of at least 38 up to the next multiple of 5 when the gap is less than 3.
for i in range(len(grades)):
if grades[i] < 38:
continue
else:
mod = grades[i] % 5
multi5 = int(((grades[i] - mod) / 5 + 1) * 5)
if multi5 - grades[i] < 3:
grades[i] = multi5
return grades
n = int(input().strip())
grades = []
grades_i = 0
for grades_i in range(n):
grades_t = int(input().strip())
grades.append(grades_t)
result = solve(grades)
print("\n".join(map(str, result)))
|
# File produced automatically by PNCodeGen.ipynb
from scipy.integrate import solve_ivp
import numpy as np
from numpy import dot, cross, log, sqrt, pi
from numpy import euler_gamma as EulerGamma
from numba import jit, njit, float64, boolean
from numba.experimental import jitclass
from scipy.interpolate import InterpolatedUnivariateSpline as Spline
from scipy.special import zeta
import quaternionic
qmul = njit(quaternionic.algebra.multiply)
qexp = njit(quaternionic.algebra.exp)
qconj = njit(quaternionic.algebra.conj)
qinverse = njit(quaternionic.algebra.reciprocal)
@njit(cache=True)
def mul(A,B):
C=np.empty(4)
qmul(A,B,C)
return C
@njit(cache=True)
def exp(A):
B=np.empty(4)
qexp(A,B)
return B
@njit(cache=True)
def conjugate(A):
B=np.empty(4)
qconj(A,B)
return B
@njit(cache=True)
def inverse(A):
B=np.empty(4)
qinverse(A,B)
return B
@njit(cache=True)
def FrameFromAngularVelocity_2D_Integrand(rfrak_x, rfrak_y, Omega):
rfrakMag = np.sqrt(rfrak_x*rfrak_x+rfrak_y*rfrak_y)
rfrakDot_x = Omega[0]/2.0
rfrakDot_y = Omega[1]/2.0
if np.abs(np.sin(rfrakMag)) > 1e-12 and np.abs(np.cos(rfrakMag)) > 1e-12:
omega_v = (Omega[0]*(-rfrak_y/rfrakMag)+Omega[1]*(rfrak_x/rfrakMag))*np.tan(rfrakMag)-Omega[2]
Omega[0] += -omega_v*np.sin(2*rfrakMag)*(-rfrak_y/rfrakMag)
Omega[1] += -omega_v*np.sin(2*rfrakMag)*(rfrak_x/rfrakMag)
Omega[2] += omega_v*np.cos(2*rfrakMag)
dotTerm = (rfrak_x*Omega[0]+rfrak_y*Omega[1])/(rfrakMag*rfrakMag)
cotTerm = rfrakMag/(2*np.tan(rfrakMag))
rfrakDot_x = (Omega[0] - rfrak_x*dotTerm)*cotTerm + rfrak_x*dotTerm/2. - 0.5*Omega[2]*rfrak_y
rfrakDot_y = (Omega[1] - rfrak_y*dotTerm)*cotTerm + rfrak_y*dotTerm/2. + 0.5*Omega[2]*rfrak_x
return rfrakDot_x, rfrakDot_y
@njit(cache=True)
def FrameFromAngularVelocityIntegrand(rfrak, Omega):
rfrakMag = np.sqrt(rfrak[0] * rfrak[0] + rfrak[1] * rfrak[1] + rfrak[2] * rfrak[2])
OmegaMag = np.sqrt(Omega[0] * Omega[0] + Omega[1] * Omega[1] + Omega[2] * Omega[2])
# If the matrix is really close to the identity, return
if rfrakMag < 1e-12*OmegaMag:
return np.array([Omega[0] / 2.0, Omega[1] / 2.0, Omega[2] / 2.0])
# If the matrix is really close to singular, it's equivalent to the identity, so return
if np.abs(np.sin(rfrakMag)) < 1e-12:
return np.array([Omega[0] / 2.0, Omega[1] / 2.0, Omega[2] / 2.0])
OmegaOver2 = np.array([Omega[0] / 2.0, Omega[1] / 2.0, Omega[2] / 2.0])
rfrakHat = np.array([rfrak[0] / rfrakMag, rfrak[1] / rfrakMag, rfrak[2] / rfrakMag])
return ((OmegaOver2 - rfrakHat * np.dot(rfrakHat, OmegaOver2)) * (rfrakMag / np.tan(rfrakMag))
+ rfrakHat * np.dot(rfrakHat, OmegaOver2) + np.cross(OmegaOver2, rfrak))
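# Constant (per-system) quantities for the PN evolution: mass and spin data plus
# the PN flux (Fcal_*) and binding-energy (E_*) expansion coefficients.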
ConsSpec=[('xHat', float64[:]),('yHat', float64[:]),('zHat', float64[:]),('M1', float64[:]),('M2', float64[:]),('S_chi1', float64[:]),('S_chi2', float64[:]),('M', float64[:]),('delta', float64[:]),('nu', float64[:]),('chi1chi1', float64[:]),('chi1chi2', float64[:]),('chi2chi2', float64[:]),('Fcal_0', float64[:]),('Fcal_2', float64[:]),('Fcal_3', float64[:]),('Fcal_4', float64[:]),('Fcal_5', float64[:]),('Fcal_6', float64[:]),('Fcal_lnv_6', float64[:]),('Fcal_7', float64[:]),('Fcal_8', float64[:]),('Fcal_lnv_8', float64[:]),('E_0', float64[:]),('E_2', float64[:]),('E_4', float64[:]),('E_6', float64[:]),('E_8', float64[:]),('E_lnv_8', float64[:]),('EvolveSpin1',boolean),('EvolveSpin2',boolean)]
@jitclass(ConsSpec)
class Cons:
def __init__(self,xHat,yHat,zHat,M1,M2,S_chi1,S_chi2,M,delta,nu,chi1chi1,chi1chi2,chi2chi2,Fcal_0,Fcal_2,Fcal_3,Fcal_4,Fcal_5,Fcal_6,Fcal_lnv_6,Fcal_7,Fcal_8,Fcal_lnv_8,E_0,E_2,E_4,E_6,E_8,E_lnv_8,EvolveSpin1,EvolveSpin2):
self.xHat=xHat
self.yHat=yHat
self.zHat=zHat
self.M1=M1
self.M2=M2
self.S_chi1=S_chi1
self.S_chi2=S_chi2
self.M=M
self.delta=delta
self.nu=nu
self.chi1chi1=chi1chi1
self.chi1chi2=chi1chi2
self.chi2chi2=chi2chi2
self.Fcal_0=Fcal_0
self.Fcal_2=Fcal_2
self.Fcal_3=Fcal_3
self.Fcal_4=Fcal_4
self.Fcal_5=Fcal_5
self.Fcal_6=Fcal_6
self.Fcal_lnv_6=Fcal_lnv_6
self.Fcal_7=Fcal_7
self.Fcal_8=Fcal_8
self.Fcal_lnv_8=Fcal_lnv_8
self.E_0=E_0
self.E_2=E_2
self.E_4=E_4
self.E_6=E_6
self.E_8=E_8
self.E_lnv_8=E_lnv_8
self.EvolveSpin1=EvolveSpin1
self.EvolveSpin2=EvolveSpin2
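# Time-dependent quantities, recomputed from the ODE state vector y at each step.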
VarsSpec=[('v', float64[:]),('rfrak_chi1', float64[:]),('rfrak_chi2', float64[:]),('rfrak_frame', float64[:]),('R', float64[:]),('nHat', float64[:]),('lambdaHat', float64[:]),('ellHat', float64[:]),('R_S1', float64[:]),('R_S2', float64[:]),('chiVec1', float64[:]),('chiVec2', float64[:]),('chi1_n', float64[:]),('chi1_lambda', float64[:]),('chi1_ell', float64[:]),('chi2_n', float64[:]),('chi2_lambda', float64[:]),('chi2_ell', float64[:]),('S_ell', float64[:]),('S_n', float64[:]),('S_lambda', float64[:]),('Sigma_ell', float64[:]),('Sigma_n', float64[:]),('Sigma_lambda', float64[:]),('chi_s_ell', float64[:]),('chi_a_ell', float64[:]),('logv', float64[:]),('Fcal_coeff', float64[:]),('Fcal_SQ_4', float64[:]),('Fcal_SO_3', float64[:]),('Fcal_SO_5', float64[:]),('Fcal_SO_6', float64[:]),('Fcal_SO_7', float64[:]),('Fcal_SO_8', float64[:]),('E_SQ_4', float64[:]),('E_SO_3', float64[:]),('E_SO_5', float64[:]),('E_SO_7', float64[:])]
@jitclass(VarsSpec)
class Vars:
def __init__(self,v,rfrak_chi1,rfrak_chi2,rfrak_frame,R,nHat,lambdaHat,ellHat,R_S1,R_S2,chiVec1,chiVec2,chi1_n,chi1_lambda,chi1_ell,chi2_n,chi2_lambda,chi2_ell,S_ell,S_n,S_lambda,Sigma_ell,Sigma_n,Sigma_lambda,chi_s_ell,chi_a_ell,logv,Fcal_coeff,Fcal_SQ_4,Fcal_SO_3,Fcal_SO_5,Fcal_SO_6,Fcal_SO_7,Fcal_SO_8,E_SQ_4,E_SO_3,E_SO_5,E_SO_7):
self.v=v
self.rfrak_chi1=rfrak_chi1
self.rfrak_chi2=rfrak_chi2
self.rfrak_frame=rfrak_frame
self.R=R
self.nHat=nHat
self.lambdaHat=lambdaHat
self.ellHat=ellHat
self.R_S1=R_S1
self.R_S2=R_S2
self.chiVec1=chiVec1
self.chiVec2=chiVec2
self.chi1_n=chi1_n
self.chi1_lambda=chi1_lambda
self.chi1_ell=chi1_ell
self.chi2_n=chi2_n
self.chi2_lambda=chi2_lambda
self.chi2_ell=chi2_ell
self.S_ell=S_ell
self.S_n=S_n
self.S_lambda=S_lambda
self.Sigma_ell=Sigma_ell
self.Sigma_n=Sigma_n
self.Sigma_lambda=Sigma_lambda
self.chi_s_ell=chi_s_ell
self.chi_a_ell=chi_a_ell
self.logv=logv
self.Fcal_coeff=Fcal_coeff
self.Fcal_SQ_4=Fcal_SQ_4
self.Fcal_SO_3=Fcal_SO_3
self.Fcal_SO_5=Fcal_SO_5
self.Fcal_SO_6=Fcal_SO_6
self.Fcal_SO_7=Fcal_SO_7
self.Fcal_SO_8=Fcal_SO_8
self.E_SQ_4=E_SQ_4
self.E_SO_3=E_SO_3
self.E_SO_5=E_SO_5
self.E_SO_7=E_SO_7
@njit(cache=True)
def Initialization(Cons, xHat_i, yHat_i, zHat_i, M1_i, M2_i, v_i, S_chi1_i, S_chi2_i, rfrak_frame_i):
Cons.xHat=xHat_i
Cons.yHat=yHat_i
Cons.zHat=zHat_i
Cons.M1=np.array([M1_i])
Cons.M2=np.array([M2_i])
Cons.S_chi1=S_chi1_i
Cons.S_chi2=S_chi2_i
rfrak_chi1=np.array([0.0,0.0])
rfrak_chi2=np.array([0.0,0.0])
Cons.M=Cons.M1 + Cons.M2
Cons.delta=(Cons.M1 - Cons.M2)/Cons.M
Cons.nu=Cons.M1*Cons.M2/Cons.M**2
R_S1=exp(rfrak_chi1[0]*Cons.xHat + rfrak_chi1[1]*Cons.yHat)
R_S2=exp(rfrak_chi2[0]*Cons.xHat + rfrak_chi2[1]*Cons.yHat)
chiVec1=mul(mul(mul(Cons.S_chi1,R_S1),Cons.zHat),mul(conjugate(R_S1),conjugate(Cons.S_chi1)))
chiVec2=mul(mul(mul(Cons.S_chi2,R_S2),Cons.zHat),mul(conjugate(R_S2),conjugate(Cons.S_chi2)))
Cons.chi1chi1=np.array([dot(chiVec1[1:],chiVec1[1:])])
Cons.chi1chi2=np.array([dot(chiVec1[1:],chiVec2[1:])])
Cons.chi2chi2=np.array([dot(chiVec2[1:],chiVec2[1:])])
Cons.Fcal_0=np.array([1.0])
Cons.Fcal_2=-35*Cons.nu/12 - 1247/336
Cons.Fcal_3=np.array([4*pi])
Cons.Fcal_4=65*Cons.nu**2/18 + 9271*Cons.nu/504 - 44711/9072
Cons.Fcal_5=pi*(-583*Cons.nu/24 - 8191/672)
Cons.Fcal_6=-775*Cons.nu**3/324 - 94403*Cons.nu**2/3024 + Cons.nu*(-134543/7776 + 41*pi**2/48) - 1712*log(4)/105 - 1712*EulerGamma/105 + 16*pi**2/3 + 6643739519/69854400
Cons.Fcal_lnv_6=np.array([-1712/105])
Cons.Fcal_7=pi*(193385*Cons.nu**2/3024 + 214745*Cons.nu/1728 - 16285/504)
Cons.Fcal_8=np.array([-1369*pi**2/126 - 323105549467/3178375200 - 47385*log(3)/1568 + 232597*EulerGamma/4410 + 39931*log(2)/294])
Cons.Fcal_lnv_8=np.array([232597/4410])
Cons.E_0=np.array([1.0])
Cons.E_2=-Cons.nu/12 - 3/4
Cons.E_4=-Cons.nu**2/24 + 19*Cons.nu/8 - 27/8
Cons.E_6=-35*Cons.nu**3/5184 - 155*Cons.nu**2/96 + Cons.nu*(34445/576 - 205*pi**2/96) - 675/64
Cons.E_8=77*Cons.nu**4/31104 + 301*Cons.nu**3/1728 + Cons.nu**2*(-498449/3456 + 3157*pi**2/576) + Cons.nu*(-123671/5760 + 896*EulerGamma/15 + 9037*pi**2/1536 + 1792*log(2)/15) - 3969/128
Cons.E_lnv_8=896*Cons.nu/15
Cons.EvolveSpin1=np.linalg.norm(mul(Cons.S_chi1,conjugate(Cons.S_chi1)))>1e-8
Cons.EvolveSpin2=np.linalg.norm(mul(Cons.S_chi2,conjugate(Cons.S_chi2)))>1e-8
@njit(cache=True)
def Recalculate_0(Cons,Vars,y):
Vars.v = np.array([y[0]])
Vars.rfrak_chi1 = np.array([y[1],y[2]])
Vars.rfrak_chi2 = np.array([y[3],y[4]])
Vars.rfrak_frame = np.array([y[5],y[6],y[7]])
Vars.R = exp(Vars.rfrak_frame[0]*Cons.xHat + Vars.rfrak_frame[1]*Cons.yHat + Vars.rfrak_frame[2]*Cons.zHat)
Vars.nHat = mul(mul(Vars.R,Cons.xHat),conjugate(Vars.R))
Vars.ellHat = mul(mul(Vars.R,Cons.zHat),conjugate(Vars.R))
Vars.R_S1 = exp(Vars.rfrak_chi1[0]*Cons.xHat + Vars.rfrak_chi1[1]*Cons.yHat)
Vars.R_S2 = exp(Vars.rfrak_chi2[0]*Cons.xHat + Vars.rfrak_chi2[1]*Cons.yHat)
Vars.chiVec1 = mul(mul(mul(Cons.S_chi1,Vars.R_S1),Cons.zHat),mul(conjugate(Vars.R_S1),conjugate(Cons.S_chi1)))
Vars.chiVec2 = mul(mul(mul(Cons.S_chi2,Vars.R_S2),Cons.zHat),mul(conjugate(Vars.R_S2),conjugate(Cons.S_chi2)))
Vars.chi1_n = np.array([dot(Vars.chiVec1[1:],Vars.nHat[1:])])
Vars.chi1_ell = np.array([dot(Vars.chiVec1[1:],Vars.ellHat[1:])])
Vars.chi2_n = np.array([dot(Vars.chiVec2[1:],Vars.nHat[1:])])
Vars.chi2_ell = np.array([dot(Vars.chiVec2[1:],Vars.ellHat[1:])])
Vars.S_ell = Cons.M1**2*Vars.chi1_ell + Cons.M2**2*Vars.chi2_ell
Vars.S_n = Cons.M1**2*Vars.chi1_n + Cons.M2**2*Vars.chi2_n
Vars.Sigma_ell = Cons.M*(-Cons.M1*Vars.chi1_ell + Cons.M2*Vars.chi2_ell)
Vars.Sigma_n = Cons.M*(-Cons.M1*Vars.chi1_n + Cons.M2*Vars.chi2_n)
Vars.chi_s_ell = Vars.chi1_ell/2 + Vars.chi2_ell/2
Vars.chi_a_ell = Vars.chi1_ell/2 - Vars.chi2_ell/2
Vars.Fcal_coeff = 32*Cons.nu**2*Vars.v**10/5
@njit
def OmegaVec_chiVec_1_0(Cons,Vars):
Omega1_coeff = Vars.v**5/Cons.M
return Omega1_coeff*Vars.ellHat*(-0.75*Cons.delta + 0.5*Cons.nu + 0.75)
@njit
def OmegaVec_chiVec_2_0(Cons,Vars):
Omega2_coeff = Vars.v**5/Cons.M
return Omega2_coeff*Vars.ellHat*(0.75*Cons.delta + 0.5*Cons.nu + 0.75)
@njit
def OmegaVec_0(Cons,Vars):
a_ell_0 = 7.0*Vars.S_n + 3.0*Vars.Sigma_n*Cons.delta
gamma_PN_0 = 1.00000000000000
return Vars.ellHat*Vars.v**3/Cons.M + a_ell_0*gamma_PN_0*Vars.nHat*Vars.v**6/Cons.M**3
@njit(cache=True)
def TaylorT1_0(Cons,Vars):
Flux = Cons.Fcal_0*Vars.Fcal_coeff
dEdV = -Cons.E_0*Cons.M*Cons.nu*Vars.v
Absorption = 0
dvdt_T1 = (-Absorption - Flux)/dEdV
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_0(Cons,Vars)[1:])
dydt[0] = dvdt_T1[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_0(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_0(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
@njit(cache=True)
def TaylorT4_0(Cons,Vars):
dvdt_T4 = -2.0*Vars.Fcal_coeff*-Cons.Fcal_0/(Cons.nu*Vars.v*2*Cons.E_0*Cons.M)
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_0(Cons,Vars)[1:])
dydt[0] = dvdt_T4[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_0(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_0(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
@njit(cache=True)
def TaylorT5_0(Cons,Vars):
dtdv = -0.5*Cons.nu*Vars.v*2*Cons.E_0*Cons.M/(Vars.Fcal_coeff*-Cons.Fcal_0)
dvdt_T5 = 1.0/dtdv
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_0(Cons,Vars)[1:])
dydt[0] = dvdt_T5[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_0(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_0(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
@njit(cache=True)
def Recalculate_0p50(Cons,Vars,y):
Vars.v = np.array([y[0]])
Vars.rfrak_chi1 = np.array([y[1],y[2]])
Vars.rfrak_chi2 = np.array([y[3],y[4]])
Vars.rfrak_frame = np.array([y[5],y[6],y[7]])
Vars.R = exp(Vars.rfrak_frame[0]*Cons.xHat + Vars.rfrak_frame[1]*Cons.yHat + Vars.rfrak_frame[2]*Cons.zHat)
Vars.nHat = mul(mul(Vars.R,Cons.xHat),conjugate(Vars.R))
Vars.ellHat = mul(mul(Vars.R,Cons.zHat),conjugate(Vars.R))
Vars.R_S1 = exp(Vars.rfrak_chi1[0]*Cons.xHat + Vars.rfrak_chi1[1]*Cons.yHat)
Vars.R_S2 = exp(Vars.rfrak_chi2[0]*Cons.xHat + Vars.rfrak_chi2[1]*Cons.yHat)
Vars.chiVec1 = mul(mul(mul(Cons.S_chi1,Vars.R_S1),Cons.zHat),mul(conjugate(Vars.R_S1),conjugate(Cons.S_chi1)))
Vars.chiVec2 = mul(mul(mul(Cons.S_chi2,Vars.R_S2),Cons.zHat),mul(conjugate(Vars.R_S2),conjugate(Cons.S_chi2)))
Vars.chi1_n = np.array([dot(Vars.chiVec1[1:],Vars.nHat[1:])])
Vars.chi1_ell = np.array([dot(Vars.chiVec1[1:],Vars.ellHat[1:])])
Vars.chi2_n = np.array([dot(Vars.chiVec2[1:],Vars.nHat[1:])])
Vars.chi2_ell = np.array([dot(Vars.chiVec2[1:],Vars.ellHat[1:])])
Vars.S_ell = Cons.M1**2*Vars.chi1_ell + Cons.M2**2*Vars.chi2_ell
Vars.S_n = Cons.M1**2*Vars.chi1_n + Cons.M2**2*Vars.chi2_n
Vars.Sigma_ell = Cons.M*(-Cons.M1*Vars.chi1_ell + Cons.M2*Vars.chi2_ell)
Vars.Sigma_n = Cons.M*(-Cons.M1*Vars.chi1_n + Cons.M2*Vars.chi2_n)
Vars.chi_s_ell = Vars.chi1_ell/2 + Vars.chi2_ell/2
Vars.chi_a_ell = Vars.chi1_ell/2 - Vars.chi2_ell/2
Vars.Fcal_coeff = 32*Cons.nu**2*Vars.v**10/5
@njit
def OmegaVec_chiVec_1_0p50(Cons,Vars):
Omega1_coeff = Vars.v**5/Cons.M
return Omega1_coeff*(Vars.ellHat*(-0.75*Cons.delta + 0.5*Cons.nu + 0.75) + Vars.nHat*Vars.v*(3.0*Vars.chi1_n*Cons.nu + 3.0*Cons.M2**2*Vars.chi2_n/Cons.M**2) - Cons.M2**2*Vars.chiVec2*Vars.v/Cons.M**2)
@njit
def OmegaVec_chiVec_2_0p50(Cons,Vars):
Omega2_coeff = Vars.v**5/Cons.M
return Omega2_coeff*(Vars.ellHat*(0.75*Cons.delta + 0.5*Cons.nu + 0.75) + Vars.nHat*Vars.v*(3.0*Vars.chi2_n*Cons.nu + 3.0*Cons.M1**2*Vars.chi1_n/Cons.M**2) - Cons.M1**2*Vars.chiVec1*Vars.v/Cons.M**2)
@njit
def OmegaVec_0p50(Cons,Vars):
a_ell_0 = 7.0*Vars.S_n + 3.0*Vars.Sigma_n*Cons.delta
gamma_PN_0 = 1.00000000000000
return Vars.ellHat*Vars.v**3/Cons.M + a_ell_0*gamma_PN_0*Vars.nHat*Vars.v**6/Cons.M**3
@njit(cache=True)
def TaylorT1_0p50(Cons,Vars):
Flux = Cons.Fcal_0*Vars.Fcal_coeff
dEdV = -Cons.E_0*Cons.M*Cons.nu*Vars.v
Absorption = 0
dvdt_T1 = (-Absorption - Flux)/dEdV
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_0p50(Cons,Vars)[1:])
dydt[0] = dvdt_T1[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_0p50(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_0p50(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
@njit(cache=True)
def TaylorT4_0p50(Cons,Vars):
dvdt_T4 = -2.0*Vars.Fcal_coeff*(-Cons.Fcal_0 + 0*Vars.v - 0*-Cons.Fcal_0*Vars.v/2*Cons.E_0*Cons.M)/(Cons.nu*Vars.v*2*Cons.E_0*Cons.M)
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_0p50(Cons,Vars)[1:])
dydt[0] = dvdt_T4[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_0p50(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_0p50(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
@njit(cache=True)
def TaylorT5_0p50(Cons,Vars):
dtdv = -0.5*Cons.nu*Vars.v*(2*Cons.E_0*Cons.M + 0*Vars.v - 0*2*Cons.E_0*Cons.M*Vars.v/-Cons.Fcal_0)/(Vars.Fcal_coeff*-Cons.Fcal_0)
dvdt_T5 = 1.0/dtdv
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_0p50(Cons,Vars)[1:])
dydt[0] = dvdt_T5[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_0p50(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_0p50(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
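# -----------------------------------------------------------------------------
# 1.0 PN order: Cons.Fcal_2 and Cons.E_2 enter the flux and energy series, and
# v**2 corrections appear in the precession frequencies and in the
# gamma_PN/a_ell coefficients of OmegaVec.
# -----------------------------------------------------------------------------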
@njit(cache=True)
def Recalculate_1p0(Cons,Vars,y):
Vars.v = np.array([y[0]])
Vars.rfrak_chi1 = np.array([y[1],y[2]])
Vars.rfrak_chi2 = np.array([y[3],y[4]])
Vars.rfrak_frame = np.array([y[5],y[6],y[7]])
Vars.R = exp(Vars.rfrak_frame[0]*Cons.xHat + Vars.rfrak_frame[1]*Cons.yHat + Vars.rfrak_frame[2]*Cons.zHat)
Vars.nHat = mul(mul(Vars.R,Cons.xHat),conjugate(Vars.R))
Vars.ellHat = mul(mul(Vars.R,Cons.zHat),conjugate(Vars.R))
Vars.R_S1 = exp(Vars.rfrak_chi1[0]*Cons.xHat + Vars.rfrak_chi1[1]*Cons.yHat)
Vars.R_S2 = exp(Vars.rfrak_chi2[0]*Cons.xHat + Vars.rfrak_chi2[1]*Cons.yHat)
Vars.chiVec1 = mul(mul(mul(Cons.S_chi1,Vars.R_S1),Cons.zHat),mul(conjugate(Vars.R_S1),conjugate(Cons.S_chi1)))
Vars.chiVec2 = mul(mul(mul(Cons.S_chi2,Vars.R_S2),Cons.zHat),mul(conjugate(Vars.R_S2),conjugate(Cons.S_chi2)))
Vars.chi1_n = np.array([dot(Vars.chiVec1[1:],Vars.nHat[1:])])
Vars.chi1_ell = np.array([dot(Vars.chiVec1[1:],Vars.ellHat[1:])])
Vars.chi2_n = np.array([dot(Vars.chiVec2[1:],Vars.nHat[1:])])
Vars.chi2_ell = np.array([dot(Vars.chiVec2[1:],Vars.ellHat[1:])])
Vars.S_ell = Cons.M1**2*Vars.chi1_ell + Cons.M2**2*Vars.chi2_ell
Vars.S_n = Cons.M1**2*Vars.chi1_n + Cons.M2**2*Vars.chi2_n
Vars.Sigma_ell = Cons.M*(-Cons.M1*Vars.chi1_ell + Cons.M2*Vars.chi2_ell)
Vars.Sigma_n = Cons.M*(-Cons.M1*Vars.chi1_n + Cons.M2*Vars.chi2_n)
Vars.chi_s_ell = Vars.chi1_ell/2 + Vars.chi2_ell/2
Vars.chi_a_ell = Vars.chi1_ell/2 - Vars.chi2_ell/2
Vars.Fcal_coeff = 32*Cons.nu**2*Vars.v**10/5
@njit
def OmegaVec_chiVec_1_1p0(Cons,Vars):
Omega1_coeff = Vars.v**5/Cons.M
return Omega1_coeff*(Vars.ellHat*(-0.75*Cons.delta + 0.5*Cons.nu + Vars.v**2*(Cons.delta*(0.625*Cons.nu - 0.5625) + Cons.nu*(1.25 - 0.0416666666666667*Cons.nu) + 0.5625) + 0.75) + Vars.nHat*Vars.v*(3.0*Vars.chi1_n*Cons.nu + 3.0*Cons.M2**2*Vars.chi2_n/Cons.M**2) - Cons.M2**2*Vars.chiVec2*Vars.v/Cons.M**2)
@njit
def OmegaVec_chiVec_2_1p0(Cons,Vars):
Omega2_coeff = Vars.v**5/Cons.M
return Omega2_coeff*(Vars.ellHat*(0.75*Cons.delta + 0.5*Cons.nu + Vars.v**2*(Cons.delta*(0.5625 - 0.625*Cons.nu) + Cons.nu*(1.25 - 0.0416666666666667*Cons.nu) + 0.5625) + 0.75) + Vars.nHat*Vars.v*(3.0*Vars.chi2_n*Cons.nu + 3.0*Cons.M1**2*Vars.chi1_n/Cons.M**2) - Cons.M1**2*Vars.chiVec1*Vars.v/Cons.M**2)
@njit
def OmegaVec_1p0(Cons,Vars):
a_ell_0 = 7.0*Vars.S_n + 3.0*Vars.Sigma_n*Cons.delta
gamma_PN_2 = 1.0 - 0.333333333333333*Cons.nu
gamma_PN_0 = 1.00000000000000
a_ell_2 = Vars.S_n*(-9.66666666666667*Cons.nu - 10.0) + Vars.Sigma_n*Cons.delta*(-4.5*Cons.nu - 6.0)
return Vars.ellHat*Vars.v**3/Cons.M + Vars.nHat*Vars.v**6*(a_ell_0 + a_ell_2*Vars.v**2)*(gamma_PN_0 + gamma_PN_2*Vars.v**2)/Cons.M**3
@njit(cache=True)
def TaylorT1_1p0(Cons,Vars):
Flux = Vars.Fcal_coeff*(Cons.Fcal_0 + Cons.Fcal_2*Vars.v**2)
dEdV = -Cons.M*Cons.nu*Vars.v*(Cons.E_0 + 2.0*Cons.E_2*Vars.v**2)
Absorption = 0
dvdt_T1 = (-Absorption - Flux)/dEdV
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_1p0(Cons,Vars)[1:])
dydt[0] = dvdt_T1[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_1p0(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_1p0(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
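# TaylorT4 forms the same dv/dt, but with the ratio Flux/(dE/dv) re-expanded
# as a single truncated power series in v before integrating; the nested
# correction groups below spell out that series inversion term by term.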
@njit(cache=True)
def TaylorT4_1p0(Cons,Vars):
    dvdt_T4 = -2.0*Vars.Fcal_coeff*(-Cons.Fcal_0 + 0*Vars.v + -Cons.Fcal_2*Vars.v**2 + (0*(--Cons.Fcal_0*Vars.v - 0*Vars.v**2) - 4*Cons.E_2*Cons.M*-Cons.Fcal_0*Vars.v**2 + 0**2*-Cons.Fcal_0*Vars.v**2/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(Cons.nu*Vars.v*2*Cons.E_0*Cons.M)
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_1p0(Cons,Vars)[1:])
dydt[0] = dvdt_T4[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_1p0(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_1p0(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
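# TaylorT5 instead expands the reciprocal, dt/dv = -(dE/dv)/Flux, as a
# truncated series in v and inverts it pointwise (dvdt_T5 = 1/dtdv).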
@njit(cache=True)
def TaylorT5_1p0(Cons,Vars):
dtdv = -0.5*Cons.nu*Vars.v*(2*Cons.E_0*Cons.M + 0*Vars.v + 4*Cons.E_2*Cons.M*Vars.v**2 + (0*(-2*Cons.E_0*Cons.M*Vars.v - 0*Vars.v**2) - -Cons.Fcal_2*2*Cons.E_0*Cons.M*Vars.v**2 + 0**2*2*Cons.E_0*Cons.M*Vars.v**2/-Cons.Fcal_0)/-Cons.Fcal_0)/(Vars.Fcal_coeff*-Cons.Fcal_0)
dvdt_T5 = 1.0/dtdv
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_1p0(Cons,Vars)[1:])
dydt[0] = dvdt_T5[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_1p0(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_1p0(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
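# -----------------------------------------------------------------------------
# 1.5 PN order: the leading spin-orbit terms enter (Vars.Fcal_SO_3,
# Vars.E_SO_3), so Recalculate_1p5 also builds lambdaHat and the lambda
# projections of the spins, and gamma_PN gains its first spin-dependent
# coefficient.
# -----------------------------------------------------------------------------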
@njit(cache=True)
def Recalculate_1p5(Cons,Vars,y):
Vars.v = np.array([y[0]])
Vars.rfrak_chi1 = np.array([y[1],y[2]])
Vars.rfrak_chi2 = np.array([y[3],y[4]])
Vars.rfrak_frame = np.array([y[5],y[6],y[7]])
Vars.R = exp(Vars.rfrak_frame[0]*Cons.xHat + Vars.rfrak_frame[1]*Cons.yHat + Vars.rfrak_frame[2]*Cons.zHat)
Vars.nHat = mul(mul(Vars.R,Cons.xHat),conjugate(Vars.R))
Vars.lambdaHat = mul(mul(Vars.R,Cons.yHat),conjugate(Vars.R))
Vars.ellHat = mul(mul(Vars.R,Cons.zHat),conjugate(Vars.R))
Vars.R_S1 = exp(Vars.rfrak_chi1[0]*Cons.xHat + Vars.rfrak_chi1[1]*Cons.yHat)
Vars.R_S2 = exp(Vars.rfrak_chi2[0]*Cons.xHat + Vars.rfrak_chi2[1]*Cons.yHat)
Vars.chiVec1 = mul(mul(mul(Cons.S_chi1,Vars.R_S1),Cons.zHat),mul(conjugate(Vars.R_S1),conjugate(Cons.S_chi1)))
Vars.chiVec2 = mul(mul(mul(Cons.S_chi2,Vars.R_S2),Cons.zHat),mul(conjugate(Vars.R_S2),conjugate(Cons.S_chi2)))
Vars.chi1_n = np.array([dot(Vars.chiVec1[1:],Vars.nHat[1:])])
Vars.chi1_lambda = np.array([dot(Vars.chiVec1[1:],Vars.lambdaHat[1:])])
Vars.chi1_ell = np.array([dot(Vars.chiVec1[1:],Vars.ellHat[1:])])
Vars.chi2_n = np.array([dot(Vars.chiVec2[1:],Vars.nHat[1:])])
Vars.chi2_lambda = np.array([dot(Vars.chiVec2[1:],Vars.lambdaHat[1:])])
Vars.chi2_ell = np.array([dot(Vars.chiVec2[1:],Vars.ellHat[1:])])
Vars.S_ell = Cons.M1**2*Vars.chi1_ell + Cons.M2**2*Vars.chi2_ell
Vars.S_n = Cons.M1**2*Vars.chi1_n + Cons.M2**2*Vars.chi2_n
Vars.S_lambda = Cons.M1**2*Vars.chi1_lambda + Cons.M2**2*Vars.chi2_lambda
Vars.Sigma_ell = Cons.M*(-Cons.M1*Vars.chi1_ell + Cons.M2*Vars.chi2_ell)
Vars.Sigma_n = Cons.M*(-Cons.M1*Vars.chi1_n + Cons.M2*Vars.chi2_n)
Vars.Sigma_lambda = Cons.M*(-Cons.M1*Vars.chi1_lambda + Cons.M2*Vars.chi2_lambda)
Vars.chi_s_ell = Vars.chi1_ell/2 + Vars.chi2_ell/2
Vars.chi_a_ell = Vars.chi1_ell/2 - Vars.chi2_ell/2
Vars.Fcal_coeff = 32*Cons.nu**2*Vars.v**10/5
Vars.Fcal_SO_3 = (-4*Vars.S_ell - 5*Vars.Sigma_ell*Cons.delta/4)/Cons.M**2
Vars.E_SO_3 = (14*Vars.S_ell/3 + 2*Vars.Sigma_ell*Cons.delta)/Cons.M**2
@njit
def OmegaVec_chiVec_1_1p5(Cons,Vars):
Omega1_coeff = Vars.v**5/Cons.M
return Omega1_coeff*(Vars.ellHat*(-0.75*Cons.delta + 0.5*Cons.nu + Vars.v**2*(Cons.delta*(0.625*Cons.nu - 0.5625) + Cons.nu*(1.25 - 0.0416666666666667*Cons.nu) + 0.5625) + 0.75) + Vars.nHat*Vars.v*(3.0*Vars.chi1_n*Cons.nu + 3.0*Cons.M2**2*Vars.chi2_n/Cons.M**2) - Cons.M2**2*Vars.chiVec2*Vars.v/Cons.M**2)
@njit
def OmegaVec_chiVec_2_1p5(Cons,Vars):
Omega2_coeff = Vars.v**5/Cons.M
return Omega2_coeff*(Vars.ellHat*(0.75*Cons.delta + 0.5*Cons.nu + Vars.v**2*(Cons.delta*(0.5625 - 0.625*Cons.nu) + Cons.nu*(1.25 - 0.0416666666666667*Cons.nu) + 0.5625) + 0.75) + Vars.nHat*Vars.v*(3.0*Vars.chi2_n*Cons.nu + 3.0*Cons.M1**2*Vars.chi1_n/Cons.M**2) - Cons.M1**2*Vars.chiVec1*Vars.v/Cons.M**2)
@njit
def OmegaVec_1p5(Cons,Vars):
a_ell_0 = 7.0*Vars.S_n + 3.0*Vars.Sigma_n*Cons.delta
gamma_PN_2 = 1.0 - 0.333333333333333*Cons.nu
gamma_PN_3 = (1.66666666666667*Vars.S_ell + Vars.Sigma_ell*Cons.delta)/Cons.M**2
gamma_PN_0 = 1.00000000000000
a_ell_2 = Vars.S_n*(-9.66666666666667*Cons.nu - 10.0) + Vars.Sigma_n*Cons.delta*(-4.5*Cons.nu - 6.0)
return Vars.ellHat*Vars.v**3/Cons.M + Vars.nHat*Vars.v**6*(a_ell_0 + a_ell_2*Vars.v**2)*(gamma_PN_0 + Vars.v**2*(gamma_PN_2 + gamma_PN_3*Vars.v))/Cons.M**3
@njit(cache=True)
def TaylorT1_1p5(Cons,Vars):
Flux = Vars.Fcal_coeff*(Cons.Fcal_0 + Vars.v**2*(Cons.Fcal_2 + Vars.v*(Cons.Fcal_3 + Vars.Fcal_SO_3)))
dEdV = -0.5*Cons.M*Cons.nu*Vars.v*(2.0*Cons.E_0 + Vars.v**2*(4.0*Cons.E_2 + 5.0*Vars.E_SO_3*Vars.v))
Absorption = 0
dvdt_T1 = (-Absorption - Flux)/dEdV
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_1p5(Cons,Vars)[1:])
dydt[0] = dvdt_T1[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_1p5(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_1p5(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
@njit(cache=True)
def TaylorT4_1p5(Cons,Vars):
    dvdt_T4 = -2.0*Vars.Fcal_coeff*(1.0*-Cons.Fcal_0 + 1.0*0*Vars.v + 1.0*-Cons.Fcal_2*Vars.v**2 + 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**3 + (0*(-1.0*-Cons.Fcal_0*Vars.v - 1.0*0*Vars.v**2 - 1.0*-Cons.Fcal_2*Vars.v**3) + 4*Cons.E_2*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**2 - 1.0*0*Vars.v**3) - 1.0*5*Vars.E_SO_3*Cons.M*-Cons.Fcal_0*Vars.v**3 + (0*(0*(1.0*-Cons.Fcal_0*Vars.v**2 + 1.0*0*Vars.v**3) + 2.0*4*Cons.E_2*Cons.M*-Cons.Fcal_0*Vars.v**3) - 1.0*0**3*-Cons.Fcal_0*Vars.v**3/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(Cons.nu*Vars.v*2*Cons.E_0*Cons.M)
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_1p5(Cons,Vars)[1:])
dydt[0] = dvdt_T4[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_1p5(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_1p5(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
@njit(cache=True)
def TaylorT5_1p5(Cons,Vars):
    dtdv = -0.5*Cons.nu*Vars.v*(1.0*2*Cons.E_0*Cons.M + 1.0*0*Vars.v + 1.0*4*Cons.E_2*Cons.M*Vars.v**2 + 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**3 + (0*(-1.0*2*Cons.E_0*Cons.M*Vars.v - 1.0*0*Vars.v**2 - 1.0*4*Cons.E_2*Cons.M*Vars.v**3) + -Cons.Fcal_2*(-1.0*2*Cons.E_0*Cons.M*Vars.v**2 - 1.0*0*Vars.v**3) - 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*2*Cons.E_0*Cons.M*Vars.v**3 + (0*(0*(1.0*2*Cons.E_0*Cons.M*Vars.v**2 + 1.0*0*Vars.v**3) + 2.0*-Cons.Fcal_2*2*Cons.E_0*Cons.M*Vars.v**3) - 1.0*0**3*2*Cons.E_0*Cons.M*Vars.v**3/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/(Vars.Fcal_coeff*-Cons.Fcal_0)
dvdt_T5 = 1.0/dtdv
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_1p5(Cons,Vars)[1:])
dydt[0] = dvdt_T5[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_1p5(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_1p5(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
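# -----------------------------------------------------------------------------
# 2.0 PN order: quadratic-in-spin terms enter (Vars.Fcal_SQ_4, Vars.E_SQ_4),
# built from the invariants chi1chi1, chi1chi2, chi2chi2 and the aligned
# projections chi_s_ell and chi_a_ell.
# -----------------------------------------------------------------------------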
@njit(cache=True)
def Recalculate_2p0(Cons,Vars,y):
Vars.v = np.array([y[0]])
Vars.rfrak_chi1 = np.array([y[1],y[2]])
Vars.rfrak_chi2 = np.array([y[3],y[4]])
Vars.rfrak_frame = np.array([y[5],y[6],y[7]])
Vars.R = exp(Vars.rfrak_frame[0]*Cons.xHat + Vars.rfrak_frame[1]*Cons.yHat + Vars.rfrak_frame[2]*Cons.zHat)
Vars.nHat = mul(mul(Vars.R,Cons.xHat),conjugate(Vars.R))
Vars.lambdaHat = mul(mul(Vars.R,Cons.yHat),conjugate(Vars.R))
Vars.ellHat = mul(mul(Vars.R,Cons.zHat),conjugate(Vars.R))
Vars.R_S1 = exp(Vars.rfrak_chi1[0]*Cons.xHat + Vars.rfrak_chi1[1]*Cons.yHat)
Vars.R_S2 = exp(Vars.rfrak_chi2[0]*Cons.xHat + Vars.rfrak_chi2[1]*Cons.yHat)
Vars.chiVec1 = mul(mul(mul(Cons.S_chi1,Vars.R_S1),Cons.zHat),mul(conjugate(Vars.R_S1),conjugate(Cons.S_chi1)))
Vars.chiVec2 = mul(mul(mul(Cons.S_chi2,Vars.R_S2),Cons.zHat),mul(conjugate(Vars.R_S2),conjugate(Cons.S_chi2)))
Vars.chi1_n = np.array([dot(Vars.chiVec1[1:],Vars.nHat[1:])])
Vars.chi1_lambda = np.array([dot(Vars.chiVec1[1:],Vars.lambdaHat[1:])])
Vars.chi1_ell = np.array([dot(Vars.chiVec1[1:],Vars.ellHat[1:])])
Vars.chi2_n = np.array([dot(Vars.chiVec2[1:],Vars.nHat[1:])])
Vars.chi2_lambda = np.array([dot(Vars.chiVec2[1:],Vars.lambdaHat[1:])])
Vars.chi2_ell = np.array([dot(Vars.chiVec2[1:],Vars.ellHat[1:])])
Vars.S_ell = Cons.M1**2*Vars.chi1_ell + Cons.M2**2*Vars.chi2_ell
Vars.S_n = Cons.M1**2*Vars.chi1_n + Cons.M2**2*Vars.chi2_n
Vars.S_lambda = Cons.M1**2*Vars.chi1_lambda + Cons.M2**2*Vars.chi2_lambda
Vars.Sigma_ell = Cons.M*(-Cons.M1*Vars.chi1_ell + Cons.M2*Vars.chi2_ell)
Vars.Sigma_n = Cons.M*(-Cons.M1*Vars.chi1_n + Cons.M2*Vars.chi2_n)
Vars.Sigma_lambda = Cons.M*(-Cons.M1*Vars.chi1_lambda + Cons.M2*Vars.chi2_lambda)
Vars.chi_s_ell = Vars.chi1_ell/2 + Vars.chi2_ell/2
Vars.chi_a_ell = Vars.chi1_ell/2 - Vars.chi2_ell/2
Vars.Fcal_coeff = 32*Cons.nu**2*Vars.v**10/5
Vars.Fcal_SQ_4 = Cons.chi1chi1*(-89*Cons.delta/192 + 89*Cons.nu/96 - 89/192) - 103*Cons.chi1chi2*Cons.nu/48 + Cons.chi2chi2*(89*Cons.delta/192 + 89*Cons.nu/96 - 89/192) + Vars.chi_a_ell*(Vars.chi_a_ell*(287/96 - 12*Cons.nu) + 287*Vars.chi_s_ell*Cons.delta/48) + Vars.chi_s_ell**2*(Cons.nu/24 + 287/96)
Vars.Fcal_SO_3 = (-4*Vars.S_ell - 5*Vars.Sigma_ell*Cons.delta/4)/Cons.M**2
Vars.E_SQ_4 = -3*Vars.chi_a_ell**2/2 - 3*Vars.chi_s_ell**2/2 - Cons.delta*(Cons.chi2chi2/2 + 3*Vars.chi_a_ell*Vars.chi_s_ell) + Cons.nu*(Cons.chi1chi2 + 6*Vars.chi_a_ell**2) + (Cons.chi1chi1 + Cons.chi2chi2)*(Cons.delta - 2*Cons.nu + 1)/4
Vars.E_SO_3 = (14*Vars.S_ell/3 + 2*Vars.Sigma_ell*Cons.delta)/Cons.M**2
@njit
def OmegaVec_chiVec_1_2p0(Cons,Vars):
Omega1_coeff = Vars.v**5/Cons.M
return Omega1_coeff*(Vars.ellHat*(-0.75*Cons.delta + 0.5*Cons.nu + Vars.v**2*(Cons.delta*(0.625*Cons.nu - 0.5625) + Cons.nu*(1.25 - 0.0416666666666667*Cons.nu) + Vars.v**2*(Cons.delta*(Cons.nu*(4.875 - 0.15625*Cons.nu) - 0.84375) + Cons.nu*(Cons.nu*(-0.0208333333333333*Cons.nu - 3.28125) + 0.1875) + 0.84375) + 0.5625) + 0.75) + Vars.nHat*Vars.v*(3.0*Vars.chi1_n*Cons.nu + 3.0*Cons.M2**2*Vars.chi2_n/Cons.M**2) - Cons.M2**2*Vars.chiVec2*Vars.v/Cons.M**2)
@njit
def OmegaVec_chiVec_2_2p0(Cons,Vars):
Omega2_coeff = Vars.v**5/Cons.M
return Omega2_coeff*(Vars.ellHat*(0.75*Cons.delta + 0.5*Cons.nu + Vars.v**2*(Cons.delta*(0.5625 - 0.625*Cons.nu) + Cons.nu*(1.25 - 0.0416666666666667*Cons.nu) + Vars.v**2*(Cons.delta*(Cons.nu*(0.15625*Cons.nu - 4.875) + 0.84375) + Cons.nu*(Cons.nu*(-0.0208333333333333*Cons.nu - 3.28125) + 0.1875) + 0.84375) + 0.5625) + 0.75) + Vars.nHat*Vars.v*(3.0*Vars.chi2_n*Cons.nu + 3.0*Cons.M1**2*Vars.chi1_n/Cons.M**2) - Cons.M1**2*Vars.chiVec1*Vars.v/Cons.M**2)
@njit
def OmegaVec_2p0(Cons,Vars):
a_ell_0 = 7.0*Vars.S_n + 3.0*Vars.Sigma_n*Cons.delta
gamma_PN_2 = 1.0 - 0.333333333333333*Cons.nu
a_ell_4 = Vars.S_n*(5.77777777777778*Cons.nu**2 + 14.75*Cons.nu + 1.5) + Vars.Sigma_n*Cons.delta*(2.83333333333333*Cons.nu**2 + 9.125*Cons.nu + 1.5)
gamma_PN_3 = (1.66666666666667*Vars.S_ell + Vars.Sigma_ell*Cons.delta)/Cons.M**2
gamma_PN_0 = 1.00000000000000
gamma_PN_4 = 1.0 - 5.41666666666667*Cons.nu
a_ell_2 = Vars.S_n*(-9.66666666666667*Cons.nu - 10.0) + Vars.Sigma_n*Cons.delta*(-4.5*Cons.nu - 6.0)
return Vars.ellHat*Vars.v**3/Cons.M + Vars.nHat*Vars.v**6*(a_ell_0 + Vars.v**2*(a_ell_2 + a_ell_4*Vars.v**2))*(gamma_PN_0 + Vars.v**2*(gamma_PN_2 + Vars.v*(gamma_PN_3 + gamma_PN_4*Vars.v)))/Cons.M**3
@njit(cache=True)
def TaylorT1_2p0(Cons,Vars):
Flux = Vars.Fcal_coeff*(Cons.Fcal_0 + Vars.v**2*(Cons.Fcal_2 + Vars.v*(Cons.Fcal_3 + Vars.Fcal_SO_3 + Vars.v*(Cons.Fcal_4 + Vars.Fcal_SQ_4))))
dEdV = -0.5*Cons.M*Cons.nu*Vars.v*(2.0*Cons.E_0 + Vars.v**2*(4.0*Cons.E_2 + Vars.v*(5.0*Vars.E_SO_3 + 6.0*Vars.v*(Cons.E_4 + Vars.E_SQ_4))))
Absorption = 0
dvdt_T1 = (-Absorption - Flux)/dEdV
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_2p0(Cons,Vars)[1:])
dydt[0] = dvdt_T1[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_2p0(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_2p0(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
@njit(cache=True)
def TaylorT4_2p0(Cons,Vars):
    dvdt_T4 = -2.0*Vars.Fcal_coeff*(1.0*-Cons.Fcal_0 + 1.0*0*Vars.v + 1.0*-Cons.Fcal_2*Vars.v**2 + 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**3 + 1.0*(-Cons.Fcal_4 - Vars.Fcal_SQ_4)*Vars.v**4 + (0*(-1.0*-Cons.Fcal_0*Vars.v - 1.0*0*Vars.v**2 - 1.0*-Cons.Fcal_2*Vars.v**3 - 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**4) + 4*Cons.E_2*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**2 - 1.0*0*Vars.v**3 - 1.0*-Cons.Fcal_2*Vars.v**4) + 5*Vars.E_SO_3*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**3 - 1.0*0*Vars.v**4) - 1.0*(6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*-Cons.Fcal_0*Vars.v**4 + (0*(0*(1.0*-Cons.Fcal_0*Vars.v**2 + 1.0*0*Vars.v**3 + 1.0*-Cons.Fcal_2*Vars.v**4) + 4*Cons.E_2*Cons.M*(2.0*-Cons.Fcal_0*Vars.v**3 + 2.0*0*Vars.v**4) + 2.0*5*Vars.E_SO_3*Cons.M*-Cons.Fcal_0*Vars.v**4) + 1.0*(4*Cons.E_2*Cons.M)**2*-Cons.Fcal_0*Vars.v**4 + (0**2*(0*(-1.0*-Cons.Fcal_0*Vars.v**3 - 1.0*0*Vars.v**4) - 3.0*4*Cons.E_2*Cons.M*-Cons.Fcal_0*Vars.v**4) + 1.0*0**4*-Cons.Fcal_0*Vars.v**4/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(Cons.nu*Vars.v*2*Cons.E_0*Cons.M)
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_2p0(Cons,Vars)[1:])
dydt[0] = dvdt_T4[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_2p0(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_2p0(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
@njit(cache=True)
def TaylorT5_2p0(Cons,Vars):
    dtdv = -0.5*Cons.nu*Vars.v*(1.0*2*Cons.E_0*Cons.M + 1.0*0*Vars.v + 1.0*4*Cons.E_2*Cons.M*Vars.v**2 + 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**3 + 1.0*(6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*Vars.v**4 + (0*(-1.0*2*Cons.E_0*Cons.M*Vars.v - 1.0*0*Vars.v**2 - 1.0*4*Cons.E_2*Cons.M*Vars.v**3 - 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**4) + -Cons.Fcal_2*(-1.0*2*Cons.E_0*Cons.M*Vars.v**2 - 1.0*0*Vars.v**3 - 1.0*4*Cons.E_2*Cons.M*Vars.v**4) + (-Cons.Fcal_3 - Vars.Fcal_SO_3)*(-1.0*2*Cons.E_0*Cons.M*Vars.v**3 - 1.0*0*Vars.v**4) - 1.0*(-Cons.Fcal_4 - Vars.Fcal_SQ_4)*2*Cons.E_0*Cons.M*Vars.v**4 + (0*(0*(1.0*2*Cons.E_0*Cons.M*Vars.v**2 + 1.0*0*Vars.v**3 + 1.0*4*Cons.E_2*Cons.M*Vars.v**4) + -Cons.Fcal_2*(2.0*2*Cons.E_0*Cons.M*Vars.v**3 + 2.0*0*Vars.v**4) + 2.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*2*Cons.E_0*Cons.M*Vars.v**4) + 1.0*(-Cons.Fcal_2)**2*2*Cons.E_0*Cons.M*Vars.v**4 + (0**2*(0*(-1.0*2*Cons.E_0*Cons.M*Vars.v**3 - 1.0*0*Vars.v**4) - 3.0*-Cons.Fcal_2*2*Cons.E_0*Cons.M*Vars.v**4) + 1.0*0**4*2*Cons.E_0*Cons.M*Vars.v**4/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/(Vars.Fcal_coeff*-Cons.Fcal_0)
dvdt_T5 = 1.0/dtdv
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_2p0(Cons,Vars)[1:])
dydt[0] = dvdt_T5[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_2p0(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_2p0(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
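# -----------------------------------------------------------------------------
# 2.5 PN order: next-to-leading spin-orbit terms enter (Vars.Fcal_SO_5,
# Vars.E_SO_5).
# -----------------------------------------------------------------------------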
@njit(cache=True)
def Recalculate_2p5(Cons,Vars,y):
Vars.v = np.array([y[0]])
Vars.rfrak_chi1 = np.array([y[1],y[2]])
Vars.rfrak_chi2 = np.array([y[3],y[4]])
Vars.rfrak_frame = np.array([y[5],y[6],y[7]])
Vars.R = exp(Vars.rfrak_frame[0]*Cons.xHat + Vars.rfrak_frame[1]*Cons.yHat + Vars.rfrak_frame[2]*Cons.zHat)
Vars.nHat = mul(mul(Vars.R,Cons.xHat),conjugate(Vars.R))
Vars.lambdaHat = mul(mul(Vars.R,Cons.yHat),conjugate(Vars.R))
Vars.ellHat = mul(mul(Vars.R,Cons.zHat),conjugate(Vars.R))
Vars.R_S1 = exp(Vars.rfrak_chi1[0]*Cons.xHat + Vars.rfrak_chi1[1]*Cons.yHat)
Vars.R_S2 = exp(Vars.rfrak_chi2[0]*Cons.xHat + Vars.rfrak_chi2[1]*Cons.yHat)
Vars.chiVec1 = mul(mul(mul(Cons.S_chi1,Vars.R_S1),Cons.zHat),mul(conjugate(Vars.R_S1),conjugate(Cons.S_chi1)))
Vars.chiVec2 = mul(mul(mul(Cons.S_chi2,Vars.R_S2),Cons.zHat),mul(conjugate(Vars.R_S2),conjugate(Cons.S_chi2)))
Vars.chi1_n = np.array([dot(Vars.chiVec1[1:],Vars.nHat[1:])])
Vars.chi1_lambda = np.array([dot(Vars.chiVec1[1:],Vars.lambdaHat[1:])])
Vars.chi1_ell = np.array([dot(Vars.chiVec1[1:],Vars.ellHat[1:])])
Vars.chi2_n = np.array([dot(Vars.chiVec2[1:],Vars.nHat[1:])])
Vars.chi2_lambda = np.array([dot(Vars.chiVec2[1:],Vars.lambdaHat[1:])])
Vars.chi2_ell = np.array([dot(Vars.chiVec2[1:],Vars.ellHat[1:])])
Vars.S_ell = Cons.M1**2*Vars.chi1_ell + Cons.M2**2*Vars.chi2_ell
Vars.S_n = Cons.M1**2*Vars.chi1_n + Cons.M2**2*Vars.chi2_n
Vars.S_lambda = Cons.M1**2*Vars.chi1_lambda + Cons.M2**2*Vars.chi2_lambda
Vars.Sigma_ell = Cons.M*(-Cons.M1*Vars.chi1_ell + Cons.M2*Vars.chi2_ell)
Vars.Sigma_n = Cons.M*(-Cons.M1*Vars.chi1_n + Cons.M2*Vars.chi2_n)
Vars.Sigma_lambda = Cons.M*(-Cons.M1*Vars.chi1_lambda + Cons.M2*Vars.chi2_lambda)
Vars.chi_s_ell = Vars.chi1_ell/2 + Vars.chi2_ell/2
Vars.chi_a_ell = Vars.chi1_ell/2 - Vars.chi2_ell/2
Vars.Fcal_coeff = 32*Cons.nu**2*Vars.v**10/5
Vars.Fcal_SQ_4 = Cons.chi1chi1*(-89*Cons.delta/192 + 89*Cons.nu/96 - 89/192) - 103*Cons.chi1chi2*Cons.nu/48 + Cons.chi2chi2*(89*Cons.delta/192 + 89*Cons.nu/96 - 89/192) + Vars.chi_a_ell*(Vars.chi_a_ell*(287/96 - 12*Cons.nu) + 287*Vars.chi_s_ell*Cons.delta/48) + Vars.chi_s_ell**2*(Cons.nu/24 + 287/96)
Vars.Fcal_SO_3 = (-4*Vars.S_ell - 5*Vars.Sigma_ell*Cons.delta/4)/Cons.M**2
Vars.Fcal_SO_5 = (Vars.S_ell*(272*Cons.nu/9 - 9/2) + Vars.Sigma_ell*Cons.delta*(43*Cons.nu/4 - 13/16))/Cons.M**2
Vars.E_SQ_4 = -3*Vars.chi_a_ell**2/2 - 3*Vars.chi_s_ell**2/2 - Cons.delta*(Cons.chi2chi2/2 + 3*Vars.chi_a_ell*Vars.chi_s_ell) + Cons.nu*(Cons.chi1chi2 + 6*Vars.chi_a_ell**2) + (Cons.chi1chi1 + Cons.chi2chi2)*(Cons.delta - 2*Cons.nu + 1)/4
Vars.E_SO_3 = (14*Vars.S_ell/3 + 2*Vars.Sigma_ell*Cons.delta)/Cons.M**2
Vars.E_SO_5 = (Vars.S_ell*(11 - 61*Cons.nu/9) + Vars.Sigma_ell*Cons.delta*(3 - 10*Cons.nu/3))/Cons.M**2
@njit
def OmegaVec_chiVec_1_2p5(Cons,Vars):
Omega1_coeff = Vars.v**5/Cons.M
return Omega1_coeff*(Vars.ellHat*(-0.75*Cons.delta + 0.5*Cons.nu + Vars.v**2*(Cons.delta*(0.625*Cons.nu - 0.5625) + Cons.nu*(1.25 - 0.0416666666666667*Cons.nu) + Vars.v**2*(Cons.delta*(Cons.nu*(4.875 - 0.15625*Cons.nu) - 0.84375) + Cons.nu*(Cons.nu*(-0.0208333333333333*Cons.nu - 3.28125) + 0.1875) + 0.84375) + 0.5625) + 0.75) + Vars.nHat*Vars.v*(3.0*Vars.chi1_n*Cons.nu + 3.0*Cons.M2**2*Vars.chi2_n/Cons.M**2) - Cons.M2**2*Vars.chiVec2*Vars.v/Cons.M**2)
@njit
def OmegaVec_chiVec_2_2p5(Cons,Vars):
Omega2_coeff = Vars.v**5/Cons.M
return Omega2_coeff*(Vars.ellHat*(0.75*Cons.delta + 0.5*Cons.nu + Vars.v**2*(Cons.delta*(0.5625 - 0.625*Cons.nu) + Cons.nu*(1.25 - 0.0416666666666667*Cons.nu) + Vars.v**2*(Cons.delta*(Cons.nu*(0.15625*Cons.nu - 4.875) + 0.84375) + Cons.nu*(Cons.nu*(-0.0208333333333333*Cons.nu - 3.28125) + 0.1875) + 0.84375) + 0.5625) + 0.75) + Vars.nHat*Vars.v*(3.0*Vars.chi2_n*Cons.nu + 3.0*Cons.M1**2*Vars.chi1_n/Cons.M**2) - Cons.M1**2*Vars.chiVec1*Vars.v/Cons.M**2)
@njit
def OmegaVec_2p5(Cons,Vars):
a_ell_0 = 7.0*Vars.S_n + 3.0*Vars.Sigma_n*Cons.delta
gamma_PN_2 = 1.0 - 0.333333333333333*Cons.nu
a_ell_4 = Vars.S_n*(5.77777777777778*Cons.nu**2 + 14.75*Cons.nu + 1.5) + Vars.Sigma_n*Cons.delta*(2.83333333333333*Cons.nu**2 + 9.125*Cons.nu + 1.5)
gamma_PN_3 = (1.66666666666667*Vars.S_ell + Vars.Sigma_ell*Cons.delta)/Cons.M**2
gamma_PN_0 = 1.00000000000000
gamma_PN_5 = (Vars.S_ell*(0.888888888888889*Cons.nu + 3.33333333333333) + 2.0*Vars.Sigma_ell*Cons.delta)/Cons.M**2
gamma_PN_4 = 1.0 - 5.41666666666667*Cons.nu
a_ell_2 = Vars.S_n*(-9.66666666666667*Cons.nu - 10.0) + Vars.Sigma_n*Cons.delta*(-4.5*Cons.nu - 6.0)
return Vars.ellHat*Vars.v**3/Cons.M + Vars.nHat*Vars.v**6*(a_ell_0 + Vars.v**2*(a_ell_2 + a_ell_4*Vars.v**2))*(gamma_PN_0 + Vars.v**2*(gamma_PN_2 + Vars.v*(gamma_PN_3 + Vars.v*(gamma_PN_4 + gamma_PN_5*Vars.v))))/Cons.M**3
@njit(cache=True)
def TaylorT1_2p5(Cons,Vars):
Flux = Vars.Fcal_coeff*(Cons.Fcal_0 + Vars.v**2*(Cons.Fcal_2 + Vars.v*(Cons.Fcal_3 + Vars.Fcal_SO_3 + Vars.v*(Cons.Fcal_4 + Vars.Fcal_SQ_4 + Vars.v*(Cons.Fcal_5 + Vars.Fcal_SO_5)))))
dEdV = -0.5*Cons.M*Cons.nu*Vars.v*(2.0*Cons.E_0 + Vars.v**2*(4.0*Cons.E_2 + Vars.v*(5.0*Vars.E_SO_3 + Vars.v*(6.0*Cons.E_4 + 7.0*Vars.E_SO_5*Vars.v + 6.0*Vars.E_SQ_4))))
Absorption = 0
dvdt_T1 = (-Absorption - Flux)/dEdV
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_2p5(Cons,Vars)[1:])
dydt[0] = dvdt_T1[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_2p5(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_2p5(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
@njit(cache=True)
def TaylorT4_2p5(Cons,Vars):
    dvdt_T4 = -2.0*Vars.Fcal_coeff*(1.0*-Cons.Fcal_0 + 1.0*0*Vars.v + 1.0*-Cons.Fcal_2*Vars.v**2 + 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**3 + 1.0*(-Cons.Fcal_4 - Vars.Fcal_SQ_4)*Vars.v**4 + 1.0*(-Cons.Fcal_5 - Vars.Fcal_SO_5)*Vars.v**5 + (0*(-1.0*-Cons.Fcal_0*Vars.v - 1.0*0*Vars.v**2 - 1.0*-Cons.Fcal_2*Vars.v**3 - 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**4 - 1.0*(-Cons.Fcal_4 - Vars.Fcal_SQ_4)*Vars.v**5) + 4*Cons.E_2*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**2 - 1.0*0*Vars.v**3 - 1.0*-Cons.Fcal_2*Vars.v**4 - 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**5) + 5*Vars.E_SO_3*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**3 - 1.0*0*Vars.v**4 - 1.0*-Cons.Fcal_2*Vars.v**5) + (6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*(-1.0*-Cons.Fcal_0*Vars.v**4 - 1.0*0*Vars.v**5) - 1.0*7*Vars.E_SO_5*Cons.M*-Cons.Fcal_0*Vars.v**5 + (0*(0*(1.0*-Cons.Fcal_0*Vars.v**2 + 1.0*0*Vars.v**3 + 1.0*-Cons.Fcal_2*Vars.v**4 + 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**5) + 4*Cons.E_2*Cons.M*(2.0*-Cons.Fcal_0*Vars.v**3 + 2.0*0*Vars.v**4 + 2.0*-Cons.Fcal_2*Vars.v**5) + 5*Vars.E_SO_3*Cons.M*(2.0*-Cons.Fcal_0*Vars.v**4 + 2.0*0*Vars.v**5) + 2.0*(6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*-Cons.Fcal_0*Vars.v**5) + 4*Cons.E_2*Cons.M*(4*Cons.E_2*Cons.M*(1.0*-Cons.Fcal_0*Vars.v**4 + 1.0*0*Vars.v**5) + 2.0*5*Vars.E_SO_3*Cons.M*-Cons.Fcal_0*Vars.v**5) + (0*(0*(0*(-1.0*-Cons.Fcal_0*Vars.v**3 - 1.0*0*Vars.v**4 - 1.0*-Cons.Fcal_2*Vars.v**5) + 4*Cons.E_2*Cons.M*(-3.0*-Cons.Fcal_0*Vars.v**4 - 3.0*0*Vars.v**5) - 3.0*5*Vars.E_SO_3*Cons.M*-Cons.Fcal_0*Vars.v**5) - 3.0*(4*Cons.E_2*Cons.M)**2*-Cons.Fcal_0*Vars.v**5) + (0**3*(0*(1.0*-Cons.Fcal_0*Vars.v**4 + 1.0*0*Vars.v**5) + 4.0*4*Cons.E_2*Cons.M*-Cons.Fcal_0*Vars.v**5) - 1.0*0**5*-Cons.Fcal_0*Vars.v**5/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(Cons.nu*Vars.v*2*Cons.E_0*Cons.M)
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_2p5(Cons,Vars)[1:])
dydt[0] = dvdt_T4[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_2p5(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_2p5(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
@njit(cache=True)
def TaylorT5_2p5(Cons,Vars):
    dtdv = -0.5*Cons.nu*Vars.v*(1.0*2*Cons.E_0*Cons.M + 1.0*0*Vars.v + 1.0*4*Cons.E_2*Cons.M*Vars.v**2 + 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**3 + 1.0*(6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*Vars.v**4 + 1.0*7*Vars.E_SO_5*Cons.M*Vars.v**5 + (0*(-1.0*2*Cons.E_0*Cons.M*Vars.v - 1.0*0*Vars.v**2 - 1.0*4*Cons.E_2*Cons.M*Vars.v**3 - 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**4 - 1.0*(6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*Vars.v**5) + -Cons.Fcal_2*(-1.0*2*Cons.E_0*Cons.M*Vars.v**2 - 1.0*0*Vars.v**3 - 1.0*4*Cons.E_2*Cons.M*Vars.v**4 - 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**5) + (-Cons.Fcal_3 - Vars.Fcal_SO_3)*(-1.0*2*Cons.E_0*Cons.M*Vars.v**3 - 1.0*0*Vars.v**4 - 1.0*4*Cons.E_2*Cons.M*Vars.v**5) + (-Cons.Fcal_4 - Vars.Fcal_SQ_4)*(-1.0*2*Cons.E_0*Cons.M*Vars.v**4 - 1.0*0*Vars.v**5) - 1.0*(-Cons.Fcal_5 - Vars.Fcal_SO_5)*2*Cons.E_0*Cons.M*Vars.v**5 + (0*(0*(1.0*2*Cons.E_0*Cons.M*Vars.v**2 + 1.0*0*Vars.v**3 + 1.0*4*Cons.E_2*Cons.M*Vars.v**4 + 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**5) + -Cons.Fcal_2*(2.0*2*Cons.E_0*Cons.M*Vars.v**3 + 2.0*0*Vars.v**4 + 2.0*4*Cons.E_2*Cons.M*Vars.v**5) + (-Cons.Fcal_3 - Vars.Fcal_SO_3)*(2.0*2*Cons.E_0*Cons.M*Vars.v**4 + 2.0*0*Vars.v**5) + 2.0*(-Cons.Fcal_4 - Vars.Fcal_SQ_4)*2*Cons.E_0*Cons.M*Vars.v**5) + -Cons.Fcal_2*(-Cons.Fcal_2*(1.0*2*Cons.E_0*Cons.M*Vars.v**4 + 1.0*0*Vars.v**5) + 2.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*2*Cons.E_0*Cons.M*Vars.v**5) + (0*(0*(0*(-1.0*2*Cons.E_0*Cons.M*Vars.v**3 - 1.0*0*Vars.v**4 - 1.0*4*Cons.E_2*Cons.M*Vars.v**5) + -Cons.Fcal_2*(-3.0*2*Cons.E_0*Cons.M*Vars.v**4 - 3.0*0*Vars.v**5) - 3.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*2*Cons.E_0*Cons.M*Vars.v**5) - 3.0*(-Cons.Fcal_2)**2*2*Cons.E_0*Cons.M*Vars.v**5) + (0**3*(0*(1.0*2*Cons.E_0*Cons.M*Vars.v**4 + 1.0*0*Vars.v**5) + 4.0*-Cons.Fcal_2*2*Cons.E_0*Cons.M*Vars.v**5) - 1.0*0**5*2*Cons.E_0*Cons.M*Vars.v**5/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/(Vars.Fcal_coeff*-Cons.Fcal_0)
dvdt_T5 = 1.0/dtdv
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_2p5(Cons,Vars)[1:])
dydt[0] = dvdt_T5[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_2p5(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_2p5(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
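# -----------------------------------------------------------------------------
# 3.0 PN order: the flux gains its first logarithmic term
# (Cons.Fcal_lnv_6*Vars.logv, with Vars.logv = log(v) cached in Recalculate),
# the spin-orbit tail term Vars.Fcal_SO_6, and Cons.E_6 in the energy.
# -----------------------------------------------------------------------------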
@njit(cache=True)
def Recalculate_3p0(Cons,Vars,y):
Vars.v = np.array([y[0]])
Vars.rfrak_chi1 = np.array([y[1],y[2]])
Vars.rfrak_chi2 = np.array([y[3],y[4]])
Vars.rfrak_frame = np.array([y[5],y[6],y[7]])
Vars.R = exp(Vars.rfrak_frame[0]*Cons.xHat + Vars.rfrak_frame[1]*Cons.yHat + Vars.rfrak_frame[2]*Cons.zHat)
Vars.nHat = mul(mul(Vars.R,Cons.xHat),conjugate(Vars.R))
Vars.lambdaHat = mul(mul(Vars.R,Cons.yHat),conjugate(Vars.R))
Vars.ellHat = mul(mul(Vars.R,Cons.zHat),conjugate(Vars.R))
Vars.R_S1 = exp(Vars.rfrak_chi1[0]*Cons.xHat + Vars.rfrak_chi1[1]*Cons.yHat)
Vars.R_S2 = exp(Vars.rfrak_chi2[0]*Cons.xHat + Vars.rfrak_chi2[1]*Cons.yHat)
Vars.chiVec1 = mul(mul(mul(Cons.S_chi1,Vars.R_S1),Cons.zHat),mul(conjugate(Vars.R_S1),conjugate(Cons.S_chi1)))
Vars.chiVec2 = mul(mul(mul(Cons.S_chi2,Vars.R_S2),Cons.zHat),mul(conjugate(Vars.R_S2),conjugate(Cons.S_chi2)))
Vars.chi1_n = np.array([dot(Vars.chiVec1[1:],Vars.nHat[1:])])
Vars.chi1_lambda = np.array([dot(Vars.chiVec1[1:],Vars.lambdaHat[1:])])
Vars.chi1_ell = np.array([dot(Vars.chiVec1[1:],Vars.ellHat[1:])])
Vars.chi2_n = np.array([dot(Vars.chiVec2[1:],Vars.nHat[1:])])
Vars.chi2_lambda = np.array([dot(Vars.chiVec2[1:],Vars.lambdaHat[1:])])
Vars.chi2_ell = np.array([dot(Vars.chiVec2[1:],Vars.ellHat[1:])])
Vars.S_ell = Cons.M1**2*Vars.chi1_ell + Cons.M2**2*Vars.chi2_ell
Vars.S_n = Cons.M1**2*Vars.chi1_n + Cons.M2**2*Vars.chi2_n
Vars.S_lambda = Cons.M1**2*Vars.chi1_lambda + Cons.M2**2*Vars.chi2_lambda
Vars.Sigma_ell = Cons.M*(-Cons.M1*Vars.chi1_ell + Cons.M2*Vars.chi2_ell)
Vars.Sigma_n = Cons.M*(-Cons.M1*Vars.chi1_n + Cons.M2*Vars.chi2_n)
Vars.Sigma_lambda = Cons.M*(-Cons.M1*Vars.chi1_lambda + Cons.M2*Vars.chi2_lambda)
Vars.chi_s_ell = Vars.chi1_ell/2 + Vars.chi2_ell/2
Vars.chi_a_ell = Vars.chi1_ell/2 - Vars.chi2_ell/2
Vars.logv = log(Vars.v)
Vars.Fcal_coeff = 32*Cons.nu**2*Vars.v**10/5
Vars.Fcal_SQ_4 = Cons.chi1chi1*(-89*Cons.delta/192 + 89*Cons.nu/96 - 89/192) - 103*Cons.chi1chi2*Cons.nu/48 + Cons.chi2chi2*(89*Cons.delta/192 + 89*Cons.nu/96 - 89/192) + Vars.chi_a_ell*(Vars.chi_a_ell*(287/96 - 12*Cons.nu) + 287*Vars.chi_s_ell*Cons.delta/48) + Vars.chi_s_ell**2*(Cons.nu/24 + 287/96)
Vars.Fcal_SO_3 = (-4*Vars.S_ell - 5*Vars.Sigma_ell*Cons.delta/4)/Cons.M**2
Vars.Fcal_SO_5 = (Vars.S_ell*(272*Cons.nu/9 - 9/2) + Vars.Sigma_ell*Cons.delta*(43*Cons.nu/4 - 13/16))/Cons.M**2
Vars.Fcal_SO_6 = (-16*Vars.S_ell*pi - 31*Vars.Sigma_ell*Cons.delta*pi/6)/Cons.M**2
Vars.E_SQ_4 = -3*Vars.chi_a_ell**2/2 - 3*Vars.chi_s_ell**2/2 - Cons.delta*(Cons.chi2chi2/2 + 3*Vars.chi_a_ell*Vars.chi_s_ell) + Cons.nu*(Cons.chi1chi2 + 6*Vars.chi_a_ell**2) + (Cons.chi1chi1 + Cons.chi2chi2)*(Cons.delta - 2*Cons.nu + 1)/4
Vars.E_SO_3 = (14*Vars.S_ell/3 + 2*Vars.Sigma_ell*Cons.delta)/Cons.M**2
Vars.E_SO_5 = (Vars.S_ell*(11 - 61*Cons.nu/9) + Vars.Sigma_ell*Cons.delta*(3 - 10*Cons.nu/3))/Cons.M**2
@njit
def OmegaVec_chiVec_1_3p0(Cons,Vars):
Omega1_coeff = Vars.v**5/Cons.M
return Omega1_coeff*(Vars.ellHat*(-0.75*Cons.delta + 0.5*Cons.nu + Vars.v**2*(Cons.delta*(0.625*Cons.nu - 0.5625) + Cons.nu*(1.25 - 0.0416666666666667*Cons.nu) + Vars.v**2*(Cons.delta*(Cons.nu*(4.875 - 0.15625*Cons.nu) - 0.84375) + Cons.nu*(Cons.nu*(-0.0208333333333333*Cons.nu - 3.28125) + 0.1875) + 0.84375) + 0.5625) + 0.75) + Vars.nHat*Vars.v*(3.0*Vars.chi1_n*Cons.nu + 3.0*Cons.M2**2*Vars.chi2_n/Cons.M**2) - Cons.M2**2*Vars.chiVec2*Vars.v/Cons.M**2)
@njit
def OmegaVec_chiVec_2_3p0(Cons,Vars):
Omega2_coeff = Vars.v**5/Cons.M
return Omega2_coeff*(Vars.ellHat*(0.75*Cons.delta + 0.5*Cons.nu + Vars.v**2*(Cons.delta*(0.5625 - 0.625*Cons.nu) + Cons.nu*(1.25 - 0.0416666666666667*Cons.nu) + Vars.v**2*(Cons.delta*(Cons.nu*(0.15625*Cons.nu - 4.875) + 0.84375) + Cons.nu*(Cons.nu*(-0.0208333333333333*Cons.nu - 3.28125) + 0.1875) + 0.84375) + 0.5625) + 0.75) + Vars.nHat*Vars.v*(3.0*Vars.chi2_n*Cons.nu + 3.0*Cons.M1**2*Vars.chi1_n/Cons.M**2) - Cons.M1**2*Vars.chiVec1*Vars.v/Cons.M**2)
@njit
def OmegaVec_3p0(Cons,Vars):
a_ell_0 = 7.0*Vars.S_n + 3.0*Vars.Sigma_n*Cons.delta
gamma_PN_2 = 1.0 - 0.333333333333333*Cons.nu
a_ell_4 = Vars.S_n*(5.77777777777778*Cons.nu**2 + 14.75*Cons.nu + 1.5) + Vars.Sigma_n*Cons.delta*(2.83333333333333*Cons.nu**2 + 9.125*Cons.nu + 1.5)
gamma_PN_3 = (1.66666666666667*Vars.S_ell + Vars.Sigma_ell*Cons.delta)/Cons.M**2
gamma_PN_6 = 0.0123456790123457*Cons.nu**3 + 6.36111111111111*Cons.nu**2 - 2.98177812235564*Cons.nu + 1.0
gamma_PN_0 = 1.00000000000000
gamma_PN_5 = (Vars.S_ell*(0.888888888888889*Cons.nu + 3.33333333333333) + 2.0*Vars.Sigma_ell*Cons.delta)/Cons.M**2
gamma_PN_4 = 1.0 - 5.41666666666667*Cons.nu
a_ell_2 = Vars.S_n*(-9.66666666666667*Cons.nu - 10.0) + Vars.Sigma_n*Cons.delta*(-4.5*Cons.nu - 6.0)
return Vars.ellHat*Vars.v**3/Cons.M + Vars.nHat*Vars.v**6*(a_ell_0 + Vars.v**2*(a_ell_2 + a_ell_4*Vars.v**2))*(gamma_PN_0 + Vars.v**2*(gamma_PN_2 + Vars.v*(gamma_PN_3 + Vars.v*(gamma_PN_4 + Vars.v*(gamma_PN_5 + gamma_PN_6*Vars.v)))))/Cons.M**3
@njit(cache=True)
def TaylorT1_3p0(Cons,Vars):
Flux = Vars.Fcal_coeff*(Cons.Fcal_0 + Vars.v**2*(Cons.Fcal_2 + Vars.v*(Cons.Fcal_3 + Vars.Fcal_SO_3 + Vars.v*(Cons.Fcal_4 + Vars.Fcal_SQ_4 + Vars.v*(Cons.Fcal_5 + Vars.Fcal_SO_5 + Vars.v*(Cons.Fcal_6 + Vars.Fcal_SO_6 + Cons.Fcal_lnv_6*Vars.logv))))))
dEdV = -0.5*Cons.M*Cons.nu*Vars.v*(2.0*Cons.E_0 + Vars.v**2*(4.0*Cons.E_2 + Vars.v*(5.0*Vars.E_SO_3 + Vars.v*(6.0*Cons.E_4 + 6.0*Vars.E_SQ_4 + Vars.v*(8.0*Cons.E_6*Vars.v + 7.0*Vars.E_SO_5)))))
Absorption = 0
dvdt_T1 = (-Absorption - Flux)/dEdV
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_3p0(Cons,Vars)[1:])
dydt[0] = dvdt_T1[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_3p0(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_3p0(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
@njit(cache=True)
def TaylorT4_3p0(Cons,Vars):
    dvdt_T4 = -2.0*Vars.Fcal_coeff*(1.0*-Cons.Fcal_0 + 1.0*0*Vars.v + 1.0*-Cons.Fcal_2*Vars.v**2 + 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**3 + 1.0*(-Cons.Fcal_4 - Vars.Fcal_SQ_4)*Vars.v**4 + 1.0*(-Cons.Fcal_5 - Vars.Fcal_SO_5)*Vars.v**5 + 1.0*(-Cons.Fcal_6 - Vars.Fcal_SO_6 - Cons.Fcal_lnv_6*Vars.logv)*Vars.v**6 + (0*(-1.0*-Cons.Fcal_0*Vars.v - 1.0*0*Vars.v**2 - 1.0*-Cons.Fcal_2*Vars.v**3 - 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**4 - 1.0*(-Cons.Fcal_4 - Vars.Fcal_SQ_4)*Vars.v**5 - 1.0*(-Cons.Fcal_5 - Vars.Fcal_SO_5)*Vars.v**6) + 4*Cons.E_2*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**2 - 1.0*0*Vars.v**3 - 1.0*-Cons.Fcal_2*Vars.v**4 - 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**5 - 1.0*(-Cons.Fcal_4 - Vars.Fcal_SQ_4)*Vars.v**6) + 5*Vars.E_SO_3*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**3 - 1.0*0*Vars.v**4 - 1.0*-Cons.Fcal_2*Vars.v**5 - 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**6) + (6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*(-1.0*-Cons.Fcal_0*Vars.v**4 - 1.0*0*Vars.v**5 - 1.0*-Cons.Fcal_2*Vars.v**6) + 7*Vars.E_SO_5*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**5 - 1.0*0*Vars.v**6) - 1.0*8*Cons.E_6*Cons.M*-Cons.Fcal_0*Vars.v**6 + (0*(0*(1.0*-Cons.Fcal_0*Vars.v**2 + 1.0*0*Vars.v**3 + 1.0*-Cons.Fcal_2*Vars.v**4 + 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**5 + 1.0*(-Cons.Fcal_4 - Vars.Fcal_SQ_4)*Vars.v**6) + 4*Cons.E_2*Cons.M*(2.0*-Cons.Fcal_0*Vars.v**3 + 2.0*0*Vars.v**4 + 2.0*-Cons.Fcal_2*Vars.v**5 + 2.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**6) + 5*Vars.E_SO_3*Cons.M*(2.0*-Cons.Fcal_0*Vars.v**4 + 2.0*0*Vars.v**5 + 2.0*-Cons.Fcal_2*Vars.v**6) + (6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*(2.0*-Cons.Fcal_0*Vars.v**5 + 2.0*0*Vars.v**6) + 2.0*7*Vars.E_SO_5*Cons.M*-Cons.Fcal_0*Vars.v**6) + 4*Cons.E_2*Cons.M*(4*Cons.E_2*Cons.M*(1.0*-Cons.Fcal_0*Vars.v**4 + 1.0*0*Vars.v**5 + 1.0*-Cons.Fcal_2*Vars.v**6) + 5*Vars.E_SO_3*Cons.M*(2.0*-Cons.Fcal_0*Vars.v**5 + 2.0*0*Vars.v**6) + 2.0*(6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*-Cons.Fcal_0*Vars.v**6) + 1.0*(5*Vars.E_SO_3*Cons.M)**2*-Cons.Fcal_0*Vars.v**6 + (0*(0*(0*(-1.0*-Cons.Fcal_0*Vars.v**3 - 1.0*0*Vars.v**4 - 1.0*-Cons.Fcal_2*Vars.v**5 - 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**6) + 4*Cons.E_2*Cons.M*(-3.0*-Cons.Fcal_0*Vars.v**4 - 3.0*0*Vars.v**5 - 3.0*-Cons.Fcal_2*Vars.v**6) + 5*Vars.E_SO_3*Cons.M*(-3.0*-Cons.Fcal_0*Vars.v**5 - 3.0*0*Vars.v**6) - 3.0*(6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*-Cons.Fcal_0*Vars.v**6) + 4*Cons.E_2*Cons.M*(4*Cons.E_2*Cons.M*(-3.0*-Cons.Fcal_0*Vars.v**5 - 3.0*0*Vars.v**6) - 6.0*5*Vars.E_SO_3*Cons.M*-Cons.Fcal_0*Vars.v**6)) - 1.0*(4*Cons.E_2*Cons.M)**3*-Cons.Fcal_0*Vars.v**6 + (0**2*(0*(0*(1.0*-Cons.Fcal_0*Vars.v**4 + 1.0*0*Vars.v**5 + 1.0*-Cons.Fcal_2*Vars.v**6) + 4*Cons.E_2*Cons.M*(4.0*-Cons.Fcal_0*Vars.v**5 + 4.0*0*Vars.v**6) + 4.0*5*Vars.E_SO_3*Cons.M*-Cons.Fcal_0*Vars.v**6) + 6.0*(4*Cons.E_2*Cons.M)**2*-Cons.Fcal_0*Vars.v**6) + (0**4*(0*(-1.0*-Cons.Fcal_0*Vars.v**5 - 1.0*0*Vars.v**6) - 5.0*4*Cons.E_2*Cons.M*-Cons.Fcal_0*Vars.v**6) + 1.0*0**6*-Cons.Fcal_0*Vars.v**6/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(Cons.nu*Vars.v*2*Cons.E_0*Cons.M)
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_3p0(Cons,Vars)[1:])
dydt[0] = dvdt_T4[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_3p0(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_3p0(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
@njit(cache=True)
def TaylorT5_3p0(Cons,Vars):
    dtdv = -0.5*Cons.nu*Vars.v*(1.0*2*Cons.E_0*Cons.M + 1.0*0*Vars.v + 1.0*4*Cons.E_2*Cons.M*Vars.v**2 + 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**3 + 1.0*(6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*Vars.v**4 + 1.0*7*Vars.E_SO_5*Cons.M*Vars.v**5 + 1.0*8*Cons.E_6*Cons.M*Vars.v**6 + (0*(-1.0*2*Cons.E_0*Cons.M*Vars.v - 1.0*0*Vars.v**2 - 1.0*4*Cons.E_2*Cons.M*Vars.v**3 - 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**4 - 1.0*(6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*Vars.v**5 - 1.0*7*Vars.E_SO_5*Cons.M*Vars.v**6) + -Cons.Fcal_2*(-1.0*2*Cons.E_0*Cons.M*Vars.v**2 - 1.0*0*Vars.v**3 - 1.0*4*Cons.E_2*Cons.M*Vars.v**4 - 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**5 - 1.0*(6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*Vars.v**6) + (-Cons.Fcal_3 - Vars.Fcal_SO_3)*(-1.0*2*Cons.E_0*Cons.M*Vars.v**3 - 1.0*0*Vars.v**4 - 1.0*4*Cons.E_2*Cons.M*Vars.v**5 - 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**6) + (-Cons.Fcal_4 - Vars.Fcal_SQ_4)*(-1.0*2*Cons.E_0*Cons.M*Vars.v**4 - 1.0*0*Vars.v**5 - 1.0*4*Cons.E_2*Cons.M*Vars.v**6) + (-Cons.Fcal_5 - Vars.Fcal_SO_5)*(-1.0*2*Cons.E_0*Cons.M*Vars.v**5 - 1.0*0*Vars.v**6) - 1.0*(-Cons.Fcal_6 - Vars.Fcal_SO_6 - Cons.Fcal_lnv_6*Vars.logv)*2*Cons.E_0*Cons.M*Vars.v**6 + (0*(0*(1.0*2*Cons.E_0*Cons.M*Vars.v**2 + 1.0*0*Vars.v**3 + 1.0*4*Cons.E_2*Cons.M*Vars.v**4 + 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**5 + 1.0*(6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*Vars.v**6) + -Cons.Fcal_2*(2.0*2*Cons.E_0*Cons.M*Vars.v**3 + 2.0*0*Vars.v**4 + 2.0*4*Cons.E_2*Cons.M*Vars.v**5 + 2.0*5*Vars.E_SO_3*Cons.M*Vars.v**6) + (-Cons.Fcal_3 - Vars.Fcal_SO_3)*(2.0*2*Cons.E_0*Cons.M*Vars.v**4 + 2.0*0*Vars.v**5 + 2.0*4*Cons.E_2*Cons.M*Vars.v**6) + (-Cons.Fcal_4 - Vars.Fcal_SQ_4)*(2.0*2*Cons.E_0*Cons.M*Vars.v**5 + 2.0*0*Vars.v**6) + 2.0*(-Cons.Fcal_5 - Vars.Fcal_SO_5)*2*Cons.E_0*Cons.M*Vars.v**6) + -Cons.Fcal_2*(-Cons.Fcal_2*(1.0*2*Cons.E_0*Cons.M*Vars.v**4 + 1.0*0*Vars.v**5 + 1.0*4*Cons.E_2*Cons.M*Vars.v**6) + (-Cons.Fcal_3 - Vars.Fcal_SO_3)*(2.0*2*Cons.E_0*Cons.M*Vars.v**5 + 2.0*0*Vars.v**6) + 2.0*(-Cons.Fcal_4 - Vars.Fcal_SQ_4)*2*Cons.E_0*Cons.M*Vars.v**6) + 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)**2*2*Cons.E_0*Cons.M*Vars.v**6 + (0*(0*(0*(-1.0*2*Cons.E_0*Cons.M*Vars.v**3 - 1.0*0*Vars.v**4 - 1.0*4*Cons.E_2*Cons.M*Vars.v**5 - 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**6) + -Cons.Fcal_2*(-3.0*2*Cons.E_0*Cons.M*Vars.v**4 - 3.0*0*Vars.v**5 - 3.0*4*Cons.E_2*Cons.M*Vars.v**6) + (-Cons.Fcal_3 - Vars.Fcal_SO_3)*(-3.0*2*Cons.E_0*Cons.M*Vars.v**5 - 3.0*0*Vars.v**6) - 3.0*(-Cons.Fcal_4 - Vars.Fcal_SQ_4)*2*Cons.E_0*Cons.M*Vars.v**6) + -Cons.Fcal_2*(-Cons.Fcal_2*(-3.0*2*Cons.E_0*Cons.M*Vars.v**5 - 3.0*0*Vars.v**6) - 6.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*2*Cons.E_0*Cons.M*Vars.v**6)) - 1.0*(-Cons.Fcal_2)**3*2*Cons.E_0*Cons.M*Vars.v**6 + (0**2*(0*(0*(1.0*2*Cons.E_0*Cons.M*Vars.v**4 + 1.0*0*Vars.v**5 + 1.0*4*Cons.E_2*Cons.M*Vars.v**6) + -Cons.Fcal_2*(4.0*2*Cons.E_0*Cons.M*Vars.v**5 + 4.0*0*Vars.v**6) + 4.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*2*Cons.E_0*Cons.M*Vars.v**6) + 6.0*(-Cons.Fcal_2)**2*2*Cons.E_0*Cons.M*Vars.v**6) + (0**4*(0*(-1.0*2*Cons.E_0*Cons.M*Vars.v**5 - 1.0*0*Vars.v**6) - 5.0*-Cons.Fcal_2*2*Cons.E_0*Cons.M*Vars.v**6) + 1.0*0**6*2*Cons.E_0*Cons.M*Vars.v**6/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/(Vars.Fcal_coeff*-Cons.Fcal_0)
dvdt_T5 = 1.0/dtdv
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_3p0(Cons,Vars)[1:])
dydt[0] = dvdt_T5[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_3p0(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_3p0(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
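# -----------------------------------------------------------------------------
# 3.5 PN order: v**7 terms enter (Cons.Fcal_7, Vars.Fcal_SO_7, Vars.E_SO_7)
# along with the spin-dependent gamma_PN_7 coefficient.
# -----------------------------------------------------------------------------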
@njit(cache=True)
def Recalculate_3p5(Cons,Vars,y):
Vars.v = np.array([y[0]])
Vars.rfrak_chi1 = np.array([y[1],y[2]])
Vars.rfrak_chi2 = np.array([y[3],y[4]])
Vars.rfrak_frame = np.array([y[5],y[6],y[7]])
Vars.R = exp(Vars.rfrak_frame[0]*Cons.xHat + Vars.rfrak_frame[1]*Cons.yHat + Vars.rfrak_frame[2]*Cons.zHat)
Vars.nHat = mul(mul(Vars.R,Cons.xHat),conjugate(Vars.R))
Vars.lambdaHat = mul(mul(Vars.R,Cons.yHat),conjugate(Vars.R))
Vars.ellHat = mul(mul(Vars.R,Cons.zHat),conjugate(Vars.R))
Vars.R_S1 = exp(Vars.rfrak_chi1[0]*Cons.xHat + Vars.rfrak_chi1[1]*Cons.yHat)
Vars.R_S2 = exp(Vars.rfrak_chi2[0]*Cons.xHat + Vars.rfrak_chi2[1]*Cons.yHat)
Vars.chiVec1 = mul(mul(mul(Cons.S_chi1,Vars.R_S1),Cons.zHat),mul(conjugate(Vars.R_S1),conjugate(Cons.S_chi1)))
Vars.chiVec2 = mul(mul(mul(Cons.S_chi2,Vars.R_S2),Cons.zHat),mul(conjugate(Vars.R_S2),conjugate(Cons.S_chi2)))
Vars.chi1_n = np.array([dot(Vars.chiVec1[1:],Vars.nHat[1:])])
Vars.chi1_lambda = np.array([dot(Vars.chiVec1[1:],Vars.lambdaHat[1:])])
Vars.chi1_ell = np.array([dot(Vars.chiVec1[1:],Vars.ellHat[1:])])
Vars.chi2_n = np.array([dot(Vars.chiVec2[1:],Vars.nHat[1:])])
Vars.chi2_lambda = np.array([dot(Vars.chiVec2[1:],Vars.lambdaHat[1:])])
Vars.chi2_ell = np.array([dot(Vars.chiVec2[1:],Vars.ellHat[1:])])
Vars.S_ell = Cons.M1**2*Vars.chi1_ell + Cons.M2**2*Vars.chi2_ell
Vars.S_n = Cons.M1**2*Vars.chi1_n + Cons.M2**2*Vars.chi2_n
Vars.S_lambda = Cons.M1**2*Vars.chi1_lambda + Cons.M2**2*Vars.chi2_lambda
Vars.Sigma_ell = Cons.M*(-Cons.M1*Vars.chi1_ell + Cons.M2*Vars.chi2_ell)
Vars.Sigma_n = Cons.M*(-Cons.M1*Vars.chi1_n + Cons.M2*Vars.chi2_n)
Vars.Sigma_lambda = Cons.M*(-Cons.M1*Vars.chi1_lambda + Cons.M2*Vars.chi2_lambda)
Vars.chi_s_ell = Vars.chi1_ell/2 + Vars.chi2_ell/2
Vars.chi_a_ell = Vars.chi1_ell/2 - Vars.chi2_ell/2
Vars.logv = log(Vars.v)
Vars.Fcal_coeff = 32*Cons.nu**2*Vars.v**10/5
Vars.Fcal_SQ_4 = Cons.chi1chi1*(-89*Cons.delta/192 + 89*Cons.nu/96 - 89/192) - 103*Cons.chi1chi2*Cons.nu/48 + Cons.chi2chi2*(89*Cons.delta/192 + 89*Cons.nu/96 - 89/192) + Vars.chi_a_ell*(Vars.chi_a_ell*(287/96 - 12*Cons.nu) + 287*Vars.chi_s_ell*Cons.delta/48) + Vars.chi_s_ell**2*(Cons.nu/24 + 287/96)
Vars.Fcal_SO_3 = (-4*Vars.S_ell - 5*Vars.Sigma_ell*Cons.delta/4)/Cons.M**2
Vars.Fcal_SO_5 = (Vars.S_ell*(272*Cons.nu/9 - 9/2) + Vars.Sigma_ell*Cons.delta*(43*Cons.nu/4 - 13/16))/Cons.M**2
Vars.Fcal_SO_6 = (-16*Vars.S_ell*pi - 31*Vars.Sigma_ell*Cons.delta*pi/6)/Cons.M**2
Vars.Fcal_SO_7 = (Vars.S_ell*(-2810*Cons.nu**2/27 + 6172*Cons.nu/189 + 476645/6804) + Vars.Sigma_ell*Cons.delta*(-1501*Cons.nu**2/36 + 1849*Cons.nu/126 + 9535/336))/Cons.M**2
Vars.E_SQ_4 = -3*Vars.chi_a_ell**2/2 - 3*Vars.chi_s_ell**2/2 - Cons.delta*(Cons.chi2chi2/2 + 3*Vars.chi_a_ell*Vars.chi_s_ell) + Cons.nu*(Cons.chi1chi2 + 6*Vars.chi_a_ell**2) + (Cons.chi1chi1 + Cons.chi2chi2)*(Cons.delta - 2*Cons.nu + 1)/4
Vars.E_SO_3 = (14*Vars.S_ell/3 + 2*Vars.Sigma_ell*Cons.delta)/Cons.M**2
Vars.E_SO_5 = (Vars.S_ell*(11 - 61*Cons.nu/9) + Vars.Sigma_ell*Cons.delta*(3 - 10*Cons.nu/3))/Cons.M**2
Vars.E_SO_7 = (Vars.S_ell*(29*Cons.nu**2/12 - 367*Cons.nu/4 + 135/4) + Vars.Sigma_ell*Cons.delta*(5*Cons.nu**2/4 - 39*Cons.nu + 27/4))/Cons.M**2
@njit
def OmegaVec_chiVec_1_3p5(Cons,Vars):
Omega1_coeff = Vars.v**5/Cons.M
return Omega1_coeff*(Vars.ellHat*(-0.75*Cons.delta + 0.5*Cons.nu + Vars.v**2*(Cons.delta*(0.625*Cons.nu - 0.5625) + Cons.nu*(1.25 - 0.0416666666666667*Cons.nu) + Vars.v**2*(Cons.delta*(Cons.nu*(4.875 - 0.15625*Cons.nu) - 0.84375) + Cons.nu*(Cons.nu*(-0.0208333333333333*Cons.nu - 3.28125) + 0.1875) + 0.84375) + 0.5625) + 0.75) + Vars.nHat*Vars.v*(3.0*Vars.chi1_n*Cons.nu + 3.0*Cons.M2**2*Vars.chi2_n/Cons.M**2) - Cons.M2**2*Vars.chiVec2*Vars.v/Cons.M**2)
@njit
def OmegaVec_chiVec_2_3p5(Cons,Vars):
Omega2_coeff = Vars.v**5/Cons.M
return Omega2_coeff*(Vars.ellHat*(0.75*Cons.delta + 0.5*Cons.nu + Vars.v**2*(Cons.delta*(0.5625 - 0.625*Cons.nu) + Cons.nu*(1.25 - 0.0416666666666667*Cons.nu) + Vars.v**2*(Cons.delta*(Cons.nu*(0.15625*Cons.nu - 4.875) + 0.84375) + Cons.nu*(Cons.nu*(-0.0208333333333333*Cons.nu - 3.28125) + 0.1875) + 0.84375) + 0.5625) + 0.75) + Vars.nHat*Vars.v*(3.0*Vars.chi2_n*Cons.nu + 3.0*Cons.M1**2*Vars.chi1_n/Cons.M**2) - Cons.M1**2*Vars.chiVec1*Vars.v/Cons.M**2)
@njit
def OmegaVec_3p5(Cons,Vars):
gamma_PN_7 = (Vars.S_ell*(-6.0*Cons.nu**2 - 10.5833333333333*Cons.nu + 5.0) - 2.66666666666667*Vars.Sigma_ell*Cons.delta*Cons.nu**2 + Vars.Sigma_ell*Cons.delta*(3.0 - 10.1666666666667*Cons.nu))/Cons.M**2
a_ell_0 = 7.0*Vars.S_n + 3.0*Vars.Sigma_n*Cons.delta
gamma_PN_2 = 1.0 - 0.333333333333333*Cons.nu
a_ell_4 = Vars.S_n*(5.77777777777778*Cons.nu**2 + 14.75*Cons.nu + 1.5) + Vars.Sigma_n*Cons.delta*(2.83333333333333*Cons.nu**2 + 9.125*Cons.nu + 1.5)
gamma_PN_3 = (1.66666666666667*Vars.S_ell + Vars.Sigma_ell*Cons.delta)/Cons.M**2
gamma_PN_6 = 0.0123456790123457*Cons.nu**3 + 6.36111111111111*Cons.nu**2 - 2.98177812235564*Cons.nu + 1.0
gamma_PN_0 = 1.00000000000000
gamma_PN_5 = (Vars.S_ell*(0.888888888888889*Cons.nu + 3.33333333333333) + 2.0*Vars.Sigma_ell*Cons.delta)/Cons.M**2
gamma_PN_4 = 1.0 - 5.41666666666667*Cons.nu
a_ell_2 = Vars.S_n*(-9.66666666666667*Cons.nu - 10.0) + Vars.Sigma_n*Cons.delta*(-4.5*Cons.nu - 6.0)
return Vars.ellHat*Vars.v**3/Cons.M + Vars.nHat*Vars.v**6*(a_ell_0 + Vars.v**2*(a_ell_2 + a_ell_4*Vars.v**2))*(gamma_PN_0 + Vars.v**2*(gamma_PN_2 + Vars.v*(gamma_PN_3 + Vars.v*(gamma_PN_4 + Vars.v*(gamma_PN_5 + Vars.v*(gamma_PN_6 + gamma_PN_7*Vars.v))))))/Cons.M**3
@njit(cache=True)
def TaylorT1_3p5(Cons,Vars):
Flux = Vars.Fcal_coeff*(Cons.Fcal_0 + Vars.v**2*(Cons.Fcal_2 + Vars.v*(Cons.Fcal_3 + Vars.Fcal_SO_3 + Vars.v*(Cons.Fcal_4 + Vars.Fcal_SQ_4 + Vars.v*(Cons.Fcal_5 + Vars.Fcal_SO_5 + Vars.v*(Cons.Fcal_6 + Vars.Fcal_SO_6 + Cons.Fcal_lnv_6*Vars.logv + Vars.v*(Cons.Fcal_7 + Vars.Fcal_SO_7)))))))
dEdV = -0.5*Cons.M*Cons.nu*Vars.v*(2.0*Cons.E_0 + Vars.v**2*(4.0*Cons.E_2 + Vars.v*(5.0*Vars.E_SO_3 + Vars.v*(6.0*Cons.E_4 + 6.0*Vars.E_SQ_4 + Vars.v*(7.0*Vars.E_SO_5 + Vars.v*(8.0*Cons.E_6 + 9.0*Vars.E_SO_7*Vars.v))))))
Absorption = 0
dvdt_T1 = (-Absorption - Flux)/dEdV
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_3p5(Cons,Vars)[1:])
dydt[0] = dvdt_T1[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_3p5(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_3p5(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
@njit(cache=True)
def TaylorT4_3p5(Cons,Vars):
    dvdt_T4 = -2.0*Vars.Fcal_coeff*(1.0*-Cons.Fcal_0 + 1.0*0*Vars.v + 1.0*-Cons.Fcal_2*Vars.v**2 + 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**3 + 1.0*(-Cons.Fcal_4 - Vars.Fcal_SQ_4)*Vars.v**4 + 1.0*(-Cons.Fcal_5 - Vars.Fcal_SO_5)*Vars.v**5 + 1.0*(-Cons.Fcal_6 - Vars.Fcal_SO_6 - Cons.Fcal_lnv_6*Vars.logv)*Vars.v**6 + 1.0*(-Cons.Fcal_7 - Vars.Fcal_SO_7)*Vars.v**7 + (0*(-1.0*-Cons.Fcal_0*Vars.v - 1.0*0*Vars.v**2 - 1.0*-Cons.Fcal_2*Vars.v**3 - 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**4 - 1.0*(-Cons.Fcal_4 - Vars.Fcal_SQ_4)*Vars.v**5 - 1.0*(-Cons.Fcal_5 - Vars.Fcal_SO_5)*Vars.v**6 - 1.0*(-Cons.Fcal_6 - Vars.Fcal_SO_6 - Cons.Fcal_lnv_6*Vars.logv)*Vars.v**7) + 4*Cons.E_2*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**2 - 1.0*0*Vars.v**3 - 1.0*-Cons.Fcal_2*Vars.v**4 - 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**5 - 1.0*(-Cons.Fcal_4 - Vars.Fcal_SQ_4)*Vars.v**6 - 1.0*(-Cons.Fcal_5 - Vars.Fcal_SO_5)*Vars.v**7) + 5*Vars.E_SO_3*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**3 - 1.0*0*Vars.v**4 - 1.0*-Cons.Fcal_2*Vars.v**5 - 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**6 - 1.0*(-Cons.Fcal_4 - Vars.Fcal_SQ_4)*Vars.v**7) + (6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*(-1.0*-Cons.Fcal_0*Vars.v**4 - 1.0*0*Vars.v**5 - 1.0*-Cons.Fcal_2*Vars.v**6 - 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**7) + 7*Vars.E_SO_5*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**5 - 1.0*0*Vars.v**6 - 1.0*-Cons.Fcal_2*Vars.v**7) + 8*Cons.E_6*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**6 - 1.0*0*Vars.v**7) - 1.0*9*Vars.E_SO_7*Cons.M*-Cons.Fcal_0*Vars.v**7 + (0*(0*(1.0*-Cons.Fcal_0*Vars.v**2 + 1.0*0*Vars.v**3 + 1.0*-Cons.Fcal_2*Vars.v**4 + 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**5 + 1.0*(-Cons.Fcal_4 - Vars.Fcal_SQ_4)*Vars.v**6 + 1.0*(-Cons.Fcal_5 - Vars.Fcal_SO_5)*Vars.v**7) + 4*Cons.E_2*Cons.M*(2.0*-Cons.Fcal_0*Vars.v**3 + 2.0*0*Vars.v**4 + 2.0*-Cons.Fcal_2*Vars.v**5 + 2.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**6 + 2.0*(-Cons.Fcal_4 - Vars.Fcal_SQ_4)*Vars.v**7) + 5*Vars.E_SO_3*Cons.M*(2.0*-Cons.Fcal_0*Vars.v**4 + 2.0*0*Vars.v**5 + 2.0*-Cons.Fcal_2*Vars.v**6 + 2.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**7) + (6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*(2.0*-Cons.Fcal_0*Vars.v**5 + 2.0*0*Vars.v**6 + 2.0*-Cons.Fcal_2*Vars.v**7) + 7*Vars.E_SO_5*Cons.M*(2.0*-Cons.Fcal_0*Vars.v**6 + 2.0*0*Vars.v**7) + 2.0*8*Cons.E_6*Cons.M*-Cons.Fcal_0*Vars.v**7) + 4*Cons.E_2*Cons.M*(4*Cons.E_2*Cons.M*(1.0*-Cons.Fcal_0*Vars.v**4 + 1.0*0*Vars.v**5 + 1.0*-Cons.Fcal_2*Vars.v**6 + 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**7) + 5*Vars.E_SO_3*Cons.M*(2.0*-Cons.Fcal_0*Vars.v**5 + 2.0*0*Vars.v**6 + 2.0*-Cons.Fcal_2*Vars.v**7) + (6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*(2.0*-Cons.Fcal_0*Vars.v**6 + 2.0*0*Vars.v**7) + 2.0*7*Vars.E_SO_5*Cons.M*-Cons.Fcal_0*Vars.v**7) + 5*Vars.E_SO_3*Cons.M*(5*Vars.E_SO_3*Cons.M*(1.0*-Cons.Fcal_0*Vars.v**6 + 1.0*0*Vars.v**7) + 2.0*(6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*-Cons.Fcal_0*Vars.v**7) + (0*(0*(0*(-1.0*-Cons.Fcal_0*Vars.v**3 - 1.0*0*Vars.v**4 - 1.0*-Cons.Fcal_2*Vars.v**5 - 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**6 - 1.0*(-Cons.Fcal_4 - Vars.Fcal_SQ_4)*Vars.v**7) + 4*Cons.E_2*Cons.M*(-3.0*-Cons.Fcal_0*Vars.v**4 - 3.0*0*Vars.v**5 - 3.0*-Cons.Fcal_2*Vars.v**6 - 3.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**7) + 5*Vars.E_SO_3*Cons.M*(-3.0*-Cons.Fcal_0*Vars.v**5 - 3.0*0*Vars.v**6 - 3.0*-Cons.Fcal_2*Vars.v**7) + (6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*(-3.0*-Cons.Fcal_0*Vars.v**6 - 3.0*0*Vars.v**7) - 3.0*7*Vars.E_SO_5*Cons.M*-Cons.Fcal_0*Vars.v**7) + 4*Cons.E_2*Cons.M*(4*Cons.E_2*Cons.M*(-3.0*-Cons.Fcal_0*Vars.v**5 - 3.0*0*Vars.v**6 - 3.0*-Cons.Fcal_2*Vars.v**7) + 5*Vars.E_SO_3*Cons.M*(-6.0*-Cons.Fcal_0*Vars.v**6 - 6.0*0*Vars.v**7) - 6.0*(6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*-Cons.Fcal_0*Vars.v**7) - 3.0*(5*Vars.E_SO_3*Cons.M)**2*-Cons.Fcal_0*Vars.v**7) + (4*Cons.E_2*Cons.M)**2*(4*Cons.E_2*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**6 - 1.0*0*Vars.v**7) - 3.0*5*Vars.E_SO_3*Cons.M*-Cons.Fcal_0*Vars.v**7) + (0*(0*(0*(0*(1.0*-Cons.Fcal_0*Vars.v**4 + 1.0*0*Vars.v**5 + 1.0*-Cons.Fcal_2*Vars.v**6 + 1.0*(-Cons.Fcal_3 - Vars.Fcal_SO_3)*Vars.v**7) + 4*Cons.E_2*Cons.M*(4.0*-Cons.Fcal_0*Vars.v**5 + 4.0*0*Vars.v**6 + 4.0*-Cons.Fcal_2*Vars.v**7) + 5*Vars.E_SO_3*Cons.M*(4.0*-Cons.Fcal_0*Vars.v**6 + 4.0*0*Vars.v**7) + 4.0*(6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M)*-Cons.Fcal_0*Vars.v**7) + 4*Cons.E_2*Cons.M*(4*Cons.E_2*Cons.M*(6.0*-Cons.Fcal_0*Vars.v**6 + 6.0*0*Vars.v**7) + 12.0*5*Vars.E_SO_3*Cons.M*-Cons.Fcal_0*Vars.v**7)) + 4.0*(4*Cons.E_2*Cons.M)**3*-Cons.Fcal_0*Vars.v**7) + (0**3*(0*(0*(-1.0*-Cons.Fcal_0*Vars.v**5 - 1.0*0*Vars.v**6 - 1.0*-Cons.Fcal_2*Vars.v**7) + 4*Cons.E_2*Cons.M*(-5.0*-Cons.Fcal_0*Vars.v**6 - 5.0*0*Vars.v**7) - 5.0*5*Vars.E_SO_3*Cons.M*-Cons.Fcal_0*Vars.v**7) - 10.0*(4*Cons.E_2*Cons.M)**2*-Cons.Fcal_0*Vars.v**7) + (0**5*(0*(1.0*-Cons.Fcal_0*Vars.v**6 + 1.0*0*Vars.v**7) + 6.0*4*Cons.E_2*Cons.M*-Cons.Fcal_0*Vars.v**7) - 1.0*0**7*-Cons.Fcal_0*Vars.v**7/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(2*Cons.E_0*Cons.M))/(Cons.nu*Vars.v*2*Cons.E_0*Cons.M)
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_3p5(Cons,Vars)[1:])
dydt[0] = dvdt_T4[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_3p5(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_3p5(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
@njit(cache=True)
def TaylorT5_3p5(Cons,Vars):
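    # TaylorT5 approximant: expand dt/dv = -(dE/dv)/Flux(v) as a truncated
    # series in v (the long expression below), then take dv/dt = 1/(dt/dv).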
dtdv = -0.5*Cons.nu*Vars.v*(1.0*2*Cons.E_0*Cons.M + 1.0*0*Vars.v + 1.0*4*Cons.E_2*Cons.M*Vars.v**2 + 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**3 + 1.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*Vars.v**4 + 1.0*7*Vars.E_SO_5*Cons.M*Vars.v**5 + 1.0*8*Cons.E_6*Cons.M*Vars.v**6 + 1.0*9*Vars.E_SO_7*Cons.M*Vars.v**7 + (0*(-1.0*2*Cons.E_0*Cons.M*Vars.v - 1.0*0*Vars.v**2 - 1.0*4*Cons.E_2*Cons.M*Vars.v**3 - 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**4 - 1.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*Vars.v**5 - 1.0*7*Vars.E_SO_5*Cons.M*Vars.v**6 - 1.0*8*Cons.E_6*Cons.M*Vars.v**7) + -Cons.Fcal_2*(-1.0*2*Cons.E_0*Cons.M*Vars.v**2 - 1.0*0*Vars.v**3 - 1.0*4*Cons.E_2*Cons.M*Vars.v**4 - 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**5 - 1.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*Vars.v**6 - 1.0*7*Vars.E_SO_5*Cons.M*Vars.v**7) + -Cons.Fcal_3 - Vars.Fcal_SO_3*(-1.0*2*Cons.E_0*Cons.M*Vars.v**3 - 1.0*0*Vars.v**4 - 1.0*4*Cons.E_2*Cons.M*Vars.v**5 - 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**6 - 1.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*Vars.v**7) + -Cons.Fcal_4 - Vars.Fcal_SQ_4*(-1.0*2*Cons.E_0*Cons.M*Vars.v**4 - 1.0*0*Vars.v**5 - 1.0*4*Cons.E_2*Cons.M*Vars.v**6 - 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**7) + -Cons.Fcal_5 - Vars.Fcal_SO_5*(-1.0*2*Cons.E_0*Cons.M*Vars.v**5 - 1.0*0*Vars.v**6 - 1.0*4*Cons.E_2*Cons.M*Vars.v**7) + -Cons.Fcal_6 - Vars.Fcal_SO_6 - Cons.Fcal_lnv_6*Vars.logv*(-1.0*2*Cons.E_0*Cons.M*Vars.v**6 - 1.0*0*Vars.v**7) - 1.0*-Cons.Fcal_7 - Vars.Fcal_SO_7*2*Cons.E_0*Cons.M*Vars.v**7 + (0*(0*(1.0*2*Cons.E_0*Cons.M*Vars.v**2 + 1.0*0*Vars.v**3 + 1.0*4*Cons.E_2*Cons.M*Vars.v**4 + 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**5 + 1.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*Vars.v**6 + 1.0*7*Vars.E_SO_5*Cons.M*Vars.v**7) + -Cons.Fcal_2*(2.0*2*Cons.E_0*Cons.M*Vars.v**3 + 2.0*0*Vars.v**4 + 2.0*4*Cons.E_2*Cons.M*Vars.v**5 + 2.0*5*Vars.E_SO_3*Cons.M*Vars.v**6 + 2.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*Vars.v**7) + -Cons.Fcal_3 - Vars.Fcal_SO_3*(2.0*2*Cons.E_0*Cons.M*Vars.v**4 + 2.0*0*Vars.v**5 + 2.0*4*Cons.E_2*Cons.M*Vars.v**6 + 2.0*5*Vars.E_SO_3*Cons.M*Vars.v**7) + -Cons.Fcal_4 - Vars.Fcal_SQ_4*(2.0*2*Cons.E_0*Cons.M*Vars.v**5 + 2.0*0*Vars.v**6 + 2.0*4*Cons.E_2*Cons.M*Vars.v**7) + -Cons.Fcal_5 - Vars.Fcal_SO_5*(2.0*2*Cons.E_0*Cons.M*Vars.v**6 + 2.0*0*Vars.v**7) + 2.0*-Cons.Fcal_6 - Vars.Fcal_SO_6 - Cons.Fcal_lnv_6*Vars.logv*2*Cons.E_0*Cons.M*Vars.v**7) + -Cons.Fcal_2*(-Cons.Fcal_2*(1.0*2*Cons.E_0*Cons.M*Vars.v**4 + 1.0*0*Vars.v**5 + 1.0*4*Cons.E_2*Cons.M*Vars.v**6 + 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**7) + -Cons.Fcal_3 - Vars.Fcal_SO_3*(2.0*2*Cons.E_0*Cons.M*Vars.v**5 + 2.0*0*Vars.v**6 + 2.0*4*Cons.E_2*Cons.M*Vars.v**7) + -Cons.Fcal_4 - Vars.Fcal_SQ_4*(2.0*2*Cons.E_0*Cons.M*Vars.v**6 + 2.0*0*Vars.v**7) + 2.0*-Cons.Fcal_5 - Vars.Fcal_SO_5*2*Cons.E_0*Cons.M*Vars.v**7) + -Cons.Fcal_3 - Vars.Fcal_SO_3*(-Cons.Fcal_3 - Vars.Fcal_SO_3*(1.0*2*Cons.E_0*Cons.M*Vars.v**6 + 1.0*0*Vars.v**7) + 2.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4*2*Cons.E_0*Cons.M*Vars.v**7) + (0*(0*(0*(-1.0*2*Cons.E_0*Cons.M*Vars.v**3 - 1.0*0*Vars.v**4 - 1.0*4*Cons.E_2*Cons.M*Vars.v**5 - 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**6 - 1.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*Vars.v**7) + -Cons.Fcal_2*(-3.0*2*Cons.E_0*Cons.M*Vars.v**4 - 3.0*0*Vars.v**5 - 3.0*4*Cons.E_2*Cons.M*Vars.v**6 - 3.0*5*Vars.E_SO_3*Cons.M*Vars.v**7) + -Cons.Fcal_3 - Vars.Fcal_SO_3*(-3.0*2*Cons.E_0*Cons.M*Vars.v**5 - 3.0*0*Vars.v**6 - 3.0*4*Cons.E_2*Cons.M*Vars.v**7) + -Cons.Fcal_4 - Vars.Fcal_SQ_4*(-3.0*2*Cons.E_0*Cons.M*Vars.v**6 - 3.0*0*Vars.v**7) - 3.0*-Cons.Fcal_5 - Vars.Fcal_SO_5*2*Cons.E_0*Cons.M*Vars.v**7) + 
-Cons.Fcal_2*(-Cons.Fcal_2*(-3.0*2*Cons.E_0*Cons.M*Vars.v**5 - 3.0*0*Vars.v**6 - 3.0*4*Cons.E_2*Cons.M*Vars.v**7) + -Cons.Fcal_3 - Vars.Fcal_SO_3*(-6.0*2*Cons.E_0*Cons.M*Vars.v**6 - 6.0*0*Vars.v**7) - 6.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4*2*Cons.E_0*Cons.M*Vars.v**7) - 3.0*-Cons.Fcal_3 - Vars.Fcal_SO_3**2*2*Cons.E_0*Cons.M*Vars.v**7) + -Cons.Fcal_2**2*(-Cons.Fcal_2*(-1.0*2*Cons.E_0*Cons.M*Vars.v**6 - 1.0*0*Vars.v**7) - 3.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*2*Cons.E_0*Cons.M*Vars.v**7) + (0*(0*(0*(0*(1.0*2*Cons.E_0*Cons.M*Vars.v**4 + 1.0*0*Vars.v**5 + 1.0*4*Cons.E_2*Cons.M*Vars.v**6 + 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**7) + -Cons.Fcal_2*(4.0*2*Cons.E_0*Cons.M*Vars.v**5 + 4.0*0*Vars.v**6 + 4.0*4*Cons.E_2*Cons.M*Vars.v**7) + -Cons.Fcal_3 - Vars.Fcal_SO_3*(4.0*2*Cons.E_0*Cons.M*Vars.v**6 + 4.0*0*Vars.v**7) + 4.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4*2*Cons.E_0*Cons.M*Vars.v**7) + -Cons.Fcal_2*(-Cons.Fcal_2*(6.0*2*Cons.E_0*Cons.M*Vars.v**6 + 6.0*0*Vars.v**7) + 12.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*2*Cons.E_0*Cons.M*Vars.v**7)) + 4.0*-Cons.Fcal_2**3*2*Cons.E_0*Cons.M*Vars.v**7) + (0**3*(0*(0*(-1.0*2*Cons.E_0*Cons.M*Vars.v**5 - 1.0*0*Vars.v**6 - 1.0*4*Cons.E_2*Cons.M*Vars.v**7) + -Cons.Fcal_2*(-5.0*2*Cons.E_0*Cons.M*Vars.v**6 - 5.0*0*Vars.v**7) - 5.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*2*Cons.E_0*Cons.M*Vars.v**7) - 10.0*-Cons.Fcal_2**2*2*Cons.E_0*Cons.M*Vars.v**7) + (0**5*(0*(1.0*2*Cons.E_0*Cons.M*Vars.v**6 + 1.0*0*Vars.v**7) + 6.0*-Cons.Fcal_2*2*Cons.E_0*Cons.M*Vars.v**7) - 1.0*0**7*2*Cons.E_0*Cons.M*Vars.v**7/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/(Vars.Fcal_coeff*-Cons.Fcal_0)
dvdt_T5 = 1.0/dtdv
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_3p5(Cons,Vars)[1:])
dydt[0] = dvdt_T5[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_3p5(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_3p5(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
@njit(cache=True)
def Recalculate_4p0(Cons,Vars,y):
Vars.v = np.array([y[0]])
Vars.rfrak_chi1 = np.array([y[1],y[2]])
Vars.rfrak_chi2 = np.array([y[3],y[4]])
Vars.rfrak_frame = np.array([y[5],y[6],y[7]])
Vars.R = exp(Vars.rfrak_frame[0]*Cons.xHat + Vars.rfrak_frame[1]*Cons.yHat + Vars.rfrak_frame[2]*Cons.zHat)
Vars.nHat = mul(mul(Vars.R,Cons.xHat),conjugate(Vars.R))
Vars.lambdaHat = mul(mul(Vars.R,Cons.yHat),conjugate(Vars.R))
Vars.ellHat = mul(mul(Vars.R,Cons.zHat),conjugate(Vars.R))
Vars.R_S1 = exp(Vars.rfrak_chi1[0]*Cons.xHat + Vars.rfrak_chi1[1]*Cons.yHat)
Vars.R_S2 = exp(Vars.rfrak_chi2[0]*Cons.xHat + Vars.rfrak_chi2[1]*Cons.yHat)
Vars.chiVec1 = mul(mul(mul(Cons.S_chi1,Vars.R_S1),Cons.zHat),mul(conjugate(Vars.R_S1),conjugate(Cons.S_chi1)))
Vars.chiVec2 = mul(mul(mul(Cons.S_chi2,Vars.R_S2),Cons.zHat),mul(conjugate(Vars.R_S2),conjugate(Cons.S_chi2)))
Vars.chi1_n = np.array([dot(Vars.chiVec1[1:],Vars.nHat[1:])])
Vars.chi1_lambda = np.array([dot(Vars.chiVec1[1:],Vars.lambdaHat[1:])])
Vars.chi1_ell = np.array([dot(Vars.chiVec1[1:],Vars.ellHat[1:])])
Vars.chi2_n = np.array([dot(Vars.chiVec2[1:],Vars.nHat[1:])])
Vars.chi2_lambda = np.array([dot(Vars.chiVec2[1:],Vars.lambdaHat[1:])])
Vars.chi2_ell = np.array([dot(Vars.chiVec2[1:],Vars.ellHat[1:])])
Vars.S_ell = Cons.M1**2*Vars.chi1_ell + Cons.M2**2*Vars.chi2_ell
Vars.S_n = Cons.M1**2*Vars.chi1_n + Cons.M2**2*Vars.chi2_n
Vars.S_lambda = Cons.M1**2*Vars.chi1_lambda + Cons.M2**2*Vars.chi2_lambda
Vars.Sigma_ell = Cons.M*(-Cons.M1*Vars.chi1_ell + Cons.M2*Vars.chi2_ell)
Vars.Sigma_n = Cons.M*(-Cons.M1*Vars.chi1_n + Cons.M2*Vars.chi2_n)
Vars.Sigma_lambda = Cons.M*(-Cons.M1*Vars.chi1_lambda + Cons.M2*Vars.chi2_lambda)
Vars.chi_s_ell = Vars.chi1_ell/2 + Vars.chi2_ell/2
Vars.chi_a_ell = Vars.chi1_ell/2 - Vars.chi2_ell/2
Vars.logv = log(Vars.v)
Vars.Fcal_coeff = 32*Cons.nu**2*Vars.v**10/5
Vars.Fcal_SQ_4 = Cons.chi1chi1*(-89*Cons.delta/192 + 89*Cons.nu/96 - 89/192) - 103*Cons.chi1chi2*Cons.nu/48 + Cons.chi2chi2*(89*Cons.delta/192 + 89*Cons.nu/96 - 89/192) + Vars.chi_a_ell*(Vars.chi_a_ell*(287/96 - 12*Cons.nu) + 287*Vars.chi_s_ell*Cons.delta/48) + Vars.chi_s_ell**2*(Cons.nu/24 + 287/96)
Vars.Fcal_SO_3 = (-4*Vars.S_ell - 5*Vars.Sigma_ell*Cons.delta/4)/Cons.M**2
Vars.Fcal_SO_5 = (Vars.S_ell*(272*Cons.nu/9 - 9/2) + Vars.Sigma_ell*Cons.delta*(43*Cons.nu/4 - 13/16))/Cons.M**2
Vars.Fcal_SO_6 = (-16*Vars.S_ell*pi - 31*Vars.Sigma_ell*Cons.delta*pi/6)/Cons.M**2
Vars.Fcal_SO_7 = (Vars.S_ell*(-2810*Cons.nu**2/27 + 6172*Cons.nu/189 + 476645/6804) + Vars.Sigma_ell*Cons.delta*(-1501*Cons.nu**2/36 + 1849*Cons.nu/126 + 9535/336))/Cons.M**2
Vars.Fcal_SO_8 = (Vars.S_ell*pi*(13879*Cons.nu/72 - 3485/96) + Vars.Sigma_ell*Cons.delta*pi*(130583*Cons.nu/2016 - 7163/672))/Cons.M**2
Vars.E_SQ_4 = -3*Vars.chi_a_ell**2/2 - 3*Vars.chi_s_ell**2/2 - Cons.delta*(Cons.chi2chi2/2 + 3*Vars.chi_a_ell*Vars.chi_s_ell) + Cons.nu*(Cons.chi1chi2 + 6*Vars.chi_a_ell**2) + (Cons.chi1chi1 + Cons.chi2chi2)*(Cons.delta - 2*Cons.nu + 1)/4
Vars.E_SO_3 = (14*Vars.S_ell/3 + 2*Vars.Sigma_ell*Cons.delta)/Cons.M**2
Vars.E_SO_5 = (Vars.S_ell*(11 - 61*Cons.nu/9) + Vars.Sigma_ell*Cons.delta*(3 - 10*Cons.nu/3))/Cons.M**2
Vars.E_SO_7 = (Vars.S_ell*(29*Cons.nu**2/12 - 367*Cons.nu/4 + 135/4) + Vars.Sigma_ell*Cons.delta*(5*Cons.nu**2/4 - 39*Cons.nu + 27/4))/Cons.M**2
@njit
def OmegaVec_chiVec_1_4p0(Cons,Vars):
Omega1_coeff = Vars.v**5/Cons.M
return Omega1_coeff*(Vars.ellHat*(-0.75*Cons.delta + 0.5*Cons.nu + Vars.v**2*(Cons.delta*(0.625*Cons.nu - 0.5625) + Cons.nu*(1.25 - 0.0416666666666667*Cons.nu) + Vars.v**2*(Cons.delta*(Cons.nu*(4.875 - 0.15625*Cons.nu) - 0.84375) + Cons.nu*(Cons.nu*(-0.0208333333333333*Cons.nu - 3.28125) + 0.1875) + 0.84375) + 0.5625) + 0.75) + Vars.nHat*Vars.v*(3.0*Vars.chi1_n*Cons.nu + 3.0*Cons.M2**2*Vars.chi2_n/Cons.M**2) - Cons.M2**2*Vars.chiVec2*Vars.v/Cons.M**2)
@njit
def OmegaVec_chiVec_2_4p0(Cons,Vars):
Omega2_coeff = Vars.v**5/Cons.M
return Omega2_coeff*(Vars.ellHat*(0.75*Cons.delta + 0.5*Cons.nu + Vars.v**2*(Cons.delta*(0.5625 - 0.625*Cons.nu) + Cons.nu*(1.25 - 0.0416666666666667*Cons.nu) + Vars.v**2*(Cons.delta*(Cons.nu*(0.15625*Cons.nu - 4.875) + 0.84375) + Cons.nu*(Cons.nu*(-0.0208333333333333*Cons.nu - 3.28125) + 0.1875) + 0.84375) + 0.5625) + 0.75) + Vars.nHat*Vars.v*(3.0*Vars.chi2_n*Cons.nu + 3.0*Cons.M1**2*Vars.chi1_n/Cons.M**2) - Cons.M1**2*Vars.chiVec1*Vars.v/Cons.M**2)
@njit
def OmegaVec_4p0(Cons,Vars):
gamma_PN_7 = (Vars.S_ell*(-6.0*Cons.nu**2 - 10.5833333333333*Cons.nu + 5.0) - 2.66666666666667*Vars.Sigma_ell*Cons.delta*Cons.nu**2 + Vars.Sigma_ell*Cons.delta*(3.0 - 10.1666666666667*Cons.nu))/Cons.M**2
a_ell_0 = 7.0*Vars.S_n + 3.0*Vars.Sigma_n*Cons.delta
gamma_PN_2 = 1.0 - 0.333333333333333*Cons.nu
a_ell_4 = Vars.S_n*(5.77777777777778*Cons.nu**2 + 14.75*Cons.nu + 1.5) + Vars.Sigma_n*Cons.delta*(2.83333333333333*Cons.nu**2 + 9.125*Cons.nu + 1.5)
gamma_PN_3 = (1.66666666666667*Vars.S_ell + Vars.Sigma_ell*Cons.delta)/Cons.M**2
gamma_PN_6 = 0.0123456790123457*Cons.nu**3 + 6.36111111111111*Cons.nu**2 - 2.98177812235564*Cons.nu + 1.0
gamma_PN_0 = 1.00000000000000
gamma_PN_5 = (Vars.S_ell*(0.888888888888889*Cons.nu + 3.33333333333333) + 2.0*Vars.Sigma_ell*Cons.delta)/Cons.M**2
gamma_PN_4 = 1.0 - 5.41666666666667*Cons.nu
a_ell_2 = Vars.S_n*(-9.66666666666667*Cons.nu - 10.0) + Vars.Sigma_n*Cons.delta*(-4.5*Cons.nu - 6.0)
return Vars.ellHat*Vars.v**3/Cons.M + Vars.nHat*Vars.v**6*(a_ell_0 + Vars.v**2*(a_ell_2 + a_ell_4*Vars.v**2))*(gamma_PN_0 + Vars.v**2*(gamma_PN_2 + Vars.v*(gamma_PN_3 + Vars.v*(gamma_PN_4 + Vars.v*(gamma_PN_5 + Vars.v*(gamma_PN_6 + gamma_PN_7*Vars.v))))))/Cons.M**3
@njit(cache=True)
def TaylorT1_4p0(Cons,Vars):
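    # TaylorT1 approximant: evaluate the flux and dE/dv directly and divide,
    # with no series re-expansion of the quotient.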
Flux = Vars.Fcal_coeff*(Cons.Fcal_0 + Vars.v**2*(Cons.Fcal_2 + Vars.v*(Cons.Fcal_3 + Vars.Fcal_SO_3 + Vars.v*(Cons.Fcal_4 + Vars.Fcal_SQ_4 + Vars.v*(Cons.Fcal_5 + Vars.Fcal_SO_5 + Vars.v*(Cons.Fcal_6 + Vars.Fcal_SO_6 + Cons.Fcal_lnv_6*Vars.logv + Vars.v*(Cons.Fcal_7 + Vars.Fcal_SO_7 + Vars.v*(Cons.Fcal_8 + Vars.Fcal_SO_8 + Cons.Fcal_lnv_8*Vars.logv))))))))
dEdV = -0.5*Cons.M*Cons.nu*Vars.v*(2.0*Cons.E_0 + Vars.v**2*(4.0*Cons.E_2 + Vars.v*(5.0*Vars.E_SO_3 + Vars.v*(6.0*Cons.E_4 + 6.0*Vars.E_SQ_4 + Vars.v*(7.0*Vars.E_SO_5 + Vars.v*(8.0*Cons.E_6 + Vars.v*(9.0*Vars.E_SO_7 + Vars.v*(10.0*Cons.E_8 + Cons.E_lnv_8*(10.0*Vars.logv + 1.0)))))))))
Absorption = 0
dvdt_T1 = (-Absorption - Flux)/dEdV
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_4p0(Cons,Vars)[1:])
dydt[0] = dvdt_T1[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_4p0(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_4p0(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
@njit(cache=True)
def TaylorT4_4p0(Cons,Vars):
dvdt_T4 = -2.0*Vars.Fcal_coeff*(1.0*-Cons.Fcal_0 + 1.0*0*Vars.v + 1.0*-Cons.Fcal_2*Vars.v**2 + 1.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*Vars.v**3 + 1.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4*Vars.v**4 + 1.0*-Cons.Fcal_5 - Vars.Fcal_SO_5*Vars.v**5 + 1.0*-Cons.Fcal_6 - Vars.Fcal_SO_6 - Cons.Fcal_lnv_6*Vars.logv*Vars.v**6 + 1.0*-Cons.Fcal_7 - Vars.Fcal_SO_7*Vars.v**7 + 1.0*-Cons.Fcal_8 - Vars.Fcal_SO_8 - Cons.Fcal_lnv_8*Vars.logv*Vars.v**8 + (0*(-1.0*-Cons.Fcal_0*Vars.v - 1.0*0*Vars.v**2 - 1.0*-Cons.Fcal_2*Vars.v**3 - 1.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*Vars.v**4 - 1.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4*Vars.v**5 - 1.0*-Cons.Fcal_5 - Vars.Fcal_SO_5*Vars.v**6 - 1.0*-Cons.Fcal_6 - Vars.Fcal_SO_6 - Cons.Fcal_lnv_6*Vars.logv*Vars.v**7 - 1.0*-Cons.Fcal_7 - Vars.Fcal_SO_7*Vars.v**8) + 4*Cons.E_2*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**2 - 1.0*0*Vars.v**3 - 1.0*-Cons.Fcal_2*Vars.v**4 - 1.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*Vars.v**5 - 1.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4*Vars.v**6 - 1.0*-Cons.Fcal_5 - Vars.Fcal_SO_5*Vars.v**7 - 1.0*-Cons.Fcal_6 - Vars.Fcal_SO_6 - Cons.Fcal_lnv_6*Vars.logv*Vars.v**8) + 5*Vars.E_SO_3*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**3 - 1.0*0*Vars.v**4 - 1.0*-Cons.Fcal_2*Vars.v**5 - 1.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*Vars.v**6 - 1.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4*Vars.v**7 - 1.0*-Cons.Fcal_5 - Vars.Fcal_SO_5*Vars.v**8) + 6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**4 - 1.0*0*Vars.v**5 - 1.0*-Cons.Fcal_2*Vars.v**6 - 1.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*Vars.v**7 - 1.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4*Vars.v**8) + 7*Vars.E_SO_5*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**5 - 1.0*0*Vars.v**6 - 1.0*-Cons.Fcal_2*Vars.v**7 - 1.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*Vars.v**8) + 8*Cons.E_6*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**6 - 1.0*0*Vars.v**7 - 1.0*-Cons.Fcal_2*Vars.v**8) + 9*Vars.E_SO_7*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**7 - 1.0*0*Vars.v**8) - 1.0*10*Cons.E_8*Cons.M + Cons.E_lnv_8*Cons.M*(10*Vars.logv + 1)*-Cons.Fcal_0*Vars.v**8 + (0*(0*(1.0*-Cons.Fcal_0*Vars.v**2 + 1.0*0*Vars.v**3 + 1.0*-Cons.Fcal_2*Vars.v**4 + 1.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*Vars.v**5 + 1.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4*Vars.v**6 + 1.0*-Cons.Fcal_5 - Vars.Fcal_SO_5*Vars.v**7 + 1.0*-Cons.Fcal_6 - Vars.Fcal_SO_6 - Cons.Fcal_lnv_6*Vars.logv*Vars.v**8) + 4*Cons.E_2*Cons.M*(2.0*-Cons.Fcal_0*Vars.v**3 + 2.0*0*Vars.v**4 + 2.0*-Cons.Fcal_2*Vars.v**5 + 2.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*Vars.v**6 + 2.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4*Vars.v**7 + 2.0*-Cons.Fcal_5 - Vars.Fcal_SO_5*Vars.v**8) + 5*Vars.E_SO_3*Cons.M*(2.0*-Cons.Fcal_0*Vars.v**4 + 2.0*0*Vars.v**5 + 2.0*-Cons.Fcal_2*Vars.v**6 + 2.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*Vars.v**7 + 2.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4*Vars.v**8) + 6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*(2.0*-Cons.Fcal_0*Vars.v**5 + 2.0*0*Vars.v**6 + 2.0*-Cons.Fcal_2*Vars.v**7 + 2.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*Vars.v**8) + 7*Vars.E_SO_5*Cons.M*(2.0*-Cons.Fcal_0*Vars.v**6 + 2.0*0*Vars.v**7 + 2.0*-Cons.Fcal_2*Vars.v**8) + 8*Cons.E_6*Cons.M*(2.0*-Cons.Fcal_0*Vars.v**7 + 2.0*0*Vars.v**8) + 2.0*9*Vars.E_SO_7*Cons.M*-Cons.Fcal_0*Vars.v**8) + 4*Cons.E_2*Cons.M*(4*Cons.E_2*Cons.M*(1.0*-Cons.Fcal_0*Vars.v**4 + 1.0*0*Vars.v**5 + 1.0*-Cons.Fcal_2*Vars.v**6 + 1.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*Vars.v**7 + 1.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4*Vars.v**8) + 5*Vars.E_SO_3*Cons.M*(2.0*-Cons.Fcal_0*Vars.v**5 + 2.0*0*Vars.v**6 + 2.0*-Cons.Fcal_2*Vars.v**7 + 2.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*Vars.v**8) + 6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*(2.0*-Cons.Fcal_0*Vars.v**6 + 2.0*0*Vars.v**7 + 2.0*-Cons.Fcal_2*Vars.v**8) + 7*Vars.E_SO_5*Cons.M*(2.0*-Cons.Fcal_0*Vars.v**7 + 
2.0*0*Vars.v**8) + 2.0*8*Cons.E_6*Cons.M*-Cons.Fcal_0*Vars.v**8) + 5*Vars.E_SO_3*Cons.M*(5*Vars.E_SO_3*Cons.M*(1.0*-Cons.Fcal_0*Vars.v**6 + 1.0*0*Vars.v**7 + 1.0*-Cons.Fcal_2*Vars.v**8) + 6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*(2.0*-Cons.Fcal_0*Vars.v**7 + 2.0*0*Vars.v**8) + 2.0*7*Vars.E_SO_5*Cons.M*-Cons.Fcal_0*Vars.v**8) + 1.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M**2*-Cons.Fcal_0*Vars.v**8 + (0*(0*(0*(-1.0*-Cons.Fcal_0*Vars.v**3 - 1.0*0*Vars.v**4 - 1.0*-Cons.Fcal_2*Vars.v**5 - 1.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*Vars.v**6 - 1.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4*Vars.v**7 - 1.0*-Cons.Fcal_5 - Vars.Fcal_SO_5*Vars.v**8) + 4*Cons.E_2*Cons.M*(-3.0*-Cons.Fcal_0*Vars.v**4 - 3.0*0*Vars.v**5 - 3.0*-Cons.Fcal_2*Vars.v**6 - 3.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*Vars.v**7 - 3.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4*Vars.v**8) + 5*Vars.E_SO_3*Cons.M*(-3.0*-Cons.Fcal_0*Vars.v**5 - 3.0*0*Vars.v**6 - 3.0*-Cons.Fcal_2*Vars.v**7 - 3.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*Vars.v**8) + 6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*(-3.0*-Cons.Fcal_0*Vars.v**6 - 3.0*0*Vars.v**7 - 3.0*-Cons.Fcal_2*Vars.v**8) + 7*Vars.E_SO_5*Cons.M*(-3.0*-Cons.Fcal_0*Vars.v**7 - 3.0*0*Vars.v**8) - 3.0*8*Cons.E_6*Cons.M*-Cons.Fcal_0*Vars.v**8) + 4*Cons.E_2*Cons.M*(4*Cons.E_2*Cons.M*(-3.0*-Cons.Fcal_0*Vars.v**5 - 3.0*0*Vars.v**6 - 3.0*-Cons.Fcal_2*Vars.v**7 - 3.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*Vars.v**8) + 5*Vars.E_SO_3*Cons.M*(-6.0*-Cons.Fcal_0*Vars.v**6 - 6.0*0*Vars.v**7 - 6.0*-Cons.Fcal_2*Vars.v**8) + 6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*(-6.0*-Cons.Fcal_0*Vars.v**7 - 6.0*0*Vars.v**8) - 6.0*7*Vars.E_SO_5*Cons.M*-Cons.Fcal_0*Vars.v**8) + 5*Vars.E_SO_3*Cons.M*(5*Vars.E_SO_3*Cons.M*(-3.0*-Cons.Fcal_0*Vars.v**7 - 3.0*0*Vars.v**8) - 6.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*-Cons.Fcal_0*Vars.v**8)) + 4*Cons.E_2*Cons.M*(4*Cons.E_2*Cons.M*(4*Cons.E_2*Cons.M*(-1.0*-Cons.Fcal_0*Vars.v**6 - 1.0*0*Vars.v**7 - 1.0*-Cons.Fcal_2*Vars.v**8) + 5*Vars.E_SO_3*Cons.M*(-3.0*-Cons.Fcal_0*Vars.v**7 - 3.0*0*Vars.v**8) - 3.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*-Cons.Fcal_0*Vars.v**8) - 3.0*5*Vars.E_SO_3*Cons.M**2*-Cons.Fcal_0*Vars.v**8) + (0*(0*(0*(0*(1.0*-Cons.Fcal_0*Vars.v**4 + 1.0*0*Vars.v**5 + 1.0*-Cons.Fcal_2*Vars.v**6 + 1.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*Vars.v**7 + 1.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4*Vars.v**8) + 4*Cons.E_2*Cons.M*(4.0*-Cons.Fcal_0*Vars.v**5 + 4.0*0*Vars.v**6 + 4.0*-Cons.Fcal_2*Vars.v**7 + 4.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*Vars.v**8) + 5*Vars.E_SO_3*Cons.M*(4.0*-Cons.Fcal_0*Vars.v**6 + 4.0*0*Vars.v**7 + 4.0*-Cons.Fcal_2*Vars.v**8) + 6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*(4.0*-Cons.Fcal_0*Vars.v**7 + 4.0*0*Vars.v**8) + 4.0*7*Vars.E_SO_5*Cons.M*-Cons.Fcal_0*Vars.v**8) + 4*Cons.E_2*Cons.M*(4*Cons.E_2*Cons.M*(6.0*-Cons.Fcal_0*Vars.v**6 + 6.0*0*Vars.v**7 + 6.0*-Cons.Fcal_2*Vars.v**8) + 5*Vars.E_SO_3*Cons.M*(12.0*-Cons.Fcal_0*Vars.v**7 + 12.0*0*Vars.v**8) + 12.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*-Cons.Fcal_0*Vars.v**8) + 6.0*5*Vars.E_SO_3*Cons.M**2*-Cons.Fcal_0*Vars.v**8) + 4*Cons.E_2*Cons.M**2*(4*Cons.E_2*Cons.M*(4.0*-Cons.Fcal_0*Vars.v**7 + 4.0*0*Vars.v**8) + 12.0*5*Vars.E_SO_3*Cons.M*-Cons.Fcal_0*Vars.v**8)) + 1.0*4*Cons.E_2*Cons.M**4*-Cons.Fcal_0*Vars.v**8 + (0**2*(0*(0*(0*(-1.0*-Cons.Fcal_0*Vars.v**5 - 1.0*0*Vars.v**6 - 1.0*-Cons.Fcal_2*Vars.v**7 - 1.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*Vars.v**8) + 4*Cons.E_2*Cons.M*(-5.0*-Cons.Fcal_0*Vars.v**6 - 5.0*0*Vars.v**7 - 5.0*-Cons.Fcal_2*Vars.v**8) + 5*Vars.E_SO_3*Cons.M*(-5.0*-Cons.Fcal_0*Vars.v**7 - 5.0*0*Vars.v**8) - 5.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*-Cons.Fcal_0*Vars.v**8) + 
4*Cons.E_2*Cons.M*(4*Cons.E_2*Cons.M*(-10.0*-Cons.Fcal_0*Vars.v**7 - 10.0*0*Vars.v**8) - 20.0*5*Vars.E_SO_3*Cons.M*-Cons.Fcal_0*Vars.v**8)) - 10.0*4*Cons.E_2*Cons.M**3*-Cons.Fcal_0*Vars.v**8) + (0**4*(0*(0*(1.0*-Cons.Fcal_0*Vars.v**6 + 1.0*0*Vars.v**7 + 1.0*-Cons.Fcal_2*Vars.v**8) + 4*Cons.E_2*Cons.M*(6.0*-Cons.Fcal_0*Vars.v**7 + 6.0*0*Vars.v**8) + 6.0*5*Vars.E_SO_3*Cons.M*-Cons.Fcal_0*Vars.v**8) + 15.0*4*Cons.E_2*Cons.M**2*-Cons.Fcal_0*Vars.v**8) + (0**6*(0*(-1.0*-Cons.Fcal_0*Vars.v**7 - 1.0*0*Vars.v**8) - 7.0*4*Cons.E_2*Cons.M*-Cons.Fcal_0*Vars.v**8) + 1.0*0**8*-Cons.Fcal_0*Vars.v**8/2*Cons.E_0*Cons.M)/2*Cons.E_0*Cons.M)/2*Cons.E_0*Cons.M)/2*Cons.E_0*Cons.M)/2*Cons.E_0*Cons.M)/2*Cons.E_0*Cons.M)/2*Cons.E_0*Cons.M)/2*Cons.E_0*Cons.M)/(Cons.nu*Vars.v*2*Cons.E_0*Cons.M)
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_4p0(Cons,Vars)[1:])
dydt[0] = dvdt_T4[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_4p0(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_4p0(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
@njit(cache=True)
def TaylorT5_4p0(Cons,Vars):
dtdv = -0.5*Cons.nu*Vars.v*(1.0*2*Cons.E_0*Cons.M + 1.0*0*Vars.v + 1.0*4*Cons.E_2*Cons.M*Vars.v**2 + 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**3 + 1.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*Vars.v**4 + 1.0*7*Vars.E_SO_5*Cons.M*Vars.v**5 + 1.0*8*Cons.E_6*Cons.M*Vars.v**6 + 1.0*9*Vars.E_SO_7*Cons.M*Vars.v**7 + 1.0*10*Cons.E_8*Cons.M + Cons.E_lnv_8*Cons.M*(10*Vars.logv + 1)*Vars.v**8 + (0*(-1.0*2*Cons.E_0*Cons.M*Vars.v - 1.0*0*Vars.v**2 - 1.0*4*Cons.E_2*Cons.M*Vars.v**3 - 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**4 - 1.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*Vars.v**5 - 1.0*7*Vars.E_SO_5*Cons.M*Vars.v**6 - 1.0*8*Cons.E_6*Cons.M*Vars.v**7 - 1.0*9*Vars.E_SO_7*Cons.M*Vars.v**8) + -Cons.Fcal_2*(-1.0*2*Cons.E_0*Cons.M*Vars.v**2 - 1.0*0*Vars.v**3 - 1.0*4*Cons.E_2*Cons.M*Vars.v**4 - 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**5 - 1.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*Vars.v**6 - 1.0*7*Vars.E_SO_5*Cons.M*Vars.v**7 - 1.0*8*Cons.E_6*Cons.M*Vars.v**8) + -Cons.Fcal_3 - Vars.Fcal_SO_3*(-1.0*2*Cons.E_0*Cons.M*Vars.v**3 - 1.0*0*Vars.v**4 - 1.0*4*Cons.E_2*Cons.M*Vars.v**5 - 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**6 - 1.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*Vars.v**7 - 1.0*7*Vars.E_SO_5*Cons.M*Vars.v**8) + -Cons.Fcal_4 - Vars.Fcal_SQ_4*(-1.0*2*Cons.E_0*Cons.M*Vars.v**4 - 1.0*0*Vars.v**5 - 1.0*4*Cons.E_2*Cons.M*Vars.v**6 - 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**7 - 1.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*Vars.v**8) + -Cons.Fcal_5 - Vars.Fcal_SO_5*(-1.0*2*Cons.E_0*Cons.M*Vars.v**5 - 1.0*0*Vars.v**6 - 1.0*4*Cons.E_2*Cons.M*Vars.v**7 - 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**8) + -Cons.Fcal_6 - Vars.Fcal_SO_6 - Cons.Fcal_lnv_6*Vars.logv*(-1.0*2*Cons.E_0*Cons.M*Vars.v**6 - 1.0*0*Vars.v**7 - 1.0*4*Cons.E_2*Cons.M*Vars.v**8) + -Cons.Fcal_7 - Vars.Fcal_SO_7*(-1.0*2*Cons.E_0*Cons.M*Vars.v**7 - 1.0*0*Vars.v**8) - 1.0*-Cons.Fcal_8 - Vars.Fcal_SO_8 - Cons.Fcal_lnv_8*Vars.logv*2*Cons.E_0*Cons.M*Vars.v**8 + (0*(0*(1.0*2*Cons.E_0*Cons.M*Vars.v**2 + 1.0*0*Vars.v**3 + 1.0*4*Cons.E_2*Cons.M*Vars.v**4 + 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**5 + 1.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*Vars.v**6 + 1.0*7*Vars.E_SO_5*Cons.M*Vars.v**7 + 1.0*8*Cons.E_6*Cons.M*Vars.v**8) + -Cons.Fcal_2*(2.0*2*Cons.E_0*Cons.M*Vars.v**3 + 2.0*0*Vars.v**4 + 2.0*4*Cons.E_2*Cons.M*Vars.v**5 + 2.0*5*Vars.E_SO_3*Cons.M*Vars.v**6 + 2.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*Vars.v**7 + 2.0*7*Vars.E_SO_5*Cons.M*Vars.v**8) + -Cons.Fcal_3 - Vars.Fcal_SO_3*(2.0*2*Cons.E_0*Cons.M*Vars.v**4 + 2.0*0*Vars.v**5 + 2.0*4*Cons.E_2*Cons.M*Vars.v**6 + 2.0*5*Vars.E_SO_3*Cons.M*Vars.v**7 + 2.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*Vars.v**8) + -Cons.Fcal_4 - Vars.Fcal_SQ_4*(2.0*2*Cons.E_0*Cons.M*Vars.v**5 + 2.0*0*Vars.v**6 + 2.0*4*Cons.E_2*Cons.M*Vars.v**7 + 2.0*5*Vars.E_SO_3*Cons.M*Vars.v**8) + -Cons.Fcal_5 - Vars.Fcal_SO_5*(2.0*2*Cons.E_0*Cons.M*Vars.v**6 + 2.0*0*Vars.v**7 + 2.0*4*Cons.E_2*Cons.M*Vars.v**8) + -Cons.Fcal_6 - Vars.Fcal_SO_6 - Cons.Fcal_lnv_6*Vars.logv*(2.0*2*Cons.E_0*Cons.M*Vars.v**7 + 2.0*0*Vars.v**8) + 2.0*-Cons.Fcal_7 - Vars.Fcal_SO_7*2*Cons.E_0*Cons.M*Vars.v**8) + -Cons.Fcal_2*(-Cons.Fcal_2*(1.0*2*Cons.E_0*Cons.M*Vars.v**4 + 1.0*0*Vars.v**5 + 1.0*4*Cons.E_2*Cons.M*Vars.v**6 + 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**7 + 1.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*Vars.v**8) + -Cons.Fcal_3 - Vars.Fcal_SO_3*(2.0*2*Cons.E_0*Cons.M*Vars.v**5 + 2.0*0*Vars.v**6 + 2.0*4*Cons.E_2*Cons.M*Vars.v**7 + 2.0*5*Vars.E_SO_3*Cons.M*Vars.v**8) + -Cons.Fcal_4 - Vars.Fcal_SQ_4*(2.0*2*Cons.E_0*Cons.M*Vars.v**6 + 2.0*0*Vars.v**7 + 2.0*4*Cons.E_2*Cons.M*Vars.v**8) + -Cons.Fcal_5 - 
Vars.Fcal_SO_5*(2.0*2*Cons.E_0*Cons.M*Vars.v**7 + 2.0*0*Vars.v**8) + 2.0*-Cons.Fcal_6 - Vars.Fcal_SO_6 - Cons.Fcal_lnv_6*Vars.logv*2*Cons.E_0*Cons.M*Vars.v**8) + -Cons.Fcal_3 - Vars.Fcal_SO_3*(-Cons.Fcal_3 - Vars.Fcal_SO_3*(1.0*2*Cons.E_0*Cons.M*Vars.v**6 + 1.0*0*Vars.v**7 + 1.0*4*Cons.E_2*Cons.M*Vars.v**8) + -Cons.Fcal_4 - Vars.Fcal_SQ_4*(2.0*2*Cons.E_0*Cons.M*Vars.v**7 + 2.0*0*Vars.v**8) + 2.0*-Cons.Fcal_5 - Vars.Fcal_SO_5*2*Cons.E_0*Cons.M*Vars.v**8) + 1.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4**2*2*Cons.E_0*Cons.M*Vars.v**8 + (0*(0*(0*(-1.0*2*Cons.E_0*Cons.M*Vars.v**3 - 1.0*0*Vars.v**4 - 1.0*4*Cons.E_2*Cons.M*Vars.v**5 - 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**6 - 1.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*Vars.v**7 - 1.0*7*Vars.E_SO_5*Cons.M*Vars.v**8) + -Cons.Fcal_2*(-3.0*2*Cons.E_0*Cons.M*Vars.v**4 - 3.0*0*Vars.v**5 - 3.0*4*Cons.E_2*Cons.M*Vars.v**6 - 3.0*5*Vars.E_SO_3*Cons.M*Vars.v**7 - 3.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*Vars.v**8) + -Cons.Fcal_3 - Vars.Fcal_SO_3*(-3.0*2*Cons.E_0*Cons.M*Vars.v**5 - 3.0*0*Vars.v**6 - 3.0*4*Cons.E_2*Cons.M*Vars.v**7 - 3.0*5*Vars.E_SO_3*Cons.M*Vars.v**8) + -Cons.Fcal_4 - Vars.Fcal_SQ_4*(-3.0*2*Cons.E_0*Cons.M*Vars.v**6 - 3.0*0*Vars.v**7 - 3.0*4*Cons.E_2*Cons.M*Vars.v**8) + -Cons.Fcal_5 - Vars.Fcal_SO_5*(-3.0*2*Cons.E_0*Cons.M*Vars.v**7 - 3.0*0*Vars.v**8) - 3.0*-Cons.Fcal_6 - Vars.Fcal_SO_6 - Cons.Fcal_lnv_6*Vars.logv*2*Cons.E_0*Cons.M*Vars.v**8) + -Cons.Fcal_2*(-Cons.Fcal_2*(-3.0*2*Cons.E_0*Cons.M*Vars.v**5 - 3.0*0*Vars.v**6 - 3.0*4*Cons.E_2*Cons.M*Vars.v**7 - 3.0*5*Vars.E_SO_3*Cons.M*Vars.v**8) + -Cons.Fcal_3 - Vars.Fcal_SO_3*(-6.0*2*Cons.E_0*Cons.M*Vars.v**6 - 6.0*0*Vars.v**7 - 6.0*4*Cons.E_2*Cons.M*Vars.v**8) + -Cons.Fcal_4 - Vars.Fcal_SQ_4*(-6.0*2*Cons.E_0*Cons.M*Vars.v**7 - 6.0*0*Vars.v**8) - 6.0*-Cons.Fcal_5 - Vars.Fcal_SO_5*2*Cons.E_0*Cons.M*Vars.v**8) + -Cons.Fcal_3 - Vars.Fcal_SO_3*(-Cons.Fcal_3 - Vars.Fcal_SO_3*(-3.0*2*Cons.E_0*Cons.M*Vars.v**7 - 3.0*0*Vars.v**8) - 6.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4*2*Cons.E_0*Cons.M*Vars.v**8)) + -Cons.Fcal_2*(-Cons.Fcal_2*(-Cons.Fcal_2*(-1.0*2*Cons.E_0*Cons.M*Vars.v**6 - 1.0*0*Vars.v**7 - 1.0*4*Cons.E_2*Cons.M*Vars.v**8) + -Cons.Fcal_3 - Vars.Fcal_SO_3*(-3.0*2*Cons.E_0*Cons.M*Vars.v**7 - 3.0*0*Vars.v**8) - 3.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4*2*Cons.E_0*Cons.M*Vars.v**8) - 3.0*-Cons.Fcal_3 - Vars.Fcal_SO_3**2*2*Cons.E_0*Cons.M*Vars.v**8) + (0*(0*(0*(0*(1.0*2*Cons.E_0*Cons.M*Vars.v**4 + 1.0*0*Vars.v**5 + 1.0*4*Cons.E_2*Cons.M*Vars.v**6 + 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**7 + 1.0*6*Cons.E_4*Cons.M + 6*Vars.E_SQ_4*Cons.M*Vars.v**8) + -Cons.Fcal_2*(4.0*2*Cons.E_0*Cons.M*Vars.v**5 + 4.0*0*Vars.v**6 + 4.0*4*Cons.E_2*Cons.M*Vars.v**7 + 4.0*5*Vars.E_SO_3*Cons.M*Vars.v**8) + -Cons.Fcal_3 - Vars.Fcal_SO_3*(4.0*2*Cons.E_0*Cons.M*Vars.v**6 + 4.0*0*Vars.v**7 + 4.0*4*Cons.E_2*Cons.M*Vars.v**8) + -Cons.Fcal_4 - Vars.Fcal_SQ_4*(4.0*2*Cons.E_0*Cons.M*Vars.v**7 + 4.0*0*Vars.v**8) + 4.0*-Cons.Fcal_5 - Vars.Fcal_SO_5*2*Cons.E_0*Cons.M*Vars.v**8) + -Cons.Fcal_2*(-Cons.Fcal_2*(6.0*2*Cons.E_0*Cons.M*Vars.v**6 + 6.0*0*Vars.v**7 + 6.0*4*Cons.E_2*Cons.M*Vars.v**8) + -Cons.Fcal_3 - Vars.Fcal_SO_3*(12.0*2*Cons.E_0*Cons.M*Vars.v**7 + 12.0*0*Vars.v**8) + 12.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4*2*Cons.E_0*Cons.M*Vars.v**8) + 6.0*-Cons.Fcal_3 - Vars.Fcal_SO_3**2*2*Cons.E_0*Cons.M*Vars.v**8) + -Cons.Fcal_2**2*(-Cons.Fcal_2*(4.0*2*Cons.E_0*Cons.M*Vars.v**7 + 4.0*0*Vars.v**8) + 12.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*2*Cons.E_0*Cons.M*Vars.v**8)) + 1.0*-Cons.Fcal_2**4*2*Cons.E_0*Cons.M*Vars.v**8 + (0**2*(0*(0*(0*(-1.0*2*Cons.E_0*Cons.M*Vars.v**5 - 
1.0*0*Vars.v**6 - 1.0*4*Cons.E_2*Cons.M*Vars.v**7 - 1.0*5*Vars.E_SO_3*Cons.M*Vars.v**8) + -Cons.Fcal_2*(-5.0*2*Cons.E_0*Cons.M*Vars.v**6 - 5.0*0*Vars.v**7 - 5.0*4*Cons.E_2*Cons.M*Vars.v**8) + -Cons.Fcal_3 - Vars.Fcal_SO_3*(-5.0*2*Cons.E_0*Cons.M*Vars.v**7 - 5.0*0*Vars.v**8) - 5.0*-Cons.Fcal_4 - Vars.Fcal_SQ_4*2*Cons.E_0*Cons.M*Vars.v**8) + -Cons.Fcal_2*(-Cons.Fcal_2*(-10.0*2*Cons.E_0*Cons.M*Vars.v**7 - 10.0*0*Vars.v**8) - 20.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*2*Cons.E_0*Cons.M*Vars.v**8)) - 10.0*-Cons.Fcal_2**3*2*Cons.E_0*Cons.M*Vars.v**8) + (0**4*(0*(0*(1.0*2*Cons.E_0*Cons.M*Vars.v**6 + 1.0*0*Vars.v**7 + 1.0*4*Cons.E_2*Cons.M*Vars.v**8) + -Cons.Fcal_2*(6.0*2*Cons.E_0*Cons.M*Vars.v**7 + 6.0*0*Vars.v**8) + 6.0*-Cons.Fcal_3 - Vars.Fcal_SO_3*2*Cons.E_0*Cons.M*Vars.v**8) + 15.0*-Cons.Fcal_2**2*2*Cons.E_0*Cons.M*Vars.v**8) + (0**6*(0*(-1.0*2*Cons.E_0*Cons.M*Vars.v**7 - 1.0*0*Vars.v**8) - 7.0*-Cons.Fcal_2*2*Cons.E_0*Cons.M*Vars.v**8) + 1.0*0**8*2*Cons.E_0*Cons.M*Vars.v**8/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/-Cons.Fcal_0)/(Vars.Fcal_coeff*-Cons.Fcal_0)
dvdt_T5 = 1.0/dtdv
dydt=np.zeros(8)
[dydt[5],dydt[6],dydt[7]] = FrameFromAngularVelocityIntegrand(Vars.rfrak_frame, OmegaVec_4p0(Cons,Vars)[1:])
dydt[0] = dvdt_T5[0]
if(Cons.EvolveSpin1):
dydt[1], dydt[2]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi1[0], Vars.rfrak_chi1[1],(mul(mul(inverse(Cons.S_chi1),OmegaVec_chiVec_1_4p0(Cons,Vars)),Cons.S_chi1))[1:])
else:
dydt[1], dydt[2] = 0.0, 0.0
if(Cons.EvolveSpin2):
dydt[3], dydt[4]=FrameFromAngularVelocity_2D_Integrand(Vars.rfrak_chi2[0], Vars.rfrak_chi2[1],(mul(mul(inverse(Cons.S_chi2),OmegaVec_chiVec_2_4p0(Cons,Vars)),Cons.S_chi2))[1:])
else:
dydt[3], dydt[4] = 0.0, 0.0
return dydt
class PNEv:
def Integrand(t,y):
PNEv.Recalculate.get(2*PNEv.PNEvolutionOrder)(PNEv.Cons,PNEv.Vars,y)
dydt=PNEv.Taylor.get(PNEv.TaylorTn+20*PNEv.PNEvolutionOrder)(PNEv.Cons,PNEv.Vars)
if PNEv.Vars.v>=1.0 and PNEv.NotForward:
print("Beyond domain of PN validity, this is a good way to terminate.")
PNEv.terminal1=False
if dydt[0]<1.0e-12 and PNEv.NotForward:
print("v is decreasing, which is not an uncommon way to stop.")
PNEv.terminal2=False
return dydt
def Evolution(xHat_i, yHat_i, zHat_i, M1_i, M2_i, v_i, S_chi1_i, S_chi2_i, rfrak_frame,
t_PNStart=False, t_PNEnd=False, PNEvolutionOrder=3.5, TaylorTn=1, StepsPerOrbit=32, ForwardInTime=True, tol=1e-8, MinStep=1e-7):
# Initialization of constants
PNEv.terminal1=True
PNEv.terminal2=True
PNEv.NotForward=True
PNEv.PNEvolutionOrder=PNEvolutionOrder
PNEv.TaylorTn=TaylorTn
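        # Dispatch-table key scheme: Recalculate is keyed by 2*(PN order), i.e.
        # 0..8 for orders 0.0..4.0 in half-integer steps; Taylor is keyed by
        # TaylorTn + 20*(PN order), e.g. TaylorTn=4 at order 3.5 -> 4 + 70 = 74
        # -> TaylorT4_3p5.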
PNEv.Recalculate={ 0:Recalculate_0,
1:Recalculate_0p50,
2:Recalculate_1p0,
3:Recalculate_1p5,
4:Recalculate_2p0,
5:Recalculate_2p5,
6:Recalculate_3p0,
7:Recalculate_3p5,
8:Recalculate_4p0}
PNEv.Taylor={
1:TaylorT1_0,
11:TaylorT1_0p50,
21:TaylorT1_1p0,
31:TaylorT1_1p5,
41:TaylorT1_2p0,
51:TaylorT1_2p5,
61:TaylorT1_3p0,
71:TaylorT1_3p5,
81:TaylorT1_4p0,
4:TaylorT4_0,
14:TaylorT4_0p50,
24:TaylorT4_1p0,
34:TaylorT4_1p5,
44:TaylorT4_2p0,
54:TaylorT4_2p5,
64:TaylorT4_3p0,
74:TaylorT4_3p5,
84:TaylorT4_4p0,
5:TaylorT5_0,
15:TaylorT5_0p50,
25:TaylorT5_1p0,
35:TaylorT5_1p5,
45:TaylorT5_2p0,
55:TaylorT5_2p5,
65:TaylorT5_3p0,
75:TaylorT5_3p5,
85:TaylorT5_4p0}
z=np.array([0.0])
PNEv.Cons=Cons(z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,True,True)
PNEv.Vars=Vars(z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z,z)
Initialization(PNEv.Cons,xHat_i, yHat_i, zHat_i, M1_i, M2_i, v_i, S_chi1_i, S_chi2_i, rfrak_frame)
def terminate(t,y):
return 1.0*PNEv.terminal1*PNEv.terminal2
terminate.terminal=True
TMerger=5.0/(256.0*PNEv.Cons.nu*v_i**8)
TEnd=TMerger
if t_PNEnd:
TEnd=t_PNEnd
time=[0.0]
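        # Output times are spaced so that each orbit gets roughly StepsPerOrbit
        # samples: the leading-order chirp gives v = (256*nu*(TMerger-t)/5)**(-1/8),
        # and 2*M/v**3 is proportional to the instantaneous orbital period.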
while time[-1]<TEnd and 2*PNEv.Cons.M*(256*PNEv.Cons.nu*(TMerger-time[-1])/5)**(3/8)/StepsPerOrbit>MinStep:
time.append(time[-1]+(2*PNEv.Cons.M*(256*PNEv.Cons.nu*(TMerger-time[-1])/5)**(3/8)/StepsPerOrbit)[0])
time=np.delete(time, -1)
# Integrate
yy=solve_ivp(PNEv.Integrand, [time[0],time[-1]], [v_i,0.0,
0.0,0.0,0.0,rfrak_frame[0],rfrak_frame[1],rfrak_frame[2]], method='DOP853',
t_eval=time, dense_output=True, events=terminate, rtol=tol, atol=tol)
if ForwardInTime:
PNEv.NotForward=False
time=[0.0]
TStart=-3*TMerger
if t_PNStart:
TStart=t_PNStart
while time[-1]>TStart:
time.append(time[-1]-(2*PNEv.Cons.M*(256*PNEv.Cons.nu*(TMerger-time[-1])/5)**(3/8)/StepsPerOrbit)[0])
yyForward=solve_ivp(PNEv.Integrand, [time[0],time[-1]], [v_i,0.0,
0.0,0.0,0.0,rfrak_frame[0],rfrak_frame[1],rfrak_frame[2]], method='DOP853',
t_eval=time, dense_output=True, rtol=tol, atol=tol)
yy.t=np.append(yyForward.t[1:][::-1],yy.t)
data=np.empty((8,len(yy.t)))
for i in range(8):
data[i]=np.append(yyForward.y[i][1:][::-1],yy.y[i])
yy.y=data
return yy
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
# code_menu must be unique and will be configured as a system permission
MENU_DEFAULT = [
{'code_menu': 'acceso_programaciones_didacticas',
'texto_menu': 'Programación General Anual',
'href': '',
'nivel': 1,
'tipo': 'Accesible',
'pos': 1
},
{'code_menu': 'acceso_programaciones_ccff',
'texto_menu': 'Programaciones CCFF',
'href': 'programaciones',
'nivel': 2,
'tipo': 'Accesible',
'pos': 1,
'parent': 'acceso_programaciones_didacticas'
},
{'code_menu': 'acceso_cargar_programaciones',
'texto_menu': 'Cargar programaciones',
'href': 'cargar_programaciones',
'nivel': 2,
'tipo': 'Accesible',
'pos': 2,
'parent': 'acceso_programaciones_didacticas'
},
{'code_menu': 'acceso_resultados_aprendizaje',
'texto_menu': 'Resultados de aprendizaje',
'href': 'objetivos_criterios',
'nivel': 2,
'tipo': 'Accesible',
'pos': 3,
'parent': 'acceso_programaciones_didacticas'
},
{'code_menu': 'acceso_cuerpos_funcionarios',
'texto_menu': 'Cuerpos de funcionarios',
'href': 'cuerpos_funcionarios_entidad',
'nivel': 2,
'tipo': 'Accesible',
'pos': 4,
'parent': 'acceso_programaciones_didacticas'
},
{'code_menu': 'acceso_departamentos_centro_educativo',
'texto_menu': 'Departamentos del centro',
'href': 'departamentos_centro_educativo',
'nivel': 2,
'tipo': 'Accesible',
'pos': 5,
'parent': 'acceso_programaciones_didacticas'
},
{'code_menu': 'acceso_profesores_centro_educativo',
'texto_menu': 'Profesores del centro',
'href': 'profesores_centro_educativo',
'nivel': 2,
'tipo': 'Accesible',
'pos': 6,
'parent': 'acceso_programaciones_didacticas'
},
{'code_menu': 'acceso_aspectos_pga',
'texto_menu': 'Aspectos generales de la PGA',
'href': 'aspectos_pga',
'nivel': 2,
'tipo': 'Accesible',
'pos': 7,
'parent': 'acceso_programaciones_didacticas'
},
{'code_menu': 'acceso_pec',
'texto_menu': 'Proyecto Educativo del Centro',
'href': 'proyecto_educativo_centro',
'nivel': 2,
'tipo': 'Accesible',
'pos': 8,
'parent': 'acceso_programaciones_didacticas'
}
]
# Additional permissions for the user are added here
PERMISOS = [{'code_nombre': 'crea_programaciones_ccff',
'nombre': 'Tiene permiso para crear programaciones de CCFF',
'menu': 'acceso_programaciones_ccff'
},
{'code_nombre': 'edita_programaciones_ccff',
'nombre': 'Tiene permiso para editar cualquier programación de CCFF',
'menu': 'acceso_programaciones_ccff'
},
{'code_nombre': 'copia_programaciones_ccff',
'nombre': 'Tiene permiso para copiar cualquier programación de CCFF',
'menu': 'acceso_programaciones_ccff'
},
{'code_nombre': 'borra_programaciones_ccff',
'nombre': 'Tiene permiso para borrar cualquier programación de CCFF',
'menu': 'acceso_programaciones_ccff'
},
{'code_nombre': 'crea_resultados_aprendizaje_ccff',
'nombre': 'Tiene permiso para crear resultados de aprendizaje asociados a un Ciclo Formativo',
'menu': 'acceso_resultados_aprendizaje'
},
{'code_nombre': 'borra_resultados_aprendizaje_ccff',
'nombre': 'Tiene permiso para borrar resultados de aprendizaje asociados a un Ciclo Formativo',
'menu': 'acceso_resultados_aprendizaje'
},
{'code_nombre': 'crea_objetivos_ccff',
             'nombre': 'Puede crear objetivos y criterios de evaluación asociados a un resultado de aprendizaje',
'menu': 'acceso_resultados_aprendizaje'
},
{'code_nombre': 'borra_objetivos_ccff',
'nombre': 'Puede borrar objetivos y criterios de evaluación asociados a un resultado de aprendizaje',
'menu': 'acceso_resultados_aprendizaje'
},
{'code_nombre': 'borra_departamentos',
'nombre': 'Puede borrar departamentos del centro educativo',
'menu': 'acceso_departamentos_centro_educativo'
},
{'code_nombre': 'recarga_departamentos',
'nombre': 'Puede recargar todos los departamentos posibles para el centro educativo',
'menu': 'acceso_departamentos_centro_educativo'
},
{'code_nombre': 'add_miembros_departamento',
'nombre': 'Puede añadir usuarios al departamento del centro educativo',
'menu': 'acceso_departamentos_centro_educativo'
},
{'code_nombre': 'carga_programaciones',
'nombre': 'Puede cargar programaciones del centro educativo',
'menu': 'acceso_cargar_programaciones'
},
{'code_nombre': 'borra_programaciones_cargadas',
'nombre': 'Puede borrar programaciones del centro educativo',
'menu': 'acceso_cargar_programaciones'
},
{'code_nombre': 'descarga_programaciones',
'nombre': 'Puede descargar programaciones del centro educativo',
'menu': 'acceso_cargar_programaciones'
},
{'code_nombre': 'descarga_pga',
'nombre': 'Puede descargar la programación general anual del centro educativo',
'menu': 'acceso_cargar_programaciones'
}
]
|
#!/usr/bin/env python
# coding: utf-8
import pickle
from pathlib import Path
import numpy as np
import pandas as pd
from gensim import models
from gensim.models.doc2vec import Doc2Vec
from gensim.models.word2vec import Word2Vec
# --- load ground truth ---
# loading gt EN
from semsim.constants import DATA_DIR, TMP_DIR
sj_file_en = TMP_DIR / 'Psycho-Paper/synonym_judgement/cueing study stimuli for distribution.csv'
sj_en_full = pd.read_csv(sj_file_en)
sj_en = sj_en_full[['Probe', 'Target', 'Foil1', 'Foil2']]
sj_en = sj_en[~sj_en.isna().any(axis=1)]
# loading gt DE
sj_file_de = TMP_DIR / 'Psycho-Paper/synonym_judgement/SJT_stimuli.csv'
sj_de_full = pd.read_csv(sj_file_de)
sj_de = sj_de_full[['probe', 'target', 'foil1', 'foil2']]
sj_de = sj_de[~sj_de.isna().any(axis=1)]
def similarities(terms, vectors):
terms = terms.to_list()
probe = terms[0]
targets = terms[1:]
try:
distances = vectors.distances(probe, targets)
sims = 1 - distances
except KeyError:
if probe not in vectors:
sims = [np.nan] * 3
else:
sims = []
for term in targets:
if term in vectors:
similarity = vectors.similarity(probe, term)
sims.append(similarity)
else:
sims.append(np.nan)
return pd.Series(sims)
def synonym_judgement_accuracy(word_vectors, tests, target_idx=0, file=None):
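    # Each test row is (probe, target, foil1, foil2); 'pred' is the argmax over
    # [target_sim, foil1_sim, foil2_sim], so a prediction equal to target_idx
    # (default 0, the target column) counts as correct.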
sim_cols = ['target_sim', 'foil1_sim', 'foil2_sim']
tests = tests.copy()
# calculating similarities
tests[sim_cols] = tests.apply(similarities, vectors=word_vectors, axis=1)
# default values for OOV tests
tests['pred'] = -1
tests['correct'] = np.nan
# predictions for in-vocab test
in_vocab = ~tests[sim_cols].isna().any(axis=1)
tests.loc[in_vocab, 'pred'] = tests.loc[in_vocab, sim_cols].apply(np.argmax, axis=1)
pred = tests.loc[in_vocab, 'pred']
tests.loc[in_vocab, 'correct'] = (pred == target_idx)
# calculating accuracy
correct = tests.loc[in_vocab, 'correct'].sum()
acc = correct / len(pred)
print(f"Accuracy: {acc:.03f}")
print(f"Number of tests omitted due to unknown words: {len(tests) - len(pred)}")
# writing results
if file is not None:
file = Path(file)
file = file.parent / f'{file.name}_acc{acc:.03f}.sjt'
print(f"Saving SJT predictions to {file}")
tests.to_csv(file, sep='\t')
# -- evaluate word vectors on SJT --
def convert_csv_to_w2v_format(csv_file_path, w2v_file_path):
lsi_wv = pd.read_csv(csv_file_path, index_col=0)
with open(w2v_file_path, 'w') as fp:
fp.write(f'{lsi_wv.shape[0]} {lsi_wv.shape[1]}\n')
lsi_wv.to_csv(fp, sep=' ', header=False)
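# The .w2v file follows the word2vec text format expected by
# KeyedVectors.load_word2vec_format: a header line '<vocab_size> <dimensions>'
# followed by one '<word> <v1> <v2> ...' line per term.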
def example_vectors_en():
# - pretrained vectors -
google_w2v = models.KeyedVectors.load_word2vec_format(
str(DATA_DIR / 'vectors/GoogleNews-vectors-negative300.bin'),
binary=True
)
synonym_judgement_accuracy(google_w2v, sj_en)
# - bnc lsi vectors -
file = 'bnc_lsi_gensim_term_vectors.csv'
dir_path = DATA_DIR / 'out/SemD/bnc_cs1000_minsz50_lc_filtered'
csv_file_path = dir_path / file
w2v_file_path = csv_file_path.with_suffix('.w2v')
convert_csv_to_w2v_format(csv_file_path, w2v_file_path)
print(f"Loading {w2v_file_path}")
bnc_lsi = models.KeyedVectors.load_word2vec_format(str(w2v_file_path))
synonym_judgement_accuracy(bnc_lsi, sj_en)
def example_vectors_de():
file = DATA_DIR / 'vectors/d2v'
print(f"Loading {file}")
d2v = Doc2Vec.load(str(file))
synonym_judgement_accuracy(d2v.wv, sj_de)
file = DATA_DIR / 'vectors/w2v'
print(f"Loading {file}")
w2v = Word2Vec.load(str(file))
synonym_judgement_accuracy(w2v.wv, sj_de)
file = DATA_DIR / 'out/SemD/OP2/OnlineParticipation_lsi_gensim_term_vectors.csv'
print(f"Loading {file}")
op_lsi = models.KeyedVectors.load_word2vec_format(str(file))
synonym_judgement_accuracy(op_lsi, sj_de)
file = DATA_DIR / 'data/out/SemD/DEWAC_1000_40k/dewac_lsi_word_vectors.vec'
print(f"Loading {file}")
op_lsi = models.KeyedVectors.load_word2vec_format(str(file))
synonym_judgement_accuracy(op_lsi, sj_de)
file = DATA_DIR / 'out/SemD/DEWAC_1000/dewac_lsi_word_vectors.vec'
print(f"Loading {file}")
op_lsi = models.KeyedVectors.load_word2vec_format(str(file))
synonym_judgement_accuracy(op_lsi, sj_de)
file = DATA_DIR / 'out/SemD/DEWAC/dewac_lsi_word_vectors.vec'
print(f"Loading {file}")
op_lsi = models.KeyedVectors.load_word2vec_format(str(file))
synonym_judgement_accuracy(op_lsi, sj_de)
def evaluate_lsi_vectors(vec_path):
file = Path(vec_path)
print(f"Loading {file}")
op_lsi = models.KeyedVectors.load_word2vec_format(str(file))
synonym_judgement_accuracy(op_lsi, sj_de, file=file)
def evaluate_d2v_vectors(vec_path):
for file in sorted(vec_path.iterdir()):
try:
d2v = Doc2Vec.load(str(file))
print(f'{file} loaded')
synonym_judgement_accuracy(d2v.wv, sj_de, file=file)
print()
except pickle.UnpicklingError:
pass
if __name__ == '__main__':
# example_vectors_de()
# evaluate_d2v_vectors(DATA_DIR / 'out/models/d2v_dewac')
# evaluate_dewac_d2v_vectors(DATA_DIR / 'out/models/d2v_dewac_vocab')
# evaluate_d2v_vectors(DATA_DIR / 'out/models/d2v_test_vocab_B')
evaluate_lsi_vectors(DATA_DIR / 'out/SemD/DEWAC_1000_40k_v2/dewac_lsi_word_vectors.vec')
|
"Re-saves all things which might produce GL transactions."
import os, time
from optparse import make_option
import inspect
import importlib
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction
from accountifie.gl.models import Transaction
from accountifie.gl.bmo import BusinessModelObject
from accountifie.common.api import api_func
from accountifie.query.query_manager_strategy_factory import QueryManagerStrategyFactory
class Command(BaseCommand):
"""
option_list = BaseCommand.option_list + (
make_option('--all',
action='store_true',
help='Full recalc of GL'),
)
"""
def handle(self, *args, **options):
        # Rebuild the GL inside a single atomic transaction: faster and less
        # likely to leave it half-updated.
klasses = []
kl_paths = api_func('environment', 'variable_list', 'BMO_MODULES')
# find all the BMO classes
for path in kl_paths:
for name, kl in inspect.getmembers(importlib.import_module(path), inspect.isclass):
if BusinessModelObject in kl.__bases__:
klasses.append(kl)
with transaction.atomic():
Transaction.objects.all().delete()
for cmpny in [c['id'] for c in api_func('gl', 'company')]:
QueryManagerStrategyFactory().erase(cmpny)
print("deleted all transactions")
QueryManagerStrategyFactory().set_fast_inserts('*', True)
        total = 0
        for klass in klasses:
            print('working on', klass)
            qs = klass.objects.all()
            for obj in qs:
                obj.update_gl()
            total += qs.count()
            print('finished with', klass)
        QueryManagerStrategyFactory().set_fast_inserts('*', False)
        QueryManagerStrategyFactory().take_snapshot('*')
        print("re-saved %d objects" % total)
|
#Read input file
# parameters:
# input file - .i input file
def is_number(s):
try:
float(s)
return True
except ValueError:
return False
def read_input_file(input_file,number_of_inputs):
inputs = {}
with open(input_file) as f:
for line in f:
            if '=' in line:
                inputs[line.split("=")[0].strip().lower()] = line.split("=")[1].strip()
    if len(inputs) != number_of_inputs:
        print(f"Please recheck the input file: expected {number_of_inputs} parameters, found {len(inputs)}.")
        print("Exiting program...")
        exit()
else:
print("Successfully read in input file")
    for key, val in inputs.items():
        if is_number(val):
            inputs[key] = float(val)
return inputs
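# Example usage (hypothetical file name and contents): a file 'params.i' with
# the lines 'temperature = 300' and 'mesh_file = geometry.msh' yields
#   read_input_file('params.i', number_of_inputs=2)
#   -> {'temperature': 300.0, 'mesh_file': 'geometry.msh'}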
|
from rest_framework import serializers
from leads.models import Lead
from accounts.models import Tags, Account
from common.serializer import (
UserSerializer,
AttachmentsSerializer,
LeadCommentSerializer,
CompanySerializer
)
from contacts.serializer import ContactSerializer
from teams.serializer import TeamsSerializer
class TagsSerializer(serializers.ModelSerializer):
class Meta:
model = Tags
fields = ("id", "name", "slug")
class LeadSerializer(serializers.ModelSerializer):
assigned_to = UserSerializer(read_only=True, many=True)
created_by = UserSerializer()
tags = TagsSerializer(read_only=True, many=True)
lead_attachment = AttachmentsSerializer(read_only=True, many=True)
teams = TeamsSerializer(read_only=True, many=True)
company = CompanySerializer()
lead_comments = LeadCommentSerializer(read_only=True, many=True)
class Meta:
model = Lead
        # fields = '__all__'
fields = (
"id",
"title",
"first_name",
"last_name",
"phone",
"email",
"status",
"source",
"address_line",
"street",
"city",
"state",
"postcode",
"country",
"website",
"description",
"lead_attachment",
"lead_comments",
"assigned_to",
"account_name",
"opportunity_amount",
"created_by",
"created_on",
"is_active",
"enquery_type",
"tags",
"created_from_site",
"teams",
"company",
)
class LeadCreateSerializer(serializers.ModelSerializer):
def __init__(self, *args, **kwargs):
request_obj = kwargs.pop("request_obj", None)
super(LeadCreateSerializer, self).__init__(*args, **kwargs)
if self.initial_data.get("status") == "converted":
self.fields["account_name"].required = True
self.fields["email"].required = True
self.fields["first_name"].required = False
self.fields["last_name"].required = False
self.fields["title"].required = True
if self.instance:
if self.instance.created_from_site:
prev_choices = self.fields["source"]._get_choices()
prev_choices = prev_choices + \
[("micropyramid", "Micropyramid")]
self.fields["source"]._set_choices(prev_choices)
self.company = request_obj.company
def validate_account_name(self, account_name):
if self.instance:
if Account.objects.filter(
name__iexact=account_name,
company=self.company,
).exclude(id=self.instance.id).exists():
raise serializers.ValidationError(
"Account already exists with this name")
else:
if Account.objects.filter(
name__iexact=account_name, company=self.company
).exists():
raise serializers.ValidationError(
"Account already exists with this name")
return account_name
def validate_title(self, title):
if self.instance:
if Lead.objects.filter(
title__iexact=title,
company=self.company,
).exclude(id=self.instance.id).exists():
raise serializers.ValidationError(
"Lead already exists with this title")
else:
if Lead.objects.filter(
title__iexact=title, company=self.company
).exists():
raise serializers.ValidationError(
"Lead already exists with this title")
return title
class Meta:
model = Lead
fields = (
"first_name",
"last_name",
"account_name",
"title",
"phone",
"email",
"status",
"source",
"website",
"description",
"address_line",
"street",
"city",
"state",
"postcode",
"country",
)
|
def _jupyter_server_extension_paths():
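    # Discovery hook: jupyter_server calls this to find the classic NotebookApp
    # and load it as the 'jupyter-nbclassic' server extension.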
# Locally import to avoid install errors.
from .notebookapp import NotebookApp
return [
{
'module': 'nbclassic.notebookapp',
'app': NotebookApp,
'name': 'jupyter-nbclassic'
}
]
|
# standard lib
import sys
# 3rd-party lib
import numpy as np
import torch
import torch.distributed as dist
import torch.nn.functional as F
# mm lib
import mmcv
DISTANCES = ['mse', 'cosine', 'relation', 'kl']
def multi_gpu_test_with_distance(model, meta, teacher_data_loader, student_data_loader, distance_metric, rank):
assert distance_metric in DISTANCES
results = torch.tensor(0.).cuda()
results_qq = torch.tensor(0.).cuda()
if rank == 0:
prog_bar = mmcv.ProgressBar(len(teacher_data_loader))
model.module.manipulate_arch(meta['arch'])
query_embeddings = None
key_embeddings = None
temperature = 0.2
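    # 'relation' metric: accumulate banks of query-encoder and key-encoder
    # embeddings; each batch is scored by the cross-entropy between the key
    # embeddings' softmax similarity distribution over the key bank (target)
    # and the query embeddings' distribution over the same bank (prediction);
    # results_qq repeats this with similarities taken against the query bank.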
for idx, data in enumerate(zip(teacher_data_loader, student_data_loader)):
with torch.no_grad():
# scale up for avoiding overflow
result1 = model(**data[0], mode='get_embedding')*100
result2 = model(**data[0], mode='get_embedding', extract_from='encoder_k')*100
result1 = torch.nn.functional.normalize(result1, dim=1)
result2 = torch.nn.functional.normalize(result2, dim=1)
batch_size = result1.size(0)
if distance_metric == 'mse':
results += torch.sum((result1-result2)*(result1-result2))/batch_size
            elif distance_metric == 'cosine':
results += torch.sum(torch.einsum('nc,nc->n', [result1,result2]))/batch_size
elif distance_metric == 'relation':
if query_embeddings is None:
query_embeddings = torch.zeros(result1.size(1),len(teacher_data_loader)*batch_size).cuda() # [C, L]
key_embeddings = torch.zeros(result2.size(1),len(teacher_data_loader)*batch_size).cuda() # [C, L]
query_embeddings[:,idx*batch_size:(idx+1)*batch_size] = result1.T
key_embeddings[:,idx*batch_size:(idx+1)*batch_size] = result2.T
logits_q = torch.einsum('nc,ck->nk', [result1, key_embeddings[:,:(idx+1)*batch_size]])
logits_q_qq = torch.einsum('nc,ck->nk', [result1, query_embeddings[:,:(idx+1)*batch_size]])
logits_k = torch.einsum('nc,ck->nk', [result2, key_embeddings[:,:(idx+1)*batch_size]])
results += - torch.sum( F.softmax(logits_k / temperature, dim=1) \
* F.log_softmax(logits_q / temperature, dim=1), dim=1).mean()
results_qq += - torch.sum( F.softmax(logits_k / temperature, dim=1) \
* F.log_softmax(logits_q_qq / temperature, dim=1), dim=1).mean()
if rank == 0:
sys.stdout.flush()
prog_bar.update()
results = results/len(teacher_data_loader)
if distance_metric == 'relation':
results_qq = results_qq/len(teacher_data_loader)
results_all = {}
dist.barrier()
dist.all_reduce(results)
dist.all_reduce(results_qq)
world_size = dist.get_world_size()
if distance_metric == 'mse':
results_all['mse'] = (results / world_size).item()
elif distance_metric == 'cosine':
results_all['cosine'] = (results / world_size).item()
elif distance_metric == 'relation':
results_all['qk_kk_mean'] = (results / world_size).item()
results_all['qq_kk_mean'] = (results_qq / world_size).item()
return results_all
def multi_gpu_test_with_dense_distance(model, meta, teacher_data_loader, student_data_loader, distance_metric, rank):
assert distance_metric in DISTANCES
results = torch.tensor(0.).cuda()
results_qq = torch.tensor(0.).cuda()
if rank == 0:
prog_bar = mmcv.ProgressBar(len(teacher_data_loader))
model.module.manipulate_arch(meta['arch'])
query_embeddings = None
key_embeddings = None
temperature = 0.2
for idx, data in enumerate(zip(teacher_data_loader, student_data_loader)):
with torch.no_grad():
result1 = model(**data[0], mode='extract') #([N,C,H,W],...) depend on your config, which stage's feature will be return.
result2 = model(**data[0], mode='extract', extract_from='encoder_k') #([N,C,H,W],...)
for tensor_1, tensor_2 in zip(result1, result2):
# scale up for avoiding overflow
# resolution of tensor from early stage may cause memory out. Consider cropping it
tensor_1 = tensor_1*100
tensor_2 = tensor_2*100
tensor_1 = torch.nn.functional.normalize(tensor_1, dim=1)
tensor_2 = torch.nn.functional.normalize(tensor_2, dim=1)
tensor_1 = tensor_1.view(tensor_1.size(0),tensor_1.size(1),-1) # [N,C,H*W]
tensor_2 = tensor_2.view(tensor_2.size(0),tensor_2.size(1),-1) #
tensor_1 = torch.bmm(tensor_1.transpose(1,2),tensor_1) #[N,H*W,H*W]
tensor_1 = tensor_1.view(-1, tensor_1.size(2)) # [N*HW, H*W]
tensor_2 = torch.bmm(tensor_2.transpose(1,2),tensor_2)
tensor_2 = tensor_2.view(-1, tensor_2.size(2))
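                # tensor_1/tensor_2 are now per-sample self-similarity (Gram)
                # matrices over spatial positions, flattened to [N*H*W, H*W];
                # the metrics below compare these relational maps instead of
                # the raw features.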
batch_size = tensor_1.size(0)
if distance_metric == 'mse':
results += torch.sum((tensor_1-tensor_2)*(tensor_1-tensor_2))/batch_size
elif distance_metric == 'cosine':
results += torch.sum(torch.einsum('nc,nc->n', [tensor_1,tensor_2]))/batch_size
elif distance_metric == 'kl':
results += torch.sum(- torch.sum( F.softmax(tensor_2 / temperature, dim=1) * F.log_softmax(tensor_1 / temperature, dim=1), dim=1))/batch_size
if rank == 0:
sys.stdout.flush()
prog_bar.update()
results = results/len(teacher_data_loader)
results_all = {}
dist.barrier()
dist.all_reduce(results)
world_size = dist.get_world_size()
if distance_metric == 'mse':
results_all['mse'] = (results / world_size).item()
elif distance_metric == 'cosine':
results_all['cosine'] = (results / world_size).item()
elif distance_metric == 'kl':
results_all['kl'] = (results / world_size).item()
return results_all
|
"""
---> Wiggle Subsequence
---> Medium
"""
class Solution:
def wiggleMaxLength(self, nums) -> int:
n = len(nums)
final_nums = [nums[0]]
for i in range(1, n):
if i == 1 or len(final_nums) == 1:
                if nums[i] != final_nums[0]:
                    final_nums.append(nums[i])
else:
if nums[i] > final_nums[-1]:
if final_nums[-1] >= final_nums[-2]:
final_nums[-1] = nums[i]
else:
final_nums.append(nums[i])
else:
if nums[i] == final_nums[-1]:
continue
if final_nums[-1] <= final_nums[-2]:
final_nums[-1] = nums[i]
else:
final_nums.append(nums[i])
return len(final_nums)
def wiggleMaxLength_sol2(self, nums) -> int:
positive_till_now = 1
negative_till_now = 1
for i in range(1, len(nums)):
if nums[i] > nums[i - 1]:
positive_till_now = negative_till_now + 1
elif nums[i] < nums[i - 1]:
negative_till_now = positive_till_now + 1
return max(positive_till_now, negative_till_now)
in_nums = [1, 17, 5, 10, 13, 15, 10, 5, 16, 8]
a = Solution()
print(a.wiggleMaxLength(in_nums))
print(a.wiggleMaxLength_sol2(in_nums))
"""
Approach 1:
Keep a list of the numbers forming the wiggle so far; the first number is always
kept. While the list holds a single element, append the next number only if it
differs from it. Afterwards, when the next number is greater than the last kept
number: if the last step was already upward (last >= second last), replace the
last number with the larger one, otherwise append it. When the next number is
smaller: skip it if it equals the last kept number; if the last step was already
downward (last <= second last), replace the last number, otherwise append it.
The answer is the length of the kept list.
Approach 2:
Track the length of the longest wiggle ending on a positive difference and the
longest ending on a negative one. When the next difference is positive, set
positive = negative + 1; when it is negative, set negative = positive + 1. The
answer is the maximum of the two.
Reference - https://leetcode.com/problems/wiggle-subsequence/discuss/1585932/100-efficient-Simple-Code-(Python3)%3A-O(n)
Complexities:
Time -> O(N) for both approaches
Space -> O(N) for Approach 1, O(1) for Approach 2
"""
|
class PalindromesCount:
def count(self, A, B):
def is_palindrome(s):
return s == s[::-1]
return sum(is_palindrome(A[:i] + B + A[i:]) for i in xrange(len(A) + 1))
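# Usage sketch (hypothetical values; xrange implies this file targets Python 2):
#   PalindromesCount().count("ab", "a") tries "aab", "aab", "aba" -> returns 1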
|
import json
import logging
from .cutom_serializers import HassIoSerializers
from homeassistant.components.http import HomeAssistantView
import homeassistant.core as ha
from homeassistant.helpers.service import async_get_all_descriptions
from .const import ONBOARDING_DOMAIN, ONBOARDING_STEP_USER, ONBOARDING_STEP_CORE_CONFIG, ONBOARDING_STEP_INTEGRATION
_LOGGER = logging.getLogger(__name__)
from .FunikiAreaView import FunikiAreasView, FunikiAreaDeleteView
from .FunikiUserView import FunikiUserView, FunikiDeleteUserView
from .FunikiDeviceView import FunikiDeviceView
from .FunikiEntitiesView import FunikiEntitiesView
from .FunikiSummaryView import FunikiSummaryView
from .FunikiOnBoardingStatus import FunikiOnBoardingStatus
DOMAIN = "funiki"
def setup(hass, config):
hass.http.register_view(FunikiSummaryView)
hass.http.register_view(FunikiDeviceView)
hass.http.register_view(FunikiEntitiesView)
hass.http.register_view(FunikiAreasView)
hass.http.register_view(FunikiAreaDeleteView)
hass.http.register_view(FunikiUserView)
hass.http.register_view(FunikiDeleteUserView)
hass.http.register_view(FunikiOnBoardingStatus)
return True
async def async_services_json(hass):
descriptions = await async_get_all_descriptions(hass)
return [{"domain": key, "services": value} for key, value in descriptions.items()]
@ha.callback
def async_events_json(hass):
return [
{"event": key, "listener_count": value}
for key, value in hass.bus.async_listeners().items()
]
|
import sys
sys.path = ["."] + sys.path
from petlib.ec import EcGroup
from petlib.bn import Bn
from hashlib import sha256
import math
## ######################################################
## An implementation of the ring signature scheme in
##
## Jens Groth and Markulf Kohlweiss. "One-out-of-Many Proofs:
## Or How to Leak a Secret and Spend a Coin"
## Cryptology ePrint Archive: Report 2014/764
##
## https://eprint.iacr.org/2014/764
## ######################################################
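## Typical flow (illustrative sketch; see test_prove_n below for a runnable version):
##   ck = setup()                                    # commitment key (G, g, h, o)
##   r = ck[3].random()                              # opening randomness, o.random()
##   cis = [decoys ..., Com(ck, 0, r), ...]          # the ring of commitments
##   proof = ProveOneOfN(ck, cis, el, r, message="msg")   # el = index of ours
##   assert VerifyOneOfN(ck, cis, proof, message="msg")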
def challenge(elements):
"""Packages a challenge in a bijective way"""
elem = [len(elements)] + elements
elem_str = list(map(str, elem))
elem_len = list(map(lambda x: "%s||%s" % (len(x) , x), elem_str))
state = "|".join(elem_len)
H = sha256()
H.update(state.encode("utf8"))
return Bn.from_binary(H.digest())
def setup():
""" Generates parameters for Commitments """
G = EcGroup()
g = G.hash_to_point(b'g')
h = G.hash_to_point(b'h')
o = G.order()
return (G, g, h, o)
def Com(ck, m, k):
""" Pedersen Commitment. """
(G, g, h, o) = ck
return m * g + k * h
def ProveZeroOne(ck, c, m, r):
""" Simple proof that a Commitment c = Com(m,r) is either 0 or 1 """
assert Com(ck, m, r) == c
(G, g, h, o) = ck
a, s, t = o.random(), o.random(), o.random()
ca = Com(ck, a, s)
cb = Com(ck, a*m, t)
x = challenge([g, h, ca, cb]) % o
f = (x * m + a) % o
za = (r * x + s) % o
zb = (r * (x - f) + t) % o
return (x, f, za, zb)
def VerifyZeroOne(ck, c, proof):
""" Verify that a Commitment c = Com(m,r) is either 0 or 1 """
(G, g, h, o) = ck
(x, f, za, zb) = proof
assert 0 < x < o
assert 0 < f < o
assert 0 < za < o
assert 0 < zb < o
ca = Com(ck,f,za) - x * c
cb = Com(ck, 0, zb) - (x-f) * c
xp = challenge([g, h, ca, cb]) % o
return xp == x
def ProveOneOfN(ck, cis, el, r, message = ""):
""" NIZK Proof that Com(0; r) is within Cis.
The fact that it is the el'th commitmtnet is not revealed.
+ Ring signature on "message". """
n = int(math.ceil(math.log(len(cis)) / math.log(2)))
assert Com(ck, 0, r) == cis[el]
(G, g, h, o) = ck
## Commit to the bits of the index
el = Bn(el)
eli = [Bn(int(el.is_bit_set(i))) for i in range(n)]
ri = [o.random() for i in range(n)]
ai = [o.random() for i in range(n)]
si = [o.random() for i in range(n)]
ti = [o.random() for i in range(n)]
Celi = [Com(ck, elix, rix) for elix, rix in zip(eli, ri)]
Cai = [Com(ck, a, s) for a, s in zip(ai, si)]
Cbi = [Com(ck, l * a , s) for l, a, s in zip(eli, ai, ti)]
# Compute p_idxi(x)
p_idx_i = []
for idx in range(len(cis)):
idx = Bn(idx)
idxi = [Bn(int(idx.is_bit_set(i))) for i in range(n)]
p = [Bn(1)]
for j, idxi_j in enumerate(idxi):
if idxi_j == 0:
p = poly_mul(o, p, [ -ai[j] , - eli[j] + 1] )
else:
p = poly_mul(o, p, [ ai[j] , eli[j] ])
p_idx_i += [p]
# Compute all Cdi's
roi = []
cdi = []
for i in range(n):
roi_i = o.random()
roi += [ roi_i ]
# cdi_i = Com(ck, 0, roi_i)
wis = []
for idx, cidx in enumerate(cis):
wis += [ p_idx_i[idx][i] ]
# cdi_i += p_idx_i[idx][i] * cidx
# assert G.wsum(wis, cis) + Com(ck, 0, roi_i) == cdi_i
cdi_i = G.wsum(wis, cis) + Com(ck, 0, roi_i)
cdi += [ cdi_i ]
## The challenge
x = challenge(list(ck) + cis + Celi + Cai + Cbi + cdi + [ message ])
## The responses
fi = [(elj * x + aj) % o for elj, aj in zip(eli, ai)]
zai = [(rj * x + sj) % o for rj, sj in zip(ri, si)]
zbi = [(rj * (x - fj) + tj) % o for rj, fj, tj in zip(ri, fi, ti)]
zd = r * pow(x, n, o) % o
for k in range(n):
zd = (zd - roi[k] * pow(x, k, o)) % o
proof = (Celi, Cai, Cbi, cdi, fi, zai, zbi, zd)
return proof
def VerifyOneOfN(ck, cis, proof, message = ""):
""" Verify the ring signature on message """
n = int(math.ceil(math.log(len(cis)) / math.log(2)))
(G, g, h, o) = ck
(Celi, Cai, Cbi, cdi, fi, zai, zbi, zd) = proof
## Check all parts of the proof are in the right groups
assert 0 <= zd < o
for k in range(n):
assert 0 <= fi[k] < o
assert 0 <= zai[k] < o
assert 0 <= zbi[k] < o
assert G.check_point(Celi[k])
assert G.check_point(Cai[k])
assert G.check_point(Cbi[k])
assert G.check_point(cdi[k])
# Recompute the challenge
x = challenge(list(ck) + cis + Celi + Cai + Cbi + cdi + [ message ])
ret = True
for i in range(n):
ret &= x * Celi[i] + Cai[i] == Com(ck, fi[i], zai[i])
ret &= (x - fi[i]) * Celi[i] + Cbi[i] == Com(ck, Bn(0), zbi[i])
# acc = G.infinite()
bases = []
expons = []
for idx, ci in enumerate(cis):
idx = Bn(idx)
idxi = [Bn(int(idx.is_bit_set(i))) for i in range(n)]
acc_exp = Bn(1)
for k, ij in enumerate(idxi):
if ij == 0:
acc_exp = acc_exp.mod_mul(x - fi[k], o)
else:
acc_exp = acc_exp.mod_mul(fi[k], o)
bases += [ ci ]
expons += [ acc_exp ]
# acc = acc + acc_exp * ci
for k in range(n):
expi = (- pow(x,k,o))
# acc = acc + expi * cdi[k]
bases += [ cdi[k] ]
expons += [ expi ]
# assert G.wsum(expons, bases) == acc
acc = G.wsum(expons, bases)
ret &= acc == Com(ck, 0, zd)
return ret
## ######################################
## Naive polynomial arithmetic
zero = Bn(0)
def poly_expand(o, poly, size):
global zero
assert len(poly) <= size
# zero = Bn(0)
new_poly = [zero for _ in range(size)]
for i in range(len(poly)):
new_poly[i] = poly[i]
return new_poly
def poly_add(o, poly1, poly2):
size = max(len(poly1), len(poly2))
p1 = poly_expand(o, poly1, size)
p2 = poly_expand(o, poly2, size)
pout = poly_expand(o, [], size)
for i, (c1, c2) in enumerate(zip(p1, p2)):
pout[i] = c1.mod_add( c2, o)
return pout
def poly_mul(o, poly1, poly2):
global zero
p = [ zero ]
for i, c1 in enumerate(poly1):
p2 = ([ zero ] * i) + [(c1.mod_mul(c2, o)) for c2 in poly2]
p = poly_add(o, p2, p)
return p
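# Polynomials here are plain lists of Bn coefficients in increasing degree:
# e.g. [Bn(1), Bn(2)] represents 1 + 2*x, and poly_mul(o, p, p) squares it,
# with all coefficient arithmetic done modulo the group order o.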
###################################################
# ---------------- TESTS ----------------------- #
###################################################
import pytest
def test_poly_expand():
ck = setup()
(G, g, h, o) = ck
p1 = [Bn(1), Bn(2)]
p2 = poly_expand(o, p1, 10)
assert len(p2) == 10
assert p2[:2] == p1
def test_poly_add():
ck = setup()
(G, g, h, o) = ck
p1 = [Bn(1), Bn(2)]
p2 = poly_add(o, p1, p1)
assert len(p2) == len(p1)
assert p2 == [2, 4]
def test_poly_mul():
ck = setup()
(G, g, h, o) = ck
p1 = [Bn(1), Bn(2)]
p2 = poly_mul(o, p1, p1)
assert p2 == [1, 4, 4]
def test_setup():
ck = setup()
def test_proof():
ck = setup()
(G, g, h, o) = ck
m, r = 1, o.random()
c = Com(ck, m, r)
ProveZeroOne(ck, c, m, r)
@pytest.mark.parametrize("input,expected", [
(1, True),
(0, True),
(2, False),
])
def test_verify(input,expected):
ck = setup()
(G, g, h, o) = ck
m, r = input, o.random()
c = Com(ck, m, r)
proof = ProveZeroOne(ck, c, m, r)
assert VerifyZeroOne(ck, c, proof) == expected
def test_prove_n():
ck = setup()
(G, g, h, o) = ck
c0 = Com(ck, 1, o.random())
c1 = Com(ck, 1, o.random())
c2 = Com(ck, 1, o.random())
c3 = Com(ck, 1, o.random())
r = o.random()
cr = Com(ck,0, r)
cis = [c0, c1, c2, c3, cr]
proof = ProveOneOfN(ck, cis, 4, r, message="Hello World!")
ret = VerifyOneOfN(ck, cis, proof, message="Hello World!")
assert ret
def notest_timing(upper=101):
ck = setup()
(G, g, h, o) = ck
c0 = Com(ck, 1, o.random())
r = o.random()
cr = Com(ck,0, r)
    import time
    # time.clock() was removed in Python 3.8; prefer perf_counter when available
    clock = getattr(time, "perf_counter", None) or time.clock
    repeats = 10
all_sizes = range(10, upper, 10)
prove_time = []
verify_time = []
for size in all_sizes:
cis = [c0] * (size + 1) + [cr]
        t0 = clock()
for _ in range(repeats):
proof = ProveOneOfN(ck, cis, len(cis)-1, r, message="Hello World!")
        t1 = clock()
dt = (t1-t0) / repeats
prove_time += [ dt ]
print( "Proof time: %s - %2.4f" % (size, dt) )
        t0 = clock()
for _ in range(repeats):
ret = VerifyOneOfN(ck, cis, proof, message="Hello World!")
assert ret
        t1 = clock()
dt = (t1-t0) / repeats
verify_time += [ dt ]
print( "Verify time: %s - %2.4f" % (size, dt) )
return all_sizes, prove_time, verify_time
if __name__ == "__main__":
import argparse
    parser = argparse.ArgumentParser(description='Test and time the GK15 one-out-of-many proofs.')
parser.add_argument('--time', action='store_true', help='Run timing tests')
parser.add_argument('--lprof', action='store_true', help='Run the line profiler')
parser.add_argument('--cprof', action='store_true', help='Run the c profiler')
parser.add_argument('--plot', action='store_true', help='Upload time plot to plotly')
args = parser.parse_args()
if args.time:
notest_timing(31)
if args.cprof:
import cProfile
cProfile.run("notest_timing(51)", sort="tottime")
if args.lprof:
from line_profiler import LineProfiler
profile = LineProfiler(VerifyOneOfN, ProveOneOfN, Bn.__init__, Bn.__del__)
profile.run("notest_timing(31)")
profile.print_stats()
if args.plot:
all_sizes, prove_time, verify_time = notest_timing()
import plotly.plotly as py
from plotly.graph_objs import *
trace0 = Scatter(
x=all_sizes,
y=prove_time,
name='Proving',
)
trace1 = Scatter(
x=all_sizes,
y=verify_time,
name='Verification',
)
data = Data([trace0, trace1])
layout = Layout(
title='Timing for GK15 Proof and Verification using petlib',
xaxis=XAxis(
title='Size of ring (no. commits)',
showgrid=False,
zeroline=False
),
yaxis=YAxis(
title='time (sec)',
showline=False
)
)
fig = Figure(data=data, layout=layout)
unique_url = py.plot(fig, filename = 'GK15-petlib-timing')
|
from rest_framework import serializers
from .models import CartoDBTable, GatewayType, Location, LocationRemapHistory
class CartoDBTableSerializer(serializers.ModelSerializer):
id = serializers.CharField(read_only=True)
class Meta:
model = CartoDBTable
fields = (
'id',
'domain',
'api_key',
'table_name',
'display_name',
'pcode_col',
'color',
'location_type',
'name_col'
)
class GatewayTypeSerializer(serializers.ModelSerializer):
class Meta:
model = GatewayType
fields = ('name', 'admin_level')
class LocationLightSerializer(serializers.ModelSerializer):
id = serializers.CharField(read_only=True)
name_display = serializers.CharField(source='__str__')
name = serializers.SerializerMethodField()
gateway = GatewayTypeSerializer()
class Meta:
model = Location
fields = (
'id',
'name',
'p_code',
'gateway',
'parent',
'name_display'
)
@staticmethod
def get_name(obj):
return '{}{}'.format(
str(obj),
" -- {}".format(obj.parent.name) if obj.parent else "",
)
class LocationSerializer(LocationLightSerializer):
geo_point = serializers.StringRelatedField()
class Meta(LocationLightSerializer.Meta):
model = Location
fields = LocationLightSerializer.Meta.fields + ('geo_point', )
class LocationExportSerializer(serializers.ModelSerializer):
name_display = serializers.CharField(source='__str__')
location_type = serializers.CharField(source='gateway.name')
geo_point = serializers.StringRelatedField()
point = serializers.StringRelatedField()
class Meta:
model = Location
fields = "__all__"
class LocationExportFlatSerializer(serializers.ModelSerializer):
name_display = serializers.CharField(source='__str__')
location_type = serializers.CharField(source='gateway.name')
geom = serializers.SerializerMethodField()
point = serializers.StringRelatedField()
class Meta:
model = Location
fields = "__all__"
def get_geom(self, obj):
return obj.geom.point_on_surface if obj.geom else ""
class LocationRemapHistorySerializer(serializers.ModelSerializer):
name = serializers.CharField(source='__str__')
class Meta:
model = LocationRemapHistory
fields = "__all__"
|
from __future__ import unicode_literals
import logging
import threading
import socket
import os.path
from .constants import *
class RoonDiscovery(threading.Thread):
"""Class to discover Roon Servers connected in the network."""
_exit = threading.Event()
_discovered_callback = None
def __init__(self, callback):
self._discovered_callback = callback
threading.Thread.__init__(self)
self.daemon = True
def run(self):
        ''' run discovery until a server is found '''
        while not self._exit.is_set():
host, port = self.first()
if host:
self._discovered_callback(host, port)
self.stop()
def stop(self):
self._exit.set()
    def all(self):
        """Scan and return all found entries as a list. Each server is a tuple of host,port."""
        return self._discover(first_only=False)
def first(self):
''' returns first server that is found'''
all_servers = self._discover(first_only=True)
return all_servers[0] if all_servers else (None, None)
    def _discover(self, first_only=False):
        """Broadcast a SOOD discovery message and collect responding Roon servers."""
this_dir = os.path.dirname(os.path.abspath(__file__))
sood_file = os.path.join(this_dir, ".soodmsg")
with open(sood_file) as f:
msg = f.read()
msg = msg.encode()
entries = []
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
sock.settimeout(5)
sock.bind(('', 0))
try:
sock.sendto(msg, ('<broadcast>', 9003))
            while not self._exit.is_set():
try:
data, server = sock.recvfrom(1024)
data = data.decode()
lines = []
for line in data.split("\n"):
lines.extend(line.split("\x04"))
if "SOOD" in lines[0] and len(lines) > 6 and "http_port" in lines[4]:
# match for Roon server found!
port = int(lines[5].encode("utf-8").strip())
host = server[0]
entries.append((host, port))
if first_only:
# we're only interested in the first server found
break
except socket.timeout:
break
        except Exception as exc:
            logging.getLogger(__name__).exception(exc)
finally:
sock.close()
return entries
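# Usage sketch (assumed, not from the original module):
#   def found(host, port):
#       print("Roon server at %s:%s" % (host, port))
#   RoonDiscovery(found).start()   # daemon thread; calls back once, then stops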
|
'''see also: https://github.com/python/cpython/blob/master/Lib/test/crashers
as well as https://wiki.python.org/moin/CrashingPython'''
import sys
from utils import print_file
print('oh come on, you knew running this was a bad idea')
print_file(__file__)
def recurse(n=0):
if n == 0:
print("yup, that's 0 alright")
return
recurse(n - 1)
try:
recurse(1000)
except RecursionError as e:
print(e)
print("Really? that wasn't even that big!")
print("ok let's do this for real")
# found this from trial and error, I think print calls some built-in functions?
overhead = 5
sys.setrecursionlimit(1000 + overhead)
recurse(1000)
print("that's what I'm talking about")
print("how high can we go?")
for i in range(10):
current = 10 ** i
print('trying', current)
sys.setrecursionlimit(current + overhead)
recurse(current)
|
from Bio.Align import MultipleSeqAlignment
from Bio import AlignIO
import u2py
u2py.initContext( './' )
inputAlignments = []
inputFile = open( '../../data/samples/CLUSTALW/COI_copy1.sto', 'rU' )
inputAlignments.append( AlignIO.read( inputFile, 'stockholm' ) )
inputFile.close( )
inputFile = open( '../../data/samples/CLUSTALW/HIV-1_copy1.sto', 'rU' )
inputAlignments.append( AlignIO.read( inputFile, 'stockholm' ) )
inputFile.close( )
scheme = u2py.Scheme( 'muscle', inputAlignments )
scheme.setElementAttribute( 'Write Alignment', 'document-format', 'stockholm' )
outputAlignments = scheme.launch( )
for aln in outputAlignments :
print aln
scheme.cleanUp( )
u2py.releaseContext( )
|
import os
from mlsploit_local import Job
from data import (
build_image_dataset,
get_or_create_dataset,
process_image,
recreate_image,
)
from defenses import DEFENSE_MAP
def main():
# Initialize the job, which will
# load and verify all input parameters
Job.initialize()
defense_name = Job.function
defense_options = dict(Job.options)
defense_class = DEFENSE_MAP[defense_name]
input_file_paths = list(map(lambda f: f.path, Job.input_files))
input_dataset, is_temp_dataset = get_or_create_dataset(input_file_paths)
output_dataset = build_image_dataset(
Job.make_output_filepath(input_dataset.path.name)
)
for item in input_dataset:
input_image = recreate_image(item.data)
output_image = defense_class.apply(input_image, **defense_options)
output_image_arr = process_image(output_image)
output_dataset.add_item(
name=item.name, data=output_image_arr, label=item.label, prediction=-1
)
output_item = output_dataset[0]
output_image = recreate_image(output_item.data)
output_image_path = Job.make_output_filepath(output_item.name)
output_image.save(output_image_path)
Job.add_output_file(str(output_dataset.path), is_extra=True)
Job.add_output_file(
output_image_path, is_modified=True, tags={"mlsploit-visualize": "image"}
)
Job.commit_output()
if is_temp_dataset:
os.remove(input_dataset.path)
if __name__ == "__main__":
main()
|
from __future__ import print_function, division, absolute_import
from llvm.core import Type, Constant
import llvm.core as lc
import llvm.ee as le
from llvm import LLVMException
from numba.config import PYVERSION
import numba.ctypes_support as ctypes
from numba import types, utils, cgutils, _helperlib, assume
_PyNone = ctypes.c_ssize_t(id(None))
class NativeError(RuntimeError):
pass
@utils.runonce
def fix_python_api():
"""
Execute once to install special symbols into the LLVM symbol table
"""
le.dylib_add_symbol("Py_None", ctypes.addressof(_PyNone))
le.dylib_add_symbol("numba_native_error", id(NativeError))
# Add C helper functions
c_helpers = _helperlib.c_helpers
for py_name in c_helpers:
c_name = "numba_" + py_name
c_address = c_helpers[py_name]
le.dylib_add_symbol(c_name, c_address)
# Add all built-in exception classes
for obj in utils.builtins.__dict__.values():
if isinstance(obj, type) and issubclass(obj, BaseException):
le.dylib_add_symbol("PyExc_%s" % (obj.__name__), id(obj))
class PythonAPI(object):
"""
Code generation facilities to call into the CPython C API (and related
helpers).
"""
def __init__(self, context, builder):
"""
        Note: may be called multiple times when lowering a function
"""
fix_python_api()
self.context = context
self.builder = builder
self.module = builder.basic_block.function.module
# Initialize types
self.pyobj = self.context.get_argument_type(types.pyobject)
self.voidptr = Type.pointer(Type.int(8))
self.long = Type.int(ctypes.sizeof(ctypes.c_long) * 8)
self.ulonglong = Type.int(ctypes.sizeof(ctypes.c_ulonglong) * 8)
self.longlong = self.ulonglong
self.double = Type.double()
self.py_ssize_t = self.context.get_value_type(types.intp)
self.cstring = Type.pointer(Type.int(8))
self.gil_state = Type.int(_helperlib.py_gil_state_size * 8)
# ------ Python API -----
#
# Basic object API
#
def incref(self, obj):
fnty = Type.function(Type.void(), [self.pyobj])
fn = self._get_function(fnty, name="Py_IncRef")
self.builder.call(fn, [obj])
def decref(self, obj):
fnty = Type.function(Type.void(), [self.pyobj])
fn = self._get_function(fnty, name="Py_DecRef")
self.builder.call(fn, [obj])
#
# Argument unpacking
#
def parse_tuple_and_keywords(self, args, kws, fmt, keywords, *objs):
charptr = Type.pointer(Type.int(8))
charptrary = Type.pointer(charptr)
argtypes = [self.pyobj, self.pyobj, charptr, charptrary]
fnty = Type.function(Type.int(), argtypes, var_arg=True)
fn = self._get_function(fnty, name="PyArg_ParseTupleAndKeywords")
return self.builder.call(fn, [args, kws, fmt, keywords] + list(objs))
def parse_tuple(self, args, fmt, *objs):
charptr = Type.pointer(Type.int(8))
argtypes = [self.pyobj, charptr]
fnty = Type.function(Type.int(), argtypes, var_arg=True)
fn = self._get_function(fnty, name="PyArg_ParseTuple")
return self.builder.call(fn, [args, fmt] + list(objs))
#
# Exception handling
#
def err_occurred(self):
fnty = Type.function(self.pyobj, ())
fn = self._get_function(fnty, name="PyErr_Occurred")
return self.builder.call(fn, ())
def err_clear(self):
fnty = Type.function(Type.void(), ())
fn = self._get_function(fnty, name="PyErr_Clear")
return self.builder.call(fn, ())
def err_set_string(self, exctype, msg):
fnty = Type.function(Type.void(), [self.pyobj, self.cstring])
fn = self._get_function(fnty, name="PyErr_SetString")
if isinstance(exctype, str):
exctype = self.get_c_object(exctype)
if isinstance(msg, str):
msg = self.context.insert_const_string(self.module, msg)
return self.builder.call(fn, (exctype, msg))
def err_set_object(self, exctype, excval):
fnty = Type.function(Type.void(), [self.pyobj, self.pyobj])
fn = self._get_function(fnty, name="PyErr_SetObject")
return self.builder.call(fn, (exctype, excval))
def raise_native_error(self, msg):
cstr = self.context.insert_const_string(self.module, msg)
self.err_set_string(self.native_error_type, cstr)
def raise_exception(self, exctype, excval):
# XXX This produces non-reusable bitcode: the pointer's value
# is specific to this process execution.
exctypeaddr = self.context.get_constant(types.intp, id(exctype))
excvaladdr = self.context.get_constant(types.intp, id(excval))
self.err_set_object(exctypeaddr.inttoptr(self.pyobj),
excvaladdr.inttoptr(self.pyobj))
def get_c_object(self, name):
"""
Get a Python object through its C-accessible *name*.
(e.g. "PyExc_ValueError").
"""
try:
gv = self.module.get_global_variable_named(name)
except LLVMException:
gv = self.module.add_global_variable(self.pyobj.pointee, name)
return gv
@property
def native_error_type(self):
return self.get_c_object("numba_native_error")
def raise_missing_global_error(self, name):
msg = "global name '%s' is not defined" % name
cstr = self.context.insert_const_string(self.module, msg)
self.err_set_string("PyExc_NameError", cstr)
def raise_missing_name_error(self, name):
msg = "name '%s' is not defined" % name
cstr = self.context.insert_const_string(self.module, msg)
self.err_set_string("PyExc_NameError", cstr)
#
# Concrete dict API
#
def dict_getitem_string(self, dic, name):
"""Returns a borrowed reference
"""
fnty = Type.function(self.pyobj, [self.pyobj, self.cstring])
fn = self._get_function(fnty, name="PyDict_GetItemString")
cstr = self.context.insert_const_string(self.module, name)
return self.builder.call(fn, [dic, cstr])
def dict_new(self, presize=0):
if presize == 0:
fnty = Type.function(self.pyobj, ())
fn = self._get_function(fnty, name="PyDict_New")
return self.builder.call(fn, ())
else:
fnty = Type.function(self.pyobj, [self.py_ssize_t])
fn = self._get_function(fnty, name="_PyDict_NewPresized")
return self.builder.call(fn,
[Constant.int(self.py_ssize_t, presize)])
def dict_setitem(self, dictobj, nameobj, valobj):
fnty = Type.function(Type.int(), (self.pyobj, self.pyobj,
self.pyobj))
fn = self._get_function(fnty, name="PyDict_SetItem")
return self.builder.call(fn, (dictobj, nameobj, valobj))
def dict_setitem_string(self, dictobj, name, valobj):
fnty = Type.function(Type.int(), (self.pyobj, self.cstring,
self.pyobj))
fn = self._get_function(fnty, name="PyDict_SetItemString")
cstr = self.context.insert_const_string(self.module, name)
return self.builder.call(fn, (dictobj, cstr, valobj))
def dict_pack(self, keyvalues):
"""
Args
-----
keyvalues: iterable of (str, llvm.Value of PyObject*)
"""
dictobj = self.dict_new()
not_null = cgutils.is_not_null(self.builder, dictobj)
with cgutils.if_likely(self.builder, not_null):
for k, v in keyvalues:
self.dict_setitem_string(dictobj, k, v)
return dictobj
#
# Concrete number APIs
#
def float_from_double(self, fval):
fnty = Type.function(self.pyobj, [self.double])
fn = self._get_function(fnty, name="PyFloat_FromDouble")
return self.builder.call(fn, [fval])
def number_as_ssize_t(self, numobj):
fnty = Type.function(self.py_ssize_t, [self.pyobj])
fn = self._get_function(fnty, name="PyNumber_AsSsize_t")
return self.builder.call(fn, [numobj])
def number_long(self, numobj):
fnty = Type.function(self.pyobj, [self.pyobj])
fn = self._get_function(fnty, name="PyNumber_Long")
return self.builder.call(fn, [numobj])
def long_as_ulonglong(self, numobj):
fnty = Type.function(self.ulonglong, [self.pyobj])
fn = self._get_function(fnty, name="PyLong_AsUnsignedLongLong")
return self.builder.call(fn, [numobj])
def long_as_longlong(self, numobj):
fnty = Type.function(self.ulonglong, [self.pyobj])
fn = self._get_function(fnty, name="PyLong_AsLongLong")
return self.builder.call(fn, [numobj])
def _long_from_native_int(self, ival, func_name, native_int_type,
signed):
fnty = Type.function(self.pyobj, [native_int_type])
fn = self._get_function(fnty, name=func_name)
resptr = cgutils.alloca_once(self.builder, self.pyobj)
if PYVERSION < (3, 0):
# Under Python 2, we try to return a PyInt object whenever
# the given number fits in a C long.
pyint_fnty = Type.function(self.pyobj, [self.long])
pyint_fn = self._get_function(pyint_fnty, name="PyInt_FromLong")
long_max = Constant.int(native_int_type, _helperlib.long_max)
if signed:
long_min = Constant.int(native_int_type, _helperlib.long_min)
use_pyint = self.builder.and_(
self.builder.icmp(lc.ICMP_SGE, ival, long_min),
self.builder.icmp(lc.ICMP_SLE, ival, long_max),
)
else:
use_pyint = self.builder.icmp(lc.ICMP_ULE, ival, long_max)
with cgutils.ifelse(self.builder, use_pyint) as (then, otherwise):
with then:
downcast_ival = self.builder.trunc(ival, self.long)
res = self.builder.call(pyint_fn, [downcast_ival])
self.builder.store(res, resptr)
with otherwise:
res = self.builder.call(fn, [ival])
self.builder.store(res, resptr)
else:
fn = self._get_function(fnty, name=func_name)
self.builder.store(self.builder.call(fn, [ival]), resptr)
return self.builder.load(resptr)
def long_from_long(self, ival):
if PYVERSION < (3, 0):
func_name = "PyInt_FromLong"
else:
func_name = "PyLong_FromLong"
fnty = Type.function(self.pyobj, [self.long])
fn = self._get_function(fnty, name=func_name)
return self.builder.call(fn, [ival])
def long_from_ssize_t(self, ival):
return self._long_from_native_int(ival, "PyLong_FromSsize_t",
self.py_ssize_t, signed=True)
def long_from_longlong(self, ival):
return self._long_from_native_int(ival, "PyLong_FromLongLong",
self.longlong, signed=True)
def long_from_ulonglong(self, ival):
return self._long_from_native_int(ival, "PyLong_FromUnsignedLongLong",
self.ulonglong, signed=False)
def _get_number_operator(self, name):
fnty = Type.function(self.pyobj, [self.pyobj, self.pyobj])
fn = self._get_function(fnty, name="PyNumber_%s" % name)
return fn
def _call_number_operator(self, name, lhs, rhs, inplace=False):
if inplace:
name = "InPlace" + name
fn = self._get_number_operator(name)
return self.builder.call(fn, [lhs, rhs])
def number_add(self, lhs, rhs, inplace=False):
return self._call_number_operator("Add", lhs, rhs, inplace=inplace)
def number_subtract(self, lhs, rhs, inplace=False):
return self._call_number_operator("Subtract", lhs, rhs, inplace=inplace)
def number_multiply(self, lhs, rhs, inplace=False):
return self._call_number_operator("Multiply", lhs, rhs, inplace=inplace)
def number_divide(self, lhs, rhs, inplace=False):
assert PYVERSION < (3, 0)
return self._call_number_operator("Divide", lhs, rhs, inplace=inplace)
def number_truedivide(self, lhs, rhs, inplace=False):
return self._call_number_operator("TrueDivide", lhs, rhs, inplace=inplace)
def number_floordivide(self, lhs, rhs, inplace=False):
return self._call_number_operator("FloorDivide", lhs, rhs, inplace=inplace)
def number_remainder(self, lhs, rhs, inplace=False):
return self._call_number_operator("Remainder", lhs, rhs, inplace=inplace)
def number_lshift(self, lhs, rhs, inplace=False):
return self._call_number_operator("Lshift", lhs, rhs, inplace=inplace)
def number_rshift(self, lhs, rhs, inplace=False):
return self._call_number_operator("Rshift", lhs, rhs, inplace=inplace)
def number_and(self, lhs, rhs, inplace=False):
return self._call_number_operator("And", lhs, rhs, inplace=inplace)
def number_or(self, lhs, rhs, inplace=False):
return self._call_number_operator("Or", lhs, rhs, inplace=inplace)
def number_xor(self, lhs, rhs, inplace=False):
return self._call_number_operator("Xor", lhs, rhs, inplace=inplace)
def number_power(self, lhs, rhs, inplace=False):
fnty = Type.function(self.pyobj, [self.pyobj] * 3)
fname = "PyNumber_InPlacePower" if inplace else "PyNumber_Power"
fn = self._get_function(fnty, fname)
return self.builder.call(fn, [lhs, rhs, self.borrow_none()])
def number_negative(self, obj):
fnty = Type.function(self.pyobj, [self.pyobj])
fn = self._get_function(fnty, name="PyNumber_Negative")
return self.builder.call(fn, (obj,))
def number_positive(self, obj):
fnty = Type.function(self.pyobj, [self.pyobj])
fn = self._get_function(fnty, name="PyNumber_Positive")
return self.builder.call(fn, (obj,))
def number_float(self, val):
fnty = Type.function(self.pyobj, [self.pyobj])
fn = self._get_function(fnty, name="PyNumber_Float")
return self.builder.call(fn, [val])
def number_invert(self, obj):
fnty = Type.function(self.pyobj, [self.pyobj])
fn = self._get_function(fnty, name="PyNumber_Invert")
return self.builder.call(fn, (obj,))
def float_as_double(self, fobj):
fnty = Type.function(self.double, [self.pyobj])
fn = self._get_function(fnty, name="PyFloat_AsDouble")
return self.builder.call(fn, [fobj])
def bool_from_bool(self, bval):
"""
Get a Python bool from a LLVM boolean.
"""
longval = self.builder.zext(bval, self.long)
return self.bool_from_long(longval)
def bool_from_long(self, ival):
fnty = Type.function(self.pyobj, [self.long])
fn = self._get_function(fnty, name="PyBool_FromLong")
return self.builder.call(fn, [ival])
def complex_from_doubles(self, realval, imagval):
fnty = Type.function(self.pyobj, [Type.double(), Type.double()])
fn = self._get_function(fnty, name="PyComplex_FromDoubles")
return self.builder.call(fn, [realval, imagval])
def complex_real_as_double(self, cobj):
fnty = Type.function(Type.double(), [self.pyobj])
fn = self._get_function(fnty, name="PyComplex_RealAsDouble")
return self.builder.call(fn, [cobj])
def complex_imag_as_double(self, cobj):
fnty = Type.function(Type.double(), [self.pyobj])
fn = self._get_function(fnty, name="PyComplex_ImagAsDouble")
return self.builder.call(fn, [cobj])
#
# List and sequence APIs
#
def sequence_getslice(self, obj, start, stop):
fnty = Type.function(self.pyobj, [self.pyobj, self.py_ssize_t,
self.py_ssize_t])
fn = self._get_function(fnty, name="PySequence_GetSlice")
return self.builder.call(fn, (obj, start, stop))
def sequence_tuple(self, obj):
fnty = Type.function(self.pyobj, [self.pyobj])
fn = self._get_function(fnty, name="PySequence_Tuple")
return self.builder.call(fn, [obj])
def list_new(self, szval):
fnty = Type.function(self.pyobj, [self.py_ssize_t])
fn = self._get_function(fnty, name="PyList_New")
return self.builder.call(fn, [szval])
def list_setitem(self, seq, idx, val):
"""
Warning: Steals reference to ``val``
"""
fnty = Type.function(Type.int(), [self.pyobj, self.py_ssize_t,
self.pyobj])
fn = self._get_function(fnty, name="PyList_SetItem")
return self.builder.call(fn, [seq, idx, val])
def list_getitem(self, lst, idx):
"""
Returns a borrowed reference.
"""
fnty = Type.function(self.pyobj, [self.pyobj, self.py_ssize_t])
fn = self._get_function(fnty, name="PyList_GetItem")
if isinstance(idx, int):
idx = self.context.get_constant(types.intp, idx)
return self.builder.call(fn, [lst, idx])
#
# Concrete tuple API
#
def tuple_getitem(self, tup, idx):
"""
        Returns a borrowed reference.
"""
fnty = Type.function(self.pyobj, [self.pyobj, self.py_ssize_t])
fn = self._get_function(fnty, name="PyTuple_GetItem")
idx = self.context.get_constant(types.intp, idx)
return self.builder.call(fn, [tup, idx])
def tuple_pack(self, items):
fnty = Type.function(self.pyobj, [self.py_ssize_t], var_arg=True)
fn = self._get_function(fnty, name="PyTuple_Pack")
n = self.context.get_constant(types.intp, len(items))
args = [n]
args.extend(items)
return self.builder.call(fn, args)
def tuple_size(self, tup):
fnty = Type.function(self.py_ssize_t, [self.pyobj])
fn = self._get_function(fnty, name="PyTuple_Size")
return self.builder.call(fn, [tup])
def tuple_new(self, count):
fnty = Type.function(self.pyobj, [Type.int()])
fn = self._get_function(fnty, name='PyTuple_New')
return self.builder.call(fn, [self.context.get_constant(types.int32,
count)])
def tuple_setitem(self, tuple_val, index, item):
"""
Steals a reference to `item`.
"""
fnty = Type.function(Type.int(), [self.pyobj, Type.int(), self.pyobj])
setitem_fn = self._get_function(fnty, name='PyTuple_SetItem')
index = self.context.get_constant(types.int32, index)
self.builder.call(setitem_fn, [tuple_val, index, item])
#
# Concrete set API
#
def set_new(self, iterable=None):
if iterable is None:
iterable = self.get_null_object()
fnty = Type.function(self.pyobj, [self.pyobj])
fn = self._get_function(fnty, name="PySet_New")
return self.builder.call(fn, [iterable])
def set_add(self, set, value):
fnty = Type.function(Type.int(), [self.pyobj, self.pyobj])
fn = self._get_function(fnty, name="PySet_Add")
return self.builder.call(fn, [set, value])
#
# GIL APIs
#
def gil_ensure(self):
"""
Ensure the GIL is acquired.
The returned value must be consumed by gil_release().
"""
gilptrty = Type.pointer(self.gil_state)
fnty = Type.function(Type.void(), [gilptrty])
fn = self._get_function(fnty, "numba_gil_ensure")
gilptr = cgutils.alloca_once(self.builder, self.gil_state)
self.builder.call(fn, [gilptr])
return gilptr
def gil_release(self, gil):
"""
        Release the GIL acquired by gil_ensure().
        Must be paired with a gil_ensure() call.
"""
gilptrty = Type.pointer(self.gil_state)
fnty = Type.function(Type.void(), [gilptrty])
fn = self._get_function(fnty, "numba_gil_release")
return self.builder.call(fn, [gil])
#
# Other APIs (organize them better!)
#
def import_module_noblock(self, modname):
fnty = Type.function(self.pyobj, [self.cstring])
fn = self._get_function(fnty, name="PyImport_ImportModuleNoBlock")
return self.builder.call(fn, [modname])
def call_function_objargs(self, callee, objargs):
fnty = Type.function(self.pyobj, [self.pyobj], var_arg=True)
fn = self._get_function(fnty, name="PyObject_CallFunctionObjArgs")
args = [callee] + list(objargs)
args.append(self.context.get_constant_null(types.pyobject))
return self.builder.call(fn, args)
def call(self, callee, args, kws):
fnty = Type.function(self.pyobj, [self.pyobj] * 3)
fn = self._get_function(fnty, name="PyObject_Call")
return self.builder.call(fn, (callee, args, kws))
def object_istrue(self, obj):
fnty = Type.function(Type.int(), [self.pyobj])
fn = self._get_function(fnty, name="PyObject_IsTrue")
return self.builder.call(fn, [obj])
def object_not(self, obj):
fnty = Type.function(Type.int(), [self.pyobj])
fn = self._get_function(fnty, name="PyObject_Not")
return self.builder.call(fn, [obj])
def object_richcompare(self, lhs, rhs, opstr):
"""
Refer to Python source Include/object.h for macros definition
of the opid.
"""
ops = ['<', '<=', '==', '!=', '>', '>=']
opid = ops.index(opstr)
assert 0 <= opid < len(ops)
fnty = Type.function(self.pyobj, [self.pyobj, self.pyobj, Type.int()])
fn = self._get_function(fnty, name="PyObject_RichCompare")
lopid = self.context.get_constant(types.int32, opid)
return self.builder.call(fn, (lhs, rhs, lopid))
def iter_next(self, iterobj):
fnty = Type.function(self.pyobj, [self.pyobj])
fn = self._get_function(fnty, name="PyIter_Next")
return self.builder.call(fn, [iterobj])
def object_getiter(self, obj):
fnty = Type.function(self.pyobj, [self.pyobj])
fn = self._get_function(fnty, name="PyObject_GetIter")
return self.builder.call(fn, [obj])
def object_getattr_string(self, obj, attr):
cstr = self.context.insert_const_string(self.module, attr)
fnty = Type.function(self.pyobj, [self.pyobj, self.cstring])
fn = self._get_function(fnty, name="PyObject_GetAttrString")
return self.builder.call(fn, [obj, cstr])
def object_setattr_string(self, obj, attr, val):
cstr = self.context.insert_const_string(self.module, attr)
fnty = Type.function(Type.int(), [self.pyobj, self.cstring, self.pyobj])
fn = self._get_function(fnty, name="PyObject_SetAttrString")
return self.builder.call(fn, [obj, cstr, val])
def object_delattr_string(self, obj, attr):
# PyObject_DelAttrString() is actually a C macro calling
# PyObject_SetAttrString() with value == NULL.
return self.object_setattr_string(obj, attr, self.get_null_object())
def object_getitem(self, obj, key):
fnty = Type.function(self.pyobj, [self.pyobj, self.pyobj])
fn = self._get_function(fnty, name="PyObject_GetItem")
return self.builder.call(fn, (obj, key))
def object_setitem(self, obj, key, val):
fnty = Type.function(Type.int(), [self.pyobj, self.pyobj, self.pyobj])
fn = self._get_function(fnty, name="PyObject_SetItem")
return self.builder.call(fn, (obj, key, val))
def string_as_string(self, strobj):
fnty = Type.function(self.cstring, [self.pyobj])
if PYVERSION >= (3, 0):
fname = "PyUnicode_AsUTF8"
else:
fname = "PyString_AsString"
fn = self._get_function(fnty, name=fname)
return self.builder.call(fn, [strobj])
def string_from_string_and_size(self, string, size):
fnty = Type.function(self.pyobj, [self.cstring, self.py_ssize_t])
if PYVERSION >= (3, 0):
fname = "PyUnicode_FromStringAndSize"
else:
fname = "PyString_FromStringAndSize"
fn = self._get_function(fnty, name=fname)
return self.builder.call(fn, [string, size])
def bytes_from_string_and_size(self, string, size):
fnty = Type.function(self.pyobj, [self.cstring, self.py_ssize_t])
if PYVERSION >= (3, 0):
fname = "PyBytes_FromStringAndSize"
else:
fname = "PyString_FromStringAndSize"
fn = self._get_function(fnty, name=fname)
return self.builder.call(fn, [string, size])
def object_str(self, obj):
fnty = Type.function(self.pyobj, [self.pyobj])
fn = self._get_function(fnty, name="PyObject_Str")
return self.builder.call(fn, [obj])
def make_none(self):
obj = self._get_object("Py_None")
self.incref(obj)
return obj
def borrow_none(self):
obj = self._get_object("Py_None")
return obj
def sys_write_stdout(self, fmt, *args):
fnty = Type.function(Type.void(), [self.cstring], var_arg=True)
fn = self._get_function(fnty, name="PySys_WriteStdout")
return self.builder.call(fn, (fmt,) + args)
def object_dump(self, obj):
"""
Dump a Python object on C stderr. For debugging purposes.
"""
fnty = Type.function(Type.void(), [self.pyobj])
fn = self._get_function(fnty, name="_PyObject_Dump")
return self.builder.call(fn, (obj,))
# ------ utils -----
def _get_object(self, name):
try:
gv = self.module.get_global_variable_named(name)
except LLVMException:
gv = self.module.add_global_variable(self.pyobj, name)
return self.builder.load(gv)
def _get_function(self, fnty, name):
return self.module.get_or_insert_function(fnty, name=name)
def alloca_obj(self):
return self.builder.alloca(self.pyobj)
def print_object(self, obj):
strobj = self.object_str(obj)
cstr = self.string_as_string(strobj)
fmt = self.context.insert_const_string(self.module, "%s")
self.sys_write_stdout(fmt, cstr)
self.decref(strobj)
def print_string(self, text):
fmt = self.context.insert_const_string(self.module, text)
self.sys_write_stdout(fmt)
def get_null_object(self):
return Constant.null(self.pyobj)
def return_none(self):
none = self.make_none()
self.builder.ret(none)
def list_pack(self, items):
n = len(items)
seq = self.list_new(self.context.get_constant(types.intp, n))
not_null = cgutils.is_not_null(self.builder, seq)
with cgutils.if_likely(self.builder, not_null):
for i in range(n):
idx = self.context.get_constant(types.intp, i)
self.incref(items[i])
self.list_setitem(seq, idx, items[i])
return seq
def to_native_arg(self, obj, typ):
if isinstance(typ, types.Record):
# Generate a dummy integer type that has the size of Py_buffer
dummy_py_buffer_type = Type.int(_helperlib.py_buffer_size * 8)
# Allocate the Py_buffer
py_buffer = cgutils.alloca_once(self.builder, dummy_py_buffer_type)
            # Zero-fill the py_buffer; when the obj field in Py_buffer is NULL,
            # PyBuffer_Release has no effect.
zeroed_buffer = lc.Constant.null(dummy_py_buffer_type)
self.builder.store(zeroed_buffer, py_buffer)
buf_as_voidptr = self.builder.bitcast(py_buffer, self.voidptr)
ptr = self.extract_record_data(obj, buf_as_voidptr)
with cgutils.if_unlikely(self.builder,
cgutils.is_null(self.builder, ptr)):
self.builder.ret(ptr)
ltyp = self.context.get_value_type(typ)
val = cgutils.init_record_by_ptr(self.builder, ltyp, ptr)
def dtor():
self.release_record_buffer(buf_as_voidptr)
else:
val = self.to_native_value(obj, typ)
def dtor():
pass
return val, dtor
def to_native_value(self, obj, typ):
if isinstance(typ, types.Object) or typ == types.pyobject:
return obj
elif typ == types.boolean:
istrue = self.object_istrue(obj)
zero = Constant.null(istrue.type)
return self.builder.icmp(lc.ICMP_NE, istrue, zero)
elif typ in types.unsigned_domain:
longobj = self.number_long(obj)
ullval = self.long_as_ulonglong(longobj)
self.decref(longobj)
return self.builder.trunc(ullval,
self.context.get_argument_type(typ))
elif typ in types.signed_domain:
longobj = self.number_long(obj)
llval = self.long_as_longlong(longobj)
self.decref(longobj)
return self.builder.trunc(llval,
self.context.get_argument_type(typ))
elif typ == types.float32:
fobj = self.number_float(obj)
fval = self.float_as_double(fobj)
self.decref(fobj)
return self.builder.fptrunc(fval,
self.context.get_argument_type(typ))
elif typ == types.float64:
fobj = self.number_float(obj)
fval = self.float_as_double(fobj)
self.decref(fobj)
return fval
elif typ in (types.complex128, types.complex64):
cplxcls = self.context.make_complex(types.complex128)
cplx = cplxcls(self.context, self.builder)
pcplx = cplx._getpointer()
ok = self.complex_adaptor(obj, pcplx)
failed = cgutils.is_false(self.builder, ok)
with cgutils.if_unlikely(self.builder, failed):
self.builder.ret(self.get_null_object())
if typ == types.complex64:
c64cls = self.context.make_complex(typ)
c64 = c64cls(self.context, self.builder)
freal = self.context.cast(self.builder, cplx.real,
types.float64, types.float32)
fimag = self.context.cast(self.builder, cplx.imag,
types.float64, types.float32)
c64.real = freal
c64.imag = fimag
return c64._getvalue()
else:
return cplx._getvalue()
elif isinstance(typ, types.NPDatetime):
val = self.extract_np_datetime(obj)
return val
elif isinstance(typ, types.NPTimedelta):
val = self.extract_np_timedelta(obj)
return val
elif isinstance(typ, types.Array):
return self.to_native_array(typ, obj)
elif isinstance(typ, types.Optional):
isnone = self.builder.icmp(lc.ICMP_EQ, obj, self.borrow_none())
with cgutils.ifelse(self.builder, isnone) as (then, orelse):
with then:
noneval = self.context.make_optional_none(self.builder, typ.type)
ret = cgutils.alloca_once(self.builder, noneval.type)
self.builder.store(noneval, ret)
with orelse:
val = self.to_native_value(obj, typ.type)
just = self.context.make_optional_value(self.builder,
typ.type, val)
self.builder.store(just, ret)
return ret
raise NotImplementedError(typ)
def from_native_return(self, val, typ):
return self.from_native_value(val, typ)
def from_native_value(self, val, typ):
if typ == types.pyobject:
return val
elif typ == types.boolean:
longval = self.builder.zext(val, self.long)
return self.bool_from_long(longval)
elif typ in types.unsigned_domain:
ullval = self.builder.zext(val, self.ulonglong)
return self.long_from_ulonglong(ullval)
elif typ in types.signed_domain:
ival = self.builder.sext(val, self.longlong)
return self.long_from_longlong(ival)
elif typ == types.float32:
dbval = self.builder.fpext(val, self.double)
return self.float_from_double(dbval)
elif typ == types.float64:
return self.float_from_double(val)
elif typ == types.complex128:
cmplxcls = self.context.make_complex(typ)
cval = cmplxcls(self.context, self.builder, value=val)
return self.complex_from_doubles(cval.real, cval.imag)
elif typ == types.complex64:
cmplxcls = self.context.make_complex(typ)
cval = cmplxcls(self.context, self.builder, value=val)
freal = self.context.cast(self.builder, cval.real,
types.float32, types.float64)
fimag = self.context.cast(self.builder, cval.imag,
types.float32, types.float64)
return self.complex_from_doubles(freal, fimag)
elif isinstance(typ, types.NPDatetime):
return self.create_np_datetime(val, typ.unit_code)
elif isinstance(typ, types.NPTimedelta):
return self.create_np_timedelta(val, typ.unit_code)
elif typ == types.none:
ret = self.make_none()
return ret
elif isinstance(typ, types.Optional):
return self.from_native_return(val, typ.type)
elif isinstance(typ, types.Array):
return self.from_native_array(typ, val)
elif isinstance(typ, types.Record):
# Note we will create a copy of the record
# This is the only safe way.
pdata = cgutils.get_record_data(self.builder, val)
size = Constant.int(Type.int(), pdata.type.pointee.count)
ptr = self.builder.bitcast(pdata, Type.pointer(Type.int(8)))
# Note: this will only work for CPU mode
# The following requires access to python object
dtype_addr = Constant.int(self.py_ssize_t, id(typ.dtype))
dtypeobj = dtype_addr.inttoptr(self.pyobj)
return self.recreate_record(ptr, size, dtypeobj)
elif isinstance(typ, (types.Tuple, types.UniTuple)):
return self.from_tuple(typ, val)
raise NotImplementedError(typ)
def to_native_array(self, typ, ary):
# TODO check matching dtype.
# currently, mismatching dtype will still work and causes
# potential memory corruption
voidptr = Type.pointer(Type.int(8))
nativearycls = self.context.make_array(typ)
nativeary = nativearycls(self.context, self.builder)
aryptr = nativeary._getpointer()
ptr = self.builder.bitcast(aryptr, voidptr)
errcode = self.numba_array_adaptor(ary, ptr)
failed = cgutils.is_not_null(self.builder, errcode)
with cgutils.if_unlikely(self.builder, failed):
# TODO
self.builder.unreachable()
return self.builder.load(aryptr)
def from_native_array(self, typ, ary):
assert assume.return_argument_array_only
nativearycls = self.context.make_array(typ)
nativeary = nativearycls(self.context, self.builder, value=ary)
parent = nativeary.parent
self.incref(parent)
return parent
def from_tuple(self, typ, val):
tuple_val = self.tuple_new(typ.count)
for i, dtype in enumerate(typ):
item = self.builder.extract_value(val, i)
obj = self.from_native_value(item, dtype)
self.tuple_setitem(tuple_val, i, obj)
return tuple_val
def numba_array_adaptor(self, ary, ptr):
voidptr = Type.pointer(Type.int(8))
fnty = Type.function(Type.int(), [self.pyobj, voidptr])
fn = self._get_function(fnty, name="numba_adapt_ndarray")
fn.args[0].add_attribute(lc.ATTR_NO_CAPTURE)
fn.args[1].add_attribute(lc.ATTR_NO_CAPTURE)
return self.builder.call(fn, (ary, ptr))
def complex_adaptor(self, cobj, cmplx):
fnty = Type.function(Type.int(), [self.pyobj, cmplx.type])
fn = self._get_function(fnty, name="numba_complex_adaptor")
return self.builder.call(fn, [cobj, cmplx])
def extract_record_data(self, obj, pbuf):
fnty = Type.function(self.voidptr, [self.pyobj,
self.voidptr])
fn = self._get_function(fnty, name="numba_extract_record_data")
return self.builder.call(fn, [obj, pbuf])
def release_record_buffer(self, pbuf):
fnty = Type.function(Type.void(), [self.voidptr])
fn = self._get_function(fnty, name="numba_release_record_buffer")
return self.builder.call(fn, [pbuf])
def extract_np_datetime(self, obj):
fnty = Type.function(Type.int(64), [self.pyobj])
fn = self._get_function(fnty, name="numba_extract_np_datetime")
return self.builder.call(fn, [obj])
def extract_np_timedelta(self, obj):
fnty = Type.function(Type.int(64), [self.pyobj])
fn = self._get_function(fnty, name="numba_extract_np_timedelta")
return self.builder.call(fn, [obj])
def create_np_datetime(self, val, unit_code):
unit_code = Constant.int(Type.int(), unit_code)
fnty = Type.function(self.pyobj, [Type.int(64), Type.int()])
fn = self._get_function(fnty, name="numba_create_np_datetime")
return self.builder.call(fn, [val, unit_code])
def create_np_timedelta(self, val, unit_code):
unit_code = Constant.int(Type.int(), unit_code)
fnty = Type.function(self.pyobj, [Type.int(64), Type.int()])
fn = self._get_function(fnty, name="numba_create_np_timedelta")
return self.builder.call(fn, [val, unit_code])
def recreate_record(self, pdata, size, dtypeaddr):
fnty = Type.function(self.pyobj, [Type.pointer(Type.int(8)),
Type.int(), self.pyobj])
fn = self._get_function(fnty, name="numba_recreate_record")
return self.builder.call(fn, [pdata, size, dtypeaddr])
def string_from_constant_string(self, string):
cstr = self.context.insert_const_string(self.module, string)
sz = self.context.get_constant(types.intp, len(string))
return self.string_from_string_and_size(cstr, sz)
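# Sketch of the boxing/unboxing round trip these helpers implement
# (illustrative, not part of the original module):
#   native = api.to_native_value(obj, types.float64)       # PyFloat -> double
#   boxed  = api.from_native_value(native, types.float64)  # double -> PyFloat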
|
"""
pyifc.compress._pack
--------------------
Functions for packing .ifc files.
"""
import os
import pathlib
import tarfile
from zipfile import ZipFile
from pyifc._utils import timeit
from pyifc.compress._compress import compress
from pyifc.compress._validators import existence_validator, extension_validator
def _common(input_filepath, output_dir, output_filename, ext):
"""Common tasks to be performed before compressing and archiving.
Tasks include:
- validation of extensions
- validation of paths
- creation of new paths and filenames
Args:
input_filepath (str): path to the .ifc file to be compressed and
archived.
output_dir (str): path to output directory, where archived file will be
saved.
output_filename (str): filename with `ext` extension.
ext (str): extension that filename should have.
Returns:
tuple[str, str]: tuple of path to the compressed file and the path to
the archived file.
"""
extension_validator(
filepath=output_filename,
extension=ext,
variable="output_filename",
)
input_filename = pathlib.PurePath(input_filepath).name
extension_validator(
filepath=input_filepath, extension=".ifc", variable="input_filepath"
)
existence_validator(input_filepath)
existence_validator(output_dir)
    # str.rstrip(".ifc") strips characters, not the suffix, so use splitext
    compressed_filename = (
        os.path.splitext(input_filename)[0] + "_compressed.ifc"
    )
compressed_filepath = compress(
input_filepath, output_dir, compressed_filename
)
output_filepath = os.path.abspath(os.path.join(output_dir, output_filename))
return compressed_filepath, output_filepath
@timeit
def compress_and_tar(input_filepath, output_dir, output_filename):
"""Compress and write file to .tar.gz archive.
Args:
input_filepath (str): path to the .ifc file to be compressed and
archived.
output_dir (str): path to the output directory, where archived file
will be saved.
output_filename (str): filename with .tar.gz extension.
Returns:
str: path to the archived file.
"""
input_filepath = os.path.abspath(input_filepath)
compressed_filepath, output_filepath = _common(
input_filepath=input_filepath,
output_dir=output_dir,
output_filename=output_filename,
ext=".tar.gz",
)
with tarfile.open(output_filepath, "w:gz") as tar:
tar.add(compressed_filepath)
print(
f"Successfully compressed and archived "
f"{os.path.abspath(input_filepath)} to "
f"{os.path.abspath(output_filepath)}"
)
os.remove(compressed_filepath)
return output_filepath
@timeit
def compress_and_zip(input_filepath, output_dir, output_filename):
"""Compress and write file to .zip archive.
Args:
input_filepath (str): path to the .ifc file to be compressed and
archived.
output_dir (str): path to the output directory, where archived file
will be saved.
output_filename (str): filename with .zip extension.
Returns:
str: path to the archived file.
"""
input_filepath = os.path.abspath(input_filepath)
compressed_filepath, output_filepath = _common(
input_filepath=input_filepath,
output_dir=output_dir,
output_filename=output_filename,
ext=".zip",
)
with ZipFile(output_filepath, "w") as zip_file:
zip_file.write(compressed_filepath)
print(
f"Successfully compressed and archived "
f"{os.path.abspath(input_filepath)} to "
f"{os.path.abspath(output_filepath)}"
)
os.remove(compressed_filepath)
return output_filepath
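# Usage sketch (hypothetical paths):
#   compress_and_tar("model.ifc", "out", "model.tar.gz")
#   compress_and_zip("model.ifc", "out", "model.zip")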
|
from pytest import raises
from pydantic import BaseModel, ValidationError
from pydantic_jsonapi import JsonApiResponse
from tests.helpers import ItemModel
class TestJsonApiResponse:
def test_attributes_as_dict(self):
MyResponse = JsonApiResponse('item', dict)
obj_to_validate = {
'data': {'id': '123', 'type': 'item', 'attributes': {}},
'included': [{'id': '456', 'type': 'not-an-item', 'attributes': {}}]
}
my_request_obj = MyResponse(**obj_to_validate)
assert my_request_obj.dict() == {
'data': {
'id': '123',
'type': 'item',
'attributes': {},
},
'included': [{
'id': '456',
'type': 'not-an-item',
'attributes': {}
}]
}
def test_attributes_as_item_model(self):
ItemResponse = JsonApiResponse('item', ItemModel)
obj_to_validate = {
'data': {
'id': '123',
'type': 'item',
'attributes': {
'name': 'apple',
'quantity': 10,
'price': 1.20
},
'relationships': {
'store': {
'links': {
'related': '/stores/123',
},
},
},
}
}
my_request_obj = ItemResponse(**obj_to_validate)
assert my_request_obj.dict() == {
'data': {
'id': '123',
'type': 'item',
'attributes': {
'name': 'apple',
'quantity': 10,
'price': 1.20,
},
'relationships': {
'store': {
'links': {
'related': '/stores/123',
},
},
},
},
}
def test_list_item_model(self):
ItemResponse = JsonApiResponse('item', ItemModel, use_list=True)
obj_to_validate = {
'data': [
{
'id': '123',
'type': 'item',
'attributes': {
'name': 'apple',
'quantity': 10,
'price': 1.20
},
},
{
'id': '321',
'type': 'item',
'attributes': {
'name': 'banana',
'quantity': 20,
'price': 2.34
},
},
],
}
my_request_obj = ItemResponse(**obj_to_validate)
assert my_request_obj.dict() == {
'data': [
{
'id': '123',
'type': 'item',
'attributes': {
'name': 'apple',
'quantity': 10,
'price': 1.20,
},
},
{
'id': '321',
'type': 'item',
'attributes': {
'name': 'banana',
'quantity': 20,
'price': 2.34,
},
},
],
}
def test_type_invalid_string(self):
MyResponse = JsonApiResponse('item', dict)
obj_to_validate = {
'data': {'id': '123', 'type': 'not_an_item', 'attributes': {}}
}
with raises(ValidationError) as e:
MyResponse(**obj_to_validate)
assert e.value.errors() == [
{
'loc': ('data', 'type'),
'msg': "unexpected value; permitted: 'item'",
'type': 'value_error.const',
'ctx': {'given': 'not_an_item', 'permitted': ('item',)},
},
]
def test_attributes_required(self):
ItemResponse = JsonApiResponse('item', ItemModel)
obj_to_validate = {
'data': {'id': '123', 'type': 'item', 'attributes': None}
}
with raises(ValidationError) as e:
ItemResponse(**obj_to_validate)
assert e.value.errors() == [
{
'loc': ('data', 'attributes'),
'msg': 'value is not a valid dict',
'type': 'type_error.dict',
},
]
def test_attributes_as_item_model__empty_dict(self):
ItemResponse = JsonApiResponse('item', ItemModel)
obj_to_validate = {
'data': {
'id': '123',
'type': 'item',
'attributes': {}
}
}
with raises(ValidationError) as e:
ItemResponse(**obj_to_validate)
assert e.value.errors() == [
{'loc': ('data', 'attributes', 'name'), 'msg': 'field required', 'type': 'value_error.missing'},
{'loc': ('data', 'attributes', 'quantity'), 'msg': 'field required', 'type': 'value_error.missing'},
{'loc': ('data', 'attributes', 'price'), 'msg': 'field required', 'type': 'value_error.missing'},
]
|
from django.conf.urls import include
from django.conf.urls import url
urlpatterns = [url(r"^api/morango/v1/", include("morango.api.urls"))]
|
import re
import ast
from setuptools import setup
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('markpy/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
setup(
name='markpy',
version=version,
url='https://github.com/zeaphoo/markpy/',
license='MIT',
author='Wei Zhuo',
author_email='zeaphoo@qq.com',
    description='A static type compiler for Python',
    long_description='A programming style for writing statically typed programs in Python, and a compiler for them',
packages=['markpy'],
include_package_data=False,
zip_safe=False,
platforms='any',
install_requires=[
'sh',
'parso',
'click',
'basepy'
],
extras_require={
'dev': [
'pytest>=3',
],
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Software Development :: Libraries :: Python Modules'
],
entry_points='''
[console_scripts]
markpy=markpy.main:markpy
'''
)
|
import pandas as pd
import pickle
# Deserialize your dictionary using the load function of pickle
deserialized_dict = pickle.load( open("nested_population_dict.pkl", "rb") )
# Get the dataframe of data related to Sweden
sweden_population_df = deserialized_dict['Sweden']
# Show the data
print(sweden_population_df)
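# The counterpart serialization step, for reference (a sketch; it assumes the
# pickle holds a plain dict mapping country names to DataFrames, which is what
# the load above relies on):
#
# with open("nested_population_dict.pkl", "wb") as f:
#     pickle.dump({'Sweden': sweden_population_df}, f)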
|
"""
file : diffusion_models.py
implemetation of 'steps' in file diffusion.py
each function describe here takes a set of
parameters and update some of it
"""
import numpy as np
def infinite_surface(x, y, vx, vy, dt):
    '''
    A bunch of particles on an infinite plane surface,
    modelled with the Euler-Maruyama method.
    '''
    gamma = 1.0
    # Euler-Maruyama: deterministic drag enters with dt,
    # the random kicks with sqrt(dt)
    vx = vx - gamma*vx*dt + 500*np.random.randn(len(vx))*np.sqrt(dt)
    vy = vy - gamma*vy*dt + 500*np.random.randn(len(vy))*np.sqrt(dt)
    x = x + vx*dt
    y = y + vy*dt
    return (x, y, vx, vy)
def surface_with_B(x, y, vx, vy, dt):
    '''
    A bunch of particles on an infinite plane surface.
    A magnetic field is applied perpendicular to the plane.
    Modelled with the Euler-Maruyama method.
    '''
    gamma = 1.0
    eB = 0.01
    # the Lorentz coupling is a deterministic force, so it also scales with dt
    vx = vx - gamma*vx*dt - eB*vy*dt + 600*np.random.randn(len(vx))*np.sqrt(dt)
    vy = vy - gamma*vy*dt + eB*vx*dt + 600*np.random.randn(len(vy))*np.sqrt(dt)
    x = x + vx*dt
    y = y + vy*dt
    return (x, y, vx, vy)
def harmonic_dish(x, y, vx, vy, dt):
    '''
    A bunch of particles put in a harmonic dish.
    Modelled with the Euler-Maruyama method.
    '''
    gamma = 1.0
    k = 0.1
    # the harmonic restoring force -k*r is deterministic, so it scales with dt
    vx = vx - gamma*vx*dt - k*x*dt + 600*np.random.randn(len(vx))*np.sqrt(dt)
    vy = vy - gamma*vy*dt - k*y*dt + 600*np.random.randn(len(vy))*np.sqrt(dt)
    x = x + vx*dt
    y = y + vy*dt
    return (x, y, vx, vy)
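# A minimal driver loop showing how these step functions are meant to be used
# (a sketch; the particle count, time step and step count are arbitrary
# choices, not values taken from diffusion.py):
if __name__ == '__main__':
    n, dt, steps = 100, 1e-3, 1000
    x, y = np.zeros(n), np.zeros(n)
    vx, vy = np.zeros(n), np.zeros(n)
    for _ in range(steps):
        x, y, vx, vy = infinite_surface(x, y, vx, vy, dt)
    # mean squared displacement as a quick sanity check
    print(np.mean(x**2 + y**2))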
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
doc = ''  # the generated HTML is accumulated into this string
doc+='''<!DOCTYPE html>\n<html>\n<head>\n    '''
try: doc+=str(incluir(data,"head"))
except Exception as e: doc+=str(e)
doc+='''\n \n</head>\n<body>\n<link rel="stylesheet" type="text/css" href="'''
try: doc+=str(config.base_url)
except Exception as e: doc+=str(e)
doc+='''Components/css/Components.css">\n<section class="Components" style="padding: 10px;height: 500px;width:800px">\n \n</section>\n<script type="text/javascript" src="'''
try: doc+=str(config.base_url)
except Exception as e: doc+=str(e)
doc+='''/static/js/python/__javascript__/divi.js"></script>\n</body>\n</html>'''
|
class Persona:
def __init__(self,id,nombre,apellido,fechan,sexo,nombre_us,contraseña,especialidad,telefono,tipo):
self.id = id
self.nombre = nombre
self.apellido = apellido
self.fechan = fechan
self.sexo = sexo
self.nombre_us = nombre_us
self.contraseña = contraseña
self.especialidad = especialidad
self.telefono = telefono
self.tipo = tipo
    # GET methods
def getId(self):
return self.id
def getNombre(self):
return self.nombre
def getApellido(self):
return self.apellido
def getFechan(self):
return self.fechan
def getSexo(self):
return self.sexo
def getNombre_us(self):
return self.nombre_us
def getContraseña(self):
return self.contraseña
def getEspecialidad(self):
return self.especialidad
def getTelefono(self):
return self.telefono
def getTipo(self):
return self.tipo
    # SET methods
    def setId(self, id):
        self.id = id
    def setNombre(self, nombre):
        self.nombre = nombre
    def setApellido(self, apellido):
        self.apellido = apellido
    def setFechan(self, fechan):
        self.fechan = fechan
    def setSexo(self, sexo):
        self.sexo = sexo
    def setNombre_us(self, nombre_us):
        self.nombre_us = nombre_us
    def setContraseña(self, contraseña):
        self.contraseña = contraseña
    def setEspecialidad(self, especialidad):
        self.especialidad = especialidad
    def setTelefono(self, telefono):
        self.telefono = telefono
    def setTipo(self, tipo):
        self.tipo = tipo
|
from carbonserver.api.infra.repositories.repository_projects import SqlAlchemyRepository
from carbonserver.api.schemas import ProjectReport
class ProjectSumsUsecase:
def __init__(self, project_repository: SqlAlchemyRepository) -> None:
self._project_repository = project_repository
def compute_detailed_sum(
self, project_id: str, start_date, end_date
) -> ProjectReport:
sums = self._project_repository.get_project_detailed_sums(
project_id,
start_date,
end_date,
)
return sums
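# Usage sketch (hypothetical wiring; `repo` stands in for a configured
# SqlAlchemyRepository and the date arguments for datetime objects):
#
# usecase = ProjectSumsUsecase(project_repository=repo)
# report = usecase.compute_detailed_sum("project-uuid", start_date, end_date)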
|
"""
This module contains multiple functions in order to run MSAF algorithms.
"""
import logging
import os
from os.path import basename, dirname, splitext, join
import librosa
import numpy as np
from joblib import Parallel, delayed
import msaf
from msaf import jams2
from msaf import input_output as io
from msaf import utils
from msaf import featextract
from msaf import plotting
import msaf.algorithms as algorithms
save_plot_path = ''
def get_boundaries_module(boundaries_id):
"""Obtains the boundaries module given a boundary algorithm identificator.
Parameters
----------
boundaries_id: str
Boundary algorithm identificator (e.g., foote, sf).
Returns
-------
module: object
Object containing the selected boundary module.
None for "ground truth".
"""
if boundaries_id == "gt":
return None
try:
module = eval(algorithms.__name__ + "." + boundaries_id)
except AttributeError:
raise RuntimeError("Algorithm %s can not be found in msaf!" %
boundaries_id)
if not module.is_boundary_type:
raise RuntimeError("Algorithm %s can not identify boundaries!" %
boundaries_id)
return module
def get_labels_module(labels_id):
"""Obtains the label module given a label algorithm identificator.
Parameters
----------
labels_id: str
Label algorithm identificator (e.g., fmc2d, cnmf).
Returns
-------
module: object
Object containing the selected label module.
None for not computing the labeling part of music segmentation.
"""
if labels_id is None:
return None
try:
module = eval(algorithms.__name__ + "." + labels_id)
except AttributeError:
raise RuntimeError("Algorithm %s can not be found in msaf!" %
labels_id)
if not module.is_label_type:
raise RuntimeError("Algorithm %s can not label segments!" %
labels_id)
return module
def run_algorithms(audio_file, boundaries_id, labels_id, config,
annotator_id=0):
"""Runs the algorithms with the specified identifiers on the audio_file.
Parameters
----------
audio_file: str
Path to the audio file to segment.
boundaries_id: str
Identifier of the boundaries algorithm to use ("gt" for ground truth).
labels_id: str
Identifier of the labels algorithm to use (None for not labeling).
config: dict
Dictionary containing the custom parameters of the algorithms to use.
annotator_id: int
        Annotator identifier in the ground truth.
Returns
-------
est_times: np.array or list
List of estimated times for the segment boundaries.
If `list`, it will be a list of np.arrays, sorted by segmentation layer.
est_labels: np.array or list
List of all the labels associated segments.
If `list`, it will be a list of np.arrays, sorted by segmentation layer.
"""
# At this point, features should have already been computed
hpcp, mfcc, tonnetz, cqt, gmt, beats, dur, anal = \
io.get_features(audio_file, config["annot_beats"],
config["framesync"],
pre_features=config["features"])
# Check that there are enough audio frames
    if hpcp.shape[0] <= msaf.minimum_frames:
logging.warning("Audio file too short, or too many few beats "
"estimated. Returning empty estimations.")
return np.asarray([0, dur]), np.asarray([0], dtype=int)
# Get the corresponding modules
bounds_module = get_boundaries_module(boundaries_id)
labels_module = get_labels_module(labels_id)
# Get the correct frame times
frame_times = beats
if config["framesync"]:
frame_times = utils.get_time_frames(dur, anal)
# Segment audio based on type of segmentation
if config["hier"]:
# Hierarchical segmentation
if bounds_module is None:
raise RuntimeError("A boundary algorithm is needed when using "
"hierarchical segmentation.")
if labels_module is not None and \
bounds_module.__name__ != labels_module.__name__:
raise RuntimeError("The same algorithm for boundaries and labels is "
"needed when using hierarchical segmentation.")
S = bounds_module.Segmenter(audio_file, **config)
est_idxs, est_labels = S.processHierarchical()
# Make sure the first and last boundaries are included for each
# level in the hierarchy
est_times = []
cleaned_est_labels = []
for level in range(len(est_idxs)):
est_level_times, est_level_labels = \
utils.process_segmentation_level(est_idxs[level],
est_labels[level],
hpcp.shape[0],
frame_times,
dur)
est_times.append(est_level_times)
cleaned_est_labels.append(est_level_labels)
est_labels = cleaned_est_labels
else:
# Flat segmentation
# Segment using the specified boundaries and labels
# Case when boundaries and labels algorithms are the same
if bounds_module is not None and labels_module is not None and \
bounds_module.__name__ == labels_module.__name__:
S = bounds_module.Segmenter(audio_file, **config)
est_idxs, est_labels = S.processFlat()
# Different boundary and label algorithms
else:
# Identify segment boundaries
if bounds_module is not None:
S = bounds_module.Segmenter(audio_file, in_labels=[], **config)
est_idxs, est_labels = S.processFlat()
else:
try:
est_times, est_labels = io.read_references(
audio_file, annotator_id=annotator_id)
est_idxs = io.align_times(est_times, frame_times[:-1])
if est_idxs[0] != 0:
est_idxs = np.concatenate(([0], est_idxs))
if est_idxs[-1] != hpcp.shape[0] - 1:
est_idxs = np.concatenate((est_idxs, [hpcp.shape[0] - 1]))
                except Exception:
logging.warning("No references found for file: %s" %
audio_file)
return [], []
# Label segments
if labels_module is not None:
if len(est_idxs) == 2:
est_labels = np.array([0])
else:
S = labels_module.Segmenter(audio_file,
in_bound_idxs=est_idxs,
**config)
est_labels = S.processFlat()[1]
# Make sure the first and last boundaries are included
est_times, est_labels = utils.process_segmentation_level(
est_idxs, est_labels, hpcp.shape[0], frame_times, dur)
return est_times, est_labels
def process_track(file_struct, boundaries_id, labels_id, config,
annotator_id=0, plot=False):
"""Prepares the parameters, runs the algorithms, and saves results.
Parameters
----------
file_struct: Object
FileStruct containing the paths of the input files (audio file,
features file, reference file, output estimation file).
boundaries_id: str
Identifier of the boundaries algorithm to use ("gt" for ground truth).
labels_id: str
Identifier of the labels algorithm to use (None for not labeling).
config: dict
Dictionary containing the custom parameters of the algorithms to use.
annotator_id: int
        Annotator identifier in the ground truth.
Returns
-------
est_times: np.array
List of estimated times for the segment boundaries.
est_labels: np.array
List of all the labels associated segments.
"""
    # Only analyze files with annotated beats
if config["annot_beats"]:
jam = jams2.load(file_struct.ref_file)
if len(jam.beats) > 0 and len(jam.beats[0].data) > 0:
pass
else:
logging.warning("No beat information in file %s" %
file_struct.ref_file)
return
logging.info("Segmenting %s" % file_struct.audio_file)
# Compute features if needed
if not os.path.isfile(file_struct.features_file):
featextract.compute_all_features(file_struct)
# Get estimations
est_times, est_labels = run_algorithms(file_struct.audio_file,
boundaries_id, labels_id, config,
annotator_id=annotator_id)
# Save
logging.info("Writing results in: %s" % file_struct.est_file)
io.save_estimations(file_struct.est_file, est_times, est_labels,
boundaries_id, labels_id, **config)
if plot:
audio_name = splitext(basename(file_struct.audio_file))[0]
plot_name = join(dirname(dirname(file_struct.audio_file)), 'plots', audio_name+'_'+'mfcc'+'_'+boundaries_id+'.pdf')
dataset_name = basename(dirname(dirname(file_struct.audio_file)))
plotting.plot_one_track(plot_name, file_struct, est_times, est_labels, boundaries_id, labels_id, dataset_name)
return est_times, est_labels
def process(in_path, annot_beats=False, feature="mfcc", ds_name="*",
framesync=False, boundaries_id="gt", labels_id=None, hier=False,
sonify_bounds=False, plot=False, n_jobs=4, annotator_id=0,
config=None, out_bounds="out_bounds.wav"):
"""Main process to segment a file or a collection of files.
Parameters
----------
in_path: str
Input path. If a directory, MSAF will function in collection mode.
If audio file, MSAF will be in single file mode.
annot_beats: bool
Whether to use annotated beats or not. Only available in collection
mode.
feature: str
String representing the feature to be used (e.g. hpcp, mfcc, tonnetz)
ds_name: str
Prefix of the dataset to be used (e.g. SALAMI, Isophonics)
    framesync: bool
Whether to use framesync features or not (default: False -> beatsync)
boundaries_id: str
Identifier of the boundaries algorithm (use "gt" for groundtruth)
labels_id: str
Identifier of the labels algorithm (use None to not compute labels)
hier : bool
Whether to compute a hierarchical or flat segmentation.
sonify_bounds: bool
Whether to write an output audio file with the annotated boundaries
or not (only available in Single File Mode).
plot: bool
Whether to plot the boundaries and labels against the ground truth.
n_jobs: int
Number of processes to run in parallel. Only available in collection
mode.
annotator_id: int
        Annotator identifier in the ground truth.
config: dict
Dictionary containing custom configuration parameters for the
algorithms. If None, the default parameters are used.
    out_bounds: str
        Path to the output for the sonified boundaries (only in single file
        mode, when sonify_bounds is True).
Returns
-------
results : list
List containing tuples of (est_times, est_labels) of estimated
boundary times and estimated labels.
If labels_id is None, est_labels will be a list of -1.
"""
# Seed random to reproduce results
np.random.seed(123)
# Set up configuration based on algorithms parameters
if config is None:
config = io.get_configuration(feature, annot_beats, framesync,
boundaries_id, labels_id)
config["features"] = None
config["hier"] = hier
if os.path.isfile(in_path):
# Single file mode
        # Get (if they exist) or compute features
        # TODO: Modularize!
file_struct = msaf.io.FileStruct(in_path)
if os.path.exists(file_struct.features_file):
feat_prefix = ""
if not framesync:
feat_prefix = "bs_"
features = {}
            # Mi: added the Gammatone feature set
features["%shpcp" % feat_prefix], features["%smfcc" % feat_prefix], \
features["%stonnetz" % feat_prefix], features["%scqt" % feat_prefix], \
features["%sgmt" % feat_prefix], features["beats"], dur, \
features["anal"] = msaf.io.get_features(in_path,
annot_beats=annot_beats,
framesync=framesync,
pre_features=None)
else:
# Compute and save features
features = featextract.compute_features_for_audio_file(in_path)
msaf.utils.ensure_dir(os.path.dirname(file_struct.features_file))
msaf.featextract.save_features(file_struct.features_file, features)
config["features"] = features
config["hier"] = hier
# And run the algorithms
est_times, est_labels = run_algorithms(in_path, boundaries_id,
labels_id, config,
annotator_id=annotator_id)
if sonify_bounds:
logging.info("Sonifying boundaries in %s..." % out_bounds)
fs = 44100
audio_hq, sr = librosa.load(in_path, sr=fs)
utils.sonify_clicks(audio_hq, est_times, out_bounds, fs)
if plot:
plotting.plot_one_track(save_plot_path, file_struct, est_times, est_labels,
boundaries_id, labels_id, ds_name)
# Save estimations
msaf.utils.ensure_dir(os.path.dirname(file_struct.est_file))
config["features"] = None
io.save_estimations(file_struct.est_file, est_times, est_labels,
boundaries_id, labels_id, **config)
return est_times, est_labels
else:
# Collection mode
file_structs = io.get_dataset_files(in_path, ds_name)
# Call in parallel
return Parallel(n_jobs=n_jobs)(delayed(process_track)(
file_struct, boundaries_id, labels_id, config,
annotator_id=annotator_id, plot=plot) for file_struct in file_structs[:])
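# Usage sketch for single-file mode (a hypothetical invocation; the audio path
# is a placeholder, and the algorithm identifiers are the examples mentioned
# in the docstrings above):
#
# est_times, est_labels = process("audio/track.mp3", feature="mfcc",
#                                 boundaries_id="sf", labels_id="fmc2d")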
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR {{ info.version }} on {{ info.date }}.
# {{ info.year }}, SMART Health IT.
class FHIRElementFactory(object):
""" Factory class to instantiate resources by resource name.
"""
@classmethod
def instantiate(cls, resource_name, jsondict):
""" Instantiate a resource of the type correlating to "resource_name".
:param str resource_name: The name/type of the resource to instantiate
:param dict jsondict: The JSON dictionary to use for data
:returns: A resource of the respective type or `Element`
"""
{%- for klass in classes %}{% if klass.resource_name %}
if "{{ klass.resource_name }}" == resource_name:
from . import {{ klass.module }}
return {{ klass.module }}.{{ klass.name }}(jsondict)
{%- endif %}{% endfor %}
from . import element
return element.Element(jsondict)
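# For a concrete resource, the template loop above expands to branches like
# the following (an illustrative rendering, assuming a Patient class generated
# into a `patient` module):
#
# if "Patient" == resource_name:
#     from . import patient
#     return patient.Patient(jsondict)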
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2021 Edward Higgins <ed.higgins@york.ac.uk>
#
# Distributed under terms of the MIT license.
"""
"""
import os
import sys
import base64
import json
import dash_html_components as html
import dash_core_components as dcc
from dash.dependencies import Input,Output,State
from datetime import datetime
from parameters import Parameters, default_parameters
from dash_ui.app import app
full_data_folder=os.path.join(os.getcwd(),'web_data')
web_data_folder='/web_data'
input_list = []
for param in default_parameters:
if (default_parameters[param]['class'] in ["tracking", "image", "postprocessing"]):
input_list.append('{}-{}'.format(default_parameters[param]['class'], param))
def layout(params):
return html.Div(id='session-tab-container', children=[
html.H2("Session management"),
html.Button('New Session', id='new-session-button', n_clicks=0),
html.H3("Files"),
dcc.Dropdown(id='files-dropdown',
options=[],
clearable=False,
style={'color':'black'}
),
dcc.Upload(id='data-file-upload',
children=html.Div(
["Upload File"]
),
style={
"width": "80%",
"height": "60px",
"lineHeight": "60px",
"borderWidth": "1px",
"borderStyle": "dashed",
"borderRadius": "5px",
"textAlign": "center",
"position": "center",
"margin": "auto",
"marginTop": "10px",
"marginBottom": "10px",
},
multiple=False,
),
html.Div([
html.Button("Download file", id="file-download-button"),
dcc.Download(id="file-download")
]),
dcc.Store(id='session-id-store', data='default'),
dcc.Store(id='session-files-update-upload-store'),
dcc.Store(id='session-files-update-track-store'),
dcc.Store(id='session-files-update-postprocess-store', data=0),
dcc.Store(id='session-files-update-image-store', data=0),
dcc.Store(id='session-parameters-store', data=str(json.dumps(params._params))),
dcc.Store(id='session-active-img-file-store', data=""),
dcc.Store(id='session-active-info-file-store', data=""),
])
@app.callback([
Output('session-active-img-file-store', 'data'),
Output('session-active-info-file-store', 'data')
], [
Input('files-dropdown', 'value'),
State('session-active-img-file-store', 'data'),
State('session-active-info-file-store', 'data'),
State("session-id-store", "data"),
])
def set_active_file(filename, img_file, info_file, session_id):
if filename:
full_filename=""
if ".tif" in filename:
img_file = os.path.join(full_data_folder, session_id, filename)
elif ".tsv" in filename:
info_file = os.path.join(full_data_folder, session_id, filename)
elif ".png" in filename:
info_file = os.path.join(web_data_folder, session_id, filename)
else:
img_file = ""
info_file = ""
return [img_file, info_file]
@app.callback([
Output('files-dropdown', 'options'),
Output('files-dropdown', 'value')],
[
Input('session-id-store', 'data'),
Input('session-files-update-upload-store', 'data'),
Input('session-files-update-track-store', 'data'),
Input('session-files-update-postprocess-store', 'data'),
])
def update_file_options(session_id, update_upload, update_track, update_post):
absolute_filename = os.path.join(full_data_folder, session_id)
print(f"Updating options {absolute_filename}")
options = [{'label': f, 'value': f} for f in os.listdir(absolute_filename)]
first_element = ""
if options:
first_element = options[0]["value"]
return options, first_element
@app.callback(Output('file-download', 'data'),
Input('file-download-button', 'n_clicks'),
State('files-dropdown', 'value'),
State('session-id-store', 'data'),
prevent_initial_call=True,
)
def download_file(n_clicks,filename, session_path):
return dcc.send_file(os.path.join(full_data_folder, session_path, filename))
@app.callback([
Output('session-files-update-upload-store', 'data'),
], [
Input("data-file-upload", "filename"),
Input("data-file-upload", "contents"),
State('session-active-img-file-store', 'data'),
State('session-active-info-file-store', 'data'),
State("session-id-store", "data"),
])
def upload_file(filename, file_contents, img_file, info_file, session_id):
print("Called")
full_filename=""
if filename:
full_filename = os.path.join(full_data_folder, session_id, filename)
if filename is not None and file_contents is not None:
print(f"Uploading file to {full_filename}")
data = file_contents.encode("utf8").split(b";base64,")[1]
with open(full_filename, 'wb') as fp:
fp.write(base64.decodebytes(data))
else:
file_path=""
now = datetime.now()
time = now.strftime("%Y-%m-%dT%H:%M:%S.%f")
return [time]
@app.callback(
[
Output('session-id-store', 'data'),
], [
Input('new-session-button', 'n_clicks'),
]
)
def new_session(n_clicks):
now = datetime.now()
session_id = now.strftime("%Y-%m-%dT%H:%M:%S.%f")
session_folder = os.path.join(full_data_folder, session_id)
print(f"Active data folder: {session_folder}")
if not os.path.exists(session_folder):
os.makedirs(session_folder)
    else:
        print("Session already exists!")
        sys.exit()
return [session_id]
@app.callback(
Output('session-parameters-store', 'data'),
[Input('session-parameters-store', 'data')] +
[Input(input_box, 'value') for input_box in input_list]
)
def update_parameters(input_json, *args):
params = Parameters(json.loads(input_json))
params._params = json.loads(input_json)
for i in range(len(input_list)):
param_class, param = input_list[i].split('-')
if params._params[param]['value'] != args[i]:
print(f"Setting {param} from {params._params[param]['value']} to {args[i]}")
params._params[param]['value'] = args[i]
params_json = str(json.dumps(params._params))
return params_json
|
from labelbox.orm import query
from labelbox.orm.db_object import DbObject, Updateable
from labelbox.orm.model import Entity, Field, Relationship
class Webhook(DbObject, Updateable):
""" Represents a server-side rule for sending notifications to a web-server
whenever one of several predefined actions happens within a context of
a Project or an Organization.
"""
# Status
ACTIVE = "ACTIVE"
INACTIVE = "INACTIVE"
REVOKED = "REVOKED"
# Topic
LABEL_CREATED = "LABEL_CREATED"
LABEL_UPDATED = "LABEL_UPDATED"
LABEL_DELETED = "LABEL_DELETED"
updated_at = Field.DateTime("updated_at")
created_at = Field.DateTime("created_at")
url = Field.String("url")
topics = Field.String("topics")
status = Field.String("status")
@staticmethod
def create(client, topics, url, secret, project):
""" Creates a Webhook.
Args:
client (Client): The Labelbox client used to connect
to the server.
topics (list of str): A list of topics this Webhook should
get notifications for.
url (str): The URL to which notifications should be sent
by the Labelbox server.
secret (str): A secret key used for signing notifications.
project (Project or None): The project for which notifications
should be sent. If None notifications are sent for all
events in your organization.
Returns:
A newly created Webhook.
"""
project_str = "" if project is None \
else ("project:{id:\"%s\"}," % project.uid)
query_str = """mutation CreateWebhookPyApi {
createWebhook(data:{%s topics:{set:[%s]}, url:"%s", secret:"%s" }){%s}
} """ % (project_str, " ".join(topics), url, secret,
query.results_query_part(Entity.Webhook))
return Webhook(client, client.execute(query_str)["createWebhook"])
created_by = Relationship.ToOne("User", False, "created_by")
organization = Relationship.ToOne("Organization")
project = Relationship.ToOne("Project")
def update(self, topics=None, url=None, status=None):
""" Updates this Webhook.
Args:
topics (list of str): The new topics value, optional.
url (str): The new URL value, optional.
status (str): The new status value, optional.
"""
# Webhook has a custom `update` function due to custom types
# in `status` and `topics` fields.
topics_str = "" if topics is None \
else "topics: {set: [%s]}" % " ".join(topics)
url_str = "" if url is None else "url: \"%s\"" % url
status_str = "" if status is None else "status: %s" % status
query_str = """mutation UpdateWebhookPyApi {
updateWebhook(where: {id: "%s"} data:{%s}){%s}} """ % (
self.uid, ", ".join(filter(None,
(topics_str, url_str, status_str))),
query.results_query_part(Entity.Webhook))
self._set_field_values(self.client.execute(query_str)["updateWebhook"])
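# Usage sketch (hypothetical values; `client` is an authenticated
# labelbox.Client and `project` an existing Project):
#
# webhook = Webhook.create(
#     client,
#     topics=[Webhook.LABEL_CREATED, Webhook.LABEL_UPDATED],
#     url="https://example.com/labelbox-hook",
#     secret="s3cret",
#     project=project)
# webhook.update(status=Webhook.INACTIVE)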
|
while True:
    try:
        x = int(input("Please enter a number: "))
        print("Number: {}".format(x))
        break
    except ValueError:
        print("Oops! That was no valid number. Try again...")
|
import numpy as np
import random
import matplotlib.pyplot as plt
import torch
import train_2D_rt_v2 as tr2
def plot_PMF(p_list,y_list,npdf,model,get_ypred_at_RT,kld=True):
'''Plots predicted and true PMF for given parameter, ykerlist and ylist (one p, yker, y in each list)'''
position = 0
y_pred = tr2.get_predicted_PMF(p_list=p_list,
npdf=npdf,position=position,model=model,get_ypred_at_RT = get_ypred_at_RT)
kld = tr2.get_metrics(y_pred,y_list,metric = 'kld')
print('KLD: ',kld.item())
y = y_list.detach().numpy()
Y = y_pred.detach().numpy().reshape(y.shape)
fig1,ax1=plt.subplots(nrows=1,ncols=3,figsize=(12,4))
cm='viridis'
ax1[0].imshow(np.log10(y).T,cmap=cm,aspect='auto')
ax1[0].invert_yaxis()
ax1[0].set_title('True log-PMF & basis locations')
ax1[1].imshow(np.log10(Y).T,cmap=cm,aspect='auto')
ax1[1].invert_yaxis()
ax1[1].set_title('Reconstructed log-PMF')
ax1[2].imshow(np.log10(np.abs(Y-y)).T,cmap=cm,aspect='auto')
ax1[2].invert_yaxis()
ax1[2].set_title('Log-absolute difference between PMFs')
def plot_training(e_,t_,metric='kld'):
'''Plots training data'''
plt.figure(figsize=(9,6))
plt.plot(range(len(e_)),e_,c='blue',label='Training Data')
plt.plot(range(len(t_)),t_,c='red',label='Testing Data')
plt.suptitle(f'Min KLD: {np.min(e_)}')
plt.title(f'metric = {metric}')
plt.xlabel('Epoch')
plt.ylabel(f'{metric}')
plt.legend()
def plot_CDF(array,metric='kld',xlim=None):
'''Plots CDF'''
cdf = np.zeros(len(array))
array_sorted = np.sort(array)
for i,value in enumerate(array_sorted):
cdf[i] = len(array_sorted[array_sorted<value])/len(array_sorted)
plt.scatter(array_sorted,cdf,s=5)
plt.xlabel(f'{metric}')
plt.ylabel('CDF')
if xlim:
xlow,xhigh=xlim
plt.xlim(xlow,xhigh)
plt.show()
def plot_histogram(array,bins,xlim=None):
    '''Plots a histogram with the given number of bins and optional x-limits'''
plt.hist(array,bins = bins)
if xlim:
xlow,xhigh = xlim
plt.xlim(xlow,xhigh)
plt.title(f'Max KLD: {np.max(array):.4f}, Min KLD: {np.min(array):.4f}')
plt.xlabel('KL Divergence')
plt.ylabel('Frequency')
def get_parameters_quantile(train_list,klds,quantiles = [.95,1.0]):
    '''Returns the parameters whose KLDs fall between the given quantiles, along with those KLDs.'''
parameters,y_list = tr2.load_training_data(train_list)
kld_low = np.quantile(klds,quantiles[0])
kld_high = np.quantile(klds,quantiles[1])
klds_segment = klds[klds>kld_low]
params_segment = parameters[klds>kld_low]
klds_segment_2 = klds_segment[klds_segment<kld_high]
params_segment_2 = params_segment[klds_segment<kld_high]
return(params_segment_2,klds_segment_2)
def plot_param_quantiles(klds,train_list):
params_segment_1,klds_segment_1 = get_parameters_quantile(train_list,klds,quantiles=[0,.25])
params_segment_2,klds_segment_2 = get_parameters_quantile(train_list,klds,quantiles=[.25,.5])
params_segment_3,klds_segment_3 = get_parameters_quantile(train_list,klds,quantiles=[.5,.75])
params_segment_4,klds_segment_4 = get_parameters_quantile(train_list,klds,quantiles=[.75,.95])
params_segment_5,klds_segment_5 = get_parameters_quantile(train_list,klds,quantiles=[.95,1.])
b_1 = 10**np.array([ p[0] for p in params_segment_1 ])
beta_1 = 10**np.array([ p[1] for p in params_segment_1 ])
gamma_1 = 10**np.array([ p[2] for p in params_segment_1 ])
b_2 = 10**np.array([ p[0] for p in params_segment_2 ])
beta_2 = 10**np.array([ p[1] for p in params_segment_2 ])
gamma_2 = 10**np.array([ p[2] for p in params_segment_2 ])
b_3 = 10**np.array([ p[0] for p in params_segment_3 ])
beta_3 = 10**np.array([ p[1] for p in params_segment_3 ])
gamma_3 = 10**np.array([ p[2] for p in params_segment_3 ])
b_4 = 10**np.array([ p[0] for p in params_segment_4 ])
beta_4 = 10**np.array([ p[1] for p in params_segment_4 ])
gamma_4 = 10**np.array([ p[2] for p in params_segment_4 ])
b_5 = 10**np.array([ p[0] for p in params_segment_5 ])
beta_5 = 10**np.array([ p[1] for p in params_segment_5 ])
gamma_5 = 10**np.array([ p[2] for p in params_segment_5 ])
fig,ax = plt.subplots(nrows=1,ncols=3,figsize=(12,5))
# some labels
ax[0].scatter(10,10,c='grey',label = 'Quantile 0-0.25')
ax[0].scatter(10,10,c='blue',label = 'Quantile 0.25-0.50')
ax[0].scatter(10,10,c='purple',label = 'Quantile 0.50-0.75')
ax[0].scatter(10,10,c='green',label = 'Quantile 0.75-0.95')
ax[0].scatter(10,10,c='red',label = 'Quantile 0.95-1.0')
ax[0].scatter(b_1,beta_1,c = klds_segment_1, cmap= 'Greys')
ax[0].scatter(b_2,beta_2,c = klds_segment_2, cmap= 'Blues')
ax[0].scatter(b_3,beta_3, c = klds_segment_3, cmap= 'Purples')
ax[0].scatter(b_4,beta_4,c = klds_segment_4, cmap= 'Greens')
ax[0].scatter(b_5,beta_5,c = klds_segment_5, cmap= 'Reds')
ax[0].set_xlabel('b')
ax[0].set_ylabel('beta')
ax[0].set_xscale('log')
ax[0].set_yscale('log')
ax[1].scatter(b_1,gamma_1,c = klds_segment_1, cmap= 'Greys')
ax[1].scatter(b_2,gamma_2, c = klds_segment_2, cmap= 'Blues')
ax[1].scatter(b_3,gamma_3,c = klds_segment_3, cmap= 'Purples')
ax[1].scatter(b_4,gamma_4,c = klds_segment_4, cmap= 'Greens')
ax[1].scatter(b_5,gamma_5,c = klds_segment_5, cmap= 'Reds')
ax[1].set_xlabel('b')
ax[1].set_ylabel('gamma')
ax[1].set_xscale('log')
ax[1].set_yscale('log')
ax[2].scatter(beta_1,gamma_1,c = klds_segment_1, cmap= 'Greys')
ax[2].scatter(beta_2,gamma_2, c = klds_segment_2, cmap= 'Blues')
ax[2].scatter(beta_3,gamma_3,c = klds_segment_3, cmap= 'Purples')
ax[2].scatter(beta_4,gamma_4,c = klds_segment_4, cmap= 'Greens')
ax[2].scatter(beta_5,gamma_5,c = klds_segment_5, cmap= 'Reds')
ax[2].set_xlabel('beta')
    ax[2].set_ylabel('gamma')
ax[2].set_xscale('log')
ax[2].set_yscale('log')
ax[0].legend()
fig.tight_layout()
plt.title('MLP 1 Parameters Colored by KLD Quantile')
def plot_PMF_grid(file_list,npdf,nrows,ncols,model,get_ypred_at_RT,kld=True):
p_list,y_list = tr2.load_training_data(file_list)
    rand = np.zeros(nrows*ncols)
    for i in range(nrows*ncols):
        # randint is inclusive on both ends, so cap at len(y_list) - 1
        rand[i] = random.randint(0, len(y_list) - 1)
y = []
Y = []
for r in rand:
r = int(r)
y_pred = tr2.get_predicted_PMF(p_list=p_list,
npdf=npdf,position=r,model=model,get_ypred_at_RT = get_ypred_at_RT)
y.append(y_list[r])
Y.append(y_pred)
Y = [Y_.detach().numpy() for Y_ in Y]
y = [y_.detach().numpy() for y_ in y]
Y = [Y_.reshape(y[i].shape) for i,Y_ in enumerate(Y)]
fig, ax1 = plt.subplots(nrows=nrows, ncols=2*ncols, figsize=(15, 15))
k = 0
j_num = np.arange(0,ncols*2,2)
for i in range(nrows):
for j in j_num:
y_ = y[k]
Y_ = Y[k]
cm='viridis'
ax1[i,j].imshow(np.log10(y_).T,cmap=cm,aspect='auto')
ax1[i,j].invert_yaxis()
ax1[i,j].set_title('True log-PMF & basis locations')
ax1[i,j+1].imshow(np.log10(Y_).T,cmap=cm,aspect='auto')
ax1[i,j+1].invert_yaxis()
ax1[i,j+1].set_title('Reconstructed log-PMF')
            if kld:
kld_ = -np.sum(y_.flatten()*np.log(Y_.flatten()/y_.flatten()))
ax1[i,j].title.set_text(f'KLD: {kld_}')
k = k + 1
fig.tight_layout()
|
##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
import collections
import re
import sys
import pkg_resources
_version_string = None
_zope_version = None
ZopeVersion = collections.namedtuple(
"ZopeVersion",
["major", "minor", "micro", "status", "release"])
def _prep_version_data():
global _version_string, _zope_version
if _version_string is None:
v = sys.version_info
pyver = "python %d.%d.%d, %s" % (v[0], v[1], v[2], sys.platform)
dist = pkg_resources.get_distribution('Zope')
_version_string = "%s, %s" % (dist.version, pyver)
expr = re.compile(
r'(?P<major>[0-9]+)\.(?P<minor>[0-9]+)(\.(?P<micro>[0-9]+))?'
'(?P<status>[A-Za-z]+)?(?P<release>[0-9]+)?')
version_dict = expr.match(dist.version).groupdict()
_zope_version = ZopeVersion(
int(version_dict.get('major') or -1),
int(version_dict.get('minor') or -1),
int(version_dict.get('micro') or -1),
version_dict.get('status') or '',
int(version_dict.get('release') or -1))
def version_txt():
_prep_version_data()
return '(%s)' % _version_string
def getZopeVersion():
"""return information about the Zope version as a named tuple.
Format of zope_version tuple:
(major <int>, minor <int>, micro <int>, status <string>, release <int>)
If unreleased, integers may be -1.
"""
_prep_version_data()
return _zope_version
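# Example usage (illustrative; the values depend on the installed Zope
# distribution):
#
# if getZopeVersion().major >= 4:
#     print(version_txt())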
|
import tornado.web
import tornado.util
from monstro.conf import settings
from monstro.urls import urls
application = tornado.web.Application(
urls(settings.urls),
cookie_secret=settings.secret_key,
debug=settings.debug,
**getattr(settings, 'tornado_application_settings', {})
)
|
# Copyright 2016-present CERN – European Organization for Nuclear Research
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from qf_lib.backtesting.order.execution_style import ExecutionStyle
from qf_lib.backtesting.order.time_in_force import TimeInForce
from qf_lib.common.tickers.tickers import Ticker
class Order:
"""
Order generated by a strategy, then processed by PositionSizer.
Finally executed by ExecutionHandler.
"""
def __init__(self, ticker: Ticker, quantity: float, execution_style: ExecutionStyle,
time_in_force: TimeInForce, order_state: str = "", strategy: str = ""):
"""
This __init__ shouldn't be used anywhere beyond this module. Use OrderFactory for creating Order objects.
"""
self.id = None # type:int
self.ticker = ticker
self.quantity = quantity
self.time_in_force = time_in_force
self.execution_style = execution_style
self.order_state = order_state
self.strategy = strategy
def __str__(self):
return f"\nOrder:\n" \
f"\tid: {self.id}\n" \
f"\tticker: {self.ticker}\n" \
f"\tquantity: {self.quantity}\n" \
f"\ttif: {self.time_in_force}\n" \
f"\texecution_style: {self.execution_style}\n" \
f"\torder_state: {self.order_state}\n" \
f"\tstrategy: {self.strategy}"
def __eq__(self, other):
if self is other:
return True
if not isinstance(other, Order):
return False
if self.id is not None and other.id == self.id:
return True
# when both ids are none -> compare the values
return (self.ticker, self.quantity, self.time_in_force, self.execution_style) == \
(other.ticker, other.quantity, other.time_in_force, other.execution_style)
def __hash__(self):
return hash((self.ticker, self.quantity, self.time_in_force, self.execution_style))
|
import torch
import numpy as np
import torch.distributions as dists
import torch.nn.functional as F
from scipy.optimize import linear_sum_assignment
def compute_accuracy(training_step_outputs, discrete_vae, mode="stochastic"):
num_clusters = discrete_vae.latent_dim
num_classes = training_step_outputs[-1]["y"].shape[1]
# assignment problem -> build matrix
assignment_matrix = np.zeros([num_clusters, num_classes])
all_labels = []
all_preds = []
for entry in training_step_outputs:
x, y = entry["x"], entry["y"]
# all one hot vectors repeated current batch_size times
batch_size = x.shape[0]
# all pos. classes (one-hot) [batch_size, num_classes, num_classes]
all_classes = np.eye(num_classes).reshape(1, num_classes, num_classes).repeat(batch_size, 0)
# reshape y into [batch_size, 1, num_classes]
y = y.unsqueeze(1).detach().cpu().numpy()
# find target indices by finding vectors that agree with all_classes
target_indices = np.all(y == all_classes, axis=2).argmax(1)
# predict cluster either stochastic (sampling) or deterministic
probs_logits = discrete_vae.encode(x)
if mode == "stochastic": # predictions are sampled one-hot vectors
# sample class indices [batch]
sampled_indices = dists.Categorical(logits=probs_logits).sample()
# cast to one-hot vectors [batch, num_clusters]
pred = F.one_hot(sampled_indices, num_classes=num_clusters).type_as(probs_logits)
elif mode == "deterministic": # predictions are the argmax of each probability dist
pred = torch.zeros_like(probs_logits)
pred[torch.arange(pred.shape[0]), torch.argmax(probs_logits, dim=1)] = 1
# predicted clusters [batch_size, 1, num_clusters]
pred = pred.unsqueeze(1).cpu().detach().numpy()
# all possible clusters
all_clusters = (
np.eye(num_clusters).reshape(1, num_clusters, num_clusters).repeat(batch_size, 0)
)
# find source indices by finding vectors that agree with all_clusters
source_indices = np.all(pred == all_clusters, axis=2).argmax(1)
# inplace update of assignment_matrix
np.add.at(assignment_matrix, (source_indices, target_indices), 1)
# collect labels and predictions
all_labels.append(y)
all_preds.append(pred)
if num_classes == num_clusters: # one-to-one
# find optimal assignment using hungarian method
row_ind, col_ind = linear_sum_assignment(-assignment_matrix)
empty = 0
elif num_classes < num_clusters: # many-to-one
# number of clusters that are not assigned
empty = (np.sum(assignment_matrix, axis=1) == 0).sum()
# greedy approach
col_ind = np.argmax(assignment_matrix, axis=1)
if not np.all(np.in1d(list(range(num_classes)), col_ind)) and mode == "deterministic":
print(f"Assertion: Greedy solution does not work for {num_clusters}")
# assert np.all(np.in1d(list(range(num_classes)), col_ind)), "greedy solution does not work"
row_ind = np.arange(num_clusters)
# set matching values zero (in place)
assignment_matrix[row_ind, col_ind] = 0
# the rest of assignment matrix would be "misclassified"
accuracy = 1 - np.sum(assignment_matrix) / sum([labels.shape[0] for labels in all_labels])
return accuracy, empty
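# A tiny illustration of the Hungarian matching used above, independent of the
# VAE code (the count matrix is made up for the example):
#
# import numpy as np
# from scipy.optimize import linear_sum_assignment
# counts = np.array([[5, 0], [1, 4]])                       # clusters x classes
# row_ind, col_ind = linear_sum_assignment(-counts)         # maximize matches
# accuracy = counts[row_ind, col_ind].sum() / counts.sum()  # -> 0.9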
|
from gna.ui import basecmd
import numpy as np
import h5py
from gna.pointtree import PointTree
class cmd(basecmd):
@classmethod
def initparser(cls, parser, env):
super(cmd, cls).initparser(parser, env)
parser.add_argument('--fcscan', required=True)
parser.add_argument('--output', type=str, required=True)
parser.add_argument('--initial', type=int, default=0)
parser.add_argument('--contexts', type=int, nargs='+', default=[1])
parser.add_argument('--points', type=str, required=False)
def run(self):
fcscan = PointTree(self.opts.fcscan)
fcmap = PointTree(self.opts.output, "w")
fcmap.params = fcscan.params
if self.opts.points:
points = PointTree(self.opts.points)
else:
points = None
ifield = self.opts.initial
for path, values, ds in fcscan.iterall():
if points and path not in points:
continue
chi2s = ds["chi2s"]
print("{:20}: {} entries".format(path, len(chi2s)))
for ctx in self.opts.contexts:
mfield = ctx
dchi2s = chi2s[:, ifield] - chi2s[:, mfield]
vmap = np.sort(dchi2s)
grp = fcmap.touch(path + "/dchi2s")
grp.create_dataset(str(ctx), data=vmap)
grp.attrs["entries"] = len(chi2s)
return True
|
import pika
import time
import os
from rabbit import connection, wait_for, SERVER
ME = os.environ['HOSTNAME']
MODULO = int("0x{}".format(ME[0:3]), 16) % 2
print(" MODULO: {}".format(MODULO))
def callback(ch, method, properties, body):
print("<= Receiver {}".format(body))
# time.sleep(int(body))
# print("== done")
# ch.basic_ack(delivery_tag=method.delivery_tag)
if not wait_for('rabbitmq'):
print("Rabbit MQ server '{}' not up!".format(SERVER))
exit(1)
# avoid shadowing the imported `connection` helper
conn = connection(SERVER, 'guest', 'guest')
channel = conn.channel()
channel.exchange_declare(exchange='numbers',
exchange_type='direct')
queue = channel.queue_declare(exclusive=True)
queue_name = queue.method.queue
channel.queue_bind(exchange='numbers',
queue=queue_name,
routing_key=str(MODULO))
channel.basic_consume(callback,
queue=queue_name,
no_ack=True)
print("=== Waiting for messages")
channel.start_consuming()
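# Publisher counterpart (a sketch; a matching sender would publish each number
# to the same direct exchange, with the routing key set to its parity so that
# this consumer's binding above selects it):
#
# channel.basic_publish(exchange='numbers',
#                       routing_key=str(n % 2),
#                       body=str(n))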
|
from django.contrib import admin
from . import models
class AccountAdmin(admin.ModelAdmin):
list_display = ('title', 'type', 'active')
list_filter = ('type', 'active')
search_fields = ('title',)
class WithdrawalSlipAdmin(admin.ModelAdmin):
list_display = ('title', 'type', 'account', 'amount', 'completed', 'status')
list_filter = ('type', 'status')
search_fields = ('title', 'remarks',)
fields = ('type', 'status', 'account', 'amount', 'completed', 'title', 'remarks')
date_hierarchy = 'completed'
ordering = ['-completed', ]
class UnidentifiedDepositSlip(admin.ModelAdmin):
list_display = ('title', 'type', 'account', 'amount', 'completed', 'status')
list_filter = ('type', 'status')
search_fields = ('title', 'remarks',)
fields = ('type', 'status', 'account', 'amount', 'completed', 'title', 'remarks')
date_hierarchy = 'completed'
ordering = ['-completed', ]
admin.site.register(models.Account, AccountAdmin)
admin.site.register(models.WithdrawalSlip, WithdrawalSlipAdmin)
admin.site.register(models.UnidentifiedDepositSlip, UnidentifiedDepositSlip)
|
# coding: utf-8
from __future__ import unicode_literals
import pytest
@pytest.mark.parametrize("text", ["(under)"])
def test_da_tokenizer_splits_no_special(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 3
@pytest.mark.parametrize("text", ["ta'r", "Søren's", "Lars'"])
def test_da_tokenizer_handles_no_punct(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 1
@pytest.mark.parametrize("text", ["(ta'r"])
def test_da_tokenizer_splits_prefix_punct(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 2
assert tokens[0].text == "("
assert tokens[1].text == "ta'r"
@pytest.mark.parametrize("text", ["ta'r)"])
def test_da_tokenizer_splits_suffix_punct(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 2
assert tokens[0].text == "ta'r"
assert tokens[1].text == ")"
@pytest.mark.parametrize(
"text,expected", [("(ta'r)", ["(", "ta'r", ")"]), ("'ta'r'", ["'", "ta'r", "'"])]
)
def test_da_tokenizer_splits_even_wrap(da_tokenizer, text, expected):
tokens = da_tokenizer(text)
assert len(tokens) == len(expected)
assert [t.text for t in tokens] == expected
@pytest.mark.parametrize("text", ["(ta'r?)"])
def test_da_tokenizer_splits_uneven_wrap(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 4
assert tokens[0].text == "("
assert tokens[1].text == "ta'r"
assert tokens[2].text == "?"
assert tokens[3].text == ")"
@pytest.mark.parametrize(
"text,expected",
[("f.eks.", ["f.eks."]), ("fe.", ["fe", "."]), ("(f.eks.", ["(", "f.eks."])],
)
def test_da_tokenizer_splits_prefix_interact(da_tokenizer, text, expected):
tokens = da_tokenizer(text)
assert len(tokens) == len(expected)
assert [t.text for t in tokens] == expected
@pytest.mark.parametrize("text", ["f.eks.)"])
def test_da_tokenizer_splits_suffix_interact(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 2
assert tokens[0].text == "f.eks."
assert tokens[1].text == ")"
@pytest.mark.parametrize("text", ["(f.eks.)"])
def test_da_tokenizer_splits_even_wrap_interact(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 3
assert tokens[0].text == "("
assert tokens[1].text == "f.eks."
assert tokens[2].text == ")"
@pytest.mark.parametrize("text", ["(f.eks.?)"])
def test_da_tokenizer_splits_uneven_wrap_interact(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 4
assert tokens[0].text == "("
assert tokens[1].text == "f.eks."
assert tokens[2].text == "?"
assert tokens[3].text == ")"
@pytest.mark.parametrize("text", ["0,1-13,5", "0,0-0,1", "103,27-300", "1/2-3/4"])
def test_da_tokenizer_handles_numeric_range(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 1
@pytest.mark.parametrize("text", ["sort.Gul", "Hej.Verden"])
def test_da_tokenizer_splits_period_infix(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 3
@pytest.mark.parametrize("text", ["Hej,Verden", "en,to"])
def test_da_tokenizer_splits_comma_infix(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 3
assert tokens[0].text == text.split(",")[0]
assert tokens[1].text == ","
assert tokens[2].text == text.split(",")[1]
@pytest.mark.parametrize("text", ["sort...Gul", "sort...gul"])
def test_da_tokenizer_splits_ellipsis_infix(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 3
@pytest.mark.parametrize(
"text",
["gå-på-mod", "4-hjulstræk", "100-Pfennig-frimærke", "TV-2-spots", "trofæ-vaeggen"],
)
def test_da_tokenizer_keeps_hyphens(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 1
def test_da_tokenizer_splits_double_hyphen_infix(da_tokenizer):
tokens = da_tokenizer(
"Mange regler--eksempelvis bindestregs-reglerne--er komplicerede."
)
assert len(tokens) == 9
assert tokens[0].text == "Mange"
assert tokens[1].text == "regler"
assert tokens[2].text == "--"
assert tokens[3].text == "eksempelvis"
assert tokens[4].text == "bindestregs-reglerne"
assert tokens[5].text == "--"
assert tokens[6].text == "er"
assert tokens[7].text == "komplicerede"
def test_da_tokenizer_handles_posessives_and_contractions(da_tokenizer):
tokens = da_tokenizer(
"'DBA's, Lars' og Liz' bil sku' sgu' ik' ha' en bule, det ka' han ik' li' mere', sagde hun."
)
assert len(tokens) == 25
assert tokens[0].text == "'"
assert tokens[1].text == "DBA's"
assert tokens[2].text == ","
assert tokens[3].text == "Lars'"
assert tokens[4].text == "og"
assert tokens[5].text == "Liz'"
assert tokens[6].text == "bil"
assert tokens[7].text == "sku'"
assert tokens[8].text == "sgu'"
assert tokens[9].text == "ik'"
assert tokens[10].text == "ha'"
assert tokens[11].text == "en"
assert tokens[12].text == "bule"
assert tokens[13].text == ","
assert tokens[14].text == "det"
assert tokens[15].text == "ka'"
assert tokens[16].text == "han"
assert tokens[17].text == "ik'"
assert tokens[18].text == "li'"
assert tokens[19].text == "mere"
assert tokens[20].text == "'"
assert tokens[21].text == ","
assert tokens[22].text == "sagde"
assert tokens[23].text == "hun"
assert tokens[24].text == "."
|
from myapp.server import _run_server
@app.task(bind=True)
def run_task_redis(self):
_run_server()
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Instance Metadata information."""
import base64
import os
import posixpath
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import importutils
from oslo_utils import timeutils
import six
from nova.api.ec2 import ec2utils
from nova.api.metadata import password
from nova import availability_zones as az
from nova import block_device
from nova.cells import opts as cells_opts
from nova.cells import rpcapi as cells_rpcapi
import nova.conf
from nova import context
from nova import network
from nova.network.security_group import openstack_driver
from nova import objects
from nova.objects import keypair as keypair_obj
from nova import utils
from nova.virt import netutils
metadata_opts = [
cfg.StrOpt('config_drive_skip_versions',
default=('1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 '
'2007-12-15 2008-02-01 2008-09-01'),
help='List of metadata versions to skip placing into the '
'config drive'),
cfg.StrOpt('vendordata_driver',
default='nova.api.metadata.vendordata_json.JsonFileVendorData',
help='DEPRECATED: Driver to use for vendor data',
deprecated_for_removal=True),
]
CONF = nova.conf.CONF
CONF.register_opts(metadata_opts)
VERSIONS = [
'1.0',
'2007-01-19',
'2007-03-01',
'2007-08-29',
'2007-10-10',
'2007-12-15',
'2008-02-01',
'2008-09-01',
'2009-04-04',
]
FOLSOM = '2012-08-10'
GRIZZLY = '2013-04-04'
HAVANA = '2013-10-17'
LIBERTY = '2015-10-15'
OPENSTACK_VERSIONS = [
FOLSOM,
GRIZZLY,
HAVANA,
LIBERTY,
]
VERSION = "version"
CONTENT = "content"
CONTENT_DIR = "content"
MD_JSON_NAME = "meta_data.json"
VD_JSON_NAME = "vendor_data.json"
NW_JSON_NAME = "network_data.json"
UD_NAME = "user_data"
PASS_NAME = "password"
MIME_TYPE_TEXT_PLAIN = "text/plain"
MIME_TYPE_APPLICATION_JSON = "application/json"
LOG = logging.getLogger(__name__)
class InvalidMetadataVersion(Exception):
pass
class InvalidMetadataPath(Exception):
pass
class InstanceMetadata(object):
"""Instance metadata."""
def __init__(self, instance, address=None, content=None, extra_md=None,
network_info=None, vd_driver=None, network_metadata=None):
"""Creation of this object should basically cover all time consuming
collection. Methods after that should not cause time delays due to
network operations or lengthy cpu operations.
The user should then get a single instance and make multiple method
calls on it.
"""
if not content:
content = []
ctxt = context.get_admin_context()
# The default value of mimeType is set to MIME_TYPE_TEXT_PLAIN
self.set_mimetype(MIME_TYPE_TEXT_PLAIN)
self.instance = instance
self.extra_md = extra_md
self.availability_zone = az.get_instance_availability_zone(ctxt,
instance)
secgroup_api = openstack_driver.get_openstack_security_group_driver()
self.security_groups = secgroup_api.get_instance_security_groups(
ctxt, instance)
self.mappings = _format_instance_mapping(ctxt, instance)
if instance.user_data is not None:
self.userdata_raw = base64.b64decode(instance.user_data)
else:
self.userdata_raw = None
self.address = address
# expose instance metadata.
self.launch_metadata = utils.instance_meta(instance)
self.password = password.extract_password(instance)
self.uuid = instance.uuid
self.content = {}
self.files = []
# get network info, and the rendered network template
if network_info is None:
network_info = instance.info_cache.network_info
# expose network metadata
if network_metadata is None:
self.network_metadata = netutils.get_network_metadata(network_info)
else:
self.network_metadata = network_metadata
self.ip_info = \
ec2utils.get_ip_info_for_instance_from_nw_info(network_info)
self.network_config = None
cfg = netutils.get_injected_network_template(network_info)
if cfg:
key = "%04i" % len(self.content)
self.content[key] = cfg
self.network_config = {"name": "network_config",
'content_path': "/%s/%s" % (CONTENT_DIR, key)}
# 'content' is passed in from the configdrive code in
# nova/virt/libvirt/driver.py. That's how we get the injected files
# (personalities) in. AFAIK they're not stored in the db at all,
# so are not available later (web service metadata time).
for (path, contents) in content:
key = "%04i" % len(self.content)
self.files.append({'path': path,
'content_path': "/%s/%s" % (CONTENT_DIR, key)})
self.content[key] = contents
if vd_driver is None:
vdclass = importutils.import_class(CONF.vendordata_driver)
else:
vdclass = vd_driver
self.vddriver = vdclass(instance=instance, address=address,
extra_md=extra_md, network_info=network_info)
self.route_configuration = None
def _route_configuration(self):
if self.route_configuration:
return self.route_configuration
path_handlers = {UD_NAME: self._user_data,
PASS_NAME: self._password,
VD_JSON_NAME: self._vendor_data,
MD_JSON_NAME: self._metadata_as_json,
NW_JSON_NAME: self._network_data,
VERSION: self._handle_version,
CONTENT: self._handle_content}
self.route_configuration = RouteConfiguration(path_handlers)
return self.route_configuration
def set_mimetype(self, mime_type):
self.md_mimetype = mime_type
def get_mimetype(self):
return self.md_mimetype
def get_ec2_metadata(self, version):
if version == "latest":
version = VERSIONS[-1]
if version not in VERSIONS:
raise InvalidMetadataVersion(version)
hostname = self._get_hostname()
floating_ips = self.ip_info['floating_ips']
floating_ip = floating_ips and floating_ips[0] or ''
fixed_ips = self.ip_info['fixed_ips']
fixed_ip = fixed_ips and fixed_ips[0] or ''
fmt_sgroups = [x['name'] for x in self.security_groups]
meta_data = {
'ami-id': self.instance.ec2_ids.ami_id,
'ami-launch-index': self.instance.launch_index,
'ami-manifest-path': 'FIXME',
'instance-id': self.instance.ec2_ids.instance_id,
'hostname': hostname,
'local-ipv4': fixed_ip or self.address,
'reservation-id': self.instance.reservation_id,
'security-groups': fmt_sgroups}
# public keys are strangely rendered in ec2 metadata service
# meta-data/public-keys/ returns '0=keyname' (with no trailing /)
# and only if there is a public key given.
# '0=keyname' means there is a normally rendered dict at
# meta-data/public-keys/0
#
# meta-data/public-keys/ : '0=%s' % keyname
# meta-data/public-keys/0/ : 'openssh-key'
# meta-data/public-keys/0/openssh-key : '%s' % publickey
if self.instance.key_name:
meta_data['public-keys'] = {
'0': {'_name': "0=" + self.instance.key_name,
'openssh-key': self.instance.key_data}}
if self._check_version('2007-01-19', version):
meta_data['local-hostname'] = hostname
meta_data['public-hostname'] = hostname
meta_data['public-ipv4'] = floating_ip
if False and self._check_version('2007-03-01', version):
# TODO(vish): store product codes
meta_data['product-codes'] = []
if self._check_version('2007-08-29', version):
instance_type = self.instance.get_flavor()
meta_data['instance-type'] = instance_type['name']
if False and self._check_version('2007-10-10', version):
# TODO(vish): store ancestor ids
meta_data['ancestor-ami-ids'] = []
if self._check_version('2007-12-15', version):
meta_data['block-device-mapping'] = self.mappings
if self.instance.ec2_ids.kernel_id:
meta_data['kernel-id'] = self.instance.ec2_ids.kernel_id
if self.instance.ec2_ids.ramdisk_id:
meta_data['ramdisk-id'] = self.instance.ec2_ids.ramdisk_id
if self._check_version('2008-02-01', version):
meta_data['placement'] = {'availability-zone':
self.availability_zone}
if self._check_version('2008-09-01', version):
meta_data['instance-action'] = 'none'
data = {'meta-data': meta_data}
if self.userdata_raw is not None:
data['user-data'] = self.userdata_raw
return data
def get_ec2_item(self, path_tokens):
# get_ec2_metadata returns dict without top level version
data = self.get_ec2_metadata(path_tokens[0])
return find_path_in_tree(data, path_tokens[1:])
def get_openstack_item(self, path_tokens):
if path_tokens[0] == CONTENT_DIR:
return self._handle_content(path_tokens)
return self._route_configuration().handle_path(path_tokens)
def _metadata_as_json(self, version, path):
metadata = {'uuid': self.uuid}
if self.launch_metadata:
metadata['meta'] = self.launch_metadata
if self.files:
metadata['files'] = self.files
if self.extra_md:
metadata.update(self.extra_md)
if self.network_config:
metadata['network_config'] = self.network_config
if self.instance.key_name:
metadata['public_keys'] = {
self.instance.key_name: self.instance.key_data
}
if cells_opts.get_cell_type() == 'compute':
cells_api = cells_rpcapi.CellsAPI()
keypair = cells_api.get_keypair_at_top(
context.get_admin_context(), self.instance.user_id,
self.instance.key_name)
else:
keypair = keypair_obj.KeyPair.get_by_name(
context.get_admin_context(), self.instance.user_id,
self.instance.key_name)
metadata['keys'] = [
{'name': keypair.name,
'type': keypair.type,
'data': keypair.public_key}
]
metadata['hostname'] = self._get_hostname()
metadata['name'] = self.instance.display_name
metadata['launch_index'] = self.instance.launch_index
metadata['availability_zone'] = self.availability_zone
if self._check_os_version(GRIZZLY, version):
metadata['random_seed'] = base64.b64encode(os.urandom(512))
if self._check_os_version(LIBERTY, version):
metadata['project_id'] = self.instance.project_id
self.set_mimetype(MIME_TYPE_APPLICATION_JSON)
return jsonutils.dump_as_bytes(metadata)
def _handle_content(self, path_tokens):
if len(path_tokens) == 1:
raise KeyError("no listing for %s" % "/".join(path_tokens))
if len(path_tokens) != 2:
raise KeyError("Too many tokens for /%s" % CONTENT_DIR)
return self.content[path_tokens[1]]
def _handle_version(self, version, path):
# request for /version, give a list of what is available
ret = [MD_JSON_NAME]
if self.userdata_raw is not None:
ret.append(UD_NAME)
if self._check_os_version(GRIZZLY, version):
ret.append(PASS_NAME)
if self._check_os_version(HAVANA, version):
ret.append(VD_JSON_NAME)
if self._check_os_version(LIBERTY, version):
ret.append(NW_JSON_NAME)
return ret
def _user_data(self, version, path):
if self.userdata_raw is None:
raise KeyError(path)
return self.userdata_raw
def _network_data(self, version, path):
if self.network_metadata is None:
return jsonutils.dump_as_bytes({})
return jsonutils.dump_as_bytes(self.network_metadata)
def _password(self, version, path):
if self._check_os_version(GRIZZLY, version):
return password.handle_password
raise KeyError(path)
def _vendor_data(self, version, path):
if self._check_os_version(HAVANA, version):
self.set_mimetype(MIME_TYPE_APPLICATION_JSON)
return jsonutils.dump_as_bytes(self.vddriver.get())
raise KeyError(path)
def _check_version(self, required, requested, versions=VERSIONS):
return versions.index(requested) >= versions.index(required)
def _check_os_version(self, required, requested):
return self._check_version(required, requested, OPENSTACK_VERSIONS)
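    # Illustration of the ordering check above, assuming the version lists are
    # ascending date strings:
    #
    #     versions = ['2007-01-19', '2007-03-01', '2008-02-01']
    #     versions.index('2008-02-01') >= versions.index('2007-03-01')  # True
    #
    # i.e. a request for a newer version satisfies any older requirement.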
def _get_hostname(self):
return "%s%s%s" % (self.instance.hostname,
'.' if CONF.dhcp_domain else '',
CONF.dhcp_domain)
def lookup(self, path):
if path == "" or path[0] != "/":
path = posixpath.normpath("/" + path)
else:
path = posixpath.normpath(path)
# Set default mimeType. It will be modified only if there is a change
self.set_mimetype(MIME_TYPE_TEXT_PLAIN)
# fix up requests, prepending /ec2 to anything that does not match
path_tokens = path.split('/')[1:]
if path_tokens[0] not in ("ec2", "openstack"):
if path_tokens[0] == "":
# request for /
path_tokens = ["ec2"]
else:
path_tokens = ["ec2"] + path_tokens
path = "/" + "/".join(path_tokens)
# all values of the 'path' input start with '/' and have no trailing '/'
# specifically handle the top level request
if len(path_tokens) == 1:
if path_tokens[0] == "openstack":
# NOTE(vish): don't show versions that are in the future
today = timeutils.utcnow().strftime("%Y-%m-%d")
versions = [v for v in OPENSTACK_VERSIONS if v <= today]
if OPENSTACK_VERSIONS != versions:
LOG.debug("future versions %s hidden in version list",
[v for v in OPENSTACK_VERSIONS
if v not in versions])
versions += ["latest"]
else:
versions = VERSIONS + ["latest"]
return versions
try:
if path_tokens[0] == "openstack":
data = self.get_openstack_item(path_tokens[1:])
else:
data = self.get_ec2_item(path_tokens[1:])
except (InvalidMetadataVersion, KeyError):
raise InvalidMetadataPath(path)
return data
def metadata_for_config_drive(self):
"""Yields (path, value) tuples for metadata elements."""
# EC2 style metadata
for version in VERSIONS + ["latest"]:
if version in CONF.config_drive_skip_versions.split(' '):
continue
data = self.get_ec2_metadata(version)
if 'user-data' in data:
filepath = os.path.join('ec2', version, 'user-data')
yield (filepath, data['user-data'])
del data['user-data']
try:
del data['public-keys']['0']['_name']
except KeyError:
pass
filepath = os.path.join('ec2', version, 'meta-data.json')
yield (filepath, jsonutils.dump_as_bytes(data['meta-data']))
ALL_OPENSTACK_VERSIONS = OPENSTACK_VERSIONS + ["latest"]
for version in ALL_OPENSTACK_VERSIONS:
path = 'openstack/%s/%s' % (version, MD_JSON_NAME)
yield (path, self.lookup(path))
path = 'openstack/%s/%s' % (version, UD_NAME)
if self.userdata_raw is not None:
yield (path, self.lookup(path))
if self._check_version(HAVANA, version, ALL_OPENSTACK_VERSIONS):
path = 'openstack/%s/%s' % (version, VD_JSON_NAME)
yield (path, self.lookup(path))
if self._check_version(LIBERTY, version, ALL_OPENSTACK_VERSIONS):
path = 'openstack/%s/%s' % (version, NW_JSON_NAME)
yield (path, self.lookup(path))
for (cid, content) in six.iteritems(self.content):
yield ('%s/%s/%s' % ("openstack", CONTENT_DIR, cid), content)
class RouteConfiguration(object):
"""Routes metadata paths to request handlers."""
def __init__(self, path_handler):
self.path_handlers = path_handler
def _version(self, version):
if version == "latest":
version = OPENSTACK_VERSIONS[-1]
if version not in OPENSTACK_VERSIONS:
raise InvalidMetadataVersion(version)
return version
def handle_path(self, path_tokens):
version = self._version(path_tokens[0])
if len(path_tokens) == 1:
path = VERSION
else:
path = '/'.join(path_tokens[1:])
path_handler = self.path_handlers[path]
if path_handler is None:
raise KeyError(path)
return path_handler(version, path)
class VendorDataDriver(object):
"""The base VendorData Drivers should inherit from."""
def __init__(self, *args, **kwargs):
"""Init method should do all expensive operations."""
self._data = {}
def get(self):
"""Return a dictionary of primitives to be rendered in metadata
:return: A dictionary of primitives.
"""
return self._data
def get_metadata_by_address(address):
ctxt = context.get_admin_context()
fixed_ip = network.API().get_fixed_ip_by_address(ctxt, address)
return get_metadata_by_instance_id(fixed_ip['instance_uuid'],
address,
ctxt)
def get_metadata_by_instance_id(instance_id, address, ctxt=None):
ctxt = ctxt or context.get_admin_context()
instance = objects.Instance.get_by_uuid(
ctxt, instance_id, expected_attrs=['ec2_ids', 'flavor', 'info_cache',
'metadata', 'system_metadata',
'security_groups'])
return InstanceMetadata(instance, address)
def _format_instance_mapping(ctxt, instance):
bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(
ctxt, instance.uuid)
return block_device.instance_block_mapping(instance, bdms)
def ec2_md_print(data):
if isinstance(data, dict):
output = ''
for key in sorted(data.keys()):
if key == '_name':
continue
if isinstance(data[key], dict):
if '_name' in data[key]:
output += str(data[key]['_name'])
else:
output += key + '/'
else:
output += key
output += '\n'
return output[:-1]
elif isinstance(data, list):
return '\n'.join(data)
else:
return str(data)
def find_path_in_tree(data, path_tokens):
# given a dict/list tree, and a path in that tree, return data found there.
for i in range(0, len(path_tokens)):
if isinstance(data, dict) or isinstance(data, list):
if path_tokens[i] in data:
data = data[path_tokens[i]]
else:
raise KeyError("/".join(path_tokens[0:i]))
else:
if i != len(path_tokens) - 1:
raise KeyError("/".join(path_tokens[0:i]))
data = data[path_tokens[i]]
return data
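if __name__ == '__main__':
    # Standalone sketch of the two helpers above on hypothetical sample data
    # (not real instance metadata); guarded so library imports are unaffected.
    _tree = {'meta-data': {'hostname': 'vm-1',
                           'placement': {'availability-zone': 'az1'}}}
    assert find_path_in_tree(_tree, ['meta-data', 'hostname']) == 'vm-1'
    assert ec2_md_print(_tree['meta-data']) == 'hostname\nplacement/'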
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from random import shuffle
from random import seed
import matplotlib.pyplot as plt
from sklearn.decomposition import PCA
from sklearn.feature_extraction import DictVectorizer
from sklearn.metrics import adjusted_rand_score
from concept_formation.cobweb3 import Cobweb3Tree
from concept_formation.cluster import cluster
from concept_formation.datasets import load_iris
seed(0)
irises = load_iris()
shuffle(irises)
tree = Cobweb3Tree()
irises_no_class = [{a: iris[a]
for a in iris if a != 'class'} for iris in irises]
clusters = next(cluster(tree, irises_no_class))
iris_class = [iris['class'] for iris in irises]
ari = adjusted_rand_score(clusters, iris_class)
dv = DictVectorizer(sparse=False)
iris_X = dv.fit_transform(
[{a: iris[a] for a in iris if a != 'class'} for iris in irises])
pca = PCA(n_components=2)
iris_2d_x = pca.fit_transform(iris_X)
colors = ['b', 'g', 'r', 'y', 'k', 'c', 'm']
shapes = ['o', '^', '+']
clust_set = {v: i for i, v in enumerate(list(set(clusters)))}
class_set = {v: i for i, v in enumerate(list(set(iris_class)))}
for class_idx, class_label in enumerate(class_set):
x = [v[0] for i, v in enumerate(iris_2d_x) if iris_class[i] == class_label]
y = [v[1] for i, v in enumerate(iris_2d_x) if iris_class[i] == class_label]
c = [colors[clust_set[clusters[i]]] for i, v in enumerate(iris_2d_x) if
iris_class[i] == class_label]
plt.scatter(x, y, color=c, marker=shapes[class_idx], label=class_label)
plt.title("COBWEB/3 Iris Clustering (ARI = %0.2f)" % (ari))
plt.xlabel("PCA Dimension 1")
plt.ylabel("PCA Dimension 2")
plt.legend(loc=4)
plt.show()
|
from abc import ABC, abstractmethod
class State(ABC):
"""
State object, which stores a solution via its decision variables. The
objective value is evaluated via its ``objective()`` member, and should
return a numeric type - e.g. an ``int``, ``float``, or comparable.
The State class is abstract - you are encouraged to subclass it to suit
your specific problem.
"""
@abstractmethod
def objective(self):
"""
Computes the state's associated objective value.
Returns
-------
float
Some numeric value, e.g. an ``int`` or ``float``.
"""
return NotImplemented
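if __name__ == "__main__":
    # Minimal usage sketch (illustrative only, not part of the library): a
    # one-dimensional state whose objective is its squared distance from zero.
    class _SquaredState(State):
        def __init__(self, x):
            self.x = x

        def objective(self):
            return self.x ** 2

    assert _SquaredState(3.0).objective() == 9.0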
|
# Lint as: python3
#
# Copyright 2020 The XLS Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=invalid-name
"""Type system deduction rules for AST nodes."""
import typing
from typing import Text, Dict, Union, Callable, Type, Tuple, List, Set
from absl import logging
import dataclasses
from xls.dslx import ast_helpers
from xls.dslx import bit_helpers
from xls.dslx import concrete_type_helpers
from xls.dslx import dslx_builtins
from xls.dslx import parametric_instantiator
from xls.dslx.python import cpp_ast as ast
from xls.dslx.python import cpp_scanner as scanner
from xls.dslx.python.cpp_concrete_type import ArrayType
from xls.dslx.python.cpp_concrete_type import BitsType
from xls.dslx.python.cpp_concrete_type import ConcreteType
from xls.dslx.python.cpp_concrete_type import ConcreteTypeDim
from xls.dslx.python.cpp_concrete_type import EnumType
from xls.dslx.python.cpp_concrete_type import FunctionType
from xls.dslx.python.cpp_concrete_type import TupleType
from xls.dslx.python.cpp_parametric_expression import ParametricAdd
from xls.dslx.python.cpp_parametric_expression import ParametricExpression
from xls.dslx.python.cpp_parametric_expression import ParametricSymbol
from xls.dslx.python.cpp_pos import Span
from xls.dslx.python.cpp_type_info import TypeInfo
from xls.dslx.python.cpp_type_info import TypeMissingError
from xls.dslx.xls_type_error import TypeInferenceError
from xls.dslx.xls_type_error import XlsTypeError
# Dictionary used as registry for rule dispatch based on AST node class.
RULES = {}
SymbolicBindings = parametric_instantiator.SymbolicBindings
RuleFunction = Callable[[ast.AstNode, 'DeduceCtx'], ConcreteType]
def _rule(cls: Type[ast.AstNode]):
"""Decorator for a type inference rule that pertains to class 'cls'."""
def register(f):
# Register the checked function as the rule.
RULES[cls] = f
return f
return register
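# The decorator above implements a simple class-keyed dispatch registry; the
# same pattern in miniature (toy names, not this module's RULES table):
#
#     toy_rules = {}
#     def toy_rule(cls):
#         def register(f):
#             toy_rules[cls] = f
#             return f
#         return register
#
#     @toy_rule(int)
#     def _handle_int(n):
#         return n + 1
#
#     toy_rules[int](41)  # -> 42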
# Type signature for the import function callback:
# (import_tokens) -> (module, type_info)
ImportFn = Callable[[Tuple[Text, ...]], Tuple[ast.Module, TypeInfo]]
# Type signature for interpreter function callback:
# (module, type_info, env, bit_widths, expr, f_import, fn_ctx) ->
# value_as_int
#
# This is an abstract interface to break circular dependencies; see
# interpreter_helpers.py
InterpCallbackType = Callable[[
ast.Module, TypeInfo, Dict[Text, int], Dict[Text, int], ast.Expr, ImportFn
], int]
# Type for stack of functions deduction is running on.
# [(name, symbolic_bindings), ...]
FnStack = List[Tuple[Text, Dict[Text, int]]]
@dataclasses.dataclass
class DeduceCtx:
"""A wrapper over useful objects for typechecking.
Attributes:
type_info: Maps an AST node to its deduced type.
module: The (entry point) module we are typechecking.
interpret_expr: An Interpreter wrapper that parametric_instantiator uses to
evaluate bindings with complex expressions (eg. function calls).
check_function_in_module: A callback to typecheck parametric functions that
are not in this module.
fn_stack: Keeps track of the function we're currently typechecking and the
symbolic bindings we should be using.
"""
type_info: TypeInfo
module: ast.Module
interpret_expr: InterpCallbackType
check_function_in_module: Callable[[ast.Function, 'DeduceCtx'], None]
fn_stack: FnStack = dataclasses.field(default_factory=list)
def resolve(type_: ConcreteType, ctx: DeduceCtx) -> ConcreteType:
"""Resolves "type_" via provided symbolic bindings.
Uses the symbolic bindings of the function we're currently inside of to
resolve parametric types.
Args:
type_: Type to resolve any contained dims for.
ctx: Deduction context to use in resolving the dims.
Returns:
"type_" with dimensions resolved according to bindings in "ctx".
"""
_, fn_symbolic_bindings = ctx.fn_stack[-1]
def resolver(dim: ConcreteTypeDim) -> ConcreteTypeDim:
if isinstance(dim.value, ParametricExpression):
return ConcreteTypeDim(dim.value.evaluate(fn_symbolic_bindings))
return dim
return concrete_type_helpers.map_size(type_, ctx.module, resolver)
@_rule(ast.Param)
def _deduce_Param(self: ast.Param, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
return deduce(self.type_, ctx)
@_rule(ast.Constant)
def _deduce_Constant(self: ast.Constant, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
result = ctx.type_info[self.name] = deduce(self.value, ctx)
ctx.type_info.note_constant(self.name, self)
return result
def _check_bitwidth(n: ast.Number, concrete_type: ConcreteType) -> None:
if (isinstance(concrete_type, BitsType) and
isinstance(concrete_type.get_total_bit_count().value, int) and
not bit_helpers.fits_in_bits(
ast_helpers.get_value_as_int(n),
concrete_type.get_total_bit_count().value)):
msg = 'value {!r} does not fit in the bitwidth of a {} ({})'.format(
n.value, concrete_type,
concrete_type.get_total_bit_count().value)
raise TypeInferenceError(span=n.span, type_=concrete_type, suffix=msg)
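# Standalone sketch (illustrative only) of the unsigned case of the
# bit_helpers.fits_in_bits check used above; the real helper also accounts
# for signed ranges.
def _fits_in_ubits_sketch(value, bit_count):
    # e.g. _fits_in_ubits_sketch(255, 8) -> True; _fits_in_ubits_sketch(256, 8) -> False
    return 0 <= value < (1 << bit_count)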
@_rule(ast.ConstantArray)
def _deduce_ConstantArray(
self: ast.ConstantArray, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Deduces the concrete type of a ConstantArray AST node."""
# We permit constant arrays to drop annotations for numbers as a convenience
# (before we have unifying type inference) by allowing constant arrays to have
# a leading type annotation. If they don't have a leading type annotation,
# just fall back to normal array type inference; if we encounter a number
# without a type annotation we'll flag an error per usual.
if self.type_ is None:
return _deduce_Array(self, ctx)
# Determine the element type that corresponds to the annotation and go mark
# any un-typed numbers in the constant array as having that type.
concrete_type = deduce(self.type_, ctx)
if not isinstance(concrete_type, ArrayType):
raise TypeInferenceError(
self.type_.span, concrete_type,
f'Annotated type for array literal must be an array type; got {concrete_type.get_debug_type_name()} {self.type_}'
)
element_type = concrete_type.get_element_type()
for member in self.members:
assert ast.is_constant(member)
if isinstance(member, ast.Number) and not member.type_:
ctx.type_info[member] = element_type
_check_bitwidth(member, element_type)
# Use the base class to check all members are compatible.
_deduce_Array(self, ctx)
return concrete_type
def _create_element_invocation(owner: ast.AstNodeOwner, span_: Span,
callee: Union[ast.NameRef, ast.ModRef],
arg_array: ast.Expr) -> ast.Invocation:
"""Creates a function invocation on the first element of the given array.
We need to create a fake invocation to deduce the type of a function
in the case where map is called with a builtin as the map function. Normally,
map functions (including parametric ones) have their types deduced when their
ast.Function nodes are encountered (where a similar fake ast.Invocation node
is created).
Builtins don't have ast.Function nodes, so that inference can't occur; we
essentially perform that synthesis and deduction here.
Args:
owner: AST node owner.
span_: The location in the code where analysis is occurring.
callee: The function to be invoked.
arg_array: The array of arguments (at least one) to the function.
Returns:
An invocation node for the given function when called with an element in the
argument array.
"""
annotation = ast_helpers.make_builtin_type_annotation(
owner, span_, scanner.Token(span_, scanner.Keyword.U32), ())
index_number = ast.Number(owner, span_, '32', ast.NumberKind.OTHER,
annotation)
index = ast.Index(owner, span_, arg_array, index_number)
return ast.Invocation(owner, span_, callee, (index,))
def _check_parametric_invocation(parametric_fn: ast.Function,
invocation: ast.Invocation,
symbolic_bindings: SymbolicBindings,
ctx: DeduceCtx):
"""Checks the parametric fn body using the invocation's symbolic bindings."""
if isinstance(invocation.callee, ast.ModRef):
# We need to typecheck this function with respect to its own module.
# Let's use typecheck._check_function_or_test_in_module() to do this
# in case we run into more dependencies in that module.
if ctx.type_info.has_instantiation(invocation, symbolic_bindings):
# We've already typechecked this imported parametric function using
# these symbolic bindings.
return
imported_module, imported_type_info = ctx.type_info.get_imported(
invocation.callee.mod)
invocation_imported_type_info = TypeInfo(
imported_module, parent=imported_type_info)
imported_ctx = DeduceCtx(invocation_imported_type_info, imported_module,
ctx.interpret_expr, ctx.check_function_in_module)
imported_ctx.fn_stack.append(
(parametric_fn.name.identifier, dict(symbolic_bindings)))
ctx.check_function_in_module(parametric_fn, imported_ctx)
ctx.type_info.add_instantiation(invocation, symbolic_bindings,
invocation_imported_type_info)
return
assert isinstance(invocation.callee, ast.NameRef), invocation.callee
has_instantiation = ctx.type_info.has_instantiation(invocation,
symbolic_bindings)
# We need to typecheck this function with respect to its own module
# Let's take advantage of the existing try-catch mechanism in
# typecheck._check_function_or_test_in_module().
try:
# See if the body is present in the type_info mapping (we do this just
# to observe if it raises an exception).
ctx.type_info[parametric_fn.body]
except TypeMissingError as e:
# If we've already typechecked the parametric function with the
# current symbolic bindings, no need to do it again.
if not has_instantiation:
# Let's typecheck this parametric function using the symbolic bindings
# we just derived to make sure they check out ok.
e.node = invocation.callee.name_def
ctx.fn_stack.append(
(parametric_fn.name.identifier, dict(symbolic_bindings)))
ctx.type_info = TypeInfo(ctx.type_info.module, parent=ctx.type_info)
raise
if not has_instantiation:
# If we haven't yet stored a type_info for these symbolic bindings
# and we're at this point, it means that we just finished typechecking
# the parametric function. Let's store the results.
ctx.type_info.parent.add_instantiation(invocation, symbolic_bindings,
ctx.type_info)
ctx.type_info = ctx.type_info.parent
@_rule(ast.Invocation)
def _deduce_Invocation(self: ast.Invocation, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Deduces the concrete type of an Invocation AST node."""
logging.vlog(5, 'Deducing type for invocation: %s', self)
arg_types = []
_, fn_symbolic_bindings = ctx.fn_stack[-1]
for arg in self.args:
try:
arg_types.append(resolve(deduce(arg, ctx), ctx))
except TypeMissingError as e:
# These nodes could be ModRefs or NameRefs.
callee_is_map = isinstance(
self.callee, ast.NameRef) and self.callee.name_def.identifier == 'map'
arg_is_builtin = isinstance(
arg, ast.NameRef
) and arg.name_def.identifier in dslx_builtins.PARAMETRIC_BUILTIN_NAMES
if callee_is_map and arg_is_builtin:
invocation = _create_element_invocation(ctx.module, self.span, arg,
self.args[0])
arg_types.append(resolve(deduce(invocation, ctx), ctx))
else:
raise
try:
# This will get us the type signature of the function.
# If the function is parametric, we won't check its body
# until after we have symbolic bindings for it
callee_type = deduce(self.callee, ctx)
except TypeMissingError as e:
e.span = self.span
e.user = self
raise
if not isinstance(callee_type, FunctionType):
raise XlsTypeError(self.callee.span, callee_type, None,
'Callee does not have a function type.')
if isinstance(self.callee, ast.ModRef):
imported_module, _ = ctx.type_info.get_imported(self.callee.mod)
callee_name = self.callee.value
callee_fn = imported_module.get_function(callee_name)
else:
assert isinstance(self.callee, ast.NameRef), self.callee
callee_name = self.callee.identifier
callee_fn = ctx.module.get_function(callee_name)
self_type, callee_sym_bindings = parametric_instantiator.instantiate_function(
self.span, callee_type, tuple(arg_types), ctx,
callee_fn.parametric_bindings)
caller_sym_bindings = tuple(fn_symbolic_bindings.items())
ctx.type_info.add_invocation_symbolic_bindings(self, caller_sym_bindings,
callee_sym_bindings)
if callee_fn.is_parametric():
# Now that we have callee_sym_bindings, let's use them to typecheck the body
# of callee_fn to make sure these values actually work
_check_parametric_invocation(callee_fn, self, callee_sym_bindings, ctx)
return self_type
def _deduce_slice_type(self: ast.Index, ctx: DeduceCtx,
lhs_type: ConcreteType) -> ConcreteType:
"""Deduces the concrete type of an Index AST node with a slice spec."""
index_slice = self.index
assert isinstance(index_slice, (ast.Slice, ast.WidthSlice)), index_slice
# TODO(leary): 2019-10-28 Only slicing bits types for now, and only with
# number ast nodes, generalize to arrays and constant expressions.
if not isinstance(lhs_type, BitsType):
raise XlsTypeError(self.span, lhs_type, None,
'Value to slice is not of "bits" type.')
bit_count = lhs_type.get_total_bit_count().value
if isinstance(index_slice, ast.WidthSlice):
start = index_slice.start
if isinstance(start, ast.Number) and start.type_ is None:
start_type = lhs_type.to_ubits()
resolved_start_type = resolve(start_type, ctx)
start_int = ast_helpers.get_value_as_int(start)
if not bit_helpers.fits_in_bits(
start_int,
resolved_start_type.get_total_bit_count().value):
raise TypeInferenceError(
start.span, resolved_start_type,
'Cannot fit {} in {} bits (inferred from bits to slice).'.format(
start_int,
resolved_start_type.get_total_bit_count().value))
ctx.type_info[start] = start_type
else:
start_type = deduce(start, ctx)
# Check the start is unsigned.
if start_type.signed:
raise TypeInferenceError(
start.span,
type_=start_type,
suffix='Start index for width-based slice must be unsigned.')
width_type = deduce(index_slice.width, ctx)
if isinstance(width_type.get_total_bit_count().value, int) and isinstance(
lhs_type.get_total_bit_count().value,
int) and width_type.get_total_bit_count(
).value > lhs_type.get_total_bit_count().value:
raise XlsTypeError(
start.span, lhs_type, width_type,
'Slice type must have <= original number of bits; attempted slice from {} to {} bits.'
.format(lhs_type.get_total_bit_count().value,
width_type.get_total_bit_count().value))
# Check the width type is bits-based (no enums, since value could be out
# of range of the enum values).
if not isinstance(width_type, BitsType):
raise TypeInferenceError(
self.span,
type_=width_type,
suffix='Require a bits-based type for width-based slice.')
# The width type is the thing returned from the width-slice.
return width_type
assert isinstance(index_slice, ast.Slice), index_slice
limit = ast_helpers.get_value_as_int(
index_slice.limit) if index_slice.limit else None
# PyType has trouble figuring out that start is definitely a Number at this
# point.
start = index_slice.start
assert isinstance(start, (ast.Number, type(None)))
start = ast_helpers.get_value_as_int(start) if start else None
_, fn_symbolic_bindings = ctx.fn_stack[-1]
if isinstance(bit_count, ParametricExpression):
bit_count = bit_count.evaluate(fn_symbolic_bindings)
start, width = bit_helpers.resolve_bit_slice_indices(bit_count, start, limit)
key = tuple(fn_symbolic_bindings.items())
ctx.type_info.add_slice_start_width(index_slice, key, (start, width))
return BitsType(signed=False, size=width)
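# Standalone sketch (illustrative only) of how a bits[start:limit] slice can
# resolve to (start, width) via Python's own slice semantics; the real logic
# lives in bit_helpers.resolve_bit_slice_indices.
def _resolve_bit_slice_sketch(bit_count, start, limit):
    # e.g. _resolve_bit_slice_sketch(8, -4, None) -> (4, 4): the top four bits.
    start, stop, _ = slice(start, limit).indices(bit_count)
    return start, max(stop - start, 0)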
@_rule(ast.Index)
def _deduce_Index(self: ast.Index, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Deduces the concrete type of an Index AST node."""
lhs_type = deduce(self.lhs, ctx)
# Check whether this is a slice-based indexing operations.
if isinstance(self.index, (ast.Slice, ast.WidthSlice)):
return _deduce_slice_type(self, ctx, lhs_type)
index_type = deduce(self.index, ctx)
if isinstance(lhs_type, TupleType):
if not isinstance(self.index, ast.Number):
raise XlsTypeError(self.index.span, index_type, None,
'Tuple index is not a literal number.')
index_value = ast_helpers.get_value_as_int(self.index)
if index_value >= lhs_type.get_tuple_length():
raise XlsTypeError(
self.index.span, lhs_type, None,
'Tuple index {} is out of range for this tuple type.'.format(
index_value))
return lhs_type.get_unnamed_members()[index_value]
if not isinstance(lhs_type, ArrayType):
raise TypeInferenceError(self.lhs.span, lhs_type,
'Value to index is not an array.')
index_ok = isinstance(index_type,
BitsType) and not isinstance(index_type, ArrayType)
if not index_ok:
raise XlsTypeError(self.index.span, index_type, None,
'Index type is not scalar bits.')
return lhs_type.get_element_type()
@_rule(ast.XlsTuple)
def _deduce_XlsTuple(self: ast.XlsTuple, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
members = tuple(deduce(m, ctx) for m in self.members)
return TupleType(members)
def _bind_names(name_def_tree: ast.NameDefTree, type_: ConcreteType,
ctx: DeduceCtx) -> None:
"""Binds names in name_def_tree to corresponding type given in type_."""
if name_def_tree.is_leaf():
name_def = name_def_tree.get_leaf()
ctx.type_info[name_def] = type_
return
if not isinstance(type_, TupleType):
raise XlsTypeError(
name_def_tree.span,
type_,
rhs_type=None,
suffix='Expected a tuple type for these names, but got {}.'.format(
type_))
if len(name_def_tree.tree) != type_.get_tuple_length():
raise TypeInferenceError(
name_def_tree.span, type_,
'Could not bind names, names are mismatched in number vs type; at '
'this level of the tuple: {} names, {} types.'.format(
len(name_def_tree.tree), type_.get_tuple_length()))
for subtree, subtype in zip(name_def_tree.tree, type_.get_unnamed_members()):
ctx.type_info[subtree] = subtype
_bind_names(subtree, subtype, ctx)
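# The recursion above destructures a name tree in parallel with a tuple type;
# the same shape over plain Python tuples (a toy sketch, not the real
# AST/ConcreteType API):
def _bind_names_sketch(names, types, out):
    # _bind_names_sketch(('a', ('b', 'c')), ('u32', ('u8', 'u1')), env)
    # leaves env == {'a': 'u32', 'b': 'u8', 'c': 'u1'}.
    if isinstance(names, str):
        out[names] = types
        return
    for name, type_ in zip(names, types):
        _bind_names_sketch(name, type_, out)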
@_rule(ast.Let)
def _deduce_Let(self: ast.Let, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Deduces the concrete type of a Let AST node."""
rhs_type = deduce(self.rhs, ctx)
resolved_rhs_type = resolve(rhs_type, ctx)
if self.type_ is not None:
concrete_type = deduce(self.type_, ctx)
resolved_concrete_type = resolve(concrete_type, ctx)
if resolved_rhs_type != resolved_concrete_type:
raise XlsTypeError(
self.rhs.span, resolved_concrete_type, resolved_rhs_type,
'Annotated type did not match inferred type of right hand side.')
_bind_names(self.name_def_tree, resolved_rhs_type, ctx)
if self.const:
deduce(self.const, ctx)
return deduce(self.body, ctx)
def _unify_WildcardPattern(_self: ast.WildcardPattern, _type: ConcreteType,
_ctx: DeduceCtx) -> None:
pass # Wildcard matches any type.
def _unify_NameDefTree(self: ast.NameDefTree, type_: ConcreteType,
ctx: DeduceCtx) -> None:
"""Unifies the NameDefTree AST node with the observed RHS type type_."""
resolved_rhs_type = resolve(type_, ctx)
if self.is_leaf():
leaf = self.get_leaf()
if isinstance(leaf, ast.NameDef):
ctx.type_info[leaf] = resolved_rhs_type
elif isinstance(leaf, ast.WildcardPattern):
pass
elif isinstance(leaf, (ast.Number, ast.EnumRef)):
resolved_leaf_type = resolve(deduce(leaf, ctx), ctx)
if resolved_leaf_type != resolved_rhs_type:
raise TypeInferenceError(
span=self.span,
type_=resolved_rhs_type,
suffix='Conflicting types; pattern expects {} but got {} from value'
.format(resolved_rhs_type, resolved_leaf_type))
else:
assert isinstance(leaf, ast.NameRef), repr(leaf)
ref_type = ctx.type_info[leaf.name_def]
resolved_ref_type = resolve(ref_type, ctx)
if resolved_ref_type != resolved_rhs_type:
raise TypeInferenceError(
span=self.span,
type_=resolved_rhs_type,
suffix='Conflicting types; pattern expects {} but got {} from reference'
.format(resolved_rhs_type, resolved_ref_type))
else:
assert isinstance(self.tree, tuple)
if isinstance(type_, TupleType) and type_.get_tuple_length() == len(
self.tree):
for subtype, subtree in zip(type_.get_unnamed_members(), self.tree):
_unify(subtree, subtype, ctx)
def _unify(n: ast.AstNode, other: ConcreteType, ctx: DeduceCtx) -> None:
f = globals()['_unify_{}'.format(n.__class__.__name__)]
f(n, other, ctx)
@_rule(ast.Match)
def _deduce_Match(self: ast.Match, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Deduces the concrete type of a Match AST node."""
matched = deduce(self.matched, ctx)
for arm in self.arms:
for pattern in arm.patterns:
_unify(pattern, matched, ctx)
arm_types = tuple(deduce(arm, ctx) for arm in self.arms)
resolved_arm0_type = resolve(arm_types[0], ctx)
for i, arm_type in enumerate(arm_types[1:], 1):
resolved_arm_type = resolve(arm_type, ctx)
if resolved_arm_type != resolved_arm0_type:
raise XlsTypeError(
self.arms[i].span, resolved_arm_type, resolved_arm0_type,
'This match arm did not have the same type as preceding match arms.')
return resolved_arm0_type
@_rule(ast.MatchArm)
def _deduce_MatchArm(self: ast.MatchArm, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
return deduce(self.expr, ctx)
@_rule(ast.For)
def _deduce_For(self: ast.For, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Deduces the concrete type of a For AST node."""
init_type = deduce(self.init, ctx)
annotated_type = deduce(self.type_, ctx)
_bind_names(self.names, annotated_type, ctx)
body_type = deduce(self.body, ctx)
deduce(self.iterable, ctx)
resolved_init_type = resolve(init_type, ctx)
resolved_body_type = resolve(body_type, ctx)
if resolved_init_type != resolved_body_type:
raise XlsTypeError(
self.span, resolved_init_type, resolved_body_type,
"For-loop init value type did not match for-loop body's result type.")
# TODO(leary): 2019-02-19 Type check annotated_type (the bound names each
# iteration) against init_type/body_type -- this requires us to understand
# how iterables turn into induction values.
return resolved_init_type
@_rule(ast.While)
def _deduce_While(self: ast.While, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Deduces the concrete type of a While AST node."""
init_type = deduce(self.init, ctx)
test_type = deduce(self.test, ctx)
resolved_init_type = resolve(init_type, ctx)
resolved_test_type = resolve(test_type, ctx)
if resolved_test_type != ConcreteType.U1:
raise XlsTypeError(self.test.span, test_type, ConcreteType.U1,
'Expect while-loop test to be a bool value.')
body_type = deduce(self.body, ctx)
resolved_body_type = resolve(body_type, ctx)
if resolved_init_type != resolved_body_type:
raise XlsTypeError(
self.span, init_type, body_type,
"While-loop init value type did not match while-loop body's "
'result type.')
return resolved_init_type
@_rule(ast.Carry)
def _deduce_Carry(self: ast.Carry, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
return deduce(self.loop.init, ctx)
def _is_acceptable_cast(from_: ConcreteType, to: ConcreteType) -> bool:
if {type(from_), type(to)} == {ArrayType, BitsType}:
return from_.get_total_bit_count() == to.get_total_bit_count()
return True
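# For example: casting between u32[2] (64 bits total, as an ArrayType) and u64
# is acceptable since the total bit counts match, while u32[2] to u32 is not.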
@_rule(ast.Cast)
def _deduce_Cast(self: ast.Cast, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Deduces the concrete type of a Cast AST node."""
type_result = deduce(self.type_, ctx)
expr_type = deduce(self.expr, ctx)
resolved_type_result = resolve(type_result, ctx)
resolved_expr_type = resolve(expr_type, ctx)
if not _is_acceptable_cast(from_=resolved_expr_type, to=resolved_type_result):
raise XlsTypeError(
self.span, expr_type, type_result,
'Cannot cast from expression type {} to {}.'.format(
resolved_expr_type, resolved_type_result))
return resolved_type_result
@_rule(ast.Unop)
def _deduce_Unop(self: ast.Unop, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
return deduce(self.operand, ctx)
@_rule(ast.Array)
def _deduce_Array(self: ast.Array, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Deduces the concrete type of an Array AST node."""
member_types = [deduce(m, ctx) for m in self.members]
resolved_type0 = resolve(member_types[0], ctx)
for i, x in enumerate(member_types[1:], 1):
resolved_x = resolve(x, ctx)
logging.vlog(5, 'array member type %d: %s', i, resolved_x)
if resolved_x != resolved_type0:
raise XlsTypeError(
self.members[i].span, resolved_type0, resolved_x,
'Array member did not have same type as other members.')
inferred = ArrayType(resolved_type0, len(member_types))
if not self.type_:
return inferred
annotated = deduce(self.type_, ctx)
if not isinstance(annotated, ArrayType):
raise XlsTypeError(self.span, annotated, None,
'Array was not annotated with an array type.')
resolved_element_type = resolve(annotated.get_element_type(), ctx)
if resolved_element_type != resolved_type0:
raise XlsTypeError(
self.span, resolved_element_type, resolved_type0,
'Annotated element type did not match inferred element type.')
if self.has_ellipsis:
# Since there are ellipsis, we determine the size from the annotated type.
# We've already checked the element types lined up.
return annotated
else:
if annotated.size != len(member_types):
raise XlsTypeError(
self.span, annotated, inferred,
'Annotated array size {!r} does not match inferred array size {!r}.'
.format(annotated.size, len(member_types)))
return inferred
@_rule(ast.TypeRef)
def _deduce_TypeRef(self: ast.TypeRef, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
return deduce(self.type_def, ctx)
@_rule(ast.ConstRef)
@_rule(ast.NameRef)
def _deduce_NameRef(self: ast.NameRef, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Deduces the concrete type of a NameDef AST node."""
try:
result = ctx.type_info[self.name_def]
except TypeMissingError as e:
logging.vlog(3, 'Could not resolve name def: %s', self.name_def)
e.span = self.span
e.user = self
raise
return result
@_rule(ast.EnumRef)
def _deduce_EnumRef(self: ast.EnumRef, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Deduces the concrete type of an EnumRef AST node."""
try:
result = ctx.type_info[self.enum]
except TypeMissingError as e:
logging.vlog(3, 'Could not resolve enum to type: %s', self.enum)
e.span = self.span
e.user = self
raise
# Check the name we're accessing is actually defined on the enum.
assert isinstance(result, EnumType), result
enum = result.get_nominal_type(ctx.module)
assert isinstance(enum, ast.Enum), enum
name = self.value
if not enum.has_value(name):
raise TypeInferenceError(
span=self.span,
type_=None,
suffix='Name {!r} is not defined by the enum {}'.format(
name, enum.identifier))
return result
@_rule(ast.Number)
def _deduce_Number(self: ast.Number, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Deduces the concrete type of a Number AST node."""
if not self.type_:
if self.kind == ast.NumberKind.BOOL:
return ConcreteType.U1
if self.kind == ast.NumberKind.CHARACTER:
return ConcreteType.U8
raise TypeInferenceError(
span=self.span,
type_=None,
suffix='Could not infer a type for this number, please annotate a type.'
)
concrete_type = resolve(deduce(self.type_, ctx), ctx)
_check_bitwidth(self, concrete_type)
return concrete_type
@_rule(ast.TypeDef)
def _deduce_TypeDef(self: ast.TypeDef, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
concrete_type = deduce(self.type_, ctx)
ctx.type_info[self.name] = concrete_type
return concrete_type
def _dim_to_parametric(self: ast.TypeAnnotation,
expr: ast.Expr) -> ParametricExpression:
"""Converts a dimension expression to a 'parametric' AST node."""
assert not isinstance(expr, ast.ConstRef), expr
if isinstance(expr, ast.NameRef):
return ParametricSymbol(expr.name_def.identifier, expr.span)
if isinstance(expr, ast.Binop):
if expr.kind == ast.BinopKind.ADD:
return ParametricAdd(
_dim_to_parametric(self, expr.lhs),
_dim_to_parametric(self, expr.rhs))
msg = 'Could not concretize type with dimension: {}.'.format(expr)
raise TypeInferenceError(self.span, self, suffix=msg)
def _dim_to_parametric_or_int(
self: ast.TypeAnnotation, expr: ast.Expr,
ctx: DeduceCtx) -> Union[int, ParametricExpression]:
"""Converts dimension expression within an annotation to int or parametric."""
if isinstance(expr, ast.Number):
ctx.type_info[expr] = ConcreteType.U32
return ast_helpers.get_value_as_int(expr)
if isinstance(expr, ast.ConstRef):
n = ctx.type_info.get_const_int(expr.name_def)
if not isinstance(n, ast.Number):
raise TypeInferenceError(
span=expr.span,
type_=None,
suffix=f'Expected a constant integral value with the name {expr.name_def}; got {n}'
)
return ast_helpers.get_value_as_int(n)
return _dim_to_parametric(self, expr)
@_rule(ast.TypeRefTypeAnnotation)
def _deduce_TypeRefTypeAnnotation(self: ast.TypeRefTypeAnnotation,
ctx: DeduceCtx) -> ConcreteType:
"""Dedeuces the concrete type of a TypeRef type annotation."""
base_type = deduce(self.type_ref, ctx)
maybe_struct = ast_helpers.evaluate_to_struct_or_enum_or_annotation(
self.type_ref.type_def, _get_imported_module_via_type_info, ctx.type_info)
if (isinstance(maybe_struct, ast.Struct) and maybe_struct.is_parametric() and
self.parametrics):
base_type = _concretize_struct_annotation(ctx.module, self, maybe_struct,
base_type)
return base_type
@_rule(ast.BuiltinTypeAnnotation)
def _deduce_BuiltinTypeAnnotation(
self: ast.BuiltinTypeAnnotation,
ctx: DeduceCtx, # pylint: disable=unused-argument
) -> ConcreteType:
signedness, bits = self.signedness_and_bits
return BitsType(signedness, bits)
@_rule(ast.TupleTypeAnnotation)
def _deduce_TupleTypeAnnotation(self: ast.TupleTypeAnnotation,
ctx: DeduceCtx) -> ConcreteType:
members = []
for member in self.members:
members.append(deduce(member, ctx))
return TupleType(tuple(members))
@_rule(ast.ArrayTypeAnnotation)
def _deduce_ArrayTypeAnnotation(self: ast.ArrayTypeAnnotation,
ctx: DeduceCtx) -> ConcreteType:
"""Deduces the concrete type of an Array type annotation."""
dim = _dim_to_parametric_or_int(self, self.dim, ctx)
if (isinstance(self.element_type, ast.BuiltinTypeAnnotation) and
self.element_type.bits == 0):
# No-volume builtin types like bits, uN, and sN.
return BitsType(self.element_type.signedness, dim)
element_type = deduce(self.element_type, ctx)
result = ArrayType(element_type, dim)
logging.vlog(4, 'array type annotation: %s => %s', self, result)
return result
@_rule(ast.ModRef)
def _deduce_ModRef(self: ast.ModRef, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Deduces the type of an entity referenced via module reference."""
imported_module, imported_type_info = ctx.type_info.get_imported(self.mod)
leaf_name = self.value
# May be a type definition reference.
if leaf_name in imported_module.get_typedef_by_name():
td = imported_module.get_typedef_by_name()[leaf_name]
if not td.public:
raise TypeInferenceError(
self.span,
type_=None,
suffix='Attempted to refer to module type that is not public.')
return imported_type_info[td.name]
# May be a function reference.
try:
f = imported_module.get_function(leaf_name)
except KeyError:
raise TypeInferenceError(
self.span,
type_=None,
suffix='Module {!r} function {!r} does not exist.'.format(
imported_module.name, leaf_name))
if not f.public:
raise TypeInferenceError(
self.span,
type_=None,
suffix='Attempted to refer to module {!r} function {!r} that is not public.'
.format(imported_module.name, f.name))
if f.name not in imported_type_info:
logging.vlog(
2, 'Function name not in imported_type_info; must be parametric: %r',
f.name)
assert f.is_parametric()
# We don't type check parametric functions until invocations.
# Let's typecheck this imported parametric function with respect to its
# module (this will only get the type signature, body gets typechecked
# after parametric instantiation).
imported_ctx = DeduceCtx(imported_type_info, imported_module,
ctx.interpret_expr, ctx.check_function_in_module)
imported_ctx.fn_stack.append(ctx.fn_stack[-1])
ctx.check_function_in_module(f, imported_ctx)
ctx.type_info.update(imported_ctx.type_info)
imported_type_info = imported_ctx.type_info
return imported_type_info[f.name]
@_rule(ast.Enum)
def _deduce_Enum(self: ast.Enum, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Deduces the concrete type of a Enum AST node."""
resolved_type = resolve(deduce(self.type_, ctx), ctx)
if not isinstance(resolved_type, BitsType):
raise XlsTypeError(self.span, resolved_type, None,
'Underlying type for an enum must be a bits type.')
# Grab the bit count of the Enum's underlying type.
bit_count = resolved_type.get_total_bit_count()
self.set_signedness(resolved_type.get_signedness())
result = EnumType(self, bit_count)
for member in self.values:
name, value = member.get_name_value(self)
# Note: the parser places the type_ from the enum on the value when it is
# a number, so this deduction flags inappropriate numbers.
deduce(value, ctx)
ctx.type_info[name] = ctx.type_info[value] = result
ctx.type_info[self.name] = ctx.type_info[self] = result
return result
@_rule(ast.Ternary)
def _deduce_Ternary(self: ast.Ternary, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Deduces the concrete type of a Ternary AST node."""
test_type = deduce(self.test, ctx)
resolved_test_type = resolve(test_type, ctx)
if resolved_test_type != ConcreteType.U1:
raise XlsTypeError(self.span, resolved_test_type, ConcreteType.U1,
'Test type for conditional expression is not "bool"')
cons_type = deduce(self.consequent, ctx)
resolved_cons_type = resolve(cons_type, ctx)
alt_type = deduce(self.alternate, ctx)
resolved_alt_type = resolve(alt_type, ctx)
if resolved_cons_type != resolved_alt_type:
raise XlsTypeError(
self.span, resolved_cons_type, resolved_alt_type,
'Ternary consequent type (in the "then" clause) did not match '
'alternate type (in the "else" clause)')
return resolved_cons_type
def _deduce_Concat(self: ast.Binop, ctx: DeduceCtx) -> ConcreteType:
"""Deduces the concrete type of a concatenate Binop AST node."""
lhs_type = deduce(self.lhs, ctx)
resolved_lhs_type = resolve(lhs_type, ctx)
rhs_type = deduce(self.rhs, ctx)
resolved_rhs_type = resolve(rhs_type, ctx)
# Array-ness must be the same on both sides.
if (isinstance(resolved_lhs_type, ArrayType) != isinstance(
resolved_rhs_type, ArrayType)):
raise XlsTypeError(
self.span, resolved_lhs_type, resolved_rhs_type,
'Attempting to concatenate array/non-array values together.')
if (isinstance(resolved_lhs_type, ArrayType) and
resolved_lhs_type.get_element_type() !=
resolved_rhs_type.get_element_type()):
raise XlsTypeError(
self.span, resolved_lhs_type, resolved_rhs_type,
'Array concatenation requires element types to be the same.')
new_size = resolved_lhs_type.size + resolved_rhs_type.size # pytype: disable=attribute-error
if isinstance(resolved_lhs_type, ArrayType):
return ArrayType(resolved_lhs_type.get_element_type(), new_size)
return BitsType(signed=False, size=new_size)
@_rule(ast.Binop)
def _deduce_Binop(self: ast.Binop, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Deduces the concrete type of a Binop AST node."""
# Concatenation is handled differently from other binary operations.
if self.kind == ast.BinopKind.CONCAT:
return _deduce_Concat(self, ctx)
lhs_type = deduce(self.lhs, ctx)
rhs_type = deduce(self.rhs, ctx)
resolved_lhs_type = resolve(lhs_type, ctx)
resolved_rhs_type = resolve(rhs_type, ctx)
if resolved_lhs_type != resolved_rhs_type:
raise XlsTypeError(
self.span, resolved_lhs_type, resolved_rhs_type,
'Could not deduce type for binary operation {0} ({0!r}).'.format(
self.kind))
# Enums only support a more limited set of binary operations.
if isinstance(lhs_type,
EnumType) and self.kind not in ast_helpers.BINOP_ENUM_OK_KINDS:
raise XlsTypeError(
self.span, resolved_lhs_type, None,
"Cannot use '{}' on values with enum type {}".format(
self.kind.value,
lhs_type.get_nominal_type(ctx.module).identifier))
if self.kind in ast_helpers.BINOP_COMPARISON_KINDS:
return ConcreteType.U1
return resolved_lhs_type
@_rule(ast.Struct)
def _deduce_Struct(self: ast.Struct, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Returns the concrete type for a (potentially parametric) struct."""
for parametric in self.parametric_bindings:
parametric_binding_type = deduce(parametric.type_, ctx)
assert isinstance(parametric_binding_type, ConcreteType)
if parametric.expr:
expr_type = deduce(parametric.expr, ctx)
if expr_type != parametric_binding_type:
raise XlsTypeError(
parametric.span,
parametric_binding_type,
expr_type,
suffix='Annotated type of derived parametric '
'value did not match inferred type.')
ctx.type_info[parametric.name] = parametric_binding_type
members = tuple(
(k.identifier, resolve(deduce(m, ctx), ctx)) for k, m in self.members)
result = ctx.type_info[self.name] = TupleType(members, self)
logging.vlog(5, 'Deduced type for struct %s => %s; type_info: %r', self,
result, ctx.type_info)
return result
def _validate_struct_members_subset(
members: ast_helpers.StructInstanceMembers, struct_type: ConcreteType,
struct_text: str, ctx: DeduceCtx
) -> Tuple[Set[str], Tuple[ConcreteType], Tuple[ConcreteType]]:
"""Validates a struct instantiation is a subset of members with no dups.
Args:
members: Sequence of members used in instantiation. Note this may be a
subset; e.g. in the case of splat instantiation.
struct_type: The deduced type for the struct (instantiation).
struct_text: Display name to use for the struct in case of an error.
ctx: Wrapper containing node to type mapping context.
Returns:
A tuple containing the set of struct member names that were instantiated,
the ConcreteTypes of the provided arguments, and the ConcreteTypes of the
corresponding struct member definition.
"""
assert isinstance(struct_type, TupleType), struct_type
seen_names = set()
arg_types = []
member_types = []
for k, v in members:
if k in seen_names:
raise TypeInferenceError(
v.span,
type_=None,
suffix='Duplicate value seen for {!r} in this {!r} struct instance.'
.format(k, struct_text))
seen_names.add(k)
expr_type = resolve(deduce(v, ctx), ctx)
arg_types.append(expr_type)
try:
member_type = struct_type.get_member_type_by_name(k)
member_types.append(member_type)
except KeyError:
raise TypeInferenceError(
v.span,
None,
suffix='Struct {!r} has no member {!r}, but it was provided by this instance.'
.format(struct_text, k))
return seen_names, tuple(arg_types), tuple(member_types)
@_rule(ast.StructInstance)
def _deduce_StructInstance(
self: ast.StructInstance, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Deduces the type of the struct instantiation expression and its members."""
logging.vlog(5, 'Deducing type for struct instance: %s', self)
struct_type = deduce(self.struct, ctx)
assert isinstance(struct_type, TupleType), struct_type
assert struct_type.named, struct_type
expected_names = set(struct_type.tuple_names)
seen_names, arg_types, member_types = _validate_struct_members_subset(
self.unordered_members, struct_type, self.struct_text, ctx)
if seen_names != expected_names:
missing = ', '.join(
repr(s) for s in sorted(list(expected_names - seen_names)))
raise TypeInferenceError(
self.span,
None,
suffix='Struct instance is missing member(s): {}'.format(missing))
struct_def = self.struct
if not isinstance(struct_def, ast.Struct):
# Traverse TypeDefs and ModRefs until we get the struct AST node.
struct_def = ast_helpers.evaluate_to_struct_or_enum_or_annotation(
struct_def, _get_imported_module_via_type_info, ctx.type_info)
assert isinstance(struct_def, ast.Struct), struct_def
resolved_struct_type, _ = parametric_instantiator.instantiate_struct(
self.span, struct_type, arg_types, member_types, ctx,
struct_def.parametric_bindings)
return resolved_struct_type
def _concretize_struct_annotation(module: ast.Module,
type_annotation: ast.TypeRefTypeAnnotation,
struct: ast.Struct,
base_type: ConcreteType) -> ConcreteType:
"""Returns concretized struct type using the provided bindings.
For example, if we have a struct defined as `struct [N: u32, M: u32] Foo`,
the default TupleType will be (N, M). If a type annotation provides bindings,
(e.g. Foo[A, 16]), we will replace N, M with those values. In the case above,
we will return (A, 16) instead.
Args:
module: Owning AST module for the nodes.
type_annotation: The provided type annotation for this parametric struct.
struct: The corresponding struct AST node.
base_type: The TupleType of the struct, based only on the struct definition.
"""
assert len(struct.parametric_bindings) == len(type_annotation.parametrics)
defined_to_annotated = {}
for defined_parametric, annotated_parametric in zip(
struct.parametric_bindings, type_annotation.parametrics):
assert isinstance(defined_parametric,
ast.ParametricBinding), defined_parametric
if isinstance(annotated_parametric, ast.Number):
defined_to_annotated[defined_parametric.name.identifier] = \
int(annotated_parametric.value)
else:
assert isinstance(annotated_parametric,
ast.NameRef), repr(annotated_parametric)
defined_to_annotated[defined_parametric.name.identifier] = \
ParametricSymbol(annotated_parametric.identifier,
annotated_parametric.span)
def resolver(dim: ConcreteTypeDim) -> ConcreteTypeDim:
if isinstance(dim.value, ParametricExpression):
return ConcreteTypeDim(dim.value.evaluate(defined_to_annotated))
return dim
return concrete_type_helpers.map_size(base_type, module, resolver)
def _get_imported_module_via_type_info(
import_: ast.Import, type_info: TypeInfo) -> Tuple[ast.Module, TypeInfo]:
"""Uses type_info to retrieve the corresponding module of a ModRef."""
return type_info.get_imported(import_)
@_rule(ast.SplatStructInstance)
def _deduce_SplatStructInstance(
self: ast.SplatStructInstance, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Deduces the type of the struct instantiation expression and its members."""
struct_type = deduce(self.struct, ctx)
splatted_type = deduce(self.splatted, ctx)
assert isinstance(struct_type, TupleType), struct_type
assert isinstance(splatted_type, TupleType), splatted_type
# We will make sure this splat typechecks during instantiation. Let's just
# ensure the same number of elements for now.
assert len(struct_type.tuple_names) == len(splatted_type.tuple_names)
(seen_names, seen_arg_types,
seen_member_types) = _validate_struct_members_subset(self.members,
struct_type,
self.struct_text, ctx)
arg_types = list(seen_arg_types)
member_types = list(seen_member_types)
for m in struct_type.tuple_names:
if m not in seen_names:
splatted_member_type = splatted_type.get_member_type_by_name(m)
struct_member_type = struct_type.get_member_type_by_name(m)
arg_types.append(splatted_member_type)
member_types.append(struct_member_type)
# At this point, we should have the same number of args compared to the
# number of members defined in the struct.
assert len(arg_types) == len(member_types)
struct_def = self.struct
if not isinstance(struct_def, ast.Struct):
# Traverse TypeDefs and ModRefs until we get the struct AST node.
struct_def = ast_helpers.evaluate_to_struct_or_enum_or_annotation(
struct_def, _get_imported_module_via_type_info, ctx.type_info)
assert isinstance(struct_def, ast.Struct), struct_def
resolved_struct_type, _ = parametric_instantiator.instantiate_struct(
self.span, struct_type, tuple(arg_types), tuple(member_types), ctx,
struct_def.parametric_bindings)
return resolved_struct_type
@_rule(ast.Attr)
def _deduce_Attr(self: ast.Attr, ctx: DeduceCtx) -> ConcreteType: # pytype: disable=wrong-arg-types
"""Deduces the type of a struct attribute access expression."""
struct = deduce(self.lhs, ctx)
assert isinstance(struct, TupleType), struct
if not struct.has_named_member(self.attr.identifier):
raise TypeInferenceError(
span=self.span,
type_=None,
suffix='Struct does not have a member with name {!r}.'.format(
self.attr))
return struct.get_member_type_by_name(self.attr.identifier)
def _deduce(n: ast.AstNode, ctx: DeduceCtx) -> ConcreteType:
f = RULES[n.__class__]
f = typing.cast(Callable[[ast.AstNode, DeduceCtx], ConcreteType], f)
result = f(n, ctx)
ctx.type_info[n] = result
return result
def deduce(n: ast.AstNode, ctx: DeduceCtx) -> ConcreteType:
"""Deduces and returns the type of value produced by this expr.
Also adds n to ctx.type_info memoization dictionary.
Args:
n: The AST node to deduce the type for.
ctx: Wraps a type_info, a dictionary mapping nodes to their types.
Returns:
The type of this expression.
As a side effect the type_info mapping is filled with all the deductions
that were necessary to determine (deduce) the resulting type of n.
"""
assert isinstance(n, ast.AstNode), n
if n in ctx.type_info:
result = ctx.type_info[n]
assert isinstance(result, ConcreteType), result
else:
result = ctx.type_info[n] = _deduce(n, ctx)
logging.vlog(5, 'Deduced type of %s => %s', n, result)
assert isinstance(result, ConcreteType), \
'_deduce did not return a ConcreteType; got: {!r}'.format(result)
return result
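# The memoization in deduce() in miniature (toy cache and a hypothetical rule
# table, not the TypeInfo API):
#
#     _cache = {}
#     def _toy_deduce(node):
#         if node not in _cache:
#             _cache[node] = _toy_rules[type(node)](node)  # hypothetical table
#         return _cache[node]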
|
import argparse
import logging
parser = argparse.ArgumentParser(description="Convert fai to bed",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-i', '--input', action='store', nargs='?', help='Input fai file', required=True)
parser.add_argument('-o', '--output', action='store', nargs='?', help="Output bed file", required=True)
args = parser.parse_args()
logger = logging.getLogger('fai2bed')
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)-8s - %(message)s')
logger.info("writing to %s" % args.output)
with open(args.input, "rt") as fin:
with open(args.output, "wt") as fout:
for line in fin:
parts = line.strip().split('\t')
chrom = parts[0]
chromLength = parts[1]
fout.write(f"{chrom}\t0\t{chromLength}\n")
logger.info("done")
|
import streamsx.hdfs as hdfs
from streamsx.topology.topology import Topology
import streamsx as streamsx
from streamsx.topology.tester import Tester
import streamsx.spl.toolkit as tk
import streamsx.rest as sr
import streamsx.spl.op as op
from streamsx.topology.schema import StreamSchema
import unittest
import datetime
import os
import json
##
## Test assumptions
##
## Streaming analytics service or Streams instance running
## IBM cloud Analytics Engine service credentials are located in a file referenced by environment variable ANALYTICS_ENGINE.
## The core-site.xml is referenced by HDFS_SITE_XML environment variable.
## HDFS toolkit location is given by STREAMS_HDFS_TOOLKIT environment variable.
##
def toolkit_env_var():
    return 'STREAMS_HDFS_TOOLKIT' in os.environ
def streams_install_env_var():
    return 'STREAMS_INSTALL' in os.environ
def site_xml_env_var():
    return 'HDFS_SITE_XML' in os.environ
def cloud_creds_env_var():
    return 'ANALYTICS_ENGINE' in os.environ
class TestParams(unittest.TestCase):
@unittest.skipIf(site_xml_env_var() == False, "Missing HDFS_SITE_XML environment variable.")
def test_xml_creds(self):
xml_file = os.environ['HDFS_SITE_XML']
topo = Topology()
hdfs.scan(topo, credentials=xml_file, directory='a_dir')
hdfs.scan(topo, credentials=xml_file, directory='a_dir', pattern='*.txt', init_delay=datetime.timedelta(seconds=5))
@unittest.skipIf(cloud_creds_env_var() == False, "Missing ANALYTICS_ENGINE environment variable.")
def test_bad_close_file_param(self):
creds_file = os.environ['ANALYTICS_ENGINE']
with open(creds_file) as data_file:
credentials = json.load(data_file)
topo = Topology()
s = topo.source(['Hello World!']).as_string()
# expect ValueError because bytesPerFile, timePerFile, and tuplesPerFile parameters are mutually exclusive.
self.assertRaises(ValueError, hdfs.write, s, credentials=credentials, file='any_file', timePerFile=5, tuplesPerFile=5)
self.assertRaises(ValueError, hdfs.write, s, credentials=credentials, file='any_file', bytesPerFile=5, timePerFile=5)
self.assertRaises(ValueError, hdfs.write, s, credentials=credentials, file='any_file', bytesPerFile=5, tuplesPerFile=5)
self.assertRaises(ValueError, hdfs.write, s, credentials=credentials, file='any_file', bytesPerFile=200, timePerFile=5, tuplesPerFile=5)
class TestCompositeDistributed(unittest.TestCase):
@classmethod
def setUpClass(cls):
    print(str(cls))
def setUp(self):
Tester.setup_distributed(self)
self.hdfs_toolkit_location = os.environ['STREAMS_HDFS_TOOLKIT']
# ------------------------------------
@unittest.skipIf(site_xml_env_var() == False, "Missing HDFS_SITE_XML environment variable.")
def test_HdfsFileSink(self):
hdfs_cfg_file = os.environ['HDFS_SITE_XML']
credentials=hdfs_cfg_file
topo = Topology('test_HdfsFileSink')
if self.hdfs_toolkit_location is not None:
tk.add_toolkit(topo, self.hdfs_toolkit_location)
# Beacon generates 1000 lines
createLines = op.Source(topo, "spl.utility::Beacon", 'tuple<rstring line>', params = {'period':0.01, 'iterations':1000})
createLines.line = createLines.output('"This line will be written into a HDFS file via HdfsFileSink. " + (rstring) IterationCount()')
to_file = createLines.outputs[0]
config = {
'hdfsUser': 'hdfs',
'configPath': 'etc',
'tuplesPerFile': 100
}
# HdfsFileSink writes every 100 lines in a new file (sample41.txt, sample42.txt, ...)
fsink = hdfs.HdfsFileSink(credentials=credentials, file='pytest/sample4%FILENUM.txt', **config)
to_file.for_each(fsink)
tester = Tester(topo)
tester.run_for(60)
cfg = {}
job_config = streamsx.topology.context.JobConfig(tracing='info')
job_config.add(cfg)
cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False
# Run the test
tester.test(self.test_ctxtype, cfg, always_collect_logs=True)
# ------------------------------------
# HdfsDirectoryScan delivers the file names in pytest directory and HdfsFileSource opens and reads HDFS files.
    @unittest.skipIf(site_xml_env_var() == False, "Missing HDFS_SITE_XML environment variable.")
def test_HdfsFileSource(self):
hdfs_cfg_file = os.environ['HDFS_SITE_XML']
topo = Topology('test_HdfsFileSource')
if self.hdfs_toolkit_location is not None:
tk.add_toolkit(topo, self.hdfs_toolkit_location)
dir_scan_output_schema = StreamSchema('tuple<rstring fileName>')
        dirScanParameters = {
'initDelay': 2.0,
'sleepTime' : 2.0,
'pattern' : 'sample.*txt'
}
# HdfsDirectoryScan scans directory 'pytest' and delivers HDFS file names in output port.
        scannedFileNames = topo.source(hdfs.HdfsDirectoryScan(credentials=hdfs_cfg_file, directory='pytest', schema=dir_scan_output_schema, **dirScanParameters))
scannedFileNames.print()
        sourceParameters = {
'initDelay': 1.0
}
source_schema = StreamSchema('tuple<rstring line>')
# HdfsFileSource reads HDFS files in directory 'pytest' and returns the lines of files in output port
        readLines = scannedFileNames.map(hdfs.HdfsFileSource(credentials=hdfs_cfg_file, schema=source_schema, **sourceParameters))
readLines.print()
tester = Tester(topo)
tester.run_for(60)
cfg = {}
job_config = streamsx.topology.context.JobConfig(tracing='info')
job_config.add(cfg)
cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False
# Run the test
tester.test(self.test_ctxtype, cfg, always_collect_logs=True)
    @unittest.skipIf(site_xml_env_var() == False, "Missing HDFS_SITE_XML environment variable.")
def test_HdfsFileCopy(self):
hdfs_cfg_file = os.environ['HDFS_SITE_XML']
credentials=hdfs_cfg_file
        # credentials is the path to the HDFS configuration file 'hdfs-site.xml'
topo = Topology('test_HdfsFileCopy')
if self.hdfs_toolkit_location is not None:
tk.add_toolkit(topo, self.hdfs_toolkit_location)
dir_scan_output_schema = StreamSchema('tuple<rstring hdfsFileName>')
        dirScanParameters = {
'initDelay': 2.0,
'sleepTime' : 2.0,
'pattern' : 'sample.*txt'
}
# HdfsDirectoryScan scans directory 'pytest' and delivers HDFS file names in output port.
        scannedFileNames = topo.source(hdfs.HdfsDirectoryScan(credentials=credentials, directory='pytest', schema=dir_scan_output_schema, **dirScanParameters))
scannedFileNames.print()
        fileCopyParameters = {
'hdfsFileAttrName': 'hdfsFileName',
'localFile' : '/tmp/'
}
output_schema = StreamSchema('tuple<rstring result, uint64 numResults>')
# HdfsFileCopy copies HDFS files from directory 'pytest' into local directory /tmp
        copyFiles = scannedFileNames.map(hdfs.HdfsFileCopy(credentials=hdfs_cfg_file, direction='copyToLocalFile', schema=output_schema, **fileCopyParameters))
copyFiles.print()
tester = Tester(topo)
cfg = {}
job_config = streamsx.topology.context.JobConfig(tracing='info')
job_config.add(cfg)
cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False
# Run the test
tester.test(self.test_ctxtype, cfg, always_collect_logs=True)
class TestCompositeWebHdfs(unittest.TestCase):
@classmethod
def setUpClass(self):
print (str(self))
def setUp(self):
Tester.setup_distributed(self)
self.hdfs_toolkit_location = os.environ['STREAMS_HDFS_TOOLKIT']
# ------------------------------------
@unittest.skipIf(cloud_creds_env_var() == False, "Missing ANALYTICS_ENGINE environment variable.")
def test_HdfsFileSink(self):
ae_service_creds_file = os.environ['ANALYTICS_ENGINE']
with open(ae_service_creds_file) as data_file:
credentials = data_file.read()
topo = Topology('test_HdfsFileSink')
if self.hdfs_toolkit_location is not None:
tk.add_toolkit(topo, self.hdfs_toolkit_location)
# Beacon generates 1000 lines
createLines = op.Source(topo, "spl.utility::Beacon", 'tuple<rstring line>', params = {'period':0.01, 'iterations':1000})
createLines.line = createLines.output('"This line will be written into a HDFS file via HdfsFileSink. " + (rstring) IterationCount()')
to_file = createLines.outputs[0]
config = {
'tuplesPerFile': 100
}
fsink = hdfs.HdfsFileSink(credentials=credentials, file='pytest/sample4%FILENUM.txt', **config)
to_file.for_each(fsink)
tester = Tester(topo)
tester.run_for(60)
cfg = {}
job_config = streamsx.topology.context.JobConfig(tracing='info')
job_config.add(cfg)
cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False
# Run the test
tester.test(self.test_ctxtype, cfg, always_collect_logs=True)
# ------------------------------------
    # HdfsDirectoryScan delivers the file names in the pytest directory and HdfsFileSource opens and reads HDFS files.
@unittest.skipIf(cloud_creds_env_var() == False, "Missing ANALYTICS_ENGINE environment variable.")
def test_HdfsFileSource(self):
ae_service_creds_file = os.environ['ANALYTICS_ENGINE']
with open(ae_service_creds_file) as data_file:
credentials = data_file.read()
topo = Topology('test_HdfsFileSource')
if self.hdfs_toolkit_location is not None:
tk.add_toolkit(topo, self.hdfs_toolkit_location)
sample_schema = StreamSchema('tuple<rstring directory>')
dirScanParameters = {
'initDelay': 2.0,
'sleepTime' : 2.0,
'pattern' : 'sample.*txt'
}
scannedFileNames = topo.source(hdfs.HdfsDirectoryScan(credentials=credentials, directory='pytest', schema=sample_schema, **dirScanParameters))
scannedFileNames.print()
        sourceParameters = {
'initDelay': 1.0
}
source_schema = StreamSchema('tuple<rstring line>')
        readLines = scannedFileNames.map(hdfs.HdfsFileSource(credentials=credentials, schema=source_schema, **sourceParameters))
readLines.print()
tester = Tester(topo)
tester.run_for(60)
cfg = {}
job_config = streamsx.topology.context.JobConfig(tracing='info')
job_config.add(cfg)
cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False
# Run the test
tester.test(self.test_ctxtype, cfg, always_collect_logs=True)
class TestFileSink(unittest.TestCase):
@classmethod
def setUpClass(self):
print (str(self))
def setUp(self):
Tester.setup_distributed(self)
self.hdfs_toolkit_location = os.environ['STREAMS_HDFS_TOOLKIT']
# ------------------------------------
    @unittest.skipIf(site_xml_env_var() == False, "Missing HDFS_SITE_XML environment variable.")
def test_HdfsFileSink(self):
hdfs_cfg_file = os.environ['HDFS_SITE_XML']
topo = Topology('test_HdfsFileSink')
if self.hdfs_toolkit_location is not None:
tk.add_toolkit(topo, self.hdfs_toolkit_location)
pulse = op.Source(topo, "spl.utility::Beacon", 'tuple<rstring directory>', params = {'period':0.5, 'iterations':100})
pulse.directory = pulse.output('"This line will be written into a HDFS file via HdfsFileSink. " + (rstring) IterationCount()')
to_file = pulse.outputs[0]
config = {
'configPath' : hdfs_cfg_file
}
fsink = hdfs.HdfsFileSink(credentials=hdfs_cfg_file, file='pytest1/sample611.txt', **config)
to_file.for_each(fsink)
tester = Tester(topo)
cfg = {}
job_config = streamsx.topology.context.JobConfig(tracing='info')
job_config.add(cfg)
cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False
# Run the test
tester.test(self.test_ctxtype, cfg, always_collect_logs=True)
class TestFileSource(unittest.TestCase):
@classmethod
def setUpClass(self):
print (str(self))
def setUp(self):
Tester.setup_distributed(self)
self.hdfs_toolkit_location = os.environ['STREAMS_HDFS_TOOLKIT']
# ------------------------------------
    @unittest.skipIf(site_xml_env_var() == False, "Missing HDFS_SITE_XML environment variable.")
def test_HdfsFileSource(self):
hdfs_cfg_file = os.environ['HDFS_SITE_XML']
topo = Topology('test_HdfsFileSource')
if self.hdfs_toolkit_location is not None:
tk.add_toolkit(topo, self.hdfs_toolkit_location)
sample_schema = StreamSchema('tuple<rstring directory>')
options = {
'initDelay': 2.0,
'sleepTime' : 2.0,
'pattern' : 'sample.*txt'
}
scanned = topo.source(hdfs.HdfsDirectoryScan(credentials=hdfs_cfg_file, directory='pytest1', schema=sample_schema, **options))
scanned.print()
        sourceParameters = {
'configPath' : hdfs_cfg_file
}
source_schema = StreamSchema('tuple<rstring line>')
        fsource = scanned.map(hdfs.HdfsFileSource(credentials=hdfs_cfg_file, schema=source_schema, **sourceParameters))
fsource.print()
tester = Tester(topo)
cfg = {}
job_config = streamsx.topology.context.JobConfig(tracing='info')
job_config.add(cfg)
cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False
# Run the test
tester.test(self.test_ctxtype, cfg, always_collect_logs=True)
class TestDirScan(unittest.TestCase):
@classmethod
def setUpClass(self):
print (str(self))
def setUp(self):
Tester.setup_distributed(self)
self.hdfs_toolkit_location = os.environ['STREAMS_HDFS_TOOLKIT']
# ------------------------------------
    @unittest.skipIf(site_xml_env_var() == False, "Missing HDFS_SITE_XML environment variable.")
def test_HdfsDirectoryScan(self):
hdfs_cfg_file = os.environ['HDFS_SITE_XML']
topo = Topology('test_HdfsDirectoryScan')
if self.hdfs_toolkit_location is not None:
tk.add_toolkit(topo, self.hdfs_toolkit_location)
credentials=hdfs_cfg_file
directory='pytest1'
sample_schema = StreamSchema('tuple<rstring directory>')
options = {
'initDelay': 2.0,
'sleepTime' : 2.0,
'pattern' : 'sample.*txt'
}
scannedFileNames = topo.source(hdfs.HdfsDirectoryScan(credentials, directory=directory, schema=sample_schema, **options))
scannedFileNames.print(name='printScannedFileNames')
tester = Tester(topo)
cfg = {}
job_config = streamsx.topology.context.JobConfig(tracing='info')
job_config.add(cfg)
cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False
# Run the test
tester.test(self.test_ctxtype, cfg, always_collect_logs=True)
class TestDistributed(unittest.TestCase):
""" Test in local Streams instance with local toolkit from STREAMS_HDFS_TOOLKIT environment variable """
@classmethod
def setUpClass(self):
print (str(self))
def setUp(self):
Tester.setup_distributed(self)
self.hdfs_toolkit_location = os.environ['STREAMS_HDFS_TOOLKIT']
# ------------------------------------
    @unittest.skipIf(site_xml_env_var() == False, "Missing HDFS_SITE_XML environment variable.")
def test_all_hdfs_operators(self):
hdfs_cfg_file = os.environ['HDFS_SITE_XML']
topo = Topology('test_all_hdfs_operators')
if self.hdfs_toolkit_location is not None:
tk.add_toolkit(topo, self.hdfs_toolkit_location)
# creates an input stream
fileSinkInputStream = topo.source(['This line will be written into a HDFS file.']).as_string()
# writes a line into a HDFS file (HDFS2FileSink)
fileSinkResults = hdfs.write(fileSinkInputStream, credentials=hdfs_cfg_file, file='pytest1/sample4%FILENUM.txt')
fileSinkResults.print(name='printFileSinkResults')
# scans an HDFS directory and return file names (HDFS2DirectoryScan)
scannedFileNames = hdfs.scan(topo, credentials=hdfs_cfg_file, directory='pytest1', pattern='sample.*txt', init_delay=10)
scannedFileNames.print(name='printScannedFileNames')
# reads lines from a HDFS file (HDFS2FileSource)
readLines = hdfs.read(scannedFileNames, credentials=hdfs_cfg_file)
readLines.print(name='printReadLines')
# copies files from HDFS into local disk "/tmp/" (HDFS2FileCopy)
        copyFileResults = hdfs.copy(scannedFileNames, credentials=hdfs_cfg_file, direction='copyToLocalFile', hdfsFile=None, hdfsFileAttrName='fileName', localFile='/tmp/')
copyFileResults.print(name='printCopyFileResults')
tester = Tester(topo)
tester.tuple_count(readLines, 1, exact=False)
cfg = {}
job_config = streamsx.topology.context.JobConfig(tracing='info')
job_config.add(cfg)
cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False
# Run the test
tester.test(self.test_ctxtype, cfg, always_collect_logs=True)
# ------------------------------------
@unittest.skipIf(cloud_creds_env_var() == False, "Missing ANALYTICS_ENGINE environment variable.")
def test_hdfs_read_with_credentials(self):
ae_service_creds_file = os.environ['ANALYTICS_ENGINE']
with open(ae_service_creds_file) as data_file:
credentials = data_file.read()
        # credentials is a JSON string
topo = Topology('test_hdfs_read_with_credentials')
if self.hdfs_toolkit_location is not None:
tk.add_toolkit(topo, self.hdfs_toolkit_location)
s = topo.source(['Hello World!']).as_string()
result = hdfs.write(s, credentials=credentials, file='pytest/1sample%FILENUM.txt')
result.print()
scanned_files = hdfs.scan(topo, credentials=credentials, directory='pytest', pattern='1sample.*txt', init_delay=10)
scanned_files.print()
lines = hdfs.read(scanned_files, credentials=credentials)
lines.print()
tester = Tester(topo)
tester.tuple_count(lines, 1, exact=True)
#tester.run_for(60)
cfg = {}
job_config = streamsx.topology.context.JobConfig(tracing='info')
job_config.add(cfg)
cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False
# Run the test
tester.test(self.test_ctxtype, cfg, always_collect_logs=True)
# ------------------------------------
@unittest.skipIf(cloud_creds_env_var() == False, "Missing ANALYTICS_ENGINE environment variable.")
def test_hdfs_uri(self):
ae_service_creds_file = os.environ['ANALYTICS_ENGINE']
with open(ae_service_creds_file) as data_file:
credentials = json.load(data_file)
        # credentials is a dict
topo = Topology('test_hdfs_uri')
if self.hdfs_toolkit_location is not None:
tk.add_toolkit(topo, self.hdfs_toolkit_location)
# creates an input stream
fileSinkInputStream = topo.source(['This line will be written into a HDFS file.']).as_string()
result = hdfs.write(fileSinkInputStream, credentials=credentials, file='pytest/2sample%FILENUM.txt')
result.print()
scanned_files = hdfs.scan(topo, credentials=credentials, directory='pytest', pattern='2sample.*txt', init_delay=10)
scanned_files.print()
lines = hdfs.read(scanned_files, credentials=credentials)
lines.print()
tester = Tester(topo)
tester.tuple_count(lines, 1, exact=True)
#tester.run_for(60)
cfg = {}
job_config = streamsx.topology.context.JobConfig(tracing='info')
job_config.add(cfg)
cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False
# Run the test
tester.test(self.test_ctxtype, cfg, always_collect_logs=True)
# ------------------------------------
@unittest.skipIf(cloud_creds_env_var() == False, "Missing ANALYTICS_ENGINE environment variable.")
def test_close_on_tuples(self):
ae_service_creds_file = os.environ['ANALYTICS_ENGINE']
with open(ae_service_creds_file) as data_file:
credentials = json.load(data_file)
topo = Topology('test_close_on_tuples')
if self.hdfs_toolkit_location is not None:
tk.add_toolkit(topo, self.hdfs_toolkit_location)
s = topo.source(['Hello World!','Hello','World','Hello World!','Hello','World']).as_string()
result = hdfs.write(s, credentials=credentials, file='pytest/write_test%FILENUM.txt', tuplesPerFile=3)
result.print()
tester = Tester(topo)
tester.tuple_count(result, 2, exact=True)
#tester.run_for(60)
cfg = {}
job_config = streamsx.topology.context.JobConfig(tracing='info')
job_config.add(cfg)
cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False
# Run the test
tester.test(self.test_ctxtype, cfg, always_collect_logs=True)
class TestCloud(TestDistributed):
""" Test in Streaming Analytics Service using local toolkit from STREAMS_HDFS_TOOLKIT environment variable """
@classmethod
def setUpClass(self):
# start streams service
connection = sr.StreamingAnalyticsConnection()
service = connection.get_streaming_analytics()
result = service.start_instance()
print(result +'\n')
def setUp(self):
Tester.setup_streaming_analytics(self, force_remote_build=False)
self.hdfs_toolkit_location = os.environ['STREAMS_HDFS_TOOLKIT']
class TestCloudRemote(TestCloud):
""" Test in Streaming Analytics Service using remote toolkit from cloud build service """
@classmethod
def setUpClass(self):
super().setUpClass()
def setUp(self):
Tester.setup_streaming_analytics(self, force_remote_build=True)
self.hdfs_toolkit_location = None
class TestICPRemote(TestDistributed):
""" Test in Cloud Pak using remote toolkit from cloud build service """
@classmethod
def setUpClass(self):
super().setUpClass()
def setUp(self):
Tester.setup_distributed(self)
self.hdfs_toolkit_location = None
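# Hypothetical entry point so this module can also be run directly with
# 'python <this_file>.py'; unittest/pytest discover the classes without it.
if __name__ == '__main__':
    unittest.main()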
|
from fibo import fib
def test_fib():
assert fib(0) == 0
assert fib(1) == 1
assert fib(10) == 55
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""SOCv2018 codes -> Occupation names"""
import re
import requests
def _create_soc_codes_map():
"""Generates SOC codes to occupation map from the SOCv2018 MCF file"""
# A regex to clean SOC names. This regex catches non alphanumeric characters
# (excluding whitespace), 'and', 'Occupations'.
soc_name_clean_regex = re.compile(r'[^A-Za-z0-9 ]+|\band\b|\bOccupations\b')
    soc_map = dict()
    code = None  # set once a SOCv2018 node is seen; guards the 'name' handler below
soc_mcf = requests.get(
'https://raw.githubusercontent.com/datacommonsorg/schema/main/core/soc.mcf'
).text
soc_mcf = soc_mcf.split('\n')
for line in soc_mcf:
# Extracting the SOC code from the dcid
# The dcid is of the form 'dcid:SOCv2018/<code>'
if line.startswith('Node:'):
if 'SOCv2018/' in line:
code = line.split('SOCv2018/')[1]
else:
code = None
# Extracting occupation from name
if line.startswith('name') and code is not None:
occupation = line.split(':')[1].strip()
occupation = soc_name_clean_regex.sub('', occupation)
            # Capitalize the first letter of each word; joining on the empty
            # string also removes all whitespace between words.
            occupation = "".join(
                [word[0].upper() + word[1:] for word in occupation.split()])
soc_map[code] = occupation # Assign occupation to code from dcid
return soc_map
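# A minimal usage sketch (assumes network access to the soc.mcf URL above):
# regenerate the map and spot-check it against the hard-coded SOC_MAP below.
#
#   generated = _create_soc_codes_map()
#   assert generated['11-1011'] == SOC_MAP['11-1011']  # 'ChiefExecutives'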
SOC_MAP = {
'11-0000':
'Management',
'11-1000':
'TopExecutives',
'11-1010':
'ChiefExecutives',
'11-1011':
'ChiefExecutives',
'11-1020':
'GeneralOperationsManagers',
'11-1021':
'GeneralOperationsManagers',
'11-1030':
'Legislators',
'11-1031':
'Legislators',
'11-2000':
'AdvertisingMarketingPromotionsPublicRelationsSalesManagers',
'11-2010':
'AdvertisingPromotionsManagers',
'11-2011':
'AdvertisingPromotionsManagers',
'11-2020':
'MarketingSalesManagers',
'11-2021':
'MarketingManagers',
'11-2022':
'SalesManagers',
'11-2030':
'PublicRelationsFundraisingManagers',
'11-2032':
'PublicRelationsManagers',
'11-2033':
'FundraisingManagers',
'11-3000':
'OperationsSpecialtiesManagers',
'11-3010':
'AdministrativeServicesFacilitiesManagers',
'11-3012':
'AdministrativeServicesManagers',
'11-3013':
'FacilitiesManagers',
'11-3020':
'ComputerInformationSystemsManagers',
'11-3021':
'ComputerInformationSystemsManagers',
'11-3030':
'FinancialManagers',
'11-3031':
'FinancialManagers',
'11-3050':
'IndustrialProductionManagers',
'11-3051':
'IndustrialProductionManagers',
'11-3060':
'PurchasingManagers',
'11-3061':
'PurchasingManagers',
'11-3070':
'TransportationStorageDistributionManagers',
'11-3071':
'TransportationStorageDistributionManagers',
'11-3110':
'CompensationBenefitsManagers',
'11-3111':
'CompensationBenefitsManagers',
'11-3120':
'HumanResourcesManagers',
'11-3121':
'HumanResourcesManagers',
'11-3130':
'TrainingDevelopmentManagers',
'11-3131':
'TrainingDevelopmentManagers',
'11-9000':
'OtherManagement',
'11-9010':
'FarmersRanchersOtherAgriculturalManagers',
'11-9013':
'FarmersRanchersOtherAgriculturalManagers',
'11-9020':
'ConstructionManagers',
'11-9021':
'ConstructionManagers',
'11-9030':
'EducationChildcareAdministrators',
'11-9031':
'EducationChildcareAdministratorsPreschoolDaycare',
'11-9032':
'EducationAdministratorsKindergartenThroughSecondary',
'11-9033':
'EducationAdministratorsPostsecondary',
'11-9039':
'EducationAdministratorsAllOther',
'11-9040':
'ArchitecturalEngineeringManagers',
'11-9041':
'ArchitecturalEngineeringManagers',
'11-9050':
'FoodServiceManagers',
'11-9051':
'FoodServiceManagers',
'11-9070':
'EntertainmentRecreationManagers',
'11-9071':
'GamblingManagers',
'11-9072':
'EntertainmentRecreationManagersExceptGambling',
'11-9080':
'LodgingManagers',
'11-9081':
'LodgingManagers',
'11-9110':
'MedicalHealthServicesManagers',
'11-9111':
'MedicalHealthServicesManagers',
'11-9120':
'NaturalSciencesManagers',
'11-9121':
'NaturalSciencesManagers',
'11-9130':
'PostmastersMailSuperintendents',
'11-9131':
'PostmastersMailSuperintendents',
'11-9140':
'PropertyRealEstateCommunityAssociationManagers',
'11-9141':
'PropertyRealEstateCommunityAssociationManagers',
'11-9150':
'SocialCommunityServiceManagers',
'11-9151':
'SocialCommunityServiceManagers',
'11-9160':
'EmergencyManagementDirectors',
'11-9161':
'EmergencyManagementDirectors',
'11-9170':
'PersonalServiceManagers',
'11-9171':
'FuneralHomeManagers',
'11-9179':
'PersonalServiceManagersAllOther',
'11-9190':
'MiscellaneousManagers',
'11-9199':
'ManagersAllOther',
'13-0000':
'BusinessFinancialOperations',
'13-1000':
'BusinessOperationsSpecialists',
'13-1010':
'AgentsBusinessManagersOfArtistsPerformersAthletes',
'13-1011':
'AgentsBusinessManagersOfArtistsPerformersAthletes',
'13-1020':
'BuyersPurchasingAgents',
'13-1021':
'BuyersPurchasingAgentsFarmProducts',
'13-1022':
'WholesaleRetailBuyersExceptFarmProducts',
'13-1023':
'PurchasingAgentsExceptWholesaleRetailFarmProducts',
'13-1030':
'ClaimsAdjustersAppraisersExaminersInvestigators',
'13-1031':
'ClaimsAdjustersExaminersInvestigators',
'13-1032':
'InsuranceAppraisersAutoDamage',
'13-1040':
'ComplianceOfficers',
'13-1041':
'ComplianceOfficers',
'13-1050':
'CostEstimators',
'13-1051':
'CostEstimators',
'13-1070':
'HumanResourcesWorkers',
'13-1071':
'HumanResourcesSpecialists',
'13-1074':
'FarmLaborContractors',
'13-1075':
'LaborRelationsSpecialists',
'13-1080':
'LogisticiansProjectManagementSpecialists',
'13-1081':
'Logisticians',
'13-1082':
'ProjectManagementSpecialists',
'13-1110':
'ManagementAnalysts',
'13-1111':
'ManagementAnalysts',
'13-1120':
'MeetingConventionEventPlanners',
'13-1121':
'MeetingConventionEventPlanners',
'13-1130':
'Fundraisers',
'13-1131':
'Fundraisers',
'13-1140':
'CompensationBenefitsJobAnalysisSpecialists',
'13-1141':
'CompensationBenefitsJobAnalysisSpecialists',
'13-1150':
'TrainingDevelopmentSpecialists',
'13-1151':
'TrainingDevelopmentSpecialists',
'13-1160':
'MarketResearchAnalystsMarketingSpecialists',
'13-1161':
'MarketResearchAnalystsMarketingSpecialists',
'13-1190':
'MiscellaneousBusinessOperationsSpecialists',
'13-1199':
'BusinessOperationsSpecialistsAllOther',
'13-2000':
'FinancialSpecialists',
'13-2010':
'AccountantsAuditors',
'13-2011':
'AccountantsAuditors',
'13-2020':
'PropertyAppraisersAssessors',
'13-2022':
'AppraisersOfPersonalBusinessProperty',
'13-2023':
'AppraisersAssessorsOfRealEstate',
'13-2030':
'BudgetAnalysts',
'13-2031':
'BudgetAnalysts',
'13-2040':
'CreditAnalysts',
'13-2041':
'CreditAnalysts',
'13-2050':
'FinancialAnalystsAdvisors',
'13-2051':
'FinancialInvestmentAnalysts',
'13-2052':
'PersonalFinancialAdvisors',
'13-2053':
'InsuranceUnderwriters',
'13-2054':
'FinancialRiskSpecialists',
'13-2060':
'FinancialExaminers',
'13-2061':
'FinancialExaminers',
'13-2070':
'CreditCounselorsLoanOfficers',
'13-2071':
'CreditCounselors',
'13-2072':
'LoanOfficers',
'13-2080':
'TaxExaminersCollectorsPreparersRevenueAgents',
'13-2081':
'TaxExaminersCollectorsRevenueAgents',
'13-2082':
'TaxPreparers',
'13-2090':
'MiscellaneousFinancialSpecialists',
'13-2099':
'FinancialSpecialistsAllOther',
'15-0000':
'ComputerMathematical',
'15-1200':
'Computer',
'15-1210':
'ComputerInformationAnalysts',
'15-1211':
'ComputerSystemsAnalysts',
'15-1212':
'InformationSecurityAnalysts',
'15-1220':
'ComputerInformationResearchScientists',
'15-1221':
'ComputerInformationResearchScientists',
'15-1230':
'ComputerSupportSpecialists',
'15-1231':
'ComputerNetworkSupportSpecialists',
'15-1232':
'ComputerUserSupportSpecialists',
'15-1240':
'DatabaseNetworkAdministratorsArchitects',
'15-1241':
'ComputerNetworkArchitects',
'15-1242':
'DatabaseAdministrators',
'15-1243':
'DatabaseArchitects',
'15-1244':
'NetworkComputerSystemsAdministrators',
'15-1250':
'SoftwareWebDevelopersProgrammersTesters',
'15-1251':
'ComputerProgrammers',
'15-1252':
'SoftwareDevelopers',
'15-1253':
'SoftwareQualityAssuranceAnalystsTesters',
'15-1254':
'WebDevelopers',
'15-1255':
'WebDigitalInterfaceDesigners',
'15-1290':
'MiscellaneousComputer',
'15-1299':
'ComputerAllOther',
'15-2000':
'MathematicalScience',
'15-2010':
'Actuaries',
'15-2011':
'Actuaries',
'15-2020':
'Mathematicians',
'15-2021':
'Mathematicians',
'15-2030':
'OperationsResearchAnalysts',
'15-2031':
'OperationsResearchAnalysts',
'15-2040':
'Statisticians',
'15-2041':
'Statisticians',
'15-2050':
'DataScientists',
'15-2051':
'DataScientists',
'15-2090':
'MiscellaneousMathematicalScience',
'15-2099':
'MathematicalScienceAllOther',
'17-0000':
'ArchitectureEngineering',
'17-1000':
'ArchitectsSurveyorsCartographers',
'17-1010':
'ArchitectsExceptNaval',
'17-1011':
'ArchitectsExceptLandscapeNaval',
'17-1012':
'LandscapeArchitects',
'17-1020':
'SurveyorsCartographersPhotogrammetrists',
'17-1021':
'CartographersPhotogrammetrists',
'17-1022':
'Surveyors',
'17-2000':
'Engineers',
'17-2010':
'AerospaceEngineers',
'17-2011':
'AerospaceEngineers',
'17-2020':
'AgriculturalEngineers',
'17-2021':
'AgriculturalEngineers',
'17-2030':
'BioengineersBiomedicalEngineers',
'17-2031':
'BioengineersBiomedicalEngineers',
'17-2040':
'ChemicalEngineers',
'17-2041':
'ChemicalEngineers',
'17-2050':
'CivilEngineers',
'17-2051':
'CivilEngineers',
'17-2060':
'ComputerHardwareEngineers',
'17-2061':
'ComputerHardwareEngineers',
'17-2070':
'ElectricalElectronicsEngineers',
'17-2071':
'ElectricalEngineers',
'17-2072':
'ElectronicsEngineersExceptComputer',
'17-2080':
'EnvironmentalEngineers',
'17-2081':
'EnvironmentalEngineers',
'17-2110':
'IndustrialEngineersIncludingHealthSafety',
'17-2111':
'HealthSafetyEngineersExceptMiningSafetyEngineersInspectors',
'17-2112':
'IndustrialEngineers',
'17-2120':
'MarineEngineersNavalArchitects',
'17-2121':
'MarineEngineersNavalArchitects',
'17-2130':
'MaterialsEngineers',
'17-2131':
'MaterialsEngineers',
'17-2140':
'MechanicalEngineers',
'17-2141':
'MechanicalEngineers',
'17-2150':
'MiningGeologicalEngineersIncludingMiningSafetyEngineers',
'17-2151':
'MiningGeologicalEngineersIncludingMiningSafetyEngineers',
'17-2160':
'NuclearEngineers',
'17-2161':
'NuclearEngineers',
'17-2170':
'PetroleumEngineers',
'17-2171':
'PetroleumEngineers',
'17-2190':
'MiscellaneousEngineers',
'17-2199':
'EngineersAllOther',
'17-3000':
'DraftersEngineeringTechniciansMappingTechnicians',
'17-3010':
'Drafters',
'17-3011':
'ArchitecturalCivilDrafters',
'17-3012':
'ElectricalElectronicsDrafters',
'17-3013':
'MechanicalDrafters',
'17-3019':
'DraftersAllOther',
'17-3020':
'EngineeringTechnologistsTechniciansExceptDrafters',
'17-3021':
'AerospaceEngineeringOperationsTechnologistsTechnicians',
'17-3022':
'CivilEngineeringTechnologistsTechnicians',
'17-3023':
'ElectricalElectronicEngineeringTechnologistsTechnicians',
'17-3024':
'ElectroMechanicalMechatronicsTechnologistsTechnicians',
'17-3025':
'EnvironmentalEngineeringTechnologistsTechnicians',
'17-3026':
'IndustrialEngineeringTechnologistsTechnicians',
'17-3027':
'MechanicalEngineeringTechnologistsTechnicians',
'17-3028':
'CalibrationTechnologistsTechnicians',
'17-3029':
'EngineeringTechnologistsTechniciansExceptDraftersAllOther',
'17-3030':
'SurveyingMappingTechnicians',
'17-3031':
'SurveyingMappingTechnicians',
'19-0000':
'LifePhysicalSocialScience',
'19-1000':
'LifeScientists',
'19-1010':
'AgriculturalFoodScientists',
'19-1011':
'AnimalScientists',
'19-1012':
'FoodScientistsTechnologists',
'19-1013':
'SoilPlantScientists',
'19-1020':
'BiologicalScientists',
'19-1021':
'BiochemistsBiophysicists',
'19-1022':
'Microbiologists',
'19-1023':
'ZoologistsWildlifeBiologists',
'19-1029':
'BiologicalScientistsAllOther',
'19-1030':
'ConservationScientistsForesters',
'19-1031':
'ConservationScientists',
'19-1032':
'Foresters',
'19-1040':
'MedicalScientists',
'19-1041':
'Epidemiologists',
'19-1042':
'MedicalScientistsExceptEpidemiologists',
'19-1090':
'MiscellaneousLifeScientists',
'19-1099':
'LifeScientistsAllOther',
'19-2000':
'PhysicalScientists',
'19-2010':
'AstronomersPhysicists',
'19-2011':
'Astronomers',
'19-2012':
'Physicists',
'19-2020':
'AtmosphericSpaceScientists',
'19-2021':
'AtmosphericSpaceScientists',
'19-2030':
'ChemistsMaterialsScientists',
'19-2031':
'Chemists',
'19-2032':
'MaterialsScientists',
'19-2040':
'EnvironmentalScientistsGeoscientists',
'19-2041':
'EnvironmentalScientistsSpecialistsIncludingHealth',
'19-2042':
'GeoscientistsExceptHydrologistsGeographers',
'19-2043':
'Hydrologists',
'19-2090':
'MiscellaneousPhysicalScientists',
'19-2099':
'PhysicalScientistsAllOther',
'19-3000':
'SocialScientistsRelatedWorkers',
'19-3010':
'Economists',
'19-3011':
'Economists',
'19-3020':
'SurveyResearchers',
'19-3022':
'SurveyResearchers',
'19-3030':
'Psychologists',
'19-3032':
'IndustrialOrganizationalPsychologists',
'19-3033':
'ClinicalCounselingPsychologists',
'19-3034':
'SchoolPsychologists',
'19-3039':
'PsychologistsAllOther',
'19-3040':
'Sociologists',
'19-3041':
'Sociologists',
'19-3050':
'UrbanRegionalPlanners',
'19-3051':
'UrbanRegionalPlanners',
'19-3090':
'MiscellaneousSocialScientistsRelatedWorkers',
'19-3091':
'AnthropologistsArcheologists',
'19-3092':
'Geographers',
'19-3093':
'Historians',
'19-3094':
'PoliticalScientists',
'19-3099':
'SocialScientistsRelatedWorkersAllOther',
'19-4000':
'LifePhysicalSocialScienceTechnicians',
'19-4010':
'AgriculturalFoodScienceTechnicians',
'19-4012':
'AgriculturalTechnicians',
'19-4013':
'FoodScienceTechnicians',
'19-4020':
'BiologicalTechnicians',
'19-4021':
'BiologicalTechnicians',
'19-4030':
'ChemicalTechnicians',
'19-4031':
'ChemicalTechnicians',
'19-4040':
'EnvironmentalScienceGeoscienceTechnicians',
'19-4042':
'EnvironmentalScienceProtectionTechniciansIncludingHealth',
'19-4043':
'GeologicalTechniciansExceptHydrologicTechnicians',
'19-4044':
'HydrologicTechnicians',
'19-4050':
'NuclearTechnicians',
'19-4051':
'NuclearTechnicians',
'19-4060':
'SocialScienceResearchAssistants',
'19-4061':
'SocialScienceResearchAssistants',
'19-4070':
'ForestConservationTechnicians',
'19-4071':
'ForestConservationTechnicians',
'19-4090':
'MiscellaneousLifePhysicalSocialScienceTechnicians',
'19-4092':
'ForensicScienceTechnicians',
'19-4099':
'LifePhysicalSocialScienceTechniciansAllOther',
'19-5000':
'OccupationalHealthSafetySpecialistsTechnicians',
'19-5010':
'OccupationalHealthSafetySpecialistsTechnicians',
'19-5011':
'OccupationalHealthSafetySpecialists',
'19-5012':
'OccupationalHealthSafetyTechnicians',
'21-0000':
'CommunitySocialService',
'21-1000':
'CounselorsSocialWorkersOtherCommunitySocialServiceSpecialists',
'21-1010':
'Counselors',
'21-1011':
'SubstanceAbuseBehavioralDisorderCounselors',
'21-1012':
'EducationalGuidanceCareerCounselorsAdvisors',
'21-1013':
'MarriageFamilyTherapists',
'21-1014':
'MentalHealthCounselors',
'21-1015':
'RehabilitationCounselors',
'21-1019':
'CounselorsAllOther',
'21-1020':
'SocialWorkers',
'21-1021':
'ChildFamilySchoolSocialWorkers',
'21-1022':
'HealthcareSocialWorkers',
'21-1023':
'MentalHealthSubstanceAbuseSocialWorkers',
'21-1029':
'SocialWorkersAllOther',
'21-1090':
'MiscellaneousCommunitySocialServiceSpecialists',
'21-1091':
'HealthEducationSpecialists',
'21-1092':
'ProbationOfficersCorrectionalTreatmentSpecialists',
'21-1093':
'SocialHumanServiceAssistants',
'21-1094':
'CommunityHealthWorkers',
'21-1099':
'CommunitySocialServiceSpecialistsAllOther',
'21-2000':
'ReligiousWorkers',
'21-2010':
'Clergy',
'21-2011':
'Clergy',
'21-2020':
'DirectorsReligiousActivitiesEducation',
'21-2021':
'DirectorsReligiousActivitiesEducation',
'21-2090':
'MiscellaneousReligiousWorkers',
'21-2099':
'ReligiousWorkersAllOther',
'23-0000':
'Legal',
'23-1000':
'LawyersJudgesRelatedWorkers',
'23-1010':
'LawyersJudicialLawClerks',
'23-1011':
'Lawyers',
'23-1012':
'JudicialLawClerks',
'23-1020':
'JudgesMagistratesOtherJudicialWorkers',
'23-1021':
'AdministrativeLawJudgesAdjudicatorsHearingOfficers',
'23-1022':
'ArbitratorsMediatorsConciliators',
'23-1023':
'JudgesMagistrateJudgesMagistrates',
'23-2000':
'LegalSupportWorkers',
'23-2010':
'ParalegalsLegalAssistants',
'23-2011':
'ParalegalsLegalAssistants',
'23-2090':
'MiscellaneousLegalSupportWorkers',
'23-2093':
'TitleExaminersAbstractorsSearchers',
'23-2099':
'LegalSupportWorkersAllOther',
'25-0000':
'EducationalInstructionLibrary',
'25-1000':
'PostsecondaryTeachers',
'25-1010':
'BusinessTeachersPostsecondary',
'25-1011':
'BusinessTeachersPostsecondary',
'25-1020':
'MathComputerScienceTeachersPostsecondary',
'25-1021':
'ComputerScienceTeachersPostsecondary',
'25-1022':
'MathematicalScienceTeachersPostsecondary',
'25-1030':
'EngineeringArchitectureTeachersPostsecondary',
'25-1031':
'ArchitectureTeachersPostsecondary',
'25-1032':
'EngineeringTeachersPostsecondary',
'25-1040':
'LifeSciencesTeachersPostsecondary',
'25-1041':
'AgriculturalSciencesTeachersPostsecondary',
'25-1042':
'BiologicalScienceTeachersPostsecondary',
'25-1043':
'ForestryConservationScienceTeachersPostsecondary',
'25-1050':
'PhysicalSciencesTeachersPostsecondary',
'25-1051':
'AtmosphericEarthMarineSpaceSciencesTeachersPostsecondary',
'25-1052':
'ChemistryTeachersPostsecondary',
'25-1053':
'EnvironmentalScienceTeachersPostsecondary',
'25-1054':
'PhysicsTeachersPostsecondary',
'25-1060':
'SocialSciencesTeachersPostsecondary',
'25-1061':
'AnthropologyArcheologyTeachersPostsecondary',
'25-1062':
'AreaEthnicCulturalStudiesTeachersPostsecondary',
'25-1063':
'EconomicsTeachersPostsecondary',
'25-1064':
'GeographyTeachersPostsecondary',
'25-1065':
'PoliticalScienceTeachersPostsecondary',
'25-1066':
'PsychologyTeachersPostsecondary',
'25-1067':
'SociologyTeachersPostsecondary',
'25-1069':
'SocialSciencesTeachersPostsecondaryAllOther',
'25-1070':
'HealthTeachersPostsecondary',
'25-1071':
'HealthSpecialtiesTeachersPostsecondary',
'25-1072':
'NursingInstructorsTeachersPostsecondary',
'25-1080':
'EducationLibraryScienceTeachersPostsecondary',
'25-1081':
'EducationTeachersPostsecondary',
'25-1082':
'LibraryScienceTeachersPostsecondary',
'25-1110':
'LawCriminalJusticeSocialWorkTeachersPostsecondary',
'25-1111':
'CriminalJusticeLawEnforcementTeachersPostsecondary',
'25-1112':
'LawTeachersPostsecondary',
'25-1113':
'SocialWorkTeachersPostsecondary',
'25-1120':
'ArtsCommunicationsHistoryHumanitiesTeachersPostsecondary',
'25-1121':
'ArtDramaMusicTeachersPostsecondary',
'25-1122':
'CommunicationsTeachersPostsecondary',
'25-1123':
'EnglishLanguageLiteratureTeachersPostsecondary',
'25-1124':
'ForeignLanguageLiteratureTeachersPostsecondary',
'25-1125':
'HistoryTeachersPostsecondary',
'25-1126':
'PhilosophyReligionTeachersPostsecondary',
'25-1190':
'MiscellaneousPostsecondaryTeachers',
'25-1192':
'FamilyConsumerSciencesTeachersPostsecondary',
'25-1193':
'RecreationFitnessStudiesTeachersPostsecondary',
'25-1194':
'CareerTechnicalEducationTeachersPostsecondary',
'25-1199':
'PostsecondaryTeachersAllOther',
'25-2000':
'PreschoolElementaryMiddleSecondarySpecialEducationTeachers',
'25-2010':
'PreschoolKindergartenTeachers',
'25-2011':
'PreschoolTeachersExceptSpecialEducation',
'25-2012':
'KindergartenTeachersExceptSpecialEducation',
'25-2020':
'ElementaryMiddleSchoolTeachers',
'25-2021':
'ElementarySchoolTeachersExceptSpecialEducation',
'25-2022':
'MiddleSchoolTeachersExceptSpecialCareerTechnicalEducation',
'25-2023':
'CareerTechnicalEducationTeachersMiddleSchool',
'25-2030':
'SecondarySchoolTeachers',
'25-2031':
'SecondarySchoolTeachersExceptSpecialCareerTechnicalEducation',
'25-2032':
'CareerTechnicalEducationTeachersSecondarySchool',
'25-2050':
'SpecialEducationTeachers',
'25-2051':
'SpecialEducationTeachersPreschool',
'25-2055':
'SpecialEducationTeachersKindergarten',
'25-2056':
'SpecialEducationTeachersElementarySchool',
'25-2057':
'SpecialEducationTeachersMiddleSchool',
'25-2058':
'SpecialEducationTeachersSecondarySchool',
'25-2059':
'SpecialEducationTeachersAllOther',
'25-3000':
'OtherTeachersInstructors',
'25-3010':
'AdultBasicEducationAdultSecondaryEducationEnglishAsASecondLanguageInstructors',
'25-3011':
'AdultBasicEducationAdultSecondaryEducationEnglishAsASecondLanguageInstructors',
'25-3020':
'SelfEnrichmentTeachers',
'25-3021':
'SelfEnrichmentTeachers',
'25-3030':
'SubstituteTeachersShortTerm',
'25-3031':
'SubstituteTeachersShortTerm',
'25-3040':
'Tutors',
'25-3041':
'Tutors',
'25-3090':
'MiscellaneousTeachersInstructors',
'25-3099':
'TeachersInstructorsAllOther',
'25-4000':
'LibrariansCuratorsArchivists',
'25-4010':
'ArchivistsCuratorsMuseumTechnicians',
'25-4011':
'Archivists',
'25-4012':
'Curators',
'25-4013':
'MuseumTechniciansConservators',
'25-4020':
'LibrariansMediaCollectionsSpecialists',
'25-4022':
'LibrariansMediaCollectionsSpecialists',
'25-4030':
'LibraryTechnicians',
'25-4031':
'LibraryTechnicians',
'25-9000':
'OtherEducationalInstructionLibrary',
'25-9020':
'FarmHomeManagementEducators',
'25-9021':
'FarmHomeManagementEducators',
'25-9030':
'InstructionalCoordinators',
'25-9031':
'InstructionalCoordinators',
'25-9040':
'TeachingAssistants',
'25-9042':
'TeachingAssistantsPreschoolElementaryMiddleSecondarySchoolExceptSpecialEducation',
'25-9043':
'TeachingAssistantsSpecialEducation',
'25-9044':
'TeachingAssistantsPostsecondary',
'25-9049':
'TeachingAssistantsAllOther',
'25-9090':
'MiscellaneousEducationalInstructionLibraryWorkers',
'25-9099':
'EducationalInstructionLibraryWorkersAllOther',
'27-0000':
'ArtsDesignEntertainmentSportsMedia',
'27-1000':
'ArtDesignWorkers',
'27-1010':
'ArtistsRelatedWorkers',
'27-1011':
'ArtDirectors',
'27-1012':
'CraftArtists',
'27-1013':
'FineArtistsIncludingPaintersSculptorsIllustrators',
'27-1014':
'SpecialEffectsArtistsAnimators',
'27-1019':
'ArtistsRelatedWorkersAllOther',
'27-1020':
'Designers',
'27-1021':
'CommercialIndustrialDesigners',
'27-1022':
'FashionDesigners',
'27-1023':
'FloralDesigners',
'27-1024':
'GraphicDesigners',
'27-1025':
'InteriorDesigners',
'27-1026':
'MerchandiseDisplayersWindowTrimmers',
'27-1027':
'SetExhibitDesigners',
'27-1029':
'DesignersAllOther',
'27-2000':
'EntertainersPerformersSportsRelatedWorkers',
'27-2010':
'ActorsProducersDirectors',
'27-2011':
'Actors',
'27-2012':
'ProducersDirectors',
'27-2020':
'AthletesCoachesUmpiresRelatedWorkers',
'27-2021':
'AthletesSportsCompetitors',
'27-2022':
'CoachesScouts',
'27-2023':
'UmpiresRefereesOtherSportsOfficials',
'27-2030':
'DancersChoreographers',
'27-2031':
'Dancers',
'27-2032':
'Choreographers',
'27-2040':
'MusiciansSingersRelatedWorkers',
'27-2041':
'MusicDirectorsComposers',
'27-2042':
'MusiciansSingers',
'27-2090':
'MiscellaneousEntertainersPerformersSportsRelatedWorkers',
'27-2091':
'DiscJockeysExceptRadio',
'27-2099':
'EntertainersPerformersSportsRelatedWorkersAllOther',
'27-3000':
'MediaCommunicationWorkers',
'27-3010':
'BroadcastAnnouncersRadioDiscJockeys',
'27-3011':
'BroadcastAnnouncersRadioDiscJockeys',
'27-3020':
'NewsAnalystsReportersJournalists',
'27-3023':
'NewsAnalystsReportersJournalists',
'27-3030':
'PublicRelationsSpecialists',
'27-3031':
'PublicRelationsSpecialists',
'27-3040':
'WritersEditors',
'27-3041':
'Editors',
'27-3042':
'TechnicalWriters',
'27-3043':
'WritersAuthors',
'27-3090':
'MiscellaneousMediaCommunicationWorkers',
'27-3091':
'InterpretersTranslators',
'27-3092':
'CourtReportersSimultaneousCaptioners',
'27-3099':
'MediaCommunicationWorkersAllOther',
'27-4000':
'MediaCommunicationEquipmentWorkers',
'27-4010':
'BroadcastSoundLightingTechnicians',
'27-4011':
'AudioVideoTechnicians',
'27-4012':
'BroadcastTechnicians',
'27-4014':
'SoundEngineeringTechnicians',
'27-4015':
'LightingTechnicians',
'27-4020':
'Photographers',
'27-4021':
'Photographers',
'27-4030':
'TelevisionVideoFilmCameraOperatorsEditors',
'27-4031':
'CameraOperatorsTelevisionVideoFilm',
'27-4032':
'FilmVideoEditors',
'27-4090':
'MiscellaneousMediaCommunicationEquipmentWorkers',
'27-4099':
'MediaCommunicationEquipmentWorkersAllOther',
'29-0000':
'HealthcarePractitionersTechnical',
'29-1000':
'HealthcareDiagnosingOrTreatingPractitioners',
'29-1010':
'Chiropractors',
'29-1011':
'Chiropractors',
'29-1020':
'Dentists',
'29-1021':
'DentistsGeneral',
'29-1022':
'OralMaxillofacialSurgeons',
'29-1023':
'Orthodontists',
'29-1024':
'Prosthodontists',
'29-1029':
'DentistsAllOtherSpecialists',
'29-1030':
'DietitiansNutritionists',
'29-1031':
'DietitiansNutritionists',
'29-1040':
'Optometrists',
'29-1041':
'Optometrists',
'29-1050':
'Pharmacists',
'29-1051':
'Pharmacists',
'29-1070':
'PhysicianAssistants',
'29-1071':
'PhysicianAssistants',
'29-1080':
'Podiatrists',
'29-1081':
'Podiatrists',
'29-1120':
'Therapists',
'29-1122':
'OccupationalTherapists',
'29-1123':
'PhysicalTherapists',
'29-1124':
'RadiationTherapists',
'29-1125':
'RecreationalTherapists',
'29-1126':
'RespiratoryTherapists',
'29-1127':
'SpeechLanguagePathologists',
'29-1128':
'ExercisePhysiologists',
'29-1129':
'TherapistsAllOther',
'29-1130':
'Veterinarians',
'29-1131':
'Veterinarians',
'29-1140':
'RegisteredNurses',
'29-1141':
'RegisteredNurses',
'29-1150':
'NurseAnesthetists',
'29-1151':
'NurseAnesthetists',
'29-1160':
'NurseMidwives',
'29-1161':
'NurseMidwives',
'29-1170':
'NursePractitioners',
'29-1171':
'NursePractitioners',
'29-1180':
'Audiologists',
'29-1181':
'Audiologists',
'29-1210':
'Physicians',
'29-1211':
'Anesthesiologists',
'29-1212':
'Cardiologists',
'29-1213':
'Dermatologists',
'29-1214':
'EmergencyMedicinePhysicians',
'29-1215':
'FamilyMedicinePhysicians',
'29-1216':
'GeneralInternalMedicinePhysicians',
'29-1217':
'Neurologists',
'29-1218':
'ObstetriciansGynecologists',
'29-1221':
'PediatriciansGeneral',
'29-1222':
'PhysiciansPathologists',
'29-1223':
'Psychiatrists',
'29-1224':
'Radiologists',
'29-1229':
'PhysiciansAllOther',
'29-1240':
'Surgeons',
'29-1241':
'OphthalmologistsExceptPediatric',
'29-1242':
'OrthopedicSurgeonsExceptPediatric',
'29-1243':
'PediatricSurgeons',
'29-1249':
'SurgeonsAllOther',
'29-1290':
'MiscellaneousHealthcareDiagnosingOrTreatingPractitioners',
'29-1291':
'Acupuncturists',
'29-1292':
'DentalHygienists',
'29-1299':
'HealthcareDiagnosingOrTreatingPractitionersAllOther',
'29-2000':
'HealthTechnologistsTechnicians',
'29-2010':
'ClinicalLaboratoryTechnologistsTechnicians',
'29-2011':
'MedicalClinicalLaboratoryTechnologists',
'29-2012':
'MedicalClinicalLaboratoryTechnicians',
'29-2030':
'DiagnosticRelatedTechnologistsTechnicians',
'29-2031':
'CardiovascularTechnologistsTechnicians',
'29-2032':
'DiagnosticMedicalSonographers',
'29-2033':
'NuclearMedicineTechnologists',
'29-2034':
'RadiologicTechnologistsTechnicians',
'29-2035':
'MagneticResonanceImagingTechnologists',
'29-2036':
'MedicalDosimetrists',
'29-2040':
'EmergencyMedicalTechniciansParamedics',
'29-2042':
'EmergencyMedicalTechnicians',
'29-2043':
'Paramedics',
'29-2050':
'HealthPractitionerSupportTechnologistsTechnicians',
'29-2051':
'DieteticTechnicians',
'29-2052':
'PharmacyTechnicians',
'29-2053':
'PsychiatricTechnicians',
'29-2055':
'SurgicalTechnologists',
'29-2056':
'VeterinaryTechnologistsTechnicians',
'29-2057':
'OphthalmicMedicalTechnicians',
'29-2060':
'LicensedPracticalLicensedVocationalNurses',
'29-2061':
'LicensedPracticalLicensedVocationalNurses',
'29-2070':
'MedicalRecordsSpecialists',
'29-2072':
'MedicalRecordsSpecialists',
'29-2080':
'OpticiansDispensing',
'29-2081':
'OpticiansDispensing',
'29-2090':
'MiscellaneousHealthTechnologistsTechnicians',
'29-2091':
'OrthotistsProsthetists',
'29-2092':
'HearingAidSpecialists',
'29-2099':
'HealthTechnologistsTechniciansAllOther',
'29-9000':
'OtherHealthcarePractitionersTechnical',
'29-9020':
'HealthInformationTechnologistsMedicalRegistrars',
'29-9021':
'HealthInformationTechnologistsMedicalRegistrars',
'29-9090':
'MiscellaneousHealthPractitionersTechnicalWorkers',
'29-9091':
'AthleticTrainers',
'29-9092':
'GeneticCounselors',
'29-9093':
'SurgicalAssistants',
'29-9099':
'HealthcarePractitionersTechnicalWorkersAllOther',
'31-0000':
'HealthcareSupport',
'31-1100':
'HomeHealthPersonalCareAidesNursingAssistantsOrderliesPsychiatricAides',
'31-1120':
'HomeHealthPersonalCareAides',
'31-1121':
'HomeHealthAides',
'31-1122':
'PersonalCareAides',
'31-1130':
'NursingAssistantsOrderliesPsychiatricAides',
'31-1131':
'NursingAssistants',
'31-1132':
'Orderlies',
'31-1133':
'PsychiatricAides',
'31-2000':
'OccupationalTherapyPhysicalTherapistAssistantsAides',
'31-2010':
'OccupationalTherapyAssistantsAides',
'31-2011':
'OccupationalTherapyAssistants',
'31-2012':
'OccupationalTherapyAides',
'31-2020':
'PhysicalTherapistAssistantsAides',
'31-2021':
'PhysicalTherapistAssistants',
'31-2022':
'PhysicalTherapistAides',
'31-9000':
'OtherHealthcareSupport',
'31-9010':
'MassageTherapists',
'31-9011':
'MassageTherapists',
'31-9090':
'MiscellaneousHealthcareSupport',
'31-9091':
'DentalAssistants',
'31-9092':
'MedicalAssistants',
'31-9093':
'MedicalEquipmentPreparers',
'31-9094':
'MedicalTranscriptionists',
'31-9095':
'PharmacyAides',
'31-9096':
'VeterinaryAssistantsLaboratoryAnimalCaretakers',
'31-9097':
'Phlebotomists',
'31-9099':
'HealthcareSupportWorkersAllOther',
'33-0000':
'ProtectiveService',
'33-1000':
'SupervisorsOfProtectiveServiceWorkers',
'33-1010':
'FirstLineSupervisorsOfLawEnforcementWorkers',
'33-1011':
'FirstLineSupervisorsOfCorrectionalOfficers',
'33-1012':
'FirstLineSupervisorsOfPoliceDetectives',
'33-1020':
'FirstLineSupervisorsOfFirefightingPreventionWorkers',
'33-1021':
'FirstLineSupervisorsOfFirefightingPreventionWorkers',
'33-1090':
'MiscellaneousFirstLineSupervisorsProtectiveServiceWorkers',
'33-1091':
'FirstLineSupervisorsOfSecurityWorkers',
'33-1099':
'FirstLineSupervisorsOfProtectiveServiceWorkersAllOther',
'33-2000':
'FirefightingPreventionWorkers',
'33-2010':
'Firefighters',
'33-2011':
'Firefighters',
'33-2020':
'FireInspectors',
'33-2021':
'FireInspectorsInvestigators',
'33-2022':
'ForestFireInspectorsPreventionSpecialists',
'33-3000':
'LawEnforcementWorkers',
'33-3010':
'BailiffsCorrectionalOfficersJailers',
'33-3011':
'Bailiffs',
'33-3012':
'CorrectionalOfficersJailers',
'33-3020':
'DetectivesCriminalInvestigators',
'33-3021':
'DetectivesCriminalInvestigators',
'33-3030':
'FishGameWardens',
'33-3031':
'FishGameWardens',
'33-3040':
'ParkingEnforcementWorkers',
'33-3041':
'ParkingEnforcementWorkers',
'33-3050':
'PoliceOfficers',
'33-3051':
'PoliceSheriffsPatrolOfficers',
'33-3052':
'TransitRailroadPolice',
'33-9000':
'OtherProtectiveServiceWorkers',
'33-9010':
'AnimalControlWorkers',
'33-9011':
'AnimalControlWorkers',
'33-9020':
'PrivateDetectivesInvestigators',
'33-9021':
'PrivateDetectivesInvestigators',
'33-9030':
'SecurityGuardsGamblingSurveillanceOfficers',
'33-9031':
'GamblingSurveillanceOfficersGamblingInvestigators',
'33-9032':
'SecurityGuards',
'33-9090':
'MiscellaneousProtectiveServiceWorkers',
'33-9091':
'CrossingGuardsFlaggers',
'33-9092':
'LifeguardsSkiPatrolOtherRecreationalProtectiveServiceWorkers',
'33-9093':
'TransportationSecurityScreeners',
'33-9094':
'SchoolBusMonitors',
'33-9099':
'ProtectiveServiceWorkersAllOther',
'35-0000':
'FoodPreparationServingRelated',
'35-1000':
'SupervisorsOfFoodPreparationServingWorkers',
'35-1010':
'SupervisorsOfFoodPreparationServingWorkers',
'35-1011':
'ChefsHeadCooks',
'35-1012':
'FirstLineSupervisorsOfFoodPreparationServingWorkers',
'35-2000':
'CooksFoodPreparationWorkers',
'35-2010':
'Cooks',
'35-2011':
'CooksFastFood',
'35-2012':
'CooksInstitutionCafeteria',
'35-2013':
'CooksPrivateHousehold',
'35-2014':
'CooksRestaurant',
'35-2015':
'CooksShortOrder',
'35-2019':
'CooksAllOther',
'35-2020':
'FoodPreparationWorkers',
'35-2021':
'FoodPreparationWorkers',
'35-3000':
'FoodBeverageServingWorkers',
'35-3010':
'Bartenders',
'35-3011':
'Bartenders',
'35-3020':
'FastFoodCounterWorkers',
'35-3023':
'FastFoodCounterWorkers',
'35-3030':
'WaitersWaitresses',
'35-3031':
'WaitersWaitresses',
'35-3040':
'FoodServersNonrestaurant',
'35-3041':
'FoodServersNonrestaurant',
'35-9000':
'OtherFoodPreparationServingRelatedWorkers',
'35-9010':
'DiningRoomCafeteriaAttendantsBartenderHelpers',
'35-9011':
'DiningRoomCafeteriaAttendantsBartenderHelpers',
'35-9020':
'Dishwashers',
'35-9021':
'Dishwashers',
'35-9030':
'HostsHostessesRestaurantLoungeCoffeeShop',
'35-9031':
'HostsHostessesRestaurantLoungeCoffeeShop',
'35-9090':
'MiscellaneousFoodPreparationServingRelatedWorkers',
'35-9099':
'FoodPreparationServingRelatedWorkersAllOther',
'37-0000':
'BuildingGroundsCleaningMaintenance',
'37-1000':
'SupervisorsOfBuildingGroundsCleaningMaintenanceWorkers',
'37-1010':
'FirstLineSupervisorsOfBuildingGroundsCleaningMaintenanceWorkers',
'37-1011':
'FirstLineSupervisorsOfHousekeepingJanitorialWorkers',
'37-1012':
'FirstLineSupervisorsOfLandscapingLawnServiceGroundskeepingWorkers',
'37-2000':
'BuildingCleaningPestControlWorkers',
'37-2010':
'BuildingCleaningWorkers',
'37-2011':
'JanitorsCleanersExceptMaidsHousekeepingCleaners',
'37-2012':
'MaidsHousekeepingCleaners',
'37-2019':
'BuildingCleaningWorkersAllOther',
'37-2020':
'PestControlWorkers',
'37-2021':
'PestControlWorkers',
'37-3000':
'GroundsMaintenanceWorkers',
'37-3010':
'GroundsMaintenanceWorkers',
'37-3011':
'LandscapingGroundskeepingWorkers',
'37-3012':
'PesticideHandlersSprayersApplicatorsVegetation',
'37-3013':
'TreeTrimmersPruners',
'37-3019':
'GroundsMaintenanceWorkersAllOther',
'39-0000':
'PersonalCareService',
'39-1000':
'SupervisorsOfPersonalCareServiceWorkers',
'39-1010':
'FirstLineSupervisorsOfEntertainmentRecreationWorkers',
'39-1013':
'FirstLineSupervisorsOfGamblingServicesWorkers',
'39-1014':
'FirstLineSupervisorsOfEntertainmentRecreationWorkersExceptGamblingServices',
'39-1020':
'FirstLineSupervisorsOfPersonalServiceWorkers',
'39-1022':
'FirstLineSupervisorsOfPersonalServiceWorkers',
'39-2000':
'AnimalCareServiceWorkers',
'39-2010':
'AnimalTrainers',
'39-2011':
'AnimalTrainers',
'39-2020':
'AnimalCaretakers',
'39-2021':
'AnimalCaretakers',
'39-3000':
'EntertainmentAttendantsRelatedWorkers',
'39-3010':
'GamblingServicesWorkers',
'39-3011':
'GamblingDealers',
'39-3012':
'GamblingSportsBookWritersRunners',
'39-3019':
'GamblingServiceWorkersAllOther',
'39-3020':
'MotionPictureProjectionists',
'39-3021':
'MotionPictureProjectionists',
'39-3030':
'UshersLobbyAttendantsTicketTakers',
'39-3031':
'UshersLobbyAttendantsTicketTakers',
'39-3090':
'MiscellaneousEntertainmentAttendantsRelatedWorkers',
'39-3091':
'AmusementRecreationAttendants',
'39-3092':
'CostumeAttendants',
'39-3093':
'LockerRoomCoatroomDressingRoomAttendants',
'39-3099':
'EntertainmentAttendantsRelatedWorkersAllOther',
'39-4000':
'FuneralServiceWorkers',
'39-4010':
'EmbalmersCrematoryOperators',
'39-4011':
'Embalmers',
'39-4012':
'CrematoryOperators',
'39-4020':
'FuneralAttendants',
'39-4021':
'FuneralAttendants',
'39-4030':
'MorticiansUndertakersFuneralArrangers',
'39-4031':
'MorticiansUndertakersFuneralArrangers',
'39-5000':
'PersonalAppearanceWorkers',
'39-5010':
'BarbersHairdressersHairstylistsCosmetologists',
'39-5011':
'Barbers',
'39-5012':
'HairdressersHairstylistsCosmetologists',
'39-5090':
'MiscellaneousPersonalAppearanceWorkers',
'39-5091':
'MakeupArtistsTheatricalPerformance',
'39-5092':
'ManicuristsPedicurists',
'39-5093':
'Shampooers',
'39-5094':
'SkincareSpecialists',
'39-6000':
'BaggagePortersBellhopsConcierges',
'39-6010':
'BaggagePortersBellhopsConcierges',
'39-6011':
'BaggagePortersBellhops',
'39-6012':
'Concierges',
'39-7000':
'TourTravelGuides',
'39-7010':
'TourTravelGuides',
'39-7011':
'TourGuidesEscorts',
'39-7012':
'TravelGuides',
'39-9000':
'OtherPersonalCareServiceWorkers',
'39-9010':
'ChildcareWorkers',
'39-9011':
'ChildcareWorkers',
'39-9030':
'RecreationFitnessWorkers',
'39-9031':
'ExerciseTrainersGroupFitnessInstructors',
'39-9032':
'RecreationWorkers',
'39-9040':
'ResidentialAdvisors',
'39-9041':
'ResidentialAdvisors',
'39-9090':
'MiscellaneousPersonalCareServiceWorkers',
'39-9099':
'PersonalCareServiceWorkersAllOther',
'41-0000':
'SalesRelated',
'41-1000':
'SupervisorsOfSalesWorkers',
'41-1010':
'FirstLineSupervisorsOfSalesWorkers',
'41-1011':
'FirstLineSupervisorsOfRetailSalesWorkers',
'41-1012':
'FirstLineSupervisorsOfNonRetailSalesWorkers',
'41-2000':
'RetailSalesWorkers',
'41-2010':
'Cashiers',
'41-2011':
'Cashiers',
'41-2012':
'GamblingChangePersonsBoothCashiers',
'41-2020':
'CounterRentalClerksPartsSalespersons',
'41-2021':
'CounterRentalClerks',
'41-2022':
'PartsSalespersons',
'41-2030':
'RetailSalespersons',
'41-2031':
'RetailSalespersons',
'41-3000':
'SalesRepresentativesServices',
'41-3010':
'AdvertisingSalesAgents',
'41-3011':
'AdvertisingSalesAgents',
'41-3020':
'InsuranceSalesAgents',
'41-3021':
'InsuranceSalesAgents',
'41-3030':
'SecuritiesCommoditiesFinancialServicesSalesAgents',
'41-3031':
'SecuritiesCommoditiesFinancialServicesSalesAgents',
'41-3040':
'TravelAgents',
'41-3041':
'TravelAgents',
'41-3090':
'MiscellaneousSalesRepresentativesServices',
'41-3091':
'SalesRepresentativesOfServicesExceptAdvertisingInsuranceFinancialServicesTravel',
'41-4000':
'SalesRepresentativesWholesaleManufacturing',
'41-4010':
'SalesRepresentativesWholesaleManufacturing',
'41-4011':
'SalesRepresentativesWholesaleManufacturingTechnicalScientificProducts',
'41-4012':
'SalesRepresentativesWholesaleManufacturingExceptTechnicalScientificProducts',
'41-9000':
'OtherSalesRelatedWorkers',
'41-9010':
'ModelsDemonstratorsProductPromoters',
'41-9011':
'DemonstratorsProductPromoters',
'41-9012':
'Models',
'41-9020':
'RealEstateBrokersSalesAgents',
'41-9021':
'RealEstateBrokers',
'41-9022':
'RealEstateSalesAgents',
'41-9030':
'SalesEngineers',
'41-9031':
'SalesEngineers',
'41-9040':
'Telemarketers',
'41-9041':
'Telemarketers',
'41-9090':
'MiscellaneousSalesRelatedWorkers',
'41-9091':
'DoortoDoorSalesWorkersNewsStreetVendorsRelatedWorkers',
'41-9099':
'SalesRelatedWorkersAllOther',
'43-0000':
'OfficeAdministrativeSupport',
'43-1000':
'SupervisorsOfOfficeAdministrativeSupportWorkers',
'43-1010':
'FirstLineSupervisorsOfOfficeAdministrativeSupportWorkers',
'43-1011':
'FirstLineSupervisorsOfOfficeAdministrativeSupportWorkers',
'43-2000':
'CommunicationsEquipmentOperators',
'43-2010':
'SwitchboardOperatorsIncludingAnsweringService',
'43-2011':
'SwitchboardOperatorsIncludingAnsweringService',
'43-2020':
'TelephoneOperators',
'43-2021':
'TelephoneOperators',
'43-2090':
'MiscellaneousCommunicationsEquipmentOperators',
'43-2099':
'CommunicationsEquipmentOperatorsAllOther',
'43-3000':
'FinancialClerks',
'43-3010':
'BillAccountCollectors',
'43-3011':
'BillAccountCollectors',
'43-3020':
'BillingPostingClerks',
'43-3021':
'BillingPostingClerks',
'43-3030':
'BookkeepingAccountingAuditingClerks',
'43-3031':
'BookkeepingAccountingAuditingClerks',
'43-3040':
'GamblingCageWorkers',
'43-3041':
'GamblingCageWorkers',
'43-3050':
'PayrollTimekeepingClerks',
'43-3051':
'PayrollTimekeepingClerks',
'43-3060':
'ProcurementClerks',
'43-3061':
'ProcurementClerks',
'43-3070':
'Tellers',
'43-3071':
'Tellers',
'43-3090':
'MiscellaneousFinancialClerks',
'43-3099':
'FinancialClerksAllOther',
'43-4000':
'InformationRecordClerks',
'43-4010':
'BrokerageClerks',
'43-4011':
'BrokerageClerks',
'43-4020':
'CorrespondenceClerks',
'43-4021':
'CorrespondenceClerks',
'43-4030':
'CourtMunicipalLicenseClerks',
'43-4031':
'CourtMunicipalLicenseClerks',
'43-4040':
'CreditAuthorizersCheckersClerks',
'43-4041':
'CreditAuthorizersCheckersClerks',
'43-4050':
'CustomerServiceRepresentatives',
'43-4051':
'CustomerServiceRepresentatives',
'43-4060':
'EligibilityInterviewersGovernmentPrograms',
'43-4061':
'EligibilityInterviewersGovernmentPrograms',
'43-4070':
'FileClerks',
'43-4071':
'FileClerks',
'43-4080':
'HotelMotelResortDeskClerks',
'43-4081':
'HotelMotelResortDeskClerks',
'43-4110':
'InterviewersExceptEligibilityLoan',
'43-4111':
'InterviewersExceptEligibilityLoan',
'43-4120':
'LibraryAssistantsClerical',
'43-4121':
'LibraryAssistantsClerical',
'43-4130':
'LoanInterviewersClerks',
'43-4131':
'LoanInterviewersClerks',
'43-4140':
'NewAccountsClerks',
'43-4141':
'NewAccountsClerks',
'43-4150':
'OrderClerks',
'43-4151':
'OrderClerks',
'43-4160':
'HumanResourcesAssistantsExceptPayrollTimekeeping',
'43-4161':
'HumanResourcesAssistantsExceptPayrollTimekeeping',
'43-4170':
'ReceptionistsInformationClerks',
'43-4171':
'ReceptionistsInformationClerks',
'43-4180':
'ReservationTransportationTicketAgentsTravelClerks',
'43-4181':
'ReservationTransportationTicketAgentsTravelClerks',
'43-4190':
'MiscellaneousInformationRecordClerks',
'43-4199':
'InformationRecordClerksAllOther',
'43-5000':
'MaterialRecordingSchedulingDispatchingDistributingWorkers',
'43-5010':
'CargoFreightAgents',
'43-5011':
'CargoFreightAgents',
'43-5020':
'CouriersMessengers',
'43-5021':
'CouriersMessengers',
'43-5030':
'Dispatchers',
'43-5031':
'PublicSafetyTelecommunicators',
'43-5032':
'DispatchersExceptPoliceFireAmbulance',
'43-5040':
'MeterReadersUtilities',
'43-5041':
'MeterReadersUtilities',
'43-5050':
'PostalServiceWorkers',
'43-5051':
'PostalServiceClerks',
'43-5052':
'PostalServiceMailCarriers',
'43-5053':
'PostalServiceMailSortersProcessorsProcessingMachineOperators',
'43-5060':
'ProductionPlanningExpeditingClerks',
'43-5061':
'ProductionPlanningExpeditingClerks',
'43-5070':
'ShippingReceivingInventoryClerks',
'43-5071':
'ShippingReceivingInventoryClerks',
'43-5110':
'WeighersMeasurersCheckersSamplersRecordkeeping',
'43-5111':
'WeighersMeasurersCheckersSamplersRecordkeeping',
'43-6000':
'SecretariesAdministrativeAssistants',
'43-6010':
'SecretariesAdministrativeAssistants',
'43-6011':
'ExecutiveSecretariesExecutiveAdministrativeAssistants',
'43-6012':
'LegalSecretariesAdministrativeAssistants',
'43-6013':
'MedicalSecretariesAdministrativeAssistants',
'43-6014':
'SecretariesAdministrativeAssistantsExceptLegalMedicalExecutive',
'43-9000':
'OtherOfficeAdministrativeSupportWorkers',
'43-9020':
'DataEntryInformationProcessingWorkers',
'43-9021':
'DataEntryKeyers',
'43-9022':
'WordProcessorsTypists',
'43-9030':
'DesktopPublishers',
'43-9031':
'DesktopPublishers',
'43-9040':
'InsuranceClaimsPolicyProcessingClerks',
'43-9041':
'InsuranceClaimsPolicyProcessingClerks',
'43-9050':
'MailClerksMailMachineOperatorsExceptPostalService',
'43-9051':
'MailClerksMailMachineOperatorsExceptPostalService',
'43-9060':
'OfficeClerksGeneral',
'43-9061':
'OfficeClerksGeneral',
'43-9070':
'OfficeMachineOperatorsExceptComputer',
'43-9071':
'OfficeMachineOperatorsExceptComputer',
'43-9080':
'ProofreadersCopyMarkers',
'43-9081':
'ProofreadersCopyMarkers',
'43-9110':
'StatisticalAssistants',
'43-9111':
'StatisticalAssistants',
'43-9190':
'MiscellaneousOfficeAdministrativeSupportWorkers',
'43-9199':
'OfficeAdministrativeSupportWorkersAllOther',
'45-0000':
'FarmingFishingForestry',
'45-1000':
'SupervisorsOfFarmingFishingForestryWorkers',
'45-1010':
'FirstLineSupervisorsOfFarmingFishingForestryWorkers',
'45-1011':
'FirstLineSupervisorsOfFarmingFishingForestryWorkers',
'45-2000':
'AgriculturalWorkers',
'45-2010':
'AgriculturalInspectors',
'45-2011':
'AgriculturalInspectors',
'45-2020':
'AnimalBreeders',
'45-2021':
'AnimalBreeders',
'45-2040':
'GradersSortersAgriculturalProducts',
'45-2041':
'GradersSortersAgriculturalProducts',
'45-2090':
'MiscellaneousAgriculturalWorkers',
'45-2091':
'AgriculturalEquipmentOperators',
'45-2092':
'FarmworkersLaborersCropNurseryGreenhouse',
'45-2093':
'FarmworkersFarmRanchAquaculturalAnimals',
'45-2099':
'AgriculturalWorkersAllOther',
'45-3000':
'FishingHuntingWorkers',
'45-3030':
'FishingHuntingWorkers',
'45-3031':
'FishingHuntingWorkers',
'45-4000':
'ForestConservationLoggingWorkers',
'45-4010':
'ForestConservationWorkers',
'45-4011':
'ForestConservationWorkers',
'45-4020':
'LoggingWorkers',
'45-4021':
'Fallers',
'45-4022':
'LoggingEquipmentOperators',
'45-4023':
'LogGradersScalers',
'45-4029':
'LoggingWorkersAllOther',
'47-0000':
'ConstructionExtraction',
'47-1000':
'SupervisorsOfConstructionExtractionWorkers',
'47-1010':
'FirstLineSupervisorsOfConstructionTradesExtractionWorkers',
'47-1011':
'FirstLineSupervisorsOfConstructionTradesExtractionWorkers',
'47-2000':
'ConstructionTradesWorkers',
'47-2010':
'Boilermakers',
'47-2011':
'Boilermakers',
'47-2020':
'BrickmasonsBlockmasonsStonemasons',
'47-2021':
'BrickmasonsBlockmasons',
'47-2022':
'Stonemasons',
'47-2030':
'Carpenters',
'47-2031':
'Carpenters',
'47-2040':
'CarpetFloorTileInstallersFinishers',
'47-2041':
'CarpetInstallers',
'47-2042':
'FloorLayersExceptCarpetWoodHardTiles',
'47-2043':
'FloorSandersFinishers',
'47-2044':
'TileStoneSetters',
'47-2050':
'CementMasonsConcreteFinishersTerrazzoWorkers',
'47-2051':
'CementMasonsConcreteFinishers',
'47-2053':
'TerrazzoWorkersFinishers',
'47-2060':
'ConstructionLaborers',
'47-2061':
'ConstructionLaborers',
'47-2070':
'ConstructionEquipmentOperators',
'47-2071':
'PavingSurfacingTampingEquipmentOperators',
'47-2072':
'PileDriverOperators',
'47-2073':
'OperatingEngineersOtherConstructionEquipmentOperators',
'47-2080':
'DrywallInstallersCeilingTileInstallersTapers',
'47-2081':
'DrywallCeilingTileInstallers',
'47-2082':
'Tapers',
'47-2110':
'Electricians',
'47-2111':
'Electricians',
'47-2120':
'Glaziers',
'47-2121':
'Glaziers',
'47-2130':
'InsulationWorkers',
'47-2131':
'InsulationWorkersFloorCeilingWall',
'47-2132':
'InsulationWorkersMechanical',
'47-2140':
'PaintersPaperhangers',
'47-2141':
'PaintersConstructionMaintenance',
'47-2142':
'Paperhangers',
'47-2150':
'PipelayersPlumbersPipefittersSteamfitters',
'47-2151':
'Pipelayers',
'47-2152':
'PlumbersPipefittersSteamfitters',
'47-2160':
'PlasterersStuccoMasons',
'47-2161':
'PlasterersStuccoMasons',
'47-2170':
'ReinforcingIronRebarWorkers',
'47-2171':
'ReinforcingIronRebarWorkers',
'47-2180':
'Roofers',
'47-2181':
'Roofers',
'47-2210':
'SheetMetalWorkers',
'47-2211':
'SheetMetalWorkers',
'47-2220':
'StructuralIronSteelWorkers',
'47-2221':
'StructuralIronSteelWorkers',
'47-2230':
'SolarPhotovoltaicInstallers',
'47-2231':
'SolarPhotovoltaicInstallers',
'47-3000':
'HelpersConstructionTrades',
'47-3010':
'HelpersConstructionTrades',
'47-3011':
'HelpersBrickmasonsBlockmasonsStonemasonsTileMarbleSetters',
'47-3012':
'HelpersCarpenters',
'47-3013':
'HelpersElectricians',
'47-3014':
'HelpersPaintersPaperhangersPlasterersStuccoMasons',
'47-3015':
'HelpersPipelayersPlumbersPipefittersSteamfitters',
'47-3016':
'HelpersRoofers',
'47-3019':
'HelpersConstructionTradesAllOther',
'47-4000':
'OtherConstructionRelatedWorkers',
'47-4010':
'ConstructionBuildingInspectors',
'47-4011':
'ConstructionBuildingInspectors',
'47-4020':
'ElevatorEscalatorInstallersRepairers',
'47-4021':
'ElevatorEscalatorInstallersRepairers',
'47-4030':
'FenceErectors',
'47-4031':
'FenceErectors',
'47-4040':
'HazardousMaterialsRemovalWorkers',
'47-4041':
'HazardousMaterialsRemovalWorkers',
'47-4050':
'HighwayMaintenanceWorkers',
'47-4051':
'HighwayMaintenanceWorkers',
'47-4060':
'RailTrackLayingMaintenanceEquipmentOperators',
'47-4061':
'RailTrackLayingMaintenanceEquipmentOperators',
'47-4070':
'SepticTankServicersSewerPipeCleaners',
'47-4071':
'SepticTankServicersSewerPipeCleaners',
'47-4090':
'MiscellaneousConstructionRelatedWorkers',
'47-4091':
'SegmentalPavers',
'47-4099':
'ConstructionRelatedWorkersAllOther',
'47-5000':
'ExtractionWorkers',
'47-5010':
'DerrickRotaryDrillServiceUnitOperatorsOilGas',
'47-5011':
'DerrickOperatorsOilGas',
'47-5012':
'RotaryDrillOperatorsOilGas',
'47-5013':
'ServiceUnitOperatorsOilGas',
'47-5020':
'SurfaceMiningMachineOperatorsEarthDrillers',
'47-5022':
'ExcavatingLoadingMachineDraglineOperatorsSurfaceMining',
'47-5023':
'EarthDrillersExceptOilGas',
'47-5030':
'ExplosivesWorkersOrdnanceHandlingExpertsBlasters',
'47-5032':
'ExplosivesWorkersOrdnanceHandlingExpertsBlasters',
'47-5040':
'UndergroundMiningMachineOperators',
'47-5041':
'ContinuousMiningMachineOperators',
'47-5043':
'RoofBoltersMining',
'47-5044':
'LoadingMovingMachineOperatorsUndergroundMining',
'47-5049':
'UndergroundMiningMachineOperatorsAllOther',
'47-5050':
'RockSplittersQuarry',
'47-5051':
'RockSplittersQuarry',
'47-5070':
'RoustaboutsOilGas',
'47-5071':
'RoustaboutsOilGas',
'47-5080':
'HelpersExtractionWorkers',
'47-5081':
'HelpersExtractionWorkers',
'47-5090':
'MiscellaneousExtractionWorkers',
'47-5099':
'ExtractionWorkersAllOther',
'49-0000':
'InstallationMaintenanceRepair',
'49-1000':
'SupervisorsOfInstallationMaintenanceRepairWorkers',
'49-1010':
'FirstLineSupervisorsOfMechanicsInstallersRepairers',
'49-1011':
'FirstLineSupervisorsOfMechanicsInstallersRepairers',
'49-2000':
'ElectricalElectronicEquipmentMechanicsInstallersRepairers',
'49-2010':
'ComputerAutomatedTellerOfficeMachineRepairers',
'49-2011':
'ComputerAutomatedTellerOfficeMachineRepairers',
'49-2020':
'RadioTelecommunicationsEquipmentInstallersRepairers',
'49-2021':
'RadioCellularTowerEquipmentInstallersRepairers',
'49-2022':
'TelecommunicationsEquipmentInstallersRepairersExceptLineInstallers',
'49-2090':
'MiscellaneousElectricalElectronicEquipmentMechanicsInstallersRepairers',
'49-2091':
'AvionicsTechnicians',
'49-2092':
'ElectricMotorPowerToolRelatedRepairers',
'49-2093':
'ElectricalElectronicsInstallersRepairersTransportationEquipment',
'49-2094':
'ElectricalElectronicsRepairersCommercialIndustrialEquipment',
'49-2095':
'ElectricalElectronicsRepairersPowerhouseSubstationRelay',
'49-2096':
'ElectronicEquipmentInstallersRepairersMotorVehicles',
'49-2097':
'AudiovisualEquipmentInstallersRepairers',
'49-2098':
'SecurityFireAlarmSystemsInstallers',
'49-3000':
'VehicleMobileEquipmentMechanicsInstallersRepairers',
'49-3010':
'AircraftMechanicsServiceTechnicians',
'49-3011':
'AircraftMechanicsServiceTechnicians',
'49-3020':
'AutomotiveTechniciansRepairers',
'49-3021':
'AutomotiveBodyRelatedRepairers',
'49-3022':
'AutomotiveGlassInstallersRepairers',
'49-3023':
'AutomotiveServiceTechniciansMechanics',
'49-3030':
'BusTruckMechanicsDieselEngineSpecialists',
'49-3031':
'BusTruckMechanicsDieselEngineSpecialists',
'49-3040':
'HeavyVehicleMobileEquipmentServiceTechniciansMechanics',
'49-3041':
'FarmEquipmentMechanicsServiceTechnicians',
'49-3042':
'MobileHeavyEquipmentMechanicsExceptEngines',
'49-3043':
'RailCarRepairers',
'49-3050':
'SmallEngineMechanics',
'49-3051':
'MotorboatMechanicsServiceTechnicians',
'49-3052':
'MotorcycleMechanics',
'49-3053':
'OutdoorPowerEquipmentOtherSmallEngineMechanics',
'49-3090':
'MiscellaneousVehicleMobileEquipmentMechanicsInstallersRepairers',
'49-3091':
'BicycleRepairers',
'49-3092':
'RecreationalVehicleServiceTechnicians',
'49-3093':
'TireRepairersChangers',
'49-9000':
'OtherInstallationMaintenanceRepair',
'49-9010':
'ControlValveInstallersRepairers',
'49-9011':
'MechanicalDoorRepairers',
'49-9012':
'ControlValveInstallersRepairersExceptMechanicalDoor',
'49-9020':
'HeatingAirConditioningRefrigerationMechanicsInstallers',
'49-9021':
'HeatingAirConditioningRefrigerationMechanicsInstallers',
'49-9030':
'HomeApplianceRepairers',
'49-9031':
'HomeApplianceRepairers',
'49-9040':
'IndustrialMachineryInstallationRepairMaintenanceWorkers',
'49-9041':
'IndustrialMachineryMechanics',
'49-9043':
'MaintenanceWorkersMachinery',
'49-9044':
'Millwrights',
'49-9045':
'RefractoryMaterialsRepairersExceptBrickmasons',
'49-9050':
'LineInstallersRepairers',
'49-9051':
'ElectricalPowerLineInstallersRepairers',
'49-9052':
'TelecommunicationsLineInstallersRepairers',
'49-9060':
'PrecisionInstrumentEquipmentRepairers',
'49-9061':
'CameraPhotographicEquipmentRepairers',
'49-9062':
'MedicalEquipmentRepairers',
'49-9063':
'MusicalInstrumentRepairersTuners',
'49-9064':
'WatchClockRepairers',
'49-9069':
'PrecisionInstrumentEquipmentRepairersAllOther',
'49-9070':
'MaintenanceRepairWorkersGeneral',
'49-9071':
'MaintenanceRepairWorkersGeneral',
'49-9080':
'WindTurbineServiceTechnicians',
'49-9081':
'WindTurbineServiceTechnicians',
'49-9090':
'MiscellaneousInstallationMaintenanceRepairWorkers',
'49-9091':
'CoinVendingAmusementMachineServicersRepairers',
'49-9092':
'CommercialDivers',
'49-9094':
'LocksmithsSafeRepairers',
'49-9095':
'ManufacturedBuildingMobileHomeInstallers',
'49-9096':
'Riggers',
'49-9097':
'SignalTrackSwitchRepairers',
'49-9098':
'HelpersInstallationMaintenanceRepairWorkers',
'49-9099':
'InstallationMaintenanceRepairWorkersAllOther',
'51-0000':
'Production',
'51-1000':
'SupervisorsOfProductionWorkers',
'51-1010':
'FirstLineSupervisorsOfProductionOperatingWorkers',
'51-1011':
'FirstLineSupervisorsOfProductionOperatingWorkers',
'51-2000':
'AssemblersFabricators',
'51-2010':
'AircraftStructureSurfacesRiggingSystemsAssemblers',
'51-2011':
'AircraftStructureSurfacesRiggingSystemsAssemblers',
'51-2020':
'ElectricalElectronicsElectromechanicalAssemblers',
'51-2021':
'CoilWindersTapersFinishers',
'51-2022':
'ElectricalElectronicEquipmentAssemblers',
'51-2023':
'ElectromechanicalEquipmentAssemblers',
'51-2030':
'EngineOtherMachineAssemblers',
'51-2031':
'EngineOtherMachineAssemblers',
'51-2040':
'StructuralMetalFabricatorsFitters',
'51-2041':
'StructuralMetalFabricatorsFitters',
'51-2050':
'FiberglassLaminatorsFabricators',
'51-2051':
'FiberglassLaminatorsFabricators',
'51-2060':
'TimingDeviceAssemblersAdjusters',
'51-2061':
'TimingDeviceAssemblersAdjusters',
'51-2090':
'MiscellaneousAssemblersFabricators',
'51-2092':
'TeamAssemblers',
'51-2099':
'AssemblersFabricatorsAllOther',
'51-3000':
'FoodProcessingWorkers',
'51-3010':
'Bakers',
'51-3011':
'Bakers',
'51-3020':
'ButchersOtherMeatPoultryFishProcessingWorkers',
'51-3021':
'ButchersMeatCutters',
'51-3022':
'MeatPoultryFishCuttersTrimmers',
'51-3023':
'SlaughterersMeatPackers',
'51-3090':
'MiscellaneousFoodProcessingWorkers',
'51-3091':
'FoodTobaccoRoastingBakingDryingMachineOperatorsTenders',
'51-3092':
'FoodBatchmakers',
'51-3093':
'FoodCookingMachineOperatorsTenders',
'51-3099':
'FoodProcessingWorkersAllOther',
'51-4000':
'MetalWorkersPlasticWorkers',
'51-4020':
'FormingMachineSettersOperatorsTendersMetalPlastic',
'51-4021':
'ExtrudingDrawingMachineSettersOperatorsTendersMetalPlastic',
'51-4022':
'ForgingMachineSettersOperatorsTendersMetalPlastic',
'51-4023':
'RollingMachineSettersOperatorsTendersMetalPlastic',
'51-4030':
'MachineToolCuttingSettersOperatorsTendersMetalPlastic',
'51-4031':
'CuttingPunchingPressMachineSettersOperatorsTendersMetalPlastic',
'51-4032':
'DrillingBoringMachineToolSettersOperatorsTendersMetalPlastic',
'51-4033':
'GrindingLappingPolishingBuffingMachineToolSettersOperatorsTendersMetalPlastic',
'51-4034':
'LatheTurningMachineToolSettersOperatorsTendersMetalPlastic',
'51-4035':
'MillingPlaningMachineSettersOperatorsTendersMetalPlastic',
'51-4040':
'Machinists',
'51-4041':
'Machinists',
'51-4050':
'MetalFurnaceOperatorsTendersPourersCasters',
'51-4051':
'MetalRefiningFurnaceOperatorsTenders',
'51-4052':
'PourersCastersMetal',
'51-4060':
'ModelMakersPatternmakersMetalPlastic',
'51-4061':
'ModelMakersMetalPlastic',
'51-4062':
'PatternmakersMetalPlastic',
'51-4070':
'MoldersMoldingMachineSettersOperatorsTendersMetalPlastic',
'51-4071':
'FoundryMoldCoremakers',
'51-4072':
'MoldingCoremakingCastingMachineSettersOperatorsTendersMetalPlastic',
'51-4080':
'MultipleMachineToolSettersOperatorsTendersMetalPlastic',
'51-4081':
'MultipleMachineToolSettersOperatorsTendersMetalPlastic',
'51-4110':
'ToolDieMakers',
'51-4111':
'ToolDieMakers',
'51-4120':
'WeldingSolderingBrazingWorkers',
'51-4121':
'WeldersCuttersSolderersBrazers',
'51-4122':
'WeldingSolderingBrazingMachineSettersOperatorsTenders',
'51-4190':
'MiscellaneousMetalWorkersPlasticWorkers',
'51-4191':
'HeatTreatingEquipmentSettersOperatorsTendersMetalPlastic',
'51-4192':
'LayoutWorkersMetalPlastic',
'51-4193':
'PlatingMachineSettersOperatorsTendersMetalPlastic',
'51-4194':
'ToolGrindersFilersSharpeners',
'51-4199':
'MetalWorkersPlasticWorkersAllOther',
'51-5100':
'PrintingWorkers',
'51-5110':
'PrintingWorkers',
'51-5111':
'PrepressTechniciansWorkers',
'51-5112':
'PrintingPressOperators',
'51-5113':
'PrintBindingFinishingWorkers',
'51-6000':
'TextileApparelFurnishingsWorkers',
'51-6010':
'LaundryDryCleaningWorkers',
'51-6011':
'LaundryDryCleaningWorkers',
'51-6020':
'PressersTextileGarmentRelatedMaterials',
'51-6021':
'PressersTextileGarmentRelatedMaterials',
'51-6030':
'SewingMachineOperators',
'51-6031':
'SewingMachineOperators',
'51-6040':
'ShoeLeatherWorkers',
'51-6041':
'ShoeLeatherWorkersRepairers',
'51-6042':
'ShoeMachineOperatorsTenders',
'51-6050':
'TailorsDressmakersSewers',
'51-6051':
'SewersHand',
'51-6052':
'TailorsDressmakersCustomSewers',
'51-6060':
'TextileMachineSettersOperatorsTenders',
'51-6061':
'TextileBleachingDyeingMachineOperatorsTenders',
'51-6062':
'TextileCuttingMachineSettersOperatorsTenders',
'51-6063':
'TextileKnittingWeavingMachineSettersOperatorsTenders',
'51-6064':
'TextileWindingTwistingDrawingOutMachineSettersOperatorsTenders',
'51-6090':
'MiscellaneousTextileApparelFurnishingsWorkers',
'51-6091':
'ExtrudingFormingMachineSettersOperatorsTendersSyntheticGlassFibers',
'51-6092':
'FabricApparelPatternmakers',
'51-6093':
'Upholsterers',
'51-6099':
'TextileApparelFurnishingsWorkersAllOther',
'51-7000':
'Woodworkers',
'51-7010':
'CabinetmakersBenchCarpenters',
'51-7011':
'CabinetmakersBenchCarpenters',
'51-7020':
'FurnitureFinishers',
'51-7021':
'FurnitureFinishers',
'51-7030':
'ModelMakersPatternmakersWood',
'51-7031':
'ModelMakersWood',
'51-7032':
'PatternmakersWood',
'51-7040':
'WoodworkingMachineSettersOperatorsTenders',
'51-7041':
'SawingMachineSettersOperatorsTendersWood',
'51-7042':
'WoodworkingMachineSettersOperatorsTendersExceptSawing',
'51-7090':
'MiscellaneousWoodworkers',
'51-7099':
'WoodworkersAllOther',
'51-8000':
'PlantSystemOperators',
'51-8010':
'PowerPlantOperatorsDistributorsDispatchers',
'51-8011':
'NuclearPowerReactorOperators',
'51-8012':
'PowerDistributorsDispatchers',
'51-8013':
'PowerPlantOperators',
'51-8020':
'StationaryEngineersBoilerOperators',
'51-8021':
'StationaryEngineersBoilerOperators',
'51-8030':
'WaterWastewaterTreatmentPlantSystemOperators',
'51-8031':
'WaterWastewaterTreatmentPlantSystemOperators',
'51-8090':
'MiscellaneousPlantSystemOperators',
'51-8091':
'ChemicalPlantSystemOperators',
'51-8092':
'GasPlantOperators',
'51-8093':
'PetroleumPumpSystemOperatorsRefineryOperatorsGaugers',
'51-8099':
'PlantSystemOperatorsAllOther',
'51-9000':
'OtherProduction',
'51-9010':
'ChemicalProcessingMachineSettersOperatorsTenders',
'51-9011':
'ChemicalEquipmentOperatorsTenders',
'51-9012':
'SeparatingFilteringClarifyingPrecipitatingStillMachineSettersOperatorsTenders',
'51-9020':
'CrushingGrindingPolishingMixingBlendingWorkers',
'51-9021':
'CrushingGrindingPolishingMachineSettersOperatorsTenders',
'51-9022':
'GrindingPolishingWorkersHand',
'51-9023':
'MixingBlendingMachineSettersOperatorsTenders',
'51-9030':
'CuttingWorkers',
'51-9031':
'CuttersTrimmersHand',
'51-9032':
'CuttingSlicingMachineSettersOperatorsTenders',
'51-9040':
'ExtrudingFormingPressingCompactingMachineSettersOperatorsTenders',
'51-9041':
'ExtrudingFormingPressingCompactingMachineSettersOperatorsTenders',
'51-9050':
'FurnaceKilnOvenDrierKettleOperatorsTenders',
'51-9051':
'FurnaceKilnOvenDrierKettleOperatorsTenders',
'51-9060':
'InspectorsTestersSortersSamplersWeighers',
'51-9061':
'InspectorsTestersSortersSamplersWeighers',
'51-9070':
'JewelersPreciousStoneMetalWorkers',
'51-9071':
'JewelersPreciousStoneMetalWorkers',
'51-9080':
'DentalOphthalmicLaboratoryTechniciansMedicalApplianceTechnicians',
'51-9081':
'DentalLaboratoryTechnicians',
'51-9082':
'MedicalApplianceTechnicians',
'51-9083':
'OphthalmicLaboratoryTechnicians',
'51-9110':
'PackagingFillingMachineOperatorsTenders',
'51-9111':
'PackagingFillingMachineOperatorsTenders',
'51-9120':
'PaintingWorkers',
'51-9123':
'PaintingCoatingDecoratingWorkers',
'51-9124':
'CoatingPaintingSprayingMachineSettersOperatorsTenders',
'51-9140':
'SemiconductorProcessingTechnicians',
'51-9141':
'SemiconductorProcessingTechnicians',
'51-9150':
'PhotographicProcessWorkersProcessingMachineOperators',
'51-9151':
'PhotographicProcessWorkersProcessingMachineOperators',
'51-9160':
'ComputerNumericallyControlledToolOperatorsProgrammers',
'51-9161':
'ComputerNumericallyControlledToolOperators',
'51-9162':
'ComputerNumericallyControlledToolProgrammers',
'51-9190':
'MiscellaneousProductionWorkers',
'51-9191':
'AdhesiveBondingMachineOperatorsTenders',
'51-9192':
'CleaningWashingMetalPicklingEquipmentOperatorsTenders',
'51-9193':
'CoolingFreezingEquipmentOperatorsTenders',
'51-9194':
'EtchersEngravers',
'51-9195':
'MoldersShapersCastersExceptMetalPlastic',
'51-9196':
'PaperGoodsMachineSettersOperatorsTenders',
'51-9197':
'TireBuilders',
'51-9198':
'HelpersProductionWorkers',
'51-9199':
'ProductionWorkersAllOther',
'53-0000':
'TransportationMaterialMoving',
'53-1000':
'SupervisorsOfTransportationMaterialMovingWorkers',
'53-1040':
'FirstLineSupervisorsOfTransportationMaterialMovingWorkers',
'53-1041':
'AircraftCargoHandlingSupervisors',
'53-1042':
'FirstLineSupervisorsOfHelpersLaborersMaterialMoversHand',
'53-1043':
'FirstLineSupervisorsOfMaterialMovingMachineVehicleOperators',
'53-1044':
'FirstLineSupervisorsOfPassengerAttendants',
'53-1049':
'FirstLineSupervisorsOfTransportationWorkersAllOther',
'53-2000':
'AirTransportationWorkers',
'53-2010':
'AircraftPilotsFlightEngineers',
'53-2011':
'AirlinePilotsCopilotsFlightEngineers',
'53-2012':
'CommercialPilots',
'53-2020':
'AirTrafficControllersAirfieldOperationsSpecialists',
'53-2021':
'AirTrafficControllers',
'53-2022':
'AirfieldOperationsSpecialists',
'53-2030':
'FlightAttendants',
'53-2031':
'FlightAttendants',
'53-3000':
'MotorVehicleOperators',
'53-3010':
'AmbulanceDriversAttendantsExceptEmergencyMedicalTechnicians',
'53-3011':
'AmbulanceDriversAttendantsExceptEmergencyMedicalTechnicians',
'53-3030':
'DriverSalesWorkersTruckDrivers',
'53-3031':
'DriverSalesWorkers',
'53-3032':
'HeavyTractorTrailerTruckDrivers',
'53-3033':
'LightTruckDrivers',
'53-3050':
'PassengerVehicleDrivers',
'53-3051':
'BusDriversSchool',
'53-3052':
'BusDriversTransitIntercity',
'53-3053':
'ShuttleDriversChauffeurs',
'53-3054':
'TaxiDrivers',
'53-3090':
'MiscellaneousMotorVehicleOperators',
'53-3099':
'MotorVehicleOperatorsAllOther',
'53-4000':
'RailTransportationWorkers',
'53-4010':
'LocomotiveEngineersOperators',
'53-4011':
'LocomotiveEngineers',
'53-4013':
'RailYardEngineersDinkeyOperatorsHostlers',
'53-4020':
'RailroadBrakeSignalSwitchOperatorsLocomotiveFirers',
'53-4022':
'RailroadBrakeSignalSwitchOperatorsLocomotiveFirers',
'53-4030':
'RailroadConductorsYardmasters',
'53-4031':
'RailroadConductorsYardmasters',
'53-4040':
'SubwayStreetcarOperators',
'53-4041':
'SubwayStreetcarOperators',
'53-4090':
'MiscellaneousRailTransportationWorkers',
'53-4099':
'RailTransportationWorkersAllOther',
'53-5000':
'WaterTransportationWorkers',
'53-5010':
'SailorsMarineOilers',
'53-5011':
'SailorsMarineOilers',
'53-5020':
'ShipBoatCaptainsOperators',
'53-5021':
'CaptainsMatesPilotsOfWaterVessels',
'53-5022':
'MotorboatOperators',
'53-5030':
'ShipEngineers',
'53-5031':
'ShipEngineers',
'53-6000':
'OtherTransportationWorkers',
'53-6010':
'BridgeLockTenders',
'53-6011':
'BridgeLockTenders',
'53-6020':
'ParkingAttendants',
'53-6021':
'ParkingAttendants',
'53-6030':
'TransportationServiceAttendants',
'53-6031':
'AutomotiveWatercraftServiceAttendants',
'53-6032':
'AircraftServiceAttendants',
'53-6040':
'TrafficTechnicians',
'53-6041':
'TrafficTechnicians',
'53-6050':
'TransportationInspectors',
'53-6051':
'TransportationInspectors',
'53-6060':
'PassengerAttendants',
'53-6061':
'PassengerAttendants',
'53-6090':
'MiscellaneousTransportationWorkers',
'53-6099':
'TransportationWorkersAllOther',
'53-7000':
'MaterialMovingWorkers',
'53-7010':
'ConveyorOperatorsTenders',
'53-7011':
'ConveyorOperatorsTenders',
'53-7020':
'CraneTowerOperators',
'53-7021':
'CraneTowerOperators',
'53-7030':
'DredgeOperators',
'53-7031':
'DredgeOperators',
'53-7040':
'HoistWinchOperators',
'53-7041':
'HoistWinchOperators',
'53-7050':
'IndustrialTruckTractorOperators',
'53-7051':
'IndustrialTruckTractorOperators',
'53-7060':
'LaborersMaterialMovers',
'53-7061':
'CleanersOfVehiclesEquipment',
'53-7062':
'LaborersFreightStockMaterialMoversHand',
'53-7063':
'MachineFeedersOffbearers',
'53-7064':
'PackersPackagersHand',
'53-7065':
'StockersOrderFillers',
'53-7070':
'PumpingStationOperators',
'53-7071':
'GasCompressorGasPumpingStationOperators',
'53-7072':
'PumpOperatorsExceptWellheadPumpers',
'53-7073':
'WellheadPumpers',
'53-7080':
'RefuseRecyclableMaterialCollectors',
'53-7081':
'RefuseRecyclableMaterialCollectors',
'53-7120':
'TankCarTruckShipLoaders',
'53-7121':
'TankCarTruckShipLoaders',
'53-7190':
'MiscellaneousMaterialMovingWorkers',
'53-7199':
'MaterialMovingWorkersAllOther',
'55-0000':
'MilitarySpecific',
'55-1000':
'MilitaryOfficerSpecialTacticalOperationsLeaders',
'55-1010':
'MilitaryOfficerSpecialTacticalOperationsLeaders',
'55-1011':
'AirCrewOfficers',
'55-1012':
'AircraftLaunchRecoveryOfficers',
'55-1013':
'ArmoredAssaultVehicleOfficers',
'55-1014':
'ArtilleryMissileOfficers',
'55-1015':
'CommandControlCenterOfficers',
'55-1016':
'InfantryOfficers',
'55-1017':
'SpecialForcesOfficers',
'55-1019':
'MilitaryOfficerSpecialTacticalOperationsLeadersAllOther',
'55-2000':
'FirstLineEnlistedMilitarySupervisors',
'55-2010':
'FirstLineEnlistedMilitarySupervisors',
'55-2011':
'FirstLineSupervisorsOfAirCrewMembers',
'55-2012':
'FirstLineSupervisorsOfWeaponsSpecialistsCrewMembers',
'55-2013':
'FirstLineSupervisorsOfAllOtherTacticalOperationsSpecialists',
'55-3000':
'MilitaryEnlistedTacticalOperationsAirWeaponsSpecialistsCrewMembers',
'55-3010':
'MilitaryEnlistedTacticalOperationsAirWeaponsSpecialistsCrewMembers',
'55-3011':
'AirCrewMembers',
'55-3012':
'AircraftLaunchRecoverySpecialists',
'55-3013':
'ArmoredAssaultVehicleCrewMembers',
'55-3014':
'ArtilleryMissileCrewMembers',
'55-3015':
'CommandControlCenterSpecialists',
'55-3016':
'Infantry',
'55-3018':
'SpecialForces',
'55-3019':
'MilitaryEnlistedTacticalOperationsAirWeaponsSpecialistsCrewMembersAllOther',
'intermediateAggregation-1':
'ManagementBusinessFinancial',
'intermediateAggregation-2':
'ComputerEngineeringScience',
'intermediateAggregation-3':
'EducationLegalCommunityServiceArtsMedia',
'intermediateAggregation-4':
'HealthcarePractitionersTechnical',
'intermediateAggregation-5':
'Service',
'intermediateAggregation-6':
'SalesRelated',
'intermediateAggregation-9':
'ConstructionExtraction',
'intermediateAggregation-10':
'InstallationMaintenanceRepair',
'intermediateAggregation-11':
'Production',
'intermediateAggregation-12':
'TransportationMaterialMoving',
'highLevelAggregation-1':
'ManagementBusinessScienceArts',
'highLevelAggregation-2':
'Service',
'highLevelAggregation-3':
'SalesOffice',
'highLevelAggregation-4':
'NaturalResourcesConstructionMaintenance',
'highLevelAggregation-5':
'ProductionTransportationMaterialMoving',
'highLevelAggregation-6':
'MilitarySpecific'
}
if __name__ == "__main__":
    print(_create_soc_codes_map())
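    # Example lookups (illustrative sketch, not part of the original module):
    # detailed 6-digit SOC codes share the prefix of their major group, so
    # '53-2031' rolls up under '53-2030' and ultimately '53-0000'.
    soc_map = _create_soc_codes_map()
    print(soc_map['53-2031'])   # FlightAttendants
    print(soc_map['53-0000'])   # TransportationMaterialMoving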
|
from django.db import models
from django.urls import reverse
from django.utils.text import slugify


class Discussion(models.Model):
    """A discussion board that groups user-posted topics."""
    slug = models.SlugField(max_length=80, unique=True, blank=True)
    name = models.CharField(max_length=60, unique=True, blank=True)
    description = models.TextField(blank=True)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("discussions:details", kwargs={"slug": self.slug})

    def post_topic_url(self):
        return reverse("discussions:new-topic", kwargs={"slug": self.slug})

    def get_topics(self):
        # The reverse FK manager is already scoped to this discussion.
        return self.topics.all()

    def save(self, *args, **kwargs):
        # Derive the slug from the name on first save so existing URLs stay
        # stable even if the name is edited later.
        if not self.slug:
            self.slug = slugify(self.name)
        super().save(*args, **kwargs)

    class Meta:
        verbose_name = 'discussion'
        verbose_name_plural = 'discussions'
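# Usage sketch (illustrative, not part of the original module; assumes a
# configured Django project exposing the "discussions" URL namespace):
#
#     d = Discussion.objects.create(name="Site Feedback")
#     d.slug                  # -> "site-feedback", derived in save()
#     d.get_absolute_url()    # -> URL for "discussions:details" with that slug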
|
word_list = ["""a-cxl""",
"""a-delta""",
"""a-i""",
"""a1-""",
"""a1c""",
"""a2-marker""",
"""a20""",
"""aap""",
"""aapcc""",
"""aarhus""",
"""aarss""",
"""aavg""",
"""aavgs""",
"""ab-secreting""",
"""ab023""",
"""aba""",
"""abandoned""",
"""abbott""",
"""abc""",
"""abdomen""",
"""abdomenwere""",
"""abdominal""",
"""abdominis""",
"""abducens""",
"""abduction""",
"""abductive""",
"""abductors""",
"""abemaciclib""",
"""aberrant""",
"""aberrantly""",
"""aberrations""",
"""abetted""",
"""abilities""",
"""ability""",
"""abiraterone""",
"""ablation""",
"""ablative""",
"""able""",
"""abmr""",
"""abnormal""",
"""abnormalities""",
"""abnormality""",
"""abnormally""",
"""abo""",
"""abo-incompatible""",
"""abobont-a""",
"""abobotulinumtoxina""",
"""abondance""",
"""abortion""",
"""abortive""",
"""about""",
"""above""",
"""abp""",
"""abroad""",
"""abrogate""",
"""abrogated""",
"""abruption""",
"""abscess""",
"""abscesses""",
"""abscisic""",
"""absence""",
"""absent""",
"""absenteeism""",
"""absolute""",
"""absorb""",
"""absorbance""",
"""absorbed""",
"""absorptiometry""",
"""absorption""",
"""abstinence""",
"""abstinence-induced""",
"""abstract""",
"""abstracted""",
"""abstracts""",
"""abundance""",
"""abundances""",
"""abundant""",
"""abundant-2""",
"""abundantly""",
"""abuse""",
"""abuse-induced""",
"""abused""",
"""abutting""",
"""abx""",
"""ac-motif""",
"""ac-motifcdkl3""",
"""ac-motifs""",
"""aca""",
"""aca-positive""",
"""academic""",
"""academics""",
"""academy""",
"""acalabrutinib""",
"""acalabrutinib-treated""",
"""acamprosate""",
"""acanthosis""",
"""acariasis""",
"""acas""",
"""acbt""",
"""accelerate""",
"""accelerated""",
"""accelerates""",
"""accelerating""",
"""acceleration""",
"""accelerations""",
"""accelerometry""",
"""accent""",
"""accents""",
"""accentuate""",
"""accept""",
"""acceptability""",
"""acceptable""",
"""acceptance""",
"""accepted""",
"""acceptor""",
"""acceptors""",
"""access""",
"""accessed""",
"""accessibility""",
"""accessible""",
"""accessing""",
"""accessory""",
"""accident""",
"""accidental""",
"""accidentally""",
"""accidents""",
"""acclimation""",
"""accommodate""",
"""accompanied""",
"""accompany""",
"""accompanying""",
"""accomplished""",
"""accordance""",
"""according""",
"""accordingly""",
"""account""",
"""accountability""",
"""accounted""",
"""accounting""",
"""accounts""",
"""accreditation""",
"""accreted""",
"""accrued""",
"""accruement""",
"""accumulated""",
"""accumulating""",
"""accumulation""",
"""accumulations""",
"""accuracies""",
"""accuracy""",
"""accurate""",
"""accurately""",
"""ace""",
"""ace-i""",
"""aceis""",
"""acetabular""",
"""acetabuloplasty""",
"""acetaminophen""",
"""acetate""",
"""acetic""",
"""acetone""",
"""acetonide""",
"""acetonitrile""",
"""acetylation""",
"""acetylcholine""",
"""acetylcholinesterase""",
"""ach""",
"""achalasia""",
"""ache""",
"""aches""",
"""achievable""",
"""achieve""",
"""achieved""",
"""achievement""",
"""achievements""",
"""achieves""",
"""achieving""",
"""achilles""",
"""acid""",
"""acid-binding""",
"""acid-copolymerized""",
"""acid-ethyl""",
"""acid-induced""",
"""acidic""",
"""acidosis""",
"""acids""",
"""acinar""",
"""acinetobacter""",
"""acknowledge""",
"""acknowledgement""",
"""acknowledgements""",
"""acknowledging""",
"""acl""",
"""acne""",
"""acompanhamento""",
"""acordo""",
"""acoustic""",
"""acoustic-phonetic""",
"""acq""",
"""acquired""",
"""acquirers""",
"""acquiring""",
"""acquisition""",
"""acquisitions""",
"""acr""",
"""acral""",
"""acromegaly""",
"""across""",
"""acrylate""",
"""acs""",
"""act""",
"""acted""",
"""acth""",
"""acth-independent""",
"""actigraphy""",
"""actin""",
"""acting""",
"""actinomycetemcomitans""",
"""action""",
"""actionable""",
"""actions""",
"""activate""",
"""activated""",
"""activates""",
"""activating""",
"""activation""",
"""activation-induced""",
"""activator""",
"""activators""",
"""active""",
"""actively""",
"""activin""",
"""activin-like""",
"""activists""",
"""activities""",
"""activity""",
"""activity-based""",
"""activity-sensing""",
"""actomyosin""",
"""actors""",
"""acts""",
"""actual""",
"""actually""",
"""actuarial""",
"""actuation""",
"""acuity""",
"""acupressure""",
"""acupuncture""",
"""acuseal""",
"""acute""",
"""acute-on-chronic""",
"""acute-onset""",
"""acutely""",
"""acyclicity""",
"""acyclovir""",
"""adalimumab""",
"""adamopoulos""",
"""adamts13""",
"""adapt""",
"""adaptability""",
"""adaptation""",
"""adaptations""",
"""adapted""",
"""adapting""",
"""adaption""",
"""adaptive""",
"""adaptor""",
"""add""",
"""add-on""",
"""added""",
"""addiction""",
"""addiction-based""",
"""addictive-like""",
"""adding""",
"""addition""",
"""additional""",
"""additionally""",
"""additive""",
"""address""",
"""addressed""",
"""addresses""",
"""addressing""",
"""adds""",
"""adduction""",
"""adductor""",
"""adductors""",
"""adenine""",
"""adenine-cytosine""",
"""adenocarcinoma""",
"""adenocarcinomas""",
"""adenoma""",
"""adenomas""",
"""adenosine""",
"""adenotonsillectomy""",
"""adenoviral""",
"""adequacy""",
"""adequate""",
"""adequately""",
"""adhere""",
"""adherence""",
"""adherent""",
"""adhesin""",
"""adhesion""",
"""adhesion-1""",
"""adhesions""",
"""adhesive""",
"""adipocytes""",
"""adipocytokine""",
"""adipokine""",
"""adipokines""",
"""adipose""",
"""adiposity""",
"""adj""",
"""adjacent""",
"""adjunct""",
"""adjunctive""",
"""adjuncts""",
"""adjust""",
"""adjustable""",
"""adjusted""",
"""adjusting""",
"""adjustment""",
"""adjustments""",
"""adjuvant""",
"""adl""",
"""adme""",
"""administered""",
"""administrated""",
"""administration""",
"""administrative""",
"""administratively""",
"""administrators""",
"""admission""",
"""admissions""",
"""admittance""",
"""admitted""",
"""admixture""",
"""adnex""",
"""adnexa""",
"""adnexal""",
"""adnexectomy""",
"""adolescence""",
"""adolescent""",
"""adolescents""",
"""adopt""",
"""adopted""",
"""adopting""",
"""adoption""",
"""adoptive""",
"""adoptively""",
"""adopts""",
"""adotado""",
"""adp-ribose""",
"""adr""",
"""adrenal""",
"""adrenergic""",
"""adrenocortical""",
"""adrenocorticotropic""",
"""adsd""",
"""adsms""",
"""adt""",
"""adult""",
"""adult-equivalent""",
"""adult-use""",
"""adulthood""",
"""adults""",
"""advance""",
"""advanced""",
"""advanced-level""",
"""advanced-stage""",
"""advancement""",
"""advancements""",
"""advances""",
"""advancing""",
"""advantage""",
"""advantageous""",
"""advantages""",
"""advent""",
"""advents""",
"""adverse""",
"""adversely""",
"""adversos""",
"""advertising""",
"""advice""",
"""advices""",
"""advisable""",
"""advised""",
"""advisory""",
"""advocacy""",
"""advocate""",
"""advocated""",
"""advocates""",
"""aer""",
"""aer2""",
"""aerial""",
"""aerobic""",
"""aerodynamic""",
"""aerogel""",
"""aerosol""",
"""aerosol-generating""",
"""aerosolised""",
"""aerosolization""",
"""aers""",
"""aeruginosa""",
"""aesthetic""",
"""aestivation""",
"""aestivum""",
"""aetiology""",
"""af10""",
"""af9""",
"""afa-pd""",
"""afectado""",
"""affairs""",
"""affect""",
"""affected""",
"""affecting""",
"""affection""",
"""affective""",
"""affects""",
"""afferent""",
"""afferents""",
"""affiliation""",
"""affinities""",
"""affinity""",
"""affirm""",
"""affirms""",
"""afflicting""",
"""afflicts""",
"""affordability""",
"""affordable""",
"""affords""",
"""afin""",
"""aforementioned""",
"""africa""",
"""african""",
"""afrique""",
"""afro-eurasian""",
"""after""",
"""afternoon""",
"""aftns""",
"""ag1478""",
"""again""",
"""against""",
"""agar""",
"""age""",
"""age-""",
"""age-appropriate""",
"""age-associated""",
"""age-compatible""",
"""age-dependent""",
"""age-gender""",
"""age-group""",
"""age-matched""",
"""age-period-cohort""",
"""age-proportional""",
"""age-related""",
"""age-specific""",
"""age-standardised""",
"""age-standardization""",
"""age-tailored""",
"""aged""",
"""ageing""",
"""agencies""",
"""agency""",
"""agenesis""",
"""agent""",
"""agent-based""",
"""agents""",
"""ages""",
"""aggca""",
"""aggravating""",
"""aggregate""",
"""aggregates""",
"""aggregatibacter""",
"""aggregation""",
"""aggregation-prone""",
"""aggregations""",
"""aggressive""",
"""aggressiveness""",
"""aghd""",
"""agility""",
"""aging""",
"""agitation""",
"""ago""",
"""agonism""",
"""agonist""",
"""agonistas""",
"""agonistic""",
"""agonists""",
"""agranular""",
"""agree""",
"""agreed""",
"""agreement""",
"""agricultural""",
"""agriculturally""",
"""agriculture""",
"""agroforestry""",
"""agrostemma""",
"""agudeza""",
"""agudo""",
"""ahead""",
"""ahf""",
"""ahi""",
"""ahn""",
"""ahnak""",
"""ahr""",
"""ahvaz""",
"""ai-assisted""",
"""ai-navigation""",
"""aicar""",
"""aichi""",
"""aid""",
"""aide-memoir""",
"""aided""",
"""aids""",
"""aim""",
"""aimed""",
"""aiming""",
"""aims""",
"""ainsi""",
"""air""",
"""airborne""",
"""aircraft""",
"""aires""",
"""airflow""",
"""airport""",
"""airway""",
"""airways""",
"""ais""",
"""aisselles""",
"""aiv""",
"""ajcc7""",
"""ajcc8""",
"""ajuda""",
"""aki""",
"""akin""",
"""akt""",
"""alabama""",
"""alanine""",
"""alanyl-trna""",
"""alarming""",
"""alars""",
"""alaryngeal""",
"""alaska""",
"""alb""",
"""alba""",
"""albeit""",
"""alberta""",
"""alborz""",
"""albumin""",
"""albuminuria""",
"""alc-depleted""",
"""alcohol""",
"""alcohol-related""",
"""alcohol-use""",
"""alcoholic""",
"""alcs""",
"""aldosterone""",
"""aldosterone-induced""",
"""aldosterone-infused""",
"""aldosterone-producing""",
"""aldosteronism""",
"""aleatorizados""",
"""alert""",
"""alf""",
"""alfa-2""",
"""alga""",
"""algae""",
"""algometer""",
"""algorithm""",
"""algorithms""",
"""alguns""",
"""align""",
"""aligned""",
"""alignment""",
"""alignments""",
"""aligns""",
"""alike""",
"""alive""",
"""alk""",
"""alkali""",
"""alkaline""",
"""alkaloids""",
"""alkenes""",
"""alkyl""",
"""alkylating""",
"""alkylator""",
"""alkylbenzenes""",
"""all""",
"""all-cause""",
"""all-day""",
"""all-inclusive""",
"""all-optical""",
"""allegations""",
"""alleged""",
"""allele""",
"""allele-specific""",
"""alleles""",
"""allergen""",
"""allergenic""",
"""allergens""",
"""allergic""",
"""allergists""",
"""allergy""",
"""alleviate""",
"""alleviates""",
"""alleviating""",
"""alleviation""",
"""alliance""",
"""allied""",
"""allo-hct""",
"""allo-hsct""",
"""alloantigen""",
"""allocate""",
"""allocated""",
"""allocation""",
"""allocative""",
"""allodynia""",
"""allogeneic""",
"""allogenic""",
"""allograft""",
"""allografts""",
"""allopurinol""",
"""alloreactive""",
"""allosct""",
"""allosteric""",
"""allow""",
"""allowed""",
"""allowing""",
"""allows""",
"""alloys""",
"""alma""",
"""almost""",
"""alone""",
"""along""",
"""alongside""",
"""alopecia""",
"""alpha""",
"""alpha-helix""",
"""alpha-linolenic""",
"""alpha-myosin""",
"""alpha-particle""",
"""already""",
"""also""",
"""alt""",
"""alter""",
"""alteration""",
"""alterations""",
"""altered""",
"""altering""",
"""alternaria""",
"""alternata""",
"""alternate""",
"""alternating""",
"""alternations""",
"""alternative""",
"""alternative-release""",
"""alternatives""",
"""alters""",
"""although""",
"""altitude""",
"""altmetric""",
"""altogether""",
"""altos""",
"""alum""",
"""aluminum""",
"""alveolar""",
"""alveolus""",
"""always""",
"""alzheimer""",
"""ama""",
"""amalgam""",
"""amara""",
"""ambient""",
"""ambos""",
"""ambulation""",
"""ambulators""",
"""ambulatory""",
"""amd""",
"""amed""",
"""ameliorated""",
"""ameliorates""",
"""amenable""",
"""amendable""",
"""amenorrhea""",
"""amenorrhoea""",
"""america""",
"""american""",
"""americans""",
"""amh""",
"""amharic""",
"""ami""",
"""amidation""",
"""amines""",
"""amino""",
"""aminoacyl-trna""",
"""aminomethane""",
"""aminophylline""",
"""aminotransferase""",
"""amitriptyline""",
"""aml""",
"""aml1-eto""",
"""aml1-eto-p300""",
"""amls""",
"""ammonia""",
"""ammonium""",
"""amniotomy""",
"""amo""",
"""amoebicidal""",
"""among""",
"""amongst""",
"""amorphous""",
"""amorphous-crystalline""",
"""amortized""",
"""amount""",
"""amounting""",
"""amounts""",
"""amphetamine""",
"""amphetamine-involved""",
"""amphetamines""",
"""amphibia""",
"""amphibian""",
"""amphibians""",
"""amphisbaenians""",
"""ampk""",
"""ampk-mediated""",
"""ample""",
"""amplicon""",
"""amplification""",
"""amplifications""",
"""amplitude""",
"""amplitudes""",
"""ampulloma""",
"""amputation""",
"""amygdala""",
"""amygdala-pfc""",
"""amyloid""",
"""amyloidogenic""",
"""amyloidosis""",
"""amyotrophic""",
"""anaemia""",
"""anaerobes""",
"""anaerobic""",
"""anaesthesia""",
"""anaesthesiologist""",
"""anaesthetic""",
"""anaesthetist""",
"""anaesthetized""",
"""anal""",
"""analgesia""",
"""analgesic""",
"""analgesics""",
"""analisadas""",
"""analog""",
"""analog-to-digital""",
"""analogic""",
"""analogue""",
"""analyse""",
"""analysed""",
"""analyses""",
"""analysing""",
"""analysis""",
"""analytes""",
"""analytic""",
"""analytical""",
"""analytically""",
"""analyze""",
"""analyzed""",
"""analyzer""",
"""analyzes""",
"""analyzing""",
"""anamnesis""",
"""anamnestic""",
"""anaphylaxis""",
"""anaphylaxis-related""",
"""anaplastic""",
"""anastomoses""",
"""anastomosing""",
"""anastomosis""",
"""anastomotic""",
"""anatomic""",
"""anatomical""",
"""anatomically""",
"""anatomically-accurate""",
"""anatomy""",
"""anbessa""",
"""anc""",
"""ancestor""",
"""ancestral""",
"""ancestry""",
"""anchor-based""",
"""anchoring""",
"""ancient""",
"""ancillary""",
"""and""",
"""anderson-sekhon""",
"""andrew""",
"""androgen""",
"""androgen-driven""",
"""androgenic""",
"""androgenization""",
"""androgens""",
"""androstenedione""",
"""anecdotal""",
"""anemia""",
"""anemogenic""",
"""anesthesia""",
"""anesthesiologists""",
"""anesthetic""",
"""anesthetics""",
"""aneuploidy""",
"""aneurysm""",
"""aneurysms""",
"""angelman""",
"""angina""",
"""angiogenesis""",
"""angiogenesis-""",
"""angiogenesis-related""",
"""angiogenic""",
"""angiogram""",
"""angiograms""",
"""angiographic""",
"""angiography""",
"""angiotensin""",
"""angiotensin-converting""",
"""angle""",
"""angle-measurement""",
"""angles""",
"""angular""",
"""angulated""",
"""angulation""",
"""angustirostris""",
"""anhedonia""",
"""anhedonia-like""",
"""ani""",
"""animal""",
"""animal-human""",
"""animalis""",
"""animals""",
"""animation""",
"""anion""",
"""anions""",
"""anisotropy""",
"""ankle""",
"""ankylosing""",
"""anne""",
"""anneal""",
"""annealed""",
"""annealing""",
"""annexin""",
"""annihilation""",
"""annotate""",
"""annotated""",
"""annotating""",
"""annotation""",
"""annotations""",
"""annotators""",
"""annual""",
"""annualized""",
"""annually""",
"""annular""",
"""annuum""",
"""ano""",
"""anolis""",
"""anomalies""",
"""anomaly""",
"""anonymous""",
"""anorexia""",
"""anos""",
"""another""",
"""anova""",
"""anovulation""",
"""anoxic""",
"""ansd""",
"""answer""",
"""answered""",
"""answers""",
"""antagonistically""",
"""antagonists""",
"""anteater""",
"""antegrade""",
"""antemortem-postmortem""",
"""antenatal""",
"""antepartum""",
"""anterior""",
"""anterior-posterior""",
"""anteversion""",
"""anthesis""",
"""anthracycline""",
"""anthracyclines""",
"""anthropogenic""",
"""anthropogenic-exclusive""",
"""anthropometric""",
"""anthropometrics""",
"""anthropometry""",
"""anti-a""",
"""anti-a1""",
"""anti-a2""",
"""anti-apoptosis""",
"""anti-apoptotic""",
"""anti-b""",
"""anti-bp180""",
"""anti-bp230""",
"""anti-citrullinated""",
"""anti-cll""",
"""anti-desmoglein""",
"""anti-ganglioside""",
"""anti-hbs""",
"""anti-hipertensivos""",
"""anti-hypertensives""",
"""anti-il-17""",
"""anti-inflammatory""",
"""anti-leukaemia""",
"""anti-lipogenic""",
"""anti-migraine""",
"""anti-mrsa""",
"""anti-obesity""",
"""anti-oxidant""",
"""anti-pd-1""",
"""anti-pd1""",
"""anti-polymerase""",
"""anti-prion""",
"""anti-rdrp""",
"""anti-thrombotics""",
"""anti-tumor""",
"""anti-vista""",
"""antiangiogenesis""",
"""antiarrhythmics""",
"""antibiotic""",
"""antibiotics""",
"""antibodies""",
"""antibody""",
"""antibody-dependent""",
"""antibody-mediated""",
"""anticancer""",
"""anticentromere""",
"""anticholinergic""",
"""anticholinesterase""",
"""anticipate""",
"""anticipated""",
"""anticipation""",
"""anticoagulants""",
"""anticoagulation""",
"""anticodons""",
"""antidepressant""",
"""antidepressants""",
"""antidotal""",
"""antiepileptic""",
"""antiepileptics""",
"""antifibrinolytic""",
"""antifungal""",
"""antigen""",
"""antigen-specific""",
"""antigenic""",
"""antigens""",
"""antiinflammatory""",
"""antileukemia""",
"""antimalarial""",
"""antimalarials""",
"""antimicrobial""",
"""antioxidant""",
"""antioxidants""",
"""antioxidation""",
"""antioxidative""",
"""antiphase""",
"""antiphospholipid""",
"""antiplatelet""",
"""antiplatelets""",
"""antiretroviral""",
"""antirheumatic""",
"""antisense""",
"""antiseptics""",
"""antitachycardia""",
"""antithrombotic""",
"""antithyroid""",
"""antitissue""",
"""antitumor""",
"""antitumoral""",
"""antiulcerative""",
"""antiviral""",
"""antral""",
"""ants""",
"""anura""",
"""anxiety""",
"""anxious""",
"""any""",
"""anyone""",
"""anyway""",
"""anywhere""",
"""aod""",
"""aor""",
"""aorta""",
"""aortic""",
"""ap2""",
"""apa""",
"""apache""",
"""apart""",
"""apathy""",
"""apc""",
"""apc-driven""",
"""apenas""",
"""aperiodicity""",
"""apes""",
"""apgar""",
"""apheresis""",
"""api""",
"""apis""",
"""apnea""",
"""apnea-hypopnea""",
"""apnoea""",
"""apoa-i""",
"""apob""",
"""apoe""",
"""apoplexy""",
"""apoptosis""",
"""apoptosis-inducing""",
"""apoptotic""",
"""apostichopus""",
"""app""",
"""appalachian""",
"""apparatus""",
"""apparent""",
"""apparently""",
"""apparition""",
"""appealing""",
"""appear""",
"""appearance""",
"""appearances""",
"""appeared""",
"""appearing""",
"""appears""",
"""appendectomy""",
"""appendicitis""",
"""appendix""",
"""applanation""",
"""apple""",
"""applicability""",
"""applicable""",
"""applicant""",
"""applicants""",
"""application""",
"""applications""",
"""applied""",
"""applies""",
"""apply""",
"""applying""",
"""appointment""",
"""appointments""",
"""appraise""",
"""appraised""",
"""appraising""",
"""appreciated""",
"""approach""",
"""approach-were""",
"""approached""",
"""approaches""",
"""approaching""",
"""appropriate""",
"""appropriately""",
"""appropriateness""",
"""approval""",
"""approvals""",
"""approved""",
"""approving""",
"""approximate""",
"""approximated""",
"""approximately""",
"""approximation""",
"""apr""",
"""apr1""",
"""apr4""",
"""apremilast""",
"""apresentou-se""",
"""april""",
"""april-december""",
"""apriori""",
"""aprs""",
"""aps""",
"""aps-related""",
"""aqlq""",
"""aqp""",
"""aqp1""",
"""aquaculture""",
"""aquaporin""",
"""aquatic""",
"""aqueles""",
"""aqueous""",
"""arab""",
"""arabic""",
"""arabica""",
"""arabidopsis""",
"""arachidonic""",
"""arachnoid""",
"""araf""",
"""arb""",
"""arbitrating""",
"""arbs""",
"""arc""",
"""arc-520""",
"""arch""",
"""archaea""",
"""archaeon""",
"""arches""",
"""architect""",
"""architectural""",
"""architecture""",
"""architectures""",
"""arctocephalus""",
"""arcus""",
"""ards""",
"""are""",
"""area""",
"""area-level""",
"""areas""",
"""areds""",
"""arena""",
"""arenas""",
"""arginine""",
"""argonautes""",
"""arguably""",
"""argue""",
"""argued""",
"""arguing""",
"""argyle""",
"""ari""",
"""aric""",
"""arid1a""",
"""arid5a""",
"""arid5a-deficient""",
"""arise""",
"""arises""",
"""arising""",
"""arksey""",
"""arm""",
"""armadillo""",
"""armamentarium""",
"""arms""",
"""army""",
"""arni""",
"""arnr""",
"""around""",
"""arousal""",
"""aroused""",
"""arq""",
"""arr""",
"""arrange""",
"""arrangement""",
"""arranging""",
"""array""",
"""arrays""",
"""arrest""",
"""arrested""",
"""arrests""",
"""arrhythmia""",
"""arrhythmias""",
"""arrival""",
"""arrived""",
"""arrythmia""",
"""arsenal""",
"""arsenite""",
"""art""",
"""arterial""",
"""arteries""",
"""arterio-portal""",
"""arteriogenesis""",
"""arteriovenous""",
"""arteritis""",
"""artery""",
"""artery-first""",
"""arthralgia""",
"""arthritides""",
"""arthritis""",
"""arthrodesis""",
"""arthroplasties""",
"""arthroplasty""",
"""arthroscopic""",
"""arthroscopy""",
"""article""",
"""articles""",
"""articular""",
"""articulate""",
"""articulating""",
"""articulation""",
"""articulatory""",
"""artifact""",
"""artifacts""",
"""artificial""",
"""artificial-intelligence-""",
"""artificially""",
"""arundel""",
"""arvc""",
"""as-needed""",
"""as-prepared""",
"""asa""",
"""asas""",
"""asbestos""",
"""ascending""",
"""ascent""",
"""ascertain""",
"""ascertainment""",
"""ascidians""",
"""ascites""",
"""asclepius""",
"""asclepius-health""",
"""ascorbate""",
"""ascorbic""",
"""ascribed""",
"""asd""",
"""asdas""",
"""asexual""",
"""ash""",
"""asia""",
"""asian""",
"""asians""",
"""asiatic""",
"""asics""",
"""aside""",
"""ask""",
"""asked""",
"""asking""",
"""asleep""",
"""asp""",
"""asp-bound""",
"""asp-x""",
"""aspartate""",
"""aspartyl""",
"""aspect""",
"""aspects""",
"""asphyxiants""",
"""asphyxiation""",
"""aspirated""",
"""aspirates""",
"""aspiration""",
"""aspirations""",
"""aspirin""",
"""asra""",
"""assaults""",
"""assay""",
"""assays""",
"""assembled""",
"""assembles""",
"""assemblies""",
"""assembly""",
"""assertion""",
"""asses""",
"""assess""",
"""assessed""",
"""assesses""",
"""assessing""",
"""assessment""",
"""assessments""",
"""asset""",
"""assets""",
"""assigned""",
"""assigning""",
"""assignment""",
"""assignments""",
"""assimilation""",
"""assist""",
"""assistance""",
"""assistant""",
"""assisted""",
"""assisting""",
"""associate""",
"""associated""",
"""associates""",
"""associating""",
"""association""",
"""associations""",
"""associative""",
"""assume""",
"""assumed""",
"""assuming""",
"""assumption""",
"""assumptions""",
"""assurance""",
"""assure""",
"""ast""",
"""asthenia""",
"""asthma""",
"""asthma-related""",
"""asthmatics""",
"""astigmatism""",
"""astrazeneca""",
"""astrobiological""",
"""astrobiology""",
"""astrocyte""",
"""astrocytes""",
"""astrocytic""",
"""astrocytomas""",
"""astroglial""",
"""asymmetric""",
"""asymmetrical""",
"""asymmetrically""",
"""asymptomatic""",
"""asymptotic""",
"""asynchronous""",
"""at""",
"""at-home""",
"""at-rich""",
"""at-risk""",
"""ata""",
"""ataques""",
"""ataxia""",
"""ataxias""",
"""atdc5""",
"""atelectasis""",
"""atempt""",
"""atendimentos""",
"""atenolol""",
"""atf1""",
"""atf3""",
"""atf3-dependent""",
"""atf4-dependent""",
"""atg""",
"""atheromatosis""",
"""atherosclerosis""",
"""atherosclerotic""",
"""athletes""",
"""athletic""",
"""atlantic""",
"""atlantoaxial""",
"""atlases""",
"""atm""",
"""atm-ctip-mre11""",
"""atmospheric""",
"""atolls""",
"""atomic""",
"""atomistic""",
"""atoms""",
"""atonia""",
"""atony""",
"""atopic""",
"""atp""",
"""atp-30""",
"""atpase""",
"""atraumatic""",
"""atrial""",
"""atrial-ventricular""",
"""atrophy""",
"""attached""",
"""attaching""",
"""attachment""",
"""attack""",
"""attacks""",
"""attain""",
"""attained""",
"""attempt""",
"""attempted""",
"""attempting""",
"""attempts""",
"""attend""",
"""attended""",
"""attending""",
"""attention""",
"""attentional""",
"""attentions""",
"""attenuata""",
"""attenuate""",
"""attenuated""",
"""attenuates""",
"""attenuating""",
"""attenuation""",
"""attested""",
"""attitude""",
"""attitudes""",
"""attp""",
"""attract""",
"""attracting""",
"""attractive""",
"""attractiveness""",
"""attributable""",
"""attribute""",
"""attributed""",
"""attributes""",
"""attributing""",
"""attribution""",
"""attrition""",
"""atypia""",
"""atypical""",
"""aua""",
"""auc""",
"""aucaw""",
"""aucun""",
"""aud""",
"""audio""",
"""audiometry""",
"""audit""",
"""auditory""",
"""auditory-perceptual""",
"""audits""",
"""augment""",
"""augmentant""",
"""augmentation""",
"""augmented""",
"""augments""",
"""august""",
"""aumentar""",
"""aur""",
"""aureus""",
"""auricular""",
"""auroc""",
"""aus""",
"""auscultation""",
"""australia""",
"""australian""",
"""authentic""",
"""author""",
"""authored""",
"""authorities""",
"""authority""",
"""authorizes""",
"""authors""",
"""autism""",
"""autism-as""",
"""auto-encoding""",
"""autoantibodies""",
"""autoantibody""",
"""autoantigens""",
"""autocorrelation""",
"""autocrine""",
"""autoencoder""",
"""autofluorescence""",
"""autogenic""",
"""autoimmune""",
"""autoimmunity""",
"""autologous""",
"""automated""",
"""automatic""",
"""automatically""",
"""automating""",
"""autonomic""",
"""autonomous""",
"""autonomously""",
"""autophagic""",
"""autophagy""",
"""autophagy-deficient""",
"""autophagy-dependent""",
"""autophagy-induced""",
"""autoreactive""",
"""autoregressive""",
"""autoregulation""",
"""autoregulatory""",
"""autosomal""",
"""autotransplantation""",
"""aux""",
"""auxin""",
"""auxin-induced""",
"""auxin-regulated""",
"""auxologic""",
"""availability""",
"""available""",
"""avait""",
"""avaliados""",
"""avaliou""",
"""avental""",
"""avenue""",
"""avenues""",
"""average""",
"""averaging""",
"""avert""",
"""avex""",
"""avian""",
"""avlt""",
"""avlt-delay""",
"""avlt-immediate""",
"""avlt-recognition""",
"""avmc""",
"""avoid""",
"""avoidable""",
"""avoidance""",
"""avoided""",
"""avoiders""",
"""avoiding""",
"""avoids""",
"""avp""",
"""avr""",
"""avulsion""",
"""avulsions""",
"""aw98""",
"""awaiting""",
"""awake""",
"""awakening""",
"""awakenings""",
"""award""",
"""aware""",
"""awareness""",
"""away""",
"""awd""",
"""awkward""",
"""axes""",
"""axial""",
"""axilla""",
"""axillaire""",
"""axillary""",
"""axis""",
"""axon""",
"""axonal""",
"""axons""",
"""axum""",
"""ayahuasca""",
"""azathioprine""",
"""azd1222""",
"""azide""",
"""b-cell""",
"""b-cells""",
"""b-dna""",
"""b-mode""",
"""b100""",
"""b12""",
"""b2-labeled""",
"""babesia-like""",
"""babies""",
"""baby""",
"""bacilli""",
"""bacillus""",
"""back""",
"""back-translation""",
"""backbone""",
"""backfolding""",
"""background""",
"""backpropagation""",
"""backward""",
"""backwards""",
"""bacteria""",
"""bacterial""",
"""bacterium""",
"""bafilomycin""",
"""bag""",
"""bag-1""",
"""bag-mask""",
"""bag-valve-mask""",
"""baited""",
"""baixo""",
"""baja""",
"""bal""",
"""bal-intervention""",
"""balance""",
"""balanced""",
"""balances""",
"""balancing""",
"""balb-c""",
"""ball""",
"""balloon-expandable""",
"""ban""",
"""band""",
"""bang""",
"""bangladesh""",
"""bangladeshi""",
"""bank""",
"""bannenberg""",
"""bans""",
"""barcelona""",
"""bare""",
"""bariatric""",
"""barium""",
"""bark""",
"""barotrauma""",
"""barre""",
"""barretos""",
"""barrett""",
"""barrier""",
"""barriers""",
"""bartholin""",
"""barts""",
"""basal""",
"""basal-branching""",
"""basaloid""",
"""base""",
"""based""",
"""baseline""",
"""baseline-dependent""",
"""basement""",
"""basic""",
"""basin""",
"""basis""",
"""basketball""",
"""basophil""",
"""basophilic""",
"""basophils""",
"""bat""",
"""batch""",
"""batched""",
"""bath""",
"""bathing""",
"""batteries""",
"""battery""",
"""bayesian""",
"""bayley""",
"""bayley-iii""",
"""bb-12""",
"""bbb""",
"""bbc""",
"""bbe""",
"""bbti""",
"""bcbm""",
"""bcc""",
"""bcd""",
"""bcdx2""",
"""bcg""",
"""bcl-2""",
"""bcl-xl""",
"""bcl2""",
"""bcl6""",
"""bcor""",
"""bcr""",
"""bcva""",
"""bd-i""",
"""bd-ipmn""",
"""bdla""",
"""bdp""",
"""be20ng""",
"""bea""",
"""beacon-exacerbating""",
"""bead""",
"""beadchip""",
"""beads""",
"""beak""",
"""beaked""",
"""beam""",
"""beam-induced""",
"""beam-sensitive""",
"""bearing""",
"""beat""",
"""beats""",
"""became""",
"""because""",
"""become""",
"""becomes""",
"""becoming""",
"""bed""",
"""bed-site""",
"""bed-time""",
"""bedridden""",
"""bee""",
"""been""",
"""bees""",
"""before""",
"""before-after""",
"""began""",
"""begin""",
"""beginners""",
"""beginning""",
"""begins""",
"""behalf""",
"""behaving""",
"""behavior""",
"""behavioral""",
"""behaviors""",
"""behaviour""",
"""behavioural""",
"""behaviours""",
"""behind""",
"""being""",
"""belgium""",
"""belief""",
"""beliefs""",
"""believe""",
"""believed""",
"""belize""",
"""belizean""",
"""belonged""",
"""belonging""",
"""belongs""",
"""below""",
"""belt""",
"""belts""",
"""bench-to-bedside""",
"""benchmark""",
"""benchmarking""",
"""bendamustine""",
"""beneficial""",
"""beneficiaries""",
"""beneficiary""",
"""benefit""",
"""benefited""",
"""benefits""",
"""beni-suef""",
"""benign""",
"""benign-""",
"""benralizumab""",
"""benzathine""",
"""benznidazole""",
"""benzodiazepine""",
"""benzodiazepines""",
"""berghei""",
"""berry""",
"""beru""",
"""berus""",
"""besides""",
"""best""",
"""best-corrected""",
"""best-practice""",
"""bet""",
"""beta""",
"""beta-1""",
"""beta-blockers""",
"""beta-carotene""",
"""beta-lactam""",
"""beta-mhc""",
"""beta-thalassemia""",
"""betaxolol""",
"""better""",
"""better-quality""",
"""betula""",
"""between""",
"""between-component""",
"""between-group""",
"""between-subject""",
"""bevantolol""",
"""beverage""",
"""beverages""",
"""beyond""",
"""bf-iol""",
"""bf-iols""",
"""bfh""",
"""bfhs""",
"""bhaktapur""",
"""bhd""",
"""bhramari""",
"""bi-weekly""",
"""biallelic""",
"""bias""",
"""biased""",
"""biasedly""",
"""biases""",
"""biasing""",
"""bibliographies""",
"""bibliography""",
"""bibliometric""",
"""bicarbonate""",
"""bidirectional""",
"""bidirectionally""",
"""bifidobacteria""",
"""bifidobacterium""",
"""bigger""",
"""bilateral""",
"""bile""",
"""bilevel""",
"""biliary""",
"""bilingually""",
"""bilirubin""",
"""bille""",
"""billing""",
"""billion""",
"""billions""",
"""biloba""",
"""bimanual""",
"""bimekizumab""",
"""bimodal""",
"""bimolecular""",
"""binary""",
"""binasal""",
"""bind""",
"""binding""",
"""binds""",
"""bing""",
"""binge""",
"""binge-eating""",
"""binomial""",
"""bio""",
"""bio-fluids""",
"""bio-inks""",
"""bio-mechanical""",
"""bio-stimulatory""",
"""bioabsorbable""",
"""bioactive""",
"""bioassays""",
"""bioavailability""",
"""biobank""",
"""biobased""",
"""biochemical""",
"""biochemically""",
"""biochemistry""",
"""bioconversion""",
"""biodegradability""",
"""biodegradable""",
"""biodegradation""",
"""biodiversity""",
"""bioenergetic""",
"""biofabrication""",
"""biofeedback""",
"""biofilm""",
"""biofilm-inducing""",
"""biofilm-promoting""",
"""biofilms""",
"""biofluid-based""",
"""biofluids""",
"""biogas""",
"""biogenesis""",
"""biogenic""",
"""biogeographical""",
"""bioimaging""",
"""bioinformatic""",
"""bioinformatics""",
"""bioinformatics-oriented""",
"""bioinformation""",
"""bioinspired""",
"""biologic""",
"""biological""",
"""biologically""",
"""biologics""",
"""biologists""",
"""biology""",
"""biomarker""",
"""biomarker-based""",
"""biomarkers""",
"""biomaterial""",
"""biomaterials""",
"""biomechanical""",
"""biomed""",
"""biomedical""",
"""biomedicine""",
"""biomicroscopy""",
"""biomolecular""",
"""biomolecule""",
"""biomolecules""",
"""biopharmaceutical""",
"""biophysical""",
"""bioplastic""",
"""bioplastics""",
"""bioprinted""",
"""bioprinter""",
"""bioprosthetic""",
"""biopsied""",
"""biopsies""",
"""biopsy""",
"""biopsy-proven""",
"""bioremediation""",
"""biorepositories""",
"""biorepository""",
"""biosensing""",
"""biosis""",
"""biostatistics""",
"""biosynthesis""",
"""biosynthetic""",
"""biotech""",
"""biotechnological""",
"""biotechnologies""",
"""biotechnology""",
"""biotransformations""",
"""biotype""",
"""bipap""",
"""bipartite""",
"""biphasic""",
"""bipoc""",
"""bipolar-i""",
"""bir""",
"""birch""",
"""bird""",
"""birds""",
"""birmingham""",
"""birth""",
"""births""",
"""birthweight""",
"""bis""",
"""bisexual""",
"""bisoprolol""",
"""bispecific""",
"""bisphenol""",
"""bisphosphonates""",
"""bisulfite""",
"""bite""",
"""bites""",
"""bivariable""",
"""bivariate""",
"""biventricular""",
"""bivittatum""",
"""biweekly""",
"""black""",
"""blacks""",
"""bladder""",
"""bladder-centric""",
"""blade""",
"""blakpc-2""",
"""bland-altman""",
"""blandm-1""",
"""blast""",
"""blastema""",
"""blastomycosis""",
"""blasts""",
"""bleed""",
"""bleeding""",
"""bleedings""",
"""bleomycin""",
"""blind""",
"""blinded""",
"""blinding""",
"""blindness""",
"""blistering""",
"""blisters""",
"""bloating""",
"""block""",
"""blockade""",
"""blocked""",
"""blocker""",
"""blockers""",
"""blocking""",
"""blocks""",
"""blood""",
"""blood-based""",
"""blood-brain""",
"""blood-derived""",
"""blood-spinal""",
"""bloodstream""",
"""bloqueio""",
"""blot""",
"""blotch""",
"""blotting""",
"""blowing""",
"""bls""",
"""blue""",
"""blue-reflectance""",
"""blue-white""",
"""blueprint""",
"""bluetooth""",
"""blunt""",
"""blunted""",
"""bm1""",
"""bmd""",
"""bmi""",
"""bmj""",
"""bml""",
"""bmls""",
"""bmq""",
"""bnp""",
"""bnt""",
"""board""",
"""board-approved""",
"""boating""",
"""bodies""",
"""bodily""",
"""body""",
"""body-exposure""",
"""bodyweight""",
"""bohm""",
"""bois""",
"""bologna""",
"""bolus""",
"""bom""",
"""bond""",
"""bond-to-bond""",
"""bonded""",
"""bonding""",
"""bonds""",
"""bone""",
"""bone-regeneration""",
"""bone-related""",
"""bones""",
"""bonferroni""",
"""bonnes""",
"""book""",
"""books""",
"""boost""",
"""boosted""",
"""boosting""",
"""bootstrap-based""",
"""border""",
"""borderline""",
"""borders""",
"""bordetella""",
"""born""",
"""borrelia""",
"""bortezomib""",
"""bortezomib-refractory""",
"""boston""",
"""both""",
"""both-sided""",
"""bothersome""",
"""bottle""",
"""bottles""",
"""bottom-up""",
"""bound""",
"""boundaries""",
"""boundary""",
"""bounding""",
"""bout""",
"""bovine""",
"""bowel""",
"""boxes""",
"""boy""",
"""boyfriends""",
"""boys""",
"""bp180""",
"""bp230""",
"""bpa""",
"""bpg""",
"""bpng""",
"""bpt""",
"""br005""",
"""brachial""",
"""brachionus""",
"""brachyurus""",
"""bracket""",
"""brackets""",
"""bradshaw""",
"""brady-arrhythmias""",
"""bradypus""",
"""braf""",
"""braf-""",
"""braf-inhibitors""",
"""braf-mutated""",
"""braf-targeting""",
"""braf-wild""",
"""brain""",
"""brain-behavior""",
"""brain-enriched""",
"""brains""",
"""brainstem""",
"""brake""",
"""branch""",
"""branch-duct""",
"""branched""",
"""branches""",
"""branching""",
"""branco""",
"""brand""",
"""brands""",
"""bras""",
"""brasil""",
"""brasiliensis""",
"""brass""",
"""brazil""",
"""brazilian""",
"""brca""",
"""breached""",
"""bread""",
"""breadth""",
"""break""",
"""breakdown""",
"""breaking""",
"""breakpoint""",
"""breaks""",
"""breakthrough""",
"""breakthroughs""",
"""breast""",
"""breastfed""",
"""breastfeeding""",
"""breath""",
"""breath-test""",
"""breathiness""",
"""breathing""",
"""breathing-phase""",
"""breathy""",
"""breeding""",
"""breeds""",
"""brepocitinib""",
"""brexanolone""",
"""bridge""",
"""bridges""",
"""bridging""",
"""brief""",
"""briefly""",
"""briggs""",
"""bring""",
"""bringing""",
"""brings""",
"""brisbane""",
"""britain""",
"""britannia""",
"""british""",
"""brittle""",
"""broad""",
"""broadcast""",
"""broaden""",
"""broadening""",
"""broadens""",
"""broader""",
"""broadly""",
"""broken""",
"""bromalizumab""",
"""bromodomain""",
"""bronchi""",
"""bronchial""",
"""bronchitis""",
"""bronchoalveolar""",
"""bronchogenic""",
"""bronchomalacia""",
"""bronchopulmonary""",
"""bronchoscope""",
"""bronchoscopic""",
"""bronchoscopy""",
"""bronchus""",
"""bronx""",
"""brought""",
"""brown""",
"""brown-throated""",
"""browning""",
"""brows""",
"""browsed""",
"""browser""",
"""bruising""",
"""brushing""",
"""bruton""",
"""bruvs""",
"""bsab""",
"""bsab-induced""",
"""bscb""",
"""bslr""",
"""bsmltg""",
"""bsp""",
"""btc""",
"""btcs""",
"""btk""",
"""btkcys481""",
"""btkcys481ser""",
"""btkcys481ser-expressing""",
"""btki""",
"""btki-naive""",
"""btki-treated""",
"""btkis""",
"""btkwt""",
"""btkwt-""",
"""btkwt-expressing""",
"""btm""",
"""bubble""",
"""buchholz""",
"""buckypaper""",
"""bud""",
"""budd-chiari""",
"""budding""",
"""budget""",
"""buenos""",
"""buettiker""",
"""buffalo""",
"""buffers""",
"""build""",
"""building""",
"""buildingexodus""",
"""built""",
"""built-in""",
"""bulbar""",
"""bulging""",
"""bulimia""",
"""bulk""",
"""bullae""",
"""bullous""",
"""bun""",
"""bundle""",
"""bundling""",
"""buprenorphine""",
"""burden""",
"""burdens""",
"""bureau""",
"""burgdorferi""",
"""burgeoning""",
"""buried""",
"""burn""",
"""burn-associated""",
"""burn-induced""",
"""burn-specific""",
"""burned""",
"""burning""",
"""burnout""",
"""burns""",
"""bursting""",
"""buscam""",
"""business""",
"""busy""",
"""but""",
"""button""",
"""butyl""",
"""butyrate""",
"""buy""",
"""bvs""",
"""bws""",
"""bxd""",
"""by""",
"""bypass""",
"""bypassed""",
"""bypassing""",
"""bypcr""",
"""byrd""",
"""bystander""",
"""bystander-initiated""",
"""c-arm""",
"""c-di-gmp""",
"""c-index""",
"""c-kit""",
"""c-kit1""",
"""c-kit2""",
"""c-means""",
"""c-myc""",
"""c-reactive""",
"""c-terminal""",
"""c19mc""",
"""c1s""",
"""c2c12""",
"""c3t""",
"""c4-activating""",
"""c4d""",
"""c9orf72""",
"""ca125""",
"""ca15-3""",
"""caail1""",
"""cab""",
"""cabg""",
"""cable""",
"""cables""",
"""cac""",
"""cacao""",
"""cacaofunesta""",
"""cachexia""",
"""cacna1a""",
"""cacna1d""",
"""cad""",
"""cadaveric""",
"""cade""",
"""cadherins""",
"""cadmium""",
"""caeas""",
"""caecilians""",
"""caenorhabditis""",
"""caep""",
"""caeps""",
"""caesarean""",
"""cafeteria""",
"""caffeine""",
"""cag""",
"""cagata17""",
"""cagr""",
"""cagtf3a""",
"""cah""",
"""cairo""",
"""caiu""",
"""calcarius""",
"""calcification""",
"""calcified""",
"""calcifying""",
"""calcineurin""",
"""calcium""",
"""calcium-dependent""",
"""calcium-rich""",
"""calculate""",
"""calculated""",
"""calculating""",
"""calculation""",
"""calculations""",
"""calculator""",
"""calendar""",
"""calgary""",
"""calibrate""",
"""calibrated""",
"""calibration""",
"""calidad""",
"""california""",
"""californianus""",
"""call""",
"""callback""",
"""called""",
"""caller""",
"""callers""",
"""calling""",
"""callorhinus""",
"""callosum""",
"""calls""",
"""calorie""",
"""calorie-restricted""",
"""calot""",
"""calprotectin""",
"""calvarial""",
"""calyciflorus""",
"""cambridge""",
"""came""",
"""camera""",
"""cameras""",
"""cameroon""",
"""cameroonian""",
"""camkk2""",
"""camp""",
"""campaign""",
"""campaigns""",
"""camptothecin""",
"""campylobacter""",
"""can""",
"""canada""",
"""canadian""",
"""canal""",
"""cancelled""",
"""cancer""",
"""cancer-associated""",
"""cancer-directed""",
"""cancer-related""",
"""cancer-specific""",
"""canceration""",
"""cancers""",
"""cancrivorus""",
"""candida""",
"""candidaemia""",
"""candidate""",
"""candidates""",
"""candidiasis""",
"""canine""",
"""cannabidiol""",
"""cannabinoids""",
"""cannabis""",
"""cannabis-related""",
"""cannulation""",
"""canonical""",
"""cap""",
"""cap1""",
"""capabilities""",
"""capability""",
"""capable""",
"""capacitance""",
"""capacities""",
"""capacitive""",
"""capacitor""",
"""capacity""",
"""capillaroscopy""",
"""capillary""",
"""capital""",
"""capitalist""",
"""capitalize""",
"""capitalizes""",
"""capitalizing""",
"""caprini""",
"""caps""",
"""capsicum""",
"""capsule""",
"""captive""",
"""capture""",
"""captured""",
"""capturing""",
"""car""",
"""car-t""",
"""carap2-7""",
"""carbamate""",
"""carbamazepine""",
"""carbamazepine-dependent""",
"""carbamazepine-induced""",
"""carbapenemase""",
"""carbapenemase-producing""",
"""carbapenemases""",
"""carbohydrate""",
"""carbohydrates""",
"""carbon""",
"""carboplatin""",
"""carboxy""",
"""carboxyhemoglobin""",
"""carboxyl""",
"""carceral""",
"""carcinoembryonic""",
"""carcinogenesis""",
"""carcinogenic""",
"""carcinogenicity""",
"""carcinoma""",
"""carcinomas""",
"""cardia""",
"""cardiac""",
"""cardinal""",
"""cardio""",
"""cardio-respiratory""",
"""cardiogenic""",
"""cardioid""",
"""cardioids""",
"""cardiol""",
"""cardiologia""",
"""cardiology""",
"""cardiometabolic""",
"""cardiomyocyte""",
"""cardiomyocyte-specific""",
"""cardiomyocytes""",
"""cardiomyopathy""",
"""cardioproteinopathy""",
"""cardiopulmonary""",
"""cardiorenal""",
"""cardiorespiratory""",
"""cardiotoxic""",
"""cardiovascular""",
"""cardiovascular-related""",
"""cardioversions""",
"""cardioverter""",
"""cardioverter-defibrillator""",
"""care""",
"""care-based""",
"""career""",
"""careful""",
"""carefully""",
"""careggi""",
"""caregiver""",
"""caregiver-child""",
"""caregivers""",
"""carers""",
"""cares""",
"""caretaking""",
"""carfentanil""",
"""carfilzomib""",
"""cargo""",
"""caribbean""",
"""caries""",
"""caring""",
"""carlo""",
"""carney""",
"""carolina""",
"""carotid""",
"""carotid-to-femoral""",
"""carpal""",
"""carreau""",
"""carriage""",
"""carried""",
"""carrier""",
"""carrier-mediated""",
"""carriers""",
"""carries""",
"""carry""",
"""carrying""",
"""cars""",
"""cart""",
"""cartilage""",
"""cas""",
"""cas9-knockout""",
"""cascade""",
"""cascade-independent""",
"""cascade-unrelated""",
"""cascaded""",
"""cascades""",
"""case""",
"""case-based""",
"""case-control""",
"""case-crossover""",
"""case-time-control""",
"""caseload""",
"""cases""",
"""casos""",
"""cast""",
"""castrate-resistant""",
"""castration""",
"""castration-resistance""",
"""castration-resistant""",
"""casts""",
"""casual""",
"""cat""",
"""catabolic""",
"""catabolism""",
"""catalanensis""",
"""catalase""",
"""catalog""",
"""catalonia""",
"""catalysis""",
"""catalyst""",
"""catalysts""",
"""catalytic""",
"""catalytically""",
"""catalyzed""",
"""cataplexy""",
"""cataract""",
"""cataracts""",
"""catastrophes""",
"""catastrophizing""",
"""catcf25""",
"""catch""",
"""catching""",
"""catchment""",
"""catecholamines""",
"""categorical""",
"""categories""",
"""categorised""",
"""categorization""",
"""categorized""",
"""category""",
"""cater""",
"""caterpillars""",
"""cathartics""",
"""cathepsin""",
"""catheter""",
"""catheterization""",
"""catheters""",
"""cations""",
"""cats""",
"""cattle""",
"""caucasian""",
"""caucasians""",
"""cauda""",
"""caudata""",
"""causada""",
"""causal""",
"""causality""",
"""causas""",
"""causative""",
"""cause""",
"""cause-specific""",
"""caused""",
"""causes""",
"""causing""",
"""cautery""",
"""caution""",
"""cautions""",
"""cautious""",
"""cava""",
"""cavitation""",
"""cavity""",
"""cavn1""",
"""caw""",
"""cbc""",
"""cbd""",
"""cbfv""",
"""cbm""",
"""cbp-triggered""",
"""cbpnm""",
"""cbpr""",
"""cbs""",
"""cbt""",
"""cc-by""",
"""ccc""",
"""cchs""",
"""cci""",
"""cck-8""",
"""ccl17""",
"""ccl2""",
"""ccl20""",
"""ccl22""",
"""ccn-51""",
"""ccn51""",
"""ccp""",
"""ccpr""",
"""ccrcc""",
"""ccrcc1""",
"""ccrcc1-4""",
"""ccrcc2""",
"""ccrt""",
"""ccs""",
"""ccta""",
"""ccts""",
"""cd-induced""",
"""cd1-restricted""",
"""cd117""",
"""cd123""",
"""cd14""",
"""cd147""",
"""cd160""",
"""cd20""",
"""cd200""",
"""cd28""",
"""cd3""",
"""cd33""",
"""cd34""",
"""cd38""",
"""cd4""",
"""cd61""",
"""cd62p""",
"""cd68""",
"""cd8""",
"""cdc""",
"""cdh1""",
"""cdk""",
"""cdk11b""",
"""cdk11b-induced""",
"""cdkis""",
"""cdkl3""",
"""cdks""",
"""cdls""",
"""cdpk4""",
"""cdr""",
"""cdsas""",
"""cdsm""",
"""cdva""",
"""cea""",
"""ceased""",
"""cec""",
"""ceguera""",
"""ceiling""",
"""celiac""",
"""cell""",
"""cell-associated""",
"""cell-cell""",
"""cell-context-specific""",
"""cell-cycle""",
"""cell-deleter""",
"""cell-derived""",
"""cell-driven""",
"""cell-free""",
"""cell-induced""",
"""cell-permeability""",
"""cell-specific""",
"""cell-surface""",
"""cell-targeted""",
"""cell-to-cell""",
"""cell-wall""",
"""cells""",
"""cellular""",
"""cellulose""",
"""celsius""",
"""celular""",
"""cement""",
"""cementless""",
"""census""",
"""centaur""",
"""center""",
"""center-based""",
"""center-specific""",
"""centered""",
"""centers""",
"""central""",
"""centralisation""",
"""centralization""",
"""centrally""",
"""centre""",
"""centres""",
"""centrifugation""",
"""centroids""",
"""centuries""",
"""century""",
"""ceo2""",
"""cepec2002""",
"""cependant""",
"""cephalometric""",
"""cephalopelvic""",
"""cephalosporin""",
"""ceramide""",
"""ceratocystis""",
"""cerca""",
"""cerdocyon""",
"""cerealis""",
"""cerebellar""",
"""cerebral""",
"""cerebrospinal""",
"""cerebrovascular""",
"""ceremonial""",
"""cerevisiae""",
"""certain""",
"""certainty""",
"""certeza""",
"""cervical""",
"""cervix""",
"""ces""",
"""cesarean""",
"""cesium""",
"""cessation""",
"""cet""",
"""cette""",
"""cf-pwv""",
"""cf-specific""",
"""cfa""",
"""cfd""",
"""cfd-based""",
"""cfd-predicted""",
"""cfdna""",
"""cfr""",
"""cfrd""",
"""cftr""",
"""cgl""",
"""cgmp""",
"""cgmp-dependent""",
"""cgvhd""",
"""chagas""",
"""chagasic""",
"""chain""",
"""chains""",
"""chalcedonica""",
"""chalkiness""",
"""challenge""",
"""challenged""",
"""challenges""",
"""challenging""",
"""chamar""",
"""chamber""",
"""chambers""",
"""chance""",
"""chances""",
"""change""",
"""changed""",
"""changement""",
"""changes""",
"""changing""",
"""channel""",
"""channels""",
"""chaperone""",
"""chapter""",
"""characterisation""",
"""characterise""",
"""characterised""",
"""characteristic""",
"""characteristically""",
"""characteristics""",
"""characterization""",
"""characterize""",
"""characterized""",
"""characterizing""",
"""charcoal""",
"""charge""",
"""charge-charge""",
"""charged""",
"""charlson""",
"""chart""",
"""charting""",
"""charts""",
"""chat""",
"""check-up""",
"""check-ups""",
"""checked""",
"""checkerboard""",
"""checking""",
"""checklist""",
"""checklist-25""",
"""checkpoint""",
"""checkups""",
"""cheese""",
"""chelate""",
"""chem""",
"""chemical""",
"""chemicals""",
"""chemistry""",
"""chemo-""",
"""chemoimmunotherapy""",
"""chemokines""",
"""chemoprophylaxis""",
"""chemoradiotherapy""",
"""chemoresistance""",
"""chemotherapeutic""",
"""chemotherapies""",
"""chemotherapy""",
"""chemotypes""",
"""chen""",
"""cheng""",
"""chest""",
"""chestnut-collared""",
"""cheung-bearelly""",
"""chez""",
"""chf""",
"""chg""",
"""chi""",
"""chi-square""",
"""chi-squared""",
"""chicago""",
"""chicken""",
"""chickens""",
"""chief""",
"""child""",
"""child-driven""",
"""child-elicited""",
"""child-level""",
"""child-pugh""",
"""child-sleep""",
"""child-to-parent""",
"""childbearing""",
"""childbirth""",
"""childhood""",
"""childhood-onset""",
"""children""",
"""chilean""",
"""chill""",
"""chimeric""",
"""china""",
"""chinese""",
"""chip""",
"""chip-qpcr""",
"""chip-seq""",
"""chips""",
"""chiral""",
"""chirality-controlled""",
"""chirality-regulated""",
"""chirality-related""",
"""chitinase""",
"""chitosan""",
"""chloride""",
"""chlorine-containing""",
"""chlorines""",
"""chloroflexota""",
"""chlorohydrate""",
"""chlorophyll""",
"""chloroquine""",
"""chm""",
"""chmi""",
"""chms""",
"""choice""",
"""choices""",
"""cholangiocarcinoma""",
"""cholangitic""",
"""cholecystectomy""",
"""cholerae""",
"""cholestatic""",
"""cholesterol""",
"""cholinergic""",
"""chondro-osseous""",
"""chondroblastoma-like""",
"""chondrocytes""",
"""chondrogenic""",
"""chongbai""",
"""choose""",
"""choosing""",
"""chordates""",
"""chordee""",
"""choreiform""",
"""choriocarcinoma""",
"""choroidal""",
"""chose""",
"""chosen""",
"""christian""",
"""chromatid""",
"""chromatids""",
"""chromatin""",
"""chromatographic""",
"""chromatography""",
"""chromatography-mass""",
"""chromatography-tandem""",
"""chromoblastomycosis""",
"""chromophore""",
"""chromosomal""",
"""chromosome""",
"""chromosomes""",
"""chronic""",
"""chronic-relapsing""",
"""chronically""",
"""chronification""",
"""chronological""",
"""chronology""",
"""chronos-3""",
"""chrysanthemum""",
"""chrysocyon""",
"""chss""",
"""chylothoraces""",
"""chylothorax""",
"""chyronhego""",
"""ci-mir""",
"""ci-mir-126""",
"""ci-mir-146a""",
"""ci-mir-155""",
"""ci-mir-181b""",
"""ci-mir-21""",
"""ci-mir-221""",
"""ci-mirs""",
"""cia""",
"""cibersort""",
"""cica""",
"""cicatricial""",
"""cidade""",
"""cidp""",
"""cie""",
"""cigar""",
"""cigarette""",
"""cigarettes""",
"""cigars""",
"""cihr""",
"""cihr-funded""",
"""cin""",
"""cinahl""",
"""cincinnati""",
"""cinco""",
"""cingulo-opercular""",
"""cinhal""",
"""cinquenta""",
"""cinta""",
"""circadian""",
"""circle""",
"""circles""",
"""circuit""",
"""circuitry""",
"""circuits""",
"""circular""",
"""circular-stapled""",
"""circularity""",
"""circularly""",
"""circulate""",
"""circulates""",
"""circulating""",
"""circulation""",
"""circulatory""",
"""circumference""",
"""circumferences""",
"""circumferential""",
"""circumscribed""",
"""circumspection""",
"""circumstances""",
"""circumvent""",
"""circumventing""",
"""cirratum""",
"""cirrhosis""",
"""cirrhotic""",
"""cis""",
"""cis-ab""",
"""cis-elements""",
"""cis-regulatory""",
"""cisgender""",
"""cisplatin-based""",
"""cistron""",
"""citation""",
"""citations""",
"""cite""",
"""cite-seq""",
"""cities""",
"""cities-in""",
"""citizen""",
"""citizens""",
"""citoprotectores""",
"""citrate""",
"""citrobacter""",
"""citrus""",
"""city""",
"""civil""",
"""ckc""",
"""ckd""",
"""claim""",
"""claims""",
"""claims-based""",
"""clamping""",
"""clar""",
"""clarified""",
"""clarifies""",
"""clarify""",
"""clarin-1""",
"""clarithromycin""",
"""clarity""",
"""clasping""",
"""class""",
"""classes""",
"""classic""",
"""classical""",
"""classically""",
"""classification""",
"""classifications""",
"""classified""",
"""classifier""",
"""classifiers""",
"""classify""",
"""classifying""",
"""clavien""",
"""clavien-dindo""",
"""clb""",
"""clc""",
"""clean""",
"""cleaner""",
"""cleaning""",
"""clear""",
"""clearance""",
"""clearing""",
"""clearly""",
"""cleavage""",
"""cleaves""",
"""cleaving""",
"""cleft""",
"""clefting""",
"""clerkship""",
"""cleveland""",
"""clients""",
"""climate""",
"""climate-induced""",
"""climatologists""",
"""climbing""",
"""clinal""",
"""clinic""",
"""clinical""",
"""clinical-educational""",
"""clinical-radiological""",
"""clinically""",
"""clinician""",
"""clinician-rated""",
"""clinicians""",
"""clinicopathologic""",
"""clinicopathological""",
"""clinics""",
"""clininal""",
"""clinique""",
"""clintrials""",
"""clip""",
"""clips""",
"""cll""",
"""clobazam""",
"""clomiphene""",
"""clonal""",
"""clonally""",
"""clone""",
"""clonic""",
"""clopidogrel""",
"""close""",
"""close-ended""",
"""closed""",
"""closely""",
"""closeness""",
"""closer""",
"""closest-age""",
"""closing""",
"""clostridium""",
"""closure""",
"""closures""",
"""clot""",
"""clotting""",
"""cloud""",
"""clrn1""",
"""clrn1-specific""",
"""cls""",
"""clues""",
"""cluster""",
"""cluster-randomised""",
"""clustered""",
"""clustering""",
"""clusters""",
"""cm2""",
"""cmacos5""",
"""cmc""",
"""cmcc""",
"""cme""",
"""cmlbd2""",
"""cmp""",
"""cmr""",
"""cms""",
"""cn-""",
"""cnas""",
"""cnc""",
"""cnf""",
"""cnfs""",
"""cni""",
"""cnki""",
"""cnn""",
"""cnn-lstm""",
"""cnn1""",
"""cnns""",
"""cns""",
"""cnv""",
"""co-authorship""",
"""co-culture""",
"""co-digesting""",
"""co-digestion""",
"""co-directional""",
"""co-directionally""",
"""co-expressed""",
"""co-expression""",
"""co-immunoprecipitations""",
"""co-ingesting""",
"""co-ingestion""",
"""co-ip""",
"""co-localization""",
"""co-located""",
"""co-morbidities""",
"""co-occurring""",
"""co-parallel""",
"""co-presence""",
"""co-receptor""",
"""co-repressors""",
"""co-use""",
"""co2""",
"""coaches""",
"""coagulant""",
"""coagulants""",
"""coagulase-negative""",
"""coagulation""",
"""coagulopathy""",
"""coarse""",
"""coarse-wool""",
"""coassemble""",
"""coassembly""",
"""coast""",
"""coated""",
"""coating""",
"""cobas""",
"""cocaethylene""",
"""cocaine""",
"""cocaine-""",
"""cocaine-associated""",
"""coccidioidomycosis""",
"""cochlea""",
"""cochleae""",
"""cochlear""",
"""cochran""",
"""cochrane""",
"""cocultures""",
"""cod""",
"""code""",
"""coded""",
"""codes""",
"""coding""",
"""codon""",
"""codons""",
"""coeff""",
"""coefficient""",
"""coefficients""",
"""coendou""",
"""coenzyme""",
"""coenzymes""",
"""coercion""",
"""coercive""",
"""coexist""",
"""coexistence""",
"""coexistent""",
"""coexisting""",
"""coexpression""",
"""coffea""",
"""coffee""",
"""coffee-shops""",
"""cognition""",
"""cognitive""",
"""cognitive-behavior""",
"""cogshall""",
"""cohen""",
"""coherence""",
"""coherent""",
"""cohesin""",
"""cohesive""",
"""cohort""",
"""cohorts""",
"""coiling""",
"""coin""",
"""coincides""",
"""coinciding""",
"""coinfections""",
"""col-0""",
"""col1a1""",
"""col1a2""",
"""col2a1""",
"""colateral""",
"""cold""",
"""cold-hot-neutral""",
"""cold-knife""",
"""cold-shock""",
"""colder""",
"""colectomy""",
"""coli""",
"""coli-type""",
"""colic""",
"""colitis""",
"""collaborated""",
"""collaboration""",
"""collaborations""",
"""collaborative""",
"""collagen""",
"""collagen-hybridizing""",
"""collagen-induced""",
"""collagenase""",
"""collagenases""",
"""collapse""",
"""collapses""",
"""collared""",
"""collated""",
"""collateral""",
"""collating""",
"""colleague""",
"""colleagues""",
"""collect""",
"""collected""",
"""collecting""",
"""collection""",
"""collections""",
"""collective""",
"""collectively""",
"""college""",
"""colleges""",
"""collimator""",
"""collisions""",
"""colocalized""",
"""cologne""",
"""colombia""",
"""colon""",
"""colonic""",
"""colonies""",
"""colonization""",
"""colonize""",
"""colonized""",
"""colonizing""",
"""colonography""",
"""colonoscopist""",
"""colonoscopy""",
"""colony""",
"""colony-formation""",
"""colony-forming""",
"""coloproctology""",
"""color""",
"""colorado""",
"""colorectal""",
"""colorimetric""",
"""coloring""",
"""colors""",
"""colour""",
"""columbia""",
"""column""",
"""columnar""",
"""columns""",
"""com""",
"""coma""",
"""combination""",
"""combinations""",
"""combinatory""",
"""combine""",
"""combined""",
"""combines""",
"""combining""",
"""combos""",
"""combustible""",
"""come""",
"""comes""",
"""comfirmed""",
"""comfort""",
"""comfortable""",
"""coming""",
"""command""",
"""commenced""",
"""commencement""",
"""commensals""",
"""comment""",
"""commentary""",
"""comments""",
"""commercial""",
"""commercialised""",
"""commercialization""",
"""commercially""",
"""commission""",
"""commitment""",
"""committed""",
"""committee""",
"""common""",
"""commoner""",
"""commonest""",
"""commonly""",
"""commonplace""",
"""commonwealth""",
"""commotion""",
"""commune""",
"""communicable""",
"""communicate""",
"""communication""",
"""communicative""",
"""communicators""",
"""communion""",
"""communities""",
"""community""",
"""community-acquired""",
"""community-based""",
"""community-care""",
"""community-controlled""",
"""community-dwelling""",
"""community-oriented""",
"""commuting""",
"""como""",
"""comorbid""",
"""comorbidities""",
"""comorbidity""",
"""comorbility""",
"""compact""",
"""companies""",
"""companion""",
"""company-sponsored""",
"""comparable""",
"""comparably""",
"""comparative""",
"""comparative-effectiveness""",
"""comparator""",
"""comparators""",
"""compare""",
"""compared""",
"""compares""",
"""comparing""",
"""comparison""",
"""comparisons""",
"""compartments""",
"""compassionate""",
"""compatibility""",
"""compatible""",
"""compelling""",
"""compensate""",
"""compensated""",
"""compensation""",
"""compensative""",
"""compensatory""",
"""competed""",
"""competence""",
"""competencies""",
"""competency""",
"""competing""",
"""competition""",
"""competitively""",
"""compile""",
"""compiled""",
"""complained""",
"""complaint""",
"""complaints""",
"""complement""",
"""complementarity""",
"""complementary""",
"""complementation""",
"""complete""",
"""completed""",
"""completely""",
"""completeness""",
"""completing""",
"""completion""",
"""complex""",
"""complexes""",
"""complexities""",
"""complexity""",
"""compliance""",
"""compliant""",
"""complicated""",
"""complicates""",
"""complicating""",
"""complication""",
"""complications""",
"""complies""",
"""complimentary""",
"""component""",
"""components""",
"""comportamento""",
"""comportamentos""",
"""composant""",
"""composants""",
"""composed""",
"""composite""",
"""composites""",
"""composition""",
"""compositional""",
"""compositionally""",
"""compositions""",
"""composting""",
"""composto""",
"""compound""",
"""compound-1""",
"""compound-ligand""",
"""compounds""",
"""comprehension""",
"""comprehensive""",
"""comprehensively""",
"""comprehensiveness""",
"""compression""",
"""compressions""",
"""comprise""",
"""comprised""",
"""comprises""",
"""comprising""",
"""compromettre""",
"""compromise""",
"""compromised""",
"""compromising""",
"""computation-based""",
"""computational""",
"""computationally""",
"""compute""",
"""computed""",
"""computer""",
"""computer-aided""",
"""computerised""",
"""computerized""",
"""computers""",
"""computing""",
"""con""",
"""concatenated""",
"""concavities""",
"""concealed""",
"""conceive""",
"""conceived""",
"""concentrate""",
"""concentrated""",
"""concentration""",
"""concentrations""",
"""concentric""",
"""concept""",
"""conception""",
"""concepts""",
"""conceptual""",
"""conceptualizing""",
"""concern""",
"""concerned""",
"""concerning""",
"""concerns""",
"""concert""",
"""concerted""",
"""concise""",
"""conclude""",
"""concluded""",
"""conclusion""",
"""conclusions""",
"""concomitant""",
"""concomitantly""",
"""concordance""",
"""concordant""",
"""concrete""",
"""concretely""",
"""concurrent""",
"""concurrently""",
"""condensation""",
"""condensed""",
"""condition""",
"""condition-related""",
"""condition-specific""",
"""conditional""",
"""conditioning""",
"""conditions""",
"""condom""",
"""condoms""",
"""conduct""",
"""conductance""",
"""conducted""",
"""conducting""",
"""conductive""",
"""conduit""",
"""cone""",
"""confer""",
"""conference""",
"""conferences""",
"""conferred""",
"""conferring""",
"""confers""",
"""confidence""",
"""confident""",
"""configural""",
"""configurations""",
"""confined""",
"""confinement""",
"""confirm""",
"""confirmation""",
"""confirmatory""",
"""confirmed""",
"""confirmer""",
"""confirming""",
"""confirms""",
"""conflated""",
"""conflict""",
"""conflicting""",
"""conflicts""",
"""conflictual""",
"""confocal""",
"""conformal""",
"""conformation""",
"""conforms""",
"""confounded""",
"""confounders""",
"""confounding""",
"""congenital""",
"""congestion""",
"""congregate""",
"""congress""",
"""conization""",
"""conization-to-pregnancy""",
"""conjoint""",
"""conjunction""",
"""conjunctival""",
"""conjunctivitis""",
"""connected""",
"""connectedness""",
"""connection""",
"""connections""",
"""connective""",
"""connectivity""",
"""connectivity-based""",
"""connectome""",
"""connects""",
"""conotruncal""",
"""consciously""",
"""consciousness""",
"""consecutive""",
"""consecutively""",
"""consensus""",
"""consensus-based""",
"""consensus-informed""",
"""consent""",
"""consented""",
"""consenting""",
"""consequence""",
"""consequences""",
"""consequent""",
"""consequential""",
"""consequently""",
"""conservation""",
"""conservative""",
"""conserved""",
"""consider""",
"""considerable""",
"""considerably""",
"""considerado""",
"""considerados""",
"""consideraron""",
"""consideration""",
"""considerations""",
"""considered""",
"""considering""",
"""considers""",
"""consign""",
"""consisted""",
"""consistency""",
"""consistent""",
"""consistently""",
"""consisting""",
"""consists""",
"""consolidate""",
"""consolidated""",
"""consolidates""",
"""consolidation""",
"""consortium""",
"""consortium-medicare""",
"""conspecifics""",
"""constancy""",
"""constant""",
"""constatamos""",
"""constellation""",
"""constituent""",
"""constituents""",
"""constitute""",
"""constituted""",
"""constitutes""",
"""constituting""",
"""constitutive""",
"""constraints""",
"""construct""",
"""constructed""",
"""constructing""",
"""construction""",
"""constructive""",
"""constructs""",
"""consult""",
"""consultancies""",
"""consultants""",
"""consultation""",
"""consultations""",
"""consulted""",
"""consulting""",
"""consults""",
"""consumed""",
"""consumer""",
"""consumers""",
"""consuming""",
"""consumption""",
"""contact""",
"""contacted""",
"""contagious""",
"""contain""",
"""contained""",
"""containing""",
"""containment""",
"""contains""",
"""contaminants""",
"""contamination""",
"""contemporaneously""",
"""contemporary""",
"""contenant""",
"""contending""",
"""content""",
"""content-specific""",
"""contents""",
"""context""",
"""context-specific""",
"""contexts""",
"""contextual""",
"""contextualization""",
"""contextualize""",
"""contiene""",
"""contiguity""",
"""contiguous""",
"""continence""",
"""continent""",
"""continents""",
"""contingency""",
"""contingent""",
"""contingents""",
"""continuation""",
"""continue""",
"""continued""",
"""continues""",
"""continuing""",
"""continuity""",
"""continuous""",
"""continuously""",
"""continuum""",
"""contour""",
"""contraception""",
"""contraceptive""",
"""contraceptives""",
"""contractile""",
"""contractility""",
"""contracting""",
"""contraction""",
"""contractions""",
"""contradict""",
"""contradictory""",
"""contraindicated""",
"""contraindication""",
"""contraindications""",
"""contralateral""",
"""contrary""",
"""contrast""",
"""contrast-enhanced""",
"""contraste""",
"""contrasted""",
"""contrasting""",
"""contrasts""",
"""contribuent""",
"""contribute""",
"""contributed""",
"""contributes""",
"""contributing""",
"""contribution""",
"""contributions""",
"""contributor""",
"""contributors""",
"""contributory""",
"""contriving""",
"""control""",
"""controlados""",
"""controldata""",
"""controle""",
"""controllable""",
"""controlled""",
"""controller""",
"""controlling""",
"""controls""",
"""controversial""",
"""controversies""",
"""controversy""",
"""convalescent""",
"""convened""",
"""convenience""",
"""convenient""",
"""convention""",
"""conventional""",
"""conventionally""",
"""converge""",
"""converged""",
"""convergence""",
"""convergent""",
"""convergently""",
"""conversation""",
"""conversations""",
"""conversely""",
"""conversion""",
"""conversions""",
"""convert""",
"""converted""",
"""converter""",
"""converting""",
"""convex""",
"""convey""",
"""conveyed""",
"""conveyor""",
"""convinced""",
"""convincing""",
"""convincingly""",
"""convolutional""",
"""convolutions""",
"""cooler""",
"""cooling""",
"""cooperate""",
"""cooperates""",
"""cooperation""",
"""cooperative""",
"""cooperatively""",
"""coordinate""",
"""coordinated""",
"""coordinates""",
"""coordination""",
"""coordinators""",
"""copanlisib""",
"""copd""",
"""cope""",
"""copeptin""",
"""copies""",
"""coping""",
"""copper""",
"""copresence""",
"""copri""",
"""copy""",
"""cor""",
"""cord""",
"""cord-related""",
"""cords""",
"""core""",
"""corevitas""",
"""cornea""",
"""corneal""",
"""corneas""",
"""cornelia""",
"""corner""",
"""corona""",
"""coronal""",
"""coronary""",
"""coronavirus""",
"""coronavirus-2""",
"""coronavirus-like""",
"""coronaviruses""",
"""corporal""",
"""corporate""",
"""corporation""",
"""corporations""",
"""corporelle""",
"""corpus""",
"""corr""",
"""correct""",
"""corrected""",
"""correction""",
"""corrections""",
"""correctly""",
"""corregida""",
"""correlate""",
"""correlated""",
"""correlates""",
"""correlating""",
"""correlation""",
"""correlational""",
"""correlations""",
"""correspond""",
"""corresponded""",
"""correspondence""",
"""corresponding""",
"""correspondingly""",
"""corresponds""",
"""corroborate""",
"""corroborating""",
"""corroded""",
"""corrosion""",
"""corruptions""",
"""cortex""",
"""cortical""",
"""corticocortical""",
"""corticosteroid""",
"""corticosteroid-based""",
"""corticosteroids""",
"""corticotrophin-releasing""",
"""cortisol""",
"""corynebacteria""",
"""corynebacterium""",
"""cos""",
"""cosecretion""",
"""cosmetic""",
"""cosmetical""",
"""cosmic""",
"""cosmopolitan""",
"""cost""",
"""cost-advantageous""",
"""cost-benefit""",
"""cost-containment""",
"""cost-effective""",
"""cost-effectiveness""",
"""costa-paz""",
"""costimulation""",
"""costimulatory""",
"""costing""",
"""costly""",
"""costs""",
"""cotinine""",
"""cotransporter""",
"""cotreatment""",
"""couch""",
"""cough""",
"""cough-one""",
"""coughed""",
"""coughing""",
"""could""",
"""council""",
"""counseling""",
"""counselling""",
"""count""",
"""counted""",
"""counteract""",
"""counterpart""",
"""counterparts""",
"""countervailing""",
"""counting""",
"""countries""",
"""country""",
"""counts""",
"""county""",
"""couple""",
"""coupled""",
"""couples""",
"""coupling""",
"""courage""",
"""course""",
"""courses""",
"""court""",
"""cov-19""",
"""covariance""",
"""covariates""",
"""covariation""",
"""cover""",
"""coverage""",
"""covered""",
"""covering""",
"""covers""",
"""covid""",
"""covid-19""",
"""covid-19-related""",
"""covid19""",
"""covid19-related""",
"""cox""",
"""cox-regression""",
"""cpap""",
"""cpb""",
"""cpcc""",
"""cpd""",
"""cpg""",
"""cpi""",
"""cpr""",
"""cprm""",
"""cpu""",
"""cqit""",
"""crab-eating""",
"""cramping""",
"""cranial""",
"""craniotomies""",
"""craniotomy""",
"""craving""",
"""cravings""",
"""crawling""",
"""crawling-dependent""",
"""crc""",
"""crd42020179398""",
"""create""",
"""created""",
"""creates""",
"""creatine""",
"""creating""",
"""creatinine""",
"""creation""",
"""creb3""",
"""crebbp""",
"""credence""",
"""credibility""",
"""credible""",
"""creoles""",
"""crescimento""",
"""crest""",
"""crh""",
"""crime""",
"""crimes""",
"""criminal""",
"""criminalized""",
"""crippling""",
"""crises""",
"""crisis""",
"""crispr""",
"""crispr-associated""",
"""criteria""",
"""criteriathe""",
"""criterion""",
"""criterios""",
"""critical""",
"""critical-sized""",
"""critically""",
"""criticized""",
"""critique""",
"""crlm""",
"""crm""",
"""crohn""",
"""croissance""",
"""crop""",
"""crops""",
"""cross""",
"""cross-border""",
"""cross-country""",
"""cross-linking""",
"""cross-over""",
"""cross-referenced""",
"""cross-referencing""",
"""cross-referrals""",
"""cross-sectional""",
"""cross-sectionally""",
"""cross-sections""",
"""cross-shock""",
"""cross-species""",
"""cross-talk""",
"""cross-validated""",
"""cross-validation""",
"""cross-volatility""",
"""cross-widely""",
"""crossfit""",
"""crossing""",
"""crossing-over""",
"""crosslink""",
"""crosslinking""",
"""crossover""",
"""crossovers""",
"""crosstalk""",
"""crowding""",
"""crowdsourcing""",
"""crowe""",
"""crown""",
"""crp""",
"""crpm""",
"""crps""",
"""crrnas""",
"""crs""",
"""crs-hipec""",
"""crswnp""",
"""crt""",
"""crucial""",
"""crucially""",
"""cruciate""",
"""crude""",
"""cruelty""",
"""crush""",
"""cruzi""",
"""crying""",
"""cryo-electron""",
"""cryo-em""",
"""cryo-et""",
"""cryo-tem""",
"""cryotherapy""",
"""cryptogenic""",
"""crystal""",
"""crystalline""",
"""crystallinity""",
"""crystallography""",
"""crystals""",
"""cs1""",
"""cs4""",
"""cs5""",
"""cs6""",
"""cscc""",
"""csccs""",
"""csdmard""",
"""cse""",
"""csf""",
"""csk""",
"""csop""",
"""csr""",
"""csws""",
"""ct-based""",
"""ct-ffr""",
"""ct1""",
"""ct1a""",
"""ct1b""",
"""ctcl""",
"""ctcls""",
"""ctdna""",
"""ctla-4""",
"""cto""",
"""ctsc""",
"""ctt""",
"""cualquier""",
"""cucumber""",
"""cucumbers""",
"""cue""",
"""cue-mediated""",
"""cues""",
"""cuff""",
"""cui""",
"""cuja""",
"""cull""",
"""culminate""",
"""culminating""",
"""cult""",
"""cultivar""",
"""cultivars""",
"""cultivating""",
"""cultivation""",
"""cultural""",
"""culturally""",
"""culture""",
"""culture-negative""",
"""cultured""",
"""cultures""",
"""cumulative""",
"""cumulatively""",
"""cumulus""",
"""cumulus-oocyte""",
"""cuniculus""",
"""cup""",
"""curative""",
"""curbing""",
"""curcumin""",
"""cure""",
"""cured""",
"""curettage""",
"""curiously""",
"""curl""",
"""curl-ups""",
"""curlin""",
"""current""",
"""currently""",
"""currents""",
"""curricula""",
"""curricular""",
"""curriculum""",
"""currie""",
"""curvature""",
"""curvatures""",
"""curve""",
"""curved""",
"""curvelet""",
"""curves""",
"""cushing""",
"""custody""",
"""custom""",
"""custom-built""",
"""custom-made""",
"""cut""",
"""cut-off""",
"""cut-points""",
"""cutaneous""",
"""cutibacterium""",
"""cutis""",
"""cutoff""",
"""cutoffs""",
"""cuts""",
"""cutting""",
"""cutting-edge""",
"""cv-free""",
"""cva""",
"""cvc""",
"""cvd""",
"""cvm""",
"""cvo""",
"""cvrf""",
"""cvt""",
"""cxcl2""",
"""cxl""",
"""cxr""",
"""cyanide""",
"""cyb5r""",
"""cyb5r-oe""",
"""cyb5r-overexpressing""",
"""cyb5r3""",
"""cyb5r3-tg""",
"""cycle""",
"""cycles""",
"""cyclic""",
"""cyclin-dependent""",
"""cycling""",
"""cyclohexanol""",
"""cyclophosphamide""",
"""cylinder""",
"""cynops""",
"""cynotherapy""",
"""cyp21a2""",
"""cyp2s1""",
"""cyst""",
"""cystectomy""",
"""cysteine""",
"""cystic""",
"""cysticercosis""",
"""cystoscope""",
"""cystoscopes""",
"""cystoscopies""",
"""cystoscopy""",
"""cystourethrography""",
"""cysts""",
"""cytarabine""",
"""cyto-architectural""",
"""cytochrome""",
"""cytogenetic""",
"""cytogenetically""",
"""cytogenic""",
"""cytokine""",
"""cytokines""",
"""cytokinin""",
"""cytologic""",
"""cytological""",
"""cytologically""",
"""cytology""",
"""cytometry""",
"""cytomorphological""",
"""cytopathologic""",
"""cytopathologically""",
"""cytopathology""",
"""cytopenias""",
"""cytoplasm""",
"""cytoplasmic""",
"""cytoprotective""",
"""cytoreduction""",
"""cytoreductive""",
"""cytosine""",
"""cytosine-motif""",
"""cytosine-rich""",
"""cytoskeletal""",
"""cytoskeleton""",
"""cytosolic""",
"""cytotoxic""",
"""cytotoxicity""",
"""cytotrophoblast-stem-like""",
"""cytotrophoblasts""",
"""czech""",
"""czs""",
"""d-cycloserine""",
"""d161""",
"""d2-type""",
"""dacaa""",
"""dacarbazine""",
"""dacia""",
"""daf""",
"""daily""",
"""dairy""",
"""dajun""",
"""dalff""",
"""dam""",
"""damage""",
"""damage-induced""",
"""damaged""",
"""damages""",
"""damaging""",
"""dampen""",
"""dams""",
"""danger""",
"""dangerous""",
"""danish""",
"""dans""",
"""dapsa""",
"""daratumumab""",
"""dare""",
"""dark-field""",
"""darker""",
"""darkness""",
"""darwinian""",
"""das""",
"""das28""",
"""dasypus""",
"""data""",
"""data-driven""",
"""database""",
"""databases""",
"""dataset""",
"""dataset-admissions""",
"""datasets""",
"""date""",
"""dates""",
"""datos""",
"""dau""",
"""daugan""",
"""daughter""",
"""dauricine""",
"""david""",
"""davis""",
"""dawley""",
"""day""",
"""day-1""",
"""day-to-day""",
"""daylong""",
"""days""",
"""daytime""",
"""dbp""",
"""dbps""",
"""dca""",
"""dcdc""",
"""dce""",
"""dcfi""",
"""ddh""",
"""ddi""",
"""ddi-major""",
"""ddis""",
"""ddpcr""",
"""ddr""",
"""dds""",
"""de-escalation""",
"""de-identified""",
"""de-stigmatization""",
"""deacetylase""",
"""deactivation""",
"""deadly""",
"""deaf""",
"""deafferentation""",
"""deal""",
"""dealer""",
"""dealing""",
"""death""",
"""deaths""",
"""debate""",
"""debated""",
"""debiasing""",
"""debilitating""",
"""debond""",
"""debridement""",
"""debris""",
"""debulking""",
"""dec""",
"""decade""",
"""decades""",
"""decalvans""",
"""decapeptyl""",
"""decay""",
"""decays""",
"""deceased""",
"""decedents""",
"""deceleration""",
"""decelerations""",
"""december""",
"""decide""",
"""decided""",
"""decipher""",
"""deciphered""",
"""decision""",
"""decision-making""",
"""decision-support""",
"""decisions""",
"""decisive""",
"""declaration""",
"""declarations""",
"""declared""",
"""declination""",
"""decline""",
"""declined""",
"""declines""",
"""declining""",
"""decodable""",
"""decoding""",
"""decombinator""",
"""decompensated""",
"""decompensation""",
"""decomposed""",
"""decomposition""",
"""decompression""",
"""decompressive""",
"""decontamination""",
"""decoy""",
"""decrease""",
"""decreased""",
"""decreases""",
"""decreasing""",
"""decriminalisation""",
"""dedicated""",
"""deductible""",
"""deductive""",
"""deem""",
"""deemed""",
"""deemphasised""",
"""deep""",
"""deep-diving""",
"""deepen""",
"""deeper""",
"""deeply""",
"""deer""",
"""default""",
"""defect""",
"""defective""",
"""defects""",
"""defend""",
"""defenders""",
"""defense""",
"""defense-related""",
"""defenses""",
"""defensin""",
"""deferoxamine""",
"""deferred""",
"""defibrillator""",
"""defibrillators""",
"""deficiencies""",
"""deficiency""",
"""deficient""",
"""deficit""",
"""deficits""",
"""define""",
"""defined""",
"""defines""",
"""defining""",
"""definite""",
"""definition""",
"""definitions""",
"""definitive""",
"""definitively""",
"""deflation""",
"""deflection""",
"""deflux""",
"""deforestation""",
"""deformation""",
"""deformed""",
"""deformities""",
"""deformity""",
"""deforms""",
"""degenerate""",
"""degeneration""",
"""degenerative""",
"""degludec""",
"""degludec-u100""",
"""deglycosylation""",
"""degradation""",
"""degradative""",
"""degrade""",
"""degraded""",
"""degranulation""",
"""degree""",
"""degrees""",
"""degs""",
"""dehiscence""",
"""dehydration""",
"""dehydrogenase""",
"""deidentification""",
"""deidentified""",
"""del""",
"""delamination""",
"""delay""",
"""delayed""",
"""delaying""",
"""delays""",
"""deletes""",
"""deletion""",
"""deletions""",
"""delineate""",
"""delineated""",
"""delineating""",
"""delineation""",
"""delirium""",
"""deliver""",
"""delivered""",
"""deliveries""",
"""delivering""",
"""delivers""",
"""delivery""",
"""delphi""",
"""delta""",
"""delta-decalactone""",
"""delta-dodecalactone""",
"""delta4""",
"""delustering""",
"""demand""",
"""demanded""",
"""demanding""",
"""demands""",
"""demarcated""",
"""dementia""",
"""dementias""",
"""demethylases""",
"""demethylation""",
"""demographic""",
"""demographical""",
"""demographics""",
"""demography""",
"""demonstrados""",
"""demonstrate""",
"""demonstrated""",
"""demonstrates""",
"""demonstrating""",
"""demonstration""",
"""demyelinating""",
"""demystified""",
"""den""",
"""dendrimer""",
"""dendrimer-based""",
"""dendrimers""",
"""dendritic""",
"""dengue""",
"""denmark""",
"""denoted""",
"""dense""",
"""densely""",
"""densidad""",
"""densities""",
"""density""",
"""dental""",
"""dentate""",
"""denticola""",
"""dentilisin""",
"""dentilisin-deficient""",
"""dentistry""",
"""dentists""",
"""dentocult""",
"""dentofacial""",
"""deodorant""",
"""departamentos""",
"""department""",
"""departmental""",
"""departments""",
"""departure""",
"""depend""",
"""dependable""",
"""dependence""",
"""dependency""",
"""dependent""",
"""depending""",
"""depends""",
"""depiction""",
"""depictions""",
"""depleted""",
"""depletion""",
"""depletion-induced""",
"""deployed""",
"""depois""",
"""deportation""",
"""deposit""",
"""deposited""",
"""deposition""",
"""deposits""",
"""depot""",
"""depressed""",
"""depression""",
"""depressive""",
"""deprivation""",
"""deprivation-evoked""",
"""depth""",
"""derangements""",
"""deregulated""",
"""derepressed""",
"""derivation""",
"""derivative""",
"""derivatives""",
"""derive""",
"""derived""",
"""derives""",
"""deriving""",
"""dermal""",
"""dermatitis""",
"""dermatologic""",
"""dermatological""",
"""dermatologists""",
"""dermatology""",
"""dermatome""",
"""dermatopathologic""",
"""dermatoscopic""",
"""dermatoscopy""",
"""dermatoses""",
"""dermoscopic""",
"""dermoscopist""",
"""dermoscopy""",
"""des""",
"""desaturation""",
"""descemet""",
"""descend""",
"""descending""",
"""describe""",
"""described""",
"""describes""",
"""describing""",
"""description""",
"""descriptive""",
"""descriptively""",
"""descriptors""",
"""desenlace""",
"""desenvolvimento""",
"""desert""",
"""deserve""",
"""deserves""",
"""desiccation""",
"""design""",
"""designated""",
"""designation""",
"""designed""",
"""designers""",
"""designing""",
"""designs""",
"""desirability""",
"""desirable""",
"""desire""",
"""desired""",
"""desiring""",
"""desmocollins""",
"""desmoglein""",
"""desmogleins""",
"""desmoplastic""",
"""desorbed""",
"""despair-like""",
"""desperately""",
"""despite""",
"""desserts""",
"""dest""",
"""destabilize""",
"""destabilizes""",
"""destabilizing""",
"""destroy""",
"""destruction""",
"""destructive""",
"""detach""",
"""detached""",
"""detaching""",
"""detachment""",
"""detail""",
"""detailed""",
"""details""",
"""detect""",
"""detectable""",
"""detected""",
"""detecting""",
"""detection""",
"""detections""",
"""detector""",
"""detectors""",
"""detects""",
"""deter""",
"""deteriorated""",
"""deterioration""",
"""determinant""",
"""determinants""",
"""determinar""",
"""determination""",
"""determine""",
"""determined""",
"""determines""",
"""determining""",
"""detoxification""",
"""detoxify""",
"""detriment""",
"""detrimental""",
"""detroit""",
"""detrusor""",
"""detrusor-sphincter""",
"""deucravacitinib""",
"""deuterostomes""",
"""deux""",
"""devastating""",
"""deve""",
"""develop""",
"""developed""",
"""developers""",
"""developing""",
"""development""",
"""developmental""",
"""developmentally""",
"""developments""",
"""develops""",
"""deviation""",
"""deviations""",
"""device""",
"""device-related""",
"""devices""",
"""devising""",
"""devoted""",
"""dexamethasone""",
"""dexin""",
"""dexmedetomidine""",
"""dexterity""",
"""dextran""",
"""dezembro""",
"""dflc""",
"""dfp""",
"""dfs""",
"""dfx""",
"""dgan""",
"""dgcr5""",
"""dha""",
"""dhi""",
"""dhl""",
"""dhrs1""",
"""dht""",
"""dht-treated""",
"""di-""",
"""diabetes""",
"""diabetic""",
"""diabetologist""",
"""diadochokinesis""",
"""diagnose""",
"""diagnosed""",
"""diagnoses""",
"""diagnosing""",
"""diagnosis""",
"""diagnostic""",
"""diagnostically""",
"""dialect""",
"""dialects""",
"""dialogue""",
"""dialysis""",
"""dialysis-related""",
"""dialyzer""",
"""dialyzers""",
"""diameter""",
"""diametral""",
"""diametrically""",
"""diana-mirpath""",
"""diaphragms""",
"""diarrhoea""",
"""diastasis""",
"""diastereo-""",
"""diastereoselectivity""",
"""diastolic""",
"""diathermy""",
"""diathesis-stress""",
"""diazoxide""",
"""dichotomous""",
"""diclofenac""",
"""dictate""",
"""dictating""",
"""did""",
"""didactics""",
"""diderms""",
"""die""",
"""died""",
"""diego""",
"""diel""",
"""dienogest""",
"""diet""",
"""diet-induced""",
"""dietary""",
"""diethylnitrosamine-induced""",
"""dietitian""",
"""diets""",
"""differ""",
"""differed""",
"""difference""",
"""differences""",
"""different""",
"""differential""",
"""differentially""",
"""differentiate""",
"""differentiated""",
"""differentiates""",
"""differentiating""",
"""differentiation""",
"""differently""",
"""differing""",
"""differs""",
"""difficult""",
"""difficult-to-control""",
"""difficult-to-test""",
"""difficulties""",
"""difficulty""",
"""diffident""",
"""diffraction""",
"""diffuse""",
"""diffusely""",
"""diffusion""",
"""diffusivity""",
"""digest""",
"""digestion""",
"""digestive""",
"""digit""",
"""digital""",
"""digoxin""",
"""diguanylate""",
"""dihydrotestosterone""",
"""dilation""",
"""dili""",
"""diluted""",
"""dilution""",
"""dim-light""",
"""dimension""",
"""dimensional""",
"""dimensionalities""",
"""dimensionality""",
"""dimensions""",
"""dimer-selective""",
"""dimeric""",
"""dimethyl""",
"""diminish""",
"""diminished""",
"""diminution""",
"""dimodal""",
"""dimorphism""",
"""dinucleotide""",
"""diopter""",
"""dioxide""",
"""dioxygenase""",
"""dipeptidase""",
"""dipeptide""",
"""dipeptidyl""",
"""diphosphate""",
"""diploid""",
"""diploids""",
"""diploma""",
"""direct""",
"""directed""",
"""direction""",
"""directional""",
"""directionality""",
"""directions""",
"""directly""",
"""directors""",
"""directory""",
"""disabilities""",
"""disability""",
"""disability-related""",
"""disaccharide""",
"""disadvantage""",
"""disadvantaged""",
"""disadvantages""",
"""disaggregated""",
"""disagreement""",
"""disagreements""",
"""disappearance""",
"""disappeared""",
"""disappointing""",
"""disappointments""",
"""disassembly""",
"""disaster""",
"""disasters""",
"""disc""",
"""discectomy""",
"""discern""",
"""discernible""",
"""discharge""",
"""discharged""",
"""discharges""",
"""discharging""",
"""discipline""",
"""disciplines""",
"""disclose""",
"""disclosed""",
"""discomfort""",
"""discontinuation""",
"""discontinue""",
"""discontinued""",
"""discontinuing""",
"""discontinuity""",
"""discordant""",
"""discourages""",
"""discourse""",
"""discover""",
"""discovered""",
"""discoveries""",
"""discovery""",
"""discrepancies""",
"""discrepancy""",
"""discrepant""",
"""discrete""",
"""discretionary""",
"""discriminant""",
"""discriminants""",
"""discriminate""",
"""discriminating""",
"""discrimination""",
"""discriminative""",
"""discriminators""",
"""discriminatory""",
"""discuss""",
"""discussed""",
"""discusses""",
"""discussing""",
"""discussion""",
"""discussions""",
"""disease""",
"""disease-derived""",
"""disease-free""",
"""disease-modifying""",
"""disease-related""",
"""disease-specific""",
"""diseases""",
"""diseases-is""",
"""diselenide""",
"""disenrollment""",
"""disentangle""",
"""disequilibrium""",
"""dish""",
"""disinfectant""",
"""disinfectants""",
"""disinfection""",
"""disinhibition""",
"""disintegrated""",
"""disk""",
"""dislocated""",
"""dislocation""",
"""dismal""",
"""dismutase""",
"""disorder""",
"""disordered""",
"""disorderly""",
"""disorders""",
"""disorders-2""",
"""disparate""",
"""disparities""",
"""disparity""",
"""dispelling""",
"""dispensing""",
"""dispersal""",
"""dispersed""",
"""dispersion""",
"""displaced""",
"""displacement""",
"""display""",
"""displayed""",
"""displaying""",
"""displays""",
"""disposable""",
"""disposition""",
"""disproportion""",
"""disproportionally""",
"""disproportionate""",
"""disproportionately""",
"""disrupt""",
"""disrupted""",
"""disruption""",
"""disruptions""",
"""disruptive""",
"""disrupts""",
"""dissatisfaction""",
"""dissected""",
"""dissecting""",
"""dissection""",
"""dissembled""",
"""disseminated""",
"""dissemination""",
"""dissertations""",
"""dissipated""",
"""dissipation""",
"""disso""",
"""dissolution""",
"""dissolved""",
"""dissolves""",
"""distal""",
"""distance""",
"""distancing""",
"""distant""",
"""distension""",
"""distill""",
"""distilled""",
"""distinct""",
"""distinction""",
"""distinctive""",
"""distinguish""",
"""distinguished""",
"""distinguishes""",
"""distinguishing""",
"""distintos""",
"""distortion""",
"""distortions""",
"""distractors""",
"""distress""",
"""distribute""",
"""distributed""",
"""distributing""",
"""distribution""",
"""distributions""",
"""district""",
"""districts""",
"""disturbance""",
"""disturbances""",
"""disturbed""",
"""disturbing""",
"""disulfide""",
"""disulfiram""",
"""dithiothreitol""",
"""diuretics""",
"""diurnal""",
"""dive""",
"""diverged""",
"""divergence""",
"""divergences""",
"""divergent""",
"""diverges""",
"""diverging""",
"""diverse""",
"""diversely""",
"""diversification""",
"""diversified""",
"""diversion""",
"""diversity""",
"""diverticular""",
"""divided""",
"""divides""",
"""divididos""",
"""dividing""",
"""diving""",
"""division""",
"""divisions""",
"""dizziness""",
"""dl-based""",
"""dlbcl""",
"""dlbcl-derived""",
"""dlbcls""",
"""dlpfc""",
"""dmard""",
"""dmard-related""",
"""dmards""",
"""dmc1""",
"""dmgs""",
"""dmri""",
"""dms""",
"""dmt""",
"""dna""",
"""dna-based""",
"""dna-containing""",
"""dnaa""",
"""dnaa-dependent""",
"""dnas""",
"""dnc""",
"""dnd""",
"""dnmt1""",
"""dntp""",
"""docetaxel""",
"""docking""",
"""docosahexaenoic""",
"""doctor""",
"""doctors""",
"""doctrine""",
"""document""",
"""documentation""",
"""documented""",
"""documents""",
"""does""",
"""doetinchem""",
"""dog""",
"""dog-assisted""",
"""dogs""",
"""doing""",
"""dois""",
"""dollars""",
"""domaidisruptor""",
"""domain""",
"""domain-containing""",
"""domain-experts""",
"""domains""",
"""dome""",
"""domes""",
"""domestic""",
"""domesticated""",
"""domiciliares""",
"""dominant""",
"""dominates""",
"""domino""",
"""donation""",
"""donations""",
"""done""",
"""dong""",
"""donor""",
"""donor-specific""",
"""donors""",
"""dopamine""",
"""doppler""",
"""dormant""",
"""dorsal""",
"""dorsolateral""",
"""dos""",
"""dosage""",
"""dosages""",
"""dose""",
"""dose-adjusted""",
"""dose-escalation""",
"""dose-limited""",
"""dose-limiting""",
"""dose-ranging""",
"""dose-rate""",
"""dose-rate-specific""",
"""dose-reduced""",
"""dose-related""",
"""dose-response""",
"""doses""",
"""dosimeter""",
"""dosimetry""",
"""dosing""",
"""dot1-like""",
"""dot1l""",
"""dot1l-af9""",
"""dot1l1-af10""",
"""dots""",
"""dotted""",
"""double""",
"""double-blind""",
"""double-masked""",
"""double-poling""",
"""double-stapling""",
"""double-strand""",
"""double-stranded""",
"""double-surface""",
"""doubling""",
"""doublings""",
"""down""",
"""down-regulating""",
"""down-regulation""",
"""downgraded""",
"""downloaded""",
"""downregulate""",
"""downregulated""",
"""downregulating""",
"""downregulation""",
"""downscaling""",
"""downstaging""",
"""downstream""",
"""doxorubicin""",
"""doxycycline""",
"""dozen""",
"""dozens""",
"""dp-tt""",
"""dpann""",
"""dpc""",
"""dpi""",
"""dps""",
"""dq8-positive""",
"""dra""",
"""drafting""",
"""drain""",
"""drainage""",
"""dramatic""",
"""dramatically""",
"""draw""",
"""drawbacks""",
"""drawing""",
"""drawings""",
"""drawn""",
"""draws""",
"""drb1-like""",
"""drb2""",
"""drd1""",
"""dream""",
"""dress""",
"""dressing""",
"""dressings""",
"""dressingsin""",
"""drew""",
"""drg""",
"""drgs""",
"""dried""",
"""drink""",
"""drinkers""",
"""drinking""",
"""drinks""",
"""drip""",
"""drive""",
"""drive-through""",
"""driven""",
"""driver""",
"""driverless""",
"""drivers""",
"""drives""",
"""driving""",
"""drones""",
"""drop""",
"""droplet""",
"""droplets""",
"""dropout""",
"""dropouts""",
"""drops""",
"""drosophila""",
"""drought""",
"""drr""",
"""drs""",
"""drug""",
"""drug-combinations""",
"""drug-drug""",
"""drug-drug-interactions""",
"""drug-induced""",
"""drug-interaction""",
"""drug-like""",
"""drug-related""",
"""druggable""",
"""drugs""",
"""drugs-""",
"""drusen""",
"""dry""",
"""dryer""",
"""drying""",
"""dsa""",
"""dsas""",
"""dsb""",
"""dsbs""",
"""dsd""",
"""dsd-life""",
"""dsek""",
"""dsg""",
"""dsg-specific""",
"""dsg3""",
"""dsgs""",
"""dsm-5""",
"""dsm-defined""",
"""dsm-iv""",
"""dsm-iv-tr""",
"""dsm-v""",
"""dsrna""",
"""dsrna-b2""",
"""dsrna-binding""",
"""dss""",
"""dtaop""",
"""dtc""",
"""dti""",
"""dtt""",
"""dtx1""",
"""dtx2""",
"""dual""",
"""dual-aim""",
"""dual-energy""",
"""dual-isolation""",
"""duas""",
"""dubh""",
"""ducks""",
"""duct""",
"""ductal""",
"""ductus""",
"""dud""",
"""duds""",
"""due""",
"""duel""",
"""duet""",
"""duodenal""",
"""dupilumab""",
"""duplex""",
"""duplicate""",
"""duplicated""",
"""duplicates""",
"""duplication""",
"""durability""",
"""durable""",
"""durably""",
"""duration""",
"""during""",
"""durotomy""",
"""durvalumab""",
"""dust""",
"""duty""",
"""dwarf""",
"""dwell""",
"""dxa""",
"""dyad""",
"""dyads""",
"""dyes""",
"""dying""",
"""dynamic""",
"""dynamical""",
"""dynamically""",
"""dynamics""",
"""dyrk1""",
"""dysalbuminemic""",
"""dysbiosis""",
"""dyschezia""",
"""dyscrasia""",
"""dysfibrinogenemia""",
"""dysfunction""",
"""dysfunction-induced""",
"""dysfunctions""",
"""dysglycemia""",
"""dyslipidemia""",
"""dysmenorrhea""",
"""dysmenorrhoea""",
"""dysmetabolic""",
"""dysmetabolism""",
"""dyspareunia""",
"""dysphagia""",
"""dysphonia""",
"""dysphonic""",
"""dysplasia""",
"""dyspnea""",
"""dysregulated""",
"""dysregulation""",
"""dyssynchrony""",
"""dyssynergia""",
"""dystrophy""",
"""dysuria""",
"""e-cigarette""",
"""e-cigarettes""",
"""e-health""",
"""e-proteins""",
"""e2f""",
"""eaa""",
"""eac""",
"""each""",
"""eactions""",
"""ear""",
"""earlier""",
"""earliest""",
"""early""",
"""early-life""",
"""early-onset""",
"""early-stage""",
"""ears""",
"""earth""",
"""earthquakes""",
"""ease""",
"""easier""",
"""easily""",
"""east""",
"""eastern""",
"""easy""",
"""eating""",
"""ebastine""",
"""ebd""",
"""ebl""",
"""ebola""",
"""ebsco""",
"""eca""",
"""eca-mri""",
"""ecc""",
"""eccentric""",
"""eccentricities""",
"""eccrine""",
"""ecg""",
"""ecgs""",
"""echinoderms""",
"""echo""",
"""echocardiographers""",
"""echocardiographic""",
"""echocardiography""",
"""ecl""",
"""eclampsia""",
"""ecm""",
"""ecmltg""",
"""ecmo""",
"""eco-epidemiological""",
"""eco-friendly""",
"""ecog""",
"""ecological""",
"""ecology""",
"""ecomorphological""",
"""ecomorphs""",
"""ecomp""",
"""ecomp-based""",
"""econlit""",
"""economic""",
"""economical""",
"""economy""",
"""ecosystem""",
"""ecosystems""",
"""ecotype""",
"""ecs""",
"""ectoderm""",
"""ectopic""",
"""ecv""",
"""ecv-cmr""",
"""eczema""",
"""eddy""",
"""edema""",
"""edge""",
"""edges""",
"""editing""",
"""edition""",
"""editor""",
"""editors""",
"""edoema""",
"""eds""",
"""educate""",
"""educated""",
"""education""",
"""education-accredited""",
"""educational""",
"""educators""",
"""edulis""",
"""eefs""",
"""eeg""",
"""eel""",
"""efa""",
"""efeitos""",
"""eff""",
"""effacement""",
"""effect""",
"""effective""",
"""effectively""",
"""effectiveness""",
"""effector""",
"""effectors""",
"""effects""",
"""effet""",
"""effets""",
"""efficacies""",
"""efficacious""",
"""efficacy""",
"""efficiencies""",
"""efficiency""",
"""efficient""",
"""efficiently""",
"""effort""",
"""efforts""",
"""effusion""",
"""efgri""",
"""efh""",
"""eficacia""",
"""egf""",
"""egf-like""",
"""egfr""",
"""egfri""",
"""egfris""",
"""egg""",
"""egg-like""",
"""eggs""",
"""egress""",
"""egypt""",
"""ehd1""",
"""eheals""",
"""ehfmrg""",
"""ehr""",
"""ehrs""",
"""ehs""",
"""ehss""",
"""eicosapentaenoic""",
"""eid""",
"""eif2ak4""",
"""eight""",
"""eight-month""",
"""eighteen""",
"""eighth""",
"""eighty""",
"""eighty-four""",
"""eighty-six""",
"""eims""",
"""either""",
"""ejection""",
"""ekam""",
"""elabela""",
"""elaborated""",
"""elaborates""",
"""elastase""",
"""elastases""",
"""elastic""",
"""elasticity""",
"""elastographic""",
"""elastography""",
"""elateriospermum""",
"""elbow""",
"""elderly""",
"""elected""",
"""elective""",
"""electives""",
"""electric""",
"""electrical""",
"""electrically""",
"""electro""",
"""electro-acupuncture""",
"""electrocardiogram""",
"""electrochemical""",
"""electrochemistry""",
"""electrochromatography""",
"""electrocoagulation""",
"""electrocochleography""",
"""electrode""",
"""electrodes""",
"""electroencephalogram""",
"""electroencephalographic""",
"""electroencephalography""",
"""electrolaryn""",
"""electrolaryngeal""",
"""electrolyte""",
"""electron""",
"""electronic""",
"""electroosmotic""",
"""electrophoretic""",
"""electrophotochemical""",
"""electrophysiological""",
"""electroplaque""",
"""electrospray""",
"""electrosurgical""",
"""electrosynthesis""",
"""electrotrophic""",
"""elegans""",
"""elekta""",
"""element""",
"""element-wise""",
"""elemental""",
"""elementary""",
"""elements""",
"""elephant""",
"""eletrocardiograma""",
"""elevadas""",
"""elevate""",
"""elevated""",
"""elevates""",
"""elevation""",
"""elevations""",
"""eleven""",
"""elibrary""",
"""elicit""",
"""elicitation""",
"""elicited""",
"""eliciting""",
"""elicits""",
"""eligibility""",
"""eligible""",
"""eliminate""",
"""eliminated""",
"""eliminates""",
"""eliminating""",
"""elimination""",
"""elisa""",
"""elite""",
"""elns""",
"""elongated""",
"""elongation""",
"""else""",
"""elsewhere""",
"""eltrombopag""",
"""elucidate""",
"""elucidated""",
"""elucidates""",
"""elucidating""",
"""elucidation""",
"""elusive""",
"""elusiveness""",
"""em""",
"""emailed""",
"""emails""",
"""emanating""",
"""embark""",
"""embarrassment""",
"""embase""",
"""embedded""",
"""emboli""",
"""embolism""",
"""embolisms""",
"""embolization""",
"""embrace""",
"""embryo""",
"""embryogenesis""",
"""embryonal""",
"""embryonic""",
"""embryos""",
"""emerald""",
"""emerge""",
"""emerged""",
"""emergence""",
"""emergencies""",
"""emergency""",
"""emergent""",
"""emergentes""",
"""emerging""",
"""emesis""",
"""emission""",
"""emit""",
"""emlpgp""",
"""emlpgps""",
"""emollient""",
"""emollients""",
"""emotion""",
"""emotional""",
"""emotionally""",
"""emotions""",
"""empathy""",
"""emperor""",
"""emphasis""",
"""emphasize""",
"""emphasized""",
"""emphasizing""",
"""empire""",
"""empiric""",
"""empirical""",
"""empirically""",
"""employ""",
"""employed""",
"""employee""",
"""employees""",
"""employing""",
"""employment""",
"""empower""",
"""empty""",
"""emptying""",
"""ems""",
"""emt""",
"""emtansine""",
"""emtct""",
"""emulsification""",
"""enable""",
"""enabled""",
"""enables""",
"""enabling""",
"""enactment""",
"""enamel""",
"""enamel-conditioning""",
"""enantio-""",
"""enantiomeric""",
"""enantiomers""",
"""enantioselective""",
"""encapsulated""",
"""encase""",
"""encephalitis""",
"""encephalomyelitis""",
"""encephalopathy""",
"""enclosed""",
"""encoded""",
"""encoder-decoder""",
"""encodes""",
"""encoding""",
"""encompasses""",
"""encompassing""",
"""encorafenib""",
"""encounter""",
"""encountered""",
"""encounters""",
"""encourage""",
"""encouraged""",
"""encouragement""",
"""encourages""",
"""encouraging""",
"""encryption""",
"""end""",
"""end-diastolic""",
"""end-joining""",
"""end-of-life""",
"""end-of-study""",
"""end-of-treatment""",
"""end-point""",
"""end-points""",
"""end-processing""",
"""end-stage""",
"""end-to-end""",
"""end-to-side""",
"""endeavor""",
"""endemic""",
"""endoanal""",
"""endocrine""",
"""endocrinologic""",
"""endocrinological""",
"""endocrinology""",
"""endocytic""",
"""endoderm""",
"""endodermal""",
"""endogenous""",
"""endoluminal""",
"""endomembrane""",
"""endometrial""",
"""endometrioid""",
"""endometrioma""",
"""endometriomas""",
"""endometriosis""",
"""endometriosis-associated""",
"""endometriotic""",
"""endonuclease""",
"""endonucleases""",
"""endoparasites""",
"""endophytic""",
"""endoplasmic""",
"""endoribonuclease""",
"""endorsed""",
"""endorsing""",
"""endoscope""",
"""endoscope-assisted""",
"""endoscopic""",
"""endoscopies""",
"""endoscopy""",
"""endosomal""",
"""endosomes""",
"""endosperm""",
"""endosymbiont""",
"""endothelial""",
"""endotoxin""",
"""endotracheal""",
"""endovascular""",
"""endovascularly""",
"""endows""",
"""endpoint""",
"""endpoints""",
"""ends""",
"""endurance""",
"""endure""",
"""endures""",
"""energies""",
"""energy""",
"""energy-dependent""",
"""energy-weighted""",
"""enfermeiros""",
"""enforced""",
"""enforcement""",
"""enforcing""",
"""engage""",
"""engaged""",
"""engagement""",
"""engaging""",
"""engine""",
"""engineered""",
"""engineering""",
"""england""",
"""english""",
"""english-speaking""",
"""engrafted""",
"""enhance""",
"""enhanced""",
"""enhancement""",
"""enhancers""",
"""enhances""",
"""enhancing""",
"""enhydra""",
"""enjoy""",
"""enjoyment""",
"""enlarged""",
"""enlargement""",
"""enlisted""",
"""enough""",
"""enquiry""",
"""enrich""",
"""enriched""",
"""enriches""",
"""enriching""",
"""enrichment""",
"""enroll""",
"""enrolled""",
"""enrollees""",
"""enrolling""",
"""enrollment""",
"""enrolment""",
"""ensayos""",
"""ensemble""",
"""ensembles""",
"""ensign""",
"""ensure""",
"""ensured""",
"""ensures""",
"""ensuring""",
"""entailing""",
"""entails""",
"""entecavir""",
"""enteral""",
"""entered""",
"""enteric""",
"""enteric-excipient""",
"""enteric-excipient-coated""",
"""entering""",
"""enterobacteriaceae""",
"""enterococcus""",
"""enterocolitis""",
"""enteropathy""",
"""enterprises""",
"""enters""",
"""entertainment""",
"""enthusiasm""",
"""entire""",
"""entirely""",
"""entities""",
"""entity""",
"""entorhinal""",
"""entrained""",
"""entrance""",
"""entrapment""",
"""entre""",
"""entry""",
"""enucleated""",
"""enucleation""",
"""enuresis""",
"""envelope""",
"""envelope-spanning""",
"""environment""",
"""environmental""",
"""environmentally""",
"""environments""",
"""envisioned""",
"""enzalutamide""",
"""enzymatic""",
"""enzyme""",
"""enzyme-linked""",
"""enzymes""",
"""eo-crc""",
"""eoc""",
"""eol""",
"""eor""",
"""eosin""",
"""eosinophil""",
"""eosinophilic""",
"""eosinophils""",
"""ep300""",
"""ependymomas""",
"""epi""",
"""epi-data""",
"""epicrispr""",
"""epidemic""",
"""epidemics""",
"""epidemiologic""",
"""epidemiological""",
"""epidemiology""",
"""epidermal""",
"""epidermidis""",
"""epidermis""",
"""epidural""",
"""epigenetic""",
"""epigenetics""",
"""epigenomes""",
"""epigenomic""",
"""epiglottitis""",
"""epilepsy""",
"""epileptic""",
"""epilepticus""",
"""epileptogenic""",
"""epinephrine""",
"""epiril""",
"""episcleritis""",
"""episode""",
"""episodes""",
"""episodic""",
"""epistaxis""",
"""epithelia""",
"""epithelial""",
"""epithelial-mesenchymal""",
"""epithelial-to-mesenchymal""",
"""epithelioid""",
"""epithelium""",
"""epitome""",
"""epitope""",
"""epitope-tagged""",
"""epitopes""",
"""eplerenone""",
"""epp""",
"""eps""",
"""epworth""",
"""eq-5d-3l""",
"""eq-5d-5l""",
"""eq5d""",
"""equal""",
"""equally""",
"""equation""",
"""equations""",
"""equilibrium""",
"""equina""",
"""equine""",
"""equip""",
"""equipes""",
"""equipment""",
"""equitable""",
"""equity""",
"""equity-oriented""",
"""equivalence""",
"""equivalent""",
"""equivalent-that""",
"""equivalents""",
"""equivocal""",
"""er""",
"""er-associated""",
"""er-resident""",
"""era""",
"""erad""",
"""eradicate""",
"""eradication""",
"""eras""",
"""erb-b2""",
"""erbb1""",
"""erbb1-erbb4""",
"""erbb1-mediated""",
"""erbb2""",
"""erbb4""",
"""ercp""",
"""ercps""",
"""ergometer""",
"""ergonomics""",
"""erk""",
"""erosion""",
"""erosions""",
"""err""",
"""erroneous""",
"""error""",
"""error-free""",
"""errors""",
"""erythema""",
"""erythematosus""",
"""erythematous""",
"""erythropoietin""",
"""es-18tzled""",
"""es-assisted""",
"""es-hl""",
"""escalating""",
"""escalation""",
"""escape""",
"""eschar""",
"""escherichia""",
"""escitalopram""",
"""escolar""",
"""eses""",
"""esi""",
"""esmolol""",
"""esophageal""",
"""esophagectomy""",
"""esophagogastric""",
"""esophagogastrostomies""",
"""esophagus""",
"""especially""",
"""espironolactona""",
"""esr1""",
"""esrd""",
"""essential""",
"""essentially""",
"""establish""",
"""established""",
"""establishes""",
"""establishing""",
"""establishment""",
"""estado""",
"""estar""",
"""estatisticamente""",
"""estava""",
"""estavam""",
"""estem""",
"""ester""",
"""estimate""",
"""estimated""",
"""estimates""",
"""estimating""",
"""estimation""",
"""estimations""",
"""estranged""",
"""estrogen""",
"""estrogen-like""",
"""estroprogestins""",
"""estudo""",
"""etc""",
"""etched""",
"""etching""",
"""etem""",
"""ethanol""",
"""ethanol-based""",
"""ethanol-type""",
"""ethical""",
"""ethics""",
"""ethiopia""",
"""ethiopian""",
"""ethnic""",
"""ethnicities""",
"""ethnicity""",
"""ethnicity-matched""",
"""ethnographic""",
"""ethnography""",
"""ethyl""",
"""ethylene""",
"""eti""",
"""etiological""",
"""etiologies""",
"""etiology""",
"""etiopathogenesis""",
"""etpb""",
"""ets""",
"""eubacterium""",
"""euc""",
"""eudaimonic""",
"""eudra""",
"""eukaryotes""",
"""eukaryotic""",
"""eular""",
"""eurasia""",
"""euro-american""",
"""europe""",
"""european""",
"""europeans""",
"""euroqol""",
"""euroquol-5-dimension""",
"""euryarchaeota""",
"""euthanasia""",
"""euthanize""",
"""euthyroid""",
"""euthyroidism""",
"""eutomer""",
"""eutylone""",
"""ev-mimecan""",
"""ev-tgfbi""",
"""evacuating""",
"""evacuation""",
"""evacuations""",
"""evade""",
"""evaluable""",
"""evaluate""",
"""evaluated""",
"""evaluates""",
"""evaluating""",
"""evaluation""",
"""evaluations""",
"""evaluators""",
"""evans""",
"""evasion""",
"""evd""",
"""even""",
"""evening""",
"""evenly""",
"""event""",
"""event-based""",
"""event-free""",
"""event-time""",
"""eventos""",
"""events""",
"""eventual""",
"""eventually""",
"""ever""",
"""ever-changing""",
"""ever-complex""",
"""everolimus""",
"""every""",
"""everyday""",
"""everyone""",
"""eviction""",
"""evidence""",
"""evidence-based""",
"""evidenced""",
"""evidences""",
"""evidencia""",
"""evidencing""",
"""evident""",
"""evo-devo""",
"""evoactg""",
"""evoked""",
"""evokes""",
"""evolution""",
"""evolutionarily""",
"""evolutionary""",
"""evolve""",
"""evolved""",
"""evolving""",
"""evs""",
"""evt-like""",
"""evts""",
"""ewing""",
"""ewsr1-fli1""",
"""exacerbate""",
"""exacerbation""",
"""exacerbations""",
"""exact""",
"""exactly""",
"""exam""",
"""exames""",
"""examination""",
"""examinations""",
"""examine""",
"""examined""",
"""examiner-dependent""",
"""examiners""",
"""examines""",
"""examining""",
"""example""",
"""examples""",
"""exams""",
"""excavate""",
"""excavated""",
"""exceed""",
"""exceeded""",
"""exceeding""",
"""excellent""",
"""except""",
"""exception""",
"""exceptional""",
"""exceptionally""",
"""exceptions""",
"""excess""",
"""excessive""",
"""excessively""",
"""exchange""",
"""excipient""",
"""excised""",
"""excision""",
"""excisional""",
"""excitation""",
"""excitatory""",
"""exclude""",
"""excluded""",
"""excluding""",
"""exclusion""",
"""exclusions""",
"""exclusive""",
"""exclusively""",
"""excreted""",
"""excretion""",
"""excretions""",
"""executed""",
"""executing""",
"""execution""",
"""executive""",
"""exemplar""",
"""exemplify""",
"""exempt""",
"""exenteration""",
"""exercise""",
"""exercised""",
"""exercises""",
"""exert""",
"""exerted""",
"""exerting""",
"""exertion""",
"""exertional""",
"""exerts""",
"""exhaled""",
"""exham""",
"""exhausted""",
"""exhaustion""",
"""exhaustive""",
"""exhibit""",
"""exhibited""",
"""exhibiting""",
"""exhibits""",
"""exist""",
"""existence""",
"""existing""",
"""exists""",
"""exit""",
"""exits""",
"""exoelectrogens""",
"""exoflagellated""",
"""exogenous""",
"""exome""",
"""exon""",
"""exonic""",
"""exophiala""",
"""exophytic""",
"""exosomal""",
"""exosome""",
"""exosome-mediated""",
"""exosomes""",
"""exotic""",
"""expand""",
"""expandable""",
"""expanded""",
"""expanding""",
"""expansion""",
"""expansion-phase""",
"""expansive""",
"""expect""",
"""expectant""",
"""expectantly""",
"""expectations""",
"""expected""",
"""expeditious""",
"""expenditure""",
"""expenditures""",
"""expenses""",
"""expensive""",
"""experience""",
"""experienced""",
"""experiences""",
"""experiencing""",
"""experientially""",
"""experiment""",
"""experimental""",
"""experimentally""",
"""experimentation""",
"""experimenters""",
"""experiments""",
"""expert""",
"""expert-based""",
"""expertise""",
"""expertos""",
"""experts""",
"""expiration""",
"""expiratory""",
"""explain""",
"""explained""",
"""explaining""",
"""explains""",
"""explanation""",
"""explanations""",
"""explanatory""",
"""explantation""",
"""explanted""",
"""explicable""",
"""explicate""",
"""explicit""",
"""explicitly""",
"""exploded""",
"""exploit""",
"""exploitation""",
"""exploited""",
"""exploiting""",
"""exploits""",
"""exploration""",
"""explorations""",
"""exploratory""",
"""explore""",
"""explored""",
"""explores""",
"""exploring""",
"""explosive""",
"""exponential""",
"""exponentially""",
"""export""",
"""exported""",
"""expose""",
"""exposed""",
"""exposes""",
"""exposing""",
"""exposure""",
"""exposure-agent""",
"""exposures""",
"""express""",
"""expressed""",
"""expresses""",
"""expressing""",
"""expression""",
"""expressional""",
"""expressions""",
"""expressivity""",
"""expressly""",
"""extant""",
"""extem""",
"""extend""",
"""extendable""",
"""extended""",
"""extending""",
"""extends""",
"""extension""",
"""extensions""",
"""extensive""",
"""extensively""",
"""extensor""",
"""extensors""",
"""extent""",
"""extents""",
"""external""",
"""externalization""",
"""externalized""",
"""externalizing""",
"""extinction""",
"""extra""",
"""extra-articular""",
"""extra-chomosomal""",
"""extra-intestinal""",
"""extracellular""",
"""extracellular-regulated""",
"""extracorporeal""",
"""extract""",
"""extracted""",
"""extracting""",
"""extraction""",
"""extraction-icp-aes""",
"""extractions""",
"""extracts""",
"""extramarital""",
"""extraordinarily""",
"""extraordinary""",
"""extraperitoneal""",
"""extrapleural""",
"""extrapolate""",
"""extraterminal""",
"""extravasation""",
"""extraversion""",
"""extravillous""",
"""extreme""",
"""extremely""",
"""extremities""",
"""extremity""",
"""extruded""",
"""extrusion""",
"""extubated""",
"""extubating""",
"""extubation""",
"""exudativa""",
"""exudative""",
"""exudativo""",
"""eye""",
"""eyebrows""",
"""eyelashes""",
"""eyelid""",
"""eyes""",
"""eyespot""",
"""eynde""",
"""ezh1""",
"""ezh2""",
"""f1-f4""",
"""f1-score""",
"""f1000""",
"""f223""",
"""fab""",
"""fabp-4""",
"""fabricated""",
"""fabrication""",
"""face""",
"""face-to-face""",
"""facebook""",
"""faced""",
"""facemask""",
"""faces""",
"""facet""",
"""facets""",
"""facial""",
"""facie""",
"""facile""",
"""facilitate""",
"""facilitated""",
"""facilitates""",
"""facilitating""",
"""facilitators""",
"""facilities""",
"""facility""",
"""facing""",
"""fact""",
"""fact-p""",
"""factor""",
"""factor-i""",
"""factor-negative""",
"""factor-related""",
"""factores""",
"""factors""",
"""facultative""",
"""faculties""",
"""faculty""",
"""fadden""",
"""faded""",
"""faecal""",
"""faecalibacterium""",
"""faeces""",
"""faecium""",
"""fail""",
"""failed""",
"""failure""",
"""failure-free""",
"""failures""",
"""fair""",
"""fait""",
"""faithfully""",
"""fake""",
"""falciparum""",
"""fall""",
"""fall-related""",
"""falling""",
"""fallopian""",
"""fallot""",
"""falls""",
"""false""",
"""false-negative""",
"""false-positive""",
"""faltering""",
"""familial""",
"""familiar""",
"""familiarity""",
"""families""",
"""family""",
"""fan""",
"""far""",
"""far-reaching""",
"""farm""",
"""farmer""",
"""farmers""",
"""farming""",
"""farms""",
"""farnesoid""",
"""fas""",
"""fascia""",
"""fascial""",
"""fasciitis""",
"""fascinated""",
"""fascinating""",
"""fasciocutaneous""",
"""fascism""",
"""fashion""",
"""fast""",
"""fast-growing""",
"""faster""",
"""fastest""",
"""fasting""",
"""fat""",
"""fatal""",
"""fatalities""",
"""fatality""",
"""fate""",
"""fates""",
"""father""",
"""fatigability""",
"""fatigue""",
"""fatness""",
"""fatty""",
"""fault""",
"""favor""",
"""favorable""",
"""favorably""",
"""favored""",
"""favoring""",
"""favorisait""",
"""favorisant""",
"""favour""",
"""favourable""",
"""favoured""",
"""fazekas""",
"""fbf""",
"""fbp1""",
"""fbs""",
"""fccp""",
"""fcg""",
"""fcm""",
"""fda""",
"""fda-approved""",
"""fe3""",
"""fear""",
"""feared""",
"""fearful""",
"""feasibility""",
"""feasible""",
"""feature""",
"""feature-based""",
"""featured""",
"""features""",
"""featuring""",
"""febrile""",
"""february""",
"""febs""",
"""fecal""",
"""fecal-oral""",
"""feces""",
"""fed""",
"""federal""",
"""federally""",
"""federation""",
"""feed""",
"""feedback""",
"""feeding""",
"""feeds""",
"""feel""",
"""feeling""",
"""fef25-75""",
"""felt""",
"""female""",
"""females""",
"""feminine""",
"""feminino""",
"""femoral""",
"""femoroplasty""",
"""femtosecond""",
"""femur""",
"""femurs""",
"""feno""",
"""fentanyl""",
"""fepsp""",
"""fermentation""",
"""ferric""",
"""ferritin""",
"""ferritin2""",
"""ferrocenyl""",
"""ferrocenyl-terminated""",
"""ferroptosis""",
"""ferroptosis-related""",
"""fertility""",
"""fertilization""",
"""ferum""",
"""fetal""",
"""fetus""",
"""fetuses""",
"""fev1""",
"""fever""",
"""few""",
"""fewer""",
"""ffls""",
"""ffp""",
"""ffpe""",
"""ffq""",
"""ffr""",
"""ffs""",
"""fgf4""",
"""fgfr""",
"""fgfr1-4""",
"""fgfr2""",
"""fgfr2mrna""",
"""fgfr3""",
"""fgfr3mrna""",
"""fiber""",
"""fiber-strengthening""",
"""fibers""",
"""fibril""",
"""fibrillar""",
"""fibrillarin""",
"""fibrillation""",
"""fibrils""",
"""fibrinogen""",
"""fibroblast""",
"""fibroblast-like""",
"""fibroblast-mediated""",
"""fibroblasts""",
"""fibrogenesis""",
"""fibroids""",
"""fibroma""",
"""fibromyalgia""",
"""fibronectin""",
"""fibrosis""",
"""fibrosis-related""",
"""fibrotic""",
"""fibrous""",
"""fidelity""",
"""fidelity-enforcing""",
"""field""",
"""field-based""",
"""field-far""",
"""field-testing""",
"""fieldnotes""",
"""fields""",
"""fieldwork""",
"""fifteen""",
"""fifth""",
"""fifty""",
"""fifty-five""",
"""fifty-four""",
"""fifty-six""",
"""fifty-three""",
"""fifty-two""",
"""fig""",
"""figaro""",
"""fight""",
"""fighting""",
"""figo""",
"""figure""",
"""figured""",
"""figures""",
"""filament""",
"""filaments""",
"""file""",
"""files""",
"""fill""",
"""filled""",
"""filling""",
"""film""",
"""films""",
"""filopodia""",
"""filter""",
"""filtered""",
"""filtering""",
"""filtration""",
"""final""",
"""finalizing""",
"""finally""",
"""financial""",
"""financially""",
"""financing""",
"""find""",
"""finding""",
"""findings""",
"""finds""",
"""fine""",
"""fine-grained""",
"""fine-needle""",
"""fine-tune""",
"""fine-tuned""",
"""finely""",
"""finger""",
"""fingernail""",
"""fingerprints""",
"""fingers""",
"""finishing""",
"""finland""",
"""finnish""",
"""fio""",
"""fire""",
"""firecracker-related""",
"""fires""",
"""firing""",
"""firm""",
"""firmly""",
"""firmness""",
"""firms""",
"""firre""",
"""first""",
"""first-born""",
"""first-generation""",
"""first-level""",
"""first-line""",
"""first-order""",
"""first-year""",
"""firstly""",
"""fis""",
"""fiscal""",
"""fischer""",
"""fish""",
"""fisher""",
"""fission""",
"""fission-yeast""",
"""fistula""",
"""fit""",
"""fit-dna""",
"""fitbit""",
"""fitc-dextran""",
"""fitness""",
"""fits""",
"""fitted""",
"""fitting""",
"""five""",
"""five-day""",
"""five-dimensional""",
"""five-week-long""",
"""five-year""",
"""fix""",
"""fixation""",
"""fixations""",
"""fixative""",
"""fixed""",
"""fixed-anchor""",
"""fixed-duration""",
"""fixed-speed""",
"""fizemos""",
"""flagella""",
"""flagellar""",
"""flagellin-only""",
"""flagellum""",
"""flank""",
"""flanked""",
"""flanking""",
"""flap""",
"""flaps""",
"""flare""",
"""flares""",
"""flash""",
"""flat""",
"""flavin-containing""",
"""flavine""",
"""flavivirus-endemic""",
"""flavonoids""",
"""flaws""",
"""flesh-colored""",
"""flexibility""",
"""flexible""",
"""flexion""",
"""flexor""",
"""flexors""",
"""flies""",
"""flim""",
"""flint""",
"""flip""",
"""flls""",
"""floods""",
"""floor""",
"""flooring""",
"""flooring-attributable""",
"""floorings""",
"""flora""",
"""floral""",
"""florence""",
"""florescence""",
"""florida""",
"""florigen""",
"""flow""",
"""flow-through""",
"""flower""",
"""flowering""",
"""flowering-time""",
"""flowrate""",
"""fls""",
"""flt3""",
"""flt3-itd""",
"""flt3-itd-driven""",
"""flt3-itd-induced""",
"""flualprazolam""",
"""fluctuate""",
"""fluctuating""",
"""fluctuation""",
"""fluctuations""",
"""fluence""",
"""fluency""",
"""fluid""",
"""fluidity""",
"""fluids""",
"""fluorescein""",
"""fluorescence""",
"""fluorescent""",
"""fluorimetry""",
"""fluorinated""",
"""fluorine""",
"""fluorodeoxyglucose""",
"""fluorophore-labelled""",
"""fluorophores""",
"""fluoroquinolone""",
"""fluorous""",
"""flutter""",
"""flux""",
"""fluxes""",
"""flw""",
"""fly""",
"""fmd""",
"""fmri""",
"""fms""",
"""fms-like""",
"""fnac""",
"""fnhct""",
"""fnhcts""",
"""foam""",
"""fobt""",
"""focal""",
"""foci""",
"""focus""",
"""focused""",
"""focuses""",
"""focusing""",
"""focussed""",
"""focusses""",
"""foetal""",
"""foetus""",
"""foi""",
"""foil""",
"""fois""",
"""fold""",
"""folders""",
"""folding""",
"""folds""",
"""foldx""",
"""foliar""",
"""follicle""",
"""follicles""",
"""follicular""",
"""follow""",
"""follow-up""",
"""followed""",
"""followed-up""",
"""followers""",
"""following""",
"""follows""",
"""fomites""",
"""fontanelle""",
"""food""",
"""food-related""",
"""foodlit-pro""",
"""foodlit-tool""",
"""foods""",
"""foot""",
"""foot-head""",
"""for""",
"""for-profit""",
"""foraging""",
"""foram""",
"""foramen""",
"""force""",
"""forced""",
"""forceful""",
"""forces""",
"""forearm""",
"""forecast""",
"""forecasting""",
"""forefront""",
"""forehead""",
"""foreign""",
"""foreign-accented""",
"""forensic""",
"""forest""",
"""foresters""",
"""forestland""",
"""forestry""",
"""forests""",
"""fork""",
"""forks""",
"""form""",
"""formal""",
"""formalin""",
"""formalin-fixed""",
"""formally""",
"""formant""",
"""formant-based""",
"""formants""",
"""format""",
"""formation""",
"""formative""",
"""formats""",
"""formed""",
"""former""",
"""formidable""",
"""forming""",
"""forms""",
"""formula""",
"""formulated""",
"""formulation""",
"""formulations""",
"""formylation""",
"""forthcoming""",
"""fortunately""",
"""forty""",
"""forty-one""",
"""forty-seven""",
"""forty-six""",
"""forty-three""",
"""forty-two""",
"""forum""",
"""forums""",
"""forward""",
"""forwarded""",
"""forwards""",
"""fossa""",
"""foster""",
"""fostered""",
"""fostering""",
"""found""",
"""foundation""",
"""foundational""",
"""founded""",
"""four""",
"""four-cell""",
"""four-dose""",
"""four-item""",
"""four-level""",
"""four-stranded""",
"""four-year""",
"""fournier""",
"""fourteen""",
"""fourth""",
"""fourth-year""",
"""fox""",
"""foxa1""",
"""foxo1""",
"""foxo3a""",
"""foxp3""",
"""fpa""",
"""fpl""",
"""fqhc""",
"""fraco""",
"""fraction""",
"""fractional""",
"""fractionation""",
"""fracture""",
"""fractured""",
"""fractures""",
"""fragile""",
"""fragment""",
"""fragmentation""",
"""fragmented""",
"""fragments""",
"""frame""",
"""framed""",
"""frames""",
"""frameshift""",
"""framework""",
"""frameworks""",
"""framing""",
"""framingham""",
"""framings""",
"""france""",
"""fraud""",
"""frc""",
"""free""",
"""free-living""",
"""freed""",
"""freely""",
"""freezers""",
"""freezing""",
"""freiburg""",
"""french""",
"""frequencies""",
"""frequency""",
"""frequency-matched""",
"""frequent""",
"""frequentist""",
"""frequently""",
"""fresh""",
"""fresh-frozen""",
"""freshmen""",
"""frey""",
"""fri""",
"""friedman""",
"""friendly""",
"""friends""",
"""from""",
"""front""",
"""frontal""",
"""frontier""",
"""frontline""",
"""frontoparietal""",
"""frontotemporal""",
"""frozen""",
"""frrs1l""",
"""fructose""",
"""fruit""",
"""fruitfully""",
"""fruiting""",
"""fruits""",
"""frustrating""",
"""fst""",
"""fsta""",
"""ft4""",
"""ftr""",
"""fuchs""",
"""fuel""",
"""fujian""",
"""fukuoka""",
"""fulfil""",
"""fulfill""",
"""fulfilled""",
"""fulfilling""",
"""full""",
"""full-adjusted""",
"""full-length""",
"""full-night""",
"""full-scale""",
"""full-siblings""",
"""full-term""",
"""full-text""",
"""full-time""",
"""fully""",
"""fully-adjusted""",
"""fully-differentiated""",
"""fulminant""",
"""fulvic""",
"""fun""",
"""function""",
"""function-induced""",
"""function-related""",
"""functional""",
"""functionality""",
"""functionalization""",
"""functionalizations""",
"""functionally""",
"""functioned""",
"""functioning""",
"""functions""",
"""fund""",
"""fundamental""",
"""funded""",
"""funding""",
"""funds""",
"""fundus""",
"""fungal""",
"""fungicide""",
"""fungus""",
"""fur""",
"""furan""",
"""furoate""",
"""further""",
"""furthermore""",
"""furthest""",
"""fus""",
"""fus-targeted""",
"""fused""",
"""fusiform""",
"""fusin""",
"""fusion""",
"""fusions""",
"""fusobacterium""",
"""future""",
"""fuzzy""",
"""fvc""",
"""fxi""",
"""fxii""",
"""g-bp""",
"""g-quadruplex""",
"""g163""",
"""g26r""",
"""g2m""",
"""gaas""",
"""gaba""",
"""gabapentin""",
"""gacs""",
"""gadd45a""",
"""gadolinium""",
"""gain""",
"""gain-""",
"""gain-frequency""",
"""gain-of-function""",
"""gained""",
"""gaining""",
"""gains""",
"""gait""",
"""gal-3""",
"""galactis""",
"""galactorrhoea""",
"""galectin-3""",
"""galidesivir""",
"""galileo""",
"""gallbladder""",
"""gamble""",
"""game""",
"""gamete""",
"""gamma""",
"""gamma-glutamyl""",
"""ganglia""",
"""ganglion""",
"""ganglionopathy""",
"""gangrene""",
"""ganja""",
"""gantry""",
"""gao""",
"""gap""",
"""gap43""",
"""gaps""",
"""garch""",
"""garlanda""",
"""garment""",
"""garments""",
"""garnered""",
"""garp""",
"""gas""",
"""gas-solid""",
"""gases""",
"""gasotransmitter""",
"""gastrectomy""",
"""gastric""",
"""gastro-esophageal""",
"""gastro-oesophageal""",
"""gastroenteritis""",
"""gastroenterologist""",
"""gastrointestinal""",
"""gastroretentive""",
"""gastroschisis""",
"""gastrula""",
"""gastrulation""",
"""gata3""",
"""gatekeepers""",
"""gather""",
"""gathered""",
"""gauge""",
"""gauged""",
"""gauze""",
"""gave""",
"""gay""",
"""gaze""",
"""gbm""",
"""gbm-organoids""",
"""gbms""",
"""gbs""",
"""gc-ecd""",
"""gc-ms""",
"""gca""",
"""gcc""",
"""gci""",
"""gck""",
"""gck-mody""",
"""gcl""",
"""gcn2""",
"""gcns""",
"""gcs""",
"""gdm""",
"""gdmt""",
"""geal""",
"""gel""",
"""gelatin""",
"""gels""",
"""gender""",
"""gender-""",
"""gender-adjusted""",
"""gender-based""",
"""gender-matched""",
"""genderless""",
"""genders""",
"""gene""",
"""gene-1193c""",
"""gene-184t""",
"""gene-based""",
"""gene-editing""",
"""gene-regulatory""",
"""gene-specific""",
"""genera""",
"""general""",
"""generalisability""",
"""generalizability""",
"""generalizable""",
"""generalized""",
"""generally""",
"""generate""",
"""generated""",
"""generates""",
"""generating""",
"""generation""",
"""generational""",
"""generations""",
"""generic""",
"""genes""",
"""genesis""",
"""genetic""",
"""genetically""",
"""geneticists""",
"""genetics""",
"""genital""",
"""genitourinary""",
"""genome""",
"""genome-edited""",
"""genome-wide""",
"""genomes""",
"""genomic""",
"""genomics""",
"""genotoxic""",
"""genotoxicity""",
"""genotype""",
"""genotype-phenotype""",
"""genotyped""",
"""genotypes""",
"""genotypic""",
"""genotyping""",
"""genu""",
"""genus""",
"""geocoded""",
"""geographic""",
"""geographical""",
"""geographically""",
"""geometric""",
"""geometries""",
"""geometry""",
"""georgia-a""",
"""geospatial""",
"""geriatric""",
"""germ""",
"""germania""",
"""germany""",
"""germinal""",
"""germline""",
"""ges""",
"""gestation""",
"""gestational""",
"""gestion""",
"""gesture""",
"""gestures""",
"""get""",
"""getting""",
"""gfi""",
"""gfp""",
"""gfp-bound""",
"""gfp-mediated""",
"""gfp-tagged""",
"""gfr""",
"""gg4""",
"""gg5""",
"""gh-cna""",
"""gh-cnas""",
"""ghd""",
"""ghd7""",
"""ghent""",
"""ghs""",
"""giant""",
"""gibbs""",
"""giganteum""",
"""gill""",
"""gills""",
"""gingiva""",
"""gingival""",
"""ginglymostoma""",
"""ginkgo""",
"""girl""",
"""girlfriends""",
"""girls""",
"""gis""",
"""githago""",
"""give""",
"""given""",
"""gives""",
"""giving""",
"""gland""",
"""glands""",
"""glans""",
"""glargine-u100""",
"""glasgow""",
"""glassy""",
"""glaucoma""",
"""glaucomatous""",
"""gleaning""",
"""gleason""",
"""glia""",
"""glial""",
"""glial-specific""",
"""glioblastoma""",
"""glioblastomas""",
"""gliogenesis""",
"""glioma""",
"""gliomas""",
"""gliptin-associated""",
"""gliptins""",
"""global""",
"""globalization""",
"""globally""",
"""globe""",
"""globes""",
"""globular""",
"""globules""",
"""globulin""",
"""glomerular""",
"""glomerulonephritis""",
"""glomerulosa""",
"""glottal""",
"""glottic""",
"""glottis""",
"""glove""",
"""glucocorticoid""",
"""glucocorticoids""",
"""glucose""",
"""glucose-lowering""",
"""glun1""",
"""glutamate""",
"""glutamatergic""",
"""glutathione""",
"""gluten""",
"""glycan""",
"""glycans""",
"""glycated""",
"""glycemia""",
"""glycine""",
"""glycocalyx""",
"""glycol""",
"""glycol-protected""",
"""glycolipophosphoproteins""",
"""glycolysis""",
"""glycolytic""",
"""glycoprotein""",
"""glycosylated""",
"""glycosylation""",
"""glycosyltransferase""",
"""gmbh""",
"""gmc""",
"""gmp""",
"""gnas""",
"""gnomad""",
"""gnrh""",
"""gnrha""",
"""goal""",
"""goal-setting""",
"""goals""",
"""goals-of-care""",
"""goat""",
"""god""",
"""gof""",
"""going""",
"""gojjam""",
"""gold""",
"""goldmann""",
"""golestan""",
"""golgi""",
"""gonadotropins""",
"""gone""",
"""gong""",
"""gonorrhoea""",
"""gonzalvez""",
"""good""",
"""google""",
"""gose""",
"""gose2""",
"""gose3""",
"""gose8""",
"""got""",
"""govern""",
"""governance""",
"""governing""",
"""government""",
"""governmental""",
"""governments""",
"""gpcrs""",
"""gps""",
"""gpus""",
"""gpx4""",
"""gqyt""",
"""gracilicutes""",
"""grade""",
"""graded""",
"""grader""",
"""graders""",
"""grades""",
"""gradient""",
"""gradients""",
"""grading""",
"""gradual""",
"""gradually""",
"""graduate""",
"""graft""",
"""graft-versus-host""",
"""grafting""",
"""grafts""",
"""grain""",
"""gram-negative""",
"""grammar""",
"""grande""",
"""granular""",
"""granulate""",
"""granule""",
"""granulocytes""",
"""granulocytic""",
"""granulomatous""",
"""granulosa""",
"""graph""",
"""graph-theoretical""",
"""graphene""",
"""graphic""",
"""graphical""",
"""graphite""",
"""gras""",
"""grasp""",
"""grasping""",
"""grassland""",
"""grasslands""",
"""grassroots""",
"""graves""",
"""gravid""",
"""gravida""",
"""gravis""",
"""gravitational""",
"""gravity""",
"""gray""",
"""grazing""",
"""grbas""",
"""grbash""",
"""great""",
"""greater""",
"""greatest""",
"""greatly""",
"""greco-roman""",
"""greek""",
"""green""",
"""green-reflectance""",
"""greenstick""",
"""gregg""",
"""gregoryi""",
"""grew""",
"""grey""",
"""griess""",
"""grimage""",
"""grimeaa""",
"""grip""",
"""grison""",
"""grocott""",
"""groin""",
"""groinss-v""",
"""groinss-v-ii""",
"""groningen""",
"""groomer""",
"""groomers""",
"""grooming""",
"""groove""",
"""gross""",
"""grossly""",
"""ground""",
"""grounded""",
"""group""",
"""group-""",
"""group-based""",
"""group-ica""",
"""grouped""",
"""grouping""",
"""groups""",
"""growing""",
"""grown""",
"""growth""",
"""grupo""",
"""grupos""",
"""gsc""",
"""gscs""",
"""gse14520""",
"""gsea""",
"""gsh""",
"""gshs""",
"""gsl""",
"""gsr""",
"""gt""",
"""guadalupe""",
"""guaiac""",
"""guanine""",
"""guanosine""",
"""guarantee""",
"""guardian""",
"""guardians""",
"""guatemala""",
"""guatemalan""",
"""guessed""",
"""guidance""",
"""guide""",
"""guided""",
"""guideline""",
"""guideline-directed""",
"""guideline-specific""",
"""guidelines""",
"""guides""",
"""guiding""",
"""guild""",
"""guillain""",
"""guinea""",
"""gulf""",
"""guo""",
"""gustatory""",
"""gut""",
"""gut-liver""",
"""gvhd""",
"""gvhd-free""",
"""gw-related""",
"""gwas""",
"""gwass""",
"""gwi""",
"""gymnophiona""",
"""gynaecological""",
"""gynaecology""",
"""gynecologic""",
"""gynecological""",
"""gynecologists""",
"""gynecology""",
"""gyra""",
"""gyrus""",
"""h2b""",
"""h2o""",
"""h2o2""",
"""h2s""",
"""h2s-generating""",
"""h3k18ac""",
"""h3k56ac""",
"""h3k79""",
"""h9c2""",
"""habit""",
"""habitable""",
"""habitat""",
"""habitats""",
"""habits""",
"""habitually""",
"""habnc""",
"""hacettepe""",
"""had""",
"""haematological""",
"""haematopoietic-cell""",
"""haematoxylin""",
"""haemefs""",
"""haemoglobin""",
"""haemorrhage""",
"""hai""",
"""haifeng""",
"""haiming""",
"""hair""",
"""hairpin""",
"""hairs""",
"""hairy""",
"""haiwang""",
"""halarachne""",
"""halarachnid""",
"""halarachnidae""",
"""half""",
"""half-century""",
"""half-siblings""",
"""halichoeri""",
"""halides""",
"""hallmark""",
"""halls""",
"""hallucinogen""",
"""hallucinogens""",
"""halophilic""",
"""halos""",
"""halt""",
"""halted""",
"""halved""",
"""ham-d17""",
"""ham-d6""",
"""hamartoma""",
"""hamburger""",
"""hamilton""",
"""hamper""",
"""hampers""",
"""han""",
"""hand""",
"""hand-sewn""",
"""hand-tool""",
"""handbook""",
"""handful""",
"""handheld""",
"""handicap""",
"""handle""",
"""handles""",
"""handling""",
"""handrail""",
"""hands""",
"""handsearched""",
"""handsearches""",
"""handsearching""",
"""hang""",
"""hanna-attisha""",
"""hannah-attisha""",
"""haochuan""",
"""haploidentical""",
"""haploidisation-type""",
"""haplotype""",
"""haplotypic""",
"""happened""",
"""happens""",
"""happy""",
"""harbor""",
"""harbored""",
"""harbors""",
"""hard""",
"""hardest""",
"""hardness""",
"""hardware""",
"""hare""",
"""harm""",
"""harm-benefit""",
"""harmful""",
"""harming""",
"""harmless""",
"""harmonic""",
"""harmonics""",
"""harms""",
"""harness""",
"""harnessed""",
"""harris""",
"""harris-benedict""",
"""harsh""",
"""harvest""",
"""harvested""",
"""harvesting""",
"""harvey""",
"""has""",
"""hash""",
"""hashimoto""",
"""hasls""",
"""hassles""",
"""hasson""",
"""hasta""",
"""hastening""",
"""hastens""",
"""hatching""",
"""have""",
"""having""",
"""hawaii""",
"""hazard""",
"""hazardous""",
"""hazards""",
"""haze""",
"""hb-4""",
"""hba1c""",
"""hbeag""",
"""hbeag-positive""",
"""hbi""",
"""hbp""",
"""hbsag""",
"""hbt""",
"""hbv""",
"""hcc""",
"""hcc-related""",
"""hccs""",
"""hcid""",
"""hck""",
"""hcl""",
"""hcov""",
"""hcovs-severe""",
"""hcps""",
"""hcs""",
"""hct""",
"""hcws""",
"""hd1""",
"""hd3a""",
"""hda6""",
"""hdac3""",
"""hdl""",
"""hdl-c""",
"""hdp""",
"""hdr""",
"""hds""",
"""hds-induced""",
"""hdss""",
"""head""",
"""head-on""",
"""head-to-head""",
"""headache""",
"""headaches""",
"""healed""",
"""healing""",
"""health""",
"""health-""",
"""health-an""",
"""health-related""",
"""healthcare""",
"""healthspan""",
"""healthy""",
"""hearing""",
"""hearing-impaired""",
"""hearing-impairment""",
"""heart""",
"""heat""",
"""heated""",
"""heatshock""",
"""heavily""",
"""heavy""",
"""heavy-cac""",
"""heavy-load""",
"""hect""",
"""hedgehog""",
"""heidelberg""",
"""height""",
"""heightened""",
"""heights""",
"""hela""",
"""held""",
"""helically""",
"""helicase""",
"""helicobacter""",
"""helix-turn-helix""",
"""help""",
"""help-seeking""",
"""helped""",
"""helper""",
"""helpful""",
"""helping""",
"""helplessness""",
"""helps""",
"""hema-topoietic""",
"""hemagglutinin""",
"""hemato-oncologic""",
"""hematocrit""",
"""hematologic""",
"""hematological""",
"""hematoma""",
"""hematomas""",
"""hematopoietic""",
"""hematoxylin""",
"""hemi-contusion""",
"""hemidesmosomal""",
"""hemispheres""",
"""hemispherical""",
"""hemodialysis""",
"""hemodynamic""",
"""hemodynamic-malformation""",
"""hemodynamics""",
"""hemoglobin""",
"""hemoglobinopathies""",
"""hemoglobinopathy""",
"""hemolymph""",
"""hemolysis""",
"""hemolytic""",
"""hemoperitoneum""",
"""hemophilia""",
"""hemoptysis""",
"""hemorrhage""",
"""hemorrhages""",
"""hemorrhagic""",
"""hemostasis""",
"""hemostatic""",
"""hence""",
"""hep3b""",
"""heparin""",
"""heparin-free""",
"""hepatectomy""",
"""hepatic""",
"""hepatitic""",
"""hepatitis""",
"""hepato-biliary""",
"""hepatobiliary-pancreatic""",
"""hepatocellular""",
"""hepatocyte-specific""",
"""hepatocytes""",
"""hepatological""",
"""hepatologist""",
"""hepatologists""",
"""hepatopancreaticobiliary""",
"""hepatorenal""",
"""hepatotoxicity""",
"""hepg2""",
"""her""",
"""her2""",
"""herald""",
"""heralding""",
"""herbal""",
"""herbicides""",
"""herbivory""",
"""herbs""",
"""herd""",
"""here""",
"""hereditary""",
"""herein""",
"""heritability""",
"""heritable""",
"""hernia""",
"""hernias""",
"""herniated""",
"""herniation""",
"""heroin""",
"""herpes""",
"""herpesvirus""",
"""herpetiformis""",
"""hesitancy""",
"""hesitant""",
"""heteroatomic""",
"""heterochromatin""",
"""heteroduplex""",
"""heterogeneity""",
"""heterogeneous""",
"""heterogenous""",
"""heterologous""",
"""heterophilic""",
"""heterosexual""",
"""heterotrimeric""",
"""heterozygosis""",
"""heterozygous""",
"""heuristic""",
"""hexacyanide""",
"""hfa""",
"""hfcd""",
"""hfcwo""",
"""hfd""",
"""hfls""",
"""hfpef""",
"""hfref""",
"""hfte""",
"""hgd""",
"""hgsc""",
"""hh29""",
"""hhas""",
"""hhd""",
"""hhpred""",
"""hhqrp""",
"""hht""",
"""hhv8""",
"""hidden""",
"""hidradenitis""",
"""hidroacanthoma""",
"""hids""",
"""hie""",
"""hierarchical""",
"""hierarchies""",
"""hierarchy""",
"""high""",
"""high-acuity""",
"""high-certainty""",
"""high-consequence""",
"""high-content""",
"""high-copy-number""",
"""high-deductible""",
"""high-density""",
"""high-detecting""",
"""high-dose""",
"""high-energy""",
"""high-entropy""",
"""high-familial""",
"""high-fat""",
"""high-frequency""",
"""high-grade""",
"""high-gradient""",
"""high-income""",
"""high-intensity""",
"""high-level""",
"""high-molecular-weight""",
"""high-oxidation""",
"""high-performance""",
"""high-profile""",
"""high-quality""",
"""high-resolution""",
"""high-risk""",
"""high-spatial-resolution""",
"""high-speed""",
"""high-strength""",
"""high-sucrose""",
"""high-temperature""",
"""high-throughput""",
"""high-titer""",
"""high-value""",
"""high-volume""",
"""higher""",
"""higher-dose""",
"""higher-income""",
"""higher-powered""",
"""higher-quality""",
"""higher-than-expected""",
"""highest""",
"""highest-quality""",
"""highest-ranking""",
"""highlight""",
"""highlighted""",
"""highlighting""",
"""highlights""",
"""highly""",
"""highway""",
"""hilar""",
"""hill-climbing""",
"""hind""",
"""hinder""",
"""hindered""",
"""hindustani""",
"""hinges""",
"""hip""",
"""hipaa""",
"""hipaa-compliant""",
"""hipaa-noncompliant""",
"""hipec""",
"""hipertensos""",
"""hippa""",
"""hippocampal""",
"""hippocampus""",
"""hips""",
"""hirsutism""",
"""his""",
"""hispanic""",
"""histiocytoma""",
"""histocompatibility""",
"""histologic""",
"""histological""",
"""histologically""",
"""histology""",
"""histomorphometric""",
"""histone""",
"""histone-involved""",
"""histones""",
"""histopathologic""",
"""histopathological""",
"""histopathologically""",
"""histopathology""",
"""histoplasmosis""",
"""historic""",
"""historical""",
"""historically""",
"""histories""",
"""history""",
"""histotype""",
"""hit""",
"""hitch""",
"""hits""",
"""hiv""",
"""hiv-1""",
"""hiv-related""",
"""hivd""",
"""hla""",
"""hla-b27""",
"""hla-dq2""",
"""hla-dr-presented""",
"""hla-drb1""",
"""hla-matched""",
"""hld""",
"""hld-reprocessed""",
"""hli""",
"""hllw""",
"""hmg-coa""",
"""hmga2""",
"""hmmer""",
"""hmnc""",
"""hmt""",
"""hnc""",
"""hncs""",
"""hno3""",
"""hnscc""",
"""hoc""",
"""hodgkin""",
"""hoffmann-la""",
"""hold""",
"""holds""",
"""holes""",
"""holidays""",
"""holoenzyme""",
"""hologenome""",
"""home""",
"""home-based""",
"""home-prepared""",
"""home-to-home""",
"""homelessness""",
"""homeobox""",
"""homeostasis""",
"""homeostatic""",
"""homes""",
"""homicide""",
"""homme""",
"""homo-tetramer""",
"""homochirality""",
"""homogenates""",
"""homogeneity""",
"""homogeneous""",
"""homogenise""",
"""homogenous""",
"""homolog""",
"""homologous""",
"""homologs""",
"""homologue""",
"""homology""",
"""homology-based""",
"""homopolymers""",
"""homosexual""",
"""homozygous""",
"""hone""",
"""honey""",
"""hongliang""",
"""hope""",
"""hopg""",
"""hoping""",
"""hopkins""",
"""horizon""",
"""horizontal""",
"""horizontally""",
"""hormonal""",
"""hormone""",
"""hormone-binding""",
"""hormone-producing""",
"""hormone-sensitive""",
"""hormones""",
"""horse""",
"""hosmer-lemeshow""",
"""hospitais""",
"""hospital""",
"""hospital-based""",
"""hospital-level""",
"""hospitalar""",
"""hospitalares""",
"""hospitalisation""",
"""hospitalisations""",
"""hospitalised""",
"""hospitalization""",
"""hospitalizations""",
"""hospitalized""",
"""hospitals""",
"""host""",
"""host-associated""",
"""host-encoded""",
"""host-mite""",
"""hostile""",
"""hosts""",
"""hot""",
"""hot-spot""",
"""hotline""",
"""hotlines""",
"""hotspot""",
"""hotspots""",
"""hours""",
"""house""",
"""house-dust-mite-specific""",
"""housebound""",
"""household""",
"""households""",
"""houses""",
"""housing""",
"""how""",
"""however""",
"""hox""",
"""hoxa10""",
"""hoxa9""",
"""hpa""",
"""hpde6-c7""",
"""hpg""",
"""hplc""",
"""hplc-uv""",
"""hpmc""",
"""hpv""",
"""hpv-associated""",
"""hpv-induced""",
"""hpv-negative""",
"""hpv-positive""",
"""hpvod""",
"""hr-mediated""",
"""hras""",
"""hrm""",
"""hrp""",
"""hrql""",
"""hrs""",
"""hsa-mir-361-3p""",
"""hsa-mir-3907""",
"""hsa-mir-4257""",
"""hsa-mir-4788""",
"""hsa-mir-5001-5p""",
"""hsa-mir-501-5p""",
"""hsa-mir-6732-3p""",
"""hsa-mir-767-5p""",
"""hsc""",
"""hscl-25""",
"""hscs""",
"""hsct""",
"""hsct-related""",
"""hsp70""",
"""hspc""",
"""hspcs""",
"""hst""",
"""hte""",
"""hth""",
"""htn""",
"""hts""",
"""htt""",
"""https""",
"""hub""",
"""hubei""",
"""hudson""",
"""huge""",
"""hum00169949""",
"""humain""",
"""humaines""",
"""human""",
"""human-associated""",
"""human-computer""",
"""human-gastrointestinal""",
"""human-made""",
"""human-mobilized""",
"""human-specific""",
"""humanities""",
"""humans""",
"""humby""",
"""humic""",
"""humidified""",
"""humidity""",
"""humoral""",
"""humors""",
"""humphrey""",
"""humsafar""",
"""hundred""",
"""hundreds""",
"""hungary""",
"""hunger""",
"""hunter""",
"""hunting""",
"""huntington""",
"""hurdles""",
"""hush""",
"""huvec""",
"""hyalinizing""",
"""hyaluronic""",
"""hybrid""",
"""hybridization""",
"""hybrids""",
"""hydrating""",
"""hydration""",
"""hydraulic""",
"""hydrocephalus""",
"""hydrochloride""",
"""hydrofit""",
"""hydrogels""",
"""hydrogen""",
"""hydromorphone""",
"""hydronephrosis""",
"""hydrophilic""",
"""hydrophobic""",
"""hydroxocobalamin""",
"""hydroxyl""",
"""hydroxylated""",
"""hydroxymethyl""",
"""hydroxypropyl""",
"""hygiene""",
"""hygienic""",
"""hyou1""",
"""hyper-dmgs""",
"""hyper-low-density""",
"""hyper-oxygenation""",
"""hyperactivity""",
"""hyperaemia""",
"""hyperaggregable""",
"""hyperalgesia""",
"""hyperandrogenism""",
"""hypercalcaemia""",
"""hypercalciuria""",
"""hypercellular""",
"""hypercontractile""",
"""hyperdivergent""",
"""hyperemia-dependent""",
"""hyperexcitability""",
"""hyperglycemia""",
"""hypergonadotropic""",
"""hyperhidrosis""",
"""hyperinfectivity""",
"""hyperinflammatory""",
"""hyperintensities""",
"""hyperkalaemia""",
"""hyperkalemia""",
"""hyperlipidemia""",
"""hypermetabolic""",
"""hypermetabolism""",
"""hypermethylated""",
"""hypermethylation""",
"""hyperpermeability""",
"""hyperphosphataemia""",
"""hyperphosphorylated""",
"""hyperphosphorylation""",
"""hyperpigmented""",
"""hyperplasia""",
"""hyperplastic""",
"""hyperpolarized""",
"""hyperreflective""",
"""hypersensitivity""",
"""hypersomnia""",
"""hypersonic""",
"""hypertension""",
"""hypertension-related""",
"""hypertensive""",
"""hyperthermic""",
"""hyperthyroid""",
"""hyperthyroidism""",
"""hyperthyroxinemia""",
"""hypertrophic""",
"""hypertrophy""",
"""hypervascularized""",
"""hypervelocity""",
"""hyphae""",
"""hypo-""",
"""hypo-dmgs""",
"""hypo-hormonal""",
"""hypoalbuminemia""",
"""hypoallergenic""",
"""hypocalcemia""",
"""hypocretin""",
"""hypoesthesia""",
"""hypogammaglobulinemia""",
"""hypogastric""",
"""hypoglycemia""",
"""hypogonadism""",
"""hypogonadotrophic""",
"""hypoinflammatory""",
"""hypometabolic""",
"""hypometabolism""",
"""hypomethylated""",
"""hypomethylating""",
"""hypomethylation""",
"""hypomyelination""",
"""hyponatremia""",
"""hypoparathyroid""",
"""hypoparathyroidism""",
"""hypoperfusion""",
"""hypopharynx""",
"""hypophysis""",
"""hypopnea""",
"""hyporesponsive""",
"""hypospadias""",
"""hypotension""",
"""hypothalamic-pituitary""",
"""hypothalamic-pituitary-gonadal""",
"""hypothalamus-pituitary-adrenal""",
"""hypothermia""",
"""hypothermic""",
"""hypotheses""",
"""hypothesis""",
"""hypothesise""",
"""hypothesize""",
"""hypothesized""",
"""hypothetical""",
"""hypotonia""",
"""hypotony""",
"""hypoxemia""",
"""hypoxia""",
"""hypoxia-triggered""",
"""hypoxic""",
"""hysterectomy""",
"""hysteroscopic""",
"""hysteroscopy""",
"""i-6""",
"""i-bp""",
"""i-dna""",
"""i-f3""",
"""i-ii""",
"""i-ionm""",
"""i-motif""",
"""i-square""",
"""iai""",
"""iamcsst""",
"""iatrogenic""",
"""ibd""",
"""ibm""",
"""ibrutinib""",
"""ibrutinib-""",
"""ibrutinib-resistant""",
"""ibs""",
"""ibuprofen""",
"""ic95""",
"""ica""",
"""icans""",
"""icc""",
"""icd""",
"""icd-10""",
"""icd-10-cm""",
"""icd-9""",
"""icd-9-cm""",
"""ice""",
"""ich""",
"""ici""",
"""icis""",
"""icosapent""",
"""ics""",
"""icsi""",
"""ictrp""",
"""icu""",
"""icus""",
"""idade""",
"""idc""",
"""idcr""",
"""idd""",
"""idea""",
"""ideal""",
"""idealized""",
"""ideally""",
"""ideas""",
"""idecabtagene""",
"""identical""",
"""identifiable""",
"""identification""",
"""identified""",
"""identifier""",
"""identifiers""",
"""identifies""",
"""identify""",
"""identifying""",
"""identity""",
"""identity-based""",
"""identity-discrimination""",
"""idfs""",
"""idh1""",
"""idh1-wild-type""",
"""idioma""",
"""idiopathic""",
"""idiosyncratic""",
"""ido1""",
"""ids""",
"""idx-184""",
"""ifcc""",
"""ifl""",
"""ifn""",
"""ifn-stimulated""",
"""iga""",
"""ige""",
"""igf""",
"""igf-1""",
"""igf-i""",
"""igg""",
"""igh""",
"""igk""",
"""igm""",
"""ignoring""",
"""ih-dependent""",
"""ih10""",
"""ihc""",
"""ihd""",
"""ii-iv""",
"""iii""",
"""iii-iv""",
"""iii-v""",
"""ikdc""",
"""ikt""",
"""il-10""",
"""il-12p40""",
"""il-15""",
"""il-17""",
"""il-17-mediated""",
"""il-17a""",
"""il-17i""",
"""il-17r""",
"""il-17ra""",
"""il-17rc""",
"""il-18""",
"""il-18-driven""",
"""il-18-treated""",
"""il-2-inducible""",
"""il-23i""",
"""il-6""",
"""il-6r""",
"""il-9""",
"""ileal""",
"""iliac""",
"""iliaca""",
"""ilibrary""",
"""ill""",
"""ill-defined""",
"""illegal""",
"""illicit""",
"""illinois""",
"""illness""",
"""illnesses""",
"""illuminated""",
"""illuminates""",
"""illuminating""",
"""illumination""",
"""illustrate""",
"""illustrated""",
"""illustrates""",
"""illustrating""",
"""illustrations""",
"""ils""",
"""im20""",
"""im60""",
"""ima""",
"""ima-p""",
"""ima-r""",
"""image""",
"""image-based""",
"""imaged""",
"""imagej""",
"""images""",
"""imagined""",
"""imaging""",
"""imagining""",
"""imbalance""",
"""imbalances""",
"""imc""",
"""imiquimod""",
"""imitated""",
"""imitating""",
"""imitation""",
"""immature""",
"""immediate""",
"""immediately""",
"""immigrants""",
"""imminent""",
"""immobility""",
"""immobilization""",
"""immune""",
"""immune-based""",
"""immune-inflammation""",
"""immune-mediated""",
"""immune-related""",
"""immune-system-related""",
"""immunisation""",
"""immunity""",
"""immunization""",
"""immunized""",
"""immuno-oncology""",
"""immunoabsorption""",
"""immunoassay""",
"""immunoassays""",
"""immunobiology""",
"""immunoblotting""",
"""immunochemical""",
"""immunocompetence""",
"""immunocompetent""",
"""immunocytochemical""",
"""immunodeficiency""",
"""immunodeficient""",
"""immunodepletion""",
"""immunofluorescence""",
"""immunogenic""",
"""immunogenicity""",
"""immunogenomic""",
"""immunoglobulin""",
"""immunoglobulin-like""",
"""immunohistochemical""",
"""immunohistochemistry""",
"""immunoinformatic""",
"""immunologic""",
"""immunological""",
"""immunologists""",
"""immunomodulation""",
"""immunomodulatory""",
"""immunopathological""",
"""immunopathology""",
"""immunoperoxidase""",
"""immunophenotyping""",
"""immunoprecipitation""",
"""immunoprecipitation-sequencing""",
"""immunoreactive""",
"""immunoregulatory""",
"""immunosorbent""",
"""immunostaining""",
"""immunostimulatory""",
"""immunosuppressant""",
"""immunosuppressants""",
"""immunosuppressed""",
"""immunosuppression""",
"""immunosuppressive""",
"""immunotherapies""",
"""immunotherapy""",
"""immunotherapy-treated""",
"""immunotolerant""",
"""impact""",
"""impacted""",
"""impactful""",
"""impacting""",
"""impacts""",
"""impair""",
"""impaired""",
"""impairing""",
"""impairment""",
"""impairments""",
"""impairs""",
"""impart""",
"""impassable""",
"""impediments""",
"""impeding""",
"""impending""",
"""imperative""",
"""imperfect""",
"""imperiled""",
"""implant""",
"""implantable""",
"""implantation""",
"""implanted""",
"""implants""",
"""implement""",
"""implementation""",
"""implemented""",
"""implementers""",
"""implementing""",
"""implicate""",
"""implicated""",
"""implicating""",
"""implication""",
"""implications""",
"""implicit""",
"""implied""",
"""implies""",
"""imply""",
"""implying""",
"""import""",
"""import2""",
"""importance""",
"""important""",
"""importantly""",
"""imported""",
"""importing""",
"""impose""",
"""imposed""",
"""imposing""",
"""imprecision""",
"""impression""",
"""impression-improvement""",
"""imprint""",
"""imprinted""",
"""imprints""",
"""improper""",
"""improve""",
"""improved""",
"""improvement""",
"""improvements""",
"""improves""",
"""improving""",
"""impthe""",
"""impulse""",
"""impulsivity""",
"""impurity""",
"""imputation""",
"""impute""",
"""imsi""",
"""in""",
"""in-bag""",
"""in-center""",
"""in-depth""",
"""in-frame""",
"""in-hospital""",
"""in-house""",
"""in-office""",
"""in-person""",
"""in-phase""",
"""in-process""",
"""in-silico""",
"""in-situ""",
"""in-vitro""",
"""in-vivo""",
"""inability""",
"""inaccessible""",
"""inaccurate""",
"""inactivated""",
"""inactivating""",
"""inactivation""",
"""inactive""",
"""inactivity""",
"""inadequacies""",
"""inadequate""",
"""inadequately""",
"""inadvertently""",
"""inappropriate""",
"""inappropriately""",
"""inbred""",
"""inc""",
"""inca""",
"""incarcerated""",
"""incentive""",
"""incentives""",
"""inception""",
"""inchoate""",
"""incidence""",
"""incidences""",
"""incident""",
"""incidental""",
"""incidentally""",
"""incidentaloma""",
"""incidents""",
"""incision""",
"""incisional""",
"""incisions""",
"""incisor""",
"""incisors""",
"""inclination""",
"""inclination-dependent""",
"""inclinometers""",
"""include""",
"""included""",
"""includes""",
"""including""",
"""incluiu""",
"""inclusion""",
"""inclusive""",
"""income""",
"""incoming""",
"""incompatibilities""",
"""incompatibility""",
"""incompatible""",
"""incomplete""",
"""incompletely""",
"""inconclusive""",
"""inconsistencies""",
"""inconsistency""",
"""inconsistent""",
"""incontinence""",
"""incorporate""",
"""incorporated""",
"""incorporates""",
"""incorporating""",
"""incorporation""",
"""incorrect""",
"""incorrectly""",
"""increase""",
"""increased""",
"""increases""",
"""increasing""",
"""increasingly""",
"""increment""",
"""incremental""",
"""incubate""",
"""incubated""",
"""incubating""",
"""incubation""",
"""incubator""",
"""incubators""",
"""incurable""",
"""indeed""",
"""indefinite""",
"""indel""",
"""indels""",
"""independent""",
"""independent-signal""",
"""independentes""",
"""independently""",
"""index""",
"""index-""",
"""indexed""",
"""indexical""",
"""indexing""",
"""india""",
"""indian""",
"""indiana""",
"""indica""",
"""indicate""",
"""indicated""",
"""indicates""",
"""indicating""",
"""indication""",
"""indications""",
"""indicative""",
"""indicator""",
"""indicators""",
"""indices""",
"""indigenous""",
"""indirect""",
"""indirectly""",
"""indirectness""",
"""indispensable""",
"""indisputable""",
"""indistinguishable""",
"""individual""",
"""individual-""",
"""individual-level""",
"""individualised""",
"""individualized""",
"""individually""",
"""individuals""",
"""indocyanine""",
"""indole-3-acetic""",
"""indoleamine""",
"""indolent""",
"""indomethacin""",
"""indonesia""",
"""indonesian""",
"""indoor""",
"""induce""",
"""induced""",
"""inducers""",
"""induces""",
"""inducible""",
"""inducibly""",
"""inducing""",
"""induction""",
"""inductions""",
"""inductive""",
"""inductively""",
"""indurated""",
"""industrial""",
"""industrial-scale""",
"""industrialized""",
"""industrially""",
"""industries""",
"""industry""",
"""industry-based""",
"""ineffective""",
"""ineffectiveness""",
"""ineligibility""",
"""inequalities""",
"""inequities""",
"""inert""",
"""inertia""",
"""inertial""",
"""inexpensive""",
"""inexperienced""",
"""inf""",
"""infancy""",
"""infant""",
"""infantile""",
"""infants""",
"""infarct""",
"""infarcted""",
"""infarction""",
"""infarctions""",
"""infarto""",
"""infected""",
"""infecting""",
"""infection""",
"""infection-induced""",
"""infectionacross""",
"""infectionis""",
"""infections""",
"""infectionvary""",
"""infectious""",
"""infective""",
"""infectivity""",
"""infer""",
"""inference""",
"""inferences""",
"""inferential""",
"""inferior""",
"""inferotemporal""",
"""inferring""",
"""infertility""",
"""infestation""",
"""infestations""",
"""infested""",
"""infesting""",
"""infiltrate""",
"""infiltrated""",
"""infiltrates""",
"""infiltrating""",
"""infiltration""",
"""infiltrative""",
"""infinite""",
"""infinium""",
"""inflamed""",
"""inflammation""",
"""inflammatory""",
"""inflatable""",
"""inflating""",
"""inflation""",
"""inflation-adjusted""",
"""inflection""",
"""inflicting""",
"""infliximab""",
"""inflorescences""",
"""influence""",
"""influenced""",
"""influences""",
"""influencing""",
"""influential""",
"""influenza""",
"""info""",
"""inform""",
"""informally""",
"""informants""",
"""information""",
"""informational""",
"""informative""",
"""informativos""",
"""informed""",
"""informing""",
"""informs""",
"""infrapontine-suprasacral""",
"""infrared""",
"""infrastructure""",
"""infrequently""",
"""infringe""",
"""infundibula""",
"""infundibulocystic""",
"""infusion""",
"""infusions""",
"""ingenol""",
"""ingenuity""",
"""ingestion""",
"""ingredient""",
"""ingredients""",
"""ingresos""",
"""inguinofemoral""",
"""inhalation""",
"""inhalational""",
"""inhaled""",
"""inhaler""",
"""inhd""",
"""inherent""",
"""inherently""",
"""inheritable""",
"""inheritance""",
"""inherited""",
"""inhibait""",
"""inhibant""",
"""inhibit""",
"""inhibited""",
"""inhibiting""",
"""inhibition""",
"""inhibitor""",
"""inhibitor-associated""",
"""inhibitors""",
"""inhibitory""",
"""inhibits""",
"""inicialmente""",
"""initial""",
"""initially""",
"""initiate""",
"""initiated""",
"""initiating""",
"""initiation""",
"""initiative""",
"""initiatives""",
"""initiator""",
"""initio""",
"""inject""",
"""injectable""",
"""injected""",
"""injecting""",
"""injection""",
"""injections""",
"""injured""",
"""injuries""",
"""injury""",
"""injury-""",
"""injury-related""",
"""injustice""",
"""inland""",
"""inlet""",
"""innate""",
"""inner""",
"""innervation""",
"""innovation""",
"""innovations""",
"""innovative""",
"""inorganic""",
"""inotropes""",
"""inpatient""",
"""inpatients""",
"""input""",
"""inputs""",
"""inquires""",
"""inquiring""",
"""inr""",
"""inrud""",
"""inscriptions""",
"""insecticide""",
"""insecticides""",
"""insecurity""",
"""insensitive""",
"""insert""",
"""insertase""",
"""insertion""",
"""insertions""",
"""inside""",
"""insight""",
"""insights""",
"""insignificant""",
"""insoluble""",
"""insomnia""",
"""inspected""",
"""inspection""",
"""inspiration""",
"""inspire""",
"""inspired""",
"""inspiring""",
"""instability""",
"""instagram""",
"""installations""",
"""instance""",
"""instances""",
"""instantaneous""",
"""instead""",
"""instigate""",
"""institute""",
"""instituted""",
"""institutes""",
"""institution""",
"""institutional""",
"""institutions""",
"""instruct""",
"""instructed""",
"""instruction""",
"""instructions""",
"""instructive""",
"""instrument""",
"""instrumental""",
"""instrumentation""",
"""instruments""",
"""insufficiency""",
"""insufficient""",
"""insufficiently""",
"""insula""",
"""insulin""",
"""insulin-like""",
"""insulin-secreting""",
"""insulinoma""",
"""insulinomas""",
"""insulins""",
"""insult""",
"""insurance""",
"""insured""",
"""insurer""",
"""insurers""",
"""intact""",
"""intake""",
"""intakes""",
"""integral""",
"""integrate""",
"""integrated""",
"""integrates""",
"""integrating""",
"""integration""",
"""integrative""",
"""integrity""",
"""intellectual""",
"""intelligence""",
"""intelligent""",
"""intelligibility""",
"""intelligible""",
"""intend""",
"""intended""",
"""intends""",
"""intense""",
"""intensification""",
"""intensified""",
"""intensities""",
"""intensity""",
"""intensity-modulated""",
"""intensive""",
"""intensive-care""",
"""intensively""",
"""intent""",
"""intent-to-treat""",
"""intention""",
"""intention-to-treat""",
"""intentional""",
"""intentionally""",
"""intentions""",
"""inter-domain""",
"""inter-homolog""",
"""inter-operator""",
"""inter-phylum""",
"""inter-quartile""",
"""inter-rater""",
"""inter-reader""",
"""interact""",
"""interacted""",
"""interacting""",
"""interaction""",
"""interactionist""",
"""interactions""",
"""interactive""",
"""interactomes""",
"""interacts""",
"""interbody""",
"""intercalated""",
"""interception""",
"""intercepts""",
"""interconnected""",
"""interconnection""",
"""intercourse""",
"""interdisciplinary""",
"""interest""",
"""interested""",
"""interesting""",
"""interestingly""",
"""interests""",
"""interface""",
"""interfaces""",
"""interfere""",
"""interference""",
"""interfering""",
"""interferon""",
"""interfollicular""",
"""intergenerational""",
"""intergovernmental""",
"""interictal""",
"""interim""",
"""interindividual""",
"""interior""",
"""interleukin""",
"""interleukin-1-associated""",
"""interleukin-15""",
"""interleukin-2""",
"""interleukin-6""",
"""intermediate""",
"""intermediate-""",
"""intermediate-acting""",
"""intermediate-risk""",
"""intermediates""",
"""intermedius""",
"""intermittent""",
"""intermolecular""",
"""internados""",
"""internal""",
"""internalization""",
"""internalize""",
"""internalized""",
"""internalizing""",
"""internally-generated""",
"""internation""",
"""international""",
"""internationally""",
"""internet""",
"""interneurons""",
"""interobserver""",
"""interpersonal""",
"""interplay""",
"""interposition""",
"""interpretation""",
"""interpreted""",
"""interpreting""",
"""interprofessional""",
"""interquartile""",
"""interrater""",
"""interrelated""",
"""interrelationships""",
"""interrogate""",
"""interrogating""",
"""interrogation""",
"""interrupted""",
"""interrupts""",
"""interscalene""",
"""interscapular""",
"""intersect""",
"""intersection""",
"""intersex""",
"""interspecies""",
"""interstitial""",
"""intersubject""",
"""intersubspecific""",
"""intertwined""",
"""interval""",
"""intervals""",
"""intervene""",
"""intervention""",
"""intervention-the""",
"""interventional""",
"""interventions""",
"""interventricular""",
"""intervertebral""",
"""interview""",
"""interviewed""",
"""interviewer-administered""",
"""interviewing""",
"""interviewing-an""",
"""interviews""",
"""interweaving""",
"""interwoven""",
"""intestinal""",
"""intestine""",
"""intimate""",
"""intimately""",
"""into""",
"""intolerance""",
"""intolerant""",
"""intoxicated""",
"""intoxication""",
"""intoxications""",
"""intra-""",
"""intra-abdominal""",
"""intra-arterial""",
"""intra-articular""",
"""intra-cranial""",
"""intra-hepatic""",
"""intra-individual""",
"""intra-operative""",
"""intra-operator""",
"""intra-osseous""",
"""intra-tongue""",
"""intraamniotic""",
"""intraarticular""",
"""intracellular""",
"""intracerebral""",
"""intraclass""",
"""intracordal""",
"""intracorporal""",
"""intracorporeal""",
"""intracranial""",
"""intracytoplasmic""",
"""intradermal""",
"""intradialytic""",
"""intraductal""",
"""intraepidermal""",
"""intraepithelial""",
"""intraglottal""",
"""intralesional""",
"""intramedullary""",
"""intramuscular""",
"""intranasal""",
"""intranasally""",
"""intraocular""",
"""intraoperative""",
"""intraoperatively""",
"""intraoral""",
"""intraparenchymal""",
"""intrapartum""",
"""intraperitoneal""",
"""intraplaque""",
"""intrarater""",
"""intrasplenic""",
"""intrathecal""",
"""intrathoracic""",
"""intratumor""",
"""intratumoral""",
"""intraurban""",
"""intrauterine""",
"""intravascular""",
"""intravenous""",
"""intravenously""",
"""intravitreally""",
"""intricate""",
"""intricately""",
"""intriguingly""",
"""intrinsic""",
"""intrinsically""",
"""introduce""",
"""introduced""",
"""introducing""",
"""introduction""",
"""introductions""",
"""intron-exon""",
"""intronic""",
"""intrusive""",
"""intubated""",
"""intubation""",
"""intuitive""",
"""invade""",
"""invading""",
"""invalid""",
"""invalidates""",
"""invaluable""",
"""invariably""",
"""invariance""",
"""invariant""",
"""invasion""",
"""invasions""",
"""invasive""",
"""invasivity""",
"""invasivo""",
"""inventory""",
"""inventory-student""",
"""inverse""",
"""inversely""",
"""inversion""",
"""invertebrate""",
"""invertebrates""",
"""inverted""",
"""invest""",
"""investigar""",
"""investigate""",
"""investigated""",
"""investigates""",
"""investigating""",
"""investigation""",
"""investigations""",
"""investigative""",
"""investigator-reported""",
"""investigators""",
"""investing""",
"""invigorated""",
"""invited""",
"""invoke""",
"""involve""",
"""involved""",
"""involvement""",
"""involvements""",
"""involves""",
"""involving""",
"""inwardly""",
"""iodide""",
"""iodine""",
"""iodine-deficient""",
"""iodine-sufficient""",
"""iodometry""",
"""ion""",
"""ion-exchange""",
"""ion-size""",
"""ionic""",
"""ionized""",
"""ionizing""",
"""ionm""",
"""ions""",
"""iop""",
"""ipaq""",
"""ipe""",
"""ipilimumab""",
"""ipmn""",
"""ipn""",
"""ipp""",
"""ippv""",
"""ipsa""",
"""ipsc""",
"""ipscs""",
"""ipsilateral""",
"""iptp-sp""",
"""ipv""",
"""iqr""",
"""ir-induced""",
"""iraes""",
"""irak4""",
"""iran""",
"""iranian""",
"""iraq""",
"""irbd""",
"""irees""",
"""irf5""",
"""irf8""",
"""irg1""",
"""iri""",
"""iris""",
"""irish""",
"""iroc""",
"""iron""",
"""iron-dependent""",
"""irradiated""",
"""irradiation""",
"""irregular""",
"""irregularities""",
"""irregularity""",
"""irrelevant""",
"""irrespective""",
"""irreversible""",
"""irritability""",
"""irritable""",
"""irritated""",
"""irritation""",
"""irs1""",
"""irt""",
"""isc""",
"""ischemia""",
"""ischemia-reperfusion""",
"""ischemic""",
"""isg""",
"""isgs""",
"""isk""",
"""isks""",
"""islam""",
"""islamic""",
"""island""",
"""islander""",
"""isln""",
"""isln-mediated""",
"""iso""",
"""isobolographic""",
"""isocortical""",
"""isoenzyme""",
"""isoform""",
"""isoform-expressing""",
"""isoforms""",
"""isogenic""",
"""isohemagglutinin""",
"""isolate""",
"""isolated""",
"""isolates""",
"""isolating""",
"""isolation""",
"""isomer""",
"""isometric""",
"""isothiocyanate-labelled""",
"""isotonitazene""",
"""isotropic""",
"""isotype""",
"""isoxazole-9""",
"""ispor""",
"""isrctn""",
"""isrib""",
"""iss""",
"""issue""",
"""issued""",
"""issues""",
"""isuog""",
"""isup""",
"""isx-9""",
"""it""",
"""itaconic""",
"""italian""",
"""italy""",
"""item""",
"""items""",
"""iterquartile""",
"""itk""",
"""itp""",
"""itregs""",
"""its""",
"""itself""",
"""iugr""",
"""iuthree""",
"""ivc""",
"""ivf""",
"""ivig""",
"""ixekizumab""",
"""ixodes""",
"""j-shaped""",
"""jackknife""",
"""jail""",
"""jails""",
"""jak""",
"""jak2-positive""",
"""jan""",
"""jane""",
"""janeiro""",
"""january""",
"""january-march""",
"""january-may""",
"""janus""",
"""japan""",
"""japanese""",
"""japonicus""",
"""jasmonate""",
"""jasmonic""",
"""jaundice""",
"""javanese""",
"""javeriana""",
"""jem-ef""",
"""jeopardy""",
"""jersey""",
"""jia""",
"""jiangxi""",
"""jimma""",
"""jingbo""",
"""jingze""",
"""jingzhe""",
"""jirovecii""",
"""jitter""",
"""jml-1""",
"""jms""",
"""joanna""",
"""job""",
"""jobs""",
"""john""",
"""johns""",
"""joined""",
"""joining""",
"""joint""",
"""jointly""",
"""joints""",
"""jolis""",
"""jonckheere-terpstra""",
"""jordanian""",
"""journal""",
"""journals""",
"""journey""",
"""jsct-atg15""",
"""juan""",
"""judged""",
"""judgements""",
"""judgment""",
"""judgments""",
"""judicious""",
"""julia""",
"""july""",
"""jumps""",
"""jun""",
"""junction""",
"""junctional""",
"""junctions""",
"""june""",
"""june-august""",
"""junior""",
"""junk""",
"""jurisdiction""",
"""just""",
"""justice""",
"""justice-referred""",
"""justified""",
"""justify""",
"""juvenile""",
"""juxta-articular""",
"""juxtaposed""",
"""k-means""",
"""k-mer""",
"""k-ras-pi3k""",
"""k187""",
"""k76""",
"""kaai""",
"""kallikrein""",
"""kansas""",
"""kaplan-meier""",
"""kaposi""",
"""kappa""",
"""kappa-b""",
"""karaj""",
"""karnofsky""",
"""karyotyping""",
"""kathmandu""",
"""kazakhs""",
"""kcnj10""",
"""kcnj5""",
"""kdigo""",
"""kdm2b""",
"""kdr""",
"""keap1""",
"""kedah""",
"""keep""",
"""keeping""",
"""kelantan""",
"""keloid-like""",
"""kentucky""",
"""kept""",
"""keratinocyte""",
"""keratoconus""",
"""keratocytes""",
"""keratometric""",
"""keratopathy""",
"""keratoplasty""",
"""keratosis""",
"""kerman""",
"""kessler""",
"""ketamine""",
"""ketoacidosis""",
"""ketogenic""",
"""ketone""",
"""ketones""",
"""ketorolac""",
"""kettering""",
"""key""",
"""keyed""",
"""keywords""",
"""khz""",
"""ki67""",
"""kibo""",
"""kidney""",
"""kidneys""",
"""kids""",
"""kill""",
"""killer""",
"""killing""",
"""killip""",
"""kilocalories""",
"""kilogram""",
"""kilovoltage""",
"""kim-1""",
"""kin-8194""",
"""kinase""",
"""kinase-associated""",
"""kinase-signal""",
"""kinases""",
"""kind""",
"""kindergartens""",
"""kinds""",
"""kinematics""",
"""kinetic""",
"""kinetically""",
"""kinetics""",
"""kingston""",
"""kinky""",
"""kirkpatrick""",
"""kit""",
"""kits""",
"""klebsiella""",
"""klf4""",
"""kmm520""",
"""kmt""",
"""kmt2d""",
"""knee""",
"""knife""",
"""knn""",
"""knock""",
"""knock-down""",
"""knock-in""",
"""knock-out""",
"""knockdown""",
"""knockdown-resistant""",
"""knocked""",
"""knockout""",
"""knot""",
"""know""",
"""know-how""",
"""knowing""",
"""knowledge""",
"""known""",
"""kolmogorov-smirnov""",
"""konno""",
"""koos""",
"""korea""",
"""korean""",
"""kpa""",
"""kpatients""",
"""kps""",
"""kragujevac""",
"""kras""",
"""kras-targeted""",
"""krasg12c""",
"""krill""",
"""krt""",
"""kruskal-wallis""",
"""kwon""",
"""kyphosis""",
"""kyrgyzstan""",
"""l-1""",
"""l1cam""",
"""la-dqb1""",
"""lab""",
"""lab-assessed""",
"""lab-based""",
"""label""",
"""label-free""",
"""labeled""",
"""labeling""",
"""labelled""",
"""labels""",
"""labial""",
"""labor""",
"""laboratoire""",
"""laboratoriais""",
"""laboratories-non-linear""",
"""laboratory""",
"""laboratory-""",
"""laboratory-confirmed""",
"""laboratory-scale""",
"""laborious""",
"""labour""",
"""labral""",
"""labskin""",
"""laceration""",
"""lack""",
"""lacked""",
"""lacking""",
"""lacks""",
"""lactate""",
"""lactating""",
"""lactis""",
"""lactobacillus""",
"""lactose""",
"""ladder""",
"""lag""",
"""lagged""",
"""lagoons""",
"""lake""",
"""laliga""",
"""lalitpur""",
"""lamella""",
"""lamellae""",
"""lamin""",
"""laminectomy""",
"""laminin""",
"""lamins""",
"""lamotrigine""",
"""lamp""",
"""lancet""",
"""landmark""",
"""landmarks""",
"""landowners""",
"""landscape""",
"""landscapes""",
"""lange""",
"""language""",
"""languages""",
"""laos""",
"""laparoendoscopic""",
"""laparoscopic""",
"""laparoscopy""",
"""laparotomy""",
"""lar""",
"""large""",
"""large-for-gestational-age""",
"""large-scale""",
"""large-sized""",
"""large-vessel""",
"""largely""",
"""larger""",
"""largest""",
"""laricina""",
"""larix""",
"""larkin""",
"""lars""",
"""larvae""",
"""larval""",
"""laryngeal""",
"""laryngeal-based""",
"""laryngological""",
"""laryngostroboscopic""",
"""larynx""",
"""las""",
"""laser""",
"""laser-induced""",
"""laser-scanning""",
"""lasso-regularized""",
"""last""",
"""lastly""",
"""late""",
"""late-life""",
"""late-onset""",
"""late-stage""",
"""latency""",
"""latent""",
"""later""",
"""lateral""",
"""laterality""",
"""laterally""",
"""latest""",
"""latex""",
"""laticifer""",
"""laticifers""",
"""latin""",
"""latinx""",
"""latitude""",
"""latitudes""",
"""latter""",
"""lattice""",
"""lattice-based""",
"""lattices""",
"""laughing""",
"""launched""",
"""lavage""",
"""laval""",
"""law""",
"""laws""",
"""lay""",
"""layer""",
"""layered""",
"""layers""",
"""lbc""",
"""lbcl""",
"""lbd""",
"""lbd2""",
"""lbw""",
"""lc-ms""",
"""lc3ii""",
"""lca""",
"""lck""",
"""lcm""",
"""lcs""",
"""ldct""",
"""ldg-derived""",
"""ldgs""",
"""ldh""",
"""ldh-a""",
"""ldl""",
"""ldl-c""",
"""ldlr""",
"""ldlt""",
"""ldr""",
"""lea-2""",
"""lead""",
"""leaders""",
"""leadership""",
"""leading""",
"""leading-strand""",
"""leads""",
"""leaf""",
"""leaflet""",
"""leak""",
"""leakage""",
"""leaks""",
"""lean""",
"""learn""",
"""learned""",
"""learners""",
"""learning""",
"""least""",
"""leave""",
"""leaves""",
"""leaving""",
"""lebanese""",
"""lebanon""",
"""lectin""",
"""lectura""",
"""led""",
"""leep""",
"""left""",
"""left-hand""",
"""left-side""",
"""left-sided""",
"""leftover""",
"""leg""",
"""legacies""",
"""legacy""",
"""legal""",
"""legalisation""",
"""legalization""",
"""legionella""",
"""legions""",
"""legislation""",
"""legislative""",
"""leibovich""",
"""leiden""",
"""leilei""",
"""leiomyosarcoma""",
"""leishmaniasis""",
"""leisure""",
"""leisure-time""",
"""lemon""",
"""lenalidomide""",
"""lends""",
"""length""",
"""lengthening""",
"""lengths""",
"""lens""",
"""lens-type""",
"""lenses""",
"""lentiginous""",
"""lentigo""",
"""leone""",
"""lep""",
"""leptin""",
"""leptospira""",
"""les""",
"""lesion""",
"""lesion-specific""",
"""lesional""",
"""lesions""",
"""less""",
"""lessen""",
"""lessened""",
"""lessening""",
"""lesser""",
"""lesson""",
"""lessons""",
"""let""",
"""let-7b""",
"""lethal""",
"""lethality""",
"""letrozole""",
"""letter""",
"""letters""",
"""lettuce""",
"""leucocyte""",
"""leukaemia""",
"""leukaemias""",
"""leukaemic""",
"""leukemia""",
"""leukemianet""",
"""leukemias""",
"""leukemogenic""",
"""leukocyte""",
"""leukocytes""",
"""leukopenia""",
"""leukoplakia""",
"""leuprorelin""",
"""levac""",
"""levator""",
"""level""",
"""levels""",
"""leverage""",
"""leveraged""",
"""leveraging""",
"""levies""",
"""levine""",
"""levofloxacin""",
"""levor""",
"""levothyroxine""",
"""leydig""",
"""leydig-like""",
"""lflg""",
"""lga""",
"""lgan""",
"""lgb""",
"""lgbt-advocacy""",
"""lgbti""",
"""lgbtq""",
"""lgbtq-based""",
"""lgd""",
"""lge""",
"""lgx818""",
"""lhm""",
"""lhms""",
"""lhrh""",
"""liabilities""",
"""liang""",
"""liberalization""",
"""librarian""",
"""libraries""",
"""library""",
"""libre""",
"""license""",
"""licensed""",
"""licensing""",
"""licheng""",
"""lidocaine""",
"""lie""",
"""lies""",
"""life""",
"""life-cycle-engineered""",
"""life-threatening""",
"""lifecycle""",
"""lifeform""",
"""lifelong""",
"""lifesaving""",
"""lifespan""",
"""lifespans""",
"""lifestyle""",
"""lifestyle-related""",
"""lifetime""",
"""lifetimes""",
"""lifting""",
"""ligament""",
"""ligaments""",
"""ligand""",
"""ligand-binding""",
"""ligand-free""",
"""ligand-receptor""",
"""ligands""",
"""ligases""",
"""ligation""",
"""ligation-based""",
"""ligations""",
"""ligature""",
"""ligature-induced""",
"""light""",
"""light-filtering""",
"""light-load""",
"""light-sensitive""",
"""lighter""",
"""lightgbm""",
"""lightness""",
"""lights""",
"""lightweight""",
"""lignin""",
"""liguria""",
"""lijian""",
"""like""",
"""likeable""",
"""likelihood""",
"""likelihoods""",
"""likely""",
"""likert""",
"""likewise""",
"""lilacs""",
"""limb""",
"""limb-predominant""",
"""limbic""",
"""limbs""",
"""limelight""",
"""limit""",
"""limitation""",
"""limitations""",
"""limited""",
"""limiting""",
"""limits""",
"""linc00337""",
"""lindley""",
"""line""",
"""linea""",
"""lineage""",
"""lineage-smooth""",
"""lineages""",
"""linear""",
"""linear-quadratic""",
"""linear-stapled""",
"""linearly""",
"""lined""",
"""liner""",
"""lines""",
"""ling""",
"""lingual""",
"""linguistic""",
"""linguistically""",
"""link""",
"""linkage""",
"""linked""",
"""linking""",
"""links""",
"""linnaeus""",
"""linoleic""",
"""linolenic""",
"""lions""",
"""lip""",
"""lipid""",
"""lipid-binding""",
"""lipid-rich""",
"""lipidation""",
"""lipidic""",
"""lipids""",
"""lipodystrophy""",
"""lipopolysaccharide""",
"""lipoprotein""",
"""lips""",
"""liquid""",
"""liquid-based""",
"""liquids""",
"""liquor""",
"""list""",
"""listed""",
"""listener""",
"""listeners""",
"""listening""",
"""listing""",
"""lists""",
"""liteqtl""",
"""literacy""",
"""literature""",
"""literatures""",
"""lithium""",
"""lithium-pilocarpine""",
"""lithothamnium""",
"""little""",
"""liu""",
"""lius""",
"""live""",
"""live-births""",
"""lived""",
"""livedo""",
"""liver""",
"""liver-only""",
"""lives""",
"""living""",
"""lizards""",
"""lkb1-mutant""",
"""llc""",
"""lld""",
"""llds""",
"""lletz""",
"""lls""",
"""llsv""",
"""lm-smcs""",
"""lmic""",
"""lmics""",
"""lmpr""",
"""lms""",
"""ln-esrd""",
"""lnc""",
"""lncrna""",
"""lncrna-mirna-mrna""",
"""lncrna-ror""",
"""lncrnas""",
"""load""",
"""loader""",
"""loading""",
"""loadings""",
"""loads""",
"""lobar""",
"""lobe""",
"""lobectomy""",
"""local""",
"""localised""",
"""localization""",
"""localizations""",
"""localize""",
"""localized""",
"""localizes""",
"""localizing""",
"""locally""",
"""located""",
"""location""",
"""locations""",
"""lochans""",
"""loci""",
"""lock-in""",
"""lockdown""",
"""loco-regional""",
"""locomotor""",
"""locus""",
"""locus-specific""",
"""lod""",
"""log""",
"""log-linear""",
"""log-rank""",
"""log10""",
"""log2kim-1""",
"""log2nag""",
"""log2ngal""",
"""log2nt-probnp""",
"""logcs""",
"""logiboost""",
"""logic""",
"""logistic""",
"""logistics""",
"""logit""",
"""logmar""",
"""logs""",
"""loneliness""",
"""lonely""",
"""long""",
"""long-acting""",
"""long-chain""",
"""long-day""",
"""long-distance""",
"""long-lasting""",
"""long-lived""",
"""long-term""",
"""longer""",
"""longer-term""",
"""longest""",
"""longevity""",
"""longicaudis""",
"""longitudes""",
"""longitudinal""",
"""longitudinally""",
"""longspur""",
"""longspurs""",
"""longstanding""",
"""lontra""",
"""look""",
"""looked""",
"""loop""",
"""loop-a""",
"""loosening""",
"""loosenings""",
"""lordosis""",
"""los""",
"""lose""",
"""losing""",
"""loss""",
"""loss-""",
"""loss-of-function""",
"""losses""",
"""lost""",
"""lot""",
"""loudness""",
"""low""",
"""low-""",
"""low-barrier""",
"""low-certainty""",
"""low-confidence""",
"""low-cost""",
"""low-cryogen""",
"""low-density""",
"""low-dose""",
"""low-ejection""",
"""low-fat""",
"""low-flow""",
"""low-frequency""",
"""low-grade""",
"""low-gradient""",
"""low-income""",
"""low-intensity""",
"""low-level""",
"""low-order""",
"""low-quality""",
"""low-resolution""",
"""low-resource""",
"""low-risk""",
"""low-salt""",
"""low-temperature""",
"""low-threshold""",
"""low-titer""",
"""lower""",
"""lower-age""",
"""lower-dose""",
"""lower-income""",
"""lowered""",
"""lowering""",
"""lowers""",
"""lowest""",
"""lowest-income""",
"""loyalty""",
"""lpcc""",
"""lpp""",
"""lps""",
"""lps-induced""",
"""lrtis""",
"""lsd""",
"""lsr""",
"""lstm""",
"""ltc""",
"""ltd""",
"""ltp""",
"""ltt""",
"""luad""",
"""lubben""",
"""lublin""",
"""lucidum""",
"""luciferase""",
"""ludgunensis""",
"""lum-201""",
"""lumbar""",
"""lumen""",
"""lumenal""",
"""luminal""",
"""luminance""",
"""luminancia""",
"""luminescence""",
"""lump""",
"""lumpectomy""",
"""lund-mackay""",
"""lung""",
"""lungs""",
"""lupus""",
"""lusterless""",
"""luteinising""",
"""lutrinids""",
"""lutris""",
"""luts""",
"""lvef""",
"""lxxlxxlxx""",
"""lyme""",
"""lymph""",
"""lymphadenectomy""",
"""lymphangiography-guided""",
"""lymphangioproliferative""",
"""lymphatic""",
"""lymphatics""",
"""lymphoblastic""",
"""lymphocyte""",
"""lymphocyte-associated""",
"""lymphocyte-specific""",
"""lymphocytes""",
"""lymphocytic""",
"""lymphohematologic""",
"""lymphoid""",
"""lymphoma""",
"""lymphomas""",
"""lymphopenia""",
"""lysine""",
"""lysis""",
"""lysophospholipids""",
"""lysosomal""",
"""lysosome""",
"""lysosomes""",
"""lysozyme""",
"""lyst""",
"""lytic""",
"""m-1""",
"""m-3""",
"""m-cs""",
"""m-tor""",
"""ma2""",
"""mabc""",
"""mac""",
"""mace""",
"""machine""",
"""machinery""",
"""machines""",
"""macro-""",
"""macro-plastics""",
"""macrocephaly""",
"""macroglobulinemia""",
"""macromolecular""",
"""macromolecules""",
"""macronutrients""",
"""macrophage""",
"""macrophage-associated""",
"""macrophages""",
"""macroscopic""",
"""macroscopically""",
"""macrostructural""",
"""macrostructure""",
"""macrovascular""",
"""macular""",
"""made""",
"""mae""",
"""maf""",
"""magiran""",
"""magnesium""",
"""magnetic""",
"""magnetite""",
"""magnetoreception""",
"""magnetoreceptor""",
"""magnification""",
"""magnified""",
"""magnitude""",
"""magrini""",
"""main""",
"""main-duct""",
"""mainland""",
"""mainly""",
"""mainstay""",
"""mainstream""",
"""maintain""",
"""maintained""",
"""maintaining""",
"""maintenait""",
"""maintenance""",
"""maintenant""",
"""maintenir""",
"""maio""",
"""maior""",
"""maiores""",
"""mais""",
"""major""",
"""majority""",
"""make""",
"""makers""",
"""makes""",
"""making""",
"""makra-test""",
"""malacia""",
"""maladaptive""",
"""malaria""",
"""malaysia""",
"""malaysian""",
"""male""",
"""male-typical""",
"""males""",
"""malformations""",
"""maligna""",
"""malignancies""",
"""malignancy""",
"""malignant""",
"""malignantly""",
"""malls""",
"""malnutrition""",
"""malocclusion""",
"""malondialdehyde""",
"""malperfusion""",
"""malposition""",
"""mammalian""",
"""mammals""",
"""mammary""",
"""mammogram""",
"""mammography""",
"""man""",
"""manage""",
"""managed""",
"""management""",
"""manager""",
"""managing""",
"""mandate""",
"""mandated""",
"""mandates""",
"""mandatory""",
"""mandibular""",
"""maned""",
"""manganese""",
"""mangifera""",
"""mango""",
"""manifest""",
"""manifestation""",
"""manifestations""",
"""manifested""",
"""manipulandum""",
"""manipulate""",
"""manipulating""",
"""manipulation""",
"""manipulations""",
"""mann-whitney""",
"""mann-whitney-u""",
"""manner""",
"""manners""",
"""mannose""",
"""mannose-sensitive""",
"""manoeuvres""",
"""manometric""",
"""manometry""",
"""manual""",
"""manually""",
"""manufactured""",
"""manufacturer""",
"""manufacturers""",
"""manufacturing""",
"""manuscript""",
"""manuscripts""",
"""many""",
"""map""",
"""map2k2""",
"""mapk""",
"""mapk-mediated""",
"""mapped""",
"""mapping""",
"""maps""",
"""marcescens""",
"""march""",
"""march-""",
"""march-april""",
"""marco""",
"""margin""",
"""marginal""",
"""marginally""",
"""margins""",
"""marijuana""",
"""marine""",
"""mariner""",
"""marital""",
"""mark""",
"""marked""",
"""markedly""",
"""marker""",
"""markers""",
"""market""",
"""marketing""",
"""marketplaces""",
"""markets""",
"""marketscan""",
"""markings""",
"""markov""",
"""maroons""",
"""maropitant""",
"""married""",
"""marrow""",
"""marrow-derived""",
"""marrying""",
"""marte""",
"""mary""",
"""maryland""",
"""mas""",
"""mascarada""",
"""masculine""",
"""masculinity""",
"""masculinity-femininity""",
"""masculinization""",
"""masculinized""",
"""mask""",
"""masked""",
"""masking""",
"""maslach""",
"""maspat""",
"""mass""",
"""mass-spectrometry""",
"""massa""",
"""massachusetts""",
"""masses""",
"""masseter""",
"""massive""",
"""massively""",
"""masson""",
"""mast""",
"""master""",
"""mat""",
"""match""",
"""matched""",
"""matched-pair""",
"""matches""",
"""matching""",
"""mate""",
"""material""",
"""materials""",
"""maternal""",
"""maternal-fetal""",
"""maternally""",
"""maternity""",
"""mathematical""",
"""mathematics""",
"""mating""",
"""matrices""",
"""matriculation""",
"""matrix""",
"""matrixules""",
"""matter""",
"""maturation""",
"""mature""",
"""maturity""",
"""mavs""",
"""max""",
"""maxillary""",
"""maxim""",
"""maxima""",
"""maximal""",
"""maximize""",
"""maximizes""",
"""maximizing""",
"""maximum""",
"""may""",
"""may-september""",
"""mayo""",
"""maze""",
"""mazzella""",
"""mbc""",
"""mbs""",
"""mc3t3-e1""",
"""mcc""",
"""mcc-related""",
"""mcc-specific""",
"""mcf""",
"""mcf-7""",
"""mcg""",
"""mcid""",
"""mcmaster""",
"""mcnemar""",
"""mcrpc""",
"""mcs""",
"""mctp""",
"""md-ipmn""",
"""mda""",
"""mda-mb-231""",
"""mdb""",
"""mdd""",
"""mdr""",
"""mds""",
"""meal""",
"""meal-induced""",
"""meals""",
"""mean""",
"""meaning""",
"""meaningful""",
"""meaningfully""",
"""means""",
"""meant""",
"""meanwhile""",
"""measurable""",
"""measure""",
"""measured""",
"""measurement""",
"""measurements""",
"""measures""",
"""measuring""",
"""meat""",
"""meat-fed""",
"""meatpacking""",
"""meatus""",
"""mebutate""",
"""mechanical""",
"""mechanics""",
"""mechanism""",
"""mechanisms""",
"""mechanistic""",
"""mechanistically""",
"""mechano-gated""",
"""mechanomyographic""",
"""mechanoreceptor""",
"""mechanoreceptors""",
"""mechanosensory""",
"""mechanotransduction""",
"""media""",
"""medial""",
"""medialization""",
"""median""",
"""mediante""",
"""mediastinal""",
"""mediate""",
"""mediated""",
"""mediates""",
"""mediating""",
"""mediation""",
"""mediational""",
"""mediator""",
"""mediators""",
"""medicaid""",
"""medicaid-insured""",
"""medical""",
"""medically""",
"""medicamentos""",
"""medicare""",
"""medicated""",
"""medication""",
"""medications""",
"""medicinal""",
"""medicine""",
"""medicines""",
"""medicorp""",
"""medicus""",
"""medida""",
"""medidas""",
"""medidos""",
"""medieval""",
"""meditated""",
"""mediterranean-style""",
"""medium""",
"""medium-""",
"""medium-high""",
"""medium-sized""",
"""medium-to-large-sized""",
"""medline""",
"""medulloblastomas""",
"""meessi""",
"""meet""",
"""meeting""",
"""meetings""",
"""meg3""",
"""megakaryopoiesis""",
"""meihan""",
"""meiosis""",
"""meiotic""",
"""meiotically""",
"""meis1""",
"""mejor""",
"""mek""",
"""melaka""",
"""melancholia""",
"""melancholic""",
"""melanin""",
"""melanocytes""",
"""melanocytic""",
"""melanogaster""",
"""melanoma""",
"""melanoma-associated""",
"""melanoma-but""",
"""melanomas""",
"""melanosis""",
"""melatonin""",
"""melbourne""",
"""meld""",
"""mellifera""",
"""mellitus""",
"""melphalan""",
"""melting-ultrasonic""",
"""melville""",
"""member""",
"""members""",
"""membership""",
"""membrane""",
"""membrane-bound""",
"""membranes""",
"""memeq""",
"""memorial""",
"""memories""",
"""memory""",
"""men""",
"""menarche""",
"""mendelian""",
"""mending""",
"""meningioma""",
"""meniscal""",
"""menisci""",
"""meniscus""",
"""menor""",
"""menos""",
"""menses""",
"""menstrual""",
"""menstruation""",
"""mental""",
"""mentally""",
"""mentation""",
"""mentees""",
"""mentioned""",
"""mentions""",
"""mentor""",
"""mentors""",
"""mentorship""",
"""ments""",
"""meo""",
"""mepb""",
"""mercury""",
"""mere""",
"""merely""",
"""merged""",
"""mergers""",
"""merit""",
"""merosomes""",
"""merriam""",
"""mers""",
"""mesenchymal""",
"""mesendoderm""",
"""mesenteric""",
"""meses""",
"""mesh""",
"""meshmixer""",
"""mesial""",
"""mesmo""",
"""mesoblastic""",
"""mesocolic""",
"""mesoderm""",
"""mesodermal""",
"""mesothelioma""",
"""mesotheliomas""",
"""message""",
"""messages""",
"""messaging""",
"""messenger""",
"""met""",
"""meta-analyses""",
"""meta-analysis""",
"""meta-analytic""",
"""meta-regression""",
"""metabolic""",
"""metabolically""",
"""metabolism""",
"""metabolite""",
"""metabolites""",
"""metabolize""",
"""metabolized""",
"""metabolome""",
"""metabolomes""",
"""metabolomic""",
"""metabolomics""",
"""metabolomics-based""",
"""metaborate""",
"""metadata""",
"""metagenomic""",
"""metagenomics""",
"""metal""",
"""metal-organic""",
"""metallic""",
"""metallo-beta-lactamases""",
"""metalloproteinase""",
"""metalloproteinases""",
"""metals""",
"""metamorphosis""",
"""metaphase""",
"""metaphor""",
"""metaplasia""",
"""metaplasias""",
"""metaplastic""",
"""metastases""",
"""metastasis""",
"""metastatic""",
"""metastatic-at-diagnosis""",
"""metatarsals""",
"""metatstatic""",
"""metformin""",
"""meth-hoxa9""",
"""methacrylic""",
"""methacryloly""",
"""methadone""",
"""methamphetamine""",
"""methamphetamine-related""",
"""methanogens""",
"""methanol""",
"""methemoglobinemia""",
"""methenamine""",
"""method""",
"""methodological""",
"""methodologies""",
"""methodology""",
"""methods""",
"""methotrexate""",
"""methyl""",
"""methyl-adducts""",
"""methylated""",
"""methylation""",
"""methylation-based""",
"""methylcellulose""",
"""methyldopa""",
"""methylene""",
"""methylidyne""",
"""methylmalonate""",
"""methylome""",
"""methyltransferase""",
"""methyltransferases""",
"""meticulous""",
"""metoclopramide""",
"""metoprolol""",
"""metric""",
"""metrics""",
"""metropolitan""",
"""mets""",
"""metyrapone""",
"""mevalonate""",
"""mexican""",
"""mexico""",
"""mfd""",
"""mff""",
"""mff-1""",
"""mfou""",
"""mgh""",
"""mgmt""",
"""mgso4""",
"""mhc""",
"""mhhs""",
"""mhs""",
"""mic""",
"""mice""",
"""michigan""",
"""micro""",
"""micro-""",
"""micro-computed""",
"""micro-organismes""",
"""micro-vascular""",
"""microanalysis""",
"""microangiopathy""",
"""microarray""",
"""microarrays""",
"""microassay""",
"""microbe""",
"""microbes""",
"""microbial""",
"""microbial-based""",
"""microbial-host""",
"""microbiological""",
"""microbiome""",
"""microbiota""",
"""microbubble""",
"""microcaecilia""",
"""microcephaly""",
"""microchannel""",
"""microchannels""",
"""microchip""",
"""microchips""",
"""microcirculation""",
"""microcontroller""",
"""microdebris""",
"""microdissection""",
"""microenvironment""",
"""microenvironmental""",
"""microextraction""",
"""microfat""",
"""microfluidics""",
"""microglia""",
"""microlaryngeal""",
"""micromanipulation""",
"""micromedex""",
"""micrometeoroids""",
"""micromixer""",
"""micron""",
"""micronutrient""",
"""micronutrients""",
"""microorganism""",
"""microorganisms""",
"""microparticles""",
"""micropeptides""",
"""microprocessor""",
"""microrna""",
"""microrna-132""",
"""microrna-203""",
"""microrna-204-3p""",
"""microrna-211""",
"""microrna-29b""",
"""microrna-376c-3p""",
"""microrna-9""",
"""micrornas""",
"""microsatellite""",
"""microscale""",
"""microscope""",
"""microscopic""",
"""microscopical""",
"""microscopically""",
"""microscopy""",
"""microsurgery""",
"""microsurgical""",
"""microsyntenic""",
"""microtubule-based""",
"""microvascular""",
"""microvasculature""",
"""microvessel""",
"""microwave""",
"""mid-""",
"""mid-2020""",
"""mid-dose""",
"""mid-life""",
"""mid-teen""",
"""mid-term""",
"""mid-thigh""",
"""midcab""",
"""middle""",
"""middle-""",
"""middle-aged""",
"""middle-income""",
"""midfielders""",
"""midlife""",
"""midline""",
"""midshaft""",
"""midurethra""",
"""midurethral""",
"""midway""",
"""mie""",
"""miet""",
"""mifflin-st-jeor""",
"""might""",
"""migraine""",
"""migrate""",
"""migrated""",
"""migrating""",
"""migration""",
"""migrations""",
"""mild""",
"""mild-cac""",
"""mild-to-moderate""",
"""mild-to-severe""",
"""mildly""",
"""miles""",
"""milestones""",
"""milieu""",
"""military""",
"""milk""",
"""miller""",
"""millimeters""",
"""million""",
"""millions""",
"""milliseconds""",
"""mimecan""",
"""mimecan-evs""",
"""mimic""",
"""mimicked""",
"""mimicking""",
"""mimics""",
"""min""",
"""mind""",
"""mind-body""",
"""mindfulness""",
"""mindless""",
"""miner""",
"""mineral""",
"""mineralization""",
"""mineralocorticoid""",
"""minerals""",
"""miners""",
"""mingran""",
"""mini""",
"""mini-invasive""",
"""mini-laparoscopic""",
"""mini-laparoscopy""",
"""mini-lps""",
"""mini-mental""",
"""mini-review""",
"""mini-tn""",
"""minilaparoscopic""",
"""minilaparoscopy""",
"""minimal""",
"""minimally""",
"""minimisation""",
"""minimising""",
"""minimize""",
"""minimized""",
"""minimizing""",
"""minimum""",
"""ministry""",
"""minnesota""",
"""minor""",
"""minoris""",
"""minoritized""",
"""minority""",
"""minority-advocacy""",
"""minors""",
"""minshawi""",
"""minus""",
"""minute""",
"""minutes""",
"""mir""",
"""mir-1-3p""",
"""mir-100""",
"""mir-103""",
"""mir-125a-5p""",
"""mir-125b-5p""",
"""mir-126""",
"""mir-134""",
"""mir-145""",
"""mir-146a""",
"""mir-146a-5p""",
"""mir-150""",
"""mir-155""",
"""mir-181b""",
"""mir-20a-5p""",
"""mir-21""",
"""mir-22""",
"""mir-221""",
"""mir-222""",
"""mir-320a""",
"""mir-34a-5p""",
"""mir-34b-3p""",
"""mir-376c-3p""",
"""mir-448""",
"""mir-448-dependent""",
"""mir-4727-3p""",
"""mir-5090""",
"""mir-5189-5p""",
"""mir-6089""",
"""mir-6810-5p""",
"""mir-9-5p""",
"""mir-92a""",
"""mir-939-5p""",
"""mir221""",
"""mirna""",
"""mirna-mediated""",
"""mirnas""",
"""mironov""",
"""mirounga""",
"""miroungae""",
"""mirror-covered""",
"""mirrors""",
"""mirs""",
"""mis""",
"""miscalculated""",
"""miscarriage""",
"""misclassified""",
"""misconception""",
"""misconceptions""",
"""misconnections""",
"""misdiagnosed""",
"""misdiagnosis""",
"""misidentified""",
"""misinformation""",
"""misinformed""",
"""mislocalization""",
"""mislocalizations""",
"""mismatch""",
"""mismatched""",
"""mismatches""",
"""misperceptions""",
"""miss""",
"""missed""",
"""missense""",
"""missing""",
"""missingness""",
"""mission""",
"""missouri""",
"""misspecified""",
"""mistaken""",
"""misuse""",
"""misused""",
"""mite""",
"""mites""",
"""mitigate""",
"""mitigated""",
"""mitigating""",
"""mitigation""",
"""mitochondria""",
"""mitochondrial""",
"""mitochondrial-encoded""",
"""mitocondrial""",
"""mitogen-activated""",
"""mitogenomes""",
"""mitomycin""",
"""mitophagy""",
"""mitosis""",
"""mitotic""",
"""mitral""",
"""mivat""",
"""mix""",
"""mixed""",
"""mixed-cell""",
"""mixed-effects""",
"""mixed-field""",
"""mixed-metal""",
"""mixed-model""",
"""mixes""",
"""mixing""",
"""mixture""",
"""mixtures""",
"""ml-1""",
"""mlal""",
"""mlc""",
"""mlck""",
"""mlh1-mlh3-dependent""",
"""mln4924""",
"""mltg""",
"""mm-pbsa""",
"""mm2""",
"""mm3""",
"""mmc""",
"""mmhg""",
"""mmi""",
"""mmol""",
"""mmp1""",
"""mmps""",
"""mmr""",
"""mmr-related""",
"""mms""",
"""mms19""",
"""mmt""",
"""mmtv""",
"""mnase""",
"""mnch""",
"""mnps""",
"""mntmpyp""",
"""mobile""",
"""mobile-based""",
"""mobilenet""",
"""mobility""",
"""mobilization""",
"""mobius""",
"""mobocertinib""",
"""moca""",
"""modafinil""",
"""modalities""",
"""modality""",
"""mode""",
"""mode-specific""",
"""model""",
"""model-based""",
"""model-selection""",
"""modeled""",
"""modeling""",
"""modelled""",
"""modelling""",
"""models""",
"""moderate""",
"""moderate-""",
"""moderate-cac""",
"""moderate-certainty""",
"""moderate-quality""",
"""moderate-severe""",
"""moderate-to-severe""",
"""moderate-to-vigorous""",
"""moderated""",
"""moderately""",
"""moderates""",
"""moderating""",
"""moderators""",
"""modern""",
"""modern-day""",
"""modes""",
"""modest""",
"""modestly""",
"""modifiable""",
"""modification""",
"""modifications""",
"""modified""",
"""modifier""",
"""modifiers""",
"""modifies""",
"""modify""",
"""modifying""",
"""modularity""",
"""modulate""",
"""modulated""",
"""modulates""",
"""modulating""",
"""modulation""",
"""modulations""",
"""modulator""",
"""modulators""",
"""modulatory""",
"""module""",
"""modules""",
"""moesia""",
"""mogibacterium""",
"""moieties""",
"""moisturizers""",
"""mol""",
"""molecular""",
"""molecular-based""",
"""molecularly""",
"""molecule""",
"""molecules""",
"""molten""",
"""moment""",
"""momentary""",
"""momentum""",
"""mometasone""",
"""mona""",
"""monetary""",
"""money""",
"""monitor""",
"""monitored""",
"""monitoring""",
"""monkeys""",
"""mono-oxygenation""",
"""monoallelic""",
"""monocenter""",
"""monocentric""",
"""monoclonal""",
"""monogenic""",
"""monolayer""",
"""monolithic""",
"""monoliths""",
"""monoliths-based""",
"""monomeric""",
"""monomicrobial""",
"""mononuclear""",
"""monooxygenase""",
"""monophyletic""",
"""monosaccharides""",
"""monosymotomatic""",
"""monotherapy""",
"""monoxide""",
"""montana""",
"""monte""",
"""monteiro""",
"""montelukast""",
"""month""",
"""monthly""",
"""months""",
"""montreal""",
"""mood""",
"""morbid""",
"""morbidities""",
"""morbidity""",
"""morbimortality""",
"""morcellated""",
"""morcellation""",
"""more""",
"""moreover""",
"""morning""",
"""morphine""",
"""morphogenesis""",
"""morphogenetic""",
"""morphogens""",
"""morphologic""",
"""morphological""",
"""morphologically""",
"""morphologies""",
"""morphology""",
"""morphotypes""",
"""morris""",
"""mortalidade""",
"""mortality""",
"""mortality-to-incidence""",
"""morte""",
"""mortes""",
"""mos""",
"""mosaicism""",
"""mosquito""",
"""most""",
"""mostly""",
"""mostramos""",
"""mostraram-se""",
"""mother""",
"""mother-baby""",
"""mother-to-child""",
"""mothers""",
"""motic""",
"""motif""",
"""motifs""",
"""motile""",
"""motility""",
"""motion""",
"""motion-capture""",
"""motions""",
"""motivate""",
"""motivation""",
"""motivations""",
"""motivator""",
"""motivators""",
"""motives""",
"""motor""",
"""moud""",
"""mouflon""",
"""mount""",
"""mountains""",
"""mounted""",
"""mounting""",
"""mouse""",
"""mouse-sensitized""",
"""mouth""",
"""mouthpiece""",
"""move""",
"""movement""",
"""movements""",
"""moves""",
"""movies""",
"""moving""",
"""mpa-1""",
"""mpecs""",
"""mpi""",
"""mpo""",
"""mpp8""",
"""mps""",
"""mr-linac""",
"""mrd""",
"""mrd-guided""",
"""mrds""",
"""mre11-ctip""",
"""mri""",
"""mri-guided""",
"""mris""",
"""mrl""",
"""mrna""",
"""mrna-unwinding""",
"""mrnas""",
"""mrpa""",
"""msa""",
"""msc""",
"""mscs""",
"""msea""",
"""msh""",
"""msha""",
"""msha-mediated""",
"""mshe""",
"""msi""",
"""msk""",
"""msne""",
"""msp""",
"""msp-deficient""",
"""msp-linked""",
"""mss""",
"""mswat""",
"""mt-trna""",
"""mtbi""",
"""mtdna""",
"""mtle""",
"""mtnr1a""",
"""mtnr1b""",
"""mtor""",
"""mtorc1""",
"""mtt-based""",
"""muca""",
"""much""",
"""mucicat""",
"""mucin""",
"""mucinous""",
"""mucociliary""",
"""mucolytic""",
"""mucosa""",
"""mucosa-airway""",
"""mucosal""",
"""mucous""",
"""mucus""",
"""muerte""",
"""muito""",
"""muitos""",
"""mujian""",
"""mulheres""",
"""multi-center""",
"""multi-compartmental""",
"""multi-component""",
"""multi-disciplinary""",
"""multi-element""",
"""multi-faceted""",
"""multi-host""",
"""multi-institutional""",
"""multi-joint""",
"""multi-leaf""",
"""multi-method""",
"""multi-microinjections""",
"""multi-omics""",
"""multi-sectional""",
"""multi-span""",
"""multi-stage""",
"""multi-step""",
"""multi-surface""",
"""multi-variate""",
"""multi-vector""",
"""multi-wavelength""",
"""multicellular""",
"""multicenter""",
"""multicentre""",
"""multicentric""",
"""multiclass""",
"""multicolor""",
"""multicomponent""",
"""multicopy""",
"""multidimensional""",
"""multidimensionality""",
"""multidisciplinary""",
"""multidomain""",
"""multidrug-binding""",
"""multidrug-resistant""",
"""multielement""",
"""multiethnic""",
"""multifaceted""",
"""multifactorial""",
"""multifocal""",
"""multifollicular""",
"""multifunctional""",
"""multilevel""",
"""multiloculated""",
"""multimedia""",
"""multimethod""",
"""multimethods""",
"""multimodal""",
"""multimodality""",
"""multinomial""",
"""multiorgan""",
"""multiparameter""",
"""multiphase""",
"""multiple""",
"""multiple-ascending-dose""",
"""multiple-camera""",
"""multiple-dose""",
"""multiplexed""",
"""multiplication""",
"""multiplied""",
"""multipotent""",
"""multiregional""",
"""multistage""",
"""multistate""",
"""multisymptomatic""",
"""multitude""",
"""multivariable""",
"""multivariada""",
"""multivariate""",
"""mumbai""",
"""mundial""",
"""municipal""",
"""murine""",
"""mus""",
"""muscarinic""",
"""muscle""",
"""muscles""",
"""muscular""",
"""musculoskeletal""",
"""musculus""",
"""mushrooms""",
"""music-kidney""",
"""muslim""",
"""must""",
"""mutagenesis""",
"""mutans""",
"""mutant""",
"""mutants""",
"""mutated""",
"""mutation""",
"""mutation-specific""",
"""mutation-targeted""",
"""mutational""",
"""mutations""",
"""mutations-many""",
"""mutator""",
"""mutual""",
"""mutually""",
"""mvd""",
"""mvpa""",
"""mwm""",
"""mxc""",
"""mxci""",
"""mxcs""",
"""myalgia""",
"""myasthenia""",
"""mych""",
"""mycobacterium""",
"""mycoplasma""",
"""mycosis""",
"""myd88""",
"""myd88-dependent""",
"""myd88-mutated""",
"""myectomy""",
"""myelin""",
"""myelinated""",
"""myeloablative""",
"""myeloablative-conditioning""",
"""myelodysplastic""",
"""myelofibrosis""",
"""myelogenous""",
"""myeloid""",
"""myeloid-derived""",
"""myeloma""",
"""myelomeningocele""",
"""myeloperoxidase""",
"""myelosuppression""",
"""myoblast""",
"""myocardial""",
"""myocardium""",
"""myoclonic""",
"""myocyte""",
"""myod""",
"""myofibroblasts""",
"""myog""",
"""myogenesis""",
"""myogenic""",
"""myokines""",
"""myoma""",
"""myomas""",
"""myomectomy""",
"""myopathy""",
"""myopic""",
"""myosin""",
"""myotube""",
"""myriad""",
"""mysterious""",
"""myxoid""",
"""n-260""",
"""n-3""",
"""n-acetylcysteine""",
"""n-ethylpentylone""",
"""n-methyl-d-aspartate""",
"""n-methyl-d-aspartic""",
"""n-mixture""",
"""n-oxide""",
"""n-terminal""",
"""n-tosylamides""",
"""n241""",
"""nac""",
"""nac-derived""",
"""nacional""",
"""nact""",
"""nadir""",
"""nadph""",
"""nafld""",
"""nag""",
"""nail""",
"""nail-specific""",
"""nailfold""",
"""naive""",
"""nal-nl2""",
"""naloxone""",
"""naltrexone""",
"""named""",
"""namely""",
"""naming""",
"""nanchang""",
"""nano-sandwich""",
"""nanocatalyst""",
"""nanocellulose""",
"""nanocomposite""",
"""nanocrystalline""",
"""nanofabricated""",
"""nanofibers""",
"""nanog""",
"""nanomachine""",
"""nanomaterials""",
"""nanomedicine""",
"""nanoparticle""",
"""nanoparticles""",
"""nanopore""",
"""nanoscale""",
"""nanoseq""",
"""nanostructures""",
"""nanotechnology""",
"""nanotheranostic""",
"""nanotool""",
"""nanotriangles""",
"""nanovesicles""",
"""naoh""",
"""napa""",
"""napa-linked""",
"""naproxen""",
"""naps""",
"""narcolepsy""",
"""narrated""",
"""narrative""",
"""narrow""",
"""narrower""",
"""narrowing""",
"""nas""",
"""nas-associated""",
"""nasal""",
"""nasal-swab""",
"""nascent""",
"""nasopharyngeal""",
"""nasopulmonary""",
"""nasorespiratory""",
"""natesto""",
"""national""",
"""national-level""",
"""nationalities""",
"""nationally""",
"""nationally-representative""",
"""nations""",
"""nationwide""",
"""native""",
"""natriuretic""",
"""natronobacterium""",
"""natural""",
"""naturalizing""",
"""naturally""",
"""nature""",
"""naturopaths""",
"""nausea""",
"""navigate""",
"""navigated""",
"""navigating""",
"""navigation""",
"""navigators""",
"""navitoclax""",
"""nbccs""",
"""nbme""",
"""nbs""",
"""nbt""",
"""ncam-1""",
"""ncarra""",
"""nci""",
"""nciu""",
"""ncpap""",
"""ncr""",
"""nct00929253""",
"""nct01035255""",
"""nct01920711""",
"""nct02106546""",
"""nct02232737""",
"""nct02250534""",
"""nct02250664""",
"""nct02345863""",
"""nct02401503""",
"""nct02485769""",
"""nct02768792""",
"""nct02803814""",
"""nct03181126""",
"""nct03188393""",
"""nct03314181""",
"""nct03962816""",
"""ndr1""",
"""ndr2""",
"""near""",
"""near-daily""",
"""near-infrared""",
"""near-real-time""",
"""near-surface""",
"""near-total""",
"""nearby""",
"""neared""",
"""nearing""",
"""nearly""",
"""nebivolol""",
"""nebraska""",
"""necessarily""",
"""necessary""",
"""necessitates""",
"""necessitating""",
"""necessity""",
"""neck""",
"""necro-inflammatory""",
"""necrosis""",
"""necrotising""",
"""necrotizing""",
"""nedd8""",
"""neddylation""",
"""need""",
"""needed""",
"""needing""",
"""needle""",
"""needles""",
"""needs""",
"""needs-based""",
"""negating""",
"""negative""",
"""negatively""",
"""negativity""",
"""neglected""",
"""neglects""",
"""negligence""",
"""negligible""",
"""negligibly""",
"""negotiations""",
"""neighborhood""",
"""neighborhoods""",
"""neighboring""",
"""neither""",
"""neo""",
"""neo-angiogenesis""",
"""neo-vessel""",
"""neoadjuvant""",
"""neoantigen""",
"""neoantigens""",
"""neoliberal""",
"""neoliberalism""",
"""neonatal""",
"""neonates""",
"""neonatologists""",
"""neonicotinoid""",
"""neoplasia""",
"""neoplasm""",
"""neoplasms""",
"""neoplastic""",
"""neotropical""",
"""neovascularization""",
"""neovascularized""",
"""nepal""",
"""nephrectomy""",
"""nephritis""",
"""nephrology""",
"""nephroma""",
"""nephrometry""",
"""nephropathy""",
"""nephrosclerosis""",
"""ner""",
"""nereis""",
"""ners""",
"""nerve""",
"""nerve-derived""",
"""nerve-mediated""",
"""nerve-muscle""",
"""nerves""",
"""nervosa""",
"""nervous""",
"""nervousness""",
"""nest""",
"""nest-site""",
"""nested""",
"""nested-pcr""",
"""nests""",
"""net""",
"""net-associated""",
"""net-bound""",
"""netherlands""",
"""nets""",
"""network""",
"""networking""",
"""networks""",
"""neural""",
"""neuralgia""",
"""neuraxial""",
"""neuregulin""",
"""neurite""",
"""neuro-guide""",
"""neuro-monitoring""",
"""neuro-oncology""",
"""neuro-radiologists""",
"""neuro-vascular""",
"""neuroanatomic""",
"""neuroanatomical""",
"""neuroanatomy""",
"""neurobehavioral""",
"""neurobiological""",
"""neurochemical""",
"""neurocircuitry""",
"""neurocognitive""",
"""neurodegeneration""",
"""neurodegenerative""",
"""neurodevelopment""",
"""neurodevelopmental""",
"""neuroendocrine""",
"""neurofibrillary""",
"""neurofibromatosis""",
"""neurofilament""",
"""neurogenesis""",
"""neurogenetic""",
"""neurogenic""",
"""neurogranin""",
"""neuroimaging""",
"""neuroinflammation""",
"""neurologic""",
"""neurological""",
"""neurologically""",
"""neurologist""",
"""neurologists""",
"""neurology""",
"""neuromodulation""",
"""neuromodulators""",
"""neuromuscular""",
"""neuron""",
"""neuron-like""",
"""neuronal""",
"""neurons""",
"""neuroparenchymal""",
"""neuropathic""",
"""neuropathies""",
"""neuropathy""",
"""neuropelveologic""",
"""neuropeptides""",
"""neuroplasticity""",
"""neuroprotection""",
"""neuroprotective""",
"""neuropsychiatric""",
"""neuropsychological""",
"""neuroradiologists""",
"""neuroregeneration""",
"""neuroretinal""",
"""neuroscience""",
"""neuroscientists""",
"""neurosensory""",
"""neurospheres""",
"""neurosteroid""",
"""neurosurgery""",
"""neurosurgical""",
"""neuroticism""",
"""neurotoxicity""",
"""neurotoxin""",
"""neurotransmission""",
"""neurotransmitter""",
"""neurotransmitters""",
"""neurovascular""",
"""neurovegetative""",
"""neurturin""",
"""neurula""",
"""neurulation""",
"""neutered""",
"""neutral""",
"""neutropenia""",
"""neutrophil""",
"""neutrophil-predominant""",
"""neutrophilic""",
"""neutrophils""",
"""never""",
"""nevertheless""",
"""nevi""",
"""nevoid""",
"""nevus""",
"""new""",
"""new-onset""",
"""newborn""",
"""newborns""",
"""newcastle-ottawa""",
"""newer""",
"""newly""",
"""news""",
"""newspaper""",
"""next""",
"""next-day""",
"""next-generation""",
"""nf-treated""",
"""nf1""",
"""nfb""",
"""nfbc1966""",
"""nfib""",
"""nfkb1""",
"""nflg""",
"""nft""",
"""nfts""",
"""ngago""",
"""ngago-guided""",
"""ngal""",
"""ngs""",
"""nhas""",
"""nhej""",
"""nhis""",
"""nhseed""",
"""niche""",
"""niches""",
"""nick""",
"""nicking""",
"""nicotinamide""",
"""nicotine""",
"""nicu""",
"""nid1""",
"""nidogen""",
"""nifurtimox""",
"""nigeria""",
"""nigerians""",
"""night""",
"""night-time""",
"""nightlife""",
"""nightly""",
"""nih""",
"""nimbc""",
"""nimotuzuma""",
"""nine""",
"""nine-banded""",
"""ninety-eight""",
"""ninety-one""",
"""ninety-two""",
"""ninteen""",
"""ninth""",
"""nipbl""",
"""nipbl-a""",
"""nipbl-depleted""",
"""nipped-b-like""",
"""nirs""",
"""nitrate""",
"""nitric""",
"""nitrite""",
"""nitrofurans""",
"""nitrogen""",
"""njpies""",
"""nk-cell""",
"""nld""",
"""nld-sfn""",
"""nlst""",
"""nmda""",
"""nmda-treated""",
"""nmdar""",
"""nmdar-dependent""",
"""nmdar-independent""",
"""nmibc""",
"""nmj""",
"""nmr""",
"""nmsc""",
"""nnt""",
"""no""",
"""no-cac""",
"""no-covid""",
"""no-isolation""",
"""no2""",
"""no3""",
"""nociception""",
"""nocturnal""",
"""nodal""",
"""node""",
"""nodes""",
"""nodosum""",
"""nodular""",
"""nodule""",
"""nodules""",
"""noise""",
"""noise-robust""",
"""nolla""",
"""nomads""",
"""nominal""",
"""non""",
"""non-ad""",
"""non-atg""",
"""non-bf-iol""",
"""non-bf-iols""",
"""non-bleeding""",
"""non-bornian""",
"""non-breast""",
"""non-cancer""",
"""non-canonical""",
"""non-challenged""",
"""non-cleft""",
"""non-clinical""",
"""non-clinically""",
"""non-coding""",
"""non-commercial""",
"""non-communicable""",
"""non-complex""",
"""non-consanguineous""",
"""non-conventional""",
"""non-core""",
"""non-corrosive""",
"""non-covid-19""",
"""non-critical""",
"""non-crossovers""",
"""non-dermatological""",
"""non-desmoglein""",
"""non-diabetic""",
"""non-disabled""",
"""non-disclosure""",
"""non-disease""",
"""non-duplicated""",
"""non-ecg-gated""",
"""non-emergency""",
"""non-english-speaking""",
"""non-expansion""",
"""non-exudative""",
"""non-faculty""",
"""non-fatal""",
"""non-fi""",
"""non-fluorinated""",
"""non-fruiting""",
"""non-genetics""",
"""non-government""",
"""non-gpcr""",
"""non-halophilic""",
"""non-hct""",
"""non-hds""",
"""non-health""",
"""non-hilar""",
"""non-hispanic""",
"""non-hodgkin""",
"""non-homologous""",
"""non-humic""",
"""non-imprinted""",
"""non-indexed""",
"""non-infectious""",
"""non-inferiority""",
"""non-inflammatory""",
"""non-interactive""",
"""non-invasive""",
"""non-invasively""",
"""non-irradiated""",
"""non-length-dependent""",
"""non-linear""",
"""non-linearly""",
"""non-maf""",
"""non-medical""",
"""non-melancholic""",
"""non-melanoma""",
"""non-muscle-invasive""",
"""non-neoplastic""",
"""non-newtonian""",
"""non-obesity-related""",
"""non-overlapping""",
"""non-owners""",
"""non-pcos""",
"""non-pharmacologic""",
"""non-pharmacological""",
"""non-platinum""",
"""non-pregnant""",
"""non-prescribed""",
"""non-prescription""",
"""non-probabilistically""",
"""non-problematic""",
"""non-profit""",
"""non-profits""",
"""non-radiographic""",
"""non-radiological""",
"""non-randomised""",
"""non-recovered""",
"""non-recovering""",
"""non-referral""",
"""non-relapse""",
"""non-scd""",
"""non-selectively""",
"""non-serous""",
"""non-set""",
"""non-shivering""",
"""non-significant""",
"""non-sle""",
"""non-small""",
"""non-small-cell""",
"""non-smokers""",
"""non-specified""",
"""non-spectrum""",
"""non-spurious""",
"""non-statistically""",
"""non-stemi""",
"""non-steroidal""",
"""non-structural""",
"""non-surgical""",
"""non-survey""",
"""non-survival""",
"""non-tc""",
"""non-technical""",
"""non-treated""",
"""non-treatment""",
"""non-uniform""",
"""non-use""",
"""non-valvular""",
"""non-ventilated""",
"""non-verbal""",
"""non-vertebral""",
"""non-watson-crick""",
"""non-white""",
"""nonalcoholic""",
"""nonallergy""",
"""nonambulators""",
"""nonanaplastic""",
"""nonattendance""",
"""nonblocking""",
"""noncancer""",
"""noncanonical""",
"""noncarcinoma""",
"""noncoding""",
"""noncommunicable""",
"""noncompliance""",
"""nonconforming""",
"""nonconventional""",
"""noncorticosteroid""",
"""noncovalent""",
"""nondiabetic""",
"""none""",
"""nonenrollment""",
"""nonetheless""",
"""nonexudative""",
"""nonfish""",
"""nonhospice""",
"""nonhuman""",
"""noninfectious""",
"""noninferior""",
"""noninferiority""",
"""noninstrumental""",
"""noninvasive""",
"""nonlinear""",
"""nonlymphoid""",
"""nonmajor""",
"""nonmalignant""",
"""nonmammalian""",
"""nonmanual""",
"""nonmedical""",
"""nonmelanoma""",
"""nonmetastatic""",
"""nonmyeloablative""",
"""nonnative""",
"""nonoperative""",
"""nonoverlapping""",
"""nonparametric""",
"""nonpayment""",
"""nonpigmented""",
"""nonpregnant""",
"""nonprogressive""",
"""nonprogressors""",
"""nonrandomized""",
"""nonrandomness""",
"""nonregenerative""",
"""nonreproductive""",
"""nonresectable""",
"""nonresponsive""",
"""nonreversible""",
"""nonselective""",
"""nonsense""",
"""nonsense-associated""",
"""nonsevere""",
"""nonsilent""",
"""nonsmokers""",
"""nonspecific""",
"""nonspecified""",
"""nonstandard""",
"""nonstandardized""",
"""nonsteroidal""",
"""nonsteroidogenic""",
"""nonsurgical""",
"""nonsurvivors""",
"""nontraditional""",
"""nontraumatic""",
"""nootropic""",
"""nor""",
"""noradrenaline""",
"""nordic""",
"""nordic-uk""",
"""norflurazon""",
"""noricum""",
"""norm""",
"""normais""",
"""normal""",
"""normal-flow""",
"""normal-hearing""",
"""normalised""",
"""normality""",
"""normalization""",
"""normalize""",
"""normalized""",
"""normally""",
"""normalmente""",
"""normative""",
"""normobaric""",
"""normotensive""",
"""normoxia""",
"""norms""",
"""north""",
"""north-south""",
"""north-west""",
"""northern""",
"""northwest""",
"""norway""",
"""norwegian""",
"""nos""",
"""nosd""",
"""nose""",
"""nose-to-brain""",
"""nosocomial""",
"""nosso""",
"""not""",
"""notable""",
"""notably""",
"""note""",
"""noted""",
"""nothern""",
"""nothing""",
"""notice""",
"""noticed""",
"""notifications""",
"""notion""",
"""nouveaux""",
"""novel""",
"""novelty""",
"""november""",
"""novemcinctus""",
"""novice""",
"""novo""",
"""now""",
"""nowadays""",
"""nox2""",
"""nozzle""",
"""npc""",
"""npds""",
"""nps""",
"""npv""",
"""nras""",
"""nras-mutant""",
"""nrg-cc006""",
"""nrg1""",
"""nrg1-induced""",
"""nrgn""",
"""nrm""",
"""nrs""",
"""nsaids""",
"""nsaidsone""",
"""nsc""",
"""nsclc""",
"""nsduh""",
"""nsp""",
"""nsps""",
"""nsqip""",
"""nsti""",
"""nstis""",
"""nsu""",
"""nsus""",
"""nt-probnp""",
"""ntil""",
"""nts""",
"""ntt""",
"""ntu""",
"""nuances""",
"""nuancing""",
"""nuclear""",
"""nuclear-encoded""",
"""nuclease""",
"""nucleic""",
"""nucleocapsid-igg""",
"""nucleolar""",
"""nucleophosmin""",
"""nucleosidic""",
"""nucleosome""",
"""nucleosomes""",
"""nucleotide""",
"""nucleotide-based""",
"""nucleus""",
"""nude""",
"""nudging""",
"""null""",
"""number""",
"""numbers""",
"""numer""",
"""numeric""",
"""numerical""",
"""numerically""",
"""numerous""",
"""nunca""",
"""nup98""",
"""nupo""",
"""nurse""",
"""nursed""",
"""nurseries""",
"""nurses""",
"""nursing""",
"""nurturing""",
"""nutrient""",
"""nutrients""",
"""nutrition""",
"""nutritional""",
"""nutritionally""",
"""nvaf""",
"""nwi""",
"""nyha""",
"""nylon""",
"""nzdts""",
"""o-glcnac""",
"""o-glcnacase""",
"""o-glcnacylated""",
"""o-glcnacylation""",
"""o-linked-n-acetylglucosaminylation""",
"""o6-methylguanine-dna""",
"""oasis""",
"""oat""",
"""obese""",
"""obesity""",
"""obesity-associated""",
"""obesity-related""",
"""obinutuzumab""",
"""object""",
"""objectif""",
"""objective""",
"""objectively""",
"""objectives""",
"""objet""",
"""objetivo""",
"""obligate""",
"""obligatory""",
"""obliquus""",
"""obliteration""",
"""oblong""",
"""obscure""",
"""obscured""",
"""obscures""",
"""observance""",
"""observation""",
"""observational""",
"""observations""",
"""observatoire""",
"""observe""",
"""observed""",
"""observed-to-expected""",
"""observer""",
"""observer-""",
"""observers""",
"""observing""",
"""obsessive-compulsive""",
"""obstacle""",
"""obstacles""",
"""obstetric""",
"""obstetrical""",
"""obstetrician""",
"""obstetrics""",
"""obstruction""",
"""obstructive""",
"""obtain""",
"""obtained""",
"""obtaining""",
"""obtainment""",
"""obtains""",
"""obviate""",
"""obvious""",
"""oca""",
"""occasion""",
"""occasionally""",
"""occasions""",
"""occiput""",
"""occludens-1""",
"""occludin""",
"""occlusal""",
"""occlusion""",
"""occlusive""",
"""occult""",
"""occupancy""",
"""occupants""",
"""occupational""",
"""occupations""",
"""occupies""",
"""occupy""",
"""occupying""",
"""occur""",
"""occurred""",
"""occurrence""",
"""occurrences""",
"""occurring""",
"""occurs""",
"""ocd""",
"""oceans""",
"""ocorreu""",
"""ocs""",
"""oct""",
"""oct4""",
"""octaethylporphyrin""",
"""october""",
"""ocular""",
"""oculoplastic""",
"""odd""",
"""odds""",
"""odeur""",
"""odeurs""",
"""odh""",
"""odour""",
"""odour-causing""",
"""oecd""",
"""oenocytes""",
"""oestrogen""",
"""of""",
"""ofc""",
"""off""",
"""off-screen""",
"""off-the-shelf""",
"""offender""",
"""offenders""",
"""offer""",
"""offered""",
"""offering""",
"""offers""",
"""office""",
"""officers""",
"""offices""",
"""official""",
"""offshore""",
"""offspring""",
"""ofloxacin""",
"""often""",
"""oftentimes""",
"""oga""",
"""ogd""",
"""ogt""",
"""oha""",
"""ohca""",
"""ohe""",
"""ohfrs""",
"""oil""",
"""oil-free""",
"""oils""",
"""okay""",
"""olaparib""",
"""old""",
"""older""",
"""oldest""",
"""olds""",
"""oleic""",
"""olfactory""",
"""oligodeoxynucleotides""",
"""oligomerization""",
"""oligometastatic""",
"""oligonucleotides""",
"""ols""",
"""omax""",
"""omega-3""",
"""omethoate""",
"""omi""",
"""omics""",
"""omis""",
"""omission""",
"""omissions""",
"""omit""",
"""omitted""",
"""omnipresent""",
"""omphalocele""",
"""oms""",
"""on-licence""",
"""on-line""",
"""on-screen""",
"""on-snow""",
"""on-target""",
"""on-treatment""",
"""on-versus""",
"""once""",
"""once-daily""",
"""oncocytic""",
"""oncodermatology""",
"""oncogene""",
"""oncogenes""",
"""oncogenic""",
"""oncogenicity""",
"""oncologic""",
"""oncological""",
"""oncologist""",
"""oncologists""",
"""oncology""",
"""one""",
"""one-fourth""",
"""one-hybrid""",
"""one-on-one""",
"""one-sided""",
"""one-stage""",
"""one-step""",
"""one-third""",
"""one-time""",
"""one-to-one""",
"""one-way""",
"""one-week""",
"""one-year""",
"""ones""",
"""oneself""",
"""onfh""",
"""ongoing""",
"""online""",
"""only""",
"""onset""",
"""ont""",
"""ontario""",
"""onto""",
"""onward""",
"""onychomycosis""",
"""oocyte""",
"""oocytes""",
"""oomycete""",
"""oop""",
"""opal""",
"""open""",
"""open-angle""",
"""open-ended""",
"""open-field""",
"""open-label""",
"""open-tubular""",
"""opened""",
"""opening""",
"""openings""",
"""openness""",
"""opens""",
"""opensigle""",
"""operate""",
"""operated""",
"""operates""",
"""operating""",
"""operation""",
"""operational""",
"""operationalise""",
"""operationalised""",
"""operationalize""",
"""operationalized""",
"""operations""",
"""operative""",
"""operatively""",
"""operator""",
"""operators""",
"""opg""",
"""opg-mediated""",
"""ophthalmic""",
"""ophthalmoplegia""",
"""opiate""",
"""opinion""",
"""opinions""",
"""opioid""",
"""opioid-""",
"""opioid-based""",
"""opioid-related""",
"""opioids""",
"""opportunistic""",
"""opportunities""",
"""opportunity""",
"""oppose""",
"""opposed""",
"""opposing""",
"""opposite""",
"""opposition""",
"""opra""",
"""ops""",
"""opscc""",
"""opsin""",
"""optic""",
"""optical""",
"""optically""",
"""optimal""",
"""optimally""",
"""optimisation""",
"""optimising""",
"""optimization""",
"""optimize""",
"""optimized""",
"""optimizing""",
"""optimum""",
"""option""",
"""options""",
"""optoelectronic""",
"""oral""",
"""orally""",
"""orbit""",
"""orbital""",
"""orbitofrontal""",
"""orbitopathy""",
"""orcatech""",
"""orchestrated""",
"""orchestrating""",
"""ord""",
"""order""",
"""ordered""",
"""ordering""",
"""orders""",
"""ordinal""",
"""ordinary""",
"""ore""",
"""oregon""",
"""organ""",
"""organ-level""",
"""organ-specific""",
"""organellar""",
"""organelle""",
"""organelles""",
"""organic""",
"""organics""",
"""organisation""",
"""organisations""",
"""organised""",
"""organism""",
"""organismal""",
"""organisms""",
"""organizamos""",
"""organization""",
"""organizational""",
"""organizations""",
"""organize""",
"""organized""",
"""organizing""",
"""organogenesis""",
"""organophosphorus""",
"""organotypic""",
"""organs""",
"""oric""",
"""orientalis""",
"""orientation""",
"""oriented""",
"""orifice""",
"""origin""",
"""original""",
"""originally""",
"""originate""",
"""originated""",
"""originating""",
"""origins""",
"""orlistat""",
"""ornatus""",
"""orofacial""",
"""oropharyngeal""",
"""oropharynx""",
"""orphan""",
"""ors""",
"""ort""",
"""orth""",
"""orthodontic""",
"""orthodontics""",
"""orthodox""",
"""orthodoxy""",
"""orthogonal""",
"""orthogonalizing""",
"""orthogonally""",
"""orthohalarachne""",
"""ortholog""",
"""orthologs""",
"""orthopaedic""",
"""orthopedic""",
"""orthopedics""",
"""orthotopic""",
"""orthotopically""",
"""oryza""",
"""os""",
"""osa""",
"""osce""",
"""oscillating""",
"""oscillation""",
"""oscillations""",
"""osf""",
"""osmoprotectant""",
"""osmoprotectants""",
"""osmotic""",
"""osnf-yb1""",
"""osseous""",
"""osseous-integration""",
"""ostensibly""",
"""osteoarthritis""",
"""osteoblast""",
"""osteoblastic""",
"""osteoblasts""",
"""osteochondral""",
"""osteoclast""",
"""osteoclast-like""",
"""osteoclasts""",
"""osteogenesis""",
"""osteogenesis-regulatory""",
"""osteogenic""",
"""osteoid""",
"""osteonecrosis""",
"""osteopenia""",
"""osteophytes""",
"""osteopontin""",
"""osteoporosis""",
"""osteoprotegerin""",
"""osteosarcoma""",
"""osteotomy""",
"""ostrea""",
"""osyuc11""",
"""osyuc11-mediated""",
"""otariids""",
"""otc""",
"""other""",
"""others""",
"""otherwise""",
"""otm""",
"""otolaryngology""",
"""ototoxicity""",
"""ottawa""",
"""otter""",
"""otters""",
"""oud""",
"""oudemans""",
"""ought""",
"""oulu""",
"""our""",
"""out""",
"""out-of-hospital""",
"""out-of-pocket""",
"""out-patient""",
"""outbreak""",
"""outbreaks""",
"""outbred""",
"""outcome""",
"""outcome-based""",
"""outcome-oriented""",
"""outcomes""",
"""outdoor""",
"""outer""",
"""outflow""",
"""outgroups""",
"""outgrow""",
"""outgrowth""",
"""outlier""",
"""outline""",
"""outlined""",
"""outlines""",
"""outlining""",
"""outlook""",
"""outpatient""",
"""outpatients""",
"""outperformed""",
"""output""",
"""outputs""",
"""outreach""",
"""outside""",
"""outsourcing""",
"""outwardly""",
"""ovarian""",
"""ovaries""",
"""ovariohysterectomy""",
"""ovary""",
"""over""",
"""over-""",
"""over-excision""",
"""over-expression""",
"""over-the-counter""",
"""overactivity""",
"""overadjustment""",
"""overall""",
"""overall-survival""",
"""overarching""",
"""overcome""",
"""overcoming""",
"""overconfidence""",
"""overdose""",
"""overdose-related""",
"""overdoses""",
"""overdrive""",
"""overestimate""",
"""overestimated""",
"""overestimates""",
"""overexpressed""",
"""overexpressing""",
"""overexpression""",
"""overgrowth""",
"""overinterpret""",
"""overlaid""",
"""overlap""",
"""overlapping""",
"""overlaps""",
"""overlooks""",
"""overly""",
"""overnight""",
"""overnutrition""",
"""overprotection""",
"""overprotective""",
"""overranging""",
"""overrepresented""",
"""overrides""",
"""overseas""",
"""overtime""",
"""overtreatment""",
"""overview""",
"""overviews""",
"""overweight""",
"""overwhelm""",
"""overwhelmingly""",
"""ovid""",
"""ovine""",
"""ovo""",
"""ovulation""",
"""ovulator""",
"""owen""",
"""owes""",
"""owing""",
"""own""",
"""owner-dog""",
"""owners""",
"""ownership""",
"""owning""",
"""oxa1""",
"""oxazoline""",
"""oxford""",
"""oxford-astrazeneca""",
"""oxidase""",
"""oxidation""",
"""oxidative""",
"""oxide""",
"""oxidized""",
"""oxidizing""",
"""oxphos""",
"""oxr""",
"""oxygen""",
"""oxygenation""",
"""oxyhemoglobin""",
"""oxynitrophosphide""",
"""oxytocin""",
"""oyster""",
"""ozonation""",
"""ozone""",
"""p-bq""",
"""p-mlc""",
"""p-selectin""",
"""p-tau""",
"""p-value""",
"""p14""",
"""p16-ihc""",
"""p27""",
"""p2y12""",
"""p38""",
"""p450""",
"""p53""",
"""p53-mutant""",
"""p57""",
"""p63""",
"""paa""",
"""paas""",
"""pac""",
"""paca""",
"""pacd""",
"""pace""",
"""paced""",
"""pacemaker""",
"""pacer""",
"""pachymetry""",
"""pachyvessels""",
"""pacientes""",
"""pacific""",
"""pacing""",
"""pack""",
"""package""",
"""packages""",
"""packed""",
"""packs""",
"""paclitaxel""",
"""pad""",
"""paddock""",
"""paediatric""",
"""pafenolol""",
"""page""",
"""pago""",
"""pagos""",
"""paid""",
"""pain""",
"""pain-free""",
"""pain-related""",
"""pain-relieving""",
"""painful""",
"""pair""",
"""paired""",
"""pairing""",
"""pairs""",
"""pairwise""",
"""pais""",
"""palatability""",
"""palatable""",
"""palate""",
"""palliative""",
"""palmar""",
"""palmitate""",
"""palnd""",
"""palpable""",
"""palpated""",
"""palpation""",
"""palsies""",
"""palsy""",
"""pam""",
"""paml""",
"""pan-glioma""",
"""pan-jak""",
"""pan-pi3k""",
"""panavia""",
"""panc02""",
"""panc02-bearing""",
"""pancreas""",
"""pancreata""",
"""pancreatectomy""",
"""pancreatic""",
"""pancreatitis""",
"""pancreatoduodenectomy""",
"""pandemic""",
"""pandemic-related""",
"""pandemics""",
"""panel""",
"""panelists""",
"""panels""",
"""panfletos""",
"""pangenomes""",
"""panniculectomy""",
"""panniculitis""",
"""pannonia""",
"""panometry""",
"""panoramic""",
"""panspermia""",
"""panurethral""",
"""pap""",
"""paper""",
"""papers""",
"""papillary""",
"""papillary-ventricular""",
"""papillomavirus""",
"""paps""",
"""papules""",
"""papulonodules""",
"""par""",
"""para""",
"""para-aortic""",
"""para-medical""",
"""paraben""",
"""parabens""",
"""paracetamol""",
"""paracoccidioides""",
"""paracoccidioidomycosis""",
"""paracrine""",
"""paradigm""",
"""paradigm-hf""",
"""paradigms""",
"""paradoxical""",
"""paraffin-embedded""",
"""paraganglioma""",
"""paragon-hf""",
"""parajuli""",
"""parallel""",
"""parallel-designed""",
"""parallelizable""",
"""parallelization""",
"""parallelized""",
"""parallels""",
"""paralog""",
"""paralogs""",
"""paralysing""",
"""paralysis""",
"""parameter""",
"""parameterized""",
"""parameters""",
"""parametria""",
"""parametric""",
"""parametrium""",
"""paramount""",
"""pararectal""",
"""parasitaemia""",
"""parasite""",
"""parasites""",
"""parasitic""",
"""parastomal""",
"""parathyroid""",
"""paratonia""",
"""parauterine""",
"""parcellation""",
"""pareado""",
"""parenchyma""",
"""parenchyma-sparing""",
"""parenchymal""",
"""parent""",
"""parent-clinician""",
"""parent-infant""",
"""parent-reported""",
"""parent-to-child""",
"""parental""",
"""parenting""",
"""parents""",
"""paresis""",
"""parity""",
"""parkinson""",
"""parotid""",
"""paroxysmal""",
"""parp""",
"""parrot""",
"""parsimonious""",
"""part""",
"""partial""",
"""partially""",
"""participant""",
"""participantes""",
"""participants""",
"""participaram""",
"""participate""",
"""participated""",
"""participating""",
"""participation""",
"""participatory""",
"""particle""",
"""particle-capturing""",
"""particles""",
"""particular""",
"""particularities""",
"""particularly""",
"""particulate""",
"""partitioning""",
"""partly""",
"""partner""",
"""partnering""",
"""partners""",
"""partnership""",
"""partridge""",
"""parts""",
"""parturient""",
"""parturients""",
"""party""",
"""parvalbumin""",
"""pas""",
"""pasi75""",
"""pasi90""",
"""pasmcs""",
"""pasna""",
"""pasq""",
"""pass""",
"""passage""",
"""passageway""",
"""passed""",
"""passing""",
"""passive""",
"""passively""",
"""passo""",
"""password-protected""",
"""past""",
"""past-month""",
"""pasta""",
"""pastes""",
"""pasture""",
"""patch""",
"""patch-clamp""",
"""patches""",
"""patchy""",
"""patency""",
"""paternal""",
"""patescibacteria""",
"""path""",
"""pathogen""",
"""pathogen-specific""",
"""pathogenesis""",
"""pathogenesis-related1""",
"""pathogenetic""",
"""pathogenic""",
"""pathogens""",
"""pathognomonic""",
"""pathologic""",
"""pathological""",
"""pathologically""",
"""pathologically-proven""",
"""pathologies""",
"""pathologist""",
"""pathologists""",
"""pathology""",
"""pathophysiologic""",
"""pathophysiological""",
"""pathophysiologically""",
"""pathophysiology""",
"""pathway""",
"""pathways""",
"""patient""",
"""patient-""",
"""patient-acceptable""",
"""patient-based""",
"""patient-centered""",
"""patient-derived""",
"""patient-perceived""",
"""patient-provider""",
"""patient-related""",
"""patient-reported""",
"""patient-specific""",
"""patient-years""",
"""patients""",
"""pattern""",
"""pattern-recognition""",
"""patterned""",
"""patterning""",
"""patterns""",
"""patting""",
"""paucity""",
"""paulo""",
"""paved""",
"""paving""",
"""paw""",
"""pax8-glis3""",
"""pay""",
"""paying""",
"""payment""",
"""payments""",
"""pbct""",
"""pbe""",
"""pbm""",
"""pbmcs""",
"""pbp1""",
"""pbp2b""",
"""pbps""",
"""pbsct""",
"""pca""",
"""pcaf""",
"""pchf""",
"""pci""",
"""pcm""",
"""pcos""",
"""pcos-like""",
"""pcos-related""",
"""pcr""",
"""pcr-rflp""",
"""pcv""",
"""pd-1""",
"""pdac""",
"""pdacs""",
"""pdb""",
"""pdi""",
"""pdl1""",
"""pdoep-lg12""",
"""pdpv""",
"""peak""",
"""peaking""",
"""peaks""",
"""pearce""",
"""pearldiver""",
"""pearson""",
"""peat""",
"""peatland""",
"""peatlands""",
"""peau""",
"""peculiar""",
"""pecvd""",
"""ped""",
"""pedagogical""",
"""pedagogy""",
"""pediatric""",
"""pediatrician""",
"""pediatrics""",
"""pedicled""",
"""pediculated""",
"""pediveliger""",
"""peduncles""",
"""peer""",
"""peer-reviewed""",
"""peers""",
"""peg10""",
"""pela""",
"""pelagic""",
"""pelo""",
"""pelvic""",
"""pelvis""",
"""pem""",
"""pembrolizumab""",
"""pemphigoid""",
"""pemphigus""",
"""penalties""",
"""pendant""",
"""penectomy""",
"""penguin""",
"""penguins""",
"""pengxi""",
"""penicillin""",
"""penile""",
"""peninsular""",
"""penis""",
"""pennsylvania""",
"""penny""",
"""pense""",
"""pentose""",
"""people""",
"""peoples""",
"""pep""",
"""pepexl""",
"""pepper""",
"""peptidase""",
"""peptidase-4""",
"""peptide""",
"""peptides""",
"""peptidoglycan""",
"""peptidoglycan-associated""",
"""per""",
"""per-""",
"""per-patient""",
"""per-person""",
"""per-procedure""",
"""per-protocol""",
"""per-topology""",
"""perceive""",
"""perceived""",
"""percent""",
"""percentage""",
"""percentages""",
"""percentile""",
"""perception""",
"""perceptions""",
"""percepts""",
"""perceptual""",
"""perceptual-motor""",
"""perceptually""",
"""percussion""",
"""percutaneous""",
"""perennial""",
"""peres""",
"""perfect""",
"""perfectionistic""",
"""perfluorinated""",
"""perfluorocarbons""",
"""perforation""",
"""perforations""",
"""perform""",
"""performa""",
"""performance""",
"""performances""",
"""performed""",
"""performing""",
"""performs""",
"""perfusion""",
"""perhaps""",
"""peri-""",
"""periampullary""",
"""pericellular""",
"""pericentromeric""",
"""pericyte""",
"""peridiagnostic""",
"""perimenstrual""",
"""perinatal""",
"""perineum""",
"""period""",
"""periodic""",
"""periodicals""",
"""periodontal""",
"""periodontally""",
"""periodontitis""",
"""periodontium""",
"""periods""",
"""perioperative""",
"""perioperatively""",
"""peripheral""",
"""periphery""",
"""periplasm""",
"""periplasmic""",
"""periprosthetic""",
"""peristalsis""",
"""peristaltic""",
"""peritoneal""",
"""perivalvular""",
"""perivascular""",
"""periventricular""",
"""permanent""",
"""permanently""",
"""permeability""",
"""permis""",
"""permission""",
"""permit""",
"""permits""",
"""permitted""",
"""permitting""",
"""peroxidase""",
"""peroxidation""",
"""peroxide""",
"""peroxides""",
"""peroxisomes""",
"""peroxules""",
"""perpetuation""",
"""persian""",
"""persist""",
"""persisted""",
"""persistence""",
"""persistent""",
"""persistently""",
"""persisting""",
"""persists""",
"""person""",
"""person-centered""",
"""person-years""",
"""personal""",
"""personalised""",
"""personality""",
"""personalize""",
"""personalized""",
"""personas""",
"""personnels""",
"""persons""",
"""perspective""",
"""perspectives""",
"""perspex""",
"""perspiration""",
"""pertaining""",
"""pertinent""",
"""perturb""",
"""perturbation""",
"""perturbations""",
"""perturbed""",
"""pertussis""",
"""pervasive""",
"""pervasively""",
"""pes""",
"""pesquisa""",
"""pessoal""",
"""pesticides""",
"""pet""",
"""pet-directed""",
"""pet-negative""",
"""peuvent""",
"""pevonedistat""",
"""pf-06650833""",
"""pf-06700841""",
"""pf-06826647""",
"""pfc""",
"""pfcrt""",
"""pfdhfr""",
"""pfdhps""",
"""pfizer""",
"""pfmdr1""",
"""pfor""",
"""pfp""",
"""pfs""",
"""pg-specific""",
"""pgc""",
"""pgd""",
"""ph-neutral""",
"""phaeohyphomycosis""",
"""phage""",
"""phages""",
"""phantom""",
"""phantoms""",
"""pharmaceutical""",
"""pharmaceuticals""",
"""pharmacies""",
"""pharmacists""",
"""pharmacodynamic""",
"""pharmacodynamics""",
"""pharmacoeconomics""",
"""pharmacokinetic""",
"""pharmacokinetic-pharamacodynamic""",
"""pharmacokinetics""",
"""pharmacologic""",
"""pharmacological""",
"""pharmacologically""",
"""pharmacology""",
"""pharmacopeias""",
"""pharmacy""",
"""pharyngeal""",
"""phase""",
"""phase-ii""",
"""phase-lag""",
"""phases""",
"""phenoage""",
"""phenoeaa""",
"""phenolic""",
"""phenological""",
"""phenols""",
"""phenomena""",
"""phenomenal""",
"""phenomenological""",
"""phenomenon""",
"""phenotype""",
"""phenotyped""",
"""phenotypes""",
"""phenotypic""",
"""phenotyping""",
"""phenyl""",
"""phenylalanine""",
"""phenylpropanoids""",
"""phgdh""",
"""phh""",
"""phh-pt-csf""",
"""phh-related""",
"""phh-tea-csf""",
"""phi""",
"""philadelphia""",
"""philanthropic""",
"""philippii""",
"""philippines""",
"""philosophy""",
"""phis""",
"""phloem""",
"""phlorizin""",
"""phoca""",
"""phocids""",
"""phonation""",
"""phone""",
"""phones""",
"""phoning""",
"""phonon""",
"""phonons""",
"""phosphatase""",
"""phosphate""",
"""phospho""",
"""phospho-pr""",
"""phosphoinositide-3-kinase""",
"""phospholamban""",
"""phosphomimetic""",
"""phosphonic""",
"""phosphopeptide""",
"""phosphoprotein""",
"""phosphoproteins""",
"""phosphoric""",
"""phosphorous""",
"""phosphorus""",
"""phosphorylated""",
"""phosphorylation""",
"""photo""",
"""photo-elicitation""",
"""photobiomodulation""",
"""photodynamic""",
"""photoelectron""",
"""photoexcitation""",
"""photograph""",
"""photographs""",
"""photography""",
"""photometric""",
"""photon""",
"""photons""",
"""photoperiod""",
"""photoperiodic""",
"""photophobia""",
"""photoreceptor""",
"""photoreceptors""",
"""photoreduction""",
"""photos""",
"""photosynthesis""",
"""photosynthesis-associated""",
"""photosynthetic""",
"""phototherapy""",
"""photothermal""",
"""phox""",
"""phrases""",
"""phyb""",
"""phyla""",
"""phylogenetic""",
"""phylogenetically""",
"""phylogenic""",
"""phylogenies""",
"""phylogeny""",
"""physical""",
"""physical-chemical""",
"""physically""",
"""physician""",
"""physician-""",
"""physician-assessed""",
"""physician-determined""",
"""physicians""",
"""physicists""",
"""physics""",
"""physiol""",
"""physiologic""",
"""physiological""",
"""physiologically""",
"""physiology""",
"""physiopathological""",
"""physiotherapy""",
"""phytochemicals""",
"""phytohormone""",
"""phytohormones""",
"""pi3k""",
"""pi3k-akt-mtor""",
"""picea""",
"""pick-up""",
"""picture""",
"""piece""",
"""piers-harris""",
"""piezoresistive""",
"""pig""",
"""pigeons""",
"""pigment""",
"""pigmentation""",
"""pigmented""",
"""pigments""",
"""pigs""",
"""pigtail""",
"""pik3ca""",
"""pile-up""",
"""pili""",
"""pillar""",
"""pills""",
"""pilocytic""",
"""pilot""",
"""piloted""",
"""pilt""",
"""pilus""",
"""pink""",
"""pink1""",
"""pioneer""",
"""pioneering""",
"""pipe""",
"""pipeline""",
"""pipelines""",
"""piracetam""",
"""piroxicam""",
"""pita""",
"""pitch""",
"""pitfall""",
"""pitfalls""",
"""pittsburgh""",
"""pituitary""",
"""piver""",
"""pivotal""",
"""piwi""",
"""pji""",
"""pjis""",
"""pk7300""",
"""pka""",
"""pkd""",
"""pkg""",
"""pkm2""",
"""place""",
"""place-based""",
"""placebo""",
"""placebo-controlled""",
"""placed""",
"""placement""",
"""placenta""",
"""placental""",
"""placental-fetal""",
"""placentas""",
"""places""",
"""placing""",
"""plagues""",
"""plain""",
"""plan""",
"""planar""",
"""plane""",
"""planes""",
"""planet""",
"""planktonic""",
"""planned""",
"""planning""",
"""plans""",
"""plans-optimised""",
"""plant""",
"""plant-based""",
"""plant-made""",
"""planta""",
"""plants""",
"""plaque""",
"""plaques""",
"""plasma""",
"""plasmid""",
"""plasmid-diversity""",
"""plasmids""",
"""plasmodium""",
"""plasmon""",
"""plastic""",
"""plasticity""",
"""plastics""",
"""plastid""",
"""plastids""",
"""plataforma""",
"""plate""",
"""plateau""",
"""platelet""",
"""platelet-derived""",
"""platelet-leukocyte""",
"""platelets""",
"""plates""",
"""platform""",
"""platforms""",
"""plating""",
"""platinum""",
"""platinum-based""",
"""platinum-free""",
"""platinum-refractory""",
"""platinum-resistant""",
"""platinum-sensitivity""",
"""plausible""",
"""play""",
"""played""",
"""players""",
"""playing""",
"""plays""",
"""pleasurable""",
"""pleasure""",
"""pleasures""",
"""pleiotropic""",
"""pleomorphy""",
"""pleural""",
"""plexus""",
"""plhiv""",
"""plic""",
"""plication""",
"""plk1""",
"""plnd""",
"""plot""",
"""plot-""",
"""plot-cec""",
"""plot-cec-ms""",
"""plot-scale""",
"""plots""",
"""plpro""",
"""pls-da""",
"""plugging""",
"""plump""",
"""plurality""",
"""pluripotency""",
"""pluripotent""",
"""plus""",
"""pmax""",
"""pneumocystis""",
"""pneumonectomy""",
"""pneumonia""",
"""pneumoniae""",
"""pneumonias""",
"""pneumoperitoneum""",
"""pneumophila""",
"""pneumothorax""",
"""pnfi""",
"""pni""",
"""pnn50""",
"""pns""",
"""pnse""",
"""pntb""",
"""po4""",
"""pocket""",
"""pockets""",
"""pod""",
"""pode""",
"""podem""",
"""poems""",
"""poetry""",
"""point""",
"""point-in-time""",
"""point-of-care""",
"""pointed""",
"""pointing""",
"""points""",
"""poison""",
"""poisoned""",
"""poisoning""",
"""poisoning-related""",
"""poisonings""",
"""poisons""",
"""poisson""",
"""pol""",
"""poland""",
"""polar""",
"""polarity""",
"""polarized""",
"""pole""",
"""police""",
"""policies""",
"""policing""",
"""policy""",
"""policy-""",
"""policymakers""",
"""polio""",
"""polish""",
"""political""",
"""pollen""",
"""pollutants""",
"""polluted""",
"""polluting""",
"""pollution""",
"""pollutions""",
"""polo-like""",
"""poly""",
"""polyaluminum""",
"""polyarticular""",
"""polyaxial""",
"""polycistronic""",
"""polycomb""",
"""polycystic""",
"""polydrug""",
"""polyethylene""",
"""polygenetic""",
"""polyglutamine""",
"""polyglutamine-expanded""",
"""polymer""",
"""polymerase""",
"""polymerases""",
"""polymerisation""",
"""polymerization""",
"""polymers""",
"""polymicrobial""",
"""polymorphism""",
"""polymorphisms""",
"""polyneuropathy""",
"""polynomial""",
"""polyp""",
"""polyphenols""",
"""polypoidal""",
"""polyposis""",
"""polyps""",
"""polyradiculoneuropathy""",
"""polysomal""",
"""polysomnography""",
"""polysubstance""",
"""polysulfides""",
"""polytetrafluoroethylene""",
"""polyunsaturated""",
"""pomelo""",
"""pond""",
"""ponders""",
"""pons""",
"""pontine""",
"""pool""",
"""pool-seq""",
"""pooled""",
"""pooling""",
"""pools""",
"""poolsnp""",
"""poor""",
"""poorer""",
"""poorest""",
"""poorly""",
"""pop""",
"""popliteal""",
"""popular""",
"""popularity""",
"""populated""",
"""population""",
"""population-based""",
"""population-level""",
"""populations""",
"""por""",
"""porcine""",
"""porcupine""",
"""poroma""",
"""porous""",
"""port""",
"""portability""",
"""portable""",
"""portal""",
"""portion""",
"""portions""",
"""portland""",
"""portosystemic""",
"""portray""",
"""portuguese""",
"""posas""",
"""pose""",
"""posed""",
"""poses""",
"""posit""",
"""positing""",
"""position""",
"""positioned""",
"""positioning""",
"""positions""",
"""positive""",
"""positively""",
"""positives""",
"""positivity""",
"""positron""",
"""possess""",
"""possessed""",
"""possesses""",
"""possession""",
"""possibilities""",
"""possibility""",
"""possible""",
"""possibly""",
"""post""",
"""post-aca""",
"""post-acl""",
"""post-administration""",
"""post-burn""",
"""post-covid""",
"""post-covid-19""",
"""post-cqit""",
"""post-crs-hipec""",
"""post-discharge""",
"""post-ed""",
"""post-exercise""",
"""post-extubation""",
"""post-graduate""",
"""post-hct""",
"""post-herpetic""",
"""post-hoc""",
"""post-hsct""",
"""post-injection""",
"""post-injury""",
"""post-intervention""",
"""post-ligation""",
"""post-mi""",
"""post-mortem""",
"""post-myocardial""",
"""post-nasal""",
"""post-nhas""",
"""post-operative""",
"""post-processing""",
"""post-prostatectomy""",
"""post-radiation""",
"""post-recurrence""",
"""post-rp""",
"""post-surgery""",
"""post-tbi""",
"""post-test""",
"""post-thrombotic""",
"""post-training""",
"""post-transcriptional""",
"""post-transcriptionally""",
"""post-translational""",
"""post-transplant""",
"""post-transplantation""",
"""post-traumatic""",
"""post-traumatic-stress-disorder""",
"""post-treatment""",
"""post-ups""",
"""post-urethroplasty""",
"""post-vaccination""",
"""post-visit""",
"""postdischarge""",
"""posterior""",
"""postgraduate""",
"""postinfarction""",
"""postinjury""",
"""postlaryngectomy""",
"""postlobectomy""",
"""postnatal""",
"""postnatally""",
"""postneurosurgical""",
"""postop""",
"""postoperative""",
"""postoperatively""",
"""postpartum""",
"""postponing""",
"""postrepair""",
"""posts""",
"""postsecondary""",
"""postsurgery""",
"""posttest""",
"""posttranscriptional""",
"""posttranslational""",
"""posttraumatic""",
"""posttreatment""",
"""postulated""",
"""postulates""",
"""postulating""",
"""postulation""",
"""postural""",
"""postures""",
"""posturography""",
"""pot""",
"""potassium""",
"""potency""",
"""potent""",
"""potential""",
"""potentially""",
"""potentially-causal""",
"""potentials""",
"""potentiate""",
"""potentiated""",
"""potentiation""",
"""potently""",
"""pou5f3""",
"""poultry""",
"""pour""",
"""pourrait""",
"""poverty""",
"""povidone-iodine""",
"""powder""",
"""power""",
"""power-oriented""",
"""powered""",
"""powerful""",
"""powers""",
"""pp1""",
"""ppb""",
"""ppc""",
"""ppc-i""",
"""ppc-ii""",
"""ppc-iii""",
"""ppc-iv""",
"""ppc-v""",
"""ppc-vi""",
"""ppc-vii""",
"""ppd""",
"""ppds""",
"""ppi""",
"""ppi2""",
"""ppm1h""",
"""ppnad""",
"""ppns""",
"""ppp""",
"""pprom""",
"""pps""",
"""ppt""",
"""ppto""",
"""ppv""",
"""ppwr""",
"""pqm-1""",
"""pqm-1-related""",
"""pr-a""",
"""pr-b""",
"""pr-driven""",
"""practical""",
"""practice""",
"""practiced""",
"""practices""",
"""practicing""",
"""practitioner""",
"""practitioners""",
"""pranayama""",
"""prasugrel""",
"""prausnitzii""",
"""prc2""",
"""prc2-altered""",
"""prdm9""",
"""pre""",
"""pre-""",
"""pre-2020""",
"""pre-aca""",
"""pre-adapted""",
"""pre-analytical""",
"""pre-clinical""",
"""pre-conceptional""",
"""pre-covid""",
"""pre-cqit""",
"""pre-defined""",
"""pre-emptive""",
"""pre-existing""",
"""pre-gl""",
"""pre-harvest""",
"""pre-hospital""",
"""pre-hsct""",
"""pre-intervention""",
"""pre-licensure""",
"""pre-mrna""",
"""pre-operative""",
"""pre-operatively""",
"""pre-pandemic""",
"""pre-post""",
"""pre-pregnancy""",
"""pre-stressed""",
"""pre-surgical""",
"""pre-symptomatic""",
"""pre-test""",
"""pre-tests""",
"""pre-trained""",
"""pre-treatment""",
"""pre-treatments""",
"""pre-vaccination""",
"""prebiotic""",
"""prebiotics""",
"""precaution""",
"""precautionary""",
"""precautions""",
"""precede""",
"""preceded""",
"""precedes""",
"""preceding""",
"""precipitate""",
"""precipitated""",
"""precipitation""",
"""precise""",
"""precisely""",
"""precision""",
"""preclinical""",
"""precocious""",
"""preconception""",
"""preconditioned""",
"""preconditioning""",
"""preconfigured""",
"""precursor""",
"""precursors""",
"""predator""",
"""predators""",
"""predatory""",
"""predecessors""",
"""predefined""",
"""predetermined""",
"""prediagnostic""",
"""predialysis""",
"""predict""",
"""predictability""",
"""predictable""",
"""predictably""",
"""predicted""",
"""predicting""",
"""prediction""",
"""predictions""",
"""predictive""",
"""predictor""",
"""predictors""",
"""predicts""",
"""predisposes""",
"""predisposing""",
"""preditores""",
"""prednisolone""",
"""prednisone""",
"""predominance""",
"""predominant""",
"""predominantly""",
"""preeclampsia""",
"""preencheram""",
"""prefecture""",
"""prefer""",
"""preferable""",
"""preferably""",
"""preference""",
"""preferences""",
"""preferentially""",
"""preferred""",
"""prefrontal""",
"""pregnancies""",
"""pregnancy""",
"""pregnancy-related""",
"""pregnant""",
"""preguidelines""",
"""prehistoric""",
"""prehospital""",
"""prehospitally""",
"""preliminary""",
"""prelingually""",
"""premalignant""",
"""premature""",
"""prematurity""",
"""premenopausal""",
"""premenstrual""",
"""premises""",
"""premium""",
"""premiums""",
"""premolars""",
"""prenatal""",
"""prenyl""",
"""preoperative""",
"""preoperatively""",
"""preparation""",
"""preparations""",
"""prepare""",
"""prepared""",
"""preparedness""",
"""preparing""",
"""preplating""",
"""prepro-orexin""",
"""prepubertal""",
"""prepuce""",
"""preregistered""",
"""prerequisite""",
"""presbycusis""",
"""preschool""",
"""prescribe""",
"""prescribed""",
"""prescriber""",
"""prescribing""",
"""prescription""",
"""prescriptions""",
"""prescriptive""",
"""presence""",
"""presensitization""",
"""present""",
"""presentation""",
"""presentations""",
"""presented""",
"""presenting""",
"""presents""",
"""preservation""",
"""preserve""",
"""preserved""",
"""preserving""",
"""presets""",
"""prespecification""",
"""prespecified""",
"""press""",
"""pressing""",
"""pressure""",
"""pressure-area""",
"""pressures""",
"""presumably""",
"""presume""",
"""presumptive""",
"""presurgical""",
"""preterm""",
"""pretested""",
"""pretransplant""",
"""pretreated""",
"""pretreatment""",
"""prevailing""",
"""prevalence""",
"""prevalent""",
"""prevenir""",
"""prevent""",
"""preventable""",
"""preventative""",
"""prevented""",
"""preventing""",
"""prevention""",
"""preventive""",
"""prevents""",
"""previews""",
"""previous""",
"""previously""",
"""previously-obscured""",
"""previstos""",
"""prevotella""",
"""price""",
"""prices""",
"""pricing""",
"""prima""",
"""primarily""",
"""primary""",
"""primate""",
"""primate-specific""",
"""primates""",
"""prime-knee""",
"""primer""",
"""primers""",
"""priming""",
"""primitive""",
"""prince""",
"""principais""",
"""principal""",
"""principales""",
"""principally""",
"""principle""",
"""principled""",
"""principles""",
"""print""",
"""printed""",
"""prion""",
"""prior""",
"""priori""",
"""priorities""",
"""prioritisation""",
"""prioritised""",
"""prioritization""",
"""prioritize""",
"""prioritized""",
"""priority""",
"""prisma""",
"""prisma-scr""",
"""pristane-induced""",
"""privacy""",
"""private""",
"""privately""",
"""prkar1a""",
"""pro""",
"""pro-apoptosis""",
"""pro-b-type""",
"""pro-bnp""",
"""pro-drug""",
"""pro-inflammatory""",
"""pro-oxidant""",
"""pro-survival""",
"""pro-tumor""",
"""pro-vaccine""",
"""proactive""",
"""proactively""",
"""proangiogenetic""",
"""probabilistic""",
"""probability""",
"""probable""",
"""probably""",
"""probe""",
"""probed""",
"""probes""",
"""probiotic""",
"""probiotics""",
"""probiotiques""",
"""problem""",
"""problemas""",
"""problematic""",
"""problems""",
"""procaspase""",
"""procedimiento""",
"""procedimientos""",
"""procedural""",
"""procedure""",
"""procedures""",
"""proceed""",
"""proceeded""",
"""proceeding""",
"""proceedings""",
"""proceeds""",
"""process""",
"""process-of-care""",
"""processed""",
"""processes""",
"""processing""",
"""procure""",
"""procyon""",
"""prodromal""",
"""prodrug""",
"""produce""",
"""produced""",
"""producers""",
"""produces""",
"""producing""",
"""product""",
"""product-specific""",
"""production""",
"""productive""",
"""productivities""",
"""productivity""",
"""products""",
"""produit""",
"""produits""",
"""proerd""",
"""professional""",
"""professionalism""",
"""professionals""",
"""professor""",
"""proficiency""",
"""proficient""",
"""profile""",
"""profiles""",
"""profiling""",
"""profissionais""",
"""profitability""",
"""profitable""",
"""proforma""",
"""profound""",
"""profoundly""",
"""progenitor""",
"""progesterone""",
"""progestin""",
"""progestin-induced""",
"""progestins""",
"""prognoses""",
"""prognosis""",
"""prognostic""",
"""prognostication""",
"""prognosticator""",
"""program""",
"""programa""",
"""programas""",
"""programmatic""",
"""programme""",
"""programmed""",
"""programmes""",
"""programming""",
"""programs""",
"""progress""",
"""progressed""",
"""progresses""",
"""progressing""",
"""progression""",
"""progression-free""",
"""progressive""",
"""progressors""",
"""prohibition""",
"""prohibitively""",
"""prohormone""",
"""proinflammatory""",
"""project""",
"""projection""",
"""projections""",
"""projects""",
"""prokaryotes""",
"""prokaryotic""",
"""prolapse""",
"""proliferation""",
"""proliferative""",
"""prolong""",
"""prolongation""",
"""prolonged""",
"""prominence""",
"""prominent""",
"""prominently""",
"""promiscuous""",
"""promise""",
"""promises""",
"""promising""",
"""promisingly""",
"""promote""",
"""promoted""",
"""promoter""",
"""promoters""",
"""promotes""",
"""promoting""",
"""promotion""",
"""promotional""",
"""promotive""",
"""prompt""",
"""prompted""",
"""prompting""",
"""promptly""",
"""prompts""",
"""proms""",
"""prone""",
"""prong""",
"""prongs""",
"""pronounced""",
"""prontos-socorros""",
"""proof""",
"""proof-of-concept""",
"""propagate""",
"""propagation""",
"""propamocarb""",
"""propels""",
"""propensities""",
"""propensity""",
"""propensity-matched""",
"""proper""",
"""properly""",
"""properties""",
"""property""",
"""prophylactic""",
"""prophylaxis""",
"""propofol""",
"""proponents""",
"""proportion""",
"""proportional""",
"""proportions""",
"""proposal""",
"""propose""",
"""proposed""",
"""proposes""",
"""proprietary""",
"""propuesto""",
"""propyl""",
"""proquest""",
"""proresolving""",
"""prosecution""",
"""prospect""",
"""prospective""",
"""prospectively""",
"""prospectively-collected""",
"""prospectivo""",
"""prospero""",
"""prostate""",
"""prostatectomy""",
"""prostheses""",
"""prosthesis""",
"""prosthetic""",
"""protamine""",
"""protease""",
"""proteases""",
"""proteasomal""",
"""proteasome""",
"""protect""",
"""protected""",
"""protecting""",
"""protection""",
"""protections""",
"""protective""",
"""protects""",
"""protein""",
"""protein-based""",
"""protein-binding""",
"""protein-coupled""",
"""protein-protein""",
"""protein-rna-dna""",
"""proteinlens""",
"""proteins""",
"""proteinuria""",
"""proteoglycan""",
"""proteoglycans""",
"""proteolysis""",
"""proteome""",
"""proteomes""",
"""proteomic""",
"""proteomics""",
"""prothrombin""",
"""protocol""",
"""protocol-defined""",
"""protocol-specific""",
"""protocols""",
"""proton""",
"""proton-pump""",
"""protonated""",
"""protons""",
"""protoplasmic""",
"""protospacer""",
"""prototype""",
"""prototypes""",
"""prototypic""",
"""prototypical""",
"""protraction""",
"""protrusion""",
"""protrusions""",
"""protumoral""",
"""prove""",
"""proved""",
"""proven""",
"""proves""",
"""provide""",
"""provided""",
"""provider""",
"""provider-centric""",
"""providers""",
"""providers-namely""",
"""provides""",
"""providing""",
"""province""",
"""provinces""",
"""proving""",
"""provision""",
"""provocation""",
"""provocative""",
"""provoked""",
"""proxies""",
"""proximal""",
"""proximally""",
"""proximate""",
"""proximity""",
"""proxy""",
"""prp""",
"""prpb""",
"""prr""",
"""prs""",
"""pruning""",
"""psa""",
"""psd93""",
"""pseudoalteromonas""",
"""pseudocyst""",
"""pseudocysts""",
"""pseudoepitheliomatous""",
"""pseudomonas""",
"""pseudophakes""",
"""pseudotumor""",
"""pseudotumoral""",
"""psi-blast""",
"""psilocybin""",
"""psm""",
"""pso""",
"""psoriasis""",
"""psoriatic""",
"""pstat3""",
"""pstat3-inhibited""",
"""psychedelic""",
"""psychedelic-assisted""",
"""psychiatric""",
"""psychiatrist""",
"""psychiatrists""",
"""psychiatry""",
"""psychoactive""",
"""psychobehavioral""",
"""psychodrama""",
"""psychoeducation""",
"""psychological""",
"""psychologically""",
"""psychologist""",
"""psychologists""",
"""psychology""",
"""psychometric""",
"""psychomotor""",
"""psychopathic""",
"""psychopathologies""",
"""psychopathology""",
"""psychophysical""",
"""psychosocial""",
"""psychotherapy""",
"""psycinfo""",
"""ptb""",
"""ptbp1""",
"""ptc-associated""",
"""ptcs""",
"""ptcy""",
"""pten""",
"""pten-induced""",
"""pth""",
"""ptm""",
"""ptrcast""",
"""ptrend""",
"""ptsd""",
"""pttb""",
"""pubchem""",
"""pubertal""",
"""puberty""",
"""public""",
"""public-key""",
"""publication""",
"""publications""",
"""publicly""",
"""publish""",
"""published""",
"""pubmed""",
"""pubmed-medline""",
"""puducherry""",
"""puf""",
"""puf-8""",
"""puf-binding""",
"""pufa""",
"""pufas""",
"""puffy""",
"""pull""",
"""pull-down""",
"""pulling""",
"""pullout""",
"""pulmonary""",
"""pulmonology""",
"""pulse""",
"""pulses""",
"""puma""",
"""pumilio""",
"""pump""",
"""pump-probe""",
"""punch""",
"""puncture""",
"""punish""",
"""punishment""",
"""punitive""",
"""pupil""",
"""puppets""",
"""purchase""",
"""purchased""",
"""purchasers""",
"""purchasing""",
"""pure""",
"""purely""",
"""purge""",
"""purification""",
"""purified""",
"""purine""",
"""purity""",
"""purported""",
"""purpose""",
"""purposeful""",
"""purposefully""",
"""purposes""",
"""purposive""",
"""purposively""",
"""purpura""",
"""purpuras""",
"""purpuric""",
"""purse-string""",
"""pursued""",
"""pursuing""",
"""pursuit""",
"""push""",
"""pushing""",
"""put""",
"""putative""",
"""puts""",
"""pvi""",
"""pvod""",
"""pwd""",
"""pwh""",
"""pwid""",
"""pwud""",
"""pwuds""",
"""pwv""",
"""pylori""",
"""pylori-positive""",
"""pyramidal""",
"""pyrazine""",
"""pyrolytic""",
"""pyruvate""",
"""python""",
"""q10""",
"""q472""",
"""q61""",
"""qaladar""",
"""qiao""",
"""qing""",
"""qol""",
"""qpcr""",
"""qrdr""",
"""qrt-pcr""",
"""qskin""",
"""qsofa""",
"""qst""",
"""qtl""",
"""quadrant""",
"""quadratic""",
"""quadruple""",
"""quadrupole-tof""",
"""quais""",
"""qualified""",
"""qualifying""",
"""qualitative""",
"""qualitatively""",
"""qualities""",
"""quality""",
"""quality-of-life""",
"""qualtrics""",
"""quantifiable""",
"""quantification""",
"""quantified""",
"""quantify""",
"""quantifying""",
"""quantile""",
"""quantitated""",
"""quantitative""",
"""quantitative-swath""",
"""quantitatively""",
"""quantities""",
"""quantity""",
"""quantum""",
"""quarantine""",
"""quarterly""",
"""quartile""",
"""quartiles""",
"""quasi-experimental""",
"""quasi-randomised""",
"""quasi-randomized""",
"""quasi-rcts""",
"""quassia""",
"""quaternary""",
"""quatre""",
"""quatro""",
"""que""",
"""quechers""",
"""queensland""",
"""queried""",
"""queries""",
"""query""",
"""question""",
"""question-asking""",
"""questioned""",
"""questioning""",
"""questionnaire""",
"""questionnaires""",
"""questions""",
"""queyrat""",
"""qui""",
"""qui-quadrado""",
"""quick""",
"""quickly""",
"""quid""",
"""quiescence""",
"""quiescent""",
"""quiet""",
"""quinate""",
"""quinn""",
"""quinolone-resistance""",
"""quinquennium""",
"""quintessential""",
"""quintile""",
"""quintiles""",
"""quite""",
"""quitting""",
"""quo""",
"""quotient""",
"""r-hgh""",
"""r-loops""",
"""r-squared""",
"""r-vnotes""",
"""r132c""",
"""r2val""",
"""rabbit""",
"""rabbits""",
"""raccoon""",
"""race""",
"""race-based""",
"""racemosa""",
"""racial""",
"""racism""",
"""rad-seq""",
"""rad23b""",
"""rad51""",
"""rad51-dependent""",
"""rad51-mediated""",
"""rad51-ssdna""",
"""rad51c""",
"""rad52""",
"""rad54""",
"""radial""",
"""radiation""",
"""radiation-attenuated""",
"""radiation-free""",
"""radiation-induced""",
"""radiation-related""",
"""radiations""",
"""radical""",
"""radicality""",
"""radio""",
"""radio-labelled""",
"""radioactive""",
"""radiodermatitis""",
"""radiofrequency""",
"""radiograph""",
"""radiographic""",
"""radiographically""",
"""radiographs""",
"""radiography""",
"""radioiodine""",
"""radiologic""",
"""radiological""",
"""radiologically""",
"""radiologists""",
"""radiology""",
"""radiolucencies""",
"""radiolysis""",
"""radiomics""",
"""radioresistant""",
"""radiosensitive""",
"""radiotherapy""",
"""radius""",
"""radon""",
"""radon-associated""",
"""raetia""",
"""raging""",
"""rags""",
"""rainy""",
"""raise""",
"""raised""",
"""raises""",
"""raising""",
"""ral-phr""",
"""rals""",
"""ralstonia""",
"""ram""",
"""ramadan""",
"""ran""",
"""rand""",
"""random""",
"""random-effects""",
"""randomisation""",
"""randomised""",
"""randomised-controlled""",
"""randomising""",
"""randomization""",
"""randomized""",
"""randomly""",
"""randomly-selected""",
"""range""",
"""ranged""",
"""rangelands""",
"""ranges""",
"""ranging""",
"""rank""",
"""rank-based""",
"""ranked""",
"""rankin""",
"""ranking""",
"""rankl""",
"""ranks""",
"""ranzcp""",
"""rap1""",
"""rapid""",
"""rapid-acting""",
"""rapid-eye""",
"""rapidly""",
"""rapport""",
"""rare""",
"""rare-allele""",
"""rarefaction""",
"""rarely""",
"""rarer""",
"""rarity""",
"""ras""",
"""ras-like""",
"""ras-pi3k-akt""",
"""ras-raf""",
"""rasch""",
"""rash""",
"""rat""",
"""rate""",
"""rate-based""",
"""rate-limiting""",
"""rated""",
"""raters""",
"""rates""",
"""rather""",
"""rating""",
"""ratings""",
"""ratio""",
"""rational""",
"""rationale""",
"""ratios""",
"""rats""",
"""rattle""",
"""raunkiaer""",
"""ravl""",
"""raw""",
"""ray""",
"""raynaud""",
"""rbc""",
"""rbe""",
"""rbt""",
"""rcda""",
"""rcm""",
"""rct""",
"""rcts""",
"""rddm""",
"""rdh54""",
"""rdp""",
"""rdrp""",
"""rds""",
"""re-admission""",
"""re-creating""",
"""re-detect""",
"""re-emergence""",
"""re-epithelialization""",
"""re-evaluated""",
"""re-evaluation""",
"""re-exploration""",
"""re-extubation""",
"""re-implantation""",
"""re-insertion""",
"""re-intubation""",
"""re-programming""",
"""re-ruptured""",
"""reach""",
"""reached""",
"""reaches""",
"""reaching""",
"""react""",
"""reaction""",
"""reaction-restriction""",
"""reactions""",
"""reactivation""",
"""reactive""",
"""reactivity""",
"""reactor""",
"""reactors""",
"""read""",
"""readability""",
"""readers""",
"""readily""",
"""readiness""",
"""reading""",
"""readings""",
"""readmission""",
"""readmissions""",
"""readmitted""",
"""readout""",
"""reads""",
"""ready""",
"""reagent""",
"""reagents""",
"""real""",
"""real-time""",
"""real-world""",
"""realised""",
"""realism""",
"""realistic""",
"""realities""",
"""reality""",
"""realizamos""",
"""realizaron""",
"""realization""",
"""realize""",
"""realized""",
"""realms""",
"""reappearance""",
"""reappraisal""",
"""reared""",
"""rearrangement""",
"""rearrangements""",
"""reason""",
"""reasonable""",
"""reasoning""",
"""reasons""",
"""reassessment""",
"""reassured""",
"""reassuring""",
"""reca-ssdna""",
"""recalcitrant""",
"""recall""",
"""recalls""",
"""recap""",
"""recapitulate""",
"""recapitulated""",
"""recapitulating""",
"""receipt""",
"""receive""",
"""received""",
"""receiver""",
"""receiver-operating""",
"""receives""",
"""receiving""",
"""recent""",
"""recently""",
"""recently-developed""",
"""reception""",
"""receptivity""",
"""receptor""",
"""receptor-2""",
"""receptor-directed""",
"""receptor-neprilysin""",
"""receptors""",
"""recessive""",
"""recessively""",
"""recharged""",
"""recharging""",
"""recherche""",
"""recipient""",
"""recipients""",
"""reciprocal""",
"""reciprocally""",
"""recognised""",
"""recognition""",
"""recognize""",
"""recognized""",
"""recognizing""",
"""recombinant""",
"""recombination""",
"""recommend""",
"""recommendation""",
"""recommendations""",
"""recommended""",
"""recommending""",
"""recommends""",
"""reconheciam""",
"""reconnected""",
"""reconsidered""",
"""reconstruct""",
"""reconstructed""",
"""reconstruction""",
"""reconstructions""",
"""reconstructive""",
"""record""",
"""recorded""",
"""recording""",
"""recordings""",
"""records""",
"""recover""",
"""recovered""",
"""recovering""",
"""recovers""",
"""recovery""",
"""recq""",
"""recreational""",
"""recreationally""",
"""recruit""",
"""recruited""",
"""recruiting""",
"""recruitment""",
"""recruits""",
"""rectal""",
"""rectale""",
"""recti""",
"""rectified""",
"""rectifying""",
"""rectovaginal""",
"""rectum""",
"""rectus""",
"""recumbency""",
"""recumbent""",
"""recurred""",
"""recurrence""",
"""recurrence-free""",
"""recurrences""",
"""recurrent""",
"""recursive""",
"""recyclable""",
"""recycle""",
"""recycled""",
"""recycling""",
"""red""",
"""red-emission""",
"""red-free""",
"""redcap""",
"""reddit""",
"""redesigned""",
"""redesigning""",
"""redirect""",
"""redo""",
"""redox""",
"""reduce""",
"""reduced""",
"""reduced-intensity""",
"""reduced-nicotine-content""",
"""reduced-toxicity""",
"""reduces""",
"""reducing""",
"""reductant""",
"""reductase""",
"""reductases""",
"""reduction""",
"""reductions""",
"""reductive""",
"""redundant""",
"""reduzir""",
"""ree""",
"""reefs""",
"""reevaluate""",
"""refer""",
"""reference""",
"""references""",
"""referencing""",
"""referents""",
"""referral""",
"""referrals""",
"""referred""",
"""referring""",
"""refers""",
"""refine""",
"""refined""",
"""refining""",
"""reflect""",
"""reflectance""",
"""reflected""",
"""reflecting""",
"""reflection""",
"""reflective""",
"""reflects""",
"""reflex""",
"""reflexes""",
"""reflexive""",
"""reflexogenic""",
"""reflux""",
"""refolding""",
"""reform""",
"""reformed""",
"""refractive""",
"""refractoriness""",
"""refractory""",
"""refusal""",
"""refutation""",
"""refute""",
"""regadenoson""",
"""regard""",
"""regarded""",
"""regarding""",
"""regardless""",
"""regards""",
"""regenerate""",
"""regeneration""",
"""regeneration-associated""",
"""regenerative""",
"""regime""",
"""regimen""",
"""regimens""",
"""regimes""",
"""regio-""",
"""region""",
"""region-of-interest""",
"""region-specific""",
"""regional""",
"""regionally""",
"""regions""",
"""register""",
"""registered""",
"""registers""",
"""registration""",
"""registries""",
"""registro""",
"""registry""",
"""rego""",
"""regrafts""",
"""regress""",
"""regressed""",
"""regressing""",
"""regression""",
"""regression-based""",
"""regressions""",
"""regressions-analyzed""",
"""regret""",
"""regrowth""",
"""regular""",
"""regularization""",
"""regularly""",
"""regulate""",
"""regulated""",
"""regulates""",
"""regulating""",
"""regulation""",
"""regulations""",
"""regulator""",
"""regulators""",
"""regulatory""",
"""regulatory-approved""",
"""regurgitation""",
"""rehabilitation""",
"""rehospitalization""",
"""reimbursed""",
"""reimbursement""",
"""reimbursements""",
"""reimbursing""",
"""reimplantation""",
"""reinforce""",
"""reinforced""",
"""reinforcement""",
"""reinforces""",
"""reinforcing""",
"""reinstatement""",
"""reintervention""",
"""reintroduced""",
"""reintubation""",
"""reinvigorating""",
"""reject""",
"""rejecting""",
"""rejection""",
"""relacionada""",
"""relacionadas""",
"""relapse""",
"""relapse-free""",
"""relapsed""",
"""relapses""",
"""relapsing""",
"""relate""",
"""related""",
"""relates""",
"""relating""",
"""relation""",
"""relational""",
"""relations""",
"""relationship""",
"""relationships""",
"""relative""",
"""relatively""",
"""relatives""",
"""relaxation""",
"""relaxed""",
"""relay""",
"""release""",
"""released""",
"""releases""",
"""releasing""",
"""relevance""",
"""relevant""",
"""reliability""",
"""reliable""",
"""reliably""",
"""reliance""",
"""relic""",
"""relied""",
"""relief""",
"""relies""",
"""relieve""",
"""relieved""",
"""relieving""",
"""religiosity""",
"""religious""",
"""relocalization""",
"""rely""",
"""relying""",
"""remain""",
"""remainder""",
"""remained""",
"""remaining""",
"""remains""",
"""remarkable""",
"""remarkably""",
"""remdesivir""",
"""remedy""",
"""reminiscent""",
"""remission""",
"""remissions""",
"""remitted""",
"""remitters""",
"""remnant""",
"""remodel""",
"""remodelers""",
"""remodeling""",
"""remodelling""",
"""remote""",
"""remotely""",
"""removal""",
"""remove""",
"""removed""",
"""removes""",
"""removing""",
"""renaissance""",
"""renal""",
"""rendering""",
"""renders""",
"""renew""",
"""renin""",
"""renin-angiotensin-aldosterone""",
"""renina-angiotensina-aldosterona""",
"""rentrop""",
"""reoccurred""",
"""reoperation""",
"""reoperation-related""",
"""reoperations""",
"""reorganization""",
"""rep""",
"""repa""",
"""repair""",
"""repair-deficient""",
"""repair-proficient""",
"""repair-related""",
"""repaired""",
"""repairs""",
"""repeat""",
"""repeatability""",
"""repeatable""",
"""repeated""",
"""repeated-measures""",
"""repeatedly""",
"""repeats""",
"""reperfusion""",
"""repertoire""",
"""repetitive""",
"""replace""",
"""replaced""",
"""replacement""",
"""replacements""",
"""replacing""",
"""replenishing""",
"""replicability""",
"""replicable""",
"""replicate""",
"""replicated""",
"""replicates""",
"""replicating""",
"""replication""",
"""replication-based""",
"""replicative""",
"""replisome""",
"""report""",
"""reported""",
"""reporter""",
"""reporters""",
"""reporting""",
"""reports""",
"""reposait""",
"""reposition""",
"""repositories""",
"""repository""",
"""represent""",
"""representation""",
"""representations""",
"""representative""",
"""representatives""",
"""represented""",
"""representing""",
"""represents""",
"""repressed""",
"""represses""",
"""repressing""",
"""repression""",
"""repressive""",
"""repressor""",
"""reprocessing""",
"""reprocessor""",
"""reproduce""",
"""reproduced""",
"""reproducibility""",
"""reproducible""",
"""reproduction""",
"""reproductive""",
"""reproductive-age""",
"""reprogrammed""",
"""reprogramming""",
"""republic""",
"""repulsive""",
"""repurposed""",
"""reputed""",
"""request""",
"""requested""",
"""requesting""",
"""requests""",
"""require""",
"""required""",
"""requirement""",
"""requirements""",
"""requires""",
"""requiring""",
"""rescue""",
"""rescued""",
"""rescues""",
"""rescuing""",
"""research""",
"""research-can""",
"""researchers""",
"""researches""",
"""resected""",
"""resection""",
"""resections""",
"""resembled""",
"""resembling""",
"""reserve""",
"""reserved""",
"""reserves""",
"""reshapes""",
"""reside""",
"""resided""",
"""residence""",
"""residencial""",
"""residency""",
"""resident""",
"""residential""",
"""residents""",
"""resides""",
"""residual""",
"""residuals""",
"""residue""",
"""residues""",
"""resilience""",
"""resiliency""",
"""resilient""",
"""resin""",
"""resin-packed""",
"""resins""",
"""resist""",
"""resistance""",
"""resistant""",
"""resistentiae""",
"""resistive""",
"""resistome""",
"""resolution""",
"""resolution-e""",
"""resolutions""",
"""resolve""",
"""resolved""",
"""resolving""",
"""resonance""",
"""resonances""",
"""resonant""",
"""resorption""",
"""resource""",
"""resource-limited""",
"""resources""",
"""respect""",
"""respectfully""",
"""respectivamente""",
"""respective""",
"""respectively""",
"""respectueuse""",
"""respiration""",
"""respiratory""",
"""respiratory-health""",
"""respirometry""",
"""respond""",
"""responded""",
"""respondent""",
"""respondents""",
"""responders""",
"""responding""",
"""responsable""",
"""responsables""",
"""response""",
"""response-dependent""",
"""responses""",
"""responsibility""",
"""responsible""",
"""responsive""",
"""responsiveness""",
"""responsivity""",
"""resprouting""",
"""rest""",
"""restart""",
"""restaurar""",
"""resting""",
"""resting-state""",
"""restitution""",
"""restoration""",
"""restorations""",
"""restorative""",
"""restore""",
"""restored""",
"""restoring""",
"""restrain""",
"""restraining""",
"""restrains""",
"""restraints""",
"""restricciones""",
"""restrict""",
"""restricted""",
"""restricting""",
"""restriction""",
"""restriction-fragment""",
"""restrictions""",
"""restrictive""",
"""restricts""",
"""result""",
"""resultant""",
"""resulted""",
"""resulting""",
"""results""",
"""resume""",
"""resumption""",
"""resurgence""",
"""resurgery""",
"""resurvey""",
"""resuscitated""",
"""resuscitation""",
"""retailers""",
"""retain""",
"""retained""",
"""retaining""",
"""retardation""",
"""retards""",
"""retention""",
"""retested""",
"""reticulated""",
"""reticulum""",
"""retina""",
"""retinal""",
"""retinas""",
"""retiniano""",
"""retinogenesis""",
"""retinol""",
"""retinopathy""",
"""retinyl""",
"""retract""",
"""retractable""",
"""retracted""",
"""retraction""",
"""retrieval""",
"""retrieved""",
"""retrocervical""",
"""retrograde""",
"""retropancreatic""",
"""retropubic""",
"""retrosigmoid""",
"""retrospective""",
"""retrospectively""",
"""return""",
"""returned""",
"""returns""",
"""reuptake""",
"""reusable""",
"""revascularization""",
"""reveal""",
"""revealed""",
"""revealing""",
"""reveals""",
"""revel""",
"""revenue""",
"""reversal""",
"""reverse""",
"""reversed""",
"""reversed-phase""",
"""reversible""",
"""reversibly""",
"""revert""",
"""reverted""",
"""review""",
"""reviewed""",
"""reviewers""",
"""reviewing""",
"""reviews""",
"""revise""",
"""revised""",
"""revision""",
"""revisit""",
"""revman""",
"""revolution""",
"""revolutionary""",
"""reward""",
"""rewarding""",
"""rewards""",
"""rfs""",
"""rft1""",
"""rgc""",
"""rgc-like""",
"""rgcs""",
"""rhabdomyolysis""",
"""rhabdomyosarcoma""",
"""rheological""",
"""rheology""",
"""rheumatic""",
"""rheumatoid""",
"""rheumatologic""",
"""rheumatologist""",
"""rheumatology""",
"""rhgh""",
"""rhgh-treatment""",
"""rhinatrema""",
"""rhinosinusitis""",
"""rhizoctonia""",
"""rho""",
"""rhodococci""",
"""rhodococcus""",
"""rhodopsin""",
"""rhq""",
"""rhythm""",
"""rhythms""",
"""ri-oa""",
"""ribavirin""",
"""ribociclib""",
"""ribociclib-everolimus""",
"""ribonucleic""",
"""ribonucleotide""",
"""ribosome""",
"""ribosomes""",
"""ric""",
"""rice""",
"""rich""",
"""rich-electronic""",
"""richardii""",
"""richer""",
"""richness""",
"""ridge""",
"""riely""",
"""rif1""",
"""rif1-pp1""",
"""right""",
"""right-""",
"""right-sided""",
"""righting""",
"""rights""",
"""rigid""",
"""rigidification""",
"""rigorous""",
"""rigour""",
"""rim""",
"""ring""",
"""ringer-lactate""",
"""rinses""",
"""rio""",
"""ripc""",
"""rise""",
"""risen""",
"""rising""",
"""risk""",
"""risk-assessment""",
"""risk-stratification""",
"""risk-stratify""",
"""risk-taking""",
"""risking""",
"""risks""",
"""risky""",
"""risperidone-induced""",
"""ritual""",
"""rituals""",
"""rituximab""",
"""rivaroxaban""",
"""rivaroxaban-treated""",
"""river""",
"""rl2""",
"""rl3""",
"""rl4""",
"""rlnp""",
"""rlv""",
"""rlwe-based""",
"""rmb""",
"""rmpp""",
"""rmrp""",
"""rmse""",
"""rmsea""",
"""rmssd""",
"""rna""",
"""rna-based""",
"""rna-binding""",
"""rna-dependent""",
"""rna-directed""",
"""rna-dna""",
"""rna-seq""",
"""rna-sequencing""",
"""rnai""",
"""rnap""",
"""rnas""",
"""rnascope""",
"""rnase""",
"""rnccs""",
"""rnfl""",
"""rnfls""",
"""road""",
"""road-killed""",
"""roadmap""",
"""roads""",
"""roaming""",
"""rob""",
"""robins-i""",
"""robot""",
"""robot-assisted""",
"""robotic""",
"""robotic-assisted""",
"""robust""",
"""robustly""",
"""robustness""",
"""roc""",
"""roche""",
"""rocs""",
"""rod""",
"""rod-shaped""",
"""rodent""",
"""rodentium""",
"""rodentium-specific""",
"""rodents""",
"""roe""",
"""role""",
"""role-modelling""",
"""role-playing""",
"""roles""",
"""roll-on""",
"""roller""",
"""roller-ski""",
"""roller-skiing""",
"""rollout""",
"""rom""",
"""roman""",
"""romantic""",
"""room""",
"""root""",
"""roots""",
"""rootstock""",
"""ros""",
"""rose""",
"""ross""",
"""rotation""",
"""rotations""",
"""rotators""",
"""rotifers""",
"""rotina""",
"""roughening""",
"""roughly""",
"""roughness""",
"""roukema""",
"""round""",
"""rounds""",
"""route""",
"""routes""",
"""routine""",
"""routinely""",
"""rouviere""",
"""roux-en-y""",
"""rpap""",
"""rral""",
"""rrc""",
"""rrmm""",
"""rrna""",
"""rrt""",
"""rs-dependent""",
"""rs10794418""",
"""rs2119882""",
"""rs4753426""",
"""rsd""",
"""rsfc""",
"""rsi""",
"""rst2""",
"""rswt""",
"""rt-pcr""",
"""rt-qpcr""",
"""rtca""",
"""rubber""",
"""rubbery""",
"""ruffling""",
"""rukwa""",
"""rule""",
"""rules""",
"""rumination""",
"""rumination-reflection""",
"""run""",
"""run-tt""",
"""runners""",
"""running""",
"""runx2""",
"""runx2-related-o-glcnacylated-proteins""",
"""rupert""",
"""rupture""",
"""ruptured""",
"""ruptures""",
"""rural""",
"""rush""",
"""rutin""",
"""rutin-nsaids""",
"""rutin-paracetamol""",
"""ruxolitinib""",
"""rwe""",
"""s-1""",
"""s-phase""",
"""s-transferase-1""",
"""s2058""",
"""s2058d""",
"""s4s""",
"""s51""",
"""sa-dependent""",
"""sabg""",
"""sac""",
"""sacatepequez""",
"""saccular""",
"""sacrifice""",
"""sacrificed""",
"""sacrocolpopexy""",
"""sacrohysteropexy""",
"""sad""",
"""sadness""",
"""safe""",
"""safeguard""",
"""safely""",
"""safer""",
"""safest""",
"""safety""",
"""safety-net""",
"""sagging""",
"""sagittal""",
"""saharan""",
"""said""",
"""sain""",
"""sains""",
"""sale""",
"""sales""",
"""salff""",
"""salicylic""",
"""salient""",
"""saline""",
"""saliva""",
"""salivary""",
"""salmon""",
"""salt""",
"""salt-wasting""",
"""salts""",
"""salud""",
"""salvage""",
"""sam""",
"""samaritan""",
"""same""",
"""same-day""",
"""samhda""",
"""samhsa""",
"""sample""",
"""sampled""",
"""samples""",
"""sampling""",
"""samplings""",
"""sams""",
"""samsung""",
"""san""",
"""sandblasted""",
"""sandblasting""",
"""sandwich""",
"""sandwiched""",
"""sanger""",
"""sanitiser""",
"""santiago""",
"""sao""",
"""sap""",
"""sarcoma""",
"""sarcomas""",
"""sarcopenia""",
"""sarcopterygii""",
"""sars""",
"""sars-cov""",
"""sars-cov-2""",
"""sars-cov-2-mediated""",
"""sars-cov-2-positive""",
"""sas""",
"""sat""",
"""satellite""",
"""satellites""",
"""satisfaction""",
"""satisfactory""",
"""satisfied""",
"""sativa""",
"""saturated""",
"""saturation""",
"""sau""",
"""saudi""",
"""saves""",
"""savg""",
"""savgs""",
"""saving""",
"""saw""",
"""say""",
"""sb-431542""",
"""sb203580""",
"""sbce""",
"""sbp""",
"""sbt""",
"""sca""",
"""scaffold""",
"""scaffolding""",
"""scalability""",
"""scalable""",
"""scald""",
"""scale""",
"""scale-12""",
"""scale-6""",
"""scale-up""",
"""scaled""",
"""scales""",
"""scaling""",
"""scalp""",
"""scalpel""",
"""scan""",
"""scanned""",
"""scanner""",
"""scanners""",
"""scanning""",
"""scans""",
"""scant""",
"""scar""",
"""scar-pressure""",
"""scarce""",
"""scarcity""",
"""scared""",
"""scarring""",
"""scars""",
"""scattered""",
"""scavenger""",
"""scavengers""",
"""scc""",
"""sccs""",
"""scd""",
"""scenario""",
"""scenarios""",
"""scene""",
"""scenedesmus""",
"""scenery""",
"""scenes""",
"""scg""",
"""schamberg""",
"""schedule""",
"""scheduled""",
"""schema""",
"""scheme""",
"""schemes""",
"""schiavonia""",
"""schizophrenia""",
"""scholar""",
"""scholarity""",
"""scholars""",
"""scholarship""",
"""scholastic""",
"""school""",
"""school-based""",
"""schoolers""",
"""schools""",
"""schwannomas""",
"""sci""",
"""sciatic""",
"""scielo""",
"""science""",
"""sciencedirect""",
"""sciences""",
"""scientific""",
"""scientists""",
"""scimitar""",
"""scintigraphy""",
"""scissors""",
"""scl-20""",
"""sclerosis""",
"""sclerosis-frontotemporal""",
"""sclerosis-related""",
"""scoff""",
"""scoff-br""",
"""scoliosis""",
"""scope""",
"""scope-seq2""",
"""scopes""",
"""scoping""",
"""scopus""",
"""scorad""",
"""score""",
"""scores""",
"""scoring""",
"""scoring-based""",
"""scotland""",
"""scr""",
"""screen""",
"""screened""",
"""screening""",
"""screening-detected""",
"""screenings""",
"""screens""",
"""screw""",
"""screw-rod""",
"""screwdriver""",
"""screwdrivers""",
"""screwing""",
"""screws""",
"""scripts""",
"""scrna-seq""",
"""scrotal""",
"""scrupulously""",
"""scrutinize""",
"""scthi""",
"""sculpt""",
"""sculpts""",
"""sd-oct""",
"""sdm""",
"""sdn""",
"""sdq""",
"""sds""",
"""sea""",
"""seal""",
"""seals""",
"""search""",
"""searched""",
"""searches""",
"""searching""",
"""season""",
"""seasonal""",
"""seasons""",
"""sebaceous""",
"""seborrheic""",
"""sec""",
"""second""",
"""second-""",
"""second-degree""",
"""second-messengers""",
"""secondary""",
"""secosteroid""",
"""secretagogue""",
"""secretagogues""",
"""secrete""",
"""secreted""",
"""secreting""",
"""secretion""",
"""secretions""",
"""secteur""",
"""section""",
"""sectional""",
"""sectioned""",
"""sectioning""",
"""sections""",
"""sector""",
"""sectors""",
"""secukinumab""",
"""secure""",
"""security""",
"""sedating""",
"""sedation""",
"""sedative""",
"""sedatives""",
"""sedd""",
"""sedentary""",
"""sedsb""",
"""sedsbs""",
"""see""",
"""seed""",
"""seed-based""",
"""seeded""",
"""seedling""",
"""seedlings""",
"""seeds""",
"""seek""",
"""seekers""",
"""seeking""",
"""seeks""",
"""seem""",
"""seemed""",
"""seemingly""",
"""seems""",
"""seen""",
"""segment""",
"""segmental""",
"""segmentation""",
"""segmentectomy""",
"""segmented""",
"""segments""",
"""segregating""",
"""segregation""",
"""seguridad""",
"""seir""",
"""seizure""",
"""seizure-free""",
"""seizures""",
"""selangor""",
"""selecionados""",
"""select""",
"""select-by-audiogram""",
"""select-by-questionnaire""",
"""select-by-self-test""",
"""select-by-trying""",
"""selected""",
"""selecting""",
"""selection""",
"""selections""",
"""selective""",
"""selective-pi3k""",
"""selectively""",
"""selectivity""",
"""selenium""",
"""self-administered""",
"""self-assemble""",
"""self-assembles""",
"""self-assembly""",
"""self-care""",
"""self-catheterization""",
"""self-concept""",
"""self-controlled""",
"""self-efficacy""",
"""self-efficiency""",
"""self-esteem""",
"""self-expandable""",
"""self-guided""",
"""self-identifying""",
"""self-inducing""",
"""self-ligating""",
"""self-limited""",
"""self-lipids""",
"""self-management""",
"""self-medication""",
"""self-monitoring""",
"""self-organization""",
"""self-poisoning""",
"""self-rated""",
"""self-rating""",
"""self-ratings""",
"""self-reactive""",
"""self-reflection""",
"""self-regulate""",
"""self-regulated""",
"""self-renewal""",
"""self-repair""",
"""self-report""",
"""self-reported""",
"""self-select""",
"""selibs""",
"""sell""",
"""sem""",
"""semaines""",
"""semantic""",
"""semelhantes""",
"""semen""",
"""semi-quantitative""",
"""semi-solid""",
"""semi-structured""",
"""semialdehyde""",
"""semiallogeneic""",
"""semicircular""",
"""semiconducting""",
"""semiochemicals""",
"""semistructured""",
"""sendai""",
"""sending""",
"""sendo""",
"""senescence""",
"""senil""",
"""senior""",
"""seniors""",
"""sensation""",
"""sense""",
"""sensibilidad""",
"""sensing""",
"""sensitive""",
"""sensitivities""",
"""sensitivity""",
"""sensitivity-maximizing""",
"""sensitization""",
"""sensitize""",
"""sensitized""",
"""sensor""",
"""sensorimotor""",
"""sensorineural""",
"""sensors""",
"""sensory""",
"""sensory-based""",
"""sent""",
"""sentence""",
"""sentence-initial""",
"""sentences""",
"""sentiment""",
"""sentinel""",
"""seoul""",
"""separate""",
"""separated""",
"""separately""",
"""separates""",
"""separation""",
"""sepsis""",
"""septal""",
"""september""",
"""septic""",
"""septicaemia-related""",
"""sequelae""",
"""sequence""",
"""sequence-specific""",
"""sequenced""",
"""sequences""",
"""sequencing""",
"""sequential""",
"""sequestering""",
"""sequoia""",
"""sequoiadendron""",
"""ser""",
"""sera""",
"""serbia""",
"""serca2""",
"""serial""",
"""series""",
"""serine""",
"""serious""",
"""seriously""",
"""seroconversion""",
"""serologic""",
"""serology""",
"""seronegative""",
"""seropositivity""",
"""serotonergic""",
"""serotonin""",
"""serotonin-norepinephrine""",
"""serous""",
"""serrated""",
"""serratus""",
"""serum""",
"""serum-based""",
"""serum-derived""",
"""serve""",
"""served""",
"""serves""",
"""service""",
"""services""",
"""serving""",
"""servir""",
"""ses""",
"""sessile""",
"""session""",
"""sessions""",
"""set""",
"""set-up""",
"""set-ups""",
"""setrobuvir""",
"""sets""",
"""setting""",
"""settings""",
"""settle""",
"""settlement""",
"""settlements""",
"""seven""",
"""seven-grade""",
"""seventeen""",
"""seventh""",
"""seventy-five""",
"""seventy-four""",
"""seventy-nine""",
"""seventy-two""",
"""several""",
"""severe""",
"""severely""",
"""severity""",
"""sevo""",
"""sevo-treated""",
"""sevoflurane""",
"""sex""",
"""sex-adjusted""",
"""sex-atypical""",
"""sex-dependent""",
"""sex-dependently""",
"""sex-independent""",
"""sex-matched""",
"""sex-related""",
"""sex-specific""",
"""sex-stratified""",
"""sexes""",
"""sexo""",
"""sextuple""",
"""sexual""",
"""sexuality""",
"""sexually""",
"""sf-36""",
"""sf3b1""",
"""sfn""",
"""sfs""",
"""sfsat""",
"""sglt""",
"""sglt2""",
"""sglts""",
"""shade-tolerant""",
"""shaded""",
"""shades""",
"""shading""",
"""shaft""",
"""sham""",
"""sham-injection""",
"""sham-operated""",
"""shame""",
"""shanghai""",
"""shape""",
"""shape-sensing""",
"""shaped""",
"""shapes""",
"""shaping""",
"""share""",
"""shared""",
"""shares""",
"""sharing""",
"""shark""",
"""sharks""",
"""sharp""",
"""shbg""",
"""she""",
"""shear""",
"""sheath""",
"""shed""",
"""sheep""",
"""sheet-like""",
"""sheets""",
"""shelf""",
"""shell""",
"""shell-like""",
"""shelterin""",
"""shh""",
"""shield""",
"""shielding""",
"""shift""",
"""shifted""",
"""shifting""",
"""shifts""",
"""shiny""",
"""shl""",
"""shock""",
"""shocks""",
"""shockwave""",
"""shoot""",
"""short""",
"""short-""",
"""short-acting""",
"""short-chain""",
"""short-day""",
"""short-form""",
"""short-lived""",
"""short-term""",
"""short-terms""",
"""shortage""",
"""shortages""",
"""shortcomings""",
"""shortcuts""",
"""shorten""",
"""shortened""",
"""shortening""",
"""shortens""",
"""shorter""",
"""shorting""",
"""shotgun""",
"""should""",
"""shoulder""",
"""shoulders""",
"""show""",
"""showed""",
"""showing""",
"""shown""",
"""shows""",
"""shrews""",
"""shriners""",
"""shu""",
"""shunt""",
"""shunting""",
"""shuttle""",
"""shuttling""",
"""si-containing""",
"""si-ion""",
"""si-ions""",
"""sialic""",
"""sibling""",
"""siblings""",
"""sick""",
"""sickle""",
"""sid""",
"""side""",
"""side-by""",
"""side-effect""",
"""side-effects""",
"""side-to-side""",
"""sidelines""",
"""sides""",
"""sids-alte""",
"""siemens""",
"""sighted""",
"""sightedness""",
"""siglas""",
"""siglec""",
"""siglec-3""",
"""siglec-6""",
"""siglec-6-specific""",
"""sigma""",
"""sigmoid""",
"""sigmoidal""",
"""sigmoidoscopy""",
"""sign""",
"""signal""",
"""signaled""",
"""signaling""",
"""signaling-transduction""",
"""signalling""",
"""signals""",
"""signature""",
"""signatures""",
"""significance""",
"""significant""",
"""significantly""",
"""significatif""",
"""significative""",
"""significativement""",
"""signify""",
"""signs""",
"""siguientes""",
"""sii""",
"""silenced""",
"""silencing""",
"""silene""",
"""silent""",
"""silhouette""",
"""silica""",
"""silicate""",
"""silico""",
"""silicon""",
"""silicon-ions""",
"""silk""",
"""silver""",
"""silver-based""",
"""silver-plated""",
"""simians""",
"""similar""",
"""similar-sized""",
"""similarities""",
"""similarity""",
"""similarly""",
"""simple""",
"""simple-to-use""",
"""simpler""",
"""simplest""",
"""simplex""",
"""simplificados""",
"""simplified""",
"""simplify""",
"""simulado""",
"""simulate""",
"""simulated""",
"""simulates""",
"""simulating""",
"""simulation""",
"""simulations""",
"""simultaneous""",
"""simultaneously""",
"""sin""",
"""sinai""",
"""since""",
"""single""",
"""single-""",
"""single-agent""",
"""single-arm""",
"""single-atom""",
"""single-cell""",
"""single-center""",
"""single-centre""",
"""single-copy""",
"""single-dose""",
"""single-ended""",
"""single-institution""",
"""single-item""",
"""single-measures""",
"""single-molecule""",
"""single-nucleotide""",
"""single-payer""",
"""single-port""",
"""single-site""",
"""single-stranded""",
"""singleton""",
"""singletons""",
"""singular""",
"""sink""",
"""sinks""",
"""sino-nasal""",
"""sinus""",
"""sio2""",
"""sionp""",
"""sirna""",
"""sirolimus""",
"""sis""",
"""sistema""",
"""sister""",
"""sit""",
"""sit-stand""",
"""sit-to-stand""",
"""sita""",
"""sitagliptin""",
"""sitchensis""",
"""site""",
"""site-specific""",
"""sites""",
"""sitka""",
"""sitting""",
"""situ""",
"""situated""",
"""situation""",
"""situations""",
"""six""",
"""six-month""",
"""six-months""",
"""six-week-old""",
"""six-weeks""",
"""sixteen""",
"""sixty""",
"""sixty-eight""",
"""sixty-nine""",
"""sixty-two""",
"""size""",
"""sizeable""",
"""sizer""",
"""sizes""",
"""skating""",
"""skeletal""",
"""skeleton""",
"""skepticism""",
"""skewed""",
"""ski""",
"""skiers""",
"""skiing""",
"""skill""",
"""skilled""",
"""skills""",
"""skin""",
"""skin-care""",
"""skin-colored""",
"""skin-friendly""",
"""skin-homing""",
"""skin-limited""",
"""skin-oriented""",
"""skin-related""",
"""skin-to-skin""",
"""skip-inducing""",
"""skipping""",
"""skis""",
"""skp2""",
"""skp2-associated""",
"""skp2-c25""",
"""skull""",
"""skyline""",
"""skylines""",
"""slb""",
"""slcs""",
"""sle""",
"""sle-associated""",
"""slecs""",
"""sleep""",
"""sleepiness""",
"""sleeve""",
"""slew""",
"""slices""",
"""slight""",
"""slightly""",
"""sling""",
"""slip-resistance""",
"""slit-lamp""",
"""sloan""",
"""sloan-kettering""",
"""slope""",
"""sloth""",
"""slow""",
"""slowed""",
"""slower""",
"""slowing""",
"""slowly""",
"""sm1""",
"""sm10""",
"""sm4""",
"""sm5""",
"""sm6""",
"""sm7""",
"""sm9""",
"""sma""",
"""smad""",
"""smad1""",
"""smad2""",
"""smad3""",
"""smad4""",
"""small""",
"""small-""",
"""small-molecule""",
"""small-to-moderate""",
"""smaller""",
"""smallest""",
"""smartphone""",
"""smartphones""",
"""smas""",
"""smc""",
"""smd""",
"""smds""",
"""smears""",
"""smile""",
"""smoke""",
"""smoked""",
"""smokers""",
"""smoking""",
"""smoking-related""",
"""smooth""",
"""smpi""",
"""smpi-cr""",
"""smpi-sr""",
"""smr""",
"""smrs""",
"""snack""",
"""snakes""",
"""snap""",
"""snap-frozen""",
"""snape-pooled""",
"""snapshot""",
"""snc""",
"""snf2""",
"""snomed""",
"""snore""",
"""snoring""",
"""snort""",
"""snorting""",
"""snot-22""",
"""snowball""",
"""snp""",
"""snps""",
"""snri""",
"""snris""",
"""sns""",
"""so-called""",
"""soap""",
"""sobre""",
"""soc""",
"""soccer""",
"""social""",
"""social-demographic""",
"""social-structural""",
"""sociality""",
"""socially""",
"""sociedade""",
"""societal""",
"""societies""",
"""society""",
"""socio-demographic""",
"""socio-demographics""",
"""socio-economic""",
"""socio-emotional""",
"""sociodemographic""",
"""socioeconomic""",
"""socioeconomically""",
"""socs1""",
"""sod""",
"""sod1""",
"""sod2""",
"""sodium""",
"""sodium-glucose""",
"""sofa""",
"""sofosbuvir""",
"""soft""",
"""softest""",
"""software""",
"""soil""",
"""soil-borne""",
"""soins""",
"""solanacearum""",
"""solanum""",
"""sold""",
"""soldiers""",
"""sole""",
"""solely""",
"""solid""",
"""solid-like""",
"""solid-organ""",
"""solid-phase""",
"""solid-state""",
"""solitary""",
"""soluble""",
"""solumedrol""",
"""solution""",
"""solutions""",
"""solvation""",
"""solve""",
"""solved""",
"""solvent""",
"""solvents""",
"""solving""",
"""somatic""",
"""somatomotor""",
"""somatosensation""",
"""some""",
"""somebody""",
"""someone""",
"""sometimes""",
"""somewhat""",
"""son""",
"""sondhi""",
"""songbird""",
"""sonic""",
"""sonodynamic""",
"""sonographers""",
"""sonosenitizer""",
"""sons""",
"""sont""",
"""soon""",
"""sooner""",
"""sophisticated""",
"""sorafenib""",
"""sore""",
"""sort""",
"""sorted""",
"""sorting""",
"""sought""",
"""sound""",
"""soundness""",
"""sounds""",
"""soup""",
"""source""",
"""sources""",
"""south""",
"""south-central""",
"""southeast""",
"""southern""",
"""southwest""",
"""sox11""",
"""sox17""",
"""sox2""",
"""sox9""",
"""spa""",
"""space""",
"""space-time""",
"""spaces""",
"""spacing""",
"""spain""",
"""span""",
"""spanish""",
"""spanning""",
"""spans""",
"""sparc""",
"""spare""",
"""spares""",
"""sparing""",
"""sparked""",
"""sparse""",
"""sparsity""",
"""spartan""",
"""spasm""",
"""spasmodic""",
"""spasms""",
"""spastic-reactive""",
"""spasticity""",
"""spatial""",
"""spatially""",
"""spatio-temporal""",
"""spatio-temporally""",
"""spatiotemporal""",
"""spdef""",
"""speak""",
"""speakers""",
"""speaking""",
"""spearman""",
"""special""",
"""specialised""",
"""specialist""",
"""specialists""",
"""specialization""",
"""specialized""",
"""specially""",
"""specialty""",
"""speciation""",
"""species""",
"""species-appropriate""",
"""specific""",
"""specifically""",
"""specification""",
"""specificities""",
"""specificity""",
"""specified""",
"""specify""",
"""specimen""",
"""specimens""",
"""spect""",
"""spect-mps""",
"""spectra""",
"""spectral""",
"""spectral-domain""",
"""spectralis""",
"""spectrometer""",
"""spectrometry""",
"""spectrometry-based""",
"""spectrophotometry""",
"""spectroscopic""",
"""spectroscopy""",
"""spectrum""",
"""speculate""",
"""speculation""",
"""speech""",
"""speech-regional""",
"""speed""",
"""speedballing""",
"""spending""",
"""spent""",
"""sperm""",
"""spermatogenesis""",
"""spermatozoa""",
"""spf""",
"""spf-treated""",
"""sphere""",
"""spheres""",
"""spheroid""",
"""sphincter""",
"""spiculation""",
"""spielberger""",
"""spike-1-iga""",
"""spike-1-igg""",
"""spikes""",
"""spikes-and-waves""",
"""spillover""",
"""spillovers""",
"""spinal""",
"""spine""",
"""spinifera""",
"""spinning""",
"""spinocerebellar""",
"""spinosus""",
"""spinous""",
"""spinylobsters""",
"""spiritual""",
"""spirituality""",
"""spirochete""",
"""spit""",
"""spl""",
"""spleen""",
"""splenic""",
"""splice""",
"""splicing""",
"""split""",
"""spm""",
"""spme""",
"""spms""",
"""spoken""",
"""spondylarthritis""",
"""spondylitis""",
"""spondyloarthritis""",
"""sponging""",
"""sponsor-initiated""",
"""spontaneous""",
"""spontaneously""",
"""sporadic""",
"""sporadically""",
"""spore-like""",
"""sporozoite""",
"""sporozoites""",
"""sport""",
"""sportdiscus""",
"""sports""",
"""spot""",
"""spotlight""",
"""spots""",
"""spotting""",
"""spp""",
"""sprague""",
"""spray""",
"""spread""",
"""spreading""",
"""spreads""",
"""spreadsheet""",
"""spretus""",
"""sprr1a""",
"""spruce""",
"""sps""",
"""spss""",
"""spur""",
"""sqh""",
"""sqnsclc""",
"""squamata""",
"""squamate""",
"""squamous""",
"""squamous-lined""",
"""square""",
"""squared""",
"""squares""",
"""squares-discriminant""",
"""squats""",
"""srcr""",
"""srmr""",
"""srs""",
"""ssadhd""",
"""ssc""",
"""sscd""",
"""sse""",
"""ssi""",
"""ssm""",
"""ssri""",
"""ssris""",
"""ssti""",
"""st-depression""",
"""st-elevation""",
"""st-pd""",
"""st-segment""",
"""stabilisation""",
"""stability""",
"""stabilization""",
"""stabilize""",
"""stabilized""",
"""stabilizes""",
"""stabilizing""",
"""stable""",
"""stably""",
"""stack""",
"""staff""",
"""staff-to-patient""",
"""staffed""",
"""staffing""",
"""stage""",
"""stage-specific""",
"""staged""",
"""stages""",
"""staging""",
"""stagnates""",
"""stain""",
"""staining""",
"""stains""",
"""stakeholder""",
"""stakeholders""",
"""stalled""",
"""stalling""",
"""stand""",
"""standard""",
"""standard-of-care""",
"""standard-strength""",
"""standardisation""",
"""standardised""",
"""standardization""",
"""standardized""",
"""standardizes""",
"""standards""",
"""standing""",
"""stands""",
"""stanford""",
"""staphylococcus""",
"""staple""",
"""star""",
"""starch""",
"""stargardt""",
"""stark""",
"""start""",
"""started""",
"""starter""",
"""starting""",
"""starts""",
"""starvation""",
"""stasis""",
"""stat3""",
"""stata""",
"""state""",
"""state-by-state""",
"""state-of-the-art""",
"""state-trait""",
"""stated""",
"""statement""",
"""statements""",
"""states""",
"""statewide""",
"""static""",
"""statically""",
"""statin""",
"""statin-treated""",
"""stating""",
"""statins""",
"""station""",
"""stationary""",
"""stations""",
"""statistical""",
"""statistically""",
"""statistics""",
"""status""",
"""statutory""",
"""stay""",
"""stay-at-home""",
"""stays""",
"""steadily""",
"""steady""",
"""steady-state""",
"""stearic""",
"""steatosis""",
"""steel""",
"""steep""",
"""steepest""",
"""steer""",
"""steering""",
"""stefanescu""",
"""stella-2""",
"""stem""",
"""stem-cell-based""",
"""stemi""",
"""stemming""",
"""stemness""",
"""stems""",
"""stenosis""",
"""stent""",
"""stenting""",
"""stents""",
"""step""",
"""step-to-step""",
"""step-up""",
"""steps""",
"""stepwise""",
"""stereo""",
"""stereotyped""",
"""stereotypic""",
"""stereotypy""",
"""sterile""",
"""sterilisation""",
"""sterility""",
"""steroid""",
"""steroid-refractory""",
"""steroids""",
"""stewardship""",
"""sti""",
"""stic""",
"""stics""",
"""stiffening""",
"""stiffness""",
"""stigma""",
"""stigmatization""",
"""stigmatizing""",
"""still""",
"""stillbirth""",
"""stillbirths""",
"""stimulant""",
"""stimulants""",
"""stimulate""",
"""stimulated""",
"""stimulates""",
"""stimulating""",
"""stimulation""",
"""stimuli""",
"""stimuli-responsive""",
"""stimulus""",
"""stimulus-outcome""",
"""stitched""",
"""stitches""",
"""stochastic""",
"""stockholm""",
"""stockpiling""",
"""stoichiometry""",
"""stokes""",
"""stoma""",
"""stomach""",
"""stone""",
"""stones""",
"""stood""",
"""stool""",
"""stools""",
"""stop""",
"""stopped""",
"""stopping""",
"""storage""",
"""store""",
"""stored""",
"""stories""",
"""storm""",
"""stoss""",
"""stoss-dosed""",
"""straight-wire""",
"""strain""",
"""strain-induced""",
"""strain-specific""",
"""strains""",
"""strand""",
"""stranded""",
"""strandedness""",
"""strands""",
"""strasbourg""",
"""strategies""",
"""strategy""",
"""stratification""",
"""stratified""",
"""stratifiers""",
"""stratifies""",
"""stratify""",
"""stratifying""",
"""streaks""",
"""stream""",
"""streamlined""",
"""streams""",
"""street""",
"""street-based""",
"""street-level""",
"""street-view""",
"""strength""",
"""strengthen""",
"""strengthened""",
"""strengthening""",
"""strengthens""",
"""strengths""",
"""strepsirrhines""",
"""streptococcus""",
"""streptozotocin""",
"""stress""",
"""stress-depression""",
"""stress-inducing""",
"""stress-morphology""",
"""stress-related""",
"""stresses""",
"""stressful""",
"""stressing""",
"""stressor""",
"""stressors""",
"""stretch""",
"""strict""",
"""stricter""",
"""strictly""",
"""stricture""",
"""strictures""",
"""strikingly""",
"""string""",
"""stringent""",
"""strip""",
"""stripping""",
"""strive""",
"""strives""",
"""stroboscopic""",
"""stroke""",
"""strokes""",
"""stroking""",
"""stroma""",
"""stroma-derived""",
"""stromal""",
"""stromules""",
"""strong""",
"""stronger""",
"""strongest""",
"""strongly""",
"""strontium""",
"""stroop""",
"""structural""",
"""structurally""",
"""structure""",
"""structure-activity""",
"""structured""",
"""structureless""",
"""structures""",
"""struggle""",
"""sts""",
"""stsrnet""",
"""student""",
"""student-generated""",
"""students""",
"""students-""",
"""students-study""",
"""studied""",
"""studies""",
"""studiesno""",
"""study""",
"""study-using""",
"""studying""",
"""stuff""",
"""stump""",
"""stumps""",
"""stunting""",
"""style""",
"""styrene""",
"""stz""",
"""sub""",
"""sub-analyses""",
"""sub-cellular""",
"""sub-diagnoses""",
"""sub-distribution""",
"""sub-glottal""",
"""sub-groups""",
"""sub-optimal""",
"""sub-saharan""",
"""sub-types""",
"""subacute""",
"""subanalysis""",
"""subarachnoid""",
"""subaxial""",
"""subcellular""",
"""subchondral""",
"""subclasses""",
"""subclonal""",
"""subcortical""",
"""subcutaneous""",
"""subdivided""",
"""subdural""",
"""subfamily""",
"""subfertility""",
"""subgingival""",
"""subgroup""",
"""subgroup-specific""",
"""subgrouped""",
"""subgroups""",
"""subheading""",
"""subject""",
"""subject-rated""",
"""subject-specific""",
"""subjected""",
"""subjecting""",
"""subjective""",
"""subjects""",
"""subjugated""",
"""sublethal""",
"""sublocation""",
"""subluminophores""",
"""submit""",
"""submitted""",
"""submodel""",
"""suboptimal""",
"""subphenotypes""",
"""subpopulation""",
"""subpopulations""",
"""subretinal""",
"""subsample""",
"""subscale""",
"""subscales""",
"""subsequent""",
"""subsequently""",
"""subserves""",
"""subset""",
"""subsets""",
"""subsided""",
"""subspace""",
"""subspecies""",
"""substance""",
"""substance-involved""",
"""substance-related""",
"""substances""",
"""substantial""",
"""substantially""",
"""substantiated""",
"""substantive""",
"""substitute""",
"""substituted""",
"""substitution""",
"""substitutions""",
"""substrate""",
"""substrates""",
"""subthemes""",
"""subtilis""",
"""subtilis-type""",
"""subtle""",
"""subtleties""",
"""subtopic""",
"""subtopics""",
"""subtotal""",
"""subtracting""",
"""subtype""",
"""subtypes""",
"""subungual""",
"""subunit""",
"""subunits""",
"""suburban""",
"""subvalvar""",
"""subvalvular""",
"""succeeded""",
"""success""",
"""successful""",
"""successfully""",
"""successional""",
"""successive""",
"""such""",
"""sucrose""",
"""suction""",
"""sud""",
"""sudden""",
"""suffer""",
"""suffered""",
"""suffering""",
"""suffers""",
"""sufficiency""",
"""sufficient""",
"""sufficiently""",
"""suga""",
"""sugar""",
"""sugar-coated""",
"""sugar-risk""",
"""sugars""",
"""sugary""",
"""suggest""",
"""suggested""",
"""suggesting""",
"""suggestions""",
"""suggestive""",
"""suggests""",
"""suicidal""",
"""suicidality""",
"""suicide""",
"""suicides""",
"""suid""",
"""suitability""",
"""suitable""",
"""suitably""",
"""suite""",
"""suited""",
"""sujeitos""",
"""sul""",
"""sulcus""",
"""sulfane""",
"""sulfate""",
"""sulfation""",
"""sulfide""",
"""sulfonamides""",
"""sulfur""",
"""sulphate""",
"""sulphoxide""",
"""sulthiame""",
"""sum""",
"""summaries""",
"""summarise""",
"""summarised""",
"""summarising""",
"""summarize""",
"""summarized""",
"""summarizes""",
"""summarizing""",
"""summary""",
"""summative""",
"""summed""",
"""summer""",
"""summertime""",
"""sumo-based""",
"""sumoylation""",
"""sun""",
"""sunnyvale""",
"""sunshine""",
"""super""",
"""super-resolution""",
"""supercoiled""",
"""supercoiling""",
"""superfamily""",
"""superficial""",
"""superfolder""",
"""superimposing""",
"""superior""",
"""superiority""",
"""superiorly""",
"""superonasal""",
"""superotemporal""",
"""superoxide""",
"""superparamagnetic""",
"""superphyla""",
"""supersede""",
"""supersedes""",
"""supervised""",
"""supervision""",
"""supervisor""",
"""supplement""",
"""supplemental""",
"""supplementary""",
"""supplementation""",
"""supplemented""",
"""supplements""",
"""supplies""",
"""supply""",
"""supplying""",
"""support""",
"""supported""",
"""supporting""",
"""supportive""",
"""supports""",
"""suppose""",
"""suppress""",
"""suppressed""",
"""suppresses""",
"""suppressing""",
"""suppression""",
"""suppressive""",
"""suppressor""",
"""suppuration""",
"""suppurativa""",
"""supra""",
"""supraclavicular""",
"""suprameatal""",
"""supramolecular""",
"""suprapubic""",
"""sur""",
"""sure""",
"""surface""",
"""surface-based""",
"""surfaces""",
"""surge""",
"""surgeon""",
"""surgeons""",
"""surgeries""",
"""surgery""",
"""surgical""",
"""surgically""",
"""suriname""",
"""surpassing""",
"""surplus""",
"""surprising""",
"""surprisingly""",
"""surrogate""",
"""surrounding""",
"""surroundings""",
"""surrounds""",
"""surveil""",
"""surveillance""",
"""survey""",
"""survey-36v2tm""",
"""surveyed""",
"""surveys""",
"""survival""",
"""survive""",
"""survived""",
"""surviving""",
"""survivor""",
"""survivors""",
"""survivorship""",
"""sus""",
"""susceptibility""",
"""susceptible""",
"""suspect""",
"""suspected""",
"""suspecting""",
"""suspension""",
"""suspicion""",
"""suspicions""",
"""suspicious""",
"""sustain""",
"""sustainability""",
"""sustainable""",
"""sustained""",
"""sustaining""",
"""suture""",
"""sutured""",
"""sutureless""",
"""svf""",
"""svs""",
"""swab""",
"""swallowing""",
"""swallows""",
"""swath-ms""",
"""sway""",
"""swe""",
"""sweating""",
"""sweden""",
"""swedish""",
"""sweeps""",
"""sweet""",
"""sweetened""",
"""sweets""",
"""swelling""",
"""swiftness""",
"""swimming""",
"""swine""",
"""switch""",
"""switched""",
"""switching""",
"""switzerland""",
"""swivel-like""",
"""swot""",
"""swot-ahp""",
"""sydney""",
"""symbiont""",
"""symbiotic""",
"""sympathetic""",
"""sympatric""",
"""symptom""",
"""symptom-based""",
"""symptomatic""",
"""symptomatology""",
"""symptoms""",
"""synapses""",
"""synapsis""",
"""synaptic""",
"""synbiotics""",
"""sync""",
"""synchronous""",
"""synchronously""",
"""synchrony""",
"""syncope""",
"""syndrome""",
"""syndrome-associated""",
"""syndromes""",
"""synergistic""",
"""synergistota""",
"""synergize""",
"""synergizes""",
"""synergy""",
"""synonymous""",
"""synovial""",
"""synoviocyte""",
"""synoviocytes""",
"""syntenin""",
"""synteny""",
"""synthase""",
"""synthases""",
"""syntheses""",
"""synthesis""",
"""synthesise""",
"""synthesising""",
"""synthesize""",
"""synthesized""",
"""synthetase""",
"""synthetases""",
"""synthetic""",
"""synthetically""",
"""synucleinopathies""",
"""syphilis""",
"""syringe""",
"""system""",
"""system-specific""",
"""systematic""",
"""systematically""",
"""systemic""",
"""systems""",
"""systems-based""",
"""systems-level""",
"""systolic""",
"""t-acute""",
"""t-all""",
"""t-cell""",
"""t-cell-derived""",
"""t-cell-mediated""",
"""t-chol""",
"""t-dm1""",
"""t-dna""",
"""t-lymphocyte""",
"""t-regulatory""",
"""t-test""",
"""t-vnotes""",
"""t-wave""",
"""t1-t4""",
"""t1-weighted""",
"""t139""",
"""t146""",
"""t1d""",
"""t2d""",
"""t3-ajcc8""",
"""t4ss""",
"""t50""",
"""t90""",
"""taa""",
"""table""",
"""tables""",
"""tablet""",
"""taboo""",
"""tachy-""",
"""tachycardia""",
"""tackled""",
"""tackling""",
"""tacpk7-d""",
"""tacrolimus""",
"""tactics""",
"""tadpole""",
"""taerf3""",
"""tag""",
"""tag1""",
"""tagged-amplicon""",
"""tagst1""",
"""tail""",
"""tailor""",
"""tailored""",
"""tailoring""",
"""taiwan""",
"""tak-599""",
"""take""",
"""takedown""",
"""taken""",
"""takeover""",
"""takes""",
"""taking""",
"""talker""",
"""talks""",
"""tall""",
"""tallied""",
"""tam""",
"""tamandua""",
"""tamarack""",
"""tamkk5""",
"""tamkk5-6b""",
"""tamkk5-tampk3""",
"""tamkk5-tampk3-taerf3""",
"""tampk3""",
"""tan""",
"""tandem""",
"""tandis""",
"""tang""",
"""tangential""",
"""tangle""",
"""tank""",
"""tanpopo""",
"""tanzania""",
"""taper""",
"""tapering""",
"""tapos""",
"""target""",
"""targetability""",
"""targetable""",
"""targeted""",
"""targeting""",
"""targets""",
"""tariff""",
"""task""",
"""task-fmri""",
"""task-functional""",
"""tasked""",
"""tasks""",
"""taste""",
"""tat""",
"""tau""",
"""taught""",
"""tavr""",
"""tavr-explant""",
"""taxa""",
"""taxas""",
"""taxes""",
"""taxon""",
"""taxonomic""",
"""taxonomy""",
"""taxus""",
"""tba""",
"""tbars""",
"""tbbpa""",
"""tbi""",
"""tbsa""",
"""tbxta""",
"""tces""",
"""tcga""",
"""tcga-luad""",
"""tcls""",
"""tcr""",
"""tcrs""",
"""tdh""",
"""tdp-43q331k""",
"""tdt""",
"""tea""",
"""teach""",
"""teachers""",
"""teaches""",
"""teaching""",
"""team""",
"""teams""",
"""tear""",
"""tearing""",
"""tears""",
"""tebentafusp""",
"""tecab""",
"""technical""",
"""technically""",
"""technicians""",
"""technique""",
"""technique-related""",
"""technique-specific""",
"""techniques""",
"""technological""",
"""technologies""",
"""technology""",
"""techonlogy""",
"""teds-a""",
"""tee""",
"""teenagers""",
"""teeth""",
"""teff""",
"""tehran""",
"""teixeira""",
"""tejido""",
"""tele-delirium""",
"""tele-ecocardiografia""",
"""tele-encounter""",
"""tele-encounters""",
"""teleburns""",
"""telecommunication""",
"""telegram""",
"""telehealth""",
"""telemedicine""",
"""telemrpa""",
"""telencephalic""",
"""telephone""",
"""television""",
"""televisits""",
"""telles""",
"""telomerase""",
"""telomere""",
"""telomere-specific""",
"""telomeric""",
"""tels""",
"""tem""",
"""temozolomide""",
"""temperament""",
"""temperate""",
"""temperature""",
"""temperature-dependent""",
"""temperature-sensing""",
"""temperatures""",
"""tempered""",
"""template""",
"""templated""",
"""templates""",
"""temporal""",
"""temporally""",
"""temporarily""",
"""temporary""",
"""temporize""",
"""temporizing""",
"""temporomandibular""",
"""ten""",
"""ten-year""",
"""tend""",
"""tended""",
"""tendency""",
"""tenderness""",
"""tendineus""",
"""tending""",
"""tendon""",
"""tendons""",
"""tends""",
"""tenet""",
"""teng""",
"""tennis""",
"""tenofovir""",
"""tens""",
"""tensegrity""",
"""tensile""",
"""tension""",
"""tension-free""",
"""tension-type""",
"""tensor""",
"""tentatively""",
"""tenth""",
"""term""",
"""term-equivalent""",
"""termed""",
"""terminal""",
"""terminate""",
"""terminated""",
"""termination""",
"""terminations""",
"""terminological""",
"""terminologies""",
"""terminology""",
"""terms""",
"""terpenes""",
"""terpyridine""",
"""terrabacteria""",
"""terrain""",
"""terrestrial""",
"""territories""",
"""territory""",
"""tert""",
"""tert-mutated""",
"""tertiary""",
"""tertiles""",
"""terumo""",
"""tesified""",
"""test""",
"""test-negative""",
"""test-retest""",
"""teste""",
"""tested""",
"""testes""",
"""testicular""",
"""testimonials""",
"""testing""",
"""testis""",
"""testosterone""",
"""testosterone-responsive""",
"""tests""",
"""tethering""",
"""tetr""",
"""tetrabromobisphenol""",
"""tetrad""",
"""tetradactyla""",
"""tetrahydro-2-isopentyl-5-propyl""",
"""tetrahydroindenoindole""",
"""tetrakis""",
"""tetralogy""",
"""tetraploid""",
"""tetraploids""",
"""tetrapods""",
"""tetrodotoxin-sensitive""",
"""texas""",
"""texas-mexico""",
"""text""",
"""textbooks""",
"""textile""",
"""textual""",
"""tfc""",
"""tfd""",
"""tfe3""",
"""tff""",
"""tfh""",
"""tfh13""",
"""tfr""",
"""tfs""",
"""tga""",
"""tgfbi""",
"""th1""",
"""th17""",
"""th2""",
"""tha""",
"""thai""",
"""thailand""",
"""thalamus""",
"""thalassemia""",
"""thaliana""",
"""than""",
"""thanks""",
"""that""",
"""the""",
"""theatres""",
"""theiler""",
"""their""",
"""them""",
"""thematic""",
"""thematically""",
"""theme""",
"""themes""",
"""themselves""",
"""then""",
"""theobromine""",
"""theoretical""",
"""theoretically""",
"""theoretically-informed""",
"""theories""",
"""theorized""",
"""theory""",
"""therapeutic""",
"""therapeutically""",
"""therapeutics""",
"""therapies""",
"""therapy""",
"""therapy-induced""",
"""therapy-oat""",
"""therapy-prostate""",
"""there""",
"""thereafter""",
"""thereby""",
"""therefore""",
"""therein""",
"""thermal""",
"""thermal-initiated""",
"""thermatogota""",
"""thermodynamic""",
"""thermogenesis""",
"""thermometer""",
"""thermophysical""",
"""these""",
"""theses""",
"""thetransanal""",
"""they""",
"""thiamethoxam""",
"""thick""",
"""thickened""",
"""thickening""",
"""thicker""",
"""thickness""",
"""thin""",
"""thin-film""",
"""things""",
"""thinking""",
"""thinly""",
"""thinner""",
"""thinnest""",
"""thinning""",
"""thiobarbituric""",
"""third""",
"""third-degree""",
"""third-generation""",
"""third-party""",
"""third-trimester""",
"""third-year""",
"""thirds""",
"""thirteen""",
"""thirty""",
"""thirty-five""",
"""thirty-four""",
"""thirty-nine""",
"""thirty-one""",
"""thirty-six""",
"""thirty-three""",
"""thirty-two""",
"""this""",
"""thisin""",
"""thoracic""",
"""thoracoscopic""",
"""thorn""",
"""thorough""",
"""thoroughly""",
"""those""",
"""though""",
"""thought""",
"""thought-provoking""",
"""thoughts""",
"""thous""",
"""thousand""",
"""thousands""",
"""threads""",
"""threat""",
"""threaten""",
"""threatened""",
"""threatening""",
"""threatens""",
"""threats""",
"""three""",
"""three-cohort""",
"""three-dimensional""",
"""three-fold""",
"""three-mirna""",
"""three-months""",
"""three-quarter""",
"""three-quarters""",
"""three-toed""",
"""three-vessel""",
"""three-year""",
"""threefold""",
"""threshold""",
"""thresholds""",
"""thrive""",
"""throat""",
"""thrombin""",
"""thrombin-antithrombin""",
"""thrombo-embolism""",
"""thrombocytopenia""",
"""thrombocytopenic""",
"""thromboelastometry""",
"""thromboembolic""",
"""thromboembolism""",
"""thromboprophylaxis""",
"""thrombosis""",
"""thrombosis-related""",
"""thrombospondin-1""",
"""thrombotic""",
"""through""",
"""throughout""",
"""throughput""",
"""throw""",
"""thunderclap""",
"""thus""",
"""thymic""",
"""thymoglobulin""",
"""thymoma""",
"""thyroid""",
"""thyroid-stimulating""",
"""thyroidectomy""",
"""thyroiditis""",
"""thyroseq""",
"""thyroxine""",
"""tian""",
"""tibati""",
"""tibetan""",
"""tibetans""",
"""tibia""",
"""tibiae""",
"""tibial""",
"""ticagrelor""",
"""tick""",
"""tidal""",
"""tied""",
"""ties""",
"""tight""",
"""tightly""",
"""tigray""",
"""tikur""",
"""tilt""",
"""timber""",
"""time""",
"""time-action""",
"""time-based""",
"""time-consuming""",
"""time-course""",
"""time-dependent""",
"""time-lapse""",
"""time-lining""",
"""time-points""",
"""time-to-event""",
"""time-trial""",
"""time-varying""",
"""timed""",
"""timeframe""",
"""timelines""",
"""timely""",
"""timepoint""",
"""times""",
"""timescale""",
"""timing""",
"""timor-leste""",
"""tingting""",
"""tipo""",
"""tipsc""",
"""tipsc-eb""",
"""tipscs""",
"""tissue""",
"""tissue-engineering""",
"""tissue-specific""",
"""tissues""",
"""titer""",
"""titering""",
"""titers""",
"""title""",
"""titles""",
"""titrated""",
"""titration""",
"""titres""",
"""tiveram""",
"""tka""",
"""tkas""",
"""tld""",
"""tlds""",
"""tlif""",
"""tlr""",
"""tlr-7""",
"""tlr4""",
"""tlrs""",
"""tmao""",
"""tmd""",
"""tmd-8""",
"""tmem106b""",
"""tmev""",
"""tmev-induced""",
"""tmj""",
"""tmjs""",
"""tmp-smx""",
"""tmt""",
"""tmt-a""",
"""tmt-b""",
"""tmz""",
"""tn6677""",
"""tnbc""",
"""tnd""",
"""tnf""",
"""tnfi""",
"""tnfrsf14""",
"""tnfsf13""",
"""tngs""",
"""tnsf13""",
"""to""",
"""tobacco""",
"""tobit""",
"""tobramycin""",
"""tocilizumab""",
"""tocolysis""",
"""today""",
"""toddler""",
"""todos""",
"""toe-in""",
"""toe-out""",
"""together""",
"""togetherness""",
"""toilet""",
"""tokyo""",
"""tolerability""",
"""tolerable""",
"""tolerance""",
"""tolerant""",
"""tolerate""",
"""tolerated""",
"""tolerating""",
"""tolerizing""",
"""toll""",
"""toll-like""",
"""tomographic""",
"""tomography""",
"""tone""",
"""tones""",
"""tongji""",
"""tongue""",
"""tonic""",
"""tonic-clonic""",
"""tonometry""",
"""tonsillectomy""",
"""tonsils""",
"""too""",
"""took""",
"""tool""",
"""toolbox""",
"""toolkit""",
"""tools""",
"""tooth""",
"""top""",
"""topic""",
"""topical""",
"""topics""",
"""topiramate""",
"""topographer""",
"""topographic""",
"""topological""",
"""topologies""",
"""topology""",
"""topotecan""",
"""tops""",
"""torque""",
"""torques""",
"""torti""",
"""torticollis""",
"""tortosa""",
"""tot""",
"""total""",
"""totality""",
"""totally""",
"""totem""",
"""touch""",
"""tourism""",
"""tout""",
"""toward""",
"""towards""",
"""town""",
"""townsendi""",
"""toxic""",
"""toxicants""",
"""toxicities""",
"""toxicity""",
"""toxicologists""",
"""toxicology""",
"""toxoplasmosis""",
"""tp53""",
"""tpe""",
"""tpzna-his""",
"""tr1""",
"""tr2""",
"""trabecular""",
"""trabeculectomy""",
"""tracab""",
"""trace""",
"""traced""",
"""tracers""",
"""trachea""",
"""tracheal""",
"""tracheoesophagea""",
"""tracheoesophageal""",
"""track""",
"""tracked""",
"""tracking""",
"""tract""",
"""tractable""",
"""tracts""",
"""trade""",
"""trade-offs""",
"""traded""",
"""tradition""",
"""traditional""",
"""traditionally""",
"""traditionnels""",
"""traf6""",
"""traffic""",
"""traffic-related""",
"""trafficking""",
"""trail""",
"""trails""",
"""train""",
"""train-the-trainer""",
"""trained""",
"""trainee""",
"""trainees""",
"""traineeships""",
"""training""",
"""trait""",
"""traiter""",
"""traits""",
"""trajectories""",
"""trajectory""",
"""tramadol""",
"""tranexamic""",
"""tranglycosylases""",
"""transabdominal""",
"""transaction""",
"""transactions""",
"""transaminase""",
"""transanal""",
"""transcatheter""",
"""transcends""",
"""transcranial""",
"""transcribed""",
"""transcribes""",
"""transcript""",
"""transcription""",
"""transcription-factor""",
"""transcription-factor-encoding""",
"""transcription-pcr""",
"""transcriptional""",
"""transcriptionally""",
"""transcriptome""",
"""transcriptomes""",
"""transcriptomic""",
"""transcriptomic-based""",
"""transcriptomics""",
"""transcripts""",
"""transcutaneous""",
"""transdiagnostic""",
"""transdifferentiated""",
"""transdifferentiation""",
"""transduced""",
"""transducer""",
"""transduction""",
"""transect""",
"""transfer""",
"""transfer-dependence""",
"""transferase""",
"""transferred""",
"""transferrin""",
"""transferring""",
"""transfers""",
"""transforaminal""",
"""transform""",
"""transformation""",
"""transformations""",
"""transformed""",
"""transforming""",
"""transfused""",
"""transfusing""",
"""transfusion""",
"""transfusion-related""",
"""transfusions""",
"""transgender""",
"""transgender-competent""",
"""transgenic""",
"""transglutaminase""",
"""transglycosylase""",
"""transient""",
"""transients""",
"""transistor""",
"""transition""",
"""transition-related""",
"""transitional""",
"""transitioned""",
"""transitions""",
"""transitory""",
"""transjugular""",
"""translabyrinthine""",
"""translatability""",
"""translatable""",
"""translate""",
"""translated""",
"""translating""",
"""translation""",
"""translational""",
"""translations""",
"""translatome""",
"""translocases""",
"""translocation""",
"""translucent""",
"""translucida""",
"""transluminal""",
"""transmastoid""",
"""transmembrane""",
"""transmission""",
"""transmit""",
"""transmitted""",
"""transmitters""",
"""transmitting""",
"""transoesophageal""",
"""transparency""",
"""transparent""",
"""transpeptidase""",
"""transperineal""",
"""transperitoneal""",
"""transpiration""",
"""transpired""",
"""transplant""",
"""transplant-associated""",
"""transplant-free""",
"""transplantation""",
"""transplanted""",
"""transplants""",
"""transport""",
"""transportation""",
"""transported""",
"""transporter""",
"""transporters""",
"""transposable""",
"""transposases""",
"""transposition""",
"""transthoracic""",
"""transurethral""",
"""transvaginal""",
"""transvenous""",
"""transversal""",
"""transversarium""",
"""transverse""",
"""transwell""",
"""trap""",
"""traps""",
"""trastuzumab""",
"""tratados""",
"""tratamiento""",
"""trauma""",
"""trauma-exposed""",
"""traumatic""",
"""travel""",
"""travelling""",
"""traverse""",
"""treat""",
"""treatable""",
"""treated""",
"""treating""",
"""treatment""",
"""treatment-induced""",
"""treatment-naive""",
"""treatment-related""",
"""treatment-resistant""",
"""treatment-seeking""",
"""treatments""",
"""treatmentthere""",
"""treats""",
"""treats-analytical""",
"""tree""",
"""tree-base""",
"""tree-top""",
"""trees""",
"""trefoil""",
"""treg""",
"""tregs""",
"""treinamento""",
"""treinamentos""",
"""treinar""",
"""treinou""",
"""trem2""",
"""tremellen""",
"""tremendous""",
"""tremendously""",
"""trend""",
"""trends""",
"""treosulfan""",
"""trf2""",
"""triad""",
"""triage""",
"""triaged""",
"""triaging""",
"""trial""",
"""trial-and-error""",
"""trialists""",
"""trials""",
"""triamcinolone""",
"""triangle""",
"""triazole""",
"""triazolylferrocenyl""",
"""trichomegaly""",
"""trichoscopic""",
"""trichoscopic-histologic""",
"""triclocarban""",
"""triclosan""",
"""tricuspid""",
"""tricyclics""",
"""tried""",
"""trif-dependent""",
"""trigger""",
"""triggered""",
"""triggering""",
"""triggers""",
"""triglyceride""",
"""triglycerides""",
"""trigone""",
"""trimester""",
"""trimesters""",
"""trimethoprim-sulfamethoxazole""",
"""trimethylamine""",
"""trinucleotide""",
"""triphosphate""",
"""triphosphate-binding""",
"""triple""",
"""triple-mutant""",
"""triple-negative""",
"""triplet""",
"""triplet-triplet""",
"""tripod""",
"""tris""",
"""triticum""",
"""trivial-to-small""",
"""trna""",
"""trnas""",
"""trocar""",
"""trocars""",
"""trophoblasts""",
"""trophocytes""",
"""tropical""",
"""tropiduridae""",
"""tropidurus""",
"""troponin""",
"""troponina""",
"""troublemakers""",
"""trough""",
"""trrosetta""",
"""trt""",
"""truancy""",
"""true""",
"""trunk""",
"""trunks""",
"""trust""",
"""trusted""",
"""truven""",
"""trv""",
"""try""",
"""try-in""",
"""trying""",
"""trypanocidal""",
"""trypanosoma""",
"""trypsin""",
"""tryptophan""",
"""tsat""",
"""tsb""",
"""tsc""",
"""tsd""",
"""tsh""",
"""tsh-1188""",
"""tsh1188""",
"""tshr""",
"""tshr-specific""",
"""tsr-5""",
"""tta-uc""",
"""ttet""",
"""ttmie""",
"""ttt""",
"""tubal""",
"""tube""",
"""tuberculosis""",
"""tuberosum""",
"""tubular""",
"""tubules""",
"""tufted""",
"""tugging""",
"""tuj1""",
"""tumor""",
"""tumor-associated""",
"""tumor-free""",
"""tumor-host""",
"""tumor-related""",
"""tumor-specific""",
"""tumoral""",
"""tumorigenesis""",
"""tumors""",
"""tumour""",
"""tumours""",
"""tunable""",
"""tuned""",
"""tuning""",
"""tunnel""",
"""tup-family""",
"""turb""",
"""turbidity""",
"""turbines""",
"""turbt""",
"""turbulence""",
"""turkey""",
"""turn""",
"""turning""",
"""turnover""",
"""tutorial""",
"""twb""",
"""tweetorial""",
"""tweetorials""",
"""twelve""",
"""twenty""",
"""twenty-eight""",
"""twenty-first""",
"""twenty-five""",
"""twenty-four""",
"""twenty-nine""",
"""twenty-one""",
"""twenty-seven""",
"""twenty-six""",
"""twenty-two""",
"""twice""",
"""twin""",
"""twisting""",
"""twitch""",
"""twitter""",
"""two""",
"""two-""",
"""two-hospital""",
"""two-hundred""",
"""two-hybrid""",
"""two-sample""",
"""two-stage""",
"""two-step""",
"""two-tailed""",
"""two-thirds""",
"""two-way""",
"""two-year""",
"""twofold""",
"""twp""",
"""tyk2""",
"""type""",
"""type-the""",
"""types""",
"""typical""",
"""typically""",
"""typified""",
"""typifies""",
"""typify""",
"""typing""",
"""typologies""",
"""tyrosine""",
"""tz9""",
"""u-47700""",
"""u-test""",
"""ua-to-ra""",
"""uacr""",
"""uas""",
"""ube2b""",
"""ube2b-deficient""",
"""ubiquitin""",
"""ubiquitin-mediated""",
"""ubiquitin-proteasome""",
"""ubiquitinated""",
"""ubiquitination""",
"""ubiquitination-ip""",
"""ubiquitylation""",
"""ucla""",
"""uclp""",
"""ucp1""",
"""ucva""",
"""uem""",
"""ugra""",
"""ukb""",
"""ukraine""",
"""ulcer""",
"""ulcerated""",
"""ulcerative""",
"""ulk1""",
"""ulnar""",
"""ultimate""",
"""ultimately""",
"""ultra""",
"""ultra-high""",
"""ultracentrifugation-based""",
"""ultraclean""",
"""ultrafast""",
"""ultrasonic""",
"""ultrasonographic""",
"""ultrasonography""",
"""ultrasound""",
"""ultrasound-mediated""",
"""ultrasounds""",
"""ultraviolet""",
"""umbilical""",
"""umbilicus""",
"""umls""",
"""umrd""",
"""una""",
"""unable""",
"""unacceptable""",
"""unacceptably""",
"""unachieved""",
"""unadjusted""",
"""unaffected""",
"""unaided""",
"""unani""",
"""unanswered""",
"""unanticipated""",
"""unappreciated""",
"""unavailable""",
"""unaware""",
"""unbiased""",
"""unblinded""",
"""uncertain""",
"""uncertainties""",
"""uncertainty""",
"""unchanged""",
"""uncharacterized""",
"""unclarity""",
"""unclear""",
"""uncommon""",
"""uncommonly""",
"""uncomplicated""",
"""unconsidered""",
"""unconstrained""",
"""uncontained""",
"""uncontrollable""",
"""uncontrolled""",
"""uncorrectable""",
"""uncorrected""",
"""uncoupled""",
"""uncoupler""",
"""uncoupling""",
"""uncover""",
"""uncovered""",
"""uncovering""",
"""uncovers""",
"""undecanoate""",
"""undefined""",
"""under""",
"""under-""",
"""under-developed""",
"""under-predicts""",
"""under-reported""",
"""underage""",
"""underappreciated""",
"""underarm""",
"""underestimated""",
"""underexpressed""",
"""undergo""",
"""undergoing""",
"""undergone""",
"""undergraduate""",
"""undergraduates""",
"""underground""",
"""underlie""",
"""underlies""",
"""underlying""",
"""undermine""",
"""undermines""",
"""underpin""",
"""underpinned""",
"""underpinning""",
"""underpinnings""",
"""underrecognized""",
"""underrepresentation""",
"""underrepresented""",
"""underscore""",
"""underscoring""",
"""underserved""",
"""understand""",
"""understandable""",
"""understanding""",
"""understandings""",
"""understood""",
"""understudied""",
"""undertake""",
"""undertaken""",
"""undertook""",
"""underused""",
"""underutilized""",
"""underwater""",
"""underweight""",
"""underwent""",
"""undesirable""",
"""undetectable""",
"""undetected""",
"""undetermined""",
"""undifferentiated""",
"""undisturbed""",
"""undoubtedly""",
"""undulation""",
"""undulations""",
"""une""",
"""unemployed""",
"""unemployment""",
"""unequal""",
"""unequally""",
"""unequivocally""",
"""uneventful""",
"""unexpected""",
"""unexpectedly""",
"""unexplained""",
"""unexplored""",
"""unexposed""",
"""unfamiliar""",
"""unfamiliarity""",
"""unfavorable""",
"""unfolded""",
"""unfolding""",
"""unfortunately""",
"""unfractionated""",
"""ungulate""",
"""unhealthier""",
"""unhealthiness""",
"""unhealthy""",
"""unhelpful""",
"""uni-""",
"""uni-variate""",
"""unicef""",
"""unicolor""",
"""unidirectional""",
"""unified""",
"""uniform""",
"""uniformly""",
"""unilateral""",
"""uninfected""",
"""uninhibited""",
"""uninsured""",
"""unintended""",
"""unintentional-occupational""",
"""uninterrupted""",
"""uninvestigated""",
"""uninvolved""",
"""union""",
"""unions""",
"""unique""",
"""uniquely""",
"""unit""",
"""united""",
"""units""",
"""unity""",
"""univariable""",
"""univariate""",
"""universal""",
"""universality""",
"""universally""",
"""universiti""",
"""universities""",
"""university""",
"""university-wide""",
"""univocally-interpretable""",
"""unknot""",
"""unknown""",
"""unless""",
"""unlike""",
"""unlikely""",
"""unmanned""",
"""unmasked""",
"""unmatched""",
"""unmet""",
"""unmitigated""",
"""unmodified""",
"""unmyelinated""",
"""unnecessary""",
"""unnoticed""",
"""unobserved""",
"""unobtrusive""",
"""unplanned""",
"""unprecedented""",
"""unpredictability""",
"""unprocessed""",
"""unproductive""",
"""unprofessional""",
"""unprotected""",
"""unpublished""",
"""unravel""",
"""unraveled""",
"""unravelled""",
"""unrecognized""",
"""unregulated""",
"""unrelated""",
"""unreliability""",
"""unreliable""",
"""unreported""",
"""unresolved""",
"""unresponsive""",
"""unsatisfactory""",
"""unsatisfied""",
"""unsatisfying""",
"""unscheduled""",
"""unselected""",
"""unshared""",
"""unsolved""",
"""unspecified""",
"""unstable""",
"""unsteady""",
"""unstructured""",
"""unsuitable""",
"""unsupervised""",
"""unsure""",
"""untargeted""",
"""untested""",
"""untied""",
"""until""",
"""untouched""",
"""untranslated""",
"""untreated""",
"""unusual""",
"""unveil""",
"""unwanted""",
"""unwarranted""",
"""unwind""",
"""unwinding""",
"""unwinding-defective""",
"""unwinds""",
"""up-regulate""",
"""up-regulated""",
"""up-to-date""",
"""upcoming""",
"""upconversion""",
"""upconverted""",
"""update""",
"""updated""",
"""updates""",
"""upgraded""",
"""upheld""",
"""uphill""",
"""upon""",
"""upper""",
"""upregulate""",
"""upregulated""",
"""upregulating""",
"""upregulation""",
"""uprising""",
"""ups""",
"""upstaging""",
"""upstream""",
"""upstream-activating""",
"""uptake""",
"""uptaken""",
"""uptick""",
"""uranium""",
"""urban""",
"""urban-dwelling""",
"""urea""",
"""ureteral""",
"""ureteroenteric""",
"""ureteroneocystostomy""",
"""ureteroneocystotomy""",
"""urethra""",
"""urethral""",
"""urethrolysis""",
"""urethroplasty""",
"""urgency""",
"""urgent""",
"""urgently""",
"""urinary""",
"""urine""",
"""url""",
"""urls""",
"""urodele""",
"""urodynamic""",
"""urologic""",
"""urological""",
"""urologists""",
"""urology""",
"""urostream101""",
"""urothelial""",
"""ursinus""",
"""urticarial""",
"""urtis""",
"""us-based""",
"""usa""",
"""usada""",
"""usage""",
"""usd""",
"""use""",
"""used""",
"""used-and""",
"""useful""",
"""usefulness""",
"""user""",
"""user-friendly""",
"""users""",
"""uses""",
"""ush3""",
"""using""",
"""usm""",
"""uso""",
"""ustekinumab""",
"""usual""",
"""usually""",
"""utah""",
"""uterine""",
"""uterosacral""",
"""uterus""",
"""uti""",
"""utilisation""",
"""utilise""",
"""utilised""",
"""utilising""",
"""utilities""",
"""utility""",
"""utilizados""",
"""utilizaron""",
"""utilization""",
"""utilize""",
"""utilized""",
"""utilizes""",
"""utilizing""",
"""utilizou""",
"""utis""",
"""utr""",
"""uv-vis""",
"""uveal""",
"""uveitis""",
"""uvfp""",
"""uvrd""",
"""uvulopalatopharyngoplasty""",
"""v-domain""",
"""v-vi""",
"""v600-mutant""",
"""v600e""",
"""v600e-mutant""",
"""vac7""",
"""vaccinating""",
"""vaccination""",
"""vaccine""",
"""vaccine-derived""",
"""vaccines""",
"""vacuolar""",
"""vacuole""",
"""vad""",
"""vaers""",
"""vagina""",
"""vaginal""",
"""vaginally""",
"""valence""",
"""valgus""",
"""valid""",
"""validate""",
"""validated""",
"""validating""",
"""validation""",
"""validations""",
"""validity""",
"""valores""",
"""valproate""",
"""valsalva""",
"""valuable""",
"""valuate""",
"""value""",
"""value-laden""",
"""valued""",
"""values""",
"""valuing""",
"""valve""",
"""valve-""",
"""valves""",
"""valvular""",
"""vamp2""",
"""van""",
"""vancouver""",
"""vanillins""",
"""vanishing""",
"""vapor""",
"""vaporizes""",
"""variability""",
"""variable""",
"""variables""",
"""variably""",
"""variance""",
"""variant""",
"""variants""",
"""variation""",
"""variations""",
"""variceal""",
"""varied""",
"""variegatus""",
"""varies""",
"""varieties""",
"""variety""",
"""various""",
"""vary""",
"""varying""",
"""vas""",
"""vascular""",
"""vascular-related""",
"""vascularization""",
"""vascularized""",
"""vasculature""",
"""vasculitis""",
"""vasculopathy""",
"""vaso""",
"""vaso-occlusive""",
"""vasoactive""",
"""vasodilation""",
"""vasogenic""",
"""vasopressin""",
"""vasopressor""",
"""vasospasm""",
"""vast""",
"""vastly""",
"""vat""",
"""vats""",
"""vatsg""",
"""vazegepant""",
"""vcg""",
"""vch""",
"""vchcast""",
"""vdac2""",
"""vector""",
"""vectorized""",
"""vectors""",
"""vegetable""",
"""vegetation""",
"""vegetative""",
"""vegetatively""",
"""vegf""",
"""vegfr-tki""",
"""vegfr-tkis""",
"""vehicle""",
"""vehicles""",
"""veil""",
"""vein""",
"""veliparib""",
"""velocidad""",
"""velocimetry""",
"""velocities""",
"""velocity""",
"""vena""",
"""vendd""",
"""vending""",
"""vendors""",
"""vendvd""",
"""venetoclax""",
"""venetoclax-based""",
"""venlafaxine""",
"""veno-occlusive""",
"""venoarterial""",
"""venosus""",
"""venous""",
"""venovenous""",
"""ventilated""",
"""ventilation""",
"""ventilator""",
"""ventilators""",
"""ventral""",
"""ventricle""",
"""ventricular""",
"""ventriculomegaly""",
"""verbal""",
"""verbatim""",
"""verge""",
"""verification""",
"""verified""",
"""verify""",
"""verifying""",
"""versa""",
"""versatile""",
"""version""",
"""versions""",
"""versus""",
"""vertebral""",
"""vertebrate""",
"""vertebrates""",
"""vertical""",
"""vertically""",
"""verus""",
"""very""",
"""very-low""",
"""vesicles""",
"""vesicoureteral""",
"""vesosomes""",
"""vessel""",
"""vessels""",
"""vestibular""",
"""vestibule""",
"""vestiges""",
"""veterans""",
"""vetted""",
"""vez""",
"""vezes""",
"""vfq-48""",
"""vfs""",
"""vghr""",
"""vhi""",
"""vhi-30""",
"""vhl""",
"""via""",
"""viability""",
"""viable""",
"""vials""",
"""vibration""",
"""vibrio""",
"""vicarious""",
"""vice""",
"""vicleucel""",
"""victim""",
"""victims""",
"""vida""",
"""video""",
"""video-assisted""",
"""video-based""",
"""video-conferencing""",
"""videoconference""",
"""videolaryngostroboscopic""",
"""videos""",
"""vietnam""",
"""vietnamese""",
"""view""",
"""viewed""",
"""viewers""",
"""viewership""",
"""viewing""",
"""viewpoint""",
"""views""",
"""vigilance""",
"""vigorously""",
"""vildagliptin""",
"""villous""",
"""vinblastine""",
"""vinci""",
"""violence""",
"""violence-informed""",
"""violent""",
"""vip""",
"""viral""",
"""virginia""",
"""virilizing""",
"""virions""",
"""virtual""",
"""virtually""",
"""virtude""",
"""virtue""",
"""virtuous""",
"""virulence""",
"""virus""",
"""virus-""",
"""virus-induced""",
"""virus-like""",
"""viruses""",
"""visceral""",
"""visco-elastic""",
"""viscus""",
"""visibility""",
"""visible""",
"""vision""",
"""vision-related""",
"""visit""",
"""visited""",
"""visiting""",
"""visits""",
"""vison""",
"""vissim""",
"""vista""",
"""visual""",
"""visualised""",
"""visualization""",
"""visualizations""",
"""visualize""",
"""visualized""",
"""visualizing""",
"""visually""",
"""vit""",
"""vita""",
"""vital""",
"""vitamin""",
"""vitamins""",
"""vitd3""",
"""vitellogenins""",
"""vitreous""",
"""vitro""",
"""vitulina""",
"""vivo""",
"""vka""",
"""vkas""",
"""vlp""",
"""vml""",
"""vnirs""",
"""vnotes""",
"""vns""",
"""vo2max""",
"""vocabulary""",
"""vocal""",
"""vocalization""",
"""vocalizations""",
"""vocational""",
"""vocs""",
"""voice""",
"""voices""",
"""voicing""",
"""voiding""",
"""volatile""",
"""volatility""",
"""volitional""",
"""volkamer""",
"""volkameriana""",
"""voltage""",
"""voltage-dependent""",
"""volume""",
"""volume-based""",
"""volume-load-matched""",
"""volume-to-cerebrospinal""",
"""volumes""",
"""volumetric""",
"""voluntarily""",
"""volunteer-led""",
"""volunteering""",
"""volunteers""",
"""vomiting""",
"""von""",
"""vortex""",
"""vortices""",
"""voted""",
"""vowel""",
"""vowels""",
"""voxels""",
"""vrd""",
"""vrt""",
"""vsa""",
"""vsr""",
"""vss""",
"""vte""",
"""vtg""",
"""vtgi""",
"""vtgs""",
"""vulgaris""",
"""vulnerabilities""",
"""vulnerability""",
"""vulnerable""",
"""vulva""",
"""vulvar""",
"""vulvodynia""",
"""vur""",
"""wages""",
"""waist""",
"""waist-hip""",
"""wait""",
"""wait-and-see""",
"""waiting""",
"""waitlist""",
"""waitlisted""",
"""waived""",
"""wake""",
"""wakefield""",
"""wakefulness""",
"""wakening""",
"""walkability""",
"""walking""",
"""wall""",
"""walls""",
"""wane""",
"""wanfang""",
"""wang""",
"""waning""",
"""want""",
"""war""",
"""war-time""",
"""warburg""",
"""ward""",
"""warm-up""",
"""warming""",
"""warmup""",
"""warning""",
"""warrant""",
"""warranted""",
"""warranting""",
"""warrants""",
"""was""",
"""washing""",
"""washington""",
"""wastage""",
"""waste""",
"""wasteful""",
"""wastes""",
"""wastewater""",
"""wastewater-based""",
"""wasting""",
"""watch""",
"""watching""",
"""water""",
"""water-absorbing""",
"""water-based""",
"""water-deficit""",
"""water-miscible""",
"""water-quality""",
"""wattle""",
"""wau""",
"""wave""",
"""waveform""",
"""waveforms""",
"""wavefronts""",
"""wavelength""",
"""way""",
"""ways""",
"""wb3t""",
"""wbc""",
"""wbe""",
"""we""",
"""weak""",
"""weakened""",
"""weakest""",
"""weakly""",
"""weakness""",
"""weaknesses""",
"""wealth""",
"""wealthy""",
"""weaning""",
"""wear""",
"""wearable""",
"""wearables""",
"""weather""",
"""web""",
"""web-based""",
"""website""",
"""websites""",
"""wedged""",
"""wedges""",
"""weed""",
"""week""",
"""week-to-week""",
"""weekly""",
"""weeks""",
"""wei""",
"""weigh""",
"""weighing""",
"""weighs""",
"""weight""",
"""weight-based""",
"""weight-loss""",
"""weight-related""",
"""weight-stable""",
"""weighted""",
"""weights""",
"""weihua""",
"""weixuan""",
"""welfare""",
"""well""",
"""well-baby""",
"""well-being""",
"""well-characterized""",
"""well-conducted""",
"""well-controlled""",
"""well-defined""",
"""well-designed""",
"""well-documented""",
"""well-established""",
"""well-functioning""",
"""well-known""",
"""well-mixed""",
"""well-planned""",
"""well-tolerated""",
"""well-validated""",
"""wellbeing""",
"""wells""",
"""welsch""",
"""wenjun""",
"""were""",
"""wes""",
"""west""",
"""western""",
"""wgbs""",
"""whale""",
"""what""",
"""whatsapp""",
"""wheat""",
"""wheel""",
"""when""",
"""where""",
"""whereas""",
"""whereby""",
"""wherein""",
"""whether""",
"""which""",
"""whichever""",
"""while""",
"""whilst""",
"""whiplash""",
"""whiplash-associated""",
"""whistleblower""",
"""white""",
"""whites""",
"""whitish""",
"""who""",
"""who-5""",
"""whole""",
"""whole-body""",
"""whole-brain""",
"""whole-cell""",
"""whole-embryo""",
"""whole-genome""",
"""whole-transcriptome""",
"""whole-tree""",
"""wholis""",
"""whom""",
"""whooping""",
"""whorled""",
"""whose""",
"""whr""",
"""whtr""",
"""why""",
"""wide""",
"""wide-ranging""",
"""widely""",
"""widening""",
"""wider""",
"""widespread""",
"""width""",
"""wild""",
"""wild-type""",
"""wildlife""",
"""wiley""",
"""will""",
"""willebrand""",
"""willing""",
"""willingness""",
"""wilms""",
"""wilt""",
"""wind""",
"""windfarms""",
"""window""",
"""wingless-type""",
"""winter""",
"""wiping""",
"""wire""",
"""wireless""",
"""wiring""",
"""wisconsin""",
"""wistar""",
"""with""",
"""withdraw""",
"""withdrawal""",
"""withdrawing""",
"""withdrawn""",
"""withdrew""",
"""withheld""",
"""within""",
"""within-component""",
"""within-person""",
"""within-subject""",
"""without""",
"""witnessing""",
"""wlv""",
"""wmd""",
"""wmh""",
"""wmhs""",
"""wnt-7a""",
"""wnt2b""",
"""wolf""",
"""wolves""",
"""womac""",
"""woman""",
"""women""",
"""woodiness""",
"""word""",
"""words""",
"""work""",
"""work-related""",
"""work-up""",
"""worked""",
"""worker""",
"""worker-years""",
"""workers""",
"""workflow""",
"""workforce""",
"""workgroup""",
"""working""",
"""workload""",
"""workplace""",
"""workplaces""",
"""works""",
"""workshift""",
"""workshop""",
"""workstation""",
"""workup""",
"""workups""",
"""world""",
"""worldwide""",
"""worms""",
"""worrisome""",
"""worry""",
"""worrying""",
"""worse""",
"""worsen""",
"""worsened""",
"""worsening""",
"""worship""",
"""worst""",
"""worst-case""",
"""wos""",
"""would""",
"""wound""",
"""wounds""",
"""wps""",
"""wrap""",
"""wrapping""",
"""wri""",
"""wris""",
"""wrist""",
"""wrist-to-forearm""",
"""wristed""",
"""wrists""",
"""writer""",
"""writhing""",
"""written""",
"""wrn""",
"""wrong""",
"""wrote""",
"""wst""",
"""wuhan""",
"""wvcg""",
"""wwtp""",
"""wwtps""",
"""x-ray""",
"""xab2""",
"""xad-8""",
"""xcelligence""",
"""xen""",
"""xenodiagnosis""",
"""xenogeneic""",
"""xenograft""",
"""xenografted""",
"""xenografts""",
"""xenopus""",
"""xgboost""",
"""xia""",
"""xiangpeng""",
"""xianrui""",
"""xin""",
"""xpo1""",
"""xps""",
"""xue""",
"""xylem""",
"""yak""",
"""yale""",
"""yan""",
"""yanagawa""",
"""yang""",
"""yanning""",
"""yanran""",
"""yao""",
"""yaping""",
"""yards""",
"""yb-1""",
"""ybx1""",
"""year""",
"""yearly""",
"""years""",
"""yeast""",
"""yellow""",
"""yet""",
"""yet-untested""",
"""yew""",
"""yezhou""",
"""yhrd""",
"""yidc""",
"""yield""",
"""yielded""",
"""yields""",
"""yin""",
"""ying""",
"""ykl-40""",
"""yo-ifos""",
"""yoga""",
"""yonggang""",
"""yongkun""",
"""york""",
"""yorkshire-cross""",
"""you""",
"""youden""",
"""young""",
"""younger""",
"""youngest""",
"""your""",
"""youth""",
"""youths""",
"""youtube""",
"""yrs""",
"""yuan""",
"""yuc""",
"""yucca""",
"""yuchio""",
"""yunnan""",
"""yuxi""",
"""z-score""",
"""z-scores""",
"""zagros""",
"""zalophus""",
"""zealand""",
"""zebrafish""",
"""zenith""",
"""zero""",
"""zero-knowledge""",
"""zeta""",
"""zhang""",
"""zhao""",
"""zhaoling""",
"""zhong""",
"""zhonghui""",
"""zhongyuan""",
"""zhou""",
"""zika""",
"""zinc""",
"""zip""",
"""zirconia""",
"""zirconia-reinforced""",
"""zno""",
"""zo-1""",
"""zona""",
"""zone""",
"""zones""",
"""zonula""",
"""zoom""",
"""zooming""",
"""zooniverse""",
"""zoonotic""",
"""zucchini""",
"""zygote""",
"""zygotic""",
"""zymography"""]
|
from django.db import models
from kbspre.users import models as user_models
# Create your models here.
class TimeStampedModel(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
abstract = True
class Image(TimeStampedModel):
""" Image Model """
    # id = models.AutoField(primary_key=True) is added implicitly by Django;
    # hard-coding a class attribute like `id = 1` here would shadow the pk.
file = models.ImageField()
location = models.CharField(max_length=140)
caption = models.TextField()
creator = models.ForeignKey(user_models.User, on_delete=models.CASCADE, null=True)
    '''
    Reverse relations Django derives from the ForeignKeys on Comment and Like
    (defined below):
    comment_set = all Comment rows whose 'image' FK equals this image's id
    like_set    = all Like rows whose 'image' FK equals this image's id
    (exposed as 'comments'/'likes' here because of related_name on those FKs)
    '''
def __str__(self):
return '{} - {}'.format(self.location, self.caption)
class Comment(TimeStampedModel):
""" Comment Model """
message = models.TextField(null=True)
playername = models.TextField(null=True)
score = models.IntegerField(null=True)
tel = models.TextField(null=True)
survey = models.IntegerField(null=True)
note = models.TextField(null=True)
step = models.IntegerField(null=True)
mail = models.TextField(null=True)
creator = models.ForeignKey(user_models.User, on_delete=models.CASCADE, null=True)
image = models.ForeignKey(Image, on_delete=models.CASCADE, null=True,related_name='comments')
    def __str__(self):
        # alternate representations (score, step, tel, mail, survey, note,
        # message) were tried here; playername is the one in effect
        return self.playername
class Meta:
ordering = ['-created_at']
class Like(TimeStampedModel):
""" Like Model """
creator = models.ForeignKey(user_models.User, on_delete=models.CASCADE, null=True)
image = models.ForeignKey(Image, on_delete=models.CASCADE, null=True,related_name='likes')
def __str__(self):
return 'User: {} - Image Caption: {}'.format(self.creator.username, self.image.caption)
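# Hedged usage sketch (Django shell; assumes some Image/Comment/Like rows
# already exist). Because the FKs above set related_name='comments'/'likes',
# these reverse managers replace the default comment_set/like_set:
#
#   image = Image.objects.first()
#   image.comments.all()    # Comment rows whose 'image' FK points here
#   image.likes.count()     # number of Like rows for this image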
|
import logging
import pickle
import collections
logging.basicConfig(format='%(asctime)s : %(levelname)s :: %(message)s', level=logging.DEBUG)
logging.debug('started.')
input_file = './most_common.pickle'
with open(input_file, 'rb') as input_fp:
data = pickle.load(input_fp)
logging.debug('read preprocessed data from %s.' % input_file)
for key in data.keys():
logging.debug('input data has key %s' % key)
file_names = list()
if 'file_names' in data.keys():
file_names = data['file_names']
counts_from_corpus = collections.Counter()
if 'counts_from_corpus' in data.keys():
counts_from_corpus = data['counts_from_corpus']
counts_from_documents = list()
if 'counts_from_documents' in data.keys():
counts_from_documents = data['counts_from_documents']
corpus_most_common = counts_from_corpus.most_common(10)
t0 = set([item[0] for item in corpus_most_common])
for index, item in enumerate(corpus_most_common):
    logging.debug('%s: %s :: %d' % (index + 1, item[0], item[1]))
most_words = set([each[0] for each in corpus_most_common])
logging.debug(most_words)
for index, item in enumerate(counts_from_documents):
t1 = item.most_common(15)
current = set([each[0] for each in t1])
intersection = t0.intersection(current)
difference = t0.difference(current)
logging.debug('%d : intersection : %d %s :: difference : %d %s' % (
index, len(intersection), intersection, len(difference), difference))
|
# With two given lists [1,3,6,78,35,55] and [12,24,35,24,88,120,155],
# write a program to make a list whose elements are intersection of the above given lists.
list1 = [1, 3, 6, 78, 35, 55]
list2 = [12, 24, 35, 24, 88, 120, 155]
set1 = set(list1)
set2 = set(list2)
print(list(set1 & set2))
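# A hedged order-preserving variant (illustrative addition): iterate list1 so
# the result keeps its original order and is a real list, as the exercise asks.
intersection = [x for x in list1 if x in set2]
print(intersection)  # [35]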
|
import json
import logging
import os
import boto3
from pyspark.sql import SparkSession
boto3.setup_default_session(region_name=os.environ.get('REGION', 'us-east-1'))
source_location_uri = os.path.join(os.environ['SILVER_LAKE_S3URI'], '')
target_location_uri = os.path.join(os.environ['GOLD_LAKE_S3URI'], '')
log_level = os.environ.get('LOG_LEVEL', 'INFO')
logging.basicConfig(
format='%(asctime)s | %(levelname)s | %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=log_level)
def get_hudi_options(instant_time):
hudi_options = {
'hoodie.datasource.query.type': 'incremental',
'hoodie.datasource.read.begin.instanttime': instant_time
}
return hudi_options
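# Hedged illustration (instant format yyyyMMddHHmmss, matching main() below):
#
#   spark.read.format('org.apache.hudi') \
#       .options(**get_hudi_options('20230101000000')) \
#       .load(path)
#
# reads only the records committed after that Hudi instant.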
def main():
    with open('/mnt/var/lib/instance-controller/public/runtime_configs/configs.json') as f:
        config_dict = json.load(f)
logging.info(json.dumps(config_dict, indent=4))
database_config = config_dict['DatabaseConfig']
db_name = database_config['target_db_name']
print(database_config)
spark = SparkSession \
.builder \
.appName(f'{db_name}_denormalize') \
.getOrCreate()
# Check to see if the target denormalized table exists, if it does, grab the max hudi instant time from the previous load
try:
client = boto3.client('glue')
client.get_table(DatabaseName=db_name, Name='analytics_order_line')
spark.read.format('org.apache.hudi').load(os.path.join(target_location_uri, 'analytics_order_line', ''))\
.createOrReplaceTempView('aol')
instant_time = spark.sql('''
SELECT date_format(MAX(ol_instant_time), 'yyyyMMddHHmmss') as instant_time FROM aol
''').collect()[0][0]
logging.info(f'Table exists and records current as of {instant_time}')
dn_table_exists = True
# There is no good way to catch botocore.errorfactory exceptions, so this...
except Exception as e:
if type(e).__name__ == 'EntityNotFoundException':
dn_table_exists = False
instant_time = None
logging.warning('Table analytics_order_line does not exist')
else:
raise
# Register tables as temporary views
for table in ['hammerdb_public_orders','hammerdb_public_customer', 'hammerdb_public_district',
'hammerdb_public_warehouse', 'hammerdb_public_item', 'hammerdb_public_order_line']:
# We are using snapshot reads for dimension tables and incremental for order_line (if possible)
        if dn_table_exists is True and table == 'hammerdb_public_order_line':
            hudi_options = get_hudi_options(instant_time)
        else:
            hudi_options = {
                'hoodie.datasource.query.type': 'snapshot'
            }
spark.read.format('org.apache.hudi').options(**hudi_options).load(os.path.join(source_location_uri, table, ''))\
.createOrReplaceTempView(table)
# Create the denormalized dataframe
df = spark.sql('''
SELECT
concat(cast(c_id as string), '-', cast(w_id as string), '-', cast(d_id as string), '-', cast(o_id as string)) as aol_sk,
concat(cast(c_id as string), '-', cast(w_id as string), '-', cast(d_id as string)) as c_sk,
c_id,
w_id,
d_id,
o_id,
ol_number,
o_entry_d,
date_format(o_entry_d, 'yyyy/MM/dd') as order_date,
i_id,
c_first || ' ' || c_middle || ' ' || c_last as full_name,
c_zip,
c_phone,
c_credit,
c_credit_lim,
c_discount,
c_balance,
c_ytd_payment,
c_payment_cnt,
c_delivery_cnt,
w_name whouse_name,
d_name district_name,
ol_delivery_d delivery_date,
ol_quantity quantity,
ol_amount amount,
i_name item_name,
i_price item_price,
to_date(ol._hoodie_commit_time, 'yyyyMMddHHmmss') as ol_instant_time
FROM hammerdb_public_orders
JOIN hammerdb_public_customer
ON o_c_id = c_id
AND o_d_id = c_d_id
AND o_w_id = c_w_id
JOIN hammerdb_public_district
ON c_d_id = d_id
AND c_w_id = d_w_id
JOIN hammerdb_public_warehouse
ON d_w_id = w_id
JOIN hammerdb_public_order_line ol
ON o_id = ol_o_id
AND o_d_id = ol_d_id
AND o_w_id = ol_w_id
JOIN hammerdb_public_item
ON ol_i_id = i_id
ORDER BY aol_sk, ol_number, ol_instant_time
''')
# If we are doing a full load because the table doesn't exist, persist it.. we'll need it for aggregation step as well
if dn_table_exists is False:
df.persist()
hudi_conf = {
'hoodie.table.name': 'analytics_order_line',
'hoodie.datasource.write.recordkey.field': 'aol_sk,ol_number',
'hoodie.datasource.write.precombine.field': 'ol_instant_time',
'hoodie.datasource.write.partitionpath.field': 'order_date',
'hoodie.datasource.write.keygenerator.class': 'org.apache.hudi.keygen.ComplexKeyGenerator',
'hoodie.datasource.hive_sync.database': db_name,
'hoodie.datasource.hive_sync.enable': 'true',
'hoodie.datasource.hive_sync.table': 'analytics_order_line',
'hoodie.datasource.hive_sync.partition_extractor_class': 'org.apache.hudi.hive.SlashEncodedDayPartitionValueExtractor'
}
if dn_table_exists is False:
hudi_conf['hoodie.datasource.write.operation'] = 'bulk_insert'
hudi_conf['hoodie.bulkinsert.sort.mode'] = 'PARTITION_SORT'
hudi_conf['hoodie.bulkinsert.shuffle.parallelism'] = '32'
writer = df.write.format('org.apache.hudi').mode('overwrite')
else:
hudi_conf['hoodie.datasource.write.operation'] = 'upsert'
hudi_conf['hoodie.upsert.shuffle.parallelism'] = '32'
writer = df.write.format('org.apache.hudi').mode('append')
writer.options(**hudi_conf)\
.save(os.path.join(target_location_uri, 'analytics_order_line', ''))
if __name__ == '__main__':
main()
|
import torch
import torch.nn as nn
import torch.nn.functional as F
class LuongAttention(nn.Module):
def __init__(self, hidden_size_enc, hidden_size_dec, use_cuda=True, method='general'):
super().__init__()
self.hidden_size_enc = hidden_size_enc
self.hidden_size_dec = hidden_size_dec
self.use_cuda = use_cuda
self.method = method
if self.method not in ['dot', 'general', 'concat']:
            raise ValueError(f"{self.method} is not an appropriate attention method.")
if self.method == 'general':
self.general_weights = torch.nn.Parameter(torch.randn(hidden_size_dec, hidden_size_enc))
elif self.method == 'concat':
self.general_weights = torch.nn.Parameter(torch.randn(hidden_size_dec, hidden_size_enc))
self.v = torch.nn.Parameter(torch.randn(hidden_size_dec, hidden_size_enc))
def forward(self,
encoder_outputs,
encoder_outputs_length,
decoder_outputs,
decoder_outputs_length,
enc_mask=None):
dec_len = decoder_outputs.size(0)
enc_len = encoder_outputs.size(0)
decoder_outputs = torch.transpose(decoder_outputs, 0, 1)
encoder_outputs = encoder_outputs.permute(1, 2, 0)
score = torch.bmm(decoder_outputs @ self.general_weights, encoder_outputs)
if enc_mask is not None:
enc_mask = enc_mask.unsqueeze(1)
enc_mask = torch.transpose(enc_mask, 0, 2)
score = score.masked_fill(enc_mask == 0, -1e12)
weights_flat = F.softmax(score.view(-1, enc_len), dim=1)
weights = weights_flat.view(-1, dec_len, enc_len)
attention_vector = torch.bmm(weights, encoder_outputs.permute(0, 2, 1))
attention_vector = attention_vector.permute(1, 0, 2)
return attention_vector, weights.view(-1, enc_len)
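# Hedged usage sketch (method='general'; shapes follow the permutes above,
# i.e. encoder/decoder outputs are (seq_len, batch, hidden)):
#
#   attn = LuongAttention(hidden_size_enc=16, hidden_size_dec=32, use_cuda=False)
#   enc = torch.randn(7, 2, 16)    # (enc_len, batch, hidden_enc)
#   dec = torch.randn(5, 2, 32)    # (dec_len, batch, hidden_dec)
#   ctx, w = attn(enc, None, dec, None)
#   # ctx: (dec_len, batch, hidden_enc); w: (batch * dec_len, enc_len)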
|
import functools
import logging
def logger(func):
logging.basicConfig(filename=f"logger", level=logging.INFO)
@functools.wraps(func)
def aninhada(*args, **kwargs):
logging.info(f"'{func.__name__}' executado com os argumentos: {args}")
return func(*args, **kwargs)
return aninhada
def recibo(func):
@functools.wraps(func)
def aninhada(*args, **kwargs):
with open("recibos.txt", "a") as arquivo:
if func.__name__ == "depositar":
arquivo.write(f"{args[0].nome} depositou R$ {args[1]}\n")
elif func.__name__ == "sacar":
arquivo.write(f"{args[0].nome} sacou R$ {args[1]}\n")
else:
arquivo.write(f"{args[0].nome} transferiu R$ {args[2]} para {args[1].nome}\n")
return func(*args, **kwargs)
return aninhada
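# Hedged usage sketch: a hypothetical Conta class (not in the original module)
# whose attribute and method names match what the decorators inspect.
class Conta:
    def __init__(self, nome, saldo=0):
        self.nome = nome
        self.saldo = saldo

    @logger
    @recibo
    def depositar(self, valor):
        self.saldo += valor

# Conta("Ana").depositar(100) logs the call under the name 'depositar'
# (functools.wraps preserves it) and appends "Ana depositou R$ 100" to recibos.txt.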
|
from save_the_giphies.runner import create_app
from save_the_giphies.config import config
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from flask import Flask
if __name__ == '__main__':
""" Entry point """
app: "Flask" = create_app()
app.run(debug=config.debug)
|
from django.contrib import admin
from .models import (Domain,mcqQuestions,typeQuestions,Responses,User)
from django.utils.html import format_html
import csv
from django.http import HttpResponse
class ExportCsvMixin:
def export_as_csv(self, request, queryset):
meta = self.model._meta
field_names = [field.name for field in meta.fields]
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename={}.csv'.format(meta)
writer = csv.writer(response)
writer.writerow(field_names)
for obj in queryset:
            writer.writerow([getattr(obj, field) for field in field_names])
return response
export_as_csv.short_description = "Export Selected"
@admin.register(User)
class UserAdmin(admin.ModelAdmin,ExportCsvMixin):
list_display = ['username','reg_no','email','phone_number']
actions = ["export_as_csv"]
# Register your models here.
@admin.register(Domain)
class DomainAdmin(admin.ModelAdmin):
list_display = ['domain_name','id']
@admin.register(mcqQuestions)
class mcqQuestionsAdmin(admin.ModelAdmin):
list_display = ['domain','question_id','get_question']
def get_question(self, obj):
html = "<img src='" + str(obj.question) + "' style='height:200px;width:300px' />";
return format_html(html)
get_question.short_description = 'question'
@admin.register(typeQuestions)
class typeQuestionsAdmin(admin.ModelAdmin):
list_display = ['domain','question_id','get_question']
def get_question(self, obj):
html = "<img src='" + str(obj.question) + "' style='height:200px;width:300px' />";
return format_html(html)
get_question.short_description = 'question'
@admin.register(Responses)
class ResponsesAdmin(admin.ModelAdmin,ExportCsvMixin):
list_display = ['user','get_reg_no','domain','get_question','answer']
list_filter = ['domain']
search_fields = ['user__username']
actions = ["export_as_csv"]
def get_reg_no(self, obj):
return obj.user.reg_no
get_reg_no.short_description = 'reg_no'
def get_question(self, obj):
html = "<img src='" + str(obj.question) + "' style='height:200px;width:300px' />";
return format_html(html)
get_question.short_description = 'question'
# class MyModelAdmin(admin.ModelAdmin):
# ...
#
# list_display = ['get_description', ]
#
# def get_description(self, obj):
# return format_html(obj)
# get_description.short_description = 'description'
|
r"""This module provides decoding functionality of struct
+-------+--------+-------------------------------+---+---+---+---+---+---+---+---+
| Byte | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
+-------+--------+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
| bit | | 7 | 6 | 5 | 4 | 3 | 2 | 1 | 0 | |
+-------+--------+---+---+---+---+---+---+---+---+-------------------------------+
| Field | opcode | |fua| lba (big endian) |
+-------+--------+---------------------------+---+-------------------------------+
If the command block has the layout shown above and there are 5 commands as follows:
+--------+-----+-------------+
| opcode | fua | Command |
+--------+-----+-------------+
| 0x00 | X | NOP |
+--------+-----+-------------+
| 0x01 | 0 | Cache Write |
+--------+-----+-------------+
| 0x01 | 1 | Force Write |
+--------+-----+-------------+
| 0x02 | 0 | Cache Read |
+--------+-----+-------------+
| 0x02 | 1 | Force Read |
+--------+-----+-------------+
Command classes can be defined as follows:
>>> from pystr import Decodable
>>> from enum import IntEnum
>>> class Opcode(IntEnum):
... Nop = 0
... Write = 1
... Read = 2
>>> class BaseCommand(Decodable):
... _layout_ = dict(
... opcode=dict(
... offset=0,
... ),
... fua=dict(
... offset=1,
... bit=0,
... ),
... lba=dict(
... offset=2,
... width=64,
... endian='be'
... ),
... )
... opcode: Opcode
... fua: bool
... lba: int
>>> class NopCommand(BaseCommand):
... opcode = Opcode.Nop
>>> class WriteCommand(BaseCommand):
... opcode = Opcode.Write
>>> class ReadCommand(BaseCommand):
... opcode = Opcode.Read
>>> class CacheWriteCommand(WriteCommand):
... fua = False
>>> class ForceWriteCommand(WriteCommand):
... fua = True
>>> class CacheReadCommand(ReadCommand):
... fua = False
>>> class ForceReadCommand(ReadCommand):
... fua = True
The results of each case are as follows:
>>> print(BaseCommand(b'\0\0\0\0\0\0\0\0').decode())
NopCommand(lba=0, fua=False, opcode=<Opcode.Nop: 0>)
>>> print(BaseCommand(b'\x01\0\0\0\0\0\0\0').decode())
CacheWriteCommand(lba=0, fua=False, opcode=<Opcode.Write: 1>)
>>> print(BaseCommand(b'\x01\x01\0\0\0\0\0\0').decode())
ForceWriteCommand(lba=0, fua=True, opcode=<Opcode.Write: 1>)
>>> print(BaseCommand(b'\x02\0\0\0\0\0\0\0').decode())
CacheReadCommand(lba=0, fua=False, opcode=<Opcode.Read: 2>)
>>> print(BaseCommand(b'\x02\x01\0\0\0\0\0\0').decode())
ForceReadCommand(lba=0, fua=True, opcode=<Opcode.Read: 2>)
If you want to add an initial value to a sub-struct, the `initial` parameter can be used.
>>> class SomeDecodable(Decodable):
...     _layout_ = ...
...     child: ChildDecodable
>>> class DerivedDecodable(SomeDecodable, initial={"child.value": 1}):
...     pass
"""
import typing
from .struct import Struct
DerivedDecodable = typing.TypeVar('DerivedDecodable', bound='Decodable')
class Decodable(Struct):
"""Decoding facility added Struct"""
_decode_map: typing.List[
typing.Tuple[
typing.Dict[str, typing.Any],
typing.Type['Decodable']
]
] = []
def __init_subclass__(cls, **kwargs: typing.Any): #pylint: disable=arguments-differ
super().__init_subclass__(**kwargs)
if cls._initial:
cls._decode_map.append((cls._initial, cls))
cls._decode_map = []
def decode(self: DerivedDecodable) -> DerivedDecodable:
"""Decode struct by derived Decodables"""
dmap = self._decode_map
ret_tp = type(self)
while True:
for cond, child_tp in reversed(dmap):
if all(getattr(self, k) == v for k, v in cond.items()):
dmap = child_tp._decode_map #pylint: disable=protected-access
ret_tp = typing.cast(typing.Type[DerivedDecodable], child_tp)
break
else:
return self if ret_tp is type(self) else ret_tp(ref=self.buffer)
|
from protocol_lib import IHashable
def test_hashable() -> None:
class Impl:
def __hash__(self) -> int:
return 42
impl: IHashable = Impl()
assert hash(impl) == 42
|
import os
import re
from pathlib import Path
from typing import List
import testsuites
from resultinterpretation import CSVFileExtractor
from resultinterpretation.model import TestSuiteResult, TestStepResult
class ResultInterpreter:
def __init__(self, inputDir: Path):
"""
:param inputDir: The root directory which contains the output files of the CouchEdit Test Suites
(directories with <datetime>_<TestSuiteName>
"""
self.__inputDir = inputDir.absolute()
# Pattern to match if a directory name is a CouchEdit test suite result pattern.
# Those look like yyyyMMddhhmmss_<TestSuiteName>
self.__couchEditTestSuiteDirPattern = re.compile("\\d+_\\w+")
def interpretResults(self) -> List[TestSuiteResult]:
if not self.__inputDir.exists():
raise Exception("Input Directory {0} cannot be found!".format(self.__inputDir))
if not self.__inputDir.is_dir():
raise Exception("Input Location {0} is not a directory!".format(self.__inputDir))
dirList = [dirPath for dirPath in self.__inputDir.glob('*/')
if self.__isCouchEditTestSuiteDirName(dirPath.name)]
return self.processDirectories(dirList)
def processDirectories(self, dirList: List[Path]) -> List[TestSuiteResult]:
testSuiteDirDict = {}
for path in dirList:
suiteName = self.__getCouchEditTestSuiteNameFromDirName(path)
if suiteName not in testSuiteDirDict:
testSuiteDirDict[suiteName] = []
testSuiteDirDict[suiteName].append(path)
ret = []
for suiteName, suiteDirs in testSuiteDirDict.items():
suiteInfo = testsuites.suites[suiteName]
independentVariableName = suiteInfo['iv']
if 'pre' in suiteInfo:
prefix = suiteInfo['pre']
else:
prefix = 'result_'
suiteCsvExtractor = CSVFileExtractor(independentVariableName, prefix)
results = self.processSingleTestSuiteDirectories(suiteDirs, suiteCsvExtractor)
ret.append(TestSuiteResult(suiteName, results, {
"suiteName": suiteName,
"prefix": prefix,
"independentVariable": independentVariableName
}))
return ret
def processSingleTestSuiteDirectories(
self,
dirList: List[Path],
suiteCsvExtractor: CSVFileExtractor
) -> List[TestStepResult]:
testStepFileDict = {}
        for suiteDir in dirList:
            files = suiteDir.glob('*.csv')
for file in files:
stepNumber = self.__getStepNumberFromFileName(file)
if stepNumber not in testStepFileDict:
testStepFileDict[stepNumber] = []
testStepFileDict[stepNumber].append(file)
ret = []
for number, files in testStepFileDict.items():
extracted = suiteCsvExtractor.extract(files)
resultObject = TestStepResult(number, extracted)
ret.append(resultObject)
return ret
def __getStepNumberFromFileName(self, path: Path) -> int:
pat = re.compile("\\d+")
number = pat.match(str(path.stem)).group(0)
return int(number)
def __getCouchEditTestSuiteNameFromDirName(self, path: Path) -> str:
nameParts = path.name.split('_', 1)
return nameParts[1]
def __isCouchEditTestSuiteDirName(self, name: str) -> bool:
return self.__couchEditTestSuiteDirPattern.match(name) is not None
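# Hedged usage sketch (assumes a results root laid out as
# <root>/<yyyyMMddhhmmss>_<SuiteName>/<step>.csv, matching the regexes above):
#
#   interpreter = ResultInterpreter(Path('./results'))
#   suites = interpreter.interpretResults()  # one TestSuiteResult per suite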
|
# Copyright 2013 OpenStack Foundation.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import time
import mock
from vmware_nsx.api_client import client as nsx_client
from vmware_nsx.api_client import eventlet_client
from vmware_nsx import extensions
import vmware_nsx.plugin as neutron_plugin
from vmware_nsx.plugins.nsx_v.vshield.common import (
VcnsApiClient as vcnsapi)
from vmware_nsx.plugins.nsx_v.vshield import edge_utils
from vmware_nsx.plugins.nsx_v.vshield import vcns
import vmware_nsx.plugins.nsx_v.vshield.vcns_driver as vcnsdriver
plugin = neutron_plugin.NsxPlugin
api_client = nsx_client.NsxApiClient
evt_client = eventlet_client.EventletApiClient
vcns_class = vcns.Vcns
vcns_driver = vcnsdriver.VcnsDriver
vcns_api_helper = vcnsapi.VcnsApiHelper
edge_manage_class = edge_utils.EdgeManager
STUBS_PATH = os.path.join(os.path.dirname(__file__), 'etc')
NSXEXT_PATH = os.path.dirname(extensions.__file__)
NSXAPI_NAME = '%s.%s' % (api_client.__module__, api_client.__name__)
PLUGIN_NAME = '%s.%s' % (plugin.__module__, plugin.__name__)
CLIENT_NAME = '%s.%s' % (evt_client.__module__, evt_client.__name__)
VCNS_NAME = '%s.%s' % (vcns_class.__module__, vcns_class.__name__)
VCNS_DRIVER_NAME = '%s.%s' % (vcns_driver.__module__, vcns_driver.__name__)
VCNSAPI_NAME = '%s.%s' % (vcns_api_helper.__module__, vcns_api_helper.__name__)
EDGE_MANAGE_NAME = '%s.%s' % (edge_manage_class.__module__,
edge_manage_class.__name__)
# Mock for the tenacity retrying sleeping method
mocked_retry_sleep = mock.patch.object(time, 'sleep')
mocked_retry_sleep.start()
def get_fake_conf(filename):
return os.path.join(STUBS_PATH, filename)
def nsx_method(method_name, module_name='nsxlib'):
return '%s.%s.%s' % ('vmware_nsx', module_name,
method_name)
|
img_norm_cfg = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='ResizeOCR',
height=32,
min_width=128,
max_width=128,
keep_aspect_ratio=False,
width_downsample_ratio=0.25),
dict(
type='RandomWrapper',
p=0.5,
transforms=[
dict(
type='OneOfWrapper',
transforms=[
dict(
type='RandomRotateTextDet',
max_angle=15,
),
dict(
type='TorchVisionWrapper',
op='RandomAffine',
degrees=15,
translate=(0.3, 0.3),
scale=(0.5, 2.),
shear=(-45, 45),
),
dict(
type='TorchVisionWrapper',
op='RandomPerspective',
distortion_scale=0.5,
p=1,
),
])
],
),
dict(
type='RandomWrapper',
p=0.25,
transforms=[
dict(type='PyramidRescale'),
dict(
type='Albu',
transforms=[
dict(type='GaussNoise', var_limit=(20, 20), p=0.5),
dict(type='MotionBlur', blur_limit=6, p=0.5),
]),
]),
dict(
type='RandomWrapper',
p=0.25,
transforms=[
dict(
type='TorchVisionWrapper',
op='ColorJitter',
brightness=0.5,
saturation=0.5,
contrast=0.5,
hue=0.1),
]),
dict(type='ToTensorOCR'),
dict(type='NormalizeOCR', **img_norm_cfg),
dict(
type='Collect',
keys=['img'],
meta_keys=[
'filename', 'ori_shape', 'img_shape', 'text', 'valid_ratio',
'resize_shape'
]),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiRotateAugOCR',
rotate_degrees=[0, 90, 270],
transforms=[
dict(
type='ResizeOCR',
height=32,
min_width=128,
max_width=128,
keep_aspect_ratio=False,
width_downsample_ratio=0.25),
dict(type='ToTensorOCR'),
dict(type='NormalizeOCR', **img_norm_cfg),
dict(
type='Collect',
keys=['img'],
meta_keys=[
'filename', 'ori_shape', 'img_shape', 'valid_ratio',
'resize_shape'
]),
])
]
|
from __future__ import print_function # PY2
import sys
import traceback
import platform
import ctypes.util
from ctypes import (POINTER, CFUNCTYPE, CDLL, pythonapi, cast, addressof,
c_int, c_char_p, c_void_p, c_size_t, py_object)
WINDOWS = platform.system().lower() == "windows"
def get_libc():
if WINDOWS:
path = "msvcrt"
else:
path = ctypes.util.find_library("c")
if path is None:
raise RuntimeError("cannot locate libc")
return CDLL(path)
def get_file_pointers_Python2():
PyFile_AsFile = pythonapi.PyFile_AsFile
PyFile_AsFile.restype = c_void_p
PyFile_AsFile.argtypes = [py_object]
stdin = PyFile_AsFile(sys.stdin)
stdout = PyFile_AsFile(sys.stdout)
return stdin, stdout
def get_file_pointers_Unix():
runtime = CDLL(None)
stdin = c_void_p.in_dll(runtime, "stdin").value
stdout = c_void_p.in_dll(runtime, "stdout").value
return stdin, stdout
def get_file_pointers_msvcrt():
class FILE(ctypes.Structure):
_fields_ = [
("_ptr", c_char_p),
("_cnt", c_int),
("_base", c_char_p),
("_flag", c_int),
("_file", c_int),
("_charbuf", c_int),
("_bufsize", c_int),
("_tmpfname", c_char_p),
]
msvcrt = CDLL(ctypes.util.find_msvcrt())
iob_func = msvcrt.__iob_func
iob_func.restype = POINTER(FILE)
iob_func.argtypes = []
array = iob_func()
stdin = addressof(array[0])
stdout = addressof(array[1])
return stdin, stdout
def get_file_pointers_ucrtbase():
ucrtbase = CDLL("ucrtbase")
iob_func = ucrtbase.__acrt_iob_func
iob_func.restype = c_void_p
iob_func.argtypes = [c_int]
stdin = iob_func(0)
stdout = iob_func(1)
return stdin, stdout
def get_file_pointers():
if sys.version_info < (3,):
return get_file_pointers_Python2()
elif WINDOWS:
if sys.version_info >= (3, 5):
return get_file_pointers_ucrtbase()
else:
return get_file_pointers_msvcrt()
else:
return get_file_pointers_Unix()
HOOKFUNC = CFUNCTYPE(c_char_p, c_void_p, c_void_p, c_char_p)
LIBC = get_libc()
strncpy = LIBC.strncpy
strncpy.restype = c_char_p
strncpy.argtypes = [c_char_p, c_char_p, c_size_t]
PyMem_Malloc = pythonapi.PyMem_Malloc
PyMem_Malloc.restype = c_size_t
PyMem_Malloc.argtypes = [c_size_t]
PyOS_ReadlineFunctionPointer = c_void_p.in_dll(pythonapi, "PyOS_ReadlineFunctionPointer")
STDIN_FILE_POINTER, STDOUT_FILE_POINTER = get_file_pointers()
def get_function_address(func):
return cast(func, c_void_p).value
def new_zero_terminated_string(b):
p = PyMem_Malloc(len(b) + 1)
strncpy(cast(p, c_char_p), b, len(b) + 1)
return p
def readline_wrapper(stdin_fp, stdout_fp, prompt_bytes):
try:
prompt = prompt_bytes.decode(sys.stdout.encoding)
try:
line = readline_hook(prompt)
except KeyboardInterrupt:
return 0
else:
line_bytes = line.encode(sys.stdin.encoding)
return new_zero_terminated_string(line_bytes)
except:
print("An error occured in a readline hook", file=sys.stderr)
traceback.print_exc(file=sys.stderr)
return new_zero_terminated_string(b"\n")
readline_hook = None
readline_hook_ref = HOOKFUNC(readline_wrapper)
def get_readline_hook():
our_address = cast(readline_hook_ref, c_void_p).value
actual_address = PyOS_ReadlineFunctionPointer.value
if actual_address == our_address:
return readline_hook
elif actual_address is None:
return None
readline_bytes = HOOKFUNC(actual_address)
def readline(prompt=""):
prompt_bytes = prompt.encode(sys.stdout.encoding)
line_bytes = readline_bytes(STDIN_FILE_POINTER, STDOUT_FILE_POINTER, prompt_bytes)
if line_bytes is None:
raise KeyboardInterrupt
line = line_bytes.decode(sys.stdin.encoding)
return line
readline.__readline_bytes__ = readline_bytes
return readline
def set_readline_hook(hook):
global readline_hook
if hook is None:
address = 0
elif hasattr(hook, "__readline_bytes__"):
address = get_function_address(hook.__readline_bytes__)
else:
readline_hook = hook
address = get_function_address(readline_hook_ref)
PyOS_ReadlineFunctionPointer.value = address
def stdio_readline(prompt=""):
sys.stdout.write(prompt)
sys.stdout.flush()
return sys.stdin.readline()
#sys.__readlinehook__ = get_readline_hook()
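# Hedged usage sketch: installing a hook routes every PyOS_Readline call
# (the interactive prompt, input()) through Python; None restores the default.
#
#   set_readline_hook(stdio_readline)   # pure-Python replacement hook
#   hook = get_readline_hook()          # hands back stdio_readline
#   set_readline_hook(None)             # interpreter default (address 0)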
|
import requests
import json
from globalVar import log
class SendMessage(object):
"""发送信息到微信"""
appid = ""
appsecret = ""
access_token = ""
def get_access_token(self):
urlAccessToken = "https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid=" + \
SendMessage.appid + "&secret=" + SendMessage.appsecret
res = requests.get(urlAccessToken)
SendMessage.access_token = res.json()["access_token"]
log.logger.info("重新获取access token成功")
def send_message(self, user_id, template_id, name, price, time):
"""把模板信息发送到指定用户
Args:
user_id:用户id,关注该公众号的用户所对应的用户id
template_id:模板id
name:股票名称
price:价格
"""
urlSendMessage = "https://api.weixin.qq.com/cgi-bin/message/template/send?access_token=" + \
SendMessage.access_token
param = {
"touser": user_id,
"template_id": template_id,
"topcolor": "#FF0000",
"data": {
"name": {
"value": name,
"color": "#ff0000"
},
"price": {
"value": price,
"color": "#ff0000"
},
"time": {
"value": time,
"color": "#ff0000"
}
}
}
        requests.post(url=urlSendMessage, data=json.dumps(param))
        log.logger.info("Sent message to user: {}, template: {}, stock: {}, price: {}, time: {}".format(
            user_id, template_id, name, price, time))
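# Hedged usage sketch (all credentials and ids below are placeholders):
#
#   SendMessage.appid = "wx-your-appid"
#   SendMessage.appsecret = "your-appsecret"
#   sender = SendMessage()
#   sender.get_access_token()
#   sender.send_message("openid", "template-id", "AAPL", "187.30", "10:00")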
|
import torchtext.data as data
class CMUDict(data.Dataset):
def __init__(self, data_lines, g_field, p_field):
fields = [('grapheme', g_field), ('phoneme', p_field)]
examples = [] # maybe ignore '...-1' grapheme
for line in data_lines:
grapheme, phoneme = line.split(maxsplit=1)
examples.append(data.Example.fromlist([grapheme, phoneme],
fields))
self.sort_key = lambda x: len(x.grapheme)
super(CMUDict, self).__init__(examples, fields)
@classmethod
def splits(cls, path, g_field, p_field, seed=None):
import random
if seed is not None:
random.seed(seed)
with open(path) as f:
lines = f.readlines()
random.shuffle(lines)
train_lines, val_lines, test_lines = [], [], []
for i, line in enumerate(lines):
if i % 20 == 0:
val_lines.append(line)
elif i % 20 < 3:
test_lines.append(line)
else:
train_lines.append(line)
train_data = cls(train_lines, g_field, p_field)
val_data = cls(val_lines, g_field, p_field)
test_data = cls(test_lines, g_field, p_field)
return (train_data, val_data, test_data)
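# Hedged usage sketch (assumes legacy torchtext (<= 0.8) where data.Field
# exists, and a CMU-dict formatted file at the hypothetical path below):
#
#   g_field = data.Field(tokenize=list)                  # graphemes as chars
#   p_field = data.Field(tokenize=lambda s: s.split())   # phonemes as tokens
#   train, val, test = CMUDict.splits('cmudict.dict', g_field, p_field, seed=0)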
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from openstack_controller.admission.validators import base
from openstack_controller import exception
class KeystoneValidator(base.BaseValidator):
service = "identity"
def validate(self, review_request):
keycloak_section = (
review_request.get("object", {})
.get("spec", {})
.get("features", {})
.get("keystone", {})
.get("keycloak", {})
)
if (
keycloak_section.get("enabled", False)
and keycloak_section.get("url") is None
):
raise exception.OsDplValidationFailed(
"Malformed OpenStackDeployment spec, if keycloak is "
"enabled for identity service, you need to specify url."
)
|
# Generated by Django 3.1.14 on 2022-03-24 09:47
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
dependencies = [
('feedback', '0029_auto_20220324_0944'),
]
operations = [
migrations.AddField(
model_name='report',
name='uuid',
field=models.UUIDField(default=uuid.uuid4, editable=False, null=True),
),
migrations.AlterField(
model_name='report',
name='external_uuid',
field=models.UUIDField(editable=False, null=True, unique=True, verbose_name='Identifier'),
),
]
|
# ========== Thanks https://github.com/Eric-mingjie/rethinking-network-pruning ============
# ========== we adopt the code from the above link and did modifications ============
# ========== the comments as #=== === were added by us, while the comments as # were the original one ============
from __future__ import print_function
import argparse
import math
import os
import random
import shutil
import time
import numpy as np
import torch
import torch.nn as nn
import torch.backends.cudnn as cudnn
import torch.optim as optim
import torch.utils.data as data
import torchvision.transforms as transforms
import torchvision.datasets as datasets
# from tensorboardX import SummaryWriter
import models as models
from utils import Bar, Logger, AverageMeter, accuracy, mkdir_p, savefig, get_sv, get_hook, run_once, detach_hook, get_heuristic_sv
from utils.misc import get_zero_param
from pruner.GraSP import GraSP
from pruner.SNIP import SNIP
from pruner.SmartRatio import SmartRatio
model_names = sorted(name for name in models.__dict__
if name.islower() and not name.startswith("__")
and callable(models.__dict__[name]))
parser = argparse.ArgumentParser(description='PyTorch CIFAR10/100/TinyImagenet Training')
# Datasets
parser.add_argument('-d', '--dataset', default='cifar10', type=str)
parser.add_argument('-j', '--workers', default=4, type=int, metavar='N',
help='number of data loading workers (default: 4)')
# Optimization options
parser.add_argument('--epochs', default=160, type=int, metavar='N',
help='number of total epochs to run')
parser.add_argument('--start-epoch', default=0, type=int, metavar='N',
help='manual epoch number (useful on restarts)')
parser.add_argument('--train-batch', default=64, type=int, metavar='N',
help='train batchsize')
parser.add_argument('--test-batch', default=50, type=int, metavar='N',
help='test batchsize')
parser.add_argument('--lr', '--learning-rate', default=0.1, type=float,
metavar='LR', help='initial learning rate')
parser.add_argument('--drop', '--dropout', default=0, type=float,
metavar='Dropout', help='Dropout ratio')
parser.add_argument('--schedule', type=int, nargs='+', default=[80, 120],
help='Decrease learning rate at these epochs.')
parser.add_argument('--gamma', type=float, default=0.1, help='LR is multiplied by gamma on schedule.')
parser.add_argument('--momentum', default=0.9, type=float, metavar='M',
help='momentum')
parser.add_argument('--weight-decay', '--wd', default=1e-4, type=float,
metavar='W', help='weight decay (default: 1e-4)')
# Checkpoints
parser.add_argument('--resume', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
parser.add_argument('--model', default='', type=str, metavar='PATH',
help='path to the initialization checkpoint (default: none)')
# Architecture
parser.add_argument('--arch', '-a', metavar='ARCH', default='resnet20',
choices=model_names,
help='model architecture: ' +
' | '.join(model_names) +
' (default: resnet18)')
parser.add_argument('--depth', type=int, default=29, help='Model depth.')
# Miscs
parser.add_argument('--manualSeed', type=int, help='manual seed')
parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true',
help='evaluate model on validation set')
parser.add_argument('--save_dir', default='results/', type=str)
#Device options
parser.add_argument('--gpu-id', default='0', type=str,
help='id(s) for CUDA_VISIBLE_DEVICES')
# ========== these attributes were added by us in order to meet the needs of our experiment ============
# ========== the following 3 attributes have str/float type ============
# ========== for --writerdir, you should name it as Yourfolder/Expname, then set your tensorboard path to Yourfolder/ ============
parser.add_argument('--writerdir',default = 'InitExp/', type = str)
# ========== the linear_keep_ratio attribute should be used together with the smart_ratio attribute ============
parser.add_argument('--linear_keep_ratio', type=float, default=0.3, help='smart ratio: linear keep ratio')
# ========== the init_prune_ratio attribute should be used together with the smart_ratio/GraSP/SNIP attribute ============
parser.add_argument('--init_prune_ratio', type=float, default=0.98, help='init pruning ratio')
# ========== the following attributes have INT type, but actually they are BOOLEAN: zero or NONZERO ============
parser.add_argument('--rearrange',type = int, default = 0,help = 'rearrange the masks')
parser.add_argument('--shuffle_unmasked_weights',default = 0, type = int)
parser.add_argument('--smart_ratio',default = 0, type = int,help = 'using smart ratio')
parser.add_argument('--GraSP', type=int, default=0, help='Using GraSP')
parser.add_argument('--SNIP', type=int, default=0, help='Using SNIP')
parser.add_argument('--randLabel',type=int, default=0,help = 'Using randLabel Dataset for GraSP/SNIP')
parser.add_argument('--shufflePixel',type=int, default=0,help = 'Using shufflePixel AND RANDLABEL Dataset for GraSP/SNIP')
parser.add_argument('--hybrid',type=int, default=0,help = 'the Hybrid Method, should use with Smart Ratio')
parser.add_argument('--linear_decay',type=int, default=0,help = 'Ablation: Using Linear Decay,should use with Smart Ratio')
parser.add_argument('--ascend',type=int, default=0,help = 'Ablation: Using Ascend Smart Ratio')
parser.add_argument('--uniform',type=int, default=0,help = 'Ablation: Using Balance Keep_Ratio')
parser.add_argument('--cubic',type=int, default=0,help = 'Ablation: Using Cubic Keep_Ratio')
# ========== Can use this BOOLEAN attribute to read in the model in and Run it on the Trainloader to see ACC ============
parser.add_argument('--print_output',default = 0, type = int)
parser.add_argument('--sv', dest='compute_sv', action='store_true',
help='compute_sv throughout training')
args = parser.parse_args()
state = {k: v for k, v in args._get_kwargs()}
# Validate dataset
assert args.dataset == 'cifar10' or args.dataset == 'cifar100' or args.dataset == 'tinyimagenet', 'Dataset can only be cifar10 or cifar100 or tinyimagenet.'
gpu_id = args.gpu_id
os.environ["CUDA_VISIBLE_DEVICES"] = gpu_id
use_cuda = torch.cuda.is_available()
# Random seed
if args.manualSeed is None:
args.manualSeed = random.randint(1, 100000)
random.seed(args.manualSeed)
torch.manual_seed(args.manualSeed)
if use_cuda:
torch.cuda.manual_seed_all(args.manualSeed)
best_acc = 0 # best test accuracy
class CIFAR10RandomLabels(datasets.CIFAR10):
"""CIFAR10 dataset, with support for randomly corrupt labels.
Params
------
corrupt_prob: float
Default 1.0. The probability of a label being replaced with
random label.
num_classes: int
Default 10. The number of classes in the dataset.
"""
def __init__(self, corrupt_prob=1.0, num_classes=10, **kwargs):
super(CIFAR10RandomLabels, self).__init__(**kwargs)
self.n_classes = num_classes
if corrupt_prob > 0:
self.corrupt_labels(corrupt_prob)
def corrupt_labels(self, corrupt_prob):
labels = np.array(self.targets)
np.random.seed(12345)
mask = np.random.rand(len(labels)) <= corrupt_prob
rnd_labels = np.random.choice(self.n_classes, mask.sum())
labels[mask] = rnd_labels
# we need to explicitly cast the labels from npy.int64 to
# builtin int type, otherwise pytorch will fail...
targets = [int(x) for x in labels]
self.targets = targets
if args.shufflePixel != 0:
print('********************* DEBUG PRINT : ADDITION : SHUFFLE PIXEL ************************')
xs = torch.tensor(self.data)
Size = xs.size()
# e.g. for CIFAR10, is 50000 * 32 * 32 * 3
xs = xs.reshape(Size[0],-1)
for i in range(Size[0]):
xs[i] = xs[i][torch.randperm(xs[i].nelement())]
xs = xs.reshape(Size)
xs = xs.numpy()
self.data = xs
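# Hedged sanity check (mirrors corrupt_labels above): with corrupt_prob=1.0
# every label is redrawn uniformly over the 10 classes, so only about 10% of
# ds.targets coincide with the true CIFAR-10 labels by chance:
#
#   ds = CIFAR10RandomLabels(root='./data', train=True, download=True)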
class CIFAR100RandomLabels(datasets.CIFAR100):
"""CIFAR100 dataset, with support for randomly corrupt labels.
Params
------
corrupt_prob: float
Default 1.0. The probability of a label being replaced with
random label.
num_classes: int
Default 100. The number of classes in the dataset.
"""
def __init__(self, corrupt_prob=1.0, num_classes=100, **kwargs):
super(CIFAR100RandomLabels, self).__init__(**kwargs)
self.n_classes = num_classes
if corrupt_prob > 0:
self.corrupt_labels(corrupt_prob)
def corrupt_labels(self, corrupt_prob):
labels = np.array(self.targets)
np.random.seed(12345)
mask = np.random.rand(len(labels)) <= corrupt_prob
rnd_labels = np.random.choice(self.n_classes, mask.sum())
labels[mask] = rnd_labels
# we need to explicitly cast the labels from npy.int64 to
# builtin int type, otherwise pytorch will fail...
targets = [int(x) for x in labels]
self.targets = targets
if args.shufflePixel != 0:
print('********************* DEBUG PRINT : ADDITION : SHUFFLE PIXEL ************************')
xs = torch.tensor(self.data)
Size = xs.size()
# e.g. for CIFAR100, is 50000 * 32 * 32 * 3
xs = xs.reshape(Size[0],-1)
for i in range(Size[0]):
xs[i] = xs[i][torch.randperm(xs[i].nelement())]
xs = xs.reshape(Size)
xs = xs.numpy()
self.data = xs
def main():
global best_acc
start_epoch = args.start_epoch # start from epoch 0 or last checkpoint epoch
if args.print_output == 0:
# writer = SummaryWriter(args.writerdir)
os.makedirs(args.save_dir, exist_ok=True)
# Data
# ========== The following preprocessing procedure is adopted from https://github.com/alecwangcq/GraSP ============
print('==> Preparing dataset %s' % args.dataset)
if args.dataset == 'cifar10':
transform_train = transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
transform_test = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
dataloader = datasets.CIFAR10
num_classes = 10
elif args.dataset == 'cifar100':
dataloader = datasets.CIFAR100
num_classes = 100
transform_train = transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.5071, 0.4867, 0.4408), (0.2675, 0.2565, 0.2761)),
])
transform_test = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.5071, 0.4867, 0.4408), (0.2675, 0.2565, 0.2761)),
])
elif args.dataset == 'tinyimagenet':
args.schedule = [150,225]
num_classes = 200
tiny_mean = [0.48024578664982126, 0.44807218089384643, 0.3975477478649648]
tiny_std = [0.2769864069088257, 0.26906448510256, 0.282081906210584]
transform_train = transforms.Compose([
transforms.RandomCrop(64, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize(tiny_mean, tiny_std)])
transform_test = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(tiny_mean, tiny_std)])
args.workers = 16
args.epochs = 300
if args.dataset != 'tinyimagenet':
trainset = dataloader(root='./data', train=True, download=True, transform=transform_train)
else:
trainset = datasets.ImageFolder('./data' + '/tiny_imagenet/train', transform=transform_train)
trainloader = data.DataLoader(trainset, batch_size=args.train_batch, shuffle=True, num_workers=args.workers)
if args.dataset != 'tinyimagenet':
testset = dataloader(root='./data', train=False, download=False, transform=transform_test)
testloader = data.DataLoader(testset, batch_size=args.test_batch, shuffle=False, num_workers=args.workers)
else:
testset = datasets.ImageFolder('./data' + '/tiny_imagenet/val', transform=transform_test)
testloader = data.DataLoader(testset, batch_size=args.test_batch, shuffle=False,
num_workers=args.workers)
# Model
print("==> creating model '{}'".format(args.arch))
if args.arch.endswith('resnet'):
model = models.__dict__[args.arch](
num_classes=num_classes,
depth=args.depth,
)
model_ref = models.__dict__[args.arch](
num_classes=num_classes,
depth=args.depth,
)
else:
model = models.__dict__[args.arch](num_classes=num_classes)
model_ref = models.__dict__[args.arch](num_classes=num_classes)
model.cuda()
model_ref.cuda()
cudnn.benchmark = True
print(' Total Conv and Linear Params: %.2fM' % (sum(p.weight.data.numel() for p in model.modules() if isinstance(p,nn.Linear) or isinstance(p,nn.Conv2d))/1000000.0))
criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(model.parameters(), lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay) # default is 0.001
# Resume
if args.dataset == 'cifar10':
title = 'cifar-10-' + args.arch
elif args.dataset == 'cifar100':
title = 'cifar-100-' + args.arch
else:
title = 'tinyimagenet' + args.arch
if args.resume:
# Load checkpoint.
print('==> Getting reference model from checkpoint..')
assert os.path.isfile(args.resume), 'Error: no checkpoint directory found!'
checkpoint = torch.load(args.resume, map_location='cpu')
start_epoch = args.start_epoch
model_ref.load_state_dict(checkpoint['state_dict'])
if args.randLabel != 0 or args.shufflePixel != 0:
assert args.dataset == 'cifar10' or args.dataset == 'cifar100','randLabel/shufflePixel can only be used together with cifar10/100.'
print('###################### DEBUG PRINT : USING RANDLABEL TO CALCULATE ####################')
if args.dataset == 'cifar10':
trainset = CIFAR10RandomLabels(root='./data', train=True, download=True, transform=transform_train)
else:
trainset = CIFAR100RandomLabels(root='./data', train=True, download=True, transform=transform_train)
trainloader = data.DataLoader(trainset, batch_size=args.train_batch, shuffle=True, num_workers=args.workers)
if args.print_output == 0:
logger = Logger(os.path.join(args.save_dir, 'log_scratch.txt'), title=title)
logger.set_names(['Learning Rate', 'Train Loss', 'Valid Loss', 'Train Acc.', 'Valid Acc.'])
# set some weights to zero, according to model_ref ---------------------------------
if args.model:
print('==> Loading init model from %s'%args.model)
checkpoint = torch.load(args.model, map_location='cpu')
model.load_state_dict(checkpoint['state_dict'])
size_hook = None
if args.compute_sv:
training_sv = []
training_sv_avg = []
training_sv_std = []
h_sv = []
print('[*] Will compute singular values throught training.')
size_hook = get_hook(model, (nn.Linear, nn.Conv2d, nn.ConvTranspose2d))
run_once(trainloader, model)
detach_hook([size_hook])
# ========== the following code is the implementation of Smart Ratio ============
if args.smart_ratio != 0:
print("################### DEBUG PRINT : USING SMART RATIO ###################")
masks = SmartRatio(model,args.init_prune_ratio,'cuda',args)
# ========== the following code is the implementation of GraSP ============
if args.GraSP != 0:
print("################### DEBUG PRINT : USING GraSP ###################")
# ========== If use ResNet56, there will be risk to meet the CUDA OUT OF MEMORY ERROR ============
samples_per_class = 10
num_iters = 1
if args.arch == 'resnet' and args.depth > 32:
samples_per_class = 1
num_iters = 10
if args.dataset == 'tinyimagenet':
samples_per_class = 1
num_iters = 10
if args.randLabel != 0 or args.shufflePixel != 0:
assert args.dataset == 'cifar10' or args.dataset == 'cifar100','randLabel/shufflePixel can only be used together with cifar10/100.'
print('###################### DEBUG PRINT : USING RANDLABEL TO CALCULATE ####################')
if args.dataset == 'cifar10':
randset = CIFAR10RandomLabels(root='./data', train=True, download=True, transform=transform_train)
else:
randset = CIFAR100RandomLabels(root='./data', train=True, download=True, transform=transform_train)
randloader = data.DataLoader(randset, batch_size=args.train_batch, shuffle=True, num_workers=args.workers)
masks = GraSP(model, args.init_prune_ratio, randloader, 'cuda',num_classes,samples_per_class,num_iters)
else:
masks = GraSP(model, args.init_prune_ratio, trainloader, 'cuda',num_classes,samples_per_class,num_iters)
# ========== the following code is the implementation of SNIP ============
if args.SNIP != 0:
print("################### DEBUG PRINT : USING SNIP ###################")
# ========== If use ResNet56, there will be risk to meet the CUDA OUT OF MEMORY ERROR ============
samples_per_class = 10
num_iters = 1
if args.arch == 'resnet' and args.depth > 32:
samples_per_class = 1
num_iters = 10
if args.dataset == 'tinyimagenet':
samples_per_class = 1
num_iters = 10
if args.randLabel != 0 or args.shufflePixel != 0:
assert args.dataset == 'cifar10' or args.dataset == 'cifar100','randLabel/shufflePixel can only be used together with cifar10/100.'
print('###################### DEBUG PRINT : USING RANDLABEL TO CALCULATE ####################')
if args.dataset == 'cifar10':
randset = CIFAR10RandomLabels(root='./data', train=True, download=True, transform=transform_train)
else:
randset = CIFAR100RandomLabels(root='./data', train=True, download=True, transform=transform_train)
randloader = data.DataLoader(randset, batch_size=args.train_batch, shuffle=True, num_workers=args.workers)
masks = SNIP(model, args.init_prune_ratio, randloader, 'cuda',num_classes,samples_per_class,num_iters)
else:
masks = SNIP(model, args.init_prune_ratio, trainloader, 'cuda',num_classes,samples_per_class,num_iters)
CNT = 0
for m,m_ref in zip(model.modules(),model_ref.modules()):
if isinstance(m, nn.Conv2d) or isinstance(m,nn.Linear):
if isinstance(m,nn.Conv2d):
TYPE = "Conv"
else:
TYPE = "Linear"
weight_copy = m_ref.weight.data.abs().clone()
# DEFAULT : generate the masks from model_ref, i.e. the LT method
mask = weight_copy.gt(0).float().cuda()
# Else : generate the masks using the Smart Ratio / GraSP / SNIP
# ========== set the Smart Ratio / GraSP / SNIP masks ============
if args.smart_ratio != 0:
mask = masks[CNT]
elif args.GraSP != 0:
mask = masks[m]
elif args.SNIP != 0:
mask = masks[m]
CNT += 1
total = mask.numel()
# ========== print the keep-ratio and #para, #remained ============
remained = int(torch.sum(mask))
keep_ratio = remained/total
print("LAYER %d(%s) : KEEP_RATIO = %.6f NUM_PARA = %d REMAINED_PARA = %d" % (CNT,TYPE,keep_ratio*100,total,remained))
# ========== rearrange the masks (if stated) ============
# ========== note that this operation will also change the weight retained ============
if args.rearrange != 0:
print("################### DEBUG PRINT : REARRANGE ###################")
mask = mask.view(-1)[torch.randperm(mask.nelement())].view(mask.size())
# ========== set the pruned weights to 0 ============
m.weight.data.mul_(mask)
# ========== Ablation study: Shuffle Weights ============
# ========== shuffle the unmasked weights (if stated) ============
# ========== we keep the arch but change the position of the weight ============
if args.shuffle_unmasked_weights != 0:
print("################### DEBUG PRINT : SHUFFLE UNMASKED WEIGHTS ###################")
Size = mask.size()
mask = mask.view(-1)
m.weight.data = m.weight.data.view(-1)
non_zero = int(sum(mask).item())
value,idx = torch.topk(mask,non_zero)
rand_idx = idx.view(-1)[torch.randperm(idx.nelement())].view(idx.size())
m.weight.data[rand_idx] = m.weight.data[idx]
mask = mask.view(Size)
m.weight.data = m.weight.data.view(Size)
# ========== print the training acc and RETURN (if stated) ============
    if args.print_output != 0:
        losses = AverageMeter()
        top1 = AverageMeter()
        top5 = AverageMeter()
        for batch_idx, (inputs, targets) in enumerate(trainloader):
            if use_cuda:
                inputs, targets = inputs.cuda(), targets.cuda()
            inputs, targets = torch.autograd.Variable(inputs), torch.autograd.Variable(targets)
            # compute output
            outputs = model(inputs)
            loss = criterion(outputs, targets)
            # measure accuracy and record loss
            prec1, prec5 = accuracy(outputs.data, targets.data, topk=(1, 5))
            losses.update(loss.data.item(), inputs.size(0))
            top1.update(prec1.item(), inputs.size(0))
            top5.update(prec5.item(), inputs.size(0))
        print("Train acc : {}".format(top1.avg))
        return
# Train and val
for epoch in range(start_epoch, args.epochs):
adjust_learning_rate(optimizer, epoch)
num_parameters = 0
print('\nEpoch: [%d | %d] LR: %f' % (epoch + 1, args.epochs, state['lr']))
# ========== calculate #ZERO paras (including zero Conv weights and zero Linear weights) ============
ZERO_parameters = get_zero_param(model)
print('Zero parameters: {}'.format(ZERO_parameters))
# ========== calculate #paras (including Conv weights and Linear weights) ============
for m in model.modules():
if isinstance(m,nn.Conv2d) or isinstance(m,nn.Linear):
num_parameters += m.weight.data.numel()
# ========== print the #weights information at every epoch to make sure the pruning pipeline is executed ============
print('Parameters: {}'.format(num_parameters))
print('Overall Pruning Ratio : {}'.format(float(ZERO_parameters)/float(num_parameters)))
train_loss, train_acc = train(trainloader, model, criterion, optimizer, epoch, use_cuda)
test_loss, test_acc = test(testloader, model, criterion, epoch, use_cuda)
if args.compute_sv and epoch % 10 == 0:
sv, sv_avg, sv_std = get_sv(model, size_hook)
training_sv.append(sv)
training_sv_avg.append(sv_avg)
training_sv_std.append(sv_std)
h_sv, _, _ = get_heuristic_sv(model)
np.save(os.path.join(args.save_dir, 'sv.npy'), training_sv)
np.save(os.path.join(args.save_dir, 'sv_avg.npy'), training_sv_avg)
np.save(os.path.join(args.save_dir, 'sv_std.npy'), training_sv_std)
np.save(os.path.join(args.save_dir, 'h_sv.npy'), h_sv)
# ========== write the scalar to tensorboard ============
# writer.add_scalar('train_loss', train_loss,epoch)
# writer.add_scalar('test_loss',test_loss,epoch)
# writer.add_scalar('train_acc', train_acc,epoch)
# writer.add_scalar('test_acc', test_acc,epoch)
# append logger file
logger.append([state['lr'], train_loss, test_loss, train_acc, test_acc])
# save model
is_best = test_acc > best_acc
best_acc = max(test_acc, best_acc)
save_checkpoint({
'epoch': epoch + 1,
'state_dict': model.state_dict(),
'acc': test_acc,
'best_acc': best_acc,
'optimizer' : optimizer.state_dict(),
}, is_best, checkpoint=args.save_dir)
logger.close()
    # writer.close()  # the SummaryWriter above is commented out
print('Best acc:')
print(best_acc)
def train(trainloader, model, criterion, optimizer, epoch, use_cuda):
# switch to train mode
model.train()
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
top1 = AverageMeter()
top5 = AverageMeter()
end = time.time()
bar = Bar('Processing', max=len(trainloader)//100+1)
print(args)
for batch_idx, (inputs, targets) in enumerate(trainloader):
# measure data loading time
data_time.update(time.time() - end)
if use_cuda:
inputs, targets = inputs.cuda(), targets.cuda()
inputs, targets = torch.autograd.Variable(inputs), torch.autograd.Variable(targets)
# compute output
outputs = model(inputs)
loss = criterion(outputs, targets)
# measure accuracy and record loss
prec1, prec5 = accuracy(outputs.data, targets.data, topk=(1, 5))
losses.update(loss.data.item(), inputs.size(0))
top1.update(prec1.item(), inputs.size(0))
top5.update(prec5.item(), inputs.size(0))
# compute gradient and do SGD step
optimizer.zero_grad()
loss.backward()
for k, m in enumerate(model.modules()):
# print(k, m)
if isinstance(m, nn.Conv2d) or isinstance(m,nn.Linear):
weight_copy = m.weight.data.abs().clone()
mask = weight_copy.gt(0).float().cuda()
m.weight.grad.data.mul_(mask)
optimizer.step()
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
# plot progress
if batch_idx % 100 ==0:
bar.suffix = '({batch}/{size}) Data: {data:.3f}s | Batch: {bt:.3f}s | Total: {total:} | ETA: {eta:} | Loss: {loss:.4f} | top1: {top1: .4f} | top5: {top5: .4f}'.format(
batch=batch_idx + 1,
size=len(trainloader),
data=data_time.avg,
bt=batch_time.avg,
total=bar.elapsed_td,
eta=bar.eta_td,
loss=losses.avg,
top1=top1.avg,
top5=top5.avg,
)
bar.next()
bar.finish()
print("Train acc : {}".format(top1.avg))
return (losses.avg, top1.avg)
def test(testloader, model, criterion, epoch, use_cuda):
global best_acc
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
top1 = AverageMeter()
top5 = AverageMeter()
# switch to evaluate mode
model.eval()
end = time.time()
bar = Bar('Processing', max=len(testloader)//100+1)
for batch_idx, (inputs, targets) in enumerate(testloader):
# measure data loading time
data_time.update(time.time() - end)
if use_cuda:
inputs, targets = inputs.cuda(), targets.cuda()
        # volatile is a no-op on PyTorch >= 0.4; torch.no_grad() is the modern
        # replacement for inference-only forward passes
        inputs, targets = torch.autograd.Variable(inputs, volatile=True), torch.autograd.Variable(targets)
# compute output
outputs = model(inputs)
loss = criterion(outputs, targets)
# measure accuracy and record loss
prec1, prec5 = accuracy(outputs.data, targets.data, topk=(1, 5))
losses.update(loss.data.item(), inputs.size(0))
top1.update(prec1.item(), inputs.size(0))
top5.update(prec5.item(), inputs.size(0))
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
# plot progress
        if batch_idx % 100 == 0:
bar.suffix = '({batch}/{size}) Data: {data:.3f}s | Batch: {bt:.3f}s | Total: {total:} | ETA: {eta:} | Loss: {loss:.4f} | top1: {top1: .4f} | top5: {top5: .4f}'.format(
batch=batch_idx + 1,
size=len(testloader),
data=data_time.avg,
bt=batch_time.avg,
total=bar.elapsed_td,
eta=bar.eta_td,
loss=losses.avg,
top1=top1.avg,
top5=top5.avg,
)
bar.next()
bar.finish()
print("Test acc : {}".format(top1.avg))
return (losses.avg, top1.avg)
def save_checkpoint(state, is_best, checkpoint, filename='scratch.pth.tar'):
    filepath = os.path.join(checkpoint, filename)
    torch.save(state, filepath)
    if is_best:
        # keep a separate copy of the best-performing checkpoint so that
        # later (worse) epochs cannot overwrite it
        torch.save(state, os.path.join(checkpoint, 'model_best.pth.tar'))
def adjust_learning_rate(optimizer, epoch):
global state
if epoch in args.schedule:
state['lr'] *= args.gamma
for param_group in optimizer.param_groups:
param_group['lr'] = state['lr']
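def _pruning_mask_sketch():
    # Minimal, self-contained sketch (illustrative; not part of the training
    # pipeline above) of the gradient-masking idea used in train(): once a
    # weight has been pruned to zero, multiplying its gradient by the
    # (|w| > 0) mask keeps it at zero through every optimizer step.
    layer = nn.Linear(4, 2)
    layer.weight.data[0].zero_()                  # "prune" the first output row
    loss = layer(torch.randn(8, 4)).sum()
    loss.backward()
    mask = layer.weight.data.abs().gt(0).float()  # 1 where the weight survives
    layer.weight.grad.data.mul_(mask)             # pruned rows receive no update
    return layer.weight.grad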
if __name__ == '__main__':
main()
|
from reb import P, PTNode
def same(extraction, expected):
"""Assert that extraction result has the same effect with expected"""
assert isinstance(extraction, list)
assert isinstance(expected, list)
assert len(extraction) == len(expected)
for ext, exp in zip(extraction, expected):
assert isinstance(ext, PTNode)
if isinstance(exp, str):
assert ext.content == exp
elif isinstance(exp, PTNode):
assert pt_for_user(ext) == pt_for_user(exp)
def pt_for_user(ptnode: PTNode) -> list:
    """Flatten a parse tree into its root node plus every tagged descendant."""
def without_children(ptn):
return PTNode(text=ptn.text, start=ptn.start(), end=ptn.end(), tag=ptn.tag)
def node_with_tag(ptn):
for n in ptn.children:
if n.tag is not None:
yield without_children(n)
yield from node_with_tag(n)
return [without_children(ptnode)] + sorted(node_with_tag(ptnode),
key=lambda n: (n.start, n.end, n.tag))
class ExtractionTestCases(object):
def test_ptext(self):
self.case(P.pattern('a'), 'a', ['a'])
self.case(P.pattern('a'), 'aa', ['a', 'a'])
self.case(P.pattern('a'), 'aba', ['a', 'a'])
self.case(P.pattern('a'), 'b', [])
def test_panychar(self):
self.case(P.ANYCHAR, 'a', ['a'])
self.case(P.ANYCHAR, 'b', ['b'])
self.case(P.ANYCHAR, ' ', [' '])
self.case(P.ANYCHAR, '\n', ['\n'])
self.case(P.ANYCHAR, '', [])
self.case(P.ANYCHAR, 'abc', ['a', 'b', 'c'])
def test_pinchars(self):
self.case(P.ic('bcd'), 'a', [])
self.case(P.ic('abc'), 'a', ['a'])
self.case(P.ic('abc'), 'b', ['b'])
self.case(P.ic('abc'), 'c', ['c'])
self.case(P.ic('abc'), 'abcdef', ['a', 'b', 'c'])
def test_pnotinchars(self):
self.case(P.nic('bcd'), 'a', ['a'])
self.case(P.nic('abc'), 'a', [])
self.case(P.nic('abc'), 'b', [])
self.case(P.nic('abc'), 'c', [])
self.case(P.nic('abc'), 'abcdef', ['d', 'e', 'f'])
def test_pany(self):
self.case(P.any('ab', 'abc', 'cd'), 'abcdef', ['ab', 'cd'])
self.case(P.any('aa', 'ab', 'ac'), 'aaaaaa', ['aa', 'aa', 'aa'])
def test_prepeat(self):
self.case(P.n('a'), 'aaa', ['aaa'])
self.case(P.n('a', 0, 1), 'aaa', ['a', 'a', 'a'])
self.case(P.n('a', 0, 1), '', [])
self.case(P.n('a', 4), 'a' * 3, [])
self.case(P.n('a', 4), 'a' * 4, ['a' * 4])
self.case(P.n('a', 4), 'a' * 5, ['a' * 5])
self.case(P.n('a', 4), 'a' * 20, ['a' * 20])
self.case(P.n('a', 0, 5), 'a' * 6, ['aaaaa', 'a'])
self.case(P.n('a', 0, 5), 'a' * 10, ['aaaaa', 'aaaaa'])
self.case(P.n('a', 2, 3), 'a' * 6, ['aaa', 'aaa'])
self.case(P.n('a', 3, 5), 'a' * 9, ['aaaaa', 'aaaa'])
self.case(P.n('a', exact=2), 'a' * 5, ['aa', 'aa'])
self.case(P.n('a', greedy=False), 'aaa', [])
def test_padjacent(self):
self.case(P.pattern('a') + P.ic('abcde'), 'ab', ['ab'])
self.case(P.pattern('a') + P.ic('abcde'), 'ac', ['ac'])
self.case(P.pattern('a') + P.ic('abcde'), 'ad', ['ad'])
self.case(P.pattern('a') + P.ic('abcde'), 'af', [])
self.case(P.pattern('a') + P.ic('abcde'), 'ba', [])
self.case(P.ic('ab') + P.ic('cd') + P.ic('ef'), 'aacee', ['ace'])
self.case(P.ic('ab') + P.ic('cd') + P.ic('ef'), 'abdfe', ['bdf'])
self.case(P.ic('ab') + P.ic('cd') + P.ic('ef'), 'acdfe', [])
self.case(P.ic('ab') + P.ic('cd') + P.ic('ef'), 'aaafe', [])
self.case(P.ic('ab') + P.ic('cd') + P.ic('ef'), 'aacae', [])
def test_pstarting(self):
self.case(P.STARTING + 'a', 'aaa', [PTNode('aaa', start=0, end=1)])
self.case(P.STARTING + 'a', 'baa', [])
def test_pending(self):
self.case('a' + P.ENDING, 'aaa', [PTNode('aaa', start=2, end=3)])
self.case('a' + P.ENDING, 'aab', [])
def test_overall1(self):
text = 'a' * 10 + 'b'
self.case(P.tag(P.n('a'), tag='A') + 'b', text, [
PTNode(text=text, start=0, end=11, children=[
PTNode(text=text, start=0, end=10, tag='A')
])
])
def test_overall2(self):
text = 'a' * 30 + 'c'
self.case(P.n('a') + 'b', text, [])
def test_overall3(self):
text = 'a' * 6
self.case(P.n(P.n('a', exact=3)), text, ['a' * 6])
text = 'a' * 8
self.case(P.n(P.n('a', exact=3)), text, ['a' * 6])
def test_overall4(self):
text = 'aaaab'
self.case(P.n('a', 2, 3) + 'b', text, ['aaab'])
def test_overall5(self):
ptn = P.pattern('aaaaab') | P.pattern('aaa') | P.pattern('aac')
text = 'aaaaac'
self.case(ptn, text, ['aaa', 'aac'])
def test_overall6(self):
ptn = (P.tag('ab', tag='A') + 'c' | P.tag(P.n(P.ic('abc'), exact=3), tag='B')) + 'd'
text = 'abcd'
self.case(ptn, text, [
PTNode(text, start=0, end=4, children=[
PTNode(text, start=0, end=2, tag='A')
])
])
def test_overall7(self):
ptn = P.n01('a') \
+ P.n(P.ic('ab'), exact=2) \
+ P.n01(P.n(P.ic('ab'), exact=2)) \
+ 'b'
self.case(ptn, 'aaaab', ['aaaab'])
def test_overall8_1(self):
self.case(P.n01('a' + P.n('b')), 'abbb', ['abbb'])
def test_overall8_2(self):
self.case(P.n01('a' + P.n('b', greedy=False)), 'abbb', ['a'])
def test_overall8_3(self):
self.case(P.n01('a' + P.n('b'), greedy=False), 'abbb', [])
def test_overall8_4(self):
self.case(P.n01('a' + P.n('b', greedy=False), greedy=False), 'abbb', [])
class TestExtractionPlain(ExtractionTestCases):
def case(self, pattern, text, expect_pt):
same(pattern.extractall(text, engine='plain'), expect_pt)
class TestExtractionVM(ExtractionTestCases):
def case(self, pattern, text, expect_pt):
same(pattern.extractall(text, engine='vm'), expect_pt)
class TestExtractionVM2(ExtractionTestCases):
def case(self, pattern, text, expect_pt):
same(pattern.extractall(text, engine='vm2'), expect_pt)
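def _reb_usage_sketch():
    # Illustrative sketch built only from the API exercised above: match
    # one-or-more digits, tag the run 'NUM', and extract with the plain
    # engine. Expected result: ['12', '345'].
    digits = P.tag(P.n(P.ic('0123456789'), 1), tag='NUM')
    matches = digits.extractall('order 12, row 345', engine='plain')
    return [node.content for node in matches]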
|
#!/usr/bin/env python3
### Importing
# Importing Common Files
from botModule.importCom import *
### Logout Handler
@Client.on_message(filters.private & filters.command("revoke"))
async def revoke_handler(bot: Client, msg: Message):
userid = msg.chat.id
query = {
'userid' : userid
}
# If user found in db
if collection_login.find_one(query):
        # remove the stored login details from the database
collection_login.delete_one(query)
await msg.reply_text(
"Your account is now logged out🥺.\nTo Login again send your login detail.",
parse_mode = 'html'
)
# If user not found in db
else:
await msg.reply_text(
f"<b><u>You are not even logged in😒. So how can I remove your account.</u></b>{common_text}",
parse_mode = 'html'
)
return
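# For reference (illustrative; the login handler itself is defined elsewhere):
# it is expected to store a document keyed the same way, e.g.
#     collection_login.insert_one({'userid': msg.chat.id, ...})
# which is exactly what the find_one/delete_one calls above match on.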
|
import calendar
import datetime
class UTC(datetime.tzinfo):
def utcoffset(self, dt):
return datetime.timedelta(0)
def tzname(self, dt):
return "UTC"
def dst(self, dt):
return datetime.timedelta(0)
utc = UTC()
def is_tz_aware(value):
return value.tzinfo is not None and value.tzinfo.utcoffset(value) is not None
def datetime_to_timestamp(value):
return calendar.timegm(value.utctimetuple())
def timestamp_to_datetime(value):
return datetime.datetime.utcfromtimestamp(value)
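def _utc_roundtrip_sketch():
    # Illustrative round trip through the helpers above. Note that
    # timestamp_to_datetime returns a *naive* datetime in UTC, so the utc
    # tzinfo is reattached before comparing.
    aware = datetime.datetime(2020, 1, 1, tzinfo=utc)
    assert is_tz_aware(aware)
    ts = datetime_to_timestamp(aware)             # 1577836800
    restored = timestamp_to_datetime(ts).replace(tzinfo=utc)
    assert restored == aware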
|
import logging
log = logging.getLogger(__name__)
handler = logging.StreamHandler()
formatter = logging.Formatter(fmt='%(asctime)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
handler.setLevel('DEBUG')
log.addHandler(handler)
# the logger's own level must also be lowered: with the root default of
# WARNING, DEBUG and INFO records are dropped before reaching the handler
log.setLevel('DEBUG')
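# With both the handler and the logger at DEBUG, records at every level now
# reach the stream, e.g.:
#     log.debug('pipeline step %s finished', 'extract')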
|
from expungeservice.models.charge_types.civil_offense import CivilOffense
from tests.factories.charge_factory import ChargeFactory
from tests.models.test_charge import Dispositions
def test_00_is_not_a_civil_offense():
charge = ChargeFactory.create(statute="00", level="N/A", disposition=Dispositions.CONVICTED)
assert not isinstance(charge, CivilOffense)
def test_100_is_not_a_civil_offense():
charge = ChargeFactory.create(statute="100", level="N/A", disposition=Dispositions.CONVICTED)
assert not isinstance(charge, CivilOffense)
def test_99_is_a_civil_offense():
charge = ChargeFactory.create(statute="99", level="N/A", disposition=Dispositions.CONVICTED)
assert isinstance(charge, CivilOffense)
def test_55_is_a_civil_offense():
charge = ChargeFactory.create(statute="55", level="N/A", disposition=Dispositions.CONVICTED)
assert isinstance(charge, CivilOffense)
def test_fugitive_complaint():
charge = ChargeFactory.create(
statute="0", level="N/A", name="Fugitive Complaint", disposition=Dispositions.CONVICTED
)
assert isinstance(charge, CivilOffense)
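# Taken together, the cases above suggest the boundary of the rule: bare
# statutes from 01 through 99 classify as civil offenses, 00 and values of
# 100 or more do not, and a "Fugitive Complaint" is civil by name alone.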
|
from game.common.enums import Upgrades, ObjectType
from game.common.stats import GameStats
from game.controllers.controller import Controller
class UpgradeController(Controller):
def __init__(self):
super().__init__()
def handle_actions(self, client):
# if the client wants to drop an item, try to do it
        if client.action.item_to_drop != 0 and client.action.item_sub_type_to_drop != 0:
            obj = client.shooter.remove_from_inventory_enum(client.action.item_to_drop)
            if obj and obj.object_type == ObjectType.upgrade:
                if obj.upgrade_enum == Upgrades.armor:
                    # armor holds a damage multiplier; 1.0 means no reduction
                    client.shooter.armor = 1.0
                    # mark any remaining armor as not applied so the
                    # re-application loop below can take effect again
                    for item in client.shooter.inventory['upgrades']:
                        if item is not None and item.upgrade_enum == Upgrades.armor:
                            item.applied = False
                if obj.upgrade_enum == Upgrades.movement_boots:
                    # invert the multiplicative boost applied in the loop below
                    client.shooter.max_speed /= 1 + GameStats.upgrade_stats['movement_boost']
                if obj.upgrade_enum == Upgrades.backpack:
                    client.shooter.remove_consumable_slots(GameStats.upgrade_stats["backpack_slot_increase"])
for upgrade in client.shooter.inventory['upgrades']:
if upgrade is not None:
if upgrade.applied:
continue
if upgrade.upgrade_enum == Upgrades.armor:
client.shooter.armor = GameStats.upgrade_stats["armor_damage_reduction"]
elif upgrade.upgrade_enum == Upgrades.movement_boots:
client.shooter.max_speed *= 1 + GameStats.upgrade_stats["movement_boost"]
elif upgrade.upgrade_enum == Upgrades.backpack:
for i in range(GameStats.upgrade_stats["backpack_slot_increase"]):
client.shooter.add_new_slot("consumables")
upgrade.applied = True
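# Worked example of the speed math above (values illustrative): the boost is
# multiplicative, so applying and then dropping the boots restores the
# original speed exactly:
#     speed = 10.0
#     speed *= 1 + 0.25    # apply movement_boost = 0.25 -> 12.5
#     speed /= 1 + 0.25    # drop the boots              -> 10.0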
|
import os
from flask import Flask, render_template
from flask_bootstrap import Bootstrap
import config
app = Flask(__name__)
config_name = os.environ.get('FLASK_CONFIG', 'dev')
app.config.from_object(getattr(config, config_name.title() + 'Config'))
Bootstrap(app)
@app.route('/')
def index():
"""Serve client-side application."""
return render_template('index.html',
use_socketio=not app.config['NO_SOCKETIO'])
if __name__ == '__main__':
app.run() # pragma: no cover
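# Example (illustrative, assuming config defines a matching class): setting
# FLASK_CONFIG=prod before launch makes the lookup above resolve
# config.ProdConfig, since 'prod'.title() + 'Config' == 'ProdConfig'.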
|
'''
goalboost.model package
The goalboost model package consists of MongoEngine models along with
Marshmallow schemas. MongoEngine is our ORM for MongoDB, and Marshmallow
is a serialization library that helps us validate, consume, and expose
these ORM objects to the clients that need them at the API layer.
For MongoEngine, see http://mongoengine.org/
For Marshmallow and the MongoEngine integration piece, see:
https://marshmallow.readthedocs.org/en/latest/
https://github.com/touilleMan/marshmallow-mongoengine
'''
from flask_mongoengine import MongoEngine  # the flask.ext.* namespace was removed in Flask 1.0
db = MongoEngine()
def init_db(app):
global db
db.init_app(app)
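# Typical wiring (illustrative; the database name is hypothetical):
#     app = Flask(__name__)
#     app.config['MONGODB_SETTINGS'] = {'db': 'goalboost'}
#     init_db(app)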
|
import argparse
from csat.acquisition import get_factories
from csat.acquisition.runner import get_runner_class
class ListAction(argparse.Action):
    def __init__(self, option_strings, dest, const, default=None,
                 required=False, help=None, metavar=None):
        super(ListAction, self).__init__(option_strings=option_strings,
                                         dest=dest, nargs=0, const=const,
                                         default=default, required=required,
                                         help=help, metavar=metavar)

    def __call__(self, parser, namespace, values, option_string=None):
        # const holds the list of collector keys assembled in main()
        for collector in self.const:
            print(collector)
        parser.exit()
def main():
parser = argparse.ArgumentParser('csat-collect')
runner_class = get_runner_class()
subparsers = parser.add_subparsers()
collectors = []
for factory in get_factories():
subparser = subparsers.add_parser(factory.key)
runner = runner_class(factory)
subparser = runner.build_parser(subparser)
subparser.set_defaults(runner=runner)
collectors.append(factory.key)
parser.add_argument('-l', '--list', action=ListAction, const=collectors)
args = parser.parse_args()
return args.runner.run_as_subcommand(args)
if __name__ == '__main__':
main()
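# Example invocations (illustrative; flags as defined above):
#     csat-collect --list           # print the available collector keys and exit
#     csat-collect <key> [options]  # dispatch to that collector's runner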
|
from __future__ import absolute_import
from __future__ import print_function
import sys
import time
from typing import Any, Callable, List, Text, TypeVar
from psycopg2.extensions import cursor
CursorObj = TypeVar('CursorObj', bound=cursor)
from argparse import ArgumentParser
from django.core.management.base import CommandError
from django.db import connection
from zerver.lib.management import ZulipBaseCommand
from zerver.lib.topic_mutes import build_topic_mute_checker
from zerver.models import (
Realm,
UserProfile
)
def update_unread_flags(cursor, user_message_ids):
    # type: (CursorObj, List[int]) -> None
    # flags & 1 is the "read" bit on UserMessage; OR-ing it in marks these
    # rows as read. Interpolating the ids into the query is acceptable only
    # because they are integers fetched from the database, not user input.
    um_id_list = ', '.join(str(id) for id in user_message_ids)
query = '''
UPDATE zerver_usermessage
SET flags = flags | 1
WHERE id IN (%s)
''' % (um_id_list,)
cursor.execute(query)
def get_timing(message, f):
# type: (str, Callable) -> None
start = time.time()
print(message)
f()
elapsed = time.time() - start
print('elapsed time: %.03f\n' % (elapsed,))
def fix_unsubscribed(cursor, user_profile):
# type: (CursorObj, UserProfile) -> None
recipient_ids = []
def find_recipients():
# type: () -> None
query = '''
SELECT
zerver_subscription.recipient_id
FROM
zerver_subscription
INNER JOIN zerver_recipient ON (
zerver_recipient.id = zerver_subscription.recipient_id
)
WHERE (
zerver_subscription.user_profile_id = '%s' AND
zerver_recipient.type = 2 AND
(NOT zerver_subscription.active)
)
'''
cursor.execute(query, [user_profile.id])
rows = cursor.fetchall()
for row in rows:
recipient_ids.append(row[0])
print(recipient_ids)
get_timing(
'get recipients',
find_recipients
)
if not recipient_ids:
return
user_message_ids = []
def find():
# type: () -> None
recips = ', '.join(str(id) for id in recipient_ids)
query = '''
SELECT
zerver_usermessage.id
FROM
zerver_usermessage
INNER JOIN zerver_message ON (
zerver_message.id = zerver_usermessage.message_id
)
WHERE (
zerver_usermessage.user_profile_id = %s AND
(zerver_usermessage.flags & 1) = 0 AND
zerver_message.recipient_id in (%s)
)
''' % (user_profile.id, recips)
print('''
EXPLAIN analyze''' + query.rstrip() + ';')
cursor.execute(query)
rows = cursor.fetchall()
for row in rows:
user_message_ids.append(row[0])
print('rows found: %d' % (len(user_message_ids),))
get_timing(
'finding unread messages for non-active streams',
find
)
if not user_message_ids:
return
def fix():
# type: () -> None
update_unread_flags(cursor, user_message_ids)
get_timing(
'fixing unread messages for non-active streams',
fix
)
def fix_pre_pointer(cursor, user_profile):
# type: (CursorObj, UserProfile) -> None
pointer = user_profile.pointer
if not pointer:
return
is_topic_muted = build_topic_mute_checker(user_profile)
recipient_ids = []
def find_non_muted_recipients():
# type: () -> None
query = '''
SELECT
zerver_subscription.recipient_id
FROM
zerver_subscription
INNER JOIN zerver_recipient ON (
zerver_recipient.id = zerver_subscription.recipient_id
)
WHERE (
zerver_subscription.user_profile_id = '%s' AND
zerver_recipient.type = 2 AND
zerver_subscription.in_home_view AND
zerver_subscription.active
)
'''
cursor.execute(query, [user_profile.id])
rows = cursor.fetchall()
for row in rows:
recipient_ids.append(row[0])
print(recipient_ids)
get_timing(
'find_non_muted_recipients',
find_non_muted_recipients
)
if not recipient_ids:
return
user_message_ids = []
def find_old_ids():
# type: () -> None
recips = ', '.join(str(id) for id in recipient_ids)
query = '''
SELECT
zerver_usermessage.id,
zerver_recipient.type_id,
subject
FROM
zerver_usermessage
INNER JOIN zerver_message ON (
zerver_message.id = zerver_usermessage.message_id
)
INNER JOIN zerver_recipient ON (
zerver_recipient.id = zerver_message.recipient_id
)
WHERE (
zerver_usermessage.user_profile_id = %s AND
zerver_usermessage.message_id <= %s AND
(zerver_usermessage.flags & 1) = 0 AND
zerver_message.recipient_id in (%s)
)
''' % (user_profile.id, pointer, recips)
print('''
EXPLAIN analyze''' + query.rstrip() + ';')
cursor.execute(query)
rows = cursor.fetchall()
for (um_id, stream_id, topic) in rows:
if not is_topic_muted(stream_id, topic):
user_message_ids.append(um_id)
print('rows found: %d' % (len(user_message_ids),))
get_timing(
'finding pre-pointer messages that are not muted',
find_old_ids
)
if not user_message_ids:
return
def fix():
# type: () -> None
update_unread_flags(cursor, user_message_ids)
get_timing(
'fixing unread messages for pre-pointer non-muted messages',
fix
)
def fix(user_profile):
# type: (UserProfile) -> None
print('\n---\nFixing %s:' % (user_profile.email,))
with connection.cursor() as cursor:
fix_unsubscribed(cursor, user_profile)
fix_pre_pointer(cursor, user_profile)
connection.commit()
class Command(ZulipBaseCommand):
help = """Fix problems related to unread counts."""
def add_arguments(self, parser):
# type: (ArgumentParser) -> None
parser.add_argument('emails',
metavar='<emails>',
type=str,
nargs='*',
help='email address to spelunk')
parser.add_argument('--all',
action='store_true',
dest='all',
default=False,
help='fix all users in specified realm')
self.add_realm_args(parser)
def fix_all_users(self, realm):
# type: (Realm) -> None
user_profiles = list(UserProfile.objects.filter(
realm=realm,
is_bot=False
))
for user_profile in user_profiles:
fix(user_profile)
def fix_emails(self, realm, emails):
# type: (Realm, List[Text]) -> None
for email in emails:
try:
user_profile = self.get_user(email, realm)
except CommandError:
print("e-mail %s doesn't exist in the realm %s, skipping" % (email, realm))
                continue
fix(user_profile)
def handle(self, *args, **options):
# type: (*Any, **Any) -> None
realm = self.get_realm(options)
if options['all']:
if realm is None:
print('You must specify a realm if you choose the --all option.')
sys.exit(1)
self.fix_all_users(realm)
return
self.fix_emails(realm, options['emails'])
|
#!/usr/bin/env python
import os
filename = raw_input('Enter file name: ')
fobj = open(filename, 'w')
while True:
aLine = raw_input("Enter a line ('.' to quit): ")
if aLine != ".":
fobj.write('%s%s' % (aLine, os.linesep))
else:
break
fobj.close()  # close() must be called; a bare fobj.close is a no-op
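# Equivalent loop using a context manager, which closes the file even if an
# exception interrupts input (illustrative alternative):
#     with open(filename, 'w') as f:
#         while True:
#             aLine = raw_input("Enter a line ('.' to quit): ")
#             if aLine == ".":
#                 break
#             f.write('%s%s' % (aLine, os.linesep))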
|
"""Api Handler tests."""
import unittest
import webapp2
from grow.pods import pods
from grow.server import main
from grow.testing import testing
class ApiHandlerTestCase(unittest.TestCase):
"""Tests for the server API Handler."""
def test_request(self):
"""Test that api requests can be completed correctly."""
dir_path = testing.create_test_pod_dir()
pod = pods.Pod(dir_path)
pod.router.add_all()
# When serving a pod, should 200.
app = main.create_wsgi_app(pod, 'localhost', 8080)
request = webapp2.Request.blank(
'/_grow/api/editor/content?pod_path=/content/pages/home.yaml')
response = request.get_response(app)
self.assertEqual(200, response.status_int)
if __name__ == '__main__':
unittest.main()
|