source_code stringlengths 45 7.24k | function_name stringlengths 1 111 | messages list |
|---|---|---|
def x(self):
    """Remove duplicate undirected edges from self.network.

    Treats (i, j) and (j, i) as the same edge and keeps only the first
    occurrence of each edge per time point. A temporary 'ij' helper
    column is used and removed again before returning.
    """
    # Canonical, order-independent key for each edge.
    pairs = zip(self.network['i'].values, self.network['j'].values)
    self.network['ij'] = [tuple(sorted(pair)) for pair in pairs]
    self.network.drop_duplicates(['ij', 't'], inplace=True)
    self.network.reset_index(inplace=True, drop=True)
    self.network.drop('ij', inplace=True, axis=1)
| drop_duplicate_ij | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self.network['ij'] = list(map(lambda x: tuple(sorted(x)), list(zip(*[\n self.network['i'].values, self.network['j'].values]))))\n self.network.drop_duplicates(['ij', 't'], inplace=True)\n self.network.reset_index(inplace=True, drop=True)\n self.network.drop('ij', inplace=True, axis=1)\n",
"role": "user"
},
{
"content": "drop_duplicate_ij",
"role": "assistant"
}
] |
def x(self):
    """Remove self-edges (rows of self.network where node i equals node j)."""
    offdiagonal = self.network['i'] != self.network['j']
    # where() NaN-masks self-edge rows; dropna() then discards them.
    self.network = self.network.where(offdiagonal).dropna()
    self.network.reset_index(inplace=True, drop=True)
| drop_diagonal | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self.network = self.network.where(self.network['i'] != self.network['j']\n ).dropna()\n self.network.reset_index(inplace=True, drop=True)\n",
"role": "user"
},
{
"content": "drop_diagonal",
"role": "assistant"
}
] |
def x(self, edgelist):
    """Add one or more edges to the temporal network.

    Parameters
    ----------
    edgelist : list
        A single edge ([i, j, t] or [i, j, t, weight]) or a list of
        such edges.
    """
    # Promote a single edge to a one-element list of edges.
    if not isinstance(edgelist[0], list):
        edgelist = [edgelist]
    teneto.utils.check_TemporalNetwork_input(edgelist, 'edgelist')
    # Column names depend on whether a per-edge weight is supplied.
    # NOTE(review): if an edge has a length other than 3 or 4, `colnames`
    # is never bound — presumably the check above rejects that case; confirm.
    if len(edgelist[0]) == 4:
        colnames = ['i', 'j', 't', 'weight']
    elif len(edgelist[0]) == 3:
        colnames = ['i', 'j', 't']
    if self.hdf5:
        # HDF5 mode: self.network holds the store's file path; append the
        # new rows directly on disk with a continuing integer index.
        with pd.HDFStore(self.network) as hdf:
            rows = hdf.get_storer('network').nrows
            hdf.append('network', pd.DataFrame(edgelist, columns=colnames,
                index=np.arange(rows, rows + len(edgelist))), format=
                'table', data_columns=True)
        # Grow the recorded (node, time) extents if the new edges exceed them.
        edgelist = np.array(edgelist)
        if np.max(edgelist[:, :2]) > self.netshape[0]:
            self.netshape[0] = np.max(edgelist[:, :2])
        if np.max(edgelist[:, 2]) > self.netshape[1]:
            self.netshape[1] = np.max(edgelist[:, 2])
    else:
        # In-memory mode: concatenate onto the existing DataFrame.
        newedges = pd.DataFrame(edgelist, columns=colnames)
        self.network = pd.concat([self.network, newedges], ignore_index=
            True, sort=True)
    self._update_network()
| add_edge | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, edgelist):\n if not isinstance(edgelist[0], list):\n edgelist = [edgelist]\n teneto.utils.check_TemporalNetwork_input(edgelist, 'edgelist')\n if len(edgelist[0]) == 4:\n colnames = ['i', 'j', 't', 'weight']\n elif len(edgelist[0]) == 3:\n colnames = ['i', 'j', 't']\n if self.hdf5:\n with pd.HDFStore(self.network) as hdf:\n rows = hdf.get_storer('network').nrows\n hdf.append('network', pd.DataFrame(edgelist, columns=colnames,\n index=np.arange(rows, rows + len(edgelist))), format=\n 'table', data_columns=True)\n edgelist = np.array(edgelist)\n if np.max(edgelist[:, :2]) > self.netshape[0]:\n self.netshape[0] = np.max(edgelist[:, :2])\n if np.max(edgelist[:, 2]) > self.netshape[1]:\n self.netshape[1] = np.max(edgelist[:, 2])\n else:\n newedges = pd.DataFrame(edgelist, columns=colnames)\n self.network = pd.concat([self.network, newedges], ignore_index=\n True, sort=True)\n self._update_network()\n",
"role": "user"
},
{
"content": "add_edge",
"role": "assistant"
}
] |
def x(self, edgelist):
    """Remove one or more edges from the temporal network.

    `edgelist` is a single [i, j, t] edge or a list of such edges.
    """
    if not isinstance(edgelist[0], list):
        edgelist = [edgelist]
    teneto.utils.check_TemporalNetwork_input(edgelist, 'edgelist')
    if self.hdf5:
        # HDF5 mode: self.network is a file path; delete matching rows on disk.
        with pd.HDFStore(self.network) as hdf:
            for e in edgelist:
                query = ('i == ' + str(e[0]) + ' & ' + 'j == ' + str(e[1]) +
                    ' & ' + 't == ' + str(e[2]))
                hdf.remove('network', query)
        print('HDF5 delete warning. This will not reduce the size of the file.'
            )
    else:
        # In-memory mode: drop the DataFrame rows matching each edge.
        for e in edgelist:
            match = (self.network['i'] == e[0]) & (self.network['j'] == e[1]
                ) & (self.network['t'] == e[2])
            self.network.drop(self.network[match].index, inplace=True)
        self.network.reset_index(inplace=True, drop=True)
    self._update_network()
| drop_edge | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, edgelist):\n if not isinstance(edgelist[0], list):\n edgelist = [edgelist]\n teneto.utils.check_TemporalNetwork_input(edgelist, 'edgelist')\n if self.hdf5:\n with pd.HDFStore(self.network) as hdf:\n for e in edgelist:\n hdf.remove('network', 'i == ' + str(e[0]) + ' & ' + 'j == ' +\n str(e[1]) + ' & ' + 't == ' + str(e[2]))\n print('HDF5 delete warning. This will not reduce the size of the file.'\n )\n else:\n for e in edgelist:\n idx = self.network[(self.network['i'] == e[0]) & (self.network[\n 'j'] == e[1]) & (self.network['t'] == e[2])].index\n self.network.drop(idx, inplace=True)\n self.network.reset_index(inplace=True, drop=True)\n self._update_network()\n",
"role": "user"
},
{
"content": "drop_edge",
"role": "assistant"
}
] |
def x(self, networkmeasure, **measureparams):
    """Compute a network measure on this temporal network.

    Parameters
    ----------
    networkmeasure : str
        Name of a function in teneto.networkmeasures.
    **measureparams
        Forwarded to that function.

    Returns
    -------
    The value returned by the chosen measure function.
    """
    available = [name for name in dir(teneto.networkmeasures)
                 if not name.startswith('__')]
    if networkmeasure not in available:
        raise ValueError(
            'Unknown network measure. Available network measures are: ' +
            ', '.join(available))
    # Resolve the measure name to its function object and call it.
    members = dict(inspect.getmembers(teneto.networkmeasures))
    return members[networkmeasure](self, **measureparams)
| calc_networkmeasure | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, networkmeasure, **measureparams):\n availablemeasures = [f for f in dir(teneto.networkmeasures) if not f.\n startswith('__')]\n if networkmeasure not in availablemeasures:\n raise ValueError(\n 'Unknown network measure. Available network measures are: ' +\n ', '.join(availablemeasures))\n funs = inspect.getmembers(teneto.networkmeasures)\n funs = {m[0]: m[1] for m in funs if not m[0].startswith('__')}\n measure = funs[networkmeasure](self, **measureparams)\n return measure\n",
"role": "user"
},
{
"content": "calc_networkmeasure",
"role": "assistant"
}
] |
def x(self, networktype, **networkparams):
    """Generate a network with teneto.generatenetwork and load it here.

    Parameters
    ----------
    networktype : str
        Name of a generator function in teneto.generatenetwork.
    **networkparams
        Forwarded to the generator function.
    """
    availabletypes = [f for f in dir(teneto.generatenetwork) if not f.
        startswith('__')]
    if networktype not in availabletypes:
        # Fixed copy-pasted message: this rejects an unknown network
        # *type*, not an unknown network measure.
        raise ValueError(
            'Unknown network type. Available networks to generate are: ' +
            ', '.join(availabletypes))
    # Resolve the generator by name and build the array representation.
    funs = inspect.getmembers(teneto.generatenetwork)
    funs = {m[0]: m[1] for m in funs if not m[0].startswith('__')}
    network = funs[networktype](**networkparams)
    self.network_from_array(network)
    # Undirected networks store each edge only once.
    if self.nettype[1] == 'u':
        self._drop_duplicate_ij()
| generatenetwork | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, networktype, **networkparams):\n availabletypes = [f for f in dir(teneto.generatenetwork) if not f.\n startswith('__')]\n if networktype not in availabletypes:\n raise ValueError(\n 'Unknown network measure. Available networks to generate are: ' +\n ', '.join(availabletypes))\n funs = inspect.getmembers(teneto.generatenetwork)\n funs = {m[0]: m[1] for m in funs if not m[0].startswith('__')}\n network = funs[networktype](**networkparams)\n self.network_from_array(network)\n if self.nettype[1] == 'u':\n self._drop_duplicate_ij()\n",
"role": "user"
},
{
"content": "generatenetwork",
"role": "assistant"
}
] |
def x(self, fname):
    """Pickle this object to `fname`, appending '.pkl' if missing."""
    if not fname.endswith('.pkl'):
        fname = fname + '.pkl'
    with open(fname, 'wb') as outfile:
        pickle.dump(self, outfile, pickle.HIGHEST_PROTOCOL)
| save_aspickle | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, fname):\n if fname[-4:] != '.pkl':\n fname += '.pkl'\n with open(fname, 'wb') as f:\n pickle.dump(self, f, pickle.HIGHEST_PROTOCOL)\n",
"role": "user"
},
{
"content": "save_aspickle",
"role": "assistant"
}
] |
def x(data, report=None):
    """Fisher z-transform (arctanh) of correlation data.

    Values just outside [-1, 1] (numerical noise) are clamped to the
    boundary first; the transform is noted in `report`.

    Returns (fisher_data, report).
    """
    report = report if report else {}
    # Clamp numerical noise just beyond the valid correlation range.
    data[data < -0.99999999999999] = -1
    data[data > 0.99999999999999] = 1
    fisher_data = 0.5 * np.log((1 + data) / (1 - data))
    report['fisher'] = {'performed': 'yes'}
    return fisher_data, report
| postpro_fisher | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(data, report=None):\n if not report:\n report = {}\n data[data < -0.99999999999999] = -1\n data[data > 0.99999999999999] = 1\n fisher_data = 0.5 * np.log((1 + data) / (1 - data))\n report['fisher'] = {}\n report['fisher']['performed'] = 'yes'\n return fisher_data, report\n",
"role": "user"
},
{
"content": "postpro_fisher",
"role": "assistant"
}
] |
def x(data, report=None):
    """Box-Cox transform each edge's time series toward normality.

    Assumes `data` is a symmetric node x node x time array — TODO confirm.
    Values are shifted so the minimum is 1 (Box-Cox requires positive
    input), transformed per upper-triangle edge, and mirrored into the
    lower triangle. If the transform degenerates, it is rolled back and
    the failure recorded in `report`.

    Returns (boxcox_data, report).
    """
    if not report:
        report = {}
    # Shift so the smallest value becomes exactly 1.
    mindata = 1 - np.nanmin(data)
    data = data + mindata
    ind = np.triu_indices(data.shape[0], k=1)
    # One (transformed_series, lambda) pair per upper-triangle edge.
    boxcox_list = np.array([sp.stats.boxcox(np.squeeze(data[ind[0][n], ind[
        1][n], :])) for n in range(0, len(ind[0]))])
    boxcox_data = np.zeros(data.shape)
    boxcox_data[ind[0], ind[1], :] = np.vstack(boxcox_list[:, 0])
    boxcox_data[ind[1], ind[0], :] = np.vstack(boxcox_list[:, 0])
    # Sanity check: z-score over time then average; NaNs here mean some
    # edge became constant (zero std) after the transform.
    bccheck = np.array(np.transpose(boxcox_data, [2, 0, 1]))
    bccheck = (bccheck - bccheck.mean(axis=0)) / bccheck.std(axis=0)
    bccheck = np.squeeze(np.mean(bccheck, axis=0))
    np.fill_diagonal(bccheck, 0)
    report['boxcox'] = {}
    report['boxcox']['performed'] = 'yes'
    report['boxcox']['lambda'] = [tuple([ind[0][n], ind[1][n], boxcox_list[
        n, -1]]) for n in range(0, len(ind[0]))]
    report['boxcox']['shift'] = mindata
    report['boxcox']['shited_to'] = 1
    if np.sum(np.isnan(bccheck)) > 0:
        # Transform failed: record why and roll back to the unshifted input.
        report['boxcox'] = {}
        report['boxcox']['performed'] = 'FAILED'
        report['boxcox']['failure_reason'] = (
            'Box cox transform is returning edges with uniform values through time. This is probabaly due to one or more outliers or a very skewed distribution. Have you corrected for sources of noise (e.g. movement)? If yes, some time-series might need additional transforms to approximate to Gaussian.'
            )
        report['boxcox']['failure_consequence'
            ] = 'Box cox transform was skipped from the postprocess pipeline.'
        boxcox_data = data - mindata
        error_msg = """TENETO WARNING: Box Cox transform problem. 
Box Cox transform not performed. 
See report for more details."""
        print(error_msg)
    return boxcox_data, report
| postpro_boxcox | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(data, report=None):\n if not report:\n report = {}\n mindata = 1 - np.nanmin(data)\n data = data + mindata\n ind = np.triu_indices(data.shape[0], k=1)\n boxcox_list = np.array([sp.stats.boxcox(np.squeeze(data[ind[0][n], ind[\n 1][n], :])) for n in range(0, len(ind[0]))])\n boxcox_data = np.zeros(data.shape)\n boxcox_data[ind[0], ind[1], :] = np.vstack(boxcox_list[:, 0])\n boxcox_data[ind[1], ind[0], :] = np.vstack(boxcox_list[:, 0])\n bccheck = np.array(np.transpose(boxcox_data, [2, 0, 1]))\n bccheck = (bccheck - bccheck.mean(axis=0)) / bccheck.std(axis=0)\n bccheck = np.squeeze(np.mean(bccheck, axis=0))\n np.fill_diagonal(bccheck, 0)\n report['boxcox'] = {}\n report['boxcox']['performed'] = 'yes'\n report['boxcox']['lambda'] = [tuple([ind[0][n], ind[1][n], boxcox_list[\n n, -1]]) for n in range(0, len(ind[0]))]\n report['boxcox']['shift'] = mindata\n report['boxcox']['shited_to'] = 1\n if np.sum(np.isnan(bccheck)) > 0:\n report['boxcox'] = {}\n report['boxcox']['performed'] = 'FAILED'\n report['boxcox']['failure_reason'] = (\n 'Box cox transform is returning edges with uniform values through time. This is probabaly due to one or more outliers or a very skewed distribution. Have you corrected for sources of noise (e.g. movement)? If yes, some time-series might need additional transforms to approximate to Gaussian.'\n )\n report['boxcox']['failure_consequence'\n ] = 'Box cox transform was skipped from the postprocess pipeline.'\n boxcox_data = data - mindata\n error_msg = \"\"\"TENETO WARNING: Box Cox transform problem. \nBox Cox transform not performed. \nSee report for more details.\"\"\"\n print(error_msg)\n return boxcox_data, report\n",
"role": "user"
},
{
"content": "postpro_boxcox",
"role": "assistant"
}
] |
def x(data, report=None):
    """Z-score each edge's time series (standardize over the time axis).

    Assumes `data` is node x node x time — TODO confirm against callers.
    Returns (standardized_data, report).
    """
    if not report:
        report = {}
    # Move time to axis 0 so mean/std broadcast per edge over time.
    data = np.transpose(data, [2, 0, 1])
    standardized_data = (data - data.mean(axis=0)) / data.std(axis=0)
    standardized_data = np.transpose(standardized_data, [1, 2, 0])
    report['standardize'] = {}
    report['standardize']['performed'] = 'yes'
    report['standardize']['method'] = 'Z-score'
    # NOTE(review): this rebinds the local `data` (still in transposed
    # layout) and the result is neither used nor returned; the diagonal
    # of `standardized_data` is NOT set here. Looks like dead code or a
    # misplaced call — confirm intent before removing.
    data = set_diagonal(data, 1)
    return standardized_data, report
| postpro_standardize | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(data, report=None):\n if not report:\n report = {}\n data = np.transpose(data, [2, 0, 1])\n standardized_data = (data - data.mean(axis=0)) / data.std(axis=0)\n standardized_data = np.transpose(standardized_data, [1, 2, 0])\n report['standardize'] = {}\n report['standardize']['performed'] = 'yes'\n report['standardize']['method'] = 'Z-score'\n data = set_diagonal(data, 1)\n return standardized_data, report\n",
"role": "user"
},
{
"content": "postpro_standardize",
"role": "assistant"
}
] |
def x(data, params):
    """Derive a temporal network (time-varying connectivity) from time series.

    Parameters
    ----------
    data : ndarray
        2D time series; orientation given by params['dimord']
        ('node,time' (default) or 'time,node').
    params : dict
        'method' is either the name of a predefined scheme ('jackknife',
        'sliding window', 'tapered sliding window', 'spatial distance',
        'mtd', with several aliases each) or a custom time-by-time weight
        matrix. Optional keys: 'dimord', 'postpro', 'report',
        'analysis_id', 'report_path', 'report_filename', 'weight-var',
        'weight-mean', plus method-specific parameters.

    Returns
    -------
    R : ndarray
        node x node x time array of weighted correlations (or of
        temporal-derivative coupling for the 'mtd' method).
    """
    report = {}
    # --- Fill in parameter defaults ---
    if 'dimord' not in params.keys():
        params['dimord'] = 'node,time'
    if 'report' not in params.keys():
        params['report'] = False
    if 'analysis_id' not in params.keys():
        params['analysis_id'] = ''
    if 'postpro' not in params.keys():
        params['postpro'] = 'no'
    if params['report'] == 'yes' or params['report'] == True:
        if 'analysis_id' not in params.keys():
            params['analysis_id'] = ''
        if 'report_path' not in params.keys():
            params['report_path'] = './report/' + params['analysis_id']
        if 'report_filename' not in params.keys():
            params['report_filename'] = 'derivation_report.html'
    # Work internally with time as the first axis.
    if params['dimord'] == 'node,time':
        data = data.transpose()
    # --- Resolve the method into a weight matrix or a coupling array ---
    if isinstance(params['method'], str):
        if params['method'] == 'jackknife':
            weights, report = _weightfun_jackknife(data.shape[0], report)
            relation = 'weight'
        elif params['method'] == 'sliding window' or params['method'
            ] == 'slidingwindow':
            weights, report = _weightfun_sliding_window(data.shape[0],
                params, report)
            relation = 'weight'
        elif params['method'] == 'tapered sliding window' or params['method'
            ] == 'taperedslidingwindow':
            weights, report = _weightfun_tapered_sliding_window(data.shape[
                0], params, report)
            relation = 'weight'
        elif params['method'] == 'distance' or params['method'
            ] == 'spatial distance' or params['method'
            ] == 'node distance' or params['method'
            ] == 'nodedistance' or params['method'] == 'spatialdistance':
            weights, report = _weightfun_spatial_distance(data, params, report)
            relation = 'weight'
        elif params['method'] == 'mtd' or params['method'
            ] == 'multiply temporal derivative' or params['method'
            ] == 'multiplytemporalderivative' or params['method'
            ] == 'temporal derivative' or params['method'
            ] == 'temporalderivative':
            R, report = _temporal_derivative(data, params, report)
            relation = 'coupling'
        else:
            raise ValueError(
                'Unrecognoized method. See derive_with_weighted_pearson documentation for predefined methods or enter own weight matrix'
                )
    else:
        try:
            weights = np.array(params['method'])
            relation = 'weight'
        except:
            raise ValueError(
                'Unrecognoized method. See documentation for predefined methods'
                )
        # BUGFIX: these sanity checks belong only to a user-supplied weight
        # matrix. Running them unconditionally (as before) raised NameError
        # for the 'mtd' method (no `weights` bound) and wrongly rejected the
        # non-square matrices produced by the sliding-window methods.
        if weights.shape[0] != weights.shape[1]:
            raise ValueError('weight matrix should be square')
        if weights.shape[0] != data.shape[0]:
            raise ValueError('weight matrix must equal number of time points')
    if relation == 'weight':
        # Weighted Pearson correlation, one node x node matrix per window.
        R = np.array([DescrStatsW(data, weights[i, :]).corrcoef for i in
            range(0, weights.shape[0])])
        R = R.transpose([1, 2, 0])
    if params['method'] == 'jackknife':
        # Leave-one-out estimates are anti-correlated with the sample.
        R = R * -1
    # Optional rescaling of the derived connectivity series.
    jc_z = 0
    if 'weight-var' in params.keys():
        R = np.transpose(R, [2, 0, 1])
        R = (R - R.mean(axis=0)) / R.std(axis=0)
        jc_z = 1
        R = R * params['weight-var']
        R = R.transpose([1, 2, 0])
    if 'weight-mean' in params.keys():
        R = np.transpose(R, [2, 0, 1])
        if jc_z == 0:
            R = (R - R.mean(axis=0)) / R.std(axis=0)
        R = R + params['weight-mean']
        R = np.transpose(R, [1, 2, 0])
    R = set_diagonal(R, 1)
    if params['postpro'] != 'no':
        R, report = postpro_pipeline(R, params['postpro'], report)
        R = set_diagonal(R, 1)
    if params['report'] == 'yes' or params['report'] == True:
        gen_report(report, params['report_path'], params['report_filename'])
    return R
| derive_temporalnetwork | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(data, params):\n report = {}\n if 'dimord' not in params.keys():\n params['dimord'] = 'node,time'\n if 'report' not in params.keys():\n params['report'] = False\n if 'analysis_id' not in params.keys():\n params['analysis_id'] = ''\n if 'postpro' not in params.keys():\n params['postpro'] = 'no'\n if params['report'] == 'yes' or params['report'] == True:\n if 'analysis_id' not in params.keys():\n params['analysis_id'] = ''\n if 'report_path' not in params.keys():\n params['report_path'] = './report/' + params['analysis_id']\n if 'report_filename' not in params.keys():\n params['report_filename'] = 'derivation_report.html'\n if params['dimord'] == 'node,time':\n data = data.transpose()\n if isinstance(params['method'], str):\n if params['method'] == 'jackknife':\n weights, report = _weightfun_jackknife(data.shape[0], report)\n relation = 'weight'\n elif params['method'] == 'sliding window' or params['method'\n ] == 'slidingwindow':\n weights, report = _weightfun_sliding_window(data.shape[0],\n params, report)\n relation = 'weight'\n elif params['method'] == 'tapered sliding window' or params['method'\n ] == 'taperedslidingwindow':\n weights, report = _weightfun_tapered_sliding_window(data.shape[\n 0], params, report)\n relation = 'weight'\n elif params['method'] == 'distance' or params['method'\n ] == 'spatial distance' or params['method'\n ] == 'node distance' or params['method'\n ] == 'nodedistance' or params['method'] == 'spatialdistance':\n weights, report = _weightfun_spatial_distance(data, params, report)\n relation = 'weight'\n elif params['method'] == 'mtd' or params['method'\n ] == 'multiply temporal derivative' or params['method'\n ] == 'multiplytemporalderivative' or params['method'\n ] == 'temporal derivative' or params['method'\n ] == 'temporalderivative':\n R, report = _temporal_derivative(data, params, report)\n relation = 'coupling'\n else:\n raise ValueError(\n 'Unrecognoized method. 
See derive_with_weighted_pearson documentation for predefined methods or enter own weight matrix'\n )\n else:\n try:\n weights = np.array(params['method'])\n relation = 'weight'\n except:\n raise ValueError(\n 'Unrecognoized method. See documentation for predefined methods'\n )\n if weights.shape[0] != weights.shape[1]:\n raise ValueError('weight matrix should be square')\n if weights.shape[0] != data.shape[0]:\n raise ValueError('weight matrix must equal number of time points')\n if relation == 'weight':\n R = np.array([DescrStatsW(data, weights[i, :]).corrcoef for i in\n range(0, weights.shape[0])])\n R = R.transpose([1, 2, 0])\n if params['method'] == 'jackknife':\n R = R * -1\n jc_z = 0\n if 'weight-var' in params.keys():\n R = np.transpose(R, [2, 0, 1])\n R = (R - R.mean(axis=0)) / R.std(axis=0)\n jc_z = 1\n R = R * params['weight-var']\n R = R.transpose([1, 2, 0])\n if 'weight-mean' in params.keys():\n R = np.transpose(R, [2, 0, 1])\n if jc_z == 0:\n R = (R - R.mean(axis=0)) / R.std(axis=0)\n R = R + params['weight-mean']\n R = np.transpose(R, [1, 2, 0])\n R = set_diagonal(R, 1)\n if params['postpro'] != 'no':\n R, report = postpro_pipeline(R, params['postpro'], report)\n R = set_diagonal(R, 1)\n if params['report'] == 'yes' or params['report'] == True:\n gen_report(report, params['report_path'], params['report_filename'])\n return R\n",
"role": "user"
},
{
"content": "derive_temporalnetwork",
"role": "assistant"
}
] |
def x(T, report):
    """Jackknife (leave-one-out) weights: T x T ones with a zero diagonal.

    Returns (weights, report) with the method noted in `report`.
    """
    # Uniform weights everywhere except the left-out time point.
    weights = np.ones([T, T]) - np.eye(T)
    report['method'] = 'jackknife'
    report['jackknife'] = ''
    return weights, report
| weightfun_jackknife | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(T, report):\n weights = np.ones([T, T])\n np.fill_diagonal(weights, 0)\n report['method'] = 'jackknife'\n report['jackknife'] = ''\n return weights, report\n",
"role": "user"
},
{
"content": "weightfun_jackknife",
"role": "assistant"
}
] |
def x(T, params, report):
    """Uniform sliding-window weights: one row per window position.

    Returns (weights, report); weights has shape
    (T + 1 - windowsize, T), each row a boxcar shifted one step right.
    """
    wsize = params['windowsize']
    # Boxcar at t=0, then rolled one step per subsequent window.
    base = np.zeros(T)
    base[:wsize] = 1
    weights = np.array([np.roll(base, shift) for shift in range(T + 1 - wsize)])
    report['method'] = 'slidingwindow'
    # Note: stores (and annotates) the params dict object itself.
    report['slidingwindow'] = params
    report['slidingwindow']['taper'] = 'untapered/uniform'
    return weights, report
| weightfun_sliding_window | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(T, params, report):\n weightat0 = np.zeros(T)\n weightat0[0:params['windowsize']] = np.ones(params['windowsize'])\n weights = np.array([np.roll(weightat0, i) for i in range(0, T + 1 -\n params['windowsize'])])\n report['method'] = 'slidingwindow'\n report['slidingwindow'] = params\n report['slidingwindow']['taper'] = 'untapered/uniform'\n return weights, report\n",
"role": "user"
},
{
"content": "weightfun_sliding_window",
"role": "assistant"
}
] |
def x(T, params, report):
    """Tapered sliding-window weights.

    The taper is the pdf of a scipy.stats distribution evaluated over
    the window, e.g. params = {'windowsize': w, 'distribution': 'norm',
    'distribution_params': [loc, scale]}. Returns (weights, report);
    weights has shape (T + 1 - windowsize, T).
    """
    # Window-centred sample points, e.g. windowsize=3 -> [-1, 0, 1].
    x = np.arange(-(params['windowsize'] - 1) / 2, params['windowsize'] / 2)
    # Look the distribution up by name instead of eval()-ing a code string
    # built from user-supplied params (same result, no code injection).
    # Assumes 'distribution_params' holds positional numeric arguments —
    # TODO confirm no caller passes 'key=value' strings.
    distfun = getattr(sps, params['distribution'])
    taper = distfun.pdf(x, *params['distribution_params'])
    # Taper at t=0, rolled one step per subsequent window.
    weightat0 = np.zeros(T)
    weightat0[0:params['windowsize']] = taper
    weights = np.array([np.roll(weightat0, i) for i in range(0, T + 1 -
        params['windowsize'])])
    report['method'] = 'slidingwindow'
    report['slidingwindow'] = params
    report['slidingwindow']['taper'] = taper
    report['slidingwindow']['taper_window'] = x
    return weights, report
| weightfun_tapered_sliding_window | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(T, params, report):\n x = np.arange(-(params['windowsize'] - 1) / 2, params['windowsize'] / 2)\n distribution_parameters = ','.join(map(str, params['distribution_params']))\n taper = eval('sps.' + params['distribution'] + '.pdf(x,' +\n distribution_parameters + ')')\n weightat0 = np.zeros(T)\n weightat0[0:params['windowsize']] = taper\n weights = np.array([np.roll(weightat0, i) for i in range(0, T + 1 -\n params['windowsize'])])\n report['method'] = 'slidingwindow'\n report['slidingwindow'] = params\n report['slidingwindow']['taper'] = taper\n report['slidingwindow']['taper_window'] = x\n return weights, report\n",
"role": "user"
},
{
"content": "weightfun_tapered_sliding_window",
"role": "assistant"
}
] |
def x(data, params, report):
    """Weights from inverse pairwise distance between node time series.

    Distances are inverted (closer nodes weigh more), min-max scaled to
    [0, 1], and the diagonal is set to 1. Returns (weights, report).
    """
    nnodes = data.shape[0]
    distance = getDistanceFunction(params['distance'])
    weights = np.array([distance(data[u, :], data[v, :])
                        for u in np.arange(0, nnodes)
                        for v in np.arange(0, nnodes)])
    weights = np.reshape(weights, [nnodes, nnodes])
    # Exclude self-distances from the scaling below.
    np.fill_diagonal(weights, np.nan)
    weights = 1 / weights
    wmin = np.nanmin(weights)
    wmax = np.nanmax(weights)
    weights = (weights - wmin) / (wmax - wmin)
    np.fill_diagonal(weights, 1)
    return weights, report
| weightfun_spatial_distance | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(data, params, report):\n distance = getDistanceFunction(params['distance'])\n weights = np.array([distance(data[n, :], data[t, :]) for n in np.arange\n (0, data.shape[0]) for t in np.arange(0, data.shape[0])])\n weights = np.reshape(weights, [data.shape[0], data.shape[0]])\n np.fill_diagonal(weights, np.nan)\n weights = 1 / weights\n weights = (weights - np.nanmin(weights)) / (np.nanmax(weights) - np.\n nanmin(weights))\n np.fill_diagonal(weights, 1)\n return weights, report\n",
"role": "user"
},
{
"content": "weightfun_spatial_distance",
"role": "assistant"
}
] |
def x(data, params, report):
    """Multiply-temporal-derivatives (MTD) coupling of node time series.

    Assumes `data` is time x node — TODO confirm against callers. The
    first temporal derivative of each node is z-normalized, pairwise
    products form the coupling series, and a sliding-window mean of
    width params['windowsize'] smooths it.

    Returns (coupling_windowed, report); coupling_windowed is
    node x node x window. Note: the incoming `report` is discarded and
    rebuilt.
    """
    report = {}
    # First temporal derivative, normalized per node.
    tdat = data[1:, :] - data[:-1, :]
    tdat = tdat / np.std(tdat, axis=0)
    # Pairwise products of derivatives -> raw coupling time series.
    coupling = np.array([(tdat[:, i] * tdat[:, j]) for i in np.arange(0,
        tdat.shape[1]) for j in np.arange(0, tdat.shape[1])])
    coupling = np.reshape(coupling, [tdat.shape[1], tdat.shape[1], tdat.
        shape[0]])
    # Sliding-window average along time via a zero-copy strided view.
    shape = coupling.shape[:-1] + (coupling.shape[-1] - params['windowsize'
        ] + 1, params['windowsize'])
    strides = coupling.strides + (coupling.strides[-1],)
    coupling_windowed = np.mean(np.lib.stride_tricks.as_strided(coupling,
        shape=shape, strides=strides), -1)
    report = {}
    report['method'] = 'temporalderivative'
    report['temporalderivative'] = {}
    report['temporalderivative']['windowsize'] = params['windowsize']
    return coupling_windowed, report
| temporal_derivative | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(data, params, report):\n report = {}\n tdat = data[1:, :] - data[:-1, :]\n tdat = tdat / np.std(tdat, axis=0)\n coupling = np.array([(tdat[:, i] * tdat[:, j]) for i in np.arange(0,\n tdat.shape[1]) for j in np.arange(0, tdat.shape[1])])\n coupling = np.reshape(coupling, [tdat.shape[1], tdat.shape[1], tdat.\n shape[0]])\n shape = coupling.shape[:-1] + (coupling.shape[-1] - params['windowsize'\n ] + 1, params['windowsize'])\n strides = coupling.strides + (coupling.strides[-1],)\n coupling_windowed = np.mean(np.lib.stride_tricks.as_strided(coupling,\n shape=shape, strides=strides), -1)\n report = {}\n report['method'] = 'temporalderivative'\n report['temporalderivative'] = {}\n report['temporalderivative']['windowsize'] = params['windowsize']\n return coupling_windowed, report\n",
"role": "user"
},
{
"content": "temporal_derivative",
"role": "assistant"
}
] |
def x(netin, level, sign='pos', axis='time'):
    """Binarize a temporal network, keeping the top fraction of values.

    Parameters
    ----------
    netin : array, dict or TemporalNetwork
        Network in graphlet ('G'), contact ('C') or TemporalNetwork
        ('TO') form.
    level : float
        Fraction (0-1) of values to set to 1.
    sign : str
        'pos', 'neg' or 'both' — which end of the value range survives.
    axis : str
        'time' (threshold each edge over time) or 'graphlet' (threshold
        each time point over edges; undirected input only).
    """
    netin, netinfo = process_input(netin, ['C', 'G', 'TO'])
    netin = set_diagonal(netin, 0)
    # Per-graphlet thresholding on undirected nets works on the
    # vectorized upper triangle (time x edge) instead of the full array.
    if axis == 'graphlet' and netinfo['nettype'][-1] == 'u':
        triu = np.triu_indices(netinfo['netshape'][0], k=1)
        netin = netin[triu[0], triu[1], :]
        netin = netin.transpose()
    # Rank values so the strongest (by the chosen sign) come last.
    if sign == 'both':
        net_sorted = np.argsort(np.abs(netin), axis=-1)
    elif sign == 'pos':
        net_sorted = np.argsort(netin, axis=-1)
    elif sign == 'neg':
        net_sorted = np.argsort(-1 * netin, axis=-1)
    else:
        raise ValueError('Unknown value for parameter: sign')
    netout = np.zeros(netinfo['netshape'])
    if axis == 'time':
        # Keep the top `level` fraction of time points for each (i, j).
        for i in range(netinfo['netshape'][0]):
            for j in range(netinfo['netshape'][1]):
                netout[i, j, net_sorted[i, j, -int(round(net_sorted.shape[-
                    1]) * level):]] = 1
    elif axis == 'graphlet':
        # Keep the top `level` fraction of edges at each time point, then
        # mirror the result back into both triangles.
        # NOTE(review): `triu` is only bound when the input is undirected;
        # a directed input with axis='graphlet' would raise NameError
        # below — confirm upstream guarantees undirected here.
        netout_tmp = np.zeros(netin.shape)
        for i in range(netout_tmp.shape[0]):
            netout_tmp[i, net_sorted[i, -int(round(net_sorted.shape[-1]) *
                level):]] = 1
        netout_tmp = netout_tmp.transpose()
        netout[triu[0], triu[1], :] = netout_tmp
        netout[triu[1], triu[0], :] = netout_tmp
    netout = set_diagonal(netout, 0)
    # Contact-dict input is returned as a binary contact dict.
    if netinfo['inputtype'] == 'C':
        netinfo['nettype'] = 'b' + netinfo['nettype'][1]
        netout = graphlet2contact(netout, netinfo)
        netout.pop('inputtype')
        netout.pop('values')
        netout['diagonal'] = 0
    return netout
| binarize_percent | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(netin, level, sign='pos', axis='time'):\n netin, netinfo = process_input(netin, ['C', 'G', 'TO'])\n netin = set_diagonal(netin, 0)\n if axis == 'graphlet' and netinfo['nettype'][-1] == 'u':\n triu = np.triu_indices(netinfo['netshape'][0], k=1)\n netin = netin[triu[0], triu[1], :]\n netin = netin.transpose()\n if sign == 'both':\n net_sorted = np.argsort(np.abs(netin), axis=-1)\n elif sign == 'pos':\n net_sorted = np.argsort(netin, axis=-1)\n elif sign == 'neg':\n net_sorted = np.argsort(-1 * netin, axis=-1)\n else:\n raise ValueError('Unknown value for parameter: sign')\n netout = np.zeros(netinfo['netshape'])\n if axis == 'time':\n for i in range(netinfo['netshape'][0]):\n for j in range(netinfo['netshape'][1]):\n netout[i, j, net_sorted[i, j, -int(round(net_sorted.shape[-\n 1]) * level):]] = 1\n elif axis == 'graphlet':\n netout_tmp = np.zeros(netin.shape)\n for i in range(netout_tmp.shape[0]):\n netout_tmp[i, net_sorted[i, -int(round(net_sorted.shape[-1]) *\n level):]] = 1\n netout_tmp = netout_tmp.transpose()\n netout[triu[0], triu[1], :] = netout_tmp\n netout[triu[1], triu[0], :] = netout_tmp\n netout = set_diagonal(netout, 0)\n if netinfo['inputtype'] == 'C':\n netinfo['nettype'] = 'b' + netinfo['nettype'][1]\n netout = graphlet2contact(netout, netinfo)\n netout.pop('inputtype')\n netout.pop('values')\n netout['diagonal'] = 0\n return netout\n",
"role": "user"
},
{
"content": "binarize_percent",
"role": "assistant"
}
] |
def x(netin, level, sign='pos', axis='time'):
    """Binarize a temporal network via RDP trajectory simplification.

    Each edge's time series is simplified with the Ramer-Douglas-Peucker
    algorithm (tolerance `level`); the retained trajectory points become
    the binary contacts. `axis` is unused here.
    """
    netin, netinfo = process_input(netin, ['C', 'G', 'TO'])
    trajectory = rdp(netin, level)
    contacts = []
    # Build (i, j, t) contact rows from each edge's retained points.
    for n in range(trajectory['index'].shape[0]):
        if sign == 'pos':
            sel = trajectory['trajectory_points'][n][trajectory[
                'trajectory'][n][trajectory['trajectory_points'][n]] > 0]
        elif sign == 'neg':
            sel = trajectory['trajectory_points'][n][trajectory[
                'trajectory'][n][trajectory['trajectory_points'][n]] < 0]
        else:
            # NOTE(review): unlike the branches above this takes the whole
            # per-edge list (no [n] index) — looks inconsistent; confirm
            # before relying on sign values other than 'pos'/'neg'.
            sel = trajectory['trajectory_points']
        i_ind = np.repeat(trajectory['index'][n, 0], len(sel))
        j_ind = np.repeat(trajectory['index'][n, 1], len(sel))
        contacts.append(np.array([i_ind, j_ind, sel]).transpose())
    contacts = np.concatenate(contacts)
    # Assemble a binary, discrete-time contact representation.
    netout = dict(netinfo)
    netout['contacts'] = contacts
    netout['nettype'] = 'b' + netout['nettype'][1]
    netout['dimord'] = 'node,node,time'
    netout['timetype'] = 'discrete'
    netout['diagonal'] = 0
    # Graphlet input is converted back to a graphlet array.
    if netinfo['inputtype'] == 'G':
        netout = contact2graphlet(netout)
    else:
        netout.pop('inputtype')
    return netout
| binarize_rdp | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(netin, level, sign='pos', axis='time'):\n netin, netinfo = process_input(netin, ['C', 'G', 'TO'])\n trajectory = rdp(netin, level)\n contacts = []\n for n in range(trajectory['index'].shape[0]):\n if sign == 'pos':\n sel = trajectory['trajectory_points'][n][trajectory[\n 'trajectory'][n][trajectory['trajectory_points'][n]] > 0]\n elif sign == 'neg':\n sel = trajectory['trajectory_points'][n][trajectory[\n 'trajectory'][n][trajectory['trajectory_points'][n]] < 0]\n else:\n sel = trajectory['trajectory_points']\n i_ind = np.repeat(trajectory['index'][n, 0], len(sel))\n j_ind = np.repeat(trajectory['index'][n, 1], len(sel))\n contacts.append(np.array([i_ind, j_ind, sel]).transpose())\n contacts = np.concatenate(contacts)\n netout = dict(netinfo)\n netout['contacts'] = contacts\n netout['nettype'] = 'b' + netout['nettype'][1]\n netout['dimord'] = 'node,node,time'\n netout['timetype'] = 'discrete'\n netout['diagonal'] = 0\n if netinfo['inputtype'] == 'G':\n netout = contact2graphlet(netout)\n else:\n netout.pop('inputtype')\n return netout\n",
"role": "user"
},
{
"content": "binarize_rdp",
"role": "assistant"
}
] |
def x(netin, threshold_type, threshold_level, sign='pos', axis='time'):
    """Binarize a temporal network with the chosen thresholding scheme.

    Parameters
    ----------
    netin : array, dict or TemporalNetwork
        Temporal network to binarize.
    threshold_type : str
        'percent', 'magnitude' or 'rdp'.
    threshold_level : float
        Meaning depends on threshold_type (fraction, cutoff, or RDP
        tolerance).
    sign, axis : str
        Forwarded to the specific binarization function.
    """
    if threshold_type == 'percent':
        return binarize_percent(netin, threshold_level, sign, axis)
    if threshold_type == 'magnitude':
        return binarize_magnitude(netin, threshold_level, sign)
    if threshold_type == 'rdp':
        return binarize_rdp(netin, threshold_level, sign, axis)
    raise ValueError('Unknown value to parameter: threshold_type.')
| binarize | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(netin, threshold_type, threshold_level, sign='pos', axis='time'):\n if threshold_type == 'percent':\n netout = binarize_percent(netin, threshold_level, sign, axis)\n elif threshold_type == 'magnitude':\n netout = binarize_magnitude(netin, threshold_level, sign)\n elif threshold_type == 'rdp':\n netout = binarize_rdp(netin, threshold_level, sign, axis)\n else:\n raise ValueError('Unknown value to parameter: threshold_type.')\n return netout\n",
"role": "user"
},
{
"content": "binarize",
"role": "assistant"
}
] |
def x(netIn, allowedformats, outputformat='G'):
inputtype = checkInput(netIn)
if inputtype == 'TN' and 'TN' in allowedformats and outputformat != 'TN':
G = netIn.df_to_array()
netInfo = {'nettype': netIn.nettype, 'netshape': netIn.netshape}
elif inputtype == 'TN' and 'TN' in allowedformats and outputformat == 'TN':
TN = netIn
elif inputtype == 'C' and 'C' in allowedformats and outputformat == 'G':
G = contact2graphlet(netIn)
netInfo = dict(netIn)
netInfo.pop('contacts')
elif inputtype == 'C' and 'C' in allowedformats and outputformat == 'TN':
TN = TemporalNetwork(from_dict=netIn)
elif inputtype == 'G' and 'G' in allowedformats and outputformat == 'TN':
TN = TemporalNetwork(from_array=netIn)
elif inputtype == 'G' and 'G' in allowedformats:
netInfo = {}
netInfo['netshape'] = netIn.shape
netInfo['nettype'] = gen_nettype(netIn)
G = netIn
elif inputtype == 'C' and outputformat == 'C':
pass
else:
raise ValueError('Input invalid.')
if outputformat == 'TN' and not isinstance(TN.network, str):
TN.network['i'] = TN.network['i'].astype(int)
TN.network['j'] = TN.network['j'].astype(int)
TN.network['t'] = TN.network['t'].astype(int)
if outputformat == 'C' or outputformat == 'G':
netInfo['inputtype'] = inputtype
if inputtype != 'C' and outputformat == 'C':
C = graphlet2contact(G, netInfo)
if outputformat == 'G':
return G, netInfo
elif outputformat == 'C':
return C
elif outputformat == 'TN':
return TN
| process_input | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(netIn, allowedformats, outputformat='G'):\n inputtype = checkInput(netIn)\n if inputtype == 'TN' and 'TN' in allowedformats and outputformat != 'TN':\n G = netIn.df_to_array()\n netInfo = {'nettype': netIn.nettype, 'netshape': netIn.netshape}\n elif inputtype == 'TN' and 'TN' in allowedformats and outputformat == 'TN':\n TN = netIn\n elif inputtype == 'C' and 'C' in allowedformats and outputformat == 'G':\n G = contact2graphlet(netIn)\n netInfo = dict(netIn)\n netInfo.pop('contacts')\n elif inputtype == 'C' and 'C' in allowedformats and outputformat == 'TN':\n TN = TemporalNetwork(from_dict=netIn)\n elif inputtype == 'G' and 'G' in allowedformats and outputformat == 'TN':\n TN = TemporalNetwork(from_array=netIn)\n elif inputtype == 'G' and 'G' in allowedformats:\n netInfo = {}\n netInfo['netshape'] = netIn.shape\n netInfo['nettype'] = gen_nettype(netIn)\n G = netIn\n elif inputtype == 'C' and outputformat == 'C':\n pass\n else:\n raise ValueError('Input invalid.')\n if outputformat == 'TN' and not isinstance(TN.network, str):\n TN.network['i'] = TN.network['i'].astype(int)\n TN.network['j'] = TN.network['j'].astype(int)\n TN.network['t'] = TN.network['t'].astype(int)\n if outputformat == 'C' or outputformat == 'G':\n netInfo['inputtype'] = inputtype\n if inputtype != 'C' and outputformat == 'C':\n C = graphlet2contact(G, netInfo)\n if outputformat == 'G':\n return G, netInfo\n elif outputformat == 'C':\n return C\n elif outputformat == 'TN':\n return TN\n",
"role": "user"
},
{
"content": "process_input",
"role": "assistant"
}
] |
def x(communityID):
communityID = np.array(communityID)
cid_shape = communityID.shape
if len(cid_shape) > 1:
communityID = communityID.flatten()
new_communityID = np.zeros(len(communityID))
for i, n in enumerate(np.unique(communityID)):
new_communityID[communityID == n] = i
if len(cid_shape) > 1:
new_communityID = new_communityID.reshape(cid_shape)
return new_communityID
| clean_community_indexes | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(communityID):\n communityID = np.array(communityID)\n cid_shape = communityID.shape\n if len(cid_shape) > 1:\n communityID = communityID.flatten()\n new_communityID = np.zeros(len(communityID))\n for i, n in enumerate(np.unique(communityID)):\n new_communityID[communityID == n] = i\n if len(cid_shape) > 1:\n new_communityID = new_communityID.reshape(cid_shape)\n return new_communityID\n",
"role": "user"
},
{
"content": "clean_community_indexes",
"role": "assistant"
}
] |
def x(C):
d = collections.OrderedDict()
for c in C['contacts']:
ct = tuple(c)
if ct in d:
d[ct] += 1
else:
d[ct] = 1
new_contacts = []
new_values = []
for key, value in d.items():
new_values.append(value)
new_contacts.append(key)
C_out = C
C_out['contacts'] = new_contacts
C_out['values'] = new_values
return C_out
| multiple_contacts_get_values | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(C):\n d = collections.OrderedDict()\n for c in C['contacts']:\n ct = tuple(c)\n if ct in d:\n d[ct] += 1\n else:\n d[ct] = 1\n new_contacts = []\n new_values = []\n for key, value in d.items():\n new_values.append(value)\n new_contacts.append(key)\n C_out = C\n C_out['contacts'] = new_contacts\n C_out['values'] = new_values\n return C_out\n",
"role": "user"
},
{
"content": "multiple_contacts_get_values",
"role": "assistant"
}
] |
def x(df, netshape, nettype):
if len(df) > 0:
idx = np.array(list(map(list, df.values)))
G = np.zeros([netshape[0], netshape[0], netshape[1]])
if idx.shape[1] == 3:
if nettype[-1] == 'u':
idx = np.vstack([idx, idx[:, [1, 0, 2]]])
idx = idx.astype(int)
G[idx[:, 0], idx[:, 1], idx[:, 2]] = 1
elif idx.shape[1] == 4:
if nettype[-1] == 'u':
idx = np.vstack([idx, idx[:, [1, 0, 2, 3]]])
weights = idx[:, 3]
idx = np.array(idx[:, :3], dtype=int)
G[idx[:, 0], idx[:, 1], idx[:, 2]] = weights
else:
G = np.zeros([netshape[0], netshape[0], netshape[1]])
return G
| df_to_array | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(df, netshape, nettype):\n if len(df) > 0:\n idx = np.array(list(map(list, df.values)))\n G = np.zeros([netshape[0], netshape[0], netshape[1]])\n if idx.shape[1] == 3:\n if nettype[-1] == 'u':\n idx = np.vstack([idx, idx[:, [1, 0, 2]]])\n idx = idx.astype(int)\n G[idx[:, 0], idx[:, 1], idx[:, 2]] = 1\n elif idx.shape[1] == 4:\n if nettype[-1] == 'u':\n idx = np.vstack([idx, idx[:, [1, 0, 2, 3]]])\n weights = idx[:, 3]\n idx = np.array(idx[:, :3], dtype=int)\n G[idx[:, 0], idx[:, 1], idx[:, 2]] = weights\n else:\n G = np.zeros([netshape[0], netshape[0], netshape[1]])\n return G\n",
"role": "user"
},
{
"content": "df_to_array",
"role": "assistant"
}
] |
def x(distance_func_name, netinfo):
if distance_func_name == 'default' and netinfo['nettype'][0] == 'b':
print(
'Default distance funciton specified. As network is binary, using Hamming'
)
distance_func_name = 'hamming'
elif distance_func_name == 'default' and netinfo['nettype'][0] == 'w':
distance_func_name = 'euclidean'
print(
'Default distance funciton specified. As network is weighted, using Euclidean'
)
return distance_func_name
| check_distance_funciton_input | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(distance_func_name, netinfo):\n if distance_func_name == 'default' and netinfo['nettype'][0] == 'b':\n print(\n 'Default distance funciton specified. As network is binary, using Hamming'\n )\n distance_func_name = 'hamming'\n elif distance_func_name == 'default' and netinfo['nettype'][0] == 'w':\n distance_func_name = 'euclidean'\n print(\n 'Default distance funciton specified. As network is weighted, using Euclidean'\n )\n return distance_func_name\n",
"role": "user"
},
{
"content": "check_distance_funciton_input",
"role": "assistant"
}
] |
def x(parcellation_name):
path = tenetopath[0] + '/data/parcellation/' + parcellation_name + '.csv'
parc = np.loadtxt(path, skiprows=1, delimiter=',', usecols=[1, 2, 3])
return parc
| load_parcellation_coords | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(parcellation_name):\n path = tenetopath[0] + '/data/parcellation/' + parcellation_name + '.csv'\n parc = np.loadtxt(path, skiprows=1, delimiter=',', usecols=[1, 2, 3])\n return parc\n",
"role": "user"
},
{
"content": "load_parcellation_coords",
"role": "assistant"
}
] |
def x(data_path, parcellation, parc_type=None, parc_params=None):
if isinstance(parcellation, str):
parcin = ''
if '+' in parcellation:
parcin = parcellation
parcellation = parcellation.split('+')[0]
if '+OH' in parcin:
subcortical = True
else:
subcortical = None
if '+SUIT' in parcin:
cerebellar = True
else:
cerebellar = None
if not parc_type or not parc_params:
path = tenetopath[0] + '/data/parcellation_defaults/defaults.json'
with open(path) as data_file:
defaults = json.load(data_file)
if not parc_type:
parc_type = defaults[parcellation]['type']
print('Using default parcellation type')
if not parc_params:
parc_params = defaults[parcellation]['params']
print('Using default parameters')
if parc_type == 'sphere':
parcellation = load_parcellation_coords(parcellation)
seed = NiftiSpheresMasker(np.array(parcellation), **parc_params)
data = seed.fit_transform(data_path)
elif parc_type == 'region':
path = tenetopath[0] + '/data/parcellation/' + parcellation + '.nii.gz'
region = NiftiLabelsMasker(path, **parc_params)
data = region.fit_transform(data_path)
else:
raise ValueError('Unknown parc_type specified')
if subcortical:
subatlas = fetch_atlas_harvard_oxford('sub-maxprob-thr0-2mm')['maps']
region = NiftiLabelsMasker(subatlas, **parc_params)
data_sub = region.fit_transform(data_path)
data = np.hstack([data, data_sub])
if cerebellar:
path = (tenetopath[0] +
'/data/parcellation/Cerebellum-SUIT_space-MNI152NLin2009cAsym.nii.gz'
)
region = NiftiLabelsMasker(path, **parc_params)
data_cerebellar = region.fit_transform(data_path)
data = np.hstack([data, data_cerebellar])
return data
| make_parcellation | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(data_path, parcellation, parc_type=None, parc_params=None):\n if isinstance(parcellation, str):\n parcin = ''\n if '+' in parcellation:\n parcin = parcellation\n parcellation = parcellation.split('+')[0]\n if '+OH' in parcin:\n subcortical = True\n else:\n subcortical = None\n if '+SUIT' in parcin:\n cerebellar = True\n else:\n cerebellar = None\n if not parc_type or not parc_params:\n path = tenetopath[0] + '/data/parcellation_defaults/defaults.json'\n with open(path) as data_file:\n defaults = json.load(data_file)\n if not parc_type:\n parc_type = defaults[parcellation]['type']\n print('Using default parcellation type')\n if not parc_params:\n parc_params = defaults[parcellation]['params']\n print('Using default parameters')\n if parc_type == 'sphere':\n parcellation = load_parcellation_coords(parcellation)\n seed = NiftiSpheresMasker(np.array(parcellation), **parc_params)\n data = seed.fit_transform(data_path)\n elif parc_type == 'region':\n path = tenetopath[0] + '/data/parcellation/' + parcellation + '.nii.gz'\n region = NiftiLabelsMasker(path, **parc_params)\n data = region.fit_transform(data_path)\n else:\n raise ValueError('Unknown parc_type specified')\n if subcortical:\n subatlas = fetch_atlas_harvard_oxford('sub-maxprob-thr0-2mm')['maps']\n region = NiftiLabelsMasker(subatlas, **parc_params)\n data_sub = region.fit_transform(data_path)\n data = np.hstack([data, data_sub])\n if cerebellar:\n path = (tenetopath[0] +\n '/data/parcellation/Cerebellum-SUIT_space-MNI152NLin2009cAsym.nii.gz'\n )\n region = NiftiLabelsMasker(path, **parc_params)\n data_cerebellar = region.fit_transform(data_path)\n data = np.hstack([data, data_cerebellar])\n return data\n",
"role": "user"
},
{
"content": "make_parcellation",
"role": "assistant"
}
] |
def x(start, stop, N):
steps = 1.0 / (N - 1) * (stop - start)
if np.isscalar(steps):
return steps * np.arange(N) + start
else:
return steps[:, None] * np.arange(N) + start[:, None]
| create_traj_ranges | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(start, stop, N):\n steps = 1.0 / (N - 1) * (stop - start)\n if np.isscalar(steps):\n return steps * np.arange(N) + start\n else:\n return steps[:, None] * np.arange(N) + start[:, None]\n",
"role": "user"
},
{
"content": "create_traj_ranges",
"role": "assistant"
}
] |
def x(measure, calc=None, community=None):
if not calc:
calc = ''
else:
calc = '_' + calc
if not community:
community = ''
else:
community = 'community'
if 'community' in calc and 'community' in community:
community = ''
if calc == 'community_avg' or calc == 'community_pairs':
community = ''
dimord_dict = {'temporal_closeness_centrality': 'node',
'temporal_degree_centrality': 'node',
'temporal_degree_centralit_avg': 'node',
'temporal_degree_centrality_time': 'node,time',
'temporal_efficiency': 'global', 'temporal_efficiency_global':
'global', 'temporal_efficiency_node': 'node',
'temporal_efficiency_to': 'node', 'sid_global': 'global,time',
'community_pairs': 'community,community,time', 'community_avg':
'community,time', 'sid': 'community,community,time',
'reachability_latency_global': 'global', 'reachability_latency':
'global', 'reachability_latency_node': 'node', 'fluctuability':
'node', 'fluctuability_global': 'global', 'bursty_coeff':
'edge,edge', 'bursty_coeff_edge': 'edge,edge', 'bursty_coeff_node':
'node', 'bursty_coeff_meanEdgePerNode': 'node', 'volatility_global':
'time'}
if measure + calc + community in dimord_dict:
return dimord_dict[measure + calc + community]
else:
print('WARNINGL: get_dimord() returned unknown dimension labels')
return 'unknown'
| get_dimord | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(measure, calc=None, community=None):\n if not calc:\n calc = ''\n else:\n calc = '_' + calc\n if not community:\n community = ''\n else:\n community = 'community'\n if 'community' in calc and 'community' in community:\n community = ''\n if calc == 'community_avg' or calc == 'community_pairs':\n community = ''\n dimord_dict = {'temporal_closeness_centrality': 'node',\n 'temporal_degree_centrality': 'node',\n 'temporal_degree_centralit_avg': 'node',\n 'temporal_degree_centrality_time': 'node,time',\n 'temporal_efficiency': 'global', 'temporal_efficiency_global':\n 'global', 'temporal_efficiency_node': 'node',\n 'temporal_efficiency_to': 'node', 'sid_global': 'global,time',\n 'community_pairs': 'community,community,time', 'community_avg':\n 'community,time', 'sid': 'community,community,time',\n 'reachability_latency_global': 'global', 'reachability_latency':\n 'global', 'reachability_latency_node': 'node', 'fluctuability':\n 'node', 'fluctuability_global': 'global', 'bursty_coeff':\n 'edge,edge', 'bursty_coeff_edge': 'edge,edge', 'bursty_coeff_node':\n 'node', 'bursty_coeff_meanEdgePerNode': 'node', 'volatility_global':\n 'time'}\n if measure + calc + community in dimord_dict:\n return dimord_dict[measure + calc + community]\n else:\n print('WARNINGL: get_dimord() returned unknown dimension labels')\n return 'unknown'\n",
"role": "user"
},
{
"content": "get_dimord",
"role": "assistant"
}
] |
def x(tnet, i=None, j=None, t=None, ij=None, logic='and', copy=False,
asarray=False):
if isinstance(tnet, pd.DataFrame):
network = tnet
hdf5 = False
elif isinstance(tnet, object):
network = tnet.network
hdf5 = tnet.hdf5
if ij is not None and (i is not None or j is not None):
raise ValueError('ij cannoed be specifed along with i or j')
if i is not None and not isinstance(i, list):
i = [i]
if j is not None and not isinstance(j, list):
j = [j]
if t is not None and not isinstance(t, list):
t = [t]
if ij is not None and not isinstance(ij, list):
ij = [ij]
if hdf5:
if (i is not None and j is not None and t is not None and logic ==
'and'):
isinstr = 'i in ' + str(i) + ' & ' + 'j in ' + str(j
) + ' & ' + 't in ' + str(t)
elif ij is not None and t is not None and logic == 'and':
isinstr = '(i in ' + str(ij) + ' | ' + 'j in ' + str(ij
) + ') & ' + 't in ' + str(t)
elif ij is not None and t is not None and logic == 'or':
isinstr = 'i in ' + str(ij) + ' | ' + 'j in ' + str(ij
) + ' | ' + 't in ' + str(t)
elif i is not None and j is not None and logic == 'and':
isinstr = 'i in ' + str(i) + ' & ' + 'j in ' + str(j)
elif i is not None and t is not None and logic == 'and':
isinstr = 'i in ' + str(i) + ' & ' + 't in ' + str(t)
elif j is not None and t is not None and logic == 'and':
isinstr = 'j in ' + str(j) + ' & ' + 't in ' + str(t)
elif i is not None and j is not None and t is not None and logic == 'or':
isinstr = 'i in ' + str(i) + ' | ' + 'j in ' + str(j
) + ' | ' + 't in ' + str(t)
elif i is not None and j is not None and logic == 'or':
isinstr = 'i in ' + str(i) + ' | ' + 'j in ' + str(j)
elif i is not None and t is not None and logic == 'or':
isinstr = 'i in ' + str(i) + ' | ' + 't in ' + str(t)
elif j is not None and t is not None and logic == 'or':
isinstr = 'j in ' + str(j) + ' | ' + 't in ' + str(t)
elif i is not None:
isinstr = 'i in ' + str(i)
elif j is not None:
isinstr = 'j in ' + str(j)
elif t is not None:
isinstr = 't in ' + str(t)
elif ij is not None:
isinstr = 'i in ' + str(ij) + ' | ' + 'j in ' + str(ij)
df = pd.read_hdf(network, where=isinstr)
else:
if (i is not None and j is not None and t is not None and logic ==
'and'):
df = network[network['i'].isin(i) & network['j'].isin(j) &
network['t'].isin(t)]
elif ij is not None and t is not None and logic == 'and':
df = network[(network['i'].isin(ij) | network['j'].isin(ij)) &
network['t'].isin(t)]
elif ij is not None and t is not None and logic == 'or':
df = network[network['i'].isin(ij) | network['j'].isin(ij) |
network['t'].isin(t)]
elif i is not None and j is not None and logic == 'and':
df = network[network['i'].isin(i) & network['j'].isin(j)]
elif i is not None and t is not None and logic == 'and':
df = network[network['i'].isin(i) & network['t'].isin(t)]
elif j is not None and t is not None and logic == 'and':
df = network[network['j'].isin(j) & network['t'].isin(t)]
elif i is not None and j is not None and t is not None and logic == 'or':
df = network[network['i'].isin(i) | network['j'].isin(j) |
network['t'].isin(t)]
elif i is not None and j is not None and logic == 'or':
df = network[network['i'].isin(i) | network['j'].isin(j)]
elif i is not None and t is not None and logic == 'or':
df = network[network['i'].isin(i) | network['t'].isin(t)]
elif j is not None and t is not None and logic == 'or':
df = network[network['j'].isin(j) | network['t'].isin(t)]
elif i is not None:
df = network[network['i'].isin(i)]
elif j is not None:
df = network[network['j'].isin(j)]
elif t is not None:
df = network[network['t'].isin(t)]
elif ij is not None:
df = network[network['i'].isin(ij) | network['j'].isin(ij)]
if copy:
df = df.copy()
if asarray:
df = df.values
return df
| get_network_when | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(tnet, i=None, j=None, t=None, ij=None, logic='and', copy=False,\n asarray=False):\n if isinstance(tnet, pd.DataFrame):\n network = tnet\n hdf5 = False\n elif isinstance(tnet, object):\n network = tnet.network\n hdf5 = tnet.hdf5\n if ij is not None and (i is not None or j is not None):\n raise ValueError('ij cannoed be specifed along with i or j')\n if i is not None and not isinstance(i, list):\n i = [i]\n if j is not None and not isinstance(j, list):\n j = [j]\n if t is not None and not isinstance(t, list):\n t = [t]\n if ij is not None and not isinstance(ij, list):\n ij = [ij]\n if hdf5:\n if (i is not None and j is not None and t is not None and logic ==\n 'and'):\n isinstr = 'i in ' + str(i) + ' & ' + 'j in ' + str(j\n ) + ' & ' + 't in ' + str(t)\n elif ij is not None and t is not None and logic == 'and':\n isinstr = '(i in ' + str(ij) + ' | ' + 'j in ' + str(ij\n ) + ') & ' + 't in ' + str(t)\n elif ij is not None and t is not None and logic == 'or':\n isinstr = 'i in ' + str(ij) + ' | ' + 'j in ' + str(ij\n ) + ' | ' + 't in ' + str(t)\n elif i is not None and j is not None and logic == 'and':\n isinstr = 'i in ' + str(i) + ' & ' + 'j in ' + str(j)\n elif i is not None and t is not None and logic == 'and':\n isinstr = 'i in ' + str(i) + ' & ' + 't in ' + str(t)\n elif j is not None and t is not None and logic == 'and':\n isinstr = 'j in ' + str(j) + ' & ' + 't in ' + str(t)\n elif i is not None and j is not None and t is not None and logic == 'or':\n isinstr = 'i in ' + str(i) + ' | ' + 'j in ' + str(j\n ) + ' | ' + 't in ' + str(t)\n elif i is not None and j is not None and logic == 'or':\n isinstr = 'i in ' + str(i) + ' | ' + 'j in ' + str(j)\n elif i is not None and t is not None and logic == 'or':\n isinstr = 'i in ' + str(i) + ' | ' + 't in ' + str(t)\n elif j is not None and t is not None and logic == 'or':\n isinstr = 'j in ' + str(j) + ' | ' + 't in ' + str(t)\n elif i is not None:\n isinstr = 'i in ' + str(i)\n elif j is not None:\n 
isinstr = 'j in ' + str(j)\n elif t is not None:\n isinstr = 't in ' + str(t)\n elif ij is not None:\n isinstr = 'i in ' + str(ij) + ' | ' + 'j in ' + str(ij)\n df = pd.read_hdf(network, where=isinstr)\n else:\n if (i is not None and j is not None and t is not None and logic ==\n 'and'):\n df = network[network['i'].isin(i) & network['j'].isin(j) &\n network['t'].isin(t)]\n elif ij is not None and t is not None and logic == 'and':\n df = network[(network['i'].isin(ij) | network['j'].isin(ij)) &\n network['t'].isin(t)]\n elif ij is not None and t is not None and logic == 'or':\n df = network[network['i'].isin(ij) | network['j'].isin(ij) |\n network['t'].isin(t)]\n elif i is not None and j is not None and logic == 'and':\n df = network[network['i'].isin(i) & network['j'].isin(j)]\n elif i is not None and t is not None and logic == 'and':\n df = network[network['i'].isin(i) & network['t'].isin(t)]\n elif j is not None and t is not None and logic == 'and':\n df = network[network['j'].isin(j) & network['t'].isin(t)]\n elif i is not None and j is not None and t is not None and logic == 'or':\n df = network[network['i'].isin(i) | network['j'].isin(j) |\n network['t'].isin(t)]\n elif i is not None and j is not None and logic == 'or':\n df = network[network['i'].isin(i) | network['j'].isin(j)]\n elif i is not None and t is not None and logic == 'or':\n df = network[network['i'].isin(i) | network['t'].isin(t)]\n elif j is not None and t is not None and logic == 'or':\n df = network[network['j'].isin(j) | network['t'].isin(t)]\n elif i is not None:\n df = network[network['i'].isin(i)]\n elif j is not None:\n df = network[network['j'].isin(j)]\n elif t is not None:\n df = network[network['t'].isin(t)]\n elif ij is not None:\n df = network[network['i'].isin(ij) | network['j'].isin(ij)]\n if copy:\n df = df.copy()\n if asarray:\n df = df.values\n return df\n",
"role": "user"
},
{
"content": "get_network_when",
"role": "assistant"
}
] |
def x(tnet, intersliceweight=1):
newnetwork = tnet.network.copy()
newnetwork['i'] = tnet.network['i'] + tnet.netshape[0] * tnet.network['t']
newnetwork['j'] = tnet.network['j'] + tnet.netshape[0] * tnet.network['t']
if 'weight' not in newnetwork.columns:
newnetwork['weight'] = 1
newnetwork.drop('t', axis=1, inplace=True)
timepointconns = pd.DataFrame()
timepointconns['i'] = np.arange(0, tnet.N * tnet.T - tnet.N)
timepointconns['j'] = np.arange(tnet.N, tnet.N * tnet.T)
timepointconns['weight'] = intersliceweight
supranet = pd.concat([newnetwork, timepointconns]).reset_index(drop=True)
return supranet
| create_supraadjacency_matrix | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(tnet, intersliceweight=1):\n newnetwork = tnet.network.copy()\n newnetwork['i'] = tnet.network['i'] + tnet.netshape[0] * tnet.network['t']\n newnetwork['j'] = tnet.network['j'] + tnet.netshape[0] * tnet.network['t']\n if 'weight' not in newnetwork.columns:\n newnetwork['weight'] = 1\n newnetwork.drop('t', axis=1, inplace=True)\n timepointconns = pd.DataFrame()\n timepointconns['i'] = np.arange(0, tnet.N * tnet.T - tnet.N)\n timepointconns['j'] = np.arange(tnet.N, tnet.N * tnet.T)\n timepointconns['weight'] = intersliceweight\n supranet = pd.concat([newnetwork, timepointconns]).reset_index(drop=True)\n return supranet\n",
"role": "user"
},
{
"content": "create_supraadjacency_matrix",
"role": "assistant"
}
] |
def x(df, t=None):
if t is not None:
df = get_network_when(df, t=t)
if 'weight' in df.columns:
nxobj = nx.from_pandas_edgelist(df, source='i', target='j',
edge_attr='weight')
else:
nxobj = nx.from_pandas_edgelist(df, source='i', target='j')
return nxobj
| tnet_to_nx | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(df, t=None):\n if t is not None:\n df = get_network_when(df, t=t)\n if 'weight' in df.columns:\n nxobj = nx.from_pandas_edgelist(df, source='i', target='j',\n edge_attr='weight')\n else:\n nxobj = nx.from_pandas_edgelist(df, source='i', target='j')\n return nxobj\n",
"role": "user"
},
{
"content": "tnet_to_nx",
"role": "assistant"
}
] |
def x(tnet, resolution=1, intersliceweight=1, n_iter=100, negativeedge=
'ignore', randomseed=None, consensus_threshold=0.5, temporal_consensus=
True, njobs=1):
tnet = process_input(tnet, ['C', 'G', 'TN'], 'TN')
resolution = resolution / tnet.T
supranet = create_supraadjacency_matrix(tnet, intersliceweight=
intersliceweight)
if negativeedge == 'ignore':
supranet = supranet[supranet['weight'] > 0]
nxsupra = tnet_to_nx(supranet)
np.random.seed(randomseed)
while True:
comtmp = []
with ProcessPoolExecutor(max_workers=njobs) as executor:
job = {executor.submit(_run_louvain, nxsupra, resolution, tnet.
N, tnet.T) for n in range(n_iter)}
for j in as_completed(job):
comtmp.append(j.result())
comtmp = np.stack(comtmp)
comtmp = comtmp.transpose()
comtmp = np.reshape(comtmp, [tnet.N, tnet.T, n_iter], order='F')
if n_iter == 1:
break
nxsupra_old = nxsupra
nxsupra = make_consensus_matrix(comtmp, consensus_threshold)
if nxsupra is None:
break
if (nx.to_numpy_array(nxsupra, nodelist=np.arange(tnet.N * tnet.T)) ==
nx.to_numpy_array(nxsupra_old, nodelist=np.arange(tnet.N * tnet.T))
).all():
break
communities = comtmp[:, :, 0]
if temporal_consensus == True:
communities = make_temporal_consensus(communities)
return communities
| temporal_louvain | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(tnet, resolution=1, intersliceweight=1, n_iter=100, negativeedge=\n 'ignore', randomseed=None, consensus_threshold=0.5, temporal_consensus=\n True, njobs=1):\n tnet = process_input(tnet, ['C', 'G', 'TN'], 'TN')\n resolution = resolution / tnet.T\n supranet = create_supraadjacency_matrix(tnet, intersliceweight=\n intersliceweight)\n if negativeedge == 'ignore':\n supranet = supranet[supranet['weight'] > 0]\n nxsupra = tnet_to_nx(supranet)\n np.random.seed(randomseed)\n while True:\n comtmp = []\n with ProcessPoolExecutor(max_workers=njobs) as executor:\n job = {executor.submit(_run_louvain, nxsupra, resolution, tnet.\n N, tnet.T) for n in range(n_iter)}\n for j in as_completed(job):\n comtmp.append(j.result())\n comtmp = np.stack(comtmp)\n comtmp = comtmp.transpose()\n comtmp = np.reshape(comtmp, [tnet.N, tnet.T, n_iter], order='F')\n if n_iter == 1:\n break\n nxsupra_old = nxsupra\n nxsupra = make_consensus_matrix(comtmp, consensus_threshold)\n if nxsupra is None:\n break\n if (nx.to_numpy_array(nxsupra, nodelist=np.arange(tnet.N * tnet.T)) ==\n nx.to_numpy_array(nxsupra_old, nodelist=np.arange(tnet.N * tnet.T))\n ).all():\n break\n communities = comtmp[:, :, 0]\n if temporal_consensus == True:\n communities = make_temporal_consensus(communities)\n return communities\n",
"role": "user"
},
{
"content": "temporal_louvain",
"role": "assistant"
}
] |
def x(com_membership, th=0.5):
com_membership = np.array(com_membership)
D = []
for i in range(com_membership.shape[0]):
for j in range(i + 1, com_membership.shape[0]):
con = np.sum(com_membership[i, :] - com_membership[j, :] == 0,
axis=-1) / com_membership.shape[-1]
twhere = np.where(con > th)[0]
D += list(zip(*[np.repeat(i, len(twhere)).tolist(), np.repeat(j,
len(twhere)).tolist(), twhere.tolist(), con[twhere].tolist()]))
if len(D) > 0:
D = pd.DataFrame(D, columns=['i', 'j', 't', 'weight'])
D = TemporalNetwork(from_df=D)
D = create_supraadjacency_matrix(D, intersliceweight=0)
Dnx = tnet_to_nx(D)
else:
Dnx = None
return Dnx
| make_consensus_matrix | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(com_membership, th=0.5):\n com_membership = np.array(com_membership)\n D = []\n for i in range(com_membership.shape[0]):\n for j in range(i + 1, com_membership.shape[0]):\n con = np.sum(com_membership[i, :] - com_membership[j, :] == 0,\n axis=-1) / com_membership.shape[-1]\n twhere = np.where(con > th)[0]\n D += list(zip(*[np.repeat(i, len(twhere)).tolist(), np.repeat(j,\n len(twhere)).tolist(), twhere.tolist(), con[twhere].tolist()]))\n if len(D) > 0:\n D = pd.DataFrame(D, columns=['i', 'j', 't', 'weight'])\n D = TemporalNetwork(from_df=D)\n D = create_supraadjacency_matrix(D, intersliceweight=0)\n Dnx = tnet_to_nx(D)\n else:\n Dnx = None\n return Dnx\n",
"role": "user"
},
{
"content": "make_consensus_matrix",
"role": "assistant"
}
] |
def x(com_membership):
com_membership = np.array(com_membership)
com_membership[:, 0] = clean_community_indexes(com_membership[:, 0])
for t in range(1, com_membership.shape[1]):
ct, counts_t = np.unique(com_membership[:, t], return_counts=True)
ct = ct[np.argsort(counts_t)[::-1]]
c1back = np.unique(com_membership[:, t - 1])
new_index = np.zeros(com_membership.shape[0])
for n in ct:
if len(c1back) > 0:
d = np.ones(int(c1back.max()) + 1)
for m in c1back:
v1 = np.zeros(com_membership.shape[0])
v2 = np.zeros(com_membership.shape[0])
v1[com_membership[:, t] == n] = 1
v2[com_membership[:, t - 1] == m] = 1
d[int(m)] = jaccard(v1, v2)
bestval = np.argmin(d)
else:
bestval = new_index.max() + 1
new_index[com_membership[:, t] == n] = bestval
c1back = np.array(np.delete(c1back, np.where(c1back == bestval)))
com_membership[:, t] = new_index
return com_membership
| make_temporal_consensus | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(com_membership):\n com_membership = np.array(com_membership)\n com_membership[:, 0] = clean_community_indexes(com_membership[:, 0])\n for t in range(1, com_membership.shape[1]):\n ct, counts_t = np.unique(com_membership[:, t], return_counts=True)\n ct = ct[np.argsort(counts_t)[::-1]]\n c1back = np.unique(com_membership[:, t - 1])\n new_index = np.zeros(com_membership.shape[0])\n for n in ct:\n if len(c1back) > 0:\n d = np.ones(int(c1back.max()) + 1)\n for m in c1back:\n v1 = np.zeros(com_membership.shape[0])\n v2 = np.zeros(com_membership.shape[0])\n v1[com_membership[:, t] == n] = 1\n v2[com_membership[:, t - 1] == m] = 1\n d[int(m)] = jaccard(v1, v2)\n bestval = np.argmin(d)\n else:\n bestval = new_index.max() + 1\n new_index[com_membership[:, t] == n] = bestval\n c1back = np.array(np.delete(c1back, np.where(c1back == bestval)))\n com_membership[:, t] = new_index\n return com_membership\n",
"role": "user"
},
{
"content": "make_temporal_consensus",
"role": "assistant"
}
] |
def x(communities):
flex = np.zeros(communities.shape[0])
for t in range(1, communities.shape[1]):
flex[communities[:, t] != communities[:, t - 1]] += 1
flex = flex / (communities.shape[1] - 1)
return flex
| flexibility | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(communities):\n flex = np.zeros(communities.shape[0])\n for t in range(1, communities.shape[1]):\n flex[communities[:, t] != communities[:, t - 1]] += 1\n flex = flex / (communities.shape[1] - 1)\n return flex\n",
"role": "user"
},
{
"content": "flexibility",
"role": "assistant"
}
] |
def x(netin, ax, nodelabels=None, timelabels=None, communities=None,
plotedgeweights=False, edgeweightscalar=1, timeunit='', linestyle='k-',
cmap=None, nodesize=100, nodekwargs=None, edgekwargs=None):
inputType = checkInput(netin)
if inputType == 'G':
netin = graphlet2contact(netin)
inputType = 'C'
edgelist = [tuple(np.array(e[0:2]) + e[2] * netin['netshape'][0]) for e in
netin['contacts']]
if nodelabels is not None and len(nodelabels) == netin['netshape'][0]:
pass
elif nodelabels is not None and len(nodelabels) != netin['netshape'][0]:
raise ValueError('specified node label length does not match netshape')
elif nodelabels is None and netin['nodelabels'] == '':
nodelabels = np.arange(1, netin['netshape'][0] + 1)
else:
nodelabels = netin['nodelabels']
if timelabels is not None and len(timelabels) == netin['netshape'][-1]:
pass
elif timelabels is not None and len(timelabels) != netin['netshape'][-1]:
raise ValueError('specified time label length does not match netshape')
elif timelabels is None and str(netin['t0']) == '':
timelabels = np.arange(1, netin['netshape'][-1] + 1)
else:
timelabels = np.arange(netin['t0'], netin['Fs'] * netin['netshape']
[-1] + netin['t0'], netin['Fs'])
if timeunit is None:
timeunit = netin['timeunit']
timeNum = len(timelabels)
nodeNum = len(nodelabels)
posy = np.tile(list(range(0, nodeNum)), timeNum)
posx = np.repeat(list(range(0, timeNum)), nodeNum)
if nodekwargs is None:
nodekwargs = {}
if edgekwargs is None:
edgekwargs = {}
if cmap:
nodekwargs['cmap'] = cmap
if 'c' not in nodekwargs:
nodekwargs['c'] = posy
if communities is not None:
if len(communities.shape) == 1:
nodekwargs['c'] = np.tile(communities, timeNum)
else:
nodekwargs['c'] = communities.flatten(order='F')
for ei, edge in enumerate(edgelist):
if plotedgeweights == True and netin['nettype'][0] == 'w':
edgekwargs['linewidth'] = netin['values'][ei] * edgeweightscalar
bvx, bvy = bezier_points((posx[edge[0]], posy[edge[0]]), (posx[edge
[1]], posy[edge[1]]), nodeNum, 20)
ax.plot(bvx, bvy, linestyle, **edgekwargs)
ax.set_yticks(range(0, len(nodelabels)))
ax.set_xticks(range(0, len(timelabels)))
ax.set_yticklabels(nodelabels)
ax.set_xticklabels(timelabels)
ax.grid()
ax.set_frame_on(False)
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.get_xaxis().tick_bottom()
ax.get_yaxis().tick_left()
ax.set_xlim([min(posx) - 1, max(posx) + 1])
ax.set_ylim([min(posy) - 1, max(posy) + 1])
ax.scatter(posx, posy, s=nodesize, zorder=10, **nodekwargs)
if timeunit != '':
timeunit = ' (' + timeunit + ')'
ax.set_xlabel('Time' + timeunit)
return ax
| slice_plot | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(netin, ax, nodelabels=None, timelabels=None, communities=None,\n plotedgeweights=False, edgeweightscalar=1, timeunit='', linestyle='k-',\n cmap=None, nodesize=100, nodekwargs=None, edgekwargs=None):\n inputType = checkInput(netin)\n if inputType == 'G':\n netin = graphlet2contact(netin)\n inputType = 'C'\n edgelist = [tuple(np.array(e[0:2]) + e[2] * netin['netshape'][0]) for e in\n netin['contacts']]\n if nodelabels is not None and len(nodelabels) == netin['netshape'][0]:\n pass\n elif nodelabels is not None and len(nodelabels) != netin['netshape'][0]:\n raise ValueError('specified node label length does not match netshape')\n elif nodelabels is None and netin['nodelabels'] == '':\n nodelabels = np.arange(1, netin['netshape'][0] + 1)\n else:\n nodelabels = netin['nodelabels']\n if timelabels is not None and len(timelabels) == netin['netshape'][-1]:\n pass\n elif timelabels is not None and len(timelabels) != netin['netshape'][-1]:\n raise ValueError('specified time label length does not match netshape')\n elif timelabels is None and str(netin['t0']) == '':\n timelabels = np.arange(1, netin['netshape'][-1] + 1)\n else:\n timelabels = np.arange(netin['t0'], netin['Fs'] * netin['netshape']\n [-1] + netin['t0'], netin['Fs'])\n if timeunit is None:\n timeunit = netin['timeunit']\n timeNum = len(timelabels)\n nodeNum = len(nodelabels)\n posy = np.tile(list(range(0, nodeNum)), timeNum)\n posx = np.repeat(list(range(0, timeNum)), nodeNum)\n if nodekwargs is None:\n nodekwargs = {}\n if edgekwargs is None:\n edgekwargs = {}\n if cmap:\n nodekwargs['cmap'] = cmap\n if 'c' not in nodekwargs:\n nodekwargs['c'] = posy\n if communities is not None:\n if len(communities.shape) == 1:\n nodekwargs['c'] = np.tile(communities, timeNum)\n else:\n nodekwargs['c'] = communities.flatten(order='F')\n for ei, edge in enumerate(edgelist):\n if plotedgeweights == True and netin['nettype'][0] == 'w':\n edgekwargs['linewidth'] = netin['values'][ei] * edgeweightscalar\n bvx, 
bvy = bezier_points((posx[edge[0]], posy[edge[0]]), (posx[edge\n [1]], posy[edge[1]]), nodeNum, 20)\n ax.plot(bvx, bvy, linestyle, **edgekwargs)\n ax.set_yticks(range(0, len(nodelabels)))\n ax.set_xticks(range(0, len(timelabels)))\n ax.set_yticklabels(nodelabels)\n ax.set_xticklabels(timelabels)\n ax.grid()\n ax.set_frame_on(False)\n ax.spines['top'].set_visible(False)\n ax.spines['right'].set_visible(False)\n ax.get_xaxis().tick_bottom()\n ax.get_yaxis().tick_left()\n ax.set_xlim([min(posx) - 1, max(posx) + 1])\n ax.set_ylim([min(posy) - 1, max(posy) + 1])\n ax.scatter(posx, posy, s=nodesize, zorder=10, **nodekwargs)\n if timeunit != '':\n timeunit = ' (' + timeunit + ')'\n ax.set_xlabel('Time' + timeunit)\n return ax\n",
"role": "user"
},
{
"content": "slice_plot",
"role": "assistant"
}
] |
def x(data):
ict = 0
if isinstance(data, dict):
if [k for k in list(data.keys()) if k == 'intercontacttimes'] == [
'intercontacttimes']:
ict = 1
if ict == 0:
data = intercontacttimes(data)
if data['nettype'][1] == 'u':
ind = np.triu_indices(data['intercontacttimes'].shape[0], k=1)
if data['nettype'][1] == 'd':
triu = np.triu_indices(data['intercontacttimes'].shape[0], k=1)
tril = np.tril_indices(data['intercontacttimes'].shape[0], k=-1)
ind = [[], []]
ind[0] = np.concatenate([tril[0], triu[0]])
ind[1] = np.concatenate([tril[1], triu[1]])
ind = tuple(ind)
ict_shape = data['intercontacttimes'].shape
lv = np.zeros(ict_shape)
for n in range(len(ind[0])):
icts = data['intercontacttimes'][ind[0][n], ind[1][n]]
if len(icts) > 0:
lv_nonnorm = np.sum(np.power((icts[:-1] - icts[1:]) / (icts[:-1
] + icts[1:]), 2))
lv[ind[0][n], ind[1][n]] = 3 / len(icts) * lv_nonnorm
else:
lv[ind[0][n], ind[1][n]] = np.nan
if data['nettype'][1] == 'u':
lv = lv + lv.transpose()
for n in range(lv.shape[0]):
lv[n, n] = np.nan
return lv
| local_variation | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(data):\n ict = 0\n if isinstance(data, dict):\n if [k for k in list(data.keys()) if k == 'intercontacttimes'] == [\n 'intercontacttimes']:\n ict = 1\n if ict == 0:\n data = intercontacttimes(data)\n if data['nettype'][1] == 'u':\n ind = np.triu_indices(data['intercontacttimes'].shape[0], k=1)\n if data['nettype'][1] == 'd':\n triu = np.triu_indices(data['intercontacttimes'].shape[0], k=1)\n tril = np.tril_indices(data['intercontacttimes'].shape[0], k=-1)\n ind = [[], []]\n ind[0] = np.concatenate([tril[0], triu[0]])\n ind[1] = np.concatenate([tril[1], triu[1]])\n ind = tuple(ind)\n ict_shape = data['intercontacttimes'].shape\n lv = np.zeros(ict_shape)\n for n in range(len(ind[0])):\n icts = data['intercontacttimes'][ind[0][n], ind[1][n]]\n if len(icts) > 0:\n lv_nonnorm = np.sum(np.power((icts[:-1] - icts[1:]) / (icts[:-1\n ] + icts[1:]), 2))\n lv[ind[0][n], ind[1][n]] = 3 / len(icts) * lv_nonnorm\n else:\n lv[ind[0][n], ind[1][n]] = np.nan\n if data['nettype'][1] == 'u':\n lv = lv + lv.transpose()\n for n in range(lv.shape[0]):\n lv[n, n] = np.nan\n return lv\n",
"role": "user"
},
{
"content": "local_variation",
"role": "assistant"
}
] |
def x(fname):
if '/' in fname:
split = fname.split('/')
dirnames = '/'.join(split[:-1]) + '/'
fname = split[-1]
else:
dirnames = ''
tags = [tag for tag in fname.split('_') if '-' in tag]
fname_head = '_'.join(tags)
fileformat = '.' + '.'.join(fname.split('.')[1:])
return dirnames + fname_head, fileformat
| drop_bids_suffix | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(fname):\n if '/' in fname:\n split = fname.split('/')\n dirnames = '/'.join(split[:-1]) + '/'\n fname = split[-1]\n else:\n dirnames = ''\n tags = [tag for tag in fname.split('_') if '-' in tag]\n fname_head = '_'.join(tags)\n fileformat = '.' + '.'.join(fname.split('.')[1:])\n return dirnames + fname_head, fileformat\n",
"role": "user"
},
{
"content": "drop_bids_suffix",
"role": "assistant"
}
] |
def x(fname, return_meta=False, header=True, index_col=True):
if index_col:
index_col = 0
else:
index_col = None
if header:
header = 0
else:
header = None
df = pd.read_csv(fname, header=header, index_col=index_col, sep='\t')
if return_meta:
json_fname = fname.replace('tsv', 'json')
meta = pd.read_json(json_fname)
return df, meta
else:
return df
| load_tabular_file | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(fname, return_meta=False, header=True, index_col=True):\n if index_col:\n index_col = 0\n else:\n index_col = None\n if header:\n header = 0\n else:\n header = None\n df = pd.read_csv(fname, header=header, index_col=index_col, sep='\\t')\n if return_meta:\n json_fname = fname.replace('tsv', 'json')\n meta = pd.read_json(json_fname)\n return df, meta\n else:\n return df\n",
"role": "user"
},
{
"content": "load_tabular_file",
"role": "assistant"
}
] |
def x(fname, allowedfileformats='default'):
if allowedfileformats == 'default':
allowedfileformats = ['.tsv', '.nii.gz']
for f in allowedfileformats:
fname = fname.split(f)[0]
fname += '.json'
if os.path.exists(fname):
with open(fname) as fs:
sidecar = json.load(fs)
else:
sidecar = {}
if 'filestatus' not in sidecar:
sidecar['filestatus'] = {}
sidecar['filestatus']['reject'] = False
sidecar['filestatus']['reason'] = []
return sidecar
| get_sidecar | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(fname, allowedfileformats='default'):\n if allowedfileformats == 'default':\n allowedfileformats = ['.tsv', '.nii.gz']\n for f in allowedfileformats:\n fname = fname.split(f)[0]\n fname += '.json'\n if os.path.exists(fname):\n with open(fname) as fs:\n sidecar = json.load(fs)\n else:\n sidecar = {}\n if 'filestatus' not in sidecar:\n sidecar['filestatus'] = {}\n sidecar['filestatus']['reject'] = False\n sidecar['filestatus']['reason'] = []\n return sidecar\n",
"role": "user"
},
{
"content": "get_sidecar",
"role": "assistant"
}
] |
def x(exclusion_criteria):
relfun = []
threshold = []
for ec in exclusion_criteria:
if ec[0:2] == '>=':
relfun.append(np.greater_equal)
threshold.append(float(ec[2:]))
elif ec[0:2] == '<=':
relfun.append(np.less_equal)
threshold.append(float(ec[2:]))
elif ec[0] == '>':
relfun.append(np.greater)
threshold.append(float(ec[1:]))
elif ec[0] == '<':
relfun.append(np.less)
threshold.append(float(ec[1:]))
else:
raise ValueError('exclusion crieria must being with >,<,>= or <=')
return relfun, threshold
| process_exclusion_criteria | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(exclusion_criteria):\n relfun = []\n threshold = []\n for ec in exclusion_criteria:\n if ec[0:2] == '>=':\n relfun.append(np.greater_equal)\n threshold.append(float(ec[2:]))\n elif ec[0:2] == '<=':\n relfun.append(np.less_equal)\n threshold.append(float(ec[2:]))\n elif ec[0] == '>':\n relfun.append(np.greater)\n threshold.append(float(ec[1:]))\n elif ec[0] == '<':\n relfun.append(np.less)\n threshold.append(float(ec[1:]))\n else:\n raise ValueError('exclusion crieria must being with >,<,>= or <=')\n return relfun, threshold\n",
"role": "user"
},
{
"content": "process_exclusion_criteria",
"role": "assistant"
}
] |
def x(tnet=None, paths=None, rratio=1, calc='global'):
if tnet is not None and paths is not None:
raise ValueError('Only network or path input allowed.')
if tnet is None and paths is None:
raise ValueError('No input.')
if tnet is not None:
paths = shortest_temporal_path(tnet)
pathmat = np.zeros([paths[['from', 'to']].max().max() + 1, paths[[
'from', 'to']].max().max() + 1, paths[['t_start']].max().max() + 1]
) * np.nan
pathmat[paths['from'].values, paths['to'].values, paths['t_start'].values
] = paths['temporal-distance']
netshape = pathmat.shape
edges_to_reach = netshape[0] - np.round(netshape[0] * rratio)
reach_lat = np.zeros([netshape[1], netshape[2]]) * np.nan
for t_ind in range(0, netshape[2]):
paths_sort = -np.sort(-pathmat[:, :, t_ind], axis=1)
reach_lat[:, t_ind] = paths_sort[:, edges_to_reach]
if calc == 'global':
reach_lat = np.nansum(reach_lat)
reach_lat = reach_lat / (netshape[0] * netshape[2])
elif calc == 'nodes':
reach_lat = np.nansum(reach_lat, axis=1)
reach_lat = reach_lat / netshape[2]
return reach_lat
| reachability_latency | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(tnet=None, paths=None, rratio=1, calc='global'):\n if tnet is not None and paths is not None:\n raise ValueError('Only network or path input allowed.')\n if tnet is None and paths is None:\n raise ValueError('No input.')\n if tnet is not None:\n paths = shortest_temporal_path(tnet)\n pathmat = np.zeros([paths[['from', 'to']].max().max() + 1, paths[[\n 'from', 'to']].max().max() + 1, paths[['t_start']].max().max() + 1]\n ) * np.nan\n pathmat[paths['from'].values, paths['to'].values, paths['t_start'].values\n ] = paths['temporal-distance']\n netshape = pathmat.shape\n edges_to_reach = netshape[0] - np.round(netshape[0] * rratio)\n reach_lat = np.zeros([netshape[1], netshape[2]]) * np.nan\n for t_ind in range(0, netshape[2]):\n paths_sort = -np.sort(-pathmat[:, :, t_ind], axis=1)\n reach_lat[:, t_ind] = paths_sort[:, edges_to_reach]\n if calc == 'global':\n reach_lat = np.nansum(reach_lat)\n reach_lat = reach_lat / (netshape[0] * netshape[2])\n elif calc == 'nodes':\n reach_lat = np.nansum(reach_lat, axis=1)\n reach_lat = reach_lat / netshape[2]\n return reach_lat\n",
"role": "user"
},
{
"content": "reachability_latency",
"role": "assistant"
}
] |
def x(netin, calc='global'):
netin, _ = process_input(netin, ['C', 'G', 'TN'])
netin[netin != 0] = 1
unique_edges = np.sum(netin, axis=2)
unique_edges[unique_edges > 0] = 1
unique_edges[unique_edges == 0] = 0
fluct = np.sum(unique_edges) / np.sum(netin)
return fluct
| fluctuability | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(netin, calc='global'):\n netin, _ = process_input(netin, ['C', 'G', 'TN'])\n netin[netin != 0] = 1\n unique_edges = np.sum(netin, axis=2)\n unique_edges[unique_edges > 0] = 1\n unique_edges[unique_edges == 0] = 0\n fluct = np.sum(unique_edges) / np.sum(netin)\n return fluct\n",
"role": "user"
},
{
"content": "fluctuability",
"role": "assistant"
}
] |
def x(tnet, calc='time'):
tnet = process_input(tnet, ['C', 'G', 'TN'])[0]
numerator = np.sum(tnet[:, :, :-1] * tnet[:, :, 1:], axis=1)
denominator = np.sqrt(np.sum(tnet[:, :, :-1], axis=1) * np.sum(tnet[:,
:, 1:], axis=1))
topo_overlap = numerator / denominator
topo_overlap[np.isnan(topo_overlap)] = 0
if calc == 'time':
topo_overlap = np.hstack([topo_overlap, np.zeros([topo_overlap.
shape[0], 1]) * np.nan])
else:
topo_overlap = np.mean(topo_overlap, axis=1)
if calc == 'node':
pass
elif calc == 'global':
topo_overlap = np.mean(topo_overlap)
return topo_overlap
| topological_overlap | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(tnet, calc='time'):\n tnet = process_input(tnet, ['C', 'G', 'TN'])[0]\n numerator = np.sum(tnet[:, :, :-1] * tnet[:, :, 1:], axis=1)\n denominator = np.sqrt(np.sum(tnet[:, :, :-1], axis=1) * np.sum(tnet[:,\n :, 1:], axis=1))\n topo_overlap = numerator / denominator\n topo_overlap[np.isnan(topo_overlap)] = 0\n if calc == 'time':\n topo_overlap = np.hstack([topo_overlap, np.zeros([topo_overlap.\n shape[0], 1]) * np.nan])\n else:\n topo_overlap = np.mean(topo_overlap, axis=1)\n if calc == 'node':\n pass\n elif calc == 'global':\n topo_overlap = np.mean(topo_overlap)\n return topo_overlap\n",
"role": "user"
},
{
"content": "topological_overlap",
"role": "assistant"
}
] |
def x(temporalcommunities, staticcommunities):
if staticcommunities.shape[0] != temporalcommunities.shape[0]:
raise ValueError(
'Temporal and static communities have different dimensions')
alleg = allegiance(temporalcommunities)
Rcoeff = np.zeros(len(staticcommunities))
for i, statcom in enumerate(staticcommunities):
Rcoeff[i] = np.mean(alleg[i, staticcommunities == statcom])
return Rcoeff
| recruitment | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(temporalcommunities, staticcommunities):\n if staticcommunities.shape[0] != temporalcommunities.shape[0]:\n raise ValueError(\n 'Temporal and static communities have different dimensions')\n alleg = allegiance(temporalcommunities)\n Rcoeff = np.zeros(len(staticcommunities))\n for i, statcom in enumerate(staticcommunities):\n Rcoeff[i] = np.mean(alleg[i, staticcommunities == statcom])\n return Rcoeff\n",
"role": "user"
},
{
"content": "recruitment",
"role": "assistant"
}
] |
def x(netIn, ax, nodelabels=None, linestyle='k-', nodesize=1000, cmap='Set2'):
inputType = checkInput(netIn, conMat=1)
if nodelabels is None:
nodelabels = []
if inputType == 'M':
shape = np.shape(netIn)
edg = np.where(np.abs(netIn) > 0)
contacts = [tuple([edg[0][i], edg[1][i]]) for i in range(0, len(edg
[0]))]
netIn = {}
netIn['contacts'] = contacts
netIn['netshape'] = shape
elif inputType == 'G':
netIn = graphlet2contact(netIn)
inputType = 'C'
if inputType == 'C':
edgeList = [tuple(np.array(e[0:2]) + e[2] * netIn['netshape'][0]) for
e in netIn['contacts']]
elif inputType == 'M':
edgeList = netIn['contacts']
n = netIn['netshape'][0]
posx = [math.cos(2 * math.pi * i / n) for i in range(0, n)]
posy = [math.sin(2 * math.pi * i / n) for i in range(0, n)]
cmap = cm.get_cmap(cmap)(np.linspace(0, 1, n))
for edge in edgeList:
bvx, bvy = bezier_circle((posx[edge[0]], posy[edge[0]]), (posx[edge
[1]], posy[edge[1]]), 20)
ax.plot(bvx, bvy, linestyle, zorder=0)
for i in range(n):
ax.scatter(posx[i], posy[i], s=nodesize, c=cmap[i], zorder=1)
ax.set_yticklabels([])
ax.set_xticklabels([])
ax.set_yticks([])
ax.set_xticks([])
ax.set_frame_on(False)
x0, x1 = ax.get_xlim()
y0, y1 = ax.get_ylim()
ax.set_aspect((x1 - x0) / (y1 - y0))
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.spines['left'].set_visible(False)
ax.spines['bottom'].set_visible(False)
return ax
| circle_plot | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(netIn, ax, nodelabels=None, linestyle='k-', nodesize=1000, cmap='Set2'):\n inputType = checkInput(netIn, conMat=1)\n if nodelabels is None:\n nodelabels = []\n if inputType == 'M':\n shape = np.shape(netIn)\n edg = np.where(np.abs(netIn) > 0)\n contacts = [tuple([edg[0][i], edg[1][i]]) for i in range(0, len(edg\n [0]))]\n netIn = {}\n netIn['contacts'] = contacts\n netIn['netshape'] = shape\n elif inputType == 'G':\n netIn = graphlet2contact(netIn)\n inputType = 'C'\n if inputType == 'C':\n edgeList = [tuple(np.array(e[0:2]) + e[2] * netIn['netshape'][0]) for\n e in netIn['contacts']]\n elif inputType == 'M':\n edgeList = netIn['contacts']\n n = netIn['netshape'][0]\n posx = [math.cos(2 * math.pi * i / n) for i in range(0, n)]\n posy = [math.sin(2 * math.pi * i / n) for i in range(0, n)]\n cmap = cm.get_cmap(cmap)(np.linspace(0, 1, n))\n for edge in edgeList:\n bvx, bvy = bezier_circle((posx[edge[0]], posy[edge[0]]), (posx[edge\n [1]], posy[edge[1]]), 20)\n ax.plot(bvx, bvy, linestyle, zorder=0)\n for i in range(n):\n ax.scatter(posx[i], posy[i], s=nodesize, c=cmap[i], zorder=1)\n ax.set_yticklabels([])\n ax.set_xticklabels([])\n ax.set_yticks([])\n ax.set_xticks([])\n ax.set_frame_on(False)\n x0, x1 = ax.get_xlim()\n y0, y1 = ax.get_ylim()\n ax.set_aspect((x1 - x0) / (y1 - y0))\n ax.spines['top'].set_visible(False)\n ax.spines['right'].set_visible(False)\n ax.spines['left'].set_visible(False)\n ax.spines['bottom'].set_visible(False)\n return ax\n",
"role": "user"
},
{
"content": "circle_plot",
"role": "assistant"
}
] |
def x(temporalcommunities, staticcommunities):
if staticcommunities.shape[0] != temporalcommunities.shape[0]:
raise ValueError(
'Temporal and static communities have different dimensions')
alleg = allegiance(temporalcommunities)
Icoeff = np.zeros(len(staticcommunities))
for i, statcom in enumerate(len(staticcommunities)):
Icoeff[i] = np.mean(alleg[i, staticcommunities != statcom])
return Icoeff
| integration | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(temporalcommunities, staticcommunities):\n if staticcommunities.shape[0] != temporalcommunities.shape[0]:\n raise ValueError(\n 'Temporal and static communities have different dimensions')\n alleg = allegiance(temporalcommunities)\n Icoeff = np.zeros(len(staticcommunities))\n for i, statcom in enumerate(len(staticcommunities)):\n Icoeff[i] = np.mean(alleg[i, staticcommunities != statcom])\n return Icoeff\n",
"role": "user"
},
{
"content": "integration",
"role": "assistant"
}
] |
def x(tnet):
tnet = process_input(tnet, ['C', 'G', 'TN'], 'TN')
if tnet.nettype[0] == 'w':
print(
'WARNING: assuming connections to be binary when computing intercontacttimes'
)
contacts = np.array([[None] * tnet.netshape[0]] * tnet.netshape[0])
if tnet.nettype[1] == 'u':
for i in range(0, tnet.netshape[0]):
for j in range(i + 1, tnet.netshape[0]):
edge_on = tnet.get_network_when(i=i, j=j)['t'].values
if len(edge_on) > 0:
edge_on_diff = edge_on[1:] - edge_on[:-1]
contacts[i, j] = np.array(edge_on_diff)
contacts[j, i] = np.array(edge_on_diff)
else:
contacts[i, j] = []
contacts[j, i] = []
elif tnet.nettype[1] == 'd':
for i in range(0, tnet.netshape[0]):
for j in range(0, tnet.netshape[0]):
edge_on = tnet.get_network_when(i=i, j=j)['t'].values
if len(edge_on) > 0:
edge_on_diff = edge_on[1:] - edge_on[:-1]
contacts[i, j] = np.array(edge_on_diff)
else:
contacts[i, j] = []
out = {}
out['intercontacttimes'] = contacts
out['nettype'] = tnet.nettype
return out
| intercontacttimes | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(tnet):\n tnet = process_input(tnet, ['C', 'G', 'TN'], 'TN')\n if tnet.nettype[0] == 'w':\n print(\n 'WARNING: assuming connections to be binary when computing intercontacttimes'\n )\n contacts = np.array([[None] * tnet.netshape[0]] * tnet.netshape[0])\n if tnet.nettype[1] == 'u':\n for i in range(0, tnet.netshape[0]):\n for j in range(i + 1, tnet.netshape[0]):\n edge_on = tnet.get_network_when(i=i, j=j)['t'].values\n if len(edge_on) > 0:\n edge_on_diff = edge_on[1:] - edge_on[:-1]\n contacts[i, j] = np.array(edge_on_diff)\n contacts[j, i] = np.array(edge_on_diff)\n else:\n contacts[i, j] = []\n contacts[j, i] = []\n elif tnet.nettype[1] == 'd':\n for i in range(0, tnet.netshape[0]):\n for j in range(0, tnet.netshape[0]):\n edge_on = tnet.get_network_when(i=i, j=j)['t'].values\n if len(edge_on) > 0:\n edge_on_diff = edge_on[1:] - edge_on[:-1]\n contacts[i, j] = np.array(edge_on_diff)\n else:\n contacts[i, j] = []\n out = {}\n out['intercontacttimes'] = contacts\n out['nettype'] = tnet.nettype\n return out\n",
"role": "user"
},
{
"content": "intercontacttimes",
"role": "assistant"
}
] |
def x(report, sdir='./', report_name='report.html'):
if not os.path.exists(sdir):
os.makedirs(sdir)
if sdir[-1] != '/':
sdir += '/'
report_html = '<html><body>'
if 'method' in report.keys():
report_html += '<h1>Method: ' + report['method'] + '</h1><p>'
for i in report[report['method']]:
if i == 'taper_window':
fig, ax = plt.subplots(1)
ax.plot(report[report['method']]['taper_window'], report[
report['method']]['taper'])
ax.set_xlabel('Window (time). 0 in middle of window.')
ax.set_title('Taper from ' + report[report['method']][
'distribution'] + ' distribution (PDF).')
fig.savefig(sdir + 'taper.png')
report_html += "<img src='./taper.png' width=500>" + '<p>'
else:
report_html += '- <b>' + i + '</b>: ' + str(report[report[
'method']][i]) + '<br>'
if 'postprocess' in report.keys():
report_html += '<p><h2>Postprocessing:</h2><p>'
report_html += '<b>Pipeline: </b>'
for i in report['postprocess']:
report_html += ' ' + i + ','
for i in report['postprocess']:
report_html += '<p><h3>' + i + '</h3><p>'
for j in report[i]:
if j == 'lambda':
report_html += '- <b>' + j + '</b>: ' + '<br>'
lambda_val = np.array(report['boxcox']['lambda'])
fig, ax = plt.subplots(1)
ax.hist(lambda_val[:, -1])
ax.set_xlabel('lambda')
ax.set_ylabel('frequency')
ax.set_title('Histogram of lambda parameter')
fig.savefig(sdir + 'boxcox_lambda.png')
report_html += (
"<img src='./boxcox_lambda.png' width=500>" + '<p>')
report_html += ('Data located in ' + sdir +
'boxcox_lambda.csv <p>')
np.savetxt(sdir + 'boxcox_lambda.csv', lambda_val,
delimiter=',')
else:
report_html += '- <b>' + j + '</b>: ' + str(report[i][j]
) + '<br>'
report_html += '</body></html>'
with open(sdir + report_name, 'w') as file:
file.write(report_html)
file.close()
| gen_report | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(report, sdir='./', report_name='report.html'):\n if not os.path.exists(sdir):\n os.makedirs(sdir)\n if sdir[-1] != '/':\n sdir += '/'\n report_html = '<html><body>'\n if 'method' in report.keys():\n report_html += '<h1>Method: ' + report['method'] + '</h1><p>'\n for i in report[report['method']]:\n if i == 'taper_window':\n fig, ax = plt.subplots(1)\n ax.plot(report[report['method']]['taper_window'], report[\n report['method']]['taper'])\n ax.set_xlabel('Window (time). 0 in middle of window.')\n ax.set_title('Taper from ' + report[report['method']][\n 'distribution'] + ' distribution (PDF).')\n fig.savefig(sdir + 'taper.png')\n report_html += \"<img src='./taper.png' width=500>\" + '<p>'\n else:\n report_html += '- <b>' + i + '</b>: ' + str(report[report[\n 'method']][i]) + '<br>'\n if 'postprocess' in report.keys():\n report_html += '<p><h2>Postprocessing:</h2><p>'\n report_html += '<b>Pipeline: </b>'\n for i in report['postprocess']:\n report_html += ' ' + i + ','\n for i in report['postprocess']:\n report_html += '<p><h3>' + i + '</h3><p>'\n for j in report[i]:\n if j == 'lambda':\n report_html += '- <b>' + j + '</b>: ' + '<br>'\n lambda_val = np.array(report['boxcox']['lambda'])\n fig, ax = plt.subplots(1)\n ax.hist(lambda_val[:, -1])\n ax.set_xlabel('lambda')\n ax.set_ylabel('frequency')\n ax.set_title('Histogram of lambda parameter')\n fig.savefig(sdir + 'boxcox_lambda.png')\n report_html += (\n \"<img src='./boxcox_lambda.png' width=500>\" + '<p>')\n report_html += ('Data located in ' + sdir +\n 'boxcox_lambda.csv <p>')\n np.savetxt(sdir + 'boxcox_lambda.csv', lambda_val,\n delimiter=',')\n else:\n report_html += '- <b>' + j + '</b>: ' + str(report[i][j]\n ) + '<br>'\n report_html += '</body></html>'\n with open(sdir + report_name, 'w') as file:\n file.write(report_html)\n file.close()\n",
"role": "user"
},
{
"content": "gen_report",
"role": "assistant"
}
] |
def x(self, fname, fargs, init=0):
if init == 1:
self.history = []
self.history.append([fname, fargs])
| add_history | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, fname, fargs, init=0):\n if init == 1:\n self.history = []\n self.history.append([fname, fargs])\n",
"role": "user"
},
{
"content": "add_history",
"role": "assistant"
}
] |
def x(self, dirname):
mods = [(m.__name__, m.__version__) for m in sys.modules.values() if m if
hasattr(m, '__version__')]
with open(dirname + '/requirements.txt', 'w') as f:
for m in mods:
m = list(m)
if not isinstance(m[1], str):
m[1] = m[1].decode('utf-8')
f.writelines(m[0] + ' == ' + m[1] + '\n')
with open(dirname + '/TenetoBIDShistory.py', 'w') as f:
f.writelines('import teneto\n')
for func, args in self.history:
f.writelines(func + '(**' + str(args) + ')\n')
with open(dirname + '/tenetoinfo.json', 'w') as f:
json.dump(self.tenetoinfo, f)
| export_history | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, dirname):\n mods = [(m.__name__, m.__version__) for m in sys.modules.values() if m if\n hasattr(m, '__version__')]\n with open(dirname + '/requirements.txt', 'w') as f:\n for m in mods:\n m = list(m)\n if not isinstance(m[1], str):\n m[1] = m[1].decode('utf-8')\n f.writelines(m[0] + ' == ' + m[1] + '\\n')\n with open(dirname + '/TenetoBIDShistory.py', 'w') as f:\n f.writelines('import teneto\\n')\n for func, args in self.history:\n f.writelines(func + '(**' + str(args) + ')\\n')\n with open(dirname + '/tenetoinfo.json', 'w') as f:\n json.dump(self.tenetoinfo, f)\n",
"role": "user"
},
{
"content": "export_history",
"role": "assistant"
}
] |
def x(self, params, update_pipeline=True, tag=None, njobs=1,
confound_corr_report=True):
if not njobs:
njobs = self.njobs
self.add_history(inspect.stack()[0][3], locals(), 1)
files = self.get_selected_files(quiet=1)
confound_files = self.get_selected_files(quiet=1, pipeline='confound')
if confound_files:
confounds_exist = True
else:
confounds_exist = False
if not confound_corr_report:
confounds_exist = False
if not tag:
tag = ''
else:
tag = 'desc-' + tag
with ProcessPoolExecutor(max_workers=njobs) as executor:
job = {executor.submit(self._derive_temporalnetwork, f, i, tag,
params, confounds_exist, confound_files) for i, f in enumerate(
files) if f}
for j in as_completed(job):
j.result()
if update_pipeline == True:
if not self.confound_pipeline and len(self.get_selected_files(quiet
=1, pipeline='confound')) > 0:
self.set_confound_pipeline = self.pipeline
self.set_pipeline('teneto_' + teneto.__version__)
self.set_pipeline_subdir('tvc')
self.set_bids_suffix('tvcconn')
| derive_temporalnetwork | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, params, update_pipeline=True, tag=None, njobs=1,\n confound_corr_report=True):\n if not njobs:\n njobs = self.njobs\n self.add_history(inspect.stack()[0][3], locals(), 1)\n files = self.get_selected_files(quiet=1)\n confound_files = self.get_selected_files(quiet=1, pipeline='confound')\n if confound_files:\n confounds_exist = True\n else:\n confounds_exist = False\n if not confound_corr_report:\n confounds_exist = False\n if not tag:\n tag = ''\n else:\n tag = 'desc-' + tag\n with ProcessPoolExecutor(max_workers=njobs) as executor:\n job = {executor.submit(self._derive_temporalnetwork, f, i, tag,\n params, confounds_exist, confound_files) for i, f in enumerate(\n files) if f}\n for j in as_completed(job):\n j.result()\n if update_pipeline == True:\n if not self.confound_pipeline and len(self.get_selected_files(quiet\n =1, pipeline='confound')) > 0:\n self.set_confound_pipeline = self.pipeline\n self.set_pipeline('teneto_' + teneto.__version__)\n self.set_pipeline_subdir('tvc')\n self.set_bids_suffix('tvcconn')\n",
"role": "user"
},
{
"content": "derive_temporalnetwork",
"role": "assistant"
}
] |
def x(self, f, i, tag, params, confounds_exist, confound_files):
data = load_tabular_file(f, index_col=True, header=True)
fs, _ = drop_bids_suffix(f)
save_name, save_dir, _ = self._save_namepaths_bids_derivatives(fs, tag,
'tvc', 'tvcconn')
if 'weight-var' in params.keys():
if params['weight-var'] == 'from-subject-fc':
fc_files = self.get_selected_files(quiet=1, pipeline=
'functionalconnectivity', forfile=f)
if len(fc_files) == 1:
params['weight-var'] = load_tabular_file(fc_files[0]).values
else:
raise ValueError('Cannot correctly find FC files')
if 'weight-mean' in params.keys():
if params['weight-mean'] == 'from-subject-fc':
fc_files = self.get_selected_files(quiet=1, pipeline=
'functionalconnectivity', forfile=f)
if len(fc_files) == 1:
params['weight-mean'] = load_tabular_file(fc_files[0]).values
else:
raise ValueError('Cannot correctly find FC files')
params['report'] = 'yes'
params['report_path'] = save_dir + '/report/'
params['report_filename'] = save_name + '_derivationreport.html'
if not os.path.exists(params['report_path']):
os.makedirs(params['report_path'])
if 'dimord' not in params:
params['dimord'] = 'time,node'
dfc = teneto.timeseries.derive_temporalnetwork(data.values, params)
dfc_net = TemporalNetwork(from_array=dfc, nettype='wu')
dfc_net.network.to_csv(save_dir + save_name + '.tsv', sep='\t')
sidecar = get_sidecar(f)
sidecar['tvc'] = params
if 'weight-var' in sidecar['tvc']:
sidecar['tvc']['weight-var'] = True
sidecar['tvc']['fc source'] = fc_files
if 'weight-mean' in sidecar['tvc']:
sidecar['tvc']['weight-mean'] = True
sidecar['tvc']['fc source'] = fc_files
sidecar['tvc']['inputfile'] = f
sidecar['tvc']['description'] = 'Time varying connectivity information.'
with open(save_dir + save_name + '.json', 'w') as fs:
json.dump(sidecar, fs)
if confounds_exist:
analysis_step = 'tvc-derive'
df = pd.read_csv(confound_files[i], sep='\t')
df = df.fillna(df.median())
ind = np.triu_indices(dfc.shape[0], k=1)
dfc_df = pd.DataFrame(dfc[ind[0], ind[1], :].transpose())
if len(df) != len(dfc_df):
df = df.iloc[int(np.round((params['windowsize'] - 1) / 2)):int(
np.round((params['windowsize'] - 1) / 2) + len(dfc_df))]
df.reset_index(inplace=True, drop=True)
dfc_df_z = dfc_df - dfc_df.mean()
df_z = df - df.mean()
R_df = dfc_df_z.T.dot(df_z).div(len(dfc_df)).div(df_z.std(ddof=0)).div(
dfc_df_z.std(ddof=0), axis=0)
R_df_describe = R_df.describe()
desc_index = R_df_describe.index
confound_report_dir = params['report_path'
] + '/' + save_name + '_confoundcorr/'
confound_report_figdir = confound_report_dir + 'figures/'
if not os.path.exists(confound_report_figdir):
os.makedirs(confound_report_figdir)
report = '<html><body>'
report += ('<h1> Correlation of ' + analysis_step +
' and confounds.</h1>')
for c in R_df.columns:
fig, ax = plt.subplots(1)
ax = sns.distplot(R_df[c], hist=False, color='m', ax=ax,
kde_kws={'shade': True})
fig.savefig(confound_report_figdir + c + '.png')
plt.close(fig)
report += '<h2>' + c + '</h2>'
for ind_name, r in enumerate(R_df_describe[c]):
report += str(desc_index[ind_name]) + ': '
report += str(r) + '<br>'
report += 'Distribution of corrlation values:'
report += '<img src=' + os.path.abspath(confound_report_figdir
) + '/' + c + '.png><br><br>'
report += '</body></html>'
with open(confound_report_dir + save_name + '_confoundcorr.html', 'w'
) as file:
file.write(report)
| derive_temporalnetwork | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, f, i, tag, params, confounds_exist, confound_files):\n data = load_tabular_file(f, index_col=True, header=True)\n fs, _ = drop_bids_suffix(f)\n save_name, save_dir, _ = self._save_namepaths_bids_derivatives(fs, tag,\n 'tvc', 'tvcconn')\n if 'weight-var' in params.keys():\n if params['weight-var'] == 'from-subject-fc':\n fc_files = self.get_selected_files(quiet=1, pipeline=\n 'functionalconnectivity', forfile=f)\n if len(fc_files) == 1:\n params['weight-var'] = load_tabular_file(fc_files[0]).values\n else:\n raise ValueError('Cannot correctly find FC files')\n if 'weight-mean' in params.keys():\n if params['weight-mean'] == 'from-subject-fc':\n fc_files = self.get_selected_files(quiet=1, pipeline=\n 'functionalconnectivity', forfile=f)\n if len(fc_files) == 1:\n params['weight-mean'] = load_tabular_file(fc_files[0]).values\n else:\n raise ValueError('Cannot correctly find FC files')\n params['report'] = 'yes'\n params['report_path'] = save_dir + '/report/'\n params['report_filename'] = save_name + '_derivationreport.html'\n if not os.path.exists(params['report_path']):\n os.makedirs(params['report_path'])\n if 'dimord' not in params:\n params['dimord'] = 'time,node'\n dfc = teneto.timeseries.derive_temporalnetwork(data.values, params)\n dfc_net = TemporalNetwork(from_array=dfc, nettype='wu')\n dfc_net.network.to_csv(save_dir + save_name + '.tsv', sep='\\t')\n sidecar = get_sidecar(f)\n sidecar['tvc'] = params\n if 'weight-var' in sidecar['tvc']:\n sidecar['tvc']['weight-var'] = True\n sidecar['tvc']['fc source'] = fc_files\n if 'weight-mean' in sidecar['tvc']:\n sidecar['tvc']['weight-mean'] = True\n sidecar['tvc']['fc source'] = fc_files\n sidecar['tvc']['inputfile'] = f\n sidecar['tvc']['description'] = 'Time varying connectivity information.'\n with open(save_dir + save_name + '.json', 'w') as fs:\n json.dump(sidecar, fs)\n if confounds_exist:\n analysis_step = 'tvc-derive'\n df = pd.read_csv(confound_files[i], sep='\\t')\n df = 
df.fillna(df.median())\n ind = np.triu_indices(dfc.shape[0], k=1)\n dfc_df = pd.DataFrame(dfc[ind[0], ind[1], :].transpose())\n if len(df) != len(dfc_df):\n df = df.iloc[int(np.round((params['windowsize'] - 1) / 2)):int(\n np.round((params['windowsize'] - 1) / 2) + len(dfc_df))]\n df.reset_index(inplace=True, drop=True)\n dfc_df_z = dfc_df - dfc_df.mean()\n df_z = df - df.mean()\n R_df = dfc_df_z.T.dot(df_z).div(len(dfc_df)).div(df_z.std(ddof=0)).div(\n dfc_df_z.std(ddof=0), axis=0)\n R_df_describe = R_df.describe()\n desc_index = R_df_describe.index\n confound_report_dir = params['report_path'\n ] + '/' + save_name + '_confoundcorr/'\n confound_report_figdir = confound_report_dir + 'figures/'\n if not os.path.exists(confound_report_figdir):\n os.makedirs(confound_report_figdir)\n report = '<html><body>'\n report += ('<h1> Correlation of ' + analysis_step +\n ' and confounds.</h1>')\n for c in R_df.columns:\n fig, ax = plt.subplots(1)\n ax = sns.distplot(R_df[c], hist=False, color='m', ax=ax,\n kde_kws={'shade': True})\n fig.savefig(confound_report_figdir + c + '.png')\n plt.close(fig)\n report += '<h2>' + c + '</h2>'\n for ind_name, r in enumerate(R_df_describe[c]):\n report += str(desc_index[ind_name]) + ': '\n report += str(r) + '<br>'\n report += 'Distribution of corrlation values:'\n report += '<img src=' + os.path.abspath(confound_report_figdir\n ) + '/' + c + '.png><br><br>'\n report += '</body></html>'\n with open(confound_report_dir + save_name + '_confoundcorr.html', 'w'\n ) as file:\n file.write(report)\n",
"role": "user"
},
{
"content": "derive_temporalnetwork",
"role": "assistant"
}
] |
def x(self, njobs=None, returngroup=False, file_hdr=None, file_idx=None):
if not njobs:
njobs = self.njobs
self.add_history(inspect.stack()[0][3], locals(), 1)
files = self.get_selected_files(quiet=1)
R_group = []
with ProcessPoolExecutor(max_workers=njobs) as executor:
job = {executor.submit(self._run_make_functional_connectivity, f,
file_hdr, file_idx) for f in files}
for j in as_completed(job):
R_group.append(j.result())
if returngroup:
R_group = np.tanh(np.mean(np.arctanh(np.array(R_group)), axis=0))
return np.array(R_group)
| make_functional_connectivity | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, njobs=None, returngroup=False, file_hdr=None, file_idx=None):\n if not njobs:\n njobs = self.njobs\n self.add_history(inspect.stack()[0][3], locals(), 1)\n files = self.get_selected_files(quiet=1)\n R_group = []\n with ProcessPoolExecutor(max_workers=njobs) as executor:\n job = {executor.submit(self._run_make_functional_connectivity, f,\n file_hdr, file_idx) for f in files}\n for j in as_completed(job):\n R_group.append(j.result())\n if returngroup:\n R_group = np.tanh(np.mean(np.arctanh(np.array(R_group)), axis=0))\n return np.array(R_group)\n",
"role": "user"
},
{
"content": "make_functional_connectivity",
"role": "assistant"
}
] |
def x(self, f, tag, save_directory, suffix=None):
file_name = f.split('/')[-1].split('.')[0]
if tag != '':
tag = '_' + tag
if suffix:
file_name, _ = drop_bids_suffix(file_name)
save_name = file_name + tag
save_name += '_' + suffix
else:
save_name = file_name + tag
paths_post_pipeline = f.split(self.pipeline)
if self.pipeline_subdir:
paths_post_pipeline = paths_post_pipeline[1].split(self.pipeline_subdir
)[0]
else:
paths_post_pipeline = paths_post_pipeline[1].split(file_name)[0]
base_dir = (self.BIDS_dir + '/derivatives/' + 'teneto_' + teneto.
__version__ + '/' + paths_post_pipeline + '/')
save_dir = base_dir + '/' + save_directory + '/'
if not os.path.exists(save_dir):
try:
os.makedirs(save_dir)
except:
time.sleep(2)
if not os.path.exists(self.BIDS_dir + '/derivatives/' + 'teneto_' +
teneto.__version__ + '/dataset_description.json'):
try:
with open(self.BIDS_dir + '/derivatives/' + 'teneto_' + teneto.
__version__ + '/dataset_description.json', 'w') as fs:
json.dump(self.tenetoinfo, fs)
except:
time.sleep(2)
return save_name, save_dir, base_dir
| save_namepaths_bids_derivatives | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, f, tag, save_directory, suffix=None):\n file_name = f.split('/')[-1].split('.')[0]\n if tag != '':\n tag = '_' + tag\n if suffix:\n file_name, _ = drop_bids_suffix(file_name)\n save_name = file_name + tag\n save_name += '_' + suffix\n else:\n save_name = file_name + tag\n paths_post_pipeline = f.split(self.pipeline)\n if self.pipeline_subdir:\n paths_post_pipeline = paths_post_pipeline[1].split(self.pipeline_subdir\n )[0]\n else:\n paths_post_pipeline = paths_post_pipeline[1].split(file_name)[0]\n base_dir = (self.BIDS_dir + '/derivatives/' + 'teneto_' + teneto.\n __version__ + '/' + paths_post_pipeline + '/')\n save_dir = base_dir + '/' + save_directory + '/'\n if not os.path.exists(save_dir):\n try:\n os.makedirs(save_dir)\n except:\n time.sleep(2)\n if not os.path.exists(self.BIDS_dir + '/derivatives/' + 'teneto_' +\n teneto.__version__ + '/dataset_description.json'):\n try:\n with open(self.BIDS_dir + '/derivatives/' + 'teneto_' + teneto.\n __version__ + '/dataset_description.json', 'w') as fs:\n json.dump(self.tenetoinfo, fs)\n except:\n time.sleep(2)\n return save_name, save_dir, base_dir\n",
"role": "user"
},
{
"content": "save_namepaths_bids_derivatives",
"role": "assistant"
}
] |
def x(self, tag, quiet=1):
if not self.pipeline:
print('Please set pipeline first.')
self.get_pipeline_alternatives(quiet)
else:
if tag == 'sub':
datapath = self.BIDS_dir + '/derivatives/' + self.pipeline + '/'
tag_alternatives = [f.split('sub-')[1] for f in os.listdir(
datapath) if os.path.isdir(datapath + f) and 'sub-' in f]
elif tag == 'ses':
tag_alternatives = []
for sub in self.bids_tags['sub']:
tag_alternatives += [f.split('ses-')[1] for f in os.listdir
(self.BIDS_dir + '/derivatives/' + self.pipeline + '/' +
'sub-' + sub) if 'ses' in f]
tag_alternatives = set(tag_alternatives)
else:
files = self.get_selected_files(quiet=1)
tag_alternatives = []
for f in files:
f = f.split('.')[0]
f = f.split('/')[-1]
tag_alternatives += [t.split('-')[1] for t in f.split('_') if
t.split('-')[0] == tag]
tag_alternatives = set(tag_alternatives)
if quiet == 0:
print(tag + ' alternatives: ' + ', '.join(tag_alternatives))
return list(tag_alternatives)
| get_tags | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, tag, quiet=1):\n if not self.pipeline:\n print('Please set pipeline first.')\n self.get_pipeline_alternatives(quiet)\n else:\n if tag == 'sub':\n datapath = self.BIDS_dir + '/derivatives/' + self.pipeline + '/'\n tag_alternatives = [f.split('sub-')[1] for f in os.listdir(\n datapath) if os.path.isdir(datapath + f) and 'sub-' in f]\n elif tag == 'ses':\n tag_alternatives = []\n for sub in self.bids_tags['sub']:\n tag_alternatives += [f.split('ses-')[1] for f in os.listdir\n (self.BIDS_dir + '/derivatives/' + self.pipeline + '/' +\n 'sub-' + sub) if 'ses' in f]\n tag_alternatives = set(tag_alternatives)\n else:\n files = self.get_selected_files(quiet=1)\n tag_alternatives = []\n for f in files:\n f = f.split('.')[0]\n f = f.split('/')[-1]\n tag_alternatives += [t.split('-')[1] for t in f.split('_') if\n t.split('-')[0] == tag]\n tag_alternatives = set(tag_alternatives)\n if quiet == 0:\n print(tag + ' alternatives: ' + ', '.join(tag_alternatives))\n return list(tag_alternatives)\n",
"role": "user"
},
{
"content": "get_tags",
"role": "assistant"
}
] |
def x(self, quiet=0):
if not os.path.exists(self.BIDS_dir + '/derivatives/'):
print('Derivative directory not found. Is the data preprocessed?')
else:
pipeline_alternatives = os.listdir(self.BIDS_dir + '/derivatives/')
if quiet == 0:
print('Derivative alternatives: ' + ', '.join(
pipeline_alternatives))
return list(pipeline_alternatives)
| get_pipeline_alternatives | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, quiet=0):\n if not os.path.exists(self.BIDS_dir + '/derivatives/'):\n print('Derivative directory not found. Is the data preprocessed?')\n else:\n pipeline_alternatives = os.listdir(self.BIDS_dir + '/derivatives/')\n if quiet == 0:\n print('Derivative alternatives: ' + ', '.join(\n pipeline_alternatives))\n return list(pipeline_alternatives)\n",
"role": "user"
},
{
"content": "get_pipeline_alternatives",
"role": "assistant"
}
] |
def x(self, quiet=0):
if not self.pipeline:
print('Please set pipeline first.')
self.get_pipeline_alternatives()
else:
pipeline_subdir_alternatives = []
for s in self.bids_tags['sub']:
derdir_files = os.listdir(self.BIDS_dir + '/derivatives/' +
self.pipeline + '/sub-' + s + '/func/')
pipeline_subdir_alternatives += [f for f in derdir_files if os.
path.isdir(self.BIDS_dir + '/derivatives/' + self.pipeline +
'/sub-' + s + '/func/' + f)]
pipeline_subdir_alternatives = set(pipeline_subdir_alternatives)
if quiet == 0:
print('Pipeline_subdir alternatives: ' + ', '.join(
pipeline_subdir_alternatives))
return list(pipeline_subdir_alternatives)
| get_pipeline_subdir_alternatives | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, quiet=0):\n if not self.pipeline:\n print('Please set pipeline first.')\n self.get_pipeline_alternatives()\n else:\n pipeline_subdir_alternatives = []\n for s in self.bids_tags['sub']:\n derdir_files = os.listdir(self.BIDS_dir + '/derivatives/' +\n self.pipeline + '/sub-' + s + '/func/')\n pipeline_subdir_alternatives += [f for f in derdir_files if os.\n path.isdir(self.BIDS_dir + '/derivatives/' + self.pipeline +\n '/sub-' + s + '/func/' + f)]\n pipeline_subdir_alternatives = set(pipeline_subdir_alternatives)\n if quiet == 0:\n print('Pipeline_subdir alternatives: ' + ', '.join(\n pipeline_subdir_alternatives))\n return list(pipeline_subdir_alternatives)\n",
"role": "user"
},
{
"content": "get_pipeline_subdir_alternatives",
"role": "assistant"
}
] |
def x(self, pipeline='pipeline', forfile=None, quiet=0, allowedfileformats=
'default'):
file_dict = dict(self.bids_tags)
if allowedfileformats == 'default':
allowedfileformats = ['.tsv', '.nii.gz']
if forfile:
if isinstance(forfile, str):
forfile = get_bids_tag(forfile, 'all')
for n in forfile.keys():
file_dict[n] = [forfile[n]]
non_entries = []
for n in file_dict:
if not file_dict[n]:
non_entries.append(n)
for n in non_entries:
file_dict.pop(n)
file_components = []
for k in ['sub', 'ses', 'task', 'run']:
if k in file_dict:
file_components.append([(k + '-' + t) for t in file_dict[k]])
file_list = list(itertools.product(*file_components))
if pipeline == 'pipeline':
mdir = self.BIDS_dir + '/derivatives/' + self.pipeline
elif pipeline == 'confound' and self.confound_pipeline:
mdir = self.BIDS_dir + '/derivatives/' + self.confound_pipeline
elif pipeline == 'confound':
mdir = self.BIDS_dir + '/derivatives/' + self.pipeline
elif pipeline == 'functionalconnectivity':
mdir = self.BIDS_dir + '/derivatives/teneto_' + teneto.__version__
else:
raise ValueError('unknown request')
found_files = []
for f in file_list:
wdir = str(mdir)
sub = [t for t in f if t.startswith('sub')]
ses = [t for t in f if t.startswith('ses')]
wdir += '/' + sub[0] + '/'
if ses:
wdir += '/' + ses[0] + '/'
wdir += '/func/'
if pipeline == 'pipeline':
wdir += '/' + self.pipeline_subdir + '/'
fileending = [(self.bids_suffix + f) for f in allowedfileformats]
elif pipeline == 'functionalconnectivity':
wdir += '/fc/'
fileending = [('conn' + f) for f in allowedfileformats]
elif pipeline == 'confound':
fileending = [('confounds' + f) for f in allowedfileformats]
if os.path.exists(wdir):
found = []
for ff in os.listdir(wdir):
ftags = get_bids_tag(ff, 'all')
t = [t for t in ftags if t in file_dict and ftags[t] in
file_dict[t]]
if len(t) == len(file_dict):
found.append(ff)
found = [f for f in found for e in fileending if f.endswith(e)]
if pipeline == 'confound':
found = [i for i in found if '_confounds' in i]
else:
found = [i for i in found if '_confounds' not in i]
found = list(map(str.__add__, [re.sub('/+', '/', wdir)] * len(
found), found))
found = [i for i in found if not any([(bf in i) for bf in self.
bad_files])]
if found:
found_files += found
if quiet == -1:
print(wdir)
found_files = list(set(found_files))
if quiet == 0:
print(found_files)
return found_files
| get_selected_files | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, pipeline='pipeline', forfile=None, quiet=0, allowedfileformats=\n 'default'):\n file_dict = dict(self.bids_tags)\n if allowedfileformats == 'default':\n allowedfileformats = ['.tsv', '.nii.gz']\n if forfile:\n if isinstance(forfile, str):\n forfile = get_bids_tag(forfile, 'all')\n for n in forfile.keys():\n file_dict[n] = [forfile[n]]\n non_entries = []\n for n in file_dict:\n if not file_dict[n]:\n non_entries.append(n)\n for n in non_entries:\n file_dict.pop(n)\n file_components = []\n for k in ['sub', 'ses', 'task', 'run']:\n if k in file_dict:\n file_components.append([(k + '-' + t) for t in file_dict[k]])\n file_list = list(itertools.product(*file_components))\n if pipeline == 'pipeline':\n mdir = self.BIDS_dir + '/derivatives/' + self.pipeline\n elif pipeline == 'confound' and self.confound_pipeline:\n mdir = self.BIDS_dir + '/derivatives/' + self.confound_pipeline\n elif pipeline == 'confound':\n mdir = self.BIDS_dir + '/derivatives/' + self.pipeline\n elif pipeline == 'functionalconnectivity':\n mdir = self.BIDS_dir + '/derivatives/teneto_' + teneto.__version__\n else:\n raise ValueError('unknown request')\n found_files = []\n for f in file_list:\n wdir = str(mdir)\n sub = [t for t in f if t.startswith('sub')]\n ses = [t for t in f if t.startswith('ses')]\n wdir += '/' + sub[0] + '/'\n if ses:\n wdir += '/' + ses[0] + '/'\n wdir += '/func/'\n if pipeline == 'pipeline':\n wdir += '/' + self.pipeline_subdir + '/'\n fileending = [(self.bids_suffix + f) for f in allowedfileformats]\n elif pipeline == 'functionalconnectivity':\n wdir += '/fc/'\n fileending = [('conn' + f) for f in allowedfileformats]\n elif pipeline == 'confound':\n fileending = [('confounds' + f) for f in allowedfileformats]\n if os.path.exists(wdir):\n found = []\n for ff in os.listdir(wdir):\n ftags = get_bids_tag(ff, 'all')\n t = [t for t in ftags if t in file_dict and ftags[t] in\n file_dict[t]]\n if len(t) == len(file_dict):\n found.append(ff)\n found = [f for f in 
found for e in fileending if f.endswith(e)]\n if pipeline == 'confound':\n found = [i for i in found if '_confounds' in i]\n else:\n found = [i for i in found if '_confounds' not in i]\n found = list(map(str.__add__, [re.sub('/+', '/', wdir)] * len(\n found), found))\n found = [i for i in found if not any([(bf in i) for bf in self.\n bad_files])]\n if found:\n found_files += found\n if quiet == -1:\n print(wdir)\n found_files = list(set(found_files))\n if quiet == 0:\n print(found_files)\n return found_files\n",
"role": "user"
},
{
"content": "get_selected_files",
"role": "assistant"
}
] |
def x(self, confound, exclusion_criteria, confound_stat='mean'):
self.add_history(inspect.stack()[0][3], locals(), 1)
if isinstance(confound, str):
confound = [confound]
if isinstance(exclusion_criteria, str):
exclusion_criteria = [exclusion_criteria]
if isinstance(confound_stat, str):
confound_stat = [confound_stat]
if len(exclusion_criteria) != len(confound):
raise ValueError(
'Same number of confound names and exclusion criteria must be given'
)
if len(confound_stat) != len(confound):
raise ValueError(
'Same number of confound names and confound stats must be given')
relex, crit = process_exclusion_criteria(exclusion_criteria)
files = sorted(self.get_selected_files(quiet=1))
confound_files = sorted(self.get_selected_files(quiet=1, pipeline=
'confound'))
files, confound_files = confound_matching(files, confound_files)
bad_files = []
bs = 0
foundconfound = []
foundreason = []
for s, cfile in enumerate(confound_files):
df = load_tabular_file(cfile, index_col=None)
found_bad_subject = False
for i, _ in enumerate(confound):
if confound_stat[i] == 'median':
if relex[i](df[confound[i]].median(), crit[i]):
found_bad_subject = True
elif confound_stat[i] == 'mean':
if relex[i](df[confound[i]].mean(), crit[i]):
found_bad_subject = True
elif confound_stat[i] == 'std':
if relex[i](df[i][confound[i]].std(), crit[i]):
found_bad_subject = True
if found_bad_subject:
foundconfound.append(confound[i])
foundreason.append(exclusion_criteria[i])
if found_bad_subject:
bad_files.append(files[s])
bs += 1
self.set_bad_files(bad_files, reason=
'excluded file (confound over specfied stat threshold)')
for i, f in enumerate(bad_files):
sidecar = get_sidecar(f)
sidecar['file_exclusion'] = {}
sidecar['confound'] = foundconfound[i]
sidecar['threshold'] = foundreason[i]
for af in ['.tsv', '.nii.gz']:
f = f.split(af)[0]
f += '.json'
with open(f, 'w') as fs:
json.dump(sidecar, fs)
print('Removed ' + str(bs) + ' files from inclusion.')
| set_exclusion_file | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, confound, exclusion_criteria, confound_stat='mean'):\n self.add_history(inspect.stack()[0][3], locals(), 1)\n if isinstance(confound, str):\n confound = [confound]\n if isinstance(exclusion_criteria, str):\n exclusion_criteria = [exclusion_criteria]\n if isinstance(confound_stat, str):\n confound_stat = [confound_stat]\n if len(exclusion_criteria) != len(confound):\n raise ValueError(\n 'Same number of confound names and exclusion criteria must be given'\n )\n if len(confound_stat) != len(confound):\n raise ValueError(\n 'Same number of confound names and confound stats must be given')\n relex, crit = process_exclusion_criteria(exclusion_criteria)\n files = sorted(self.get_selected_files(quiet=1))\n confound_files = sorted(self.get_selected_files(quiet=1, pipeline=\n 'confound'))\n files, confound_files = confound_matching(files, confound_files)\n bad_files = []\n bs = 0\n foundconfound = []\n foundreason = []\n for s, cfile in enumerate(confound_files):\n df = load_tabular_file(cfile, index_col=None)\n found_bad_subject = False\n for i, _ in enumerate(confound):\n if confound_stat[i] == 'median':\n if relex[i](df[confound[i]].median(), crit[i]):\n found_bad_subject = True\n elif confound_stat[i] == 'mean':\n if relex[i](df[confound[i]].mean(), crit[i]):\n found_bad_subject = True\n elif confound_stat[i] == 'std':\n if relex[i](df[i][confound[i]].std(), crit[i]):\n found_bad_subject = True\n if found_bad_subject:\n foundconfound.append(confound[i])\n foundreason.append(exclusion_criteria[i])\n if found_bad_subject:\n bad_files.append(files[s])\n bs += 1\n self.set_bad_files(bad_files, reason=\n 'excluded file (confound over specfied stat threshold)')\n for i, f in enumerate(bad_files):\n sidecar = get_sidecar(f)\n sidecar['file_exclusion'] = {}\n sidecar['confound'] = foundconfound[i]\n sidecar['threshold'] = foundreason[i]\n for af in ['.tsv', '.nii.gz']:\n f = f.split(af)[0]\n f += '.json'\n with open(f, 'w') as fs:\n json.dump(sidecar, 
fs)\n print('Removed ' + str(bs) + ' files from inclusion.')\n",
"role": "user"
},
{
"content": "set_exclusion_file",
"role": "assistant"
}
] |
def x(self, confound, exclusion_criteria, replace_with, tol=1, overwrite=
True, desc=None):
self.add_history(inspect.stack()[0][3], locals(), 1)
if isinstance(confound, str):
confound = [confound]
if isinstance(exclusion_criteria, str):
exclusion_criteria = [exclusion_criteria]
if len(exclusion_criteria) != len(confound):
raise ValueError(
'Same number of confound names and exclusion criteria must be given'
)
relex, crit = process_exclusion_criteria(exclusion_criteria)
files = sorted(self.get_selected_files(quiet=1))
confound_files = sorted(self.get_selected_files(quiet=1, pipeline=
'confound'))
files, confound_files = confound_matching(files, confound_files)
bad_files = []
for i, cfile in enumerate(confound_files):
data = load_tabular_file(files[i]).values
df = load_tabular_file(cfile, index_col=None)
ind = []
for ci, c in enumerate(confound):
ind = df[relex[ci](df[c], crit[ci])].index
if replace_with == 'cubicspline':
if 0 in ind:
ind = np.delete(ind, np.where(ind == 0))
if df.index.max():
ind = np.delete(ind, np.where(ind == df.index.max()))
data[:, ind.astype(int)] = np.nan
nanind = np.where(np.isnan(data[0, :]))[0]
badpoints_n = len(nanind)
if badpoints_n / np.array(len(df)) > tol:
bad_files.append(files[i])
nonnanind = np.where(np.isnan(data[0, :]) == 0)[0]
nanind = nanind[nanind > nonnanind.min()]
nanind = nanind[nanind < nonnanind.max()]
if replace_with == 'cubicspline':
for n in range(data.shape[0]):
interp = interp1d(nonnanind, data[n, nonnanind], kind='cubic')
data[n, nanind] = interp(nanind)
data = pd.DataFrame(data)
sname, _ = drop_bids_suffix(files[i])
if self.pipeline != 'teneto_' + teneto.__version__:
sname = sname.split('/')[-1]
spath = (self.BIDS_dir + '/derivatives/' + 'teneto_' + teneto.
__version__ + '/')
tags = get_bids_tag(sname, ['sub', 'ses'])
spath += 'sub-' + tags['sub'] + '/'
if 'ses' in tags:
spath += 'ses-' + tags['ses'] + '/'
spath += 'func/'
if self.pipeline_subdir:
spath += self.pipeline_subdir + '/'
make_directories(spath)
sname = spath + sname
if 'desc' in sname and desc:
desctag = get_bids_tag(sname.split('/')[-1], 'desc')
sname = ''.join(sname.split('desc-' + desctag['desc']))
sname += '_desc-' + desc
if os.path.exists(sname + self.bids_suffix + '.tsv'
) and overwrite == False:
raise ValueError(
'overwrite is set to False, but non-unique filename. Set unique desc tag'
)
data.to_csv(sname + '_' + self.bids_suffix + '.tsv', sep='\t')
sidecar = get_sidecar(files[i])
sidecar['scrubbed_timepoints'] = {}
sidecar['scrubbed_timepoints']['description'] = (
'Scrubbing which censors timepoints where the confounds where above a certain time-points. Censored time-points are replaced with replacement value (nans or cubic spline). Output of teneto.TenetoBIDS.set_exclusion_timepoint.'
)
sidecar['scrubbed_timepoints']['confound'] = ','.join(confound)
sidecar['scrubbed_timepoints']['threshold'] = ','.join(
exclusion_criteria)
sidecar['scrubbed_timepoints']['replacement'] = replace_with
sidecar['scrubbed_timepoints']['badpoint_number'] = badpoints_n
sidecar['scrubbed_timepoints']['badpoint_ratio'
] = badpoints_n / np.array(len(df))
sidecar['scrubbed_timepoints']['file_exclusion_when_badpoint_ratio'
] = tol
with open(sname + '_' + self.bids_suffix + '.json', 'w') as fs:
json.dump(sidecar, fs)
self.set_bad_files(bad_files, reason=
'scrubbing (number of points over threshold)')
self.set_pipeline('teneto_' + teneto.__version__)
if desc:
self.set_bids_tags({'desc': desc.split('-')[1]})
| set_exclusion_timepoint | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, confound, exclusion_criteria, replace_with, tol=1, overwrite=\n True, desc=None):\n self.add_history(inspect.stack()[0][3], locals(), 1)\n if isinstance(confound, str):\n confound = [confound]\n if isinstance(exclusion_criteria, str):\n exclusion_criteria = [exclusion_criteria]\n if len(exclusion_criteria) != len(confound):\n raise ValueError(\n 'Same number of confound names and exclusion criteria must be given'\n )\n relex, crit = process_exclusion_criteria(exclusion_criteria)\n files = sorted(self.get_selected_files(quiet=1))\n confound_files = sorted(self.get_selected_files(quiet=1, pipeline=\n 'confound'))\n files, confound_files = confound_matching(files, confound_files)\n bad_files = []\n for i, cfile in enumerate(confound_files):\n data = load_tabular_file(files[i]).values\n df = load_tabular_file(cfile, index_col=None)\n ind = []\n for ci, c in enumerate(confound):\n ind = df[relex[ci](df[c], crit[ci])].index\n if replace_with == 'cubicspline':\n if 0 in ind:\n ind = np.delete(ind, np.where(ind == 0))\n if df.index.max():\n ind = np.delete(ind, np.where(ind == df.index.max()))\n data[:, ind.astype(int)] = np.nan\n nanind = np.where(np.isnan(data[0, :]))[0]\n badpoints_n = len(nanind)\n if badpoints_n / np.array(len(df)) > tol:\n bad_files.append(files[i])\n nonnanind = np.where(np.isnan(data[0, :]) == 0)[0]\n nanind = nanind[nanind > nonnanind.min()]\n nanind = nanind[nanind < nonnanind.max()]\n if replace_with == 'cubicspline':\n for n in range(data.shape[0]):\n interp = interp1d(nonnanind, data[n, nonnanind], kind='cubic')\n data[n, nanind] = interp(nanind)\n data = pd.DataFrame(data)\n sname, _ = drop_bids_suffix(files[i])\n if self.pipeline != 'teneto_' + teneto.__version__:\n sname = sname.split('/')[-1]\n spath = (self.BIDS_dir + '/derivatives/' + 'teneto_' + teneto.\n __version__ + '/')\n tags = get_bids_tag(sname, ['sub', 'ses'])\n spath += 'sub-' + tags['sub'] + '/'\n if 'ses' in tags:\n spath += 'ses-' + tags['ses'] + 
'/'\n spath += 'func/'\n if self.pipeline_subdir:\n spath += self.pipeline_subdir + '/'\n make_directories(spath)\n sname = spath + sname\n if 'desc' in sname and desc:\n desctag = get_bids_tag(sname.split('/')[-1], 'desc')\n sname = ''.join(sname.split('desc-' + desctag['desc']))\n sname += '_desc-' + desc\n if os.path.exists(sname + self.bids_suffix + '.tsv'\n ) and overwrite == False:\n raise ValueError(\n 'overwrite is set to False, but non-unique filename. Set unique desc tag'\n )\n data.to_csv(sname + '_' + self.bids_suffix + '.tsv', sep='\\t')\n sidecar = get_sidecar(files[i])\n sidecar['scrubbed_timepoints'] = {}\n sidecar['scrubbed_timepoints']['description'] = (\n 'Scrubbing which censors timepoints where the confounds where above a certain time-points. Censored time-points are replaced with replacement value (nans or cubic spline). Output of teneto.TenetoBIDS.set_exclusion_timepoint.'\n )\n sidecar['scrubbed_timepoints']['confound'] = ','.join(confound)\n sidecar['scrubbed_timepoints']['threshold'] = ','.join(\n exclusion_criteria)\n sidecar['scrubbed_timepoints']['replacement'] = replace_with\n sidecar['scrubbed_timepoints']['badpoint_number'] = badpoints_n\n sidecar['scrubbed_timepoints']['badpoint_ratio'\n ] = badpoints_n / np.array(len(df))\n sidecar['scrubbed_timepoints']['file_exclusion_when_badpoint_ratio'\n ] = tol\n with open(sname + '_' + self.bids_suffix + '.json', 'w') as fs:\n json.dump(sidecar, fs)\n self.set_bad_files(bad_files, reason=\n 'scrubbing (number of points over threshold)')\n self.set_pipeline('teneto_' + teneto.__version__)\n if desc:\n self.set_bids_tags({'desc': desc.split('-')[1]})\n",
"role": "user"
},
{
"content": "set_exclusion_timepoint",
"role": "assistant"
}
] |
def x(self, parcellation, parc_type=None, parc_params=None, network=
'defaults', update_pipeline=True, removeconfounds=False, tag=None,
njobs=None, clean_params=None, yeonetworkn=None):
if not njobs:
njobs = self.njobs
self.add_history(inspect.stack()[0][3], locals(), 1)
parc_name = parcellation.split('_')[0].lower()
if not self.confounds and removeconfounds:
raise ValueError(
"Specified confounds are not found. Make sure that you have run self.set_confunds(['Confound1','Confound2']) first."
)
if update_pipeline == False and removeconfounds:
raise ValueError(
'Pipeline must be updated in order to remove confounds within this funciton.'
)
files = self.get_selected_files(quiet=1)
self.set_network_communities(parcellation, netn=yeonetworkn)
if not tag:
tag = ''
else:
tag = 'desc-' + tag
if not parc_params:
parc_params = {}
with ProcessPoolExecutor(max_workers=njobs) as executor:
job = {executor.submit(self._run_make_parcellation, f, i, tag,
parcellation, parc_name, parc_type, parc_params) for i, f in
enumerate(files)}
for j in as_completed(job):
j.result()
if update_pipeline == True:
if not self.confound_pipeline and len(self.get_selected_files(quiet
=1, pipeline='confound')) > 0:
self.set_confound_pipeline(self.pipeline)
self.set_pipeline('teneto_' + teneto.__version__)
self.set_pipeline_subdir('parcellation')
if tag:
self.set_bids_tags({'desc': tag.split('-')[1]})
self.set_bids_suffix('roi')
if removeconfounds:
self.removeconfounds(clean_params=clean_params, transpose=None,
njobs=njobs)
| make_parcellation | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, parcellation, parc_type=None, parc_params=None, network=\n 'defaults', update_pipeline=True, removeconfounds=False, tag=None,\n njobs=None, clean_params=None, yeonetworkn=None):\n if not njobs:\n njobs = self.njobs\n self.add_history(inspect.stack()[0][3], locals(), 1)\n parc_name = parcellation.split('_')[0].lower()\n if not self.confounds and removeconfounds:\n raise ValueError(\n \"Specified confounds are not found. Make sure that you have run self.set_confunds(['Confound1','Confound2']) first.\"\n )\n if update_pipeline == False and removeconfounds:\n raise ValueError(\n 'Pipeline must be updated in order to remove confounds within this funciton.'\n )\n files = self.get_selected_files(quiet=1)\n self.set_network_communities(parcellation, netn=yeonetworkn)\n if not tag:\n tag = ''\n else:\n tag = 'desc-' + tag\n if not parc_params:\n parc_params = {}\n with ProcessPoolExecutor(max_workers=njobs) as executor:\n job = {executor.submit(self._run_make_parcellation, f, i, tag,\n parcellation, parc_name, parc_type, parc_params) for i, f in\n enumerate(files)}\n for j in as_completed(job):\n j.result()\n if update_pipeline == True:\n if not self.confound_pipeline and len(self.get_selected_files(quiet\n =1, pipeline='confound')) > 0:\n self.set_confound_pipeline(self.pipeline)\n self.set_pipeline('teneto_' + teneto.__version__)\n self.set_pipeline_subdir('parcellation')\n if tag:\n self.set_bids_tags({'desc': tag.split('-')[1]})\n self.set_bids_suffix('roi')\n if removeconfounds:\n self.removeconfounds(clean_params=clean_params, transpose=None,\n njobs=njobs)\n",
"role": "user"
},
{
"content": "make_parcellation",
"role": "assistant"
}
] |
def x(self, community_detection_params, community_type='temporal', tag=None,
file_hdr=False, file_idx=False, njobs=None):
if not njobs:
njobs = self.njobs
self.add_history(inspect.stack()[0][3], locals(), 1)
if not tag:
tag = ''
else:
tag = 'desc-' + tag
if community_type == 'temporal':
files = self.get_selected_files(quiet=True)
for f in files:
if 'tvc' not in f:
raise ValueError(
'tvc tag not found in filename. TVC data must be used in communitydetection (perhaps run TenetoBIDS.derive first?).'
)
elif community_type == 'static':
files = self.get_selected_files(quiet=True, pipeline=
'functionalconnectivity')
with ProcessPoolExecutor(max_workers=njobs) as executor:
job = {executor.submit(self._run_communitydetection, f,
community_detection_params, community_type, file_hdr, file_idx,
tag) for i, f in enumerate(files) if all([(t + '_' in f or t +
'.' in f) for t in tag])}
for j in as_completed(job):
j.result()
| communitydetection | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, community_detection_params, community_type='temporal', tag=None,\n file_hdr=False, file_idx=False, njobs=None):\n if not njobs:\n njobs = self.njobs\n self.add_history(inspect.stack()[0][3], locals(), 1)\n if not tag:\n tag = ''\n else:\n tag = 'desc-' + tag\n if community_type == 'temporal':\n files = self.get_selected_files(quiet=True)\n for f in files:\n if 'tvc' not in f:\n raise ValueError(\n 'tvc tag not found in filename. TVC data must be used in communitydetection (perhaps run TenetoBIDS.derive first?).'\n )\n elif community_type == 'static':\n files = self.get_selected_files(quiet=True, pipeline=\n 'functionalconnectivity')\n with ProcessPoolExecutor(max_workers=njobs) as executor:\n job = {executor.submit(self._run_communitydetection, f,\n community_detection_params, community_type, file_hdr, file_idx,\n tag) for i, f in enumerate(files) if all([(t + '_' in f or t +\n '.' in f) for t in tag])}\n for j in as_completed(job):\n j.result()\n",
"role": "user"
},
{
"content": "communitydetection",
"role": "assistant"
}
] |
def x(self, confounds=None, clean_params=None, transpose=None, njobs=None,
update_pipeline=True, overwrite=True, tag=None):
if not njobs:
njobs = self.njobs
self.add_history(inspect.stack()[0][3], locals(), 1)
if not self.confounds and not confounds:
raise ValueError(
"Specified confounds are not found. Make sure that you have run self.set_confunds(['Confound1','Confound2']) first or pass confounds as input to function."
)
if not tag:
tag = ''
else:
tag = 'desc-' + tag
if confounds:
self.set_confounds(confounds)
files = sorted(self.get_selected_files(quiet=1))
confound_files = sorted(self.get_selected_files(quiet=1, pipeline=
'confound'))
files, confound_files = confound_matching(files, confound_files)
if not clean_params:
clean_params = {}
with ProcessPoolExecutor(max_workers=njobs) as executor:
job = {executor.submit(self._run_removeconfounds, f, confound_files
[i], clean_params, transpose, overwrite, tag) for i, f in
enumerate(files)}
for j in as_completed(job):
j.result()
self.set_pipeline('teneto_' + teneto.__version__)
self.set_bids_suffix('roi')
if tag:
self.set_bids_tags({'desc': tag.split('-')[1]})
| removeconfounds | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, confounds=None, clean_params=None, transpose=None, njobs=None,\n update_pipeline=True, overwrite=True, tag=None):\n if not njobs:\n njobs = self.njobs\n self.add_history(inspect.stack()[0][3], locals(), 1)\n if not self.confounds and not confounds:\n raise ValueError(\n \"Specified confounds are not found. Make sure that you have run self.set_confunds(['Confound1','Confound2']) first or pass confounds as input to function.\"\n )\n if not tag:\n tag = ''\n else:\n tag = 'desc-' + tag\n if confounds:\n self.set_confounds(confounds)\n files = sorted(self.get_selected_files(quiet=1))\n confound_files = sorted(self.get_selected_files(quiet=1, pipeline=\n 'confound'))\n files, confound_files = confound_matching(files, confound_files)\n if not clean_params:\n clean_params = {}\n with ProcessPoolExecutor(max_workers=njobs) as executor:\n job = {executor.submit(self._run_removeconfounds, f, confound_files\n [i], clean_params, transpose, overwrite, tag) for i, f in\n enumerate(files)}\n for j in as_completed(job):\n j.result()\n self.set_pipeline('teneto_' + teneto.__version__)\n self.set_bids_suffix('roi')\n if tag:\n self.set_bids_tags({'desc': tag.split('-')[1]})\n",
"role": "user"
},
{
"content": "removeconfounds",
"role": "assistant"
}
] |
def x(self, measure=None, measure_params=None, tag=None, njobs=None):
if not njobs:
njobs = self.njobs
self.add_history(inspect.stack()[0][3], locals(), 1)
if isinstance(measure, str):
measure = [measure]
if isinstance(measure_params, dict):
measure_params = [measure_params]
if measure_params and len(measure) != len(measure_params):
raise ValueError('Number of identified measure_params (' + str(len(
measure_params)) +
') differs from number of identified measures (' + str(len(
measure)) +
'). Leave black dictionary if default methods are wanted')
files = self.get_selected_files(quiet=1)
if not tag:
tag = ''
else:
tag = 'desc-' + tag
with ProcessPoolExecutor(max_workers=njobs) as executor:
job = {executor.submit(self._run_networkmeasures, f, tag, measure,
measure_params) for f in files}
for j in as_completed(job):
j.result()
| networkmeasures | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, measure=None, measure_params=None, tag=None, njobs=None):\n if not njobs:\n njobs = self.njobs\n self.add_history(inspect.stack()[0][3], locals(), 1)\n if isinstance(measure, str):\n measure = [measure]\n if isinstance(measure_params, dict):\n measure_params = [measure_params]\n if measure_params and len(measure) != len(measure_params):\n raise ValueError('Number of identified measure_params (' + str(len(\n measure_params)) +\n ') differs from number of identified measures (' + str(len(\n measure)) +\n '). Leave black dictionary if default methods are wanted')\n files = self.get_selected_files(quiet=1)\n if not tag:\n tag = ''\n else:\n tag = 'desc-' + tag\n with ProcessPoolExecutor(max_workers=njobs) as executor:\n job = {executor.submit(self._run_networkmeasures, f, tag, measure,\n measure_params) for f in files}\n for j in as_completed(job):\n j.result()\n",
"role": "user"
},
{
"content": "networkmeasures",
"role": "assistant"
}
] |
def x(self, confound_pipeline):
self.add_history(inspect.stack()[0][3], locals(), 1)
if not os.path.exists(self.BIDS_dir + '/derivatives/' + confound_pipeline):
print('Specified direvative directory not found.')
self.get_pipeline_alternatives()
else:
self.confound_pipeline = confound_pipeline
| set_confound_pipeline | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, confound_pipeline):\n self.add_history(inspect.stack()[0][3], locals(), 1)\n if not os.path.exists(self.BIDS_dir + '/derivatives/' + confound_pipeline):\n print('Specified direvative directory not found.')\n self.get_pipeline_alternatives()\n else:\n self.confound_pipeline = confound_pipeline\n",
"role": "user"
},
{
"content": "set_confound_pipeline",
"role": "assistant"
}
] |
def x(self, bids_suffix):
self.add_history(inspect.stack()[0][3], locals(), 1)
self.bids_suffix = bids_suffix
| set_bids_suffix | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, bids_suffix):\n self.add_history(inspect.stack()[0][3], locals(), 1)\n self.bids_suffix = bids_suffix\n",
"role": "user"
},
{
"content": "set_bids_suffix",
"role": "assistant"
}
] |
def x(self, pipeline):
self.add_history(inspect.stack()[0][3], locals(), 1)
if not os.path.exists(self.BIDS_dir + '/derivatives/' + pipeline):
print('Specified direvative directory not found.')
self.get_pipeline_alternatives()
else:
self.pipeline = pipeline
| set_pipeline | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, pipeline):\n self.add_history(inspect.stack()[0][3], locals(), 1)\n if not os.path.exists(self.BIDS_dir + '/derivatives/' + pipeline):\n print('Specified direvative directory not found.')\n self.get_pipeline_alternatives()\n else:\n self.pipeline = pipeline\n",
"role": "user"
},
{
"content": "set_pipeline",
"role": "assistant"
}
] |
def x(self):
print('--- DATASET INFORMATION ---')
print('--- Subjects ---')
if self.raw_data_exists:
if self.BIDS.get_subjects():
print('Number of subjects (in dataset): ' + str(len(self.BIDS.
get_subjects())))
print('Subjects (in dataset): ' + ', '.join(self.BIDS.
get_subjects()))
else:
print(
'NO SUBJECTS FOUND (is the BIDS directory specified correctly?)'
)
print('Number of subjects (selected): ' + str(len(self.bids_tags['sub'])))
print('Subjects (selected): ' + ', '.join(self.bids_tags['sub']))
if isinstance(self.bad_subjects, list):
print('Bad subjects: ' + ', '.join(self.bad_subjects))
else:
print('Bad subjects: 0')
print('--- Tasks ---')
if self.raw_data_exists:
if self.BIDS.get_tasks():
print('Number of tasks (in dataset): ' + str(len(self.BIDS.
get_tasks())))
print('Tasks (in dataset): ' + ', '.join(self.BIDS.get_tasks()))
if 'task' in self.bids_tags:
print('Number of tasks (selected): ' + str(len(self.bids_tags['task']))
)
print('Tasks (selected): ' + ', '.join(self.bids_tags['task']))
else:
print('No task names found')
print('--- Runs ---')
if self.raw_data_exists:
if self.BIDS.get_runs():
print('Number of runs (in dataset): ' + str(len(self.BIDS.
get_runs())))
print('Runs (in dataset): ' + ', '.join(self.BIDS.get_runs()))
if 'run' in self.bids_tags:
print('Number of runs (selected): ' + str(len(self.bids_tags['run'])))
print('Rubs (selected): ' + ', '.join(self.bids_tags['run']))
else:
print('No run names found')
print('--- Sessions ---')
if self.raw_data_exists:
if self.BIDS.get_sessions():
print('Number of runs (in dataset): ' + str(len(self.BIDS.
get_sessions())))
print('Sessions (in dataset): ' + ', '.join(self.BIDS.
get_sessions()))
if 'ses' in self.bids_tags:
print('Number of sessions (selected): ' + str(len(self.bids_tags[
'ses'])))
print('Sessions (selected): ' + ', '.join(self.bids_tags['ses']))
else:
print('No session names found')
print('--- PREPROCESSED DATA (Pipelines/Derivatives) ---')
if not self.pipeline:
print('Derivative pipeline not set. To set, run TN.set_pipeline()')
else:
print('Pipeline: ' + self.pipeline)
if self.pipeline_subdir:
print('Pipeline subdirectories: ' + self.pipeline_subdir)
selected_files = self.get_selected_files(quiet=1)
if selected_files:
print('--- SELECTED DATA ---')
print('Numnber of selected files: ' + str(len(selected_files)))
print('\n - '.join(selected_files))
| print_dataset_summary | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n print('--- DATASET INFORMATION ---')\n print('--- Subjects ---')\n if self.raw_data_exists:\n if self.BIDS.get_subjects():\n print('Number of subjects (in dataset): ' + str(len(self.BIDS.\n get_subjects())))\n print('Subjects (in dataset): ' + ', '.join(self.BIDS.\n get_subjects()))\n else:\n print(\n 'NO SUBJECTS FOUND (is the BIDS directory specified correctly?)'\n )\n print('Number of subjects (selected): ' + str(len(self.bids_tags['sub'])))\n print('Subjects (selected): ' + ', '.join(self.bids_tags['sub']))\n if isinstance(self.bad_subjects, list):\n print('Bad subjects: ' + ', '.join(self.bad_subjects))\n else:\n print('Bad subjects: 0')\n print('--- Tasks ---')\n if self.raw_data_exists:\n if self.BIDS.get_tasks():\n print('Number of tasks (in dataset): ' + str(len(self.BIDS.\n get_tasks())))\n print('Tasks (in dataset): ' + ', '.join(self.BIDS.get_tasks()))\n if 'task' in self.bids_tags:\n print('Number of tasks (selected): ' + str(len(self.bids_tags['task']))\n )\n print('Tasks (selected): ' + ', '.join(self.bids_tags['task']))\n else:\n print('No task names found')\n print('--- Runs ---')\n if self.raw_data_exists:\n if self.BIDS.get_runs():\n print('Number of runs (in dataset): ' + str(len(self.BIDS.\n get_runs())))\n print('Runs (in dataset): ' + ', '.join(self.BIDS.get_runs()))\n if 'run' in self.bids_tags:\n print('Number of runs (selected): ' + str(len(self.bids_tags['run'])))\n print('Rubs (selected): ' + ', '.join(self.bids_tags['run']))\n else:\n print('No run names found')\n print('--- Sessions ---')\n if self.raw_data_exists:\n if self.BIDS.get_sessions():\n print('Number of runs (in dataset): ' + str(len(self.BIDS.\n get_sessions())))\n print('Sessions (in dataset): ' + ', '.join(self.BIDS.\n get_sessions()))\n if 'ses' in self.bids_tags:\n print('Number of sessions (selected): ' + str(len(self.bids_tags[\n 'ses'])))\n print('Sessions (selected): ' + ', '.join(self.bids_tags['ses']))\n else:\n print('No session names 
found')\n print('--- PREPROCESSED DATA (Pipelines/Derivatives) ---')\n if not self.pipeline:\n print('Derivative pipeline not set. To set, run TN.set_pipeline()')\n else:\n print('Pipeline: ' + self.pipeline)\n if self.pipeline_subdir:\n print('Pipeline subdirectories: ' + self.pipeline_subdir)\n selected_files = self.get_selected_files(quiet=1)\n if selected_files:\n print('--- SELECTED DATA ---')\n print('Numnber of selected files: ' + str(len(selected_files)))\n print('\\n - '.join(selected_files))\n",
"role": "user"
},
{
"content": "print_dataset_summary",
"role": "assistant"
}
] |
def x(cls, fname, reload_object=False):
if fname[-4:] != '.pkl':
fname += '.pkl'
with open(fname, 'rb') as f:
tnet = pickle.load(f)
if reload_object:
reloadnet = teneto.TenetoBIDS(tnet.BIDS_dir, pipeline=tnet.pipeline,
pipeline_subdir=tnet.pipeline_subdir, bids_tags=tnet.bids_tags,
bids_suffix=tnet.bids_suffix, bad_subjects=tnet.bad_subjects,
confound_pipeline=tnet.confound_pipeline, raw_data_exists=tnet.
raw_data_exists, njobs=tnet.njobs)
reloadnet.histroy = tnet.history
tnet = reloadnet
return tnet
| load_frompickle | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(cls, fname, reload_object=False):\n if fname[-4:] != '.pkl':\n fname += '.pkl'\n with open(fname, 'rb') as f:\n tnet = pickle.load(f)\n if reload_object:\n reloadnet = teneto.TenetoBIDS(tnet.BIDS_dir, pipeline=tnet.pipeline,\n pipeline_subdir=tnet.pipeline_subdir, bids_tags=tnet.bids_tags,\n bids_suffix=tnet.bids_suffix, bad_subjects=tnet.bad_subjects,\n confound_pipeline=tnet.confound_pipeline, raw_data_exists=tnet.\n raw_data_exists, njobs=tnet.njobs)\n reloadnet.histroy = tnet.history\n tnet = reloadnet\n return tnet\n",
"role": "user"
},
{
"content": "load_frompickle",
"role": "assistant"
}
] |
def x(self, datatype='tvc', tag=None, measure=''):
if datatype == 'temporalnetwork' and not measure:
raise ValueError(
"When datatype is temporalnetwork, 'measure' must also be specified."
)
self.add_history(inspect.stack()[0][3], locals(), 1)
data_list = []
trialinfo_list = []
for s in self.bids_tags['sub']:
base_path, file_list, datainfo = self._get_filelist(datatype, s,
tag, measure=measure)
if base_path:
for f in file_list:
try:
filetags = get_bids_tag(f, 'all')
data_list.append(load_tabular_file(base_path + f))
if datainfo == 'trlinfo':
trialinfo_list.append(pd.DataFrame(filetags, index=[0])
)
except pd.errors.EmptyDataError:
pass
if datatype == 'group' and len(data_list) == 1:
data_list = data_list[0]
if measure:
data_list = {measure: data_list}
setattr(self, datatype + '_data_', data_list)
if trialinfo_list:
out_trialinfo = pd.concat(trialinfo_list)
out_trialinfo.reset_index(inplace=True, drop=True)
setattr(self, datatype + '_trialinfo_', out_trialinfo)
| load_data | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, datatype='tvc', tag=None, measure=''):\n if datatype == 'temporalnetwork' and not measure:\n raise ValueError(\n \"When datatype is temporalnetwork, 'measure' must also be specified.\"\n )\n self.add_history(inspect.stack()[0][3], locals(), 1)\n data_list = []\n trialinfo_list = []\n for s in self.bids_tags['sub']:\n base_path, file_list, datainfo = self._get_filelist(datatype, s,\n tag, measure=measure)\n if base_path:\n for f in file_list:\n try:\n filetags = get_bids_tag(f, 'all')\n data_list.append(load_tabular_file(base_path + f))\n if datainfo == 'trlinfo':\n trialinfo_list.append(pd.DataFrame(filetags, index=[0])\n )\n except pd.errors.EmptyDataError:\n pass\n if datatype == 'group' and len(data_list) == 1:\n data_list = data_list[0]\n if measure:\n data_list = {measure: data_list}\n setattr(self, datatype + '_data_', data_list)\n if trialinfo_list:\n out_trialinfo = pd.concat(trialinfo_list)\n out_trialinfo.reset_index(inplace=True, drop=True)\n setattr(self, datatype + '_trialinfo_', out_trialinfo)\n",
"role": "user"
},
{
"content": "load_data",
"role": "assistant"
}
] |
def x(tnet=None, paths=None):
if tnet is not None and paths is not None:
raise ValueError('Only network or path input allowed.')
if tnet is None and paths is None:
raise ValueError('No input.')
if tnet is not None:
paths = shortest_temporal_path(tnet)
pathmat = np.zeros([paths[['from', 'to']].max().max() + 1, paths[[
'from', 'to']].max().max() + 1, paths[['t_start']].max().max() + 1]
) * np.nan
pathmat[paths['from'].values, paths['to'].values, paths['t_start'].values
] = paths['temporal-distance']
closeness = np.nansum(1 / np.nanmean(pathmat, axis=2), axis=1) / (pathmat
.shape[1] - 1)
return closeness
| temporal_closeness_centrality | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(tnet=None, paths=None):\n if tnet is not None and paths is not None:\n raise ValueError('Only network or path input allowed.')\n if tnet is None and paths is None:\n raise ValueError('No input.')\n if tnet is not None:\n paths = shortest_temporal_path(tnet)\n pathmat = np.zeros([paths[['from', 'to']].max().max() + 1, paths[[\n 'from', 'to']].max().max() + 1, paths[['t_start']].max().max() + 1]\n ) * np.nan\n pathmat[paths['from'].values, paths['to'].values, paths['t_start'].values\n ] = paths['temporal-distance']\n closeness = np.nansum(1 / np.nanmean(pathmat, axis=2), axis=1) / (pathmat\n .shape[1] - 1)\n return closeness\n",
"role": "user"
},
{
"content": "temporal_closeness_centrality",
"role": "assistant"
}
] |
def x(tnet, communities, axis=0, calc='global', decay=0):
tnet, netinfo = utils.process_input(tnet, ['C', 'G', 'TN'])
D = temporal_degree_centrality(tnet, calc='time', communities=
communities, decay=decay)
network_ids = np.unique(communities)
communities_size = np.array([sum(communities == n) for n in network_ids])
sid = np.zeros([network_ids.max() + 1, network_ids.max() + 1, tnet.
shape[-1]])
for n in network_ids:
for m in network_ids:
betweenmodulescaling = 1 / (communities_size[n] *
communities_size[m])
if netinfo['nettype'][1] == 'd':
withinmodulescaling = 1 / (communities_size[n] *
communities_size[n])
elif netinfo['nettype'][1] == 'u':
withinmodulescaling = 2 / (communities_size[n] * (
communities_size[n] - 1))
if n == m:
betweenmodulescaling = withinmodulescaling
sid[n, m, :] = withinmodulescaling * D[n, n, :
] - betweenmodulescaling * D[n, m, :]
sid[np.isnan(sid)] = 0
if calc == 'global':
return np.sum(np.sum(sid, axis=1), axis=0)
elif calc == 'communities_avg':
return np.sum(sid, axis=axis)
else:
return sid
| sid | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(tnet, communities, axis=0, calc='global', decay=0):\n tnet, netinfo = utils.process_input(tnet, ['C', 'G', 'TN'])\n D = temporal_degree_centrality(tnet, calc='time', communities=\n communities, decay=decay)\n network_ids = np.unique(communities)\n communities_size = np.array([sum(communities == n) for n in network_ids])\n sid = np.zeros([network_ids.max() + 1, network_ids.max() + 1, tnet.\n shape[-1]])\n for n in network_ids:\n for m in network_ids:\n betweenmodulescaling = 1 / (communities_size[n] *\n communities_size[m])\n if netinfo['nettype'][1] == 'd':\n withinmodulescaling = 1 / (communities_size[n] *\n communities_size[n])\n elif netinfo['nettype'][1] == 'u':\n withinmodulescaling = 2 / (communities_size[n] * (\n communities_size[n] - 1))\n if n == m:\n betweenmodulescaling = withinmodulescaling\n sid[n, m, :] = withinmodulescaling * D[n, n, :\n ] - betweenmodulescaling * D[n, m, :]\n sid[np.isnan(sid)] = 0\n if calc == 'global':\n return np.sum(np.sum(sid, axis=1), axis=0)\n elif calc == 'communities_avg':\n return np.sum(sid, axis=axis)\n else:\n return sid\n",
"role": "user"
},
{
"content": "sid",
"role": "assistant"
}
] |
def x(data, calc='edge', nodes='all', communities=None, threshold_type=None,
threshold_level=None, threshold_params=None):
if threshold_type is not None:
if threshold_params is None:
threshold_params = {}
data = binarize(data, threshold_type, threshold_level, **
threshold_params)
if calc == 'communities' and communities is None:
raise ValueError(
"Specified calc='communities' but no communities argument provided (list of clusters/modules)"
)
ict = 0
if isinstance(data, dict):
if [k for k in list(data.keys()) if k == 'intercontacttimes'] == [
'intercontacttimes']:
ict = 1
if ict == 0:
data = intercontacttimes(data)
ict_shape = data['intercontacttimes'].shape
if len(ict_shape) == 2:
node_len = ict_shape[0] * ict_shape[1]
elif len(ict_shape) == 1:
node_len = 1
else:
raise ValueError('more than two dimensions of intercontacttimes')
if isinstance(nodes, list) and len(ict_shape) > 1:
node_combinations = [[list(set(nodes))[t], list(set(nodes))[tt]] for
t in range(0, len(nodes)) for tt in range(0, len(nodes)) if t != tt
]
do_nodes = [np.ravel_multi_index(n, ict_shape) for n in
node_combinations]
else:
do_nodes = np.arange(0, node_len)
if calc == 'node':
ict = np.concatenate(data['intercontacttimes'][do_nodes, do_nodes],
axis=1)
elif calc == 'communities':
unique_communities = np.unique(communities)
ict_shape = len(unique_communities), len(unique_communities)
ict = np.array([[None] * ict_shape[0]] * ict_shape[1])
for i, s1 in enumerate(unique_communities):
for j, s2 in enumerate(unique_communities):
if s1 == s2:
ind = np.triu_indices(sum(communities == s1), k=1)
ict[i, j] = np.concatenate(data['intercontacttimes'][
ind[0], ind[1]])
else:
ict[i, j] = np.concatenate(np.concatenate(data[
'intercontacttimes'][communities == s1, :][:,
communities == s2]))
data['intercontacttimes'] = ict
do_nodes = np.arange(0, ict_shape[0] * ict_shape[1])
if len(ict_shape) > 1:
ict = data['intercontacttimes'].reshape(ict_shape[0] * ict_shape[1])
b_coeff = np.zeros(len(ict)) * np.nan
else:
b_coeff = np.zeros(1) * np.nan
ict = [data['intercontacttimes']]
for i in do_nodes:
if isinstance(ict[i], np.ndarray):
mu_ict = np.mean(ict[i])
sigma_ict = np.std(ict[i])
b_coeff[i] = (sigma_ict - mu_ict) / (sigma_ict + mu_ict)
else:
b_coeff[i] = np.nan
if len(ict_shape) > 1:
b_coeff = b_coeff.reshape(ict_shape)
return b_coeff
| bursty_coeff | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(data, calc='edge', nodes='all', communities=None, threshold_type=None,\n threshold_level=None, threshold_params=None):\n if threshold_type is not None:\n if threshold_params is None:\n threshold_params = {}\n data = binarize(data, threshold_type, threshold_level, **\n threshold_params)\n if calc == 'communities' and communities is None:\n raise ValueError(\n \"Specified calc='communities' but no communities argument provided (list of clusters/modules)\"\n )\n ict = 0\n if isinstance(data, dict):\n if [k for k in list(data.keys()) if k == 'intercontacttimes'] == [\n 'intercontacttimes']:\n ict = 1\n if ict == 0:\n data = intercontacttimes(data)\n ict_shape = data['intercontacttimes'].shape\n if len(ict_shape) == 2:\n node_len = ict_shape[0] * ict_shape[1]\n elif len(ict_shape) == 1:\n node_len = 1\n else:\n raise ValueError('more than two dimensions of intercontacttimes')\n if isinstance(nodes, list) and len(ict_shape) > 1:\n node_combinations = [[list(set(nodes))[t], list(set(nodes))[tt]] for\n t in range(0, len(nodes)) for tt in range(0, len(nodes)) if t != tt\n ]\n do_nodes = [np.ravel_multi_index(n, ict_shape) for n in\n node_combinations]\n else:\n do_nodes = np.arange(0, node_len)\n if calc == 'node':\n ict = np.concatenate(data['intercontacttimes'][do_nodes, do_nodes],\n axis=1)\n elif calc == 'communities':\n unique_communities = np.unique(communities)\n ict_shape = len(unique_communities), len(unique_communities)\n ict = np.array([[None] * ict_shape[0]] * ict_shape[1])\n for i, s1 in enumerate(unique_communities):\n for j, s2 in enumerate(unique_communities):\n if s1 == s2:\n ind = np.triu_indices(sum(communities == s1), k=1)\n ict[i, j] = np.concatenate(data['intercontacttimes'][\n ind[0], ind[1]])\n else:\n ict[i, j] = np.concatenate(np.concatenate(data[\n 'intercontacttimes'][communities == s1, :][:, \n communities == s2]))\n data['intercontacttimes'] = ict\n do_nodes = np.arange(0, ict_shape[0] * ict_shape[1])\n if len(ict_shape) > 
1:\n ict = data['intercontacttimes'].reshape(ict_shape[0] * ict_shape[1])\n b_coeff = np.zeros(len(ict)) * np.nan\n else:\n b_coeff = np.zeros(1) * np.nan\n ict = [data['intercontacttimes']]\n for i in do_nodes:\n if isinstance(ict[i], np.ndarray):\n mu_ict = np.mean(ict[i])\n sigma_ict = np.std(ict[i])\n b_coeff[i] = (sigma_ict - mu_ict) / (sigma_ict + mu_ict)\n else:\n b_coeff[i] = np.nan\n if len(ict_shape) > 1:\n b_coeff = b_coeff.reshape(ict_shape)\n return b_coeff\n",
"role": "user"
},
{
"content": "bursty_coeff",
"role": "assistant"
}
] |
def x(d, reducer='tuple', inverse=False):
if isinstance(reducer, str):
reducer = REDUCER_DICT[reducer]
flat_dict = {}
def x(d, parent=None):
for key, value in six.viewitems(d):
flat_key = reducer(parent, key)
if isinstance(value, Mapping):
_flatten(value, flat_key)
else:
if inverse:
flat_key, value = value, flat_key
if flat_key in flat_dict:
raise ValueError("duplicated key '{}'".format(flat_key))
flat_dict[flat_key] = value
_flatten(d)
return flat_dict
| flatten | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(d, reducer='tuple', inverse=False):\n if isinstance(reducer, str):\n reducer = REDUCER_DICT[reducer]\n flat_dict = {}\n\n def x(d, parent=None):\n for key, value in six.viewitems(d):\n flat_key = reducer(parent, key)\n if isinstance(value, Mapping):\n _flatten(value, flat_key)\n else:\n if inverse:\n flat_key, value = value, flat_key\n if flat_key in flat_dict:\n raise ValueError(\"duplicated key '{}'\".format(flat_key))\n flat_dict[flat_key] = value\n _flatten(d)\n return flat_dict\n",
"role": "user"
},
{
"content": "flatten",
"role": "assistant"
}
] |
def x(d, keys, value):
assert keys
key = keys[0]
if len(keys) == 1:
if key in d:
raise ValueError("duplicated key '{}'".format(key))
d[key] = value
return
d = d.setdefault(key, {})
nested_set_dict(d, keys[1:], value)
| nested_set_dict | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(d, keys, value):\n assert keys\n key = keys[0]\n if len(keys) == 1:\n if key in d:\n raise ValueError(\"duplicated key '{}'\".format(key))\n d[key] = value\n return\n d = d.setdefault(key, {})\n nested_set_dict(d, keys[1:], value)\n",
"role": "user"
},
{
"content": "nested_set_dict",
"role": "assistant"
}
] |
def x(d, splitter='tuple', inverse=False):
if isinstance(splitter, str):
splitter = SPLITTER_DICT[splitter]
unflattened_dict = {}
for flat_key, value in six.viewitems(d):
if inverse:
flat_key, value = value, flat_key
key_tuple = splitter(flat_key)
nested_set_dict(unflattened_dict, key_tuple, value)
return unflattened_dict
| unflatten | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(d, splitter='tuple', inverse=False):\n if isinstance(splitter, str):\n splitter = SPLITTER_DICT[splitter]\n unflattened_dict = {}\n for flat_key, value in six.viewitems(d):\n if inverse:\n flat_key, value = value, flat_key\n key_tuple = splitter(flat_key)\n nested_set_dict(unflattened_dict, key_tuple, value)\n return unflattened_dict\n",
"role": "user"
},
{
"content": "unflatten",
"role": "assistant"
}
] |
def x(ax, pianoroll, is_drum=False, beat_resolution=None, downbeats=None,
preset='default', cmap='Blues', xtick='auto', ytick='octave',
xticklabel=True, yticklabel='auto', tick_loc=None, tick_direction='in',
label='both', grid='both', grid_linestyle=':', grid_linewidth=0.5):
if not HAS_MATPLOTLIB:
raise ImportError(
'matplotlib package is required for plotting supports.')
if pianoroll.ndim not in (2, 3):
raise ValueError('`pianoroll` must be a 2D or 3D numpy array')
if pianoroll.shape[1] != 128:
raise ValueError(
'The length of the second axis of `pianoroll` must be 128.')
if xtick not in ('auto', 'beat', 'step', 'off'):
raise ValueError(
"`xtick` must be one of {'auto', 'beat', 'step', 'none'}.")
if xtick == 'beat' and beat_resolution is None:
raise ValueError(
"`beat_resolution` must be specified when `xtick` is 'beat'.")
if ytick not in ('octave', 'pitch', 'off'):
raise ValueError("`ytick` must be one of {octave', 'pitch', 'off'}.")
if not isinstance(xticklabel, bool):
raise TypeError('`xticklabel` must be bool.')
if yticklabel not in ('auto', 'name', 'number', 'off'):
raise ValueError(
"`yticklabel` must be one of {'auto', 'name', 'number', 'off'}.")
if tick_direction not in ('in', 'out', 'inout'):
raise ValueError(
"`tick_direction` must be one of {'in', 'out','inout'}.")
if label not in ('x', 'y', 'both', 'off'):
raise ValueError("`label` must be one of {'x', 'y', 'both', 'off'}.")
if grid not in ('x', 'y', 'both', 'off'):
raise ValueError("`grid` must be one of {'x', 'y', 'both', 'off'}.")
if pianoroll.ndim > 2:
to_plot = pianoroll.transpose(1, 0, 2)
else:
to_plot = pianoroll.T
if np.issubdtype(pianoroll.dtype, np.bool_) or np.issubdtype(pianoroll.
dtype, np.floating):
ax.imshow(to_plot, cmap=cmap, aspect='auto', vmin=0, vmax=1, origin
='lower', interpolation='none')
elif np.issubdtype(pianoroll.dtype, np.integer):
ax.imshow(to_plot, cmap=cmap, aspect='auto', vmin=0, vmax=127,
origin='lower', interpolation='none')
else:
raise TypeError('Unsupported data type for `pianoroll`.')
if tick_loc is None:
tick_loc = 'bottom', 'left'
if xtick == 'auto':
xtick = 'beat' if beat_resolution is not None else 'step'
if yticklabel == 'auto':
yticklabel = 'name' if ytick == 'octave' else 'number'
if preset == 'plain':
ax.axis('off')
elif preset == 'frame':
ax.tick_params(direction=tick_direction, bottom=False, top=False,
left=False, right=False, labelbottom=False, labeltop=False,
labelleft=False, labelright=False)
else:
ax.tick_params(direction=tick_direction, bottom='bottom' in
tick_loc, top='top' in tick_loc, left='left' in tick_loc, right
='right' in tick_loc, labelbottom=xticklabel != 'off',
labelleft=yticklabel != 'off', labeltop=False, labelright=False)
if xtick == 'beat' and preset != 'frame':
num_beat = pianoroll.shape[0] // beat_resolution
xticks_major = beat_resolution * np.arange(0, num_beat)
xticks_minor = beat_resolution * (0.5 + np.arange(0, num_beat))
xtick_labels = np.arange(1, 1 + num_beat)
ax.set_xticks(xticks_major)
ax.set_xticklabels('')
ax.set_xticks(xticks_minor, minor=True)
ax.set_xticklabels(xtick_labels, minor=True)
ax.tick_params(axis='x', which='minor', width=0)
if ytick == 'octave':
ax.set_yticks(np.arange(0, 128, 12))
if yticklabel == 'name':
ax.set_yticklabels(['C{}'.format(i - 2) for i in range(11)])
elif ytick == 'step':
ax.set_yticks(np.arange(0, 128))
if yticklabel == 'name':
if is_drum:
ax.set_yticklabels([pretty_midi.note_number_to_drum_name(i) for
i in range(128)])
else:
ax.set_yticklabels([pretty_midi.note_number_to_name(i) for
i in range(128)])
if label == 'x' or label == 'both':
if xtick == 'step' or not xticklabel:
ax.set_xlabel('time (step)')
else:
ax.set_xlabel('time (beat)')
if label == 'y' or label == 'both':
if is_drum:
ax.set_ylabel('key name')
else:
ax.set_ylabel('pitch')
if grid != 'off':
ax.grid(axis=grid, color='k', linestyle=grid_linestyle, linewidth=
grid_linewidth)
if downbeats is not None and preset != 'plain':
for step in downbeats:
ax.axvline(x=step, color='k', linewidth=1)
| plot_pianoroll | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(ax, pianoroll, is_drum=False, beat_resolution=None, downbeats=None,\n preset='default', cmap='Blues', xtick='auto', ytick='octave',\n xticklabel=True, yticklabel='auto', tick_loc=None, tick_direction='in',\n label='both', grid='both', grid_linestyle=':', grid_linewidth=0.5):\n if not HAS_MATPLOTLIB:\n raise ImportError(\n 'matplotlib package is required for plotting supports.')\n if pianoroll.ndim not in (2, 3):\n raise ValueError('`pianoroll` must be a 2D or 3D numpy array')\n if pianoroll.shape[1] != 128:\n raise ValueError(\n 'The length of the second axis of `pianoroll` must be 128.')\n if xtick not in ('auto', 'beat', 'step', 'off'):\n raise ValueError(\n \"`xtick` must be one of {'auto', 'beat', 'step', 'none'}.\")\n if xtick == 'beat' and beat_resolution is None:\n raise ValueError(\n \"`beat_resolution` must be specified when `xtick` is 'beat'.\")\n if ytick not in ('octave', 'pitch', 'off'):\n raise ValueError(\"`ytick` must be one of {octave', 'pitch', 'off'}.\")\n if not isinstance(xticklabel, bool):\n raise TypeError('`xticklabel` must be bool.')\n if yticklabel not in ('auto', 'name', 'number', 'off'):\n raise ValueError(\n \"`yticklabel` must be one of {'auto', 'name', 'number', 'off'}.\")\n if tick_direction not in ('in', 'out', 'inout'):\n raise ValueError(\n \"`tick_direction` must be one of {'in', 'out','inout'}.\")\n if label not in ('x', 'y', 'both', 'off'):\n raise ValueError(\"`label` must be one of {'x', 'y', 'both', 'off'}.\")\n if grid not in ('x', 'y', 'both', 'off'):\n raise ValueError(\"`grid` must be one of {'x', 'y', 'both', 'off'}.\")\n if pianoroll.ndim > 2:\n to_plot = pianoroll.transpose(1, 0, 2)\n else:\n to_plot = pianoroll.T\n if np.issubdtype(pianoroll.dtype, np.bool_) or np.issubdtype(pianoroll.\n dtype, np.floating):\n ax.imshow(to_plot, cmap=cmap, aspect='auto', vmin=0, vmax=1, origin\n ='lower', interpolation='none')\n elif np.issubdtype(pianoroll.dtype, np.integer):\n ax.imshow(to_plot, cmap=cmap, 
aspect='auto', vmin=0, vmax=127,\n origin='lower', interpolation='none')\n else:\n raise TypeError('Unsupported data type for `pianoroll`.')\n if tick_loc is None:\n tick_loc = 'bottom', 'left'\n if xtick == 'auto':\n xtick = 'beat' if beat_resolution is not None else 'step'\n if yticklabel == 'auto':\n yticklabel = 'name' if ytick == 'octave' else 'number'\n if preset == 'plain':\n ax.axis('off')\n elif preset == 'frame':\n ax.tick_params(direction=tick_direction, bottom=False, top=False,\n left=False, right=False, labelbottom=False, labeltop=False,\n labelleft=False, labelright=False)\n else:\n ax.tick_params(direction=tick_direction, bottom='bottom' in\n tick_loc, top='top' in tick_loc, left='left' in tick_loc, right\n ='right' in tick_loc, labelbottom=xticklabel != 'off',\n labelleft=yticklabel != 'off', labeltop=False, labelright=False)\n if xtick == 'beat' and preset != 'frame':\n num_beat = pianoroll.shape[0] // beat_resolution\n xticks_major = beat_resolution * np.arange(0, num_beat)\n xticks_minor = beat_resolution * (0.5 + np.arange(0, num_beat))\n xtick_labels = np.arange(1, 1 + num_beat)\n ax.set_xticks(xticks_major)\n ax.set_xticklabels('')\n ax.set_xticks(xticks_minor, minor=True)\n ax.set_xticklabels(xtick_labels, minor=True)\n ax.tick_params(axis='x', which='minor', width=0)\n if ytick == 'octave':\n ax.set_yticks(np.arange(0, 128, 12))\n if yticklabel == 'name':\n ax.set_yticklabels(['C{}'.format(i - 2) for i in range(11)])\n elif ytick == 'step':\n ax.set_yticks(np.arange(0, 128))\n if yticklabel == 'name':\n if is_drum:\n ax.set_yticklabels([pretty_midi.note_number_to_drum_name(i) for\n i in range(128)])\n else:\n ax.set_yticklabels([pretty_midi.note_number_to_name(i) for\n i in range(128)])\n if label == 'x' or label == 'both':\n if xtick == 'step' or not xticklabel:\n ax.set_xlabel('time (step)')\n else:\n ax.set_xlabel('time (beat)')\n if label == 'y' or label == 'both':\n if is_drum:\n ax.set_ylabel('key name')\n else:\n 
ax.set_ylabel('pitch')\n if grid != 'off':\n ax.grid(axis=grid, color='k', linestyle=grid_linestyle, linewidth=\n grid_linewidth)\n if downbeats is not None and preset != 'plain':\n for step in downbeats:\n ax.axvline(x=step, color='k', linewidth=1)\n",
"role": "user"
},
{
"content": "plot_pianoroll",
"role": "assistant"
}
] |
def x(track, filename=None, beat_resolution=None, downbeats=None, preset=
'default', cmap='Blues', xtick='auto', ytick='octave', xticklabel=True,
yticklabel='auto', tick_loc=None, tick_direction='in', label='both',
grid='both', grid_linestyle=':', grid_linewidth=0.5):
if not HAS_MATPLOTLIB:
raise ImportError(
'matplotlib package is required for plotting supports.')
fig, ax = plt.subplots()
plot_pianoroll(ax, track.pianoroll, track.is_drum, beat_resolution,
downbeats, preset=preset, cmap=cmap, xtick=xtick, ytick=ytick,
xticklabel=xticklabel, yticklabel=yticklabel, tick_loc=tick_loc,
tick_direction=tick_direction, label=label, grid=grid,
grid_linestyle=grid_linestyle, grid_linewidth=grid_linewidth)
if filename is not None:
plt.savefig(filename)
return fig, ax
| plot_track | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(track, filename=None, beat_resolution=None, downbeats=None, preset=\n 'default', cmap='Blues', xtick='auto', ytick='octave', xticklabel=True,\n yticklabel='auto', tick_loc=None, tick_direction='in', label='both',\n grid='both', grid_linestyle=':', grid_linewidth=0.5):\n if not HAS_MATPLOTLIB:\n raise ImportError(\n 'matplotlib package is required for plotting supports.')\n fig, ax = plt.subplots()\n plot_pianoroll(ax, track.pianoroll, track.is_drum, beat_resolution,\n downbeats, preset=preset, cmap=cmap, xtick=xtick, ytick=ytick,\n xticklabel=xticklabel, yticklabel=yticklabel, tick_loc=tick_loc,\n tick_direction=tick_direction, label=label, grid=grid,\n grid_linestyle=grid_linestyle, grid_linewidth=grid_linewidth)\n if filename is not None:\n plt.savefig(filename)\n return fig, ax\n",
"role": "user"
},
{
"content": "plot_track",
"role": "assistant"
}
] |
def x(multitrack, filename=None, mode='separate', track_label='name',
preset='default', cmaps=None, xtick='auto', ytick='octave', xticklabel=
True, yticklabel='auto', tick_loc=None, tick_direction='in', label=
'both', grid='both', grid_linestyle=':', grid_linewidth=0.5):
if not HAS_MATPLOTLIB:
raise ImportError(
'matplotlib package is required for plotting supports.')
def x(track_label, track=None):
if track_label == 'name':
return track.name
elif track_label == 'program':
return pretty_midi.program_to_instrument_name(track.program)
elif track_label == 'family':
return pretty_midi.program_to_instrument_class(track.program)
elif track is None:
return track_label
def x(ax, track_label, track=None):
if not ax.get_ylabel():
return
ax.set_ylabel(get_track_label(track_label, track) + '\n\n' + ax.
get_ylabel())
multitrack.check_validity()
if not multitrack.tracks:
raise ValueError('There is no track to plot.')
if mode not in ('separate', 'stacked', 'hybrid'):
raise ValueError(
"`mode` must be one of {'separate', 'stacked', 'hybrid'}.")
if track_label not in ('name', 'program', 'family', 'off'):
raise ValueError(
"`track_label` must be one of {'name', 'program', 'family'}.")
if cmaps is None:
if mode == 'separate':
cmaps = 'Blues', 'Oranges', 'Greens', 'Reds', 'Purples', 'Greys'
elif mode == 'stacked':
cmaps = 'hsv'
else:
cmaps = 'Blues', 'Greens'
num_track = len(multitrack.tracks)
downbeats = multitrack.get_downbeat_steps()
if mode == 'separate':
if num_track > 1:
fig, axs = plt.subplots(num_track, sharex=True)
else:
fig, ax = plt.subplots()
axs = [ax]
for idx, track in enumerate(multitrack.tracks):
now_xticklabel = xticklabel if idx < num_track else False
plot_pianoroll(axs[idx], track.pianoroll, False, multitrack.
beat_resolution, downbeats, preset=preset, cmap=cmaps[idx %
len(cmaps)], xtick=xtick, ytick=ytick, xticklabel=
now_xticklabel, yticklabel=yticklabel, tick_loc=tick_loc,
tick_direction=tick_direction, label=label, grid=grid,
grid_linestyle=grid_linestyle, grid_linewidth=grid_linewidth)
if track_label != 'none':
add_tracklabel(axs[idx], track_label, track)
if num_track > 1:
fig.subplots_adjust(hspace=0)
if filename is not None:
plt.savefig(filename)
return fig, axs
elif mode == 'stacked':
is_all_drum = True
for track in multitrack.tracks:
if not track.is_drum:
is_all_drum = False
fig, ax = plt.subplots()
stacked = multitrack.get_stacked_pianorolls()
colormap = matplotlib.cm.get_cmap(cmaps[0])
cmatrix = colormap(np.arange(0, 1, 1 / num_track))[:, :3]
recolored = np.matmul(stacked.reshape(-1, num_track), cmatrix)
stacked = recolored.reshape(stacked.shape[:2] + (3,))
plot_pianoroll(ax, stacked, is_all_drum, multitrack.beat_resolution,
downbeats, preset=preset, xtick=xtick, ytick=ytick, xticklabel=
xticklabel, yticklabel=yticklabel, tick_loc=tick_loc,
tick_direction=tick_direction, label=label, grid=grid,
grid_linestyle=grid_linestyle, grid_linewidth=grid_linewidth)
if track_label != 'none':
patches = [Patch(color=cmatrix[idx], label=get_track_label(
track_label, track)) for idx, track in enumerate(multitrack
.tracks)]
plt.legend(handles=patches)
if filename is not None:
plt.savefig(filename)
return fig, [ax]
elif mode == 'hybrid':
drums = [i for i, track in enumerate(multitrack.tracks) if track.
is_drum]
others = [i for i in range(len(multitrack.tracks)) if i not in drums]
merged_drums = multitrack.get_merged_pianoroll(drums)
merged_others = multitrack.get_merged_pianoroll(others)
fig, (ax1, ax2) = plt.subplots(2, sharex=True, sharey=True)
plot_pianoroll(ax1, merged_drums, True, multitrack.beat_resolution,
downbeats, preset=preset, cmap=cmaps[0], xtick=xtick, ytick=
ytick, xticklabel=xticklabel, yticklabel=yticklabel, tick_loc=
tick_loc, tick_direction=tick_direction, label=label, grid=grid,
grid_linestyle=grid_linestyle, grid_linewidth=grid_linewidth)
plot_pianoroll(ax2, merged_others, False, multitrack.
beat_resolution, downbeats, preset=preset, cmap=cmaps[1], ytick
=ytick, xticklabel=xticklabel, yticklabel=yticklabel, tick_loc=
tick_loc, tick_direction=tick_direction, label=label, grid=grid,
grid_linestyle=grid_linestyle, grid_linewidth=grid_linewidth)
fig.subplots_adjust(hspace=0)
if track_label != 'none':
add_tracklabel(ax1, 'Drums')
add_tracklabel(ax2, 'Others')
if filename is not None:
plt.savefig(filename)
return fig, [ax1, ax2]
| plot_multitrack | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(multitrack, filename=None, mode='separate', track_label='name',\n preset='default', cmaps=None, xtick='auto', ytick='octave', xticklabel=\n True, yticklabel='auto', tick_loc=None, tick_direction='in', label=\n 'both', grid='both', grid_linestyle=':', grid_linewidth=0.5):\n if not HAS_MATPLOTLIB:\n raise ImportError(\n 'matplotlib package is required for plotting supports.')\n\n def x(track_label, track=None):\n if track_label == 'name':\n return track.name\n elif track_label == 'program':\n return pretty_midi.program_to_instrument_name(track.program)\n elif track_label == 'family':\n return pretty_midi.program_to_instrument_class(track.program)\n elif track is None:\n return track_label\n\n def x(ax, track_label, track=None):\n if not ax.get_ylabel():\n return\n ax.set_ylabel(get_track_label(track_label, track) + '\\n\\n' + ax.\n get_ylabel())\n multitrack.check_validity()\n if not multitrack.tracks:\n raise ValueError('There is no track to plot.')\n if mode not in ('separate', 'stacked', 'hybrid'):\n raise ValueError(\n \"`mode` must be one of {'separate', 'stacked', 'hybrid'}.\")\n if track_label not in ('name', 'program', 'family', 'off'):\n raise ValueError(\n \"`track_label` must be one of {'name', 'program', 'family'}.\")\n if cmaps is None:\n if mode == 'separate':\n cmaps = 'Blues', 'Oranges', 'Greens', 'Reds', 'Purples', 'Greys'\n elif mode == 'stacked':\n cmaps = 'hsv'\n else:\n cmaps = 'Blues', 'Greens'\n num_track = len(multitrack.tracks)\n downbeats = multitrack.get_downbeat_steps()\n if mode == 'separate':\n if num_track > 1:\n fig, axs = plt.subplots(num_track, sharex=True)\n else:\n fig, ax = plt.subplots()\n axs = [ax]\n for idx, track in enumerate(multitrack.tracks):\n now_xticklabel = xticklabel if idx < num_track else False\n plot_pianoroll(axs[idx], track.pianoroll, False, multitrack.\n beat_resolution, downbeats, preset=preset, cmap=cmaps[idx %\n len(cmaps)], xtick=xtick, ytick=ytick, xticklabel=\n now_xticklabel, 
yticklabel=yticklabel, tick_loc=tick_loc,\n tick_direction=tick_direction, label=label, grid=grid,\n grid_linestyle=grid_linestyle, grid_linewidth=grid_linewidth)\n if track_label != 'none':\n add_tracklabel(axs[idx], track_label, track)\n if num_track > 1:\n fig.subplots_adjust(hspace=0)\n if filename is not None:\n plt.savefig(filename)\n return fig, axs\n elif mode == 'stacked':\n is_all_drum = True\n for track in multitrack.tracks:\n if not track.is_drum:\n is_all_drum = False\n fig, ax = plt.subplots()\n stacked = multitrack.get_stacked_pianorolls()\n colormap = matplotlib.cm.get_cmap(cmaps[0])\n cmatrix = colormap(np.arange(0, 1, 1 / num_track))[:, :3]\n recolored = np.matmul(stacked.reshape(-1, num_track), cmatrix)\n stacked = recolored.reshape(stacked.shape[:2] + (3,))\n plot_pianoroll(ax, stacked, is_all_drum, multitrack.beat_resolution,\n downbeats, preset=preset, xtick=xtick, ytick=ytick, xticklabel=\n xticklabel, yticklabel=yticklabel, tick_loc=tick_loc,\n tick_direction=tick_direction, label=label, grid=grid,\n grid_linestyle=grid_linestyle, grid_linewidth=grid_linewidth)\n if track_label != 'none':\n patches = [Patch(color=cmatrix[idx], label=get_track_label(\n track_label, track)) for idx, track in enumerate(multitrack\n .tracks)]\n plt.legend(handles=patches)\n if filename is not None:\n plt.savefig(filename)\n return fig, [ax]\n elif mode == 'hybrid':\n drums = [i for i, track in enumerate(multitrack.tracks) if track.\n is_drum]\n others = [i for i in range(len(multitrack.tracks)) if i not in drums]\n merged_drums = multitrack.get_merged_pianoroll(drums)\n merged_others = multitrack.get_merged_pianoroll(others)\n fig, (ax1, ax2) = plt.subplots(2, sharex=True, sharey=True)\n plot_pianoroll(ax1, merged_drums, True, multitrack.beat_resolution,\n downbeats, preset=preset, cmap=cmaps[0], xtick=xtick, ytick=\n ytick, xticklabel=xticklabel, yticklabel=yticklabel, tick_loc=\n tick_loc, tick_direction=tick_direction, label=label, grid=grid,\n 
grid_linestyle=grid_linestyle, grid_linewidth=grid_linewidth)\n plot_pianoroll(ax2, merged_others, False, multitrack.\n beat_resolution, downbeats, preset=preset, cmap=cmaps[1], ytick\n =ytick, xticklabel=xticklabel, yticklabel=yticklabel, tick_loc=\n tick_loc, tick_direction=tick_direction, label=label, grid=grid,\n grid_linestyle=grid_linestyle, grid_linewidth=grid_linewidth)\n fig.subplots_adjust(hspace=0)\n if track_label != 'none':\n add_tracklabel(ax1, 'Drums')\n add_tracklabel(ax2, 'Others')\n if filename is not None:\n plt.savefig(filename)\n return fig, [ax1, ax2]\n",
"role": "user"
},
{
"content": "plot_multitrack",
"role": "assistant"
}
] |
def x(filename, pianoroll, window, hop=1, fps=None, is_drum=False,
beat_resolution=None, downbeats=None, preset='default', cmap='Blues',
xtick='auto', ytick='octave', xticklabel=True, yticklabel='auto',
tick_loc=None, tick_direction='in', label='both', grid='both',
grid_linestyle=':', grid_linewidth=0.5, **kwargs):
if not HAS_MOVIEPY:
raise ImportError('moviepy package is required for animation supports.'
)
def x(t):
fig = plt.gcf()
ax = plt.gca()
f_idx = int(t * fps)
start = hop * f_idx
end = start + window
to_plot = transposed[:, start:end]
extent = start, end - 1, 0, 127
ax.imshow(to_plot, cmap=cmap, aspect='auto', vmin=vmin, vmax=vmax,
origin='lower', interpolation='none', extent=extent)
if xtick == 'beat':
next_major_idx = beat_resolution - start % beat_resolution
if start % beat_resolution < beat_resolution // 2:
next_minor_idx = beat_resolution // 2 - start % beat_resolution
else:
next_minor_idx = (beat_resolution // 2 - start %
beat_resolution + beat_resolution)
xticks_major = np.arange(next_major_idx, window, beat_resolution)
xticks_minor = np.arange(next_minor_idx, window, beat_resolution)
if end % beat_resolution < beat_resolution // 2:
last_minor_idx = beat_resolution // 2 - end % beat_resolution
else:
last_minor_idx = (beat_resolution // 2 - end %
beat_resolution + beat_resolution)
xtick_labels = np.arange((start + next_minor_idx) //
beat_resolution, (end + last_minor_idx) // beat_resolution)
ax.set_xticks(xticks_major)
ax.set_xticklabels('')
ax.set_xticks(xticks_minor, minor=True)
ax.set_xticklabels(xtick_labels, minor=True)
ax.tick_params(axis='x', which='minor', width=0)
return mplfig_to_npimage(fig)
if xtick == 'auto':
xtick = 'beat' if beat_resolution is not None else 'step'
fig, ax = plt.subplots()
plot_pianoroll(ax, pianoroll[:window], is_drum, beat_resolution,
downbeats, preset=preset, cmap=cmap, xtick=xtick, ytick=ytick,
xticklabel=xticklabel, yticklabel=yticklabel, tick_loc=tick_loc,
tick_direction=tick_direction, label=label, grid=grid,
grid_linestyle=grid_linestyle, grid_linewidth=grid_linewidth)
num_frame = int((pianoroll.shape[0] - window) / hop)
duration = int(num_frame / fps)
if np.issubdtype(pianoroll.dtype, np.bool_) or np.issubdtype(pianoroll.
dtype, np.floating):
vmax = 1
elif np.issubdtype(pianoroll.dtype, np.integer):
vmax = 127
else:
raise TypeError('Unsupported data type for `pianoroll`.')
vmin = 0
transposed = pianoroll.T
animation = VideoClip(make_frame, duration=duration)
if filename.endswith('.gif'):
animation.write_gif(filename, fps, **kwargs)
else:
animation.write_videofile(filename, fps, **kwargs)
plt.close()
| save_animation | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(filename, pianoroll, window, hop=1, fps=None, is_drum=False,\n beat_resolution=None, downbeats=None, preset='default', cmap='Blues',\n xtick='auto', ytick='octave', xticklabel=True, yticklabel='auto',\n tick_loc=None, tick_direction='in', label='both', grid='both',\n grid_linestyle=':', grid_linewidth=0.5, **kwargs):\n if not HAS_MOVIEPY:\n raise ImportError('moviepy package is required for animation supports.'\n )\n\n def x(t):\n fig = plt.gcf()\n ax = plt.gca()\n f_idx = int(t * fps)\n start = hop * f_idx\n end = start + window\n to_plot = transposed[:, start:end]\n extent = start, end - 1, 0, 127\n ax.imshow(to_plot, cmap=cmap, aspect='auto', vmin=vmin, vmax=vmax,\n origin='lower', interpolation='none', extent=extent)\n if xtick == 'beat':\n next_major_idx = beat_resolution - start % beat_resolution\n if start % beat_resolution < beat_resolution // 2:\n next_minor_idx = beat_resolution // 2 - start % beat_resolution\n else:\n next_minor_idx = (beat_resolution // 2 - start %\n beat_resolution + beat_resolution)\n xticks_major = np.arange(next_major_idx, window, beat_resolution)\n xticks_minor = np.arange(next_minor_idx, window, beat_resolution)\n if end % beat_resolution < beat_resolution // 2:\n last_minor_idx = beat_resolution // 2 - end % beat_resolution\n else:\n last_minor_idx = (beat_resolution // 2 - end %\n beat_resolution + beat_resolution)\n xtick_labels = np.arange((start + next_minor_idx) //\n beat_resolution, (end + last_minor_idx) // beat_resolution)\n ax.set_xticks(xticks_major)\n ax.set_xticklabels('')\n ax.set_xticks(xticks_minor, minor=True)\n ax.set_xticklabels(xtick_labels, minor=True)\n ax.tick_params(axis='x', which='minor', width=0)\n return mplfig_to_npimage(fig)\n if xtick == 'auto':\n xtick = 'beat' if beat_resolution is not None else 'step'\n fig, ax = plt.subplots()\n plot_pianoroll(ax, pianoroll[:window], is_drum, beat_resolution,\n downbeats, preset=preset, cmap=cmap, xtick=xtick, ytick=ytick,\n 
xticklabel=xticklabel, yticklabel=yticklabel, tick_loc=tick_loc,\n tick_direction=tick_direction, label=label, grid=grid,\n grid_linestyle=grid_linestyle, grid_linewidth=grid_linewidth)\n num_frame = int((pianoroll.shape[0] - window) / hop)\n duration = int(num_frame / fps)\n if np.issubdtype(pianoroll.dtype, np.bool_) or np.issubdtype(pianoroll.\n dtype, np.floating):\n vmax = 1\n elif np.issubdtype(pianoroll.dtype, np.integer):\n vmax = 127\n else:\n raise TypeError('Unsupported data type for `pianoroll`.')\n vmin = 0\n transposed = pianoroll.T\n animation = VideoClip(make_frame, duration=duration)\n if filename.endswith('.gif'):\n animation.write_gif(filename, fps, **kwargs)\n else:\n animation.write_videofile(filename, fps, **kwargs)\n plt.close()\n",
"role": "user"
},
{
"content": "save_animation",
"role": "assistant"
}
] |
def x(self, track=None, pianoroll=None, program=0, is_drum=False, name=
'unknown'):
if track is not None:
if not isinstance(track, Track):
raise TypeError('`track` must be a pypianoroll.Track instance.')
track.check_validity()
else:
track = Track(pianoroll, program, is_drum, name)
self.tracks.append(track)
| append_track | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, track=None, pianoroll=None, program=0, is_drum=False, name=\n 'unknown'):\n if track is not None:\n if not isinstance(track, Track):\n raise TypeError('`track` must be a pypianoroll.Track instance.')\n track.check_validity()\n else:\n track = Track(pianoroll, program, is_drum, name)\n self.tracks.append(track)\n",
"role": "user"
},
{
"content": "append_track",
"role": "assistant"
}
] |
def x(self):
for track in self.tracks:
if not isinstance(track, Track):
raise TypeError(
'`tracks` must be a list of `pypianoroll.Track` instances.')
track.check_validity()
if not isinstance(self.tempo, np.ndarray):
raise TypeError('`tempo` must be int or a numpy array.')
elif not np.issubdtype(self.tempo.dtype, np.number):
raise TypeError('Data type of `tempo` must be a subdtype of np.number.'
)
elif self.tempo.ndim != 1:
raise ValueError('`tempo` must be a 1D numpy array.')
if np.any(self.tempo <= 0.0):
raise ValueError('`tempo` should contain only positive numbers.')
if self.downbeat is not None:
if not isinstance(self.downbeat, np.ndarray):
raise TypeError('`downbeat` must be a numpy array.')
if not np.issubdtype(self.downbeat.dtype, np.bool_):
raise TypeError('Data type of `downbeat` must be bool.')
if self.downbeat.ndim != 1:
raise ValueError('`downbeat` must be a 1D numpy array.')
if not isinstance(self.beat_resolution, int):
raise TypeError('`beat_resolution` must be int.')
if self.beat_resolution < 1:
raise ValueError('`beat_resolution` must be a positive integer.')
if not isinstance(self.name, string_types):
raise TypeError('`name` must be a string.')
| check_validity | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n for track in self.tracks:\n if not isinstance(track, Track):\n raise TypeError(\n '`tracks` must be a list of `pypianoroll.Track` instances.')\n track.check_validity()\n if not isinstance(self.tempo, np.ndarray):\n raise TypeError('`tempo` must be int or a numpy array.')\n elif not np.issubdtype(self.tempo.dtype, np.number):\n raise TypeError('Data type of `tempo` must be a subdtype of np.number.'\n )\n elif self.tempo.ndim != 1:\n raise ValueError('`tempo` must be a 1D numpy array.')\n if np.any(self.tempo <= 0.0):\n raise ValueError('`tempo` should contain only positive numbers.')\n if self.downbeat is not None:\n if not isinstance(self.downbeat, np.ndarray):\n raise TypeError('`downbeat` must be a numpy array.')\n if not np.issubdtype(self.downbeat.dtype, np.bool_):\n raise TypeError('Data type of `downbeat` must be bool.')\n if self.downbeat.ndim != 1:\n raise ValueError('`downbeat` must be a 1D numpy array.')\n if not isinstance(self.beat_resolution, int):\n raise TypeError('`beat_resolution` must be int.')\n if self.beat_resolution < 1:\n raise ValueError('`beat_resolution` must be a positive integer.')\n if not isinstance(self.name, string_types):\n raise TypeError('`name` must be a string.')\n",
"role": "user"
},
{
"content": "check_validity",
"role": "assistant"
}
] |
def x(self, lower=0, upper=127):
for track in self.tracks:
track.clip(lower, upper)
| clip | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, lower=0, upper=127):\n for track in self.tracks:\n track.clip(lower, upper)\n",
"role": "user"
},
{
"content": "clip",
"role": "assistant"
}
] |
def x(self):
active_length = 0
for track in self.tracks:
now_length = track.get_active_length()
if active_length < track.get_active_length():
active_length = now_length
return active_length
| get_active_length | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n active_length = 0\n for track in self.tracks:\n now_length = track.get_active_length()\n if active_length < track.get_active_length():\n active_length = now_length\n return active_length\n",
"role": "user"
},
{
"content": "get_active_length",
"role": "assistant"
}
] |
def x(self):
lowest, highest = self.tracks[0].get_active_pitch_range()
if len(self.tracks) > 1:
for track in self.tracks[1:]:
low, high = track.get_active_pitch_range()
if low < lowest:
lowest = low
if high > highest:
highest = high
return lowest, highest
| get_active_pitch_range | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n lowest, highest = self.tracks[0].get_active_pitch_range()\n if len(self.tracks) > 1:\n for track in self.tracks[1:]:\n low, high = track.get_active_pitch_range()\n if low < lowest:\n lowest = low\n if high > highest:\n highest = high\n return lowest, highest\n",
"role": "user"
},
{
"content": "get_active_pitch_range",
"role": "assistant"
}
] |
def x(self):
if self.downbeat is None:
return []
downbeat_steps = np.nonzero(self.downbeat)[0].tolist()
return downbeat_steps
| get_downbeat_steps | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if self.downbeat is None:\n return []\n downbeat_steps = np.nonzero(self.downbeat)[0].tolist()\n return downbeat_steps\n",
"role": "user"
},
{
"content": "get_downbeat_steps",
"role": "assistant"
}
] |
def x(self):
empty_track_indices = [idx for idx, track in enumerate(self.tracks) if
not np.any(track.pianoroll)]
return empty_track_indices
| get_empty_tracks | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n empty_track_indices = [idx for idx, track in enumerate(self.tracks) if \n not np.any(track.pianoroll)]\n return empty_track_indices\n",
"role": "user"
},
{
"content": "get_empty_tracks",
"role": "assistant"
}
] |
def x(self):
max_length = 0
for track in self.tracks:
if max_length < track.pianoroll.shape[0]:
max_length = track.pianoroll.shape[0]
return max_length
| get_max_length | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n max_length = 0\n for track in self.tracks:\n if max_length < track.pianoroll.shape[0]:\n max_length = track.pianoroll.shape[0]\n return max_length\n",
"role": "user"
},
{
"content": "get_max_length",
"role": "assistant"
}
] |
def x(self, mode='sum'):
stacked = self.get_stacked_pianoroll()
if mode == 'any':
merged = np.any(stacked, axis=2)
elif mode == 'sum':
merged = np.sum(stacked, axis=2)
elif mode == 'max':
merged = np.max(stacked, axis=2)
else:
raise ValueError("`mode` must be one of {'max', 'sum', 'any'}.")
return merged
| get_merged_pianoroll | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, mode='sum'):\n stacked = self.get_stacked_pianoroll()\n if mode == 'any':\n merged = np.any(stacked, axis=2)\n elif mode == 'sum':\n merged = np.sum(stacked, axis=2)\n elif mode == 'max':\n merged = np.max(stacked, axis=2)\n else:\n raise ValueError(\"`mode` must be one of {'max', 'sum', 'any'}.\")\n return merged\n",
"role": "user"
},
{
"content": "get_merged_pianoroll",
"role": "assistant"
}
] |
def x(self):
multitrack = deepcopy(self)
multitrack.pad_to_same()
stacked = np.stack([track.pianoroll for track in multitrack.tracks], -1)
return stacked
| get_stacked_pianoroll | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n multitrack = deepcopy(self)\n multitrack.pad_to_same()\n stacked = np.stack([track.pianoroll for track in multitrack.tracks], -1)\n return stacked\n",
"role": "user"
},
{
"content": "get_stacked_pianoroll",
"role": "assistant"
}
] |
def x(self, filename):
def x(target_dict, name):
return csc_matrix((target_dict[name + '_csc_data'], target_dict[
name + '_csc_indices'], target_dict[name + '_csc_indptr']),
shape=target_dict[name + '_csc_shape']).toarray()
with np.load(filename) as loaded:
if 'info.json' not in loaded:
raise ValueError("Cannot find 'info.json' in the npz file.")
info_dict = json.loads(loaded['info.json'].decode('utf-8'))
self.name = info_dict['name']
self.beat_resolution = info_dict['beat_resolution']
self.tempo = loaded['tempo']
if 'downbeat' in loaded.files:
self.downbeat = loaded['downbeat']
else:
self.downbeat = None
idx = 0
self.tracks = []
while str(idx) in info_dict:
pianoroll = reconstruct_sparse(loaded, 'pianoroll_{}'.format(idx))
track = Track(pianoroll, info_dict[str(idx)]['program'],
info_dict[str(idx)]['is_drum'], info_dict[str(idx)]['name'])
self.tracks.append(track)
idx += 1
self.check_validity()
| load | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, filename):\n\n def x(target_dict, name):\n return csc_matrix((target_dict[name + '_csc_data'], target_dict[\n name + '_csc_indices'], target_dict[name + '_csc_indptr']),\n shape=target_dict[name + '_csc_shape']).toarray()\n with np.load(filename) as loaded:\n if 'info.json' not in loaded:\n raise ValueError(\"Cannot find 'info.json' in the npz file.\")\n info_dict = json.loads(loaded['info.json'].decode('utf-8'))\n self.name = info_dict['name']\n self.beat_resolution = info_dict['beat_resolution']\n self.tempo = loaded['tempo']\n if 'downbeat' in loaded.files:\n self.downbeat = loaded['downbeat']\n else:\n self.downbeat = None\n idx = 0\n self.tracks = []\n while str(idx) in info_dict:\n pianoroll = reconstruct_sparse(loaded, 'pianoroll_{}'.format(idx))\n track = Track(pianoroll, info_dict[str(idx)]['program'],\n info_dict[str(idx)]['is_drum'], info_dict[str(idx)]['name'])\n self.tracks.append(track)\n idx += 1\n self.check_validity()\n",
"role": "user"
},
{
"content": "load",
"role": "assistant"
}
] |
def x(self, track_indices=None, mode='sum', program=0, is_drum=False, name=
'merged', remove_merged=False):
if mode not in ('max', 'sum', 'any'):
raise ValueError("`mode` must be one of {'max', 'sum', 'any'}.")
merged = self[track_indices].get_merged_pianoroll(mode)
merged_track = Track(merged, program, is_drum, name)
self.append_track(merged_track)
if remove_merged:
self.remove_tracks(track_indices)
| merge_tracks | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, track_indices=None, mode='sum', program=0, is_drum=False, name=\n 'merged', remove_merged=False):\n if mode not in ('max', 'sum', 'any'):\n raise ValueError(\"`mode` must be one of {'max', 'sum', 'any'}.\")\n merged = self[track_indices].get_merged_pianoroll(mode)\n merged_track = Track(merged, program, is_drum, name)\n self.append_track(merged_track)\n if remove_merged:\n self.remove_tracks(track_indices)\n",
"role": "user"
},
{
"content": "merge_tracks",
"role": "assistant"
}
] |
def x(self):
max_length = self.get_max_length()
for track in self.tracks:
if track.pianoroll.shape[0] < max_length:
track.pad(max_length - track.pianoroll.shape[0])
| pad_to_same | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n max_length = self.get_max_length()\n for track in self.tracks:\n if track.pianoroll.shape[0] < max_length:\n track.pad(max_length - track.pianoroll.shape[0])\n",
"role": "user"
},
{
"content": "pad_to_same",
"role": "assistant"
}
] |
def x(self, filename, **kwargs):
pm = pretty_midi.PrettyMIDI(filename)
self.parse_pretty_midi(pm, **kwargs)
| parse_midi | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, filename, **kwargs):\n pm = pretty_midi.PrettyMIDI(filename)\n self.parse_pretty_midi(pm, **kwargs)\n",
"role": "user"
},
{
"content": "parse_midi",
"role": "assistant"
}
] |
def x(self, track_indices):
if isinstance(track_indices, int):
track_indices = [track_indices]
self.tracks = [track for idx, track in enumerate(self.tracks) if idx not in
track_indices]
| remove_tracks | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, track_indices):\n if isinstance(track_indices, int):\n track_indices = [track_indices]\n self.tracks = [track for idx, track in enumerate(self.tracks) if idx not in\n track_indices]\n",
"role": "user"
},
{
"content": "remove_tracks",
"role": "assistant"
}
] |
def x(self, filename, compressed=True):
def x(target_dict, sparse_matrix, name):
csc = csc_matrix(sparse_matrix)
target_dict[name + '_csc_data'] = csc.data
target_dict[name + '_csc_indices'] = csc.indices
target_dict[name + '_csc_indptr'] = csc.indptr
target_dict[name + '_csc_shape'] = csc.shape
self.check_validity()
array_dict = {'tempo': self.tempo}
info_dict = {'beat_resolution': self.beat_resolution, 'name': self.name}
if self.downbeat is not None:
array_dict['downbeat'] = self.downbeat
for idx, track in enumerate(self.tracks):
update_sparse(array_dict, track.pianoroll, 'pianoroll_{}'.format(idx))
info_dict[str(idx)] = {'program': track.program, 'is_drum': track.
is_drum, 'name': track.name}
if not filename.endswith('.npz'):
filename += '.npz'
if compressed:
np.savez_compressed(filename, **array_dict)
else:
np.savez(filename, **array_dict)
compression = zipfile.ZIP_DEFLATED if compressed else zipfile.ZIP_STORED
with zipfile.ZipFile(filename, 'a') as zip_file:
zip_file.writestr('info.json', json.dumps(info_dict), compression)
| save | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, filename, compressed=True):\n\n def x(target_dict, sparse_matrix, name):\n csc = csc_matrix(sparse_matrix)\n target_dict[name + '_csc_data'] = csc.data\n target_dict[name + '_csc_indices'] = csc.indices\n target_dict[name + '_csc_indptr'] = csc.indptr\n target_dict[name + '_csc_shape'] = csc.shape\n self.check_validity()\n array_dict = {'tempo': self.tempo}\n info_dict = {'beat_resolution': self.beat_resolution, 'name': self.name}\n if self.downbeat is not None:\n array_dict['downbeat'] = self.downbeat\n for idx, track in enumerate(self.tracks):\n update_sparse(array_dict, track.pianoroll, 'pianoroll_{}'.format(idx))\n info_dict[str(idx)] = {'program': track.program, 'is_drum': track.\n is_drum, 'name': track.name}\n if not filename.endswith('.npz'):\n filename += '.npz'\n if compressed:\n np.savez_compressed(filename, **array_dict)\n else:\n np.savez(filename, **array_dict)\n compression = zipfile.ZIP_DEFLATED if compressed else zipfile.ZIP_STORED\n with zipfile.ZipFile(filename, 'a') as zip_file:\n zip_file.writestr('info.json', json.dumps(info_dict), compression)\n",
"role": "user"
},
{
"content": "save",
"role": "assistant"
}
] |
def x(self, constant_tempo=None, constant_velocity=100):
self.check_validity()
pm = pretty_midi.PrettyMIDI(initial_tempo=self.tempo[0])
if constant_tempo is None:
constant_tempo = self.tempo[0]
time_step_size = 60.0 / constant_tempo / self.beat_resolution
for track in self.tracks:
instrument = pretty_midi.Instrument(program=track.program, is_drum=
track.is_drum, name=track.name)
copied = track.copy()
if copied.is_binarized():
copied.assign_constant(constant_velocity)
copied.clip()
clipped = copied.pianoroll.astype(np.uint8)
binarized = clipped > 0
padded = np.pad(binarized, ((1, 1), (0, 0)), 'constant')
diff = np.diff(padded.astype(np.int8), axis=0)
positives = np.nonzero((diff > 0).T)
pitches = positives[0]
note_ons = positives[1]
note_on_times = time_step_size * note_ons
note_offs = np.nonzero((diff < 0).T)[1]
note_off_times = time_step_size * note_offs
for idx, pitch in enumerate(pitches):
velocity = np.mean(clipped[note_ons[idx]:note_offs[idx], pitch])
note = pretty_midi.Note(velocity=int(velocity), pitch=pitch,
start=note_on_times[idx], end=note_off_times[idx])
instrument.notes.append(note)
instrument.notes.sort(key=lambda x: x.start)
pm.instruments.append(instrument)
return pm
| to_pretty_midi | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, constant_tempo=None, constant_velocity=100):\n self.check_validity()\n pm = pretty_midi.PrettyMIDI(initial_tempo=self.tempo[0])\n if constant_tempo is None:\n constant_tempo = self.tempo[0]\n time_step_size = 60.0 / constant_tempo / self.beat_resolution\n for track in self.tracks:\n instrument = pretty_midi.Instrument(program=track.program, is_drum=\n track.is_drum, name=track.name)\n copied = track.copy()\n if copied.is_binarized():\n copied.assign_constant(constant_velocity)\n copied.clip()\n clipped = copied.pianoroll.astype(np.uint8)\n binarized = clipped > 0\n padded = np.pad(binarized, ((1, 1), (0, 0)), 'constant')\n diff = np.diff(padded.astype(np.int8), axis=0)\n positives = np.nonzero((diff > 0).T)\n pitches = positives[0]\n note_ons = positives[1]\n note_on_times = time_step_size * note_ons\n note_offs = np.nonzero((diff < 0).T)[1]\n note_off_times = time_step_size * note_offs\n for idx, pitch in enumerate(pitches):\n velocity = np.mean(clipped[note_ons[idx]:note_offs[idx], pitch])\n note = pretty_midi.Note(velocity=int(velocity), pitch=pitch,\n start=note_on_times[idx], end=note_off_times[idx])\n instrument.notes.append(note)\n instrument.notes.sort(key=lambda x: x.start)\n pm.instruments.append(instrument)\n return pm\n",
"role": "user"
},
{
"content": "to_pretty_midi",
"role": "assistant"
}
] |
def x(self, semitone):
for track in self.tracks:
if not track.is_drum:
track.transpose(semitone)
| transpose | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, semitone):\n for track in self.tracks:\n if not track.is_drum:\n track.transpose(semitone)\n",
"role": "user"
},
{
"content": "transpose",
"role": "assistant"
}
] |
def x(self):
active_length = self.get_active_length()
for track in self.tracks:
track.pianoroll = track.pianoroll[:active_length]
| trim_trailing_silence | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n active_length = self.get_active_length()\n for track in self.tracks:\n track.pianoroll = track.pianoroll[:active_length]\n",
"role": "user"
},
{
"content": "trim_trailing_silence",
"role": "assistant"
}
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.