keyword stringclasses 7 values | repo_name stringlengths 8 98 | file_path stringlengths 4 244 | file_extension stringclasses 29 values | file_size int64 0 84.1M | line_count int64 0 1.6M | content stringlengths 1 84.1M ⌀ | language stringclasses 14 values |
|---|---|---|---|---|---|---|---|
3D | hku-mars/ImMesh | src/meshing/tinycolormap.hpp | .hpp | 104,096 | 2,467 | /*
MIT License
Copyright (c) 2018-2020 Yuki Koyama
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-------------------------------------------------------------------------------
The lookup table for Turbo is derived by Shot511 in his PR,
https://github.com/yuki-koyama/tinycolormap/pull/27 , from
https://gist.github.com/mikhailov-work/6a308c20e494d9e0ccc29036b28faa7a , which
is released by Anton Mikhailov, copyrighted by Google LLC, and licensed under
the Apache 2.0 license. To the best of our knowledge, the Apache 2.0 license is
compatible with the MIT license, and thus we release the merged entire code
under the MIT license. The license notice for Anton's code is posted here:
Copyright 2019 Google LLC.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#ifndef TINYCOLORMAP_HPP_
#define TINYCOLORMAP_HPP_
#include <cmath>
#include <cstdint>
#include <algorithm>
#if defined(TINYCOLORMAP_WITH_EIGEN)
#include <Eigen/Core>
#endif
#if defined(TINYCOLORMAP_WITH_QT5)
#include <QColor>
#endif
#if defined(TINYCOLORMAP_WITH_QT5) && defined(TINYCOLORMAP_WITH_EIGEN)
#include <QImage>
#include <QString>
#endif
#if defined(TINYCOLORMAP_WITH_GLM)
#include <glm/vec3.hpp>
#endif
namespace tinycolormap
{
//////////////////////////////////////////////////////////////////////////////////
// Interface
//////////////////////////////////////////////////////////////////////////////////
// Identifiers for the supported colormaps, passed to GetColor() /
// GetQuantizedColor() to select which lookup table or formula is used.
enum class ColormapType
{
Parula, Heat, Jet, Turbo, Hot, Gray, Magma, Inferno, Plasma, Viridis, Cividis, Github, Cubehelix
};
// An RGB color whose channels are doubles, conventionally in [0, 1].
// Supplies channel accessors, the component-wise arithmetic needed for
// linear interpolation, and optional conversions to Qt / Eigen / GLM
// vector types when the corresponding TINYCOLORMAP_WITH_* macro is set.
struct Color
{
    // Construct a gray color (all three channels equal).
    explicit constexpr Color(double gray) noexcept : data{ gray, gray, gray } {}
    constexpr Color(double r, double g, double b) noexcept : data{ r, g, b } {}
    // Channels in r, g, b order. Values are expected to lie in [0, 1];
    // nothing in this struct enforces that range.
    double data[3];
    // Mutable and read-only channel accessors.
    double& r() noexcept { return data[0]; }
    double& g() noexcept { return data[1]; }
    double& b() noexcept { return data[2]; }
    constexpr double r() const noexcept { return data[0]; }
    constexpr double g() const noexcept { return data[1]; }
    constexpr double b() const noexcept { return data[2]; }
    // 8-bit channel values. Note the cast truncates rather than rounds,
    // and out-of-range channels are not clamped here.
    constexpr uint8_t ri() const noexcept { return static_cast<uint8_t>(data[0] * 255.0); }
    constexpr uint8_t gi() const noexcept { return static_cast<uint8_t>(data[1] * 255.0); }
    constexpr uint8_t bi() const noexcept { return static_cast<uint8_t>(data[2] * 255.0); }
    // Index-based access (0 = r, 1 = g, 2 = b); no bounds checking.
    double& operator[](std::size_t n) noexcept { return data[n]; }
    constexpr double operator[](std::size_t n) const noexcept { return data[n]; }
    double& operator()(std::size_t n) noexcept { return data[n]; }
    constexpr double operator()(std::size_t n) const noexcept { return data[n]; }
    // Component-wise addition and scalar scaling; these are exactly the
    // operations internal::CalcLerp needs for linear interpolation.
    friend constexpr Color operator+(const Color& c0, const Color& c1) noexcept
    {
        return { c0.r() + c1.r(), c0.g() + c1.g(), c0.b() + c1.b() };
    }
    friend constexpr Color operator*(double s, const Color& c) noexcept
    {
        return { s * c.r(), s * c.g(), s * c.b() };
    }
#if defined(TINYCOLORMAP_WITH_QT5)
    QColor ConvertToQColor() const { return QColor(data[0] * 255.0, data[1] * 255.0, data[2] * 255.0); }
#endif
#if defined(TINYCOLORMAP_WITH_EIGEN)
    Eigen::Vector3d ConvertToEigen() const { return Eigen::Vector3d(data[0], data[1], data[2]); }
#endif
#if defined(TINYCOLORMAP_WITH_GLM)
    glm::vec3 ConvertToGLM() const { return glm::vec3(data[0], data[1], data[2]); }
#endif
};
// Main entry point: sample colormap `type` at x (x is clamped to [0, 1]
// by the individual samplers).
inline Color GetColor(double x, ColormapType type = ColormapType::Viridis);
// Same as GetColor, but first snaps x to the midpoint of one of
// `num_levels` discrete bins, producing a banded colormap.
inline Color GetQuantizedColor(double x, unsigned int num_levels, ColormapType type = ColormapType::Viridis);
// Per-colormap samplers; each maps x in [0, 1] to an RGB color.
inline Color GetParulaColor(double x);
inline Color GetHeatColor(double x);
inline Color GetJetColor(double x);
inline Color GetTurboColor(double x);
inline Color GetHotColor(double x);
inline constexpr Color GetGrayColor(double x) noexcept;
inline Color GetMagmaColor(double x);
inline Color GetInfernoColor(double x);
inline Color GetPlasmaColor(double x);
inline Color GetViridisColor(double x);
inline Color GetCividisColor(double x);
inline Color GetGithubColor(double x);
inline Color GetCubehelixColor(double x);
#if defined(TINYCOLORMAP_WITH_QT5) && defined(TINYCOLORMAP_WITH_EIGEN)
// Optional Qt + Eigen helpers for visualizing a matrix as a color image.
inline QImage CreateMatrixVisualization(const Eigen::MatrixXd& matrix);
inline void ExportMatrixVisualization(const Eigen::MatrixXd& matrix, const std::string& path);
#endif
//////////////////////////////////////////////////////////////////////////////////
// Private Implementation - public usage is not intended
//////////////////////////////////////////////////////////////////////////////////
namespace internal
{
// Restrict x to the closed interval [0, 1]. A NaN input fails both
// comparisons and is returned unchanged, matching the ternary original.
inline constexpr double Clamp01(double x) noexcept
{
    if (x < 0.0) { return 0.0; }
    if (x > 1.0) { return 1.0; }
    return x;
}
// Sample a lookup table of N control points at x in [0, 1] using linear
// interpolation between the two nearest entries.
template <std::size_t N>
Color CalcLerp(double x, const Color (&data)[N])
{
    const double scaled = Clamp01(x) * (N - 1);
    const double base = std::floor(scaled);
    const std::size_t lower = static_cast<std::size_t>(base);
    const std::size_t upper = static_cast<std::size_t>(std::ceil(scaled));
    // Fractional position between the two surrounding table entries.
    const double frac = scaled - base;
    return (1.0 - frac) * data[lower] + frac * data[upper];
}
// Map x to the midpoint of the quantization bin it falls into, yielding
// `num_levels` distinct output values across [0, 1].
//
// x          - input position; values outside [0, 1] are clamped.
// num_levels - requested number of discrete levels, clamped to [1, 255].
//
// Returns the bin midpoint, mapped back to the [0.0, 1.0] interval.
inline double QuantizeArgument(double x, unsigned int num_levels)
{
    // Clamp num_levels to range [1, 255].
    num_levels = (std::max)(1u, (std::min)(num_levels, 255u));
    // Clamp x to [0, 1]. Without this, a sufficiently negative x would be
    // converted to unsigned below, which is undefined behavior when the
    // truncated value cannot be represented in the destination type.
    x = (std::max)(0.0, (std::min)(x, 1.0));
    const double interval_length = 255.0 / num_levels;
    // Calculate index of the interval to which the given x belongs to.
    // Substracting eps prevents getting out of bounds index (x == 1.0).
    constexpr double eps = 0.0005;
    const unsigned int index = static_cast<unsigned int>((x * 255.0 - eps) / interval_length);
    // Calculate upper and lower bounds of the given interval.
    const unsigned int upper_boundary = static_cast<unsigned int>(index * interval_length + interval_length);
    const unsigned int lower_boundary = static_cast<unsigned int>(upper_boundary - interval_length);
    // Get middle "coordinate" of the given interval and move it back to [0.0, 1.0] interval.
    return static_cast<double>(upper_boundary + lower_boundary) * 0.5 / 255.0;
}
}
//////////////////////////////////////////////////////////////////////////////////
// Public Implementation
//////////////////////////////////////////////////////////////////////////////////
// Dispatch to the sampler selected by `type`; any unrecognized enumerator
// falls back to the Viridis colormap.
inline Color GetColor(double x, ColormapType type)
{
    switch (type)
    {
        case ColormapType::Parula:    return GetParulaColor(x);
        case ColormapType::Heat:      return GetHeatColor(x);
        case ColormapType::Jet:       return GetJetColor(x);
        case ColormapType::Turbo:     return GetTurboColor(x);
        case ColormapType::Hot:       return GetHotColor(x);
        case ColormapType::Gray:      return GetGrayColor(x);
        case ColormapType::Magma:     return GetMagmaColor(x);
        case ColormapType::Inferno:   return GetInfernoColor(x);
        case ColormapType::Plasma:    return GetPlasmaColor(x);
        case ColormapType::Viridis:   return GetViridisColor(x);
        case ColormapType::Cividis:   return GetCividisColor(x);
        case ColormapType::Github:    return GetGithubColor(x);
        case ColormapType::Cubehelix: return GetCubehelixColor(x);
        default:                      return GetViridisColor(x);
    }
}
// Sample colormap `type` at x after snapping x to one of `num_levels`
// discrete bins, producing a banded (stepped) colormap.
inline Color GetQuantizedColor(double x, unsigned int num_levels, ColormapType type)
{
    const double quantized_x = internal::QuantizeArgument(x, num_levels);
    return GetColor(quantized_x, type);
}
// MATLAB-style "parula" colormap: a 256-entry RGB lookup table sampled
// with linear interpolation (x is clamped to [0, 1] by CalcLerp).
inline Color GetParulaColor(double x)
{
constexpr Color data[] =
{
{ 0.2081, 0.1663, 0.5292 },
{ 0.2091, 0.1721, 0.5411 },
{ 0.2101, 0.1779, 0.553 },
{ 0.2109, 0.1837, 0.565 },
{ 0.2116, 0.1895, 0.5771 },
{ 0.2121, 0.1954, 0.5892 },
{ 0.2124, 0.2013, 0.6013 },
{ 0.2125, 0.2072, 0.6135 },
{ 0.2123, 0.2132, 0.6258 },
{ 0.2118, 0.2192, 0.6381 },
{ 0.2111, 0.2253, 0.6505 },
{ 0.2099, 0.2315, 0.6629 },
{ 0.2084, 0.2377, 0.6753 },
{ 0.2063, 0.244, 0.6878 },
{ 0.2038, 0.2503, 0.7003 },
{ 0.2006, 0.2568, 0.7129 },
{ 0.1968, 0.2632, 0.7255 },
{ 0.1921, 0.2698, 0.7381 },
{ 0.1867, 0.2764, 0.7507 },
{ 0.1802, 0.2832, 0.7634 },
{ 0.1728, 0.2902, 0.7762 },
{ 0.1641, 0.2975, 0.789 },
{ 0.1541, 0.3052, 0.8017 },
{ 0.1427, 0.3132, 0.8145 },
{ 0.1295, 0.3217, 0.8269 },
{ 0.1147, 0.3306, 0.8387 },
{ 0.0986, 0.3397, 0.8495 },
{ 0.0816, 0.3486, 0.8588 },
{ 0.0646, 0.3572, 0.8664 },
{ 0.0482, 0.3651, 0.8722 },
{ 0.0329, 0.3724, 0.8765 },
{ 0.0213, 0.3792, 0.8796 },
{ 0.0136, 0.3853, 0.8815 },
{ 0.0086, 0.3911, 0.8827 },
{ 0.006, 0.3965, 0.8833 },
{ 0.0051, 0.4017, 0.8834 },
{ 0.0054, 0.4066, 0.8831 },
{ 0.0067, 0.4113, 0.8825 },
{ 0.0089, 0.4159, 0.8816 },
{ 0.0116, 0.4203, 0.8805 },
{ 0.0148, 0.4246, 0.8793 },
{ 0.0184, 0.4288, 0.8779 },
{ 0.0223, 0.4329, 0.8763 },
{ 0.0264, 0.437, 0.8747 },
{ 0.0306, 0.441, 0.8729 },
{ 0.0349, 0.4449, 0.8711 },
{ 0.0394, 0.4488, 0.8692 },
{ 0.0437, 0.4526, 0.8672 },
{ 0.0477, 0.4564, 0.8652 },
{ 0.0514, 0.4602, 0.8632 },
{ 0.0549, 0.464, 0.8611 },
{ 0.0582, 0.4677, 0.8589 },
{ 0.0612, 0.4714, 0.8568 },
{ 0.064, 0.4751, 0.8546 },
{ 0.0666, 0.4788, 0.8525 },
{ 0.0689, 0.4825, 0.8503 },
{ 0.071, 0.4862, 0.8481 },
{ 0.0729, 0.4899, 0.846 },
{ 0.0746, 0.4937, 0.8439 },
{ 0.0761, 0.4974, 0.8418 },
{ 0.0773, 0.5012, 0.8398 },
{ 0.0782, 0.5051, 0.8378 },
{ 0.0789, 0.5089, 0.8359 },
{ 0.0794, 0.5129, 0.8341 },
{ 0.0795, 0.5169, 0.8324 },
{ 0.0793, 0.521, 0.8308 },
{ 0.0788, 0.5251, 0.8293 },
{ 0.0778, 0.5295, 0.828 },
{ 0.0764, 0.5339, 0.827 },
{ 0.0746, 0.5384, 0.8261 },
{ 0.0724, 0.5431, 0.8253 },
{ 0.0698, 0.5479, 0.8247 },
{ 0.0668, 0.5527, 0.8243 },
{ 0.0636, 0.5577, 0.8239 },
{ 0.06, 0.5627, 0.8237 },
{ 0.0562, 0.5677, 0.8234 },
{ 0.0523, 0.5727, 0.8231 },
{ 0.0484, 0.5777, 0.8228 },
{ 0.0445, 0.5826, 0.8223 },
{ 0.0408, 0.5874, 0.8217 },
{ 0.0372, 0.5922, 0.8209 },
{ 0.0342, 0.5968, 0.8198 },
{ 0.0317, 0.6012, 0.8186 },
{ 0.0296, 0.6055, 0.8171 },
{ 0.0279, 0.6097, 0.8154 },
{ 0.0265, 0.6137, 0.8135 },
{ 0.0255, 0.6176, 0.8114 },
{ 0.0248, 0.6214, 0.8091 },
{ 0.0243, 0.625, 0.8066 },
{ 0.0239, 0.6285, 0.8039 },
{ 0.0237, 0.6319, 0.801 },
{ 0.0235, 0.6352, 0.798 },
{ 0.0233, 0.6384, 0.7948 },
{ 0.0231, 0.6415, 0.7916 },
{ 0.023, 0.6445, 0.7881 },
{ 0.0229, 0.6474, 0.7846 },
{ 0.0227, 0.6503, 0.781, },
{ 0.0227, 0.6531, 0.7773 },
{ 0.0232, 0.6558, 0.7735 },
{ 0.0238, 0.6585, 0.7696 },
{ 0.0246, 0.6611, 0.7656 },
{ 0.0263, 0.6637, 0.7615 },
{ 0.0282, 0.6663, 0.7574 },
{ 0.0306, 0.6688, 0.7532 },
{ 0.0338, 0.6712, 0.749 },
{ 0.0373, 0.6737, 0.7446 },
{ 0.0418, 0.6761, 0.7402 },
{ 0.0467, 0.6784, 0.7358 },
{ 0.0516, 0.6808, 0.7313 },
{ 0.0574, 0.6831, 0.7267 },
{ 0.0629, 0.6854, 0.7221 },
{ 0.0692, 0.6877, 0.7173 },
{ 0.0755, 0.6899, 0.7126 },
{ 0.082, 0.6921, 0.7078 },
{ 0.0889, 0.6943, 0.7029 },
{ 0.0956, 0.6965, 0.6979 },
{ 0.1031, 0.6986, 0.6929 },
{ 0.1104, 0.7007, 0.6878 },
{ 0.118, 0.7028, 0.6827 },
{ 0.1258, 0.7049, 0.6775 },
{ 0.1335, 0.7069, 0.6723 },
{ 0.1418, 0.7089, 0.6669 },
{ 0.1499, 0.7109, 0.6616 },
{ 0.1585, 0.7129, 0.6561 },
{ 0.1671, 0.7148, 0.6507 },
{ 0.1758, 0.7168, 0.6451 },
{ 0.1849, 0.7186, 0.6395 },
{ 0.1938, 0.7205, 0.6338 },
{ 0.2033, 0.7223, 0.6281 },
{ 0.2128, 0.7241, 0.6223 },
{ 0.2224, 0.7259, 0.6165 },
{ 0.2324, 0.7275, 0.6107 },
{ 0.2423, 0.7292, 0.6048 },
{ 0.2527, 0.7308, 0.5988 },
{ 0.2631, 0.7324, 0.5929 },
{ 0.2735, 0.7339, 0.5869 },
{ 0.2845, 0.7354, 0.5809 },
{ 0.2953, 0.7368, 0.5749 },
{ 0.3064, 0.7381, 0.5689 },
{ 0.3177, 0.7394, 0.563 },
{ 0.3289, 0.7406, 0.557 },
{ 0.3405, 0.7417, 0.5512 },
{ 0.352, 0.7428, 0.5453 },
{ 0.3635, 0.7438, 0.5396 },
{ 0.3753, 0.7446, 0.5339 },
{ 0.3869, 0.7454, 0.5283 },
{ 0.3986, 0.7461, 0.5229 },
{ 0.4103, 0.7467, 0.5175 },
{ 0.4218, 0.7473, 0.5123 },
{ 0.4334, 0.7477, 0.5072 },
{ 0.4447, 0.7482, 0.5021 },
{ 0.4561, 0.7485, 0.4972 },
{ 0.4672, 0.7487, 0.4924 },
{ 0.4783, 0.7489, 0.4877 },
{ 0.4892, 0.7491, 0.4831 },
{ 0.5, 0.7491, 0.4786 },
{ 0.5106, 0.7492, 0.4741 },
{ 0.5212, 0.7492, 0.4698 },
{ 0.5315, 0.7491, 0.4655 },
{ 0.5418, 0.749, 0.4613 },
{ 0.5519, 0.7489, 0.4571 },
{ 0.5619, 0.7487, 0.4531 },
{ 0.5718, 0.7485, 0.449 },
{ 0.5816, 0.7482, 0.4451 },
{ 0.5913, 0.7479, 0.4412 },
{ 0.6009, 0.7476, 0.4374 },
{ 0.6103, 0.7473, 0.4335 },
{ 0.6197, 0.7469, 0.4298 },
{ 0.629, 0.7465, 0.4261 },
{ 0.6382, 0.746, 0.4224 },
{ 0.6473, 0.7456, 0.4188 },
{ 0.6564, 0.7451, 0.4152 },
{ 0.6653, 0.7446, 0.4116 },
{ 0.6742, 0.7441, 0.4081 },
{ 0.683, 0.7435, 0.4046 },
{ 0.6918, 0.743, 0.4011 },
{ 0.7004, 0.7424, 0.3976 },
{ 0.7091, 0.7418, 0.3942 },
{ 0.7176, 0.7412, 0.3908 },
{ 0.7261, 0.7405, 0.3874 },
{ 0.7346, 0.7399, 0.384 },
{ 0.743, 0.7392, 0.3806 },
{ 0.7513, 0.7385, 0.3773 },
{ 0.7596, 0.7378, 0.3739 },
{ 0.7679, 0.7372, 0.3706 },
{ 0.7761, 0.7364, 0.3673 },
{ 0.7843, 0.7357, 0.3639 },
{ 0.7924, 0.735, 0.3606 },
{ 0.8005, 0.7343, 0.3573 },
{ 0.8085, 0.7336, 0.3539 },
{ 0.8166, 0.7329, 0.3506 },
{ 0.8246, 0.7322, 0.3472 },
{ 0.8325, 0.7315, 0.3438 },
{ 0.8405, 0.7308, 0.3404 },
{ 0.8484, 0.7301, 0.337 },
{ 0.8563, 0.7294, 0.3336 },
{ 0.8642, 0.7288, 0.33 },
{ 0.872, 0.7282, 0.3265 },
{ 0.8798, 0.7276, 0.3229 },
{ 0.8877, 0.7271, 0.3193 },
{ 0.8954, 0.7266, 0.3156 },
{ 0.9032, 0.7262, 0.3117 },
{ 0.911, 0.7259, 0.3078 },
{ 0.9187, 0.7256, 0.3038 },
{ 0.9264, 0.7256, 0.2996 },
{ 0.9341, 0.7256, 0.2953 },
{ 0.9417, 0.7259, 0.2907 },
{ 0.9493, 0.7264, 0.2859 },
{ 0.9567, 0.7273, 0.2808 },
{ 0.9639, 0.7285, 0.2754 },
{ 0.9708, 0.7303, 0.2696 },
{ 0.9773, 0.7326, 0.2634 },
{ 0.9831, 0.7355, 0.257 },
{ 0.9882, 0.739, 0.2504 },
{ 0.9922, 0.7431, 0.2437 },
{ 0.9952, 0.7476, 0.2373 },
{ 0.9973, 0.7524, 0.231 },
{ 0.9986, 0.7573, 0.2251 },
{ 0.9991, 0.7624, 0.2195 },
{ 0.999, 0.7675, 0.2141 },
{ 0.9985, 0.7726, 0.209 },
{ 0.9976, 0.7778, 0.2042 },
{ 0.9964, 0.7829, 0.1995 },
{ 0.995, 0.788, 0.1949 },
{ 0.9933, 0.7931, 0.1905 },
{ 0.9914, 0.7981, 0.1863 },
{ 0.9894, 0.8032, 0.1821 },
{ 0.9873, 0.8083, 0.178 },
{ 0.9851, 0.8133, 0.174 },
{ 0.9828, 0.8184, 0.17 },
{ 0.9805, 0.8235, 0.1661 },
{ 0.9782, 0.8286, 0.1622 },
{ 0.9759, 0.8337, 0.1583 },
{ 0.9736, 0.8389, 0.1544 },
{ 0.9713, 0.8441, 0.1505 },
{ 0.9692, 0.8494, 0.1465 },
{ 0.9672, 0.8548, 0.1425 },
{ 0.9654, 0.8603, 0.1385 },
{ 0.9638, 0.8659, 0.1343 },
{ 0.9623, 0.8716, 0.1301 },
{ 0.9611, 0.8774, 0.1258 },
{ 0.96, 0.8834, 0.1215 },
{ 0.9593, 0.8895, 0.1171 },
{ 0.9588, 0.8958, 0.1126 },
{ 0.9586, 0.9022, 0.1082 },
{ 0.9587, 0.9088, 0.1036 },
{ 0.9591, 0.9155, 0.099 },
{ 0.9599, 0.9225, 0.0944 },
{ 0.961, 0.9296, 0.0897 },
{ 0.9624, 0.9368, 0.085 },
{ 0.9641, 0.9443, 0.0802 },
{ 0.9662, 0.9518, 0.0753 },
{ 0.9685, 0.9595, 0.0703 },
{ 0.971, 0.9673, 0.0651 },
{ 0.9736, 0.9752, 0.0597 },
{ 0.9763, 0.9831, 0.0538 }
};
return internal::CalcLerp(x, data);
}
// Classic "heat" ramp: blue -> cyan -> green -> yellow -> red,
// linearly interpolated between five control points.
inline Color GetHeatColor(double x)
{
    constexpr Color control_points[] =
    {
        { 0.0, 0.0, 1.0 },  // blue
        { 0.0, 1.0, 1.0 },  // cyan
        { 0.0, 1.0, 0.0 },  // green
        { 1.0, 1.0, 0.0 },  // yellow
        { 1.0, 0.0, 0.0 }   // red
    };
    return internal::CalcLerp(x, control_points);
}
// MATLAB-style "jet" colormap: dark blue through cyan, yellow, and red
// to dark red, linearly interpolated between nine control points.
inline Color GetJetColor(double x)
{
    constexpr Color control_points[] =
    {
        { 0.0, 0.0, 0.5 },  // dark blue
        { 0.0, 0.0, 1.0 },  // blue
        { 0.0, 0.5, 1.0 },
        { 0.0, 1.0, 1.0 },  // cyan
        { 0.5, 1.0, 0.5 },
        { 1.0, 1.0, 0.0 },  // yellow
        { 1.0, 0.5, 0.0 },
        { 1.0, 0.0, 0.0 },  // red
        { 0.5, 0.0, 0.0 }   // dark red
    };
    return internal::CalcLerp(x, control_points);
}
// Google's "Turbo" colormap (Anton Mikhailov; see the license note in the
// file header): a 256-entry RGB lookup table sampled with linear
// interpolation (x is clamped to [0, 1] by CalcLerp).
inline Color GetTurboColor(double x)
{
constexpr Color data[] =
{
{ 0.18995, 0.07176, 0.23217 },
{ 0.19483, 0.08339, 0.26149 },
{ 0.19956, 0.09498, 0.29024 },
{ 0.20415, 0.10652, 0.31844 },
{ 0.20860, 0.11802, 0.34607 },
{ 0.21291, 0.12947, 0.37314 },
{ 0.21708, 0.14087, 0.39964 },
{ 0.22111, 0.15223, 0.42558 },
{ 0.22500, 0.16354, 0.45096 },
{ 0.22875, 0.17481, 0.47578 },
{ 0.23236, 0.18603, 0.50004 },
{ 0.23582, 0.19720, 0.52373 },
{ 0.23915, 0.20833, 0.54686 },
{ 0.24234, 0.21941, 0.56942 },
{ 0.24539, 0.23044, 0.59142 },
{ 0.24830, 0.24143, 0.61286 },
{ 0.25107, 0.25237, 0.63374 },
{ 0.25369, 0.26327, 0.65406 },
{ 0.25618, 0.27412, 0.67381 },
{ 0.25853, 0.28492, 0.69300 },
{ 0.26074, 0.29568, 0.71162 },
{ 0.26280, 0.30639, 0.72968 },
{ 0.26473, 0.31706, 0.74718 },
{ 0.26652, 0.32768, 0.76412 },
{ 0.26816, 0.33825, 0.78050 },
{ 0.26967, 0.34878, 0.79631 },
{ 0.27103, 0.35926, 0.81156 },
{ 0.27226, 0.36970, 0.82624 },
{ 0.27334, 0.38008, 0.84037 },
{ 0.27429, 0.39043, 0.85393 },
{ 0.27509, 0.40072, 0.86692 },
{ 0.27576, 0.41097, 0.87936 },
{ 0.27628, 0.42118, 0.89123 },
{ 0.27667, 0.43134, 0.90254 },
{ 0.27691, 0.44145, 0.91328 },
{ 0.27701, 0.45152, 0.92347 },
{ 0.27698, 0.46153, 0.93309 },
{ 0.27680, 0.47151, 0.94214 },
{ 0.27648, 0.48144, 0.95064 },
{ 0.27603, 0.49132, 0.95857 },
{ 0.27543, 0.50115, 0.96594 },
{ 0.27469, 0.51094, 0.97275 },
{ 0.27381, 0.52069, 0.97899 },
{ 0.27273, 0.53040, 0.98461 },
{ 0.27106, 0.54015, 0.98930 },
{ 0.26878, 0.54995, 0.99303 },
{ 0.26592, 0.55979, 0.99583 },
{ 0.26252, 0.56967, 0.99773 },
{ 0.25862, 0.57958, 0.99876 },
{ 0.25425, 0.58950, 0.99896 },
{ 0.24946, 0.59943, 0.99835 },
{ 0.24427, 0.60937, 0.99697 },
{ 0.23874, 0.61931, 0.99485 },
{ 0.23288, 0.62923, 0.99202 },
{ 0.22676, 0.63913, 0.98851 },
{ 0.22039, 0.64901, 0.98436 },
{ 0.21382, 0.65886, 0.97959 },
{ 0.20708, 0.66866, 0.97423 },
{ 0.20021, 0.67842, 0.96833 },
{ 0.19326, 0.68812, 0.96190 },
{ 0.18625, 0.69775, 0.95498 },
{ 0.17923, 0.70732, 0.94761 },
{ 0.17223, 0.71680, 0.93981 },
{ 0.16529, 0.72620, 0.93161 },
{ 0.15844, 0.73551, 0.92305 },
{ 0.15173, 0.74472, 0.91416 },
{ 0.14519, 0.75381, 0.90496 },
{ 0.13886, 0.76279, 0.89550 },
{ 0.13278, 0.77165, 0.88580 },
{ 0.12698, 0.78037, 0.87590 },
{ 0.12151, 0.78896, 0.86581 },
{ 0.11639, 0.79740, 0.85559 },
{ 0.11167, 0.80569, 0.84525 },
{ 0.10738, 0.81381, 0.83484 },
{ 0.10357, 0.82177, 0.82437 },
{ 0.10026, 0.82955, 0.81389 },
{ 0.09750, 0.83714, 0.80342 },
{ 0.09532, 0.84455, 0.79299 },
{ 0.09377, 0.85175, 0.78264 },
{ 0.09287, 0.85875, 0.77240 },
{ 0.09267, 0.86554, 0.76230 },
{ 0.09320, 0.87211, 0.75237 },
{ 0.09451, 0.87844, 0.74265 },
{ 0.09662, 0.88454, 0.73316 },
{ 0.09958, 0.89040, 0.72393 },
{ 0.10342, 0.89600, 0.71500 },
{ 0.10815, 0.90142, 0.70599 },
{ 0.11374, 0.90673, 0.69651 },
{ 0.12014, 0.91193, 0.68660 },
{ 0.12733, 0.91701, 0.67627 },
{ 0.13526, 0.92197, 0.66556 },
{ 0.14391, 0.92680, 0.65448 },
{ 0.15323, 0.93151, 0.64308 },
{ 0.16319, 0.93609, 0.63137 },
{ 0.17377, 0.94053, 0.61938 },
{ 0.18491, 0.94484, 0.60713 },
{ 0.19659, 0.94901, 0.59466 },
{ 0.20877, 0.95304, 0.58199 },
{ 0.22142, 0.95692, 0.56914 },
{ 0.23449, 0.96065, 0.55614 },
{ 0.24797, 0.96423, 0.54303 },
{ 0.26180, 0.96765, 0.52981 },
{ 0.27597, 0.97092, 0.51653 },
{ 0.29042, 0.97403, 0.50321 },
{ 0.30513, 0.97697, 0.48987 },
{ 0.32006, 0.97974, 0.47654 },
{ 0.33517, 0.98234, 0.46325 },
{ 0.35043, 0.98477, 0.45002 },
{ 0.36581, 0.98702, 0.43688 },
{ 0.38127, 0.98909, 0.42386 },
{ 0.39678, 0.99098, 0.41098 },
{ 0.41229, 0.99268, 0.39826 },
{ 0.42778, 0.99419, 0.38575 },
{ 0.44321, 0.99551, 0.37345 },
{ 0.45854, 0.99663, 0.36140 },
{ 0.47375, 0.99755, 0.34963 },
{ 0.48879, 0.99828, 0.33816 },
{ 0.50362, 0.99879, 0.32701 },
{ 0.51822, 0.99910, 0.31622 },
{ 0.53255, 0.99919, 0.30581 },
{ 0.54658, 0.99907, 0.29581 },
{ 0.56026, 0.99873, 0.28623 },
{ 0.57357, 0.99817, 0.27712 },
{ 0.58646, 0.99739, 0.26849 },
{ 0.59891, 0.99638, 0.26038 },
{ 0.61088, 0.99514, 0.25280 },
{ 0.62233, 0.99366, 0.24579 },
{ 0.63323, 0.99195, 0.23937 },
{ 0.64362, 0.98999, 0.23356 },
{ 0.65394, 0.98775, 0.22835 },
{ 0.66428, 0.98524, 0.22370 },
{ 0.67462, 0.98246, 0.21960 },
{ 0.68494, 0.97941, 0.21602 },
{ 0.69525, 0.97610, 0.21294 },
{ 0.70553, 0.97255, 0.21032 },
{ 0.71577, 0.96875, 0.20815 },
{ 0.72596, 0.96470, 0.20640 },
{ 0.73610, 0.96043, 0.20504 },
{ 0.74617, 0.95593, 0.20406 },
{ 0.75617, 0.95121, 0.20343 },
{ 0.76608, 0.94627, 0.20311 },
{ 0.77591, 0.94113, 0.20310 },
{ 0.78563, 0.93579, 0.20336 },
{ 0.79524, 0.93025, 0.20386 },
{ 0.80473, 0.92452, 0.20459 },
{ 0.81410, 0.91861, 0.20552 },
{ 0.82333, 0.91253, 0.20663 },
{ 0.83241, 0.90627, 0.20788 },
{ 0.84133, 0.89986, 0.20926 },
{ 0.85010, 0.89328, 0.21074 },
{ 0.85868, 0.88655, 0.21230 },
{ 0.86709, 0.87968, 0.21391 },
{ 0.87530, 0.87267, 0.21555 },
{ 0.88331, 0.86553, 0.21719 },
{ 0.89112, 0.85826, 0.21880 },
{ 0.89870, 0.85087, 0.22038 },
{ 0.90605, 0.84337, 0.22188 },
{ 0.91317, 0.83576, 0.22328 },
{ 0.92004, 0.82806, 0.22456 },
{ 0.92666, 0.82025, 0.22570 },
{ 0.93301, 0.81236, 0.22667 },
{ 0.93909, 0.80439, 0.22744 },
{ 0.94489, 0.79634, 0.22800 },
{ 0.95039, 0.78823, 0.22831 },
{ 0.95560, 0.78005, 0.22836 },
{ 0.96049, 0.77181, 0.22811 },
{ 0.96507, 0.76352, 0.22754 },
{ 0.96931, 0.75519, 0.22663 },
{ 0.97323, 0.74682, 0.22536 },
{ 0.97679, 0.73842, 0.22369 },
{ 0.98000, 0.73000, 0.22161 },
{ 0.98289, 0.72140, 0.21918 },
{ 0.98549, 0.71250, 0.21650 },
{ 0.98781, 0.70330, 0.21358 },
{ 0.98986, 0.69382, 0.21043 },
{ 0.99163, 0.68408, 0.20706 },
{ 0.99314, 0.67408, 0.20348 },
{ 0.99438, 0.66386, 0.19971 },
{ 0.99535, 0.65341, 0.19577 },
{ 0.99607, 0.64277, 0.19165 },
{ 0.99654, 0.63193, 0.18738 },
{ 0.99675, 0.62093, 0.18297 },
{ 0.99672, 0.60977, 0.17842 },
{ 0.99644, 0.59846, 0.17376 },
{ 0.99593, 0.58703, 0.16899 },
{ 0.99517, 0.57549, 0.16412 },
{ 0.99419, 0.56386, 0.15918 },
{ 0.99297, 0.55214, 0.15417 },
{ 0.99153, 0.54036, 0.14910 },
{ 0.98987, 0.52854, 0.14398 },
{ 0.98799, 0.51667, 0.13883 },
{ 0.98590, 0.50479, 0.13367 },
{ 0.98360, 0.49291, 0.12849 },
{ 0.98108, 0.48104, 0.12332 },
{ 0.97837, 0.46920, 0.11817 },
{ 0.97545, 0.45740, 0.11305 },
{ 0.97234, 0.44565, 0.10797 },
{ 0.96904, 0.43399, 0.10294 },
{ 0.96555, 0.42241, 0.09798 },
{ 0.96187, 0.41093, 0.09310 },
{ 0.95801, 0.39958, 0.08831 },
{ 0.95398, 0.38836, 0.08362 },
{ 0.94977, 0.37729, 0.07905 },
{ 0.94538, 0.36638, 0.07461 },
{ 0.94084, 0.35566, 0.07031 },
{ 0.93612, 0.34513, 0.06616 },
{ 0.93125, 0.33482, 0.06218 },
{ 0.92623, 0.32473, 0.05837 },
{ 0.92105, 0.31489, 0.05475 },
{ 0.91572, 0.30530, 0.05134 },
{ 0.91024, 0.29599, 0.04814 },
{ 0.90463, 0.28696, 0.04516 },
{ 0.89888, 0.27824, 0.04243 },
{ 0.89298, 0.26981, 0.03993 },
{ 0.88691, 0.26152, 0.03753 },
{ 0.88066, 0.25334, 0.03521 },
{ 0.87422, 0.24526, 0.03297 },
{ 0.86760, 0.23730, 0.03082 },
{ 0.86079, 0.22945, 0.02875 },
{ 0.85380, 0.22170, 0.02677 },
{ 0.84662, 0.21407, 0.02487 },
{ 0.83926, 0.20654, 0.02305 },
{ 0.83172, 0.19912, 0.02131 },
{ 0.82399, 0.19182, 0.01966 },
{ 0.81608, 0.18462, 0.01809 },
{ 0.80799, 0.17753, 0.01660 },
{ 0.79971, 0.17055, 0.01520 },
{ 0.79125, 0.16368, 0.01387 },
{ 0.78260, 0.15693, 0.01264 },
{ 0.77377, 0.15028, 0.01148 },
{ 0.76476, 0.14374, 0.01041 },
{ 0.75556, 0.13731, 0.00942 },
{ 0.74617, 0.13098, 0.00851 },
{ 0.73661, 0.12477, 0.00769 },
{ 0.72686, 0.11867, 0.00695 },
{ 0.71692, 0.11268, 0.00629 },
{ 0.70680, 0.10680, 0.00571 },
{ 0.69650, 0.10102, 0.00522 },
{ 0.68602, 0.09536, 0.00481 },
{ 0.67535, 0.08980, 0.00449 },
{ 0.66449, 0.08436, 0.00424 },
{ 0.65345, 0.07902, 0.00408 },
{ 0.64223, 0.07380, 0.00401 },
{ 0.63082, 0.06868, 0.00401 },
{ 0.61923, 0.06367, 0.00410 },
{ 0.60746, 0.05878, 0.00427 },
{ 0.59550, 0.05399, 0.00453 },
{ 0.58336, 0.04931, 0.00486 },
{ 0.57103, 0.04474, 0.00529 },
{ 0.55852, 0.04028, 0.00579 },
{ 0.54583, 0.03593, 0.00638 },
{ 0.53295, 0.03169, 0.00705 },
{ 0.51989, 0.02756, 0.00780 },
{ 0.50664, 0.02354, 0.00863 },
{ 0.49321, 0.01963, 0.00955 },
{ 0.47960, 0.01583, 0.01055 }
};
return internal::CalcLerp(x, data);
}
// "Hot" colormap built from three linear segments:
// black -> red on [0, 0.4), red -> yellow on [0.4, 0.8),
// yellow -> white on [0.8, 1.0].
inline Color GetHotColor(double x)
{
    const double t = internal::Clamp01(x);
    constexpr Color red{ 1.0, 0.0, 0.0 };
    constexpr Color green{ 0.0, 1.0, 0.0 };
    constexpr Color blue{ 0.0, 0.0, 1.0 };
    if (t < 0.4)
    {
        // Ramp the red channel up from black.
        return (t / 0.4) * red;
    }
    if (t < 0.8)
    {
        // Full red; ramp the green channel up toward yellow.
        return red + ((t - 0.4) / (0.8 - 0.4)) * green;
    }
    // Full red and green; ramp the blue channel up toward white.
    return red + green + ((t - 0.8) / (1.0 - 0.8)) * blue;
}
// Inverted linear grayscale: x = 0 maps to white, x = 1 maps to black.
inline constexpr Color GetGrayColor(double x) noexcept
{
    return Color{ 1.0 - internal::Clamp01(x) };
}
// "Magma" colormap: a 256-entry RGB lookup table sampled with linear
// interpolation (x is clamped to [0, 1] by CalcLerp).
inline Color GetMagmaColor(double x)
{
constexpr Color data[] =
{
{ 0.001462, 0.000466, 0.013866 },
{ 0.002258, 0.001295, 0.018331 },
{ 0.003279, 0.002305, 0.023708 },
{ 0.004512, 0.003490, 0.029965 },
{ 0.005950, 0.004843, 0.037130 },
{ 0.007588, 0.006356, 0.044973 },
{ 0.009426, 0.008022, 0.052844 },
{ 0.011465, 0.009828, 0.060750 },
{ 0.013708, 0.011771, 0.068667 },
{ 0.016156, 0.013840, 0.076603 },
{ 0.018815, 0.016026, 0.084584 },
{ 0.021692, 0.018320, 0.092610 },
{ 0.024792, 0.020715, 0.100676 },
{ 0.028123, 0.023201, 0.108787 },
{ 0.031696, 0.025765, 0.116965 },
{ 0.035520, 0.028397, 0.125209 },
{ 0.039608, 0.031090, 0.133515 },
{ 0.043830, 0.033830, 0.141886 },
{ 0.048062, 0.036607, 0.150327 },
{ 0.052320, 0.039407, 0.158841 },
{ 0.056615, 0.042160, 0.167446 },
{ 0.060949, 0.044794, 0.176129 },
{ 0.065330, 0.047318, 0.184892 },
{ 0.069764, 0.049726, 0.193735 },
{ 0.074257, 0.052017, 0.202660 },
{ 0.078815, 0.054184, 0.211667 },
{ 0.083446, 0.056225, 0.220755 },
{ 0.088155, 0.058133, 0.229922 },
{ 0.092949, 0.059904, 0.239164 },
{ 0.097833, 0.061531, 0.248477 },
{ 0.102815, 0.063010, 0.257854 },
{ 0.107899, 0.064335, 0.267289 },
{ 0.113094, 0.065492, 0.276784 },
{ 0.118405, 0.066479, 0.286321 },
{ 0.123833, 0.067295, 0.295879 },
{ 0.129380, 0.067935, 0.305443 },
{ 0.135053, 0.068391, 0.315000 },
{ 0.140858, 0.068654, 0.324538 },
{ 0.146785, 0.068738, 0.334011 },
{ 0.152839, 0.068637, 0.343404 },
{ 0.159018, 0.068354, 0.352688 },
{ 0.165308, 0.067911, 0.361816 },
{ 0.171713, 0.067305, 0.370771 },
{ 0.178212, 0.066576, 0.379497 },
{ 0.184801, 0.065732, 0.387973 },
{ 0.191460, 0.064818, 0.396152 },
{ 0.198177, 0.063862, 0.404009 },
{ 0.204935, 0.062907, 0.411514 },
{ 0.211718, 0.061992, 0.418647 },
{ 0.218512, 0.061158, 0.425392 },
{ 0.225302, 0.060445, 0.431742 },
{ 0.232077, 0.059889, 0.437695 },
{ 0.238826, 0.059517, 0.443256 },
{ 0.245543, 0.059352, 0.448436 },
{ 0.252220, 0.059415, 0.453248 },
{ 0.258857, 0.059706, 0.457710 },
{ 0.265447, 0.060237, 0.461840 },
{ 0.271994, 0.060994, 0.465660 },
{ 0.278493, 0.061978, 0.469190 },
{ 0.284951, 0.063168, 0.472451 },
{ 0.291366, 0.064553, 0.475462 },
{ 0.297740, 0.066117, 0.478243 },
{ 0.304081, 0.067835, 0.480812 },
{ 0.310382, 0.069702, 0.483186 },
{ 0.316654, 0.071690, 0.485380 },
{ 0.322899, 0.073782, 0.487408 },
{ 0.329114, 0.075972, 0.489287 },
{ 0.335308, 0.078236, 0.491024 },
{ 0.341482, 0.080564, 0.492631 },
{ 0.347636, 0.082946, 0.494121 },
{ 0.353773, 0.085373, 0.495501 },
{ 0.359898, 0.087831, 0.496778 },
{ 0.366012, 0.090314, 0.497960 },
{ 0.372116, 0.092816, 0.499053 },
{ 0.378211, 0.095332, 0.500067 },
{ 0.384299, 0.097855, 0.501002 },
{ 0.390384, 0.100379, 0.501864 },
{ 0.396467, 0.102902, 0.502658 },
{ 0.402548, 0.105420, 0.503386 },
{ 0.408629, 0.107930, 0.504052 },
{ 0.414709, 0.110431, 0.504662 },
{ 0.420791, 0.112920, 0.505215 },
{ 0.426877, 0.115395, 0.505714 },
{ 0.432967, 0.117855, 0.506160 },
{ 0.439062, 0.120298, 0.506555 },
{ 0.445163, 0.122724, 0.506901 },
{ 0.451271, 0.125132, 0.507198 },
{ 0.457386, 0.127522, 0.507448 },
{ 0.463508, 0.129893, 0.507652 },
{ 0.469640, 0.132245, 0.507809 },
{ 0.475780, 0.134577, 0.507921 },
{ 0.481929, 0.136891, 0.507989 },
{ 0.488088, 0.139186, 0.508011 },
{ 0.494258, 0.141462, 0.507988 },
{ 0.500438, 0.143719, 0.507920 },
{ 0.506629, 0.145958, 0.507806 },
{ 0.512831, 0.148179, 0.507648 },
{ 0.519045, 0.150383, 0.507443 },
{ 0.525270, 0.152569, 0.507192 },
{ 0.531507, 0.154739, 0.506895 },
{ 0.537755, 0.156894, 0.506551 },
{ 0.544015, 0.159033, 0.506159 },
{ 0.550287, 0.161158, 0.505719 },
{ 0.556571, 0.163269, 0.505230 },
{ 0.562866, 0.165368, 0.504692 },
{ 0.569172, 0.167454, 0.504105 },
{ 0.575490, 0.169530, 0.503466 },
{ 0.581819, 0.171596, 0.502777 },
{ 0.588158, 0.173652, 0.502035 },
{ 0.594508, 0.175701, 0.501241 },
{ 0.600868, 0.177743, 0.500394 },
{ 0.607238, 0.179779, 0.499492 },
{ 0.613617, 0.181811, 0.498536 },
{ 0.620005, 0.183840, 0.497524 },
{ 0.626401, 0.185867, 0.496456 },
{ 0.632805, 0.187893, 0.495332 },
{ 0.639216, 0.189921, 0.494150 },
{ 0.645633, 0.191952, 0.492910 },
{ 0.652056, 0.193986, 0.491611 },
{ 0.658483, 0.196027, 0.490253 },
{ 0.664915, 0.198075, 0.488836 },
{ 0.671349, 0.200133, 0.487358 },
{ 0.677786, 0.202203, 0.485819 },
{ 0.684224, 0.204286, 0.484219 },
{ 0.690661, 0.206384, 0.482558 },
{ 0.697098, 0.208501, 0.480835 },
{ 0.703532, 0.210638, 0.479049 },
{ 0.709962, 0.212797, 0.477201 },
{ 0.716387, 0.214982, 0.475290 },
{ 0.722805, 0.217194, 0.473316 },
{ 0.729216, 0.219437, 0.471279 },
{ 0.735616, 0.221713, 0.469180 },
{ 0.742004, 0.224025, 0.467018 },
{ 0.748378, 0.226377, 0.464794 },
{ 0.754737, 0.228772, 0.462509 },
{ 0.761077, 0.231214, 0.460162 },
{ 0.767398, 0.233705, 0.457755 },
{ 0.773695, 0.236249, 0.455289 },
{ 0.779968, 0.238851, 0.452765 },
{ 0.786212, 0.241514, 0.450184 },
{ 0.792427, 0.244242, 0.447543 },
{ 0.798608, 0.247040, 0.444848 },
{ 0.804752, 0.249911, 0.442102 },
{ 0.810855, 0.252861, 0.439305 },
{ 0.816914, 0.255895, 0.436461 },
{ 0.822926, 0.259016, 0.433573 },
{ 0.828886, 0.262229, 0.430644 },
{ 0.834791, 0.265540, 0.427671 },
{ 0.840636, 0.268953, 0.424666 },
{ 0.846416, 0.272473, 0.421631 },
{ 0.852126, 0.276106, 0.418573 },
{ 0.857763, 0.279857, 0.415496 },
{ 0.863320, 0.283729, 0.412403 },
{ 0.868793, 0.287728, 0.409303 },
{ 0.874176, 0.291859, 0.406205 },
{ 0.879464, 0.296125, 0.403118 },
{ 0.884651, 0.300530, 0.400047 },
{ 0.889731, 0.305079, 0.397002 },
{ 0.894700, 0.309773, 0.393995 },
{ 0.899552, 0.314616, 0.391037 },
{ 0.904281, 0.319610, 0.388137 },
{ 0.908884, 0.324755, 0.385308 },
{ 0.913354, 0.330052, 0.382563 },
{ 0.917689, 0.335500, 0.379915 },
{ 0.921884, 0.341098, 0.377376 },
{ 0.925937, 0.346844, 0.374959 },
{ 0.929845, 0.352734, 0.372677 },
{ 0.933606, 0.358764, 0.370541 },
{ 0.937221, 0.364929, 0.368567 },
{ 0.940687, 0.371224, 0.366762 },
{ 0.944006, 0.377643, 0.365136 },
{ 0.947180, 0.384178, 0.363701 },
{ 0.950210, 0.390820, 0.362468 },
{ 0.953099, 0.397563, 0.361438 },
{ 0.955849, 0.404400, 0.360619 },
{ 0.958464, 0.411324, 0.360014 },
{ 0.960949, 0.418323, 0.359630 },
{ 0.963310, 0.425390, 0.359469 },
{ 0.965549, 0.432519, 0.359529 },
{ 0.967671, 0.439703, 0.359810 },
{ 0.969680, 0.446936, 0.360311 },
{ 0.971582, 0.454210, 0.361030 },
{ 0.973381, 0.461520, 0.361965 },
{ 0.975082, 0.468861, 0.363111 },
{ 0.976690, 0.476226, 0.364466 },
{ 0.978210, 0.483612, 0.366025 },
{ 0.979645, 0.491014, 0.367783 },
{ 0.981000, 0.498428, 0.369734 },
{ 0.982279, 0.505851, 0.371874 },
{ 0.983485, 0.513280, 0.374198 },
{ 0.984622, 0.520713, 0.376698 },
{ 0.985693, 0.528148, 0.379371 },
{ 0.986700, 0.535582, 0.382210 },
{ 0.987646, 0.543015, 0.385210 },
{ 0.988533, 0.550446, 0.388365 },
{ 0.989363, 0.557873, 0.391671 },
{ 0.990138, 0.565296, 0.395122 },
{ 0.990871, 0.572706, 0.398714 },
{ 0.991558, 0.580107, 0.402441 },
{ 0.992196, 0.587502, 0.406299 },
{ 0.992785, 0.594891, 0.410283 },
{ 0.993326, 0.602275, 0.414390 },
{ 0.993834, 0.609644, 0.418613 },
{ 0.994309, 0.616999, 0.422950 },
{ 0.994738, 0.624350, 0.427397 },
{ 0.995122, 0.631696, 0.431951 },
{ 0.995480, 0.639027, 0.436607 },
{ 0.995810, 0.646344, 0.441361 },
{ 0.996096, 0.653659, 0.446213 },
{ 0.996341, 0.660969, 0.451160 },
{ 0.996580, 0.668256, 0.456192 },
{ 0.996775, 0.675541, 0.461314 },
{ 0.996925, 0.682828, 0.466526 },
{ 0.997077, 0.690088, 0.471811 },
{ 0.997186, 0.697349, 0.477182 },
{ 0.997254, 0.704611, 0.482635 },
{ 0.997325, 0.711848, 0.488154 },
{ 0.997351, 0.719089, 0.493755 },
{ 0.997351, 0.726324, 0.499428 },
{ 0.997341, 0.733545, 0.505167 },
{ 0.997285, 0.740772, 0.510983 },
{ 0.997228, 0.747981, 0.516859 },
{ 0.997138, 0.755190, 0.522806 },
{ 0.997019, 0.762398, 0.528821 },
{ 0.996898, 0.769591, 0.534892 },
{ 0.996727, 0.776795, 0.541039 },
{ 0.996571, 0.783977, 0.547233 },
{ 0.996369, 0.791167, 0.553499 },
{ 0.996162, 0.798348, 0.559820 },
{ 0.995932, 0.805527, 0.566202 },
{ 0.995680, 0.812706, 0.572645 },
{ 0.995424, 0.819875, 0.579140 },
{ 0.995131, 0.827052, 0.585701 },
{ 0.994851, 0.834213, 0.592307 },
{ 0.994524, 0.841387, 0.598983 },
{ 0.994222, 0.848540, 0.605696 },
{ 0.993866, 0.855711, 0.612482 },
{ 0.993545, 0.862859, 0.619299 },
{ 0.993170, 0.870024, 0.626189 },
{ 0.992831, 0.877168, 0.633109 },
{ 0.992440, 0.884330, 0.640099 },
{ 0.992089, 0.891470, 0.647116 },
{ 0.991688, 0.898627, 0.654202 },
{ 0.991332, 0.905763, 0.661309 },
{ 0.990930, 0.912915, 0.668481 },
{ 0.990570, 0.920049, 0.675675 },
{ 0.990175, 0.927196, 0.682926 },
{ 0.989815, 0.934329, 0.690198 },
{ 0.989434, 0.941470, 0.697519 },
{ 0.989077, 0.948604, 0.704863 },
{ 0.988717, 0.955742, 0.712242 },
{ 0.988367, 0.962878, 0.719649 },
{ 0.988033, 0.970012, 0.727077 },
{ 0.987691, 0.977154, 0.734536 },
{ 0.987387, 0.984288, 0.742002 },
{ 0.987053, 0.991438, 0.749504 }
};
return internal::CalcLerp(x, data);
}
// Returns a color from Matplotlib's "inferno" colormap.
//
// \param x position in the colormap, expected in [0, 1]
//          (out-of-range handling is delegated to internal::CalcLerp —
//          presumably clamped; confirm in that helper's definition).
// \return a Color linearly interpolated between the two nearest of the
//         256 precomputed table entries below.
inline Color GetInfernoColor(double x)
{
    // 256-sample RGB lookup table for the inferno colormap,
    // each component in [0, 1].
    constexpr Color data[] =
    {
        { 0.001462, 0.000466, 0.013866 },
        { 0.002267, 0.001270, 0.018570 },
        { 0.003299, 0.002249, 0.024239 },
        { 0.004547, 0.003392, 0.030909 },
        { 0.006006, 0.004692, 0.038558 },
        { 0.007676, 0.006136, 0.046836 },
        { 0.009561, 0.007713, 0.055143 },
        { 0.011663, 0.009417, 0.063460 },
        { 0.013995, 0.011225, 0.071862 },
        { 0.016561, 0.013136, 0.080282 },
        { 0.019373, 0.015133, 0.088767 },
        { 0.022447, 0.017199, 0.097327 },
        { 0.025793, 0.019331, 0.105930 },
        { 0.029432, 0.021503, 0.114621 },
        { 0.033385, 0.023702, 0.123397 },
        { 0.037668, 0.025921, 0.132232 },
        { 0.042253, 0.028139, 0.141141 },
        { 0.046915, 0.030324, 0.150164 },
        { 0.051644, 0.032474, 0.159254 },
        { 0.056449, 0.034569, 0.168414 },
        { 0.061340, 0.036590, 0.177642 },
        { 0.066331, 0.038504, 0.186962 },
        { 0.071429, 0.040294, 0.196354 },
        { 0.076637, 0.041905, 0.205799 },
        { 0.081962, 0.043328, 0.215289 },
        { 0.087411, 0.044556, 0.224813 },
        { 0.092990, 0.045583, 0.234358 },
        { 0.098702, 0.046402, 0.243904 },
        { 0.104551, 0.047008, 0.253430 },
        { 0.110536, 0.047399, 0.262912 },
        { 0.116656, 0.047574, 0.272321 },
        { 0.122908, 0.047536, 0.281624 },
        { 0.129285, 0.047293, 0.290788 },
        { 0.135778, 0.046856, 0.299776 },
        { 0.142378, 0.046242, 0.308553 },
        { 0.149073, 0.045468, 0.317085 },
        { 0.155850, 0.044559, 0.325338 },
        { 0.162689, 0.043554, 0.333277 },
        { 0.169575, 0.042489, 0.340874 },
        { 0.176493, 0.041402, 0.348111 },
        { 0.183429, 0.040329, 0.354971 },
        { 0.190367, 0.039309, 0.361447 },
        { 0.197297, 0.038400, 0.367535 },
        { 0.204209, 0.037632, 0.373238 },
        { 0.211095, 0.037030, 0.378563 },
        { 0.217949, 0.036615, 0.383522 },
        { 0.224763, 0.036405, 0.388129 },
        { 0.231538, 0.036405, 0.392400 },
        { 0.238273, 0.036621, 0.396353 },
        { 0.244967, 0.037055, 0.400007 },
        { 0.251620, 0.037705, 0.403378 },
        { 0.258234, 0.038571, 0.406485 },
        { 0.264810, 0.039647, 0.409345 },
        { 0.271347, 0.040922, 0.411976 },
        { 0.277850, 0.042353, 0.414392 },
        { 0.284321, 0.043933, 0.416608 },
        { 0.290763, 0.045644, 0.418637 },
        { 0.297178, 0.047470, 0.420491 },
        { 0.303568, 0.049396, 0.422182 },
        { 0.309935, 0.051407, 0.423721 },
        { 0.316282, 0.053490, 0.425116 },
        { 0.322610, 0.055634, 0.426377 },
        { 0.328921, 0.057827, 0.427511 },
        { 0.335217, 0.060060, 0.428524 },
        { 0.341500, 0.062325, 0.429425 },
        { 0.347771, 0.064616, 0.430217 },
        { 0.354032, 0.066925, 0.430906 },
        { 0.360284, 0.069247, 0.431497 },
        { 0.366529, 0.071579, 0.431994 },
        { 0.372768, 0.073915, 0.432400 },
        { 0.379001, 0.076253, 0.432719 },
        { 0.385228, 0.078591, 0.432955 },
        { 0.391453, 0.080927, 0.433109 },
        { 0.397674, 0.083257, 0.433183 },
        { 0.403894, 0.085580, 0.433179 },
        { 0.410113, 0.087896, 0.433098 },
        { 0.416331, 0.090203, 0.432943 },
        { 0.422549, 0.092501, 0.432714 },
        { 0.428768, 0.094790, 0.432412 },
        { 0.434987, 0.097069, 0.432039 },
        { 0.441207, 0.099338, 0.431594 },
        { 0.447428, 0.101597, 0.431080 },
        { 0.453651, 0.103848, 0.430498 },
        { 0.459875, 0.106089, 0.429846 },
        { 0.466100, 0.108322, 0.429125 },
        { 0.472328, 0.110547, 0.428334 },
        { 0.478558, 0.112764, 0.427475 },
        { 0.484789, 0.114974, 0.426548 },
        { 0.491022, 0.117179, 0.425552 },
        { 0.497257, 0.119379, 0.424488 },
        { 0.503493, 0.121575, 0.423356 },
        { 0.509730, 0.123769, 0.422156 },
        { 0.515967, 0.125960, 0.420887 },
        { 0.522206, 0.128150, 0.419549 },
        { 0.528444, 0.130341, 0.418142 },
        { 0.534683, 0.132534, 0.416667 },
        { 0.540920, 0.134729, 0.415123 },
        { 0.547157, 0.136929, 0.413511 },
        { 0.553392, 0.139134, 0.411829 },
        { 0.559624, 0.141346, 0.410078 },
        { 0.565854, 0.143567, 0.408258 },
        { 0.572081, 0.145797, 0.406369 },
        { 0.578304, 0.148039, 0.404411 },
        { 0.584521, 0.150294, 0.402385 },
        { 0.590734, 0.152563, 0.400290 },
        { 0.596940, 0.154848, 0.398125 },
        { 0.603139, 0.157151, 0.395891 },
        { 0.609330, 0.159474, 0.393589 },
        { 0.615513, 0.161817, 0.391219 },
        { 0.621685, 0.164184, 0.388781 },
        { 0.627847, 0.166575, 0.386276 },
        { 0.633998, 0.168992, 0.383704 },
        { 0.640135, 0.171438, 0.381065 },
        { 0.646260, 0.173914, 0.378359 },
        { 0.652369, 0.176421, 0.375586 },
        { 0.658463, 0.178962, 0.372748 },
        { 0.664540, 0.181539, 0.369846 },
        { 0.670599, 0.184153, 0.366879 },
        { 0.676638, 0.186807, 0.363849 },
        { 0.682656, 0.189501, 0.360757 },
        { 0.688653, 0.192239, 0.357603 },
        { 0.694627, 0.195021, 0.354388 },
        { 0.700576, 0.197851, 0.351113 },
        { 0.706500, 0.200728, 0.347777 },
        { 0.712396, 0.203656, 0.344383 },
        { 0.718264, 0.206636, 0.340931 },
        { 0.724103, 0.209670, 0.337424 },
        { 0.729909, 0.212759, 0.333861 },
        { 0.735683, 0.215906, 0.330245 },
        { 0.741423, 0.219112, 0.326576 },
        { 0.747127, 0.222378, 0.322856 },
        { 0.752794, 0.225706, 0.319085 },
        { 0.758422, 0.229097, 0.315266 },
        { 0.764010, 0.232554, 0.311399 },
        { 0.769556, 0.236077, 0.307485 },
        { 0.775059, 0.239667, 0.303526 },
        { 0.780517, 0.243327, 0.299523 },
        { 0.785929, 0.247056, 0.295477 },
        { 0.791293, 0.250856, 0.291390 },
        { 0.796607, 0.254728, 0.287264 },
        { 0.801871, 0.258674, 0.283099 },
        { 0.807082, 0.262692, 0.278898 },
        { 0.812239, 0.266786, 0.274661 },
        { 0.817341, 0.270954, 0.270390 },
        { 0.822386, 0.275197, 0.266085 },
        { 0.827372, 0.279517, 0.261750 },
        { 0.832299, 0.283913, 0.257383 },
        { 0.837165, 0.288385, 0.252988 },
        { 0.841969, 0.292933, 0.248564 },
        { 0.846709, 0.297559, 0.244113 },
        { 0.851384, 0.302260, 0.239636 },
        { 0.855992, 0.307038, 0.235133 },
        { 0.860533, 0.311892, 0.230606 },
        { 0.865006, 0.316822, 0.226055 },
        { 0.869409, 0.321827, 0.221482 },
        { 0.873741, 0.326906, 0.216886 },
        { 0.878001, 0.332060, 0.212268 },
        { 0.882188, 0.337287, 0.207628 },
        { 0.886302, 0.342586, 0.202968 },
        { 0.890341, 0.347957, 0.198286 },
        { 0.894305, 0.353399, 0.193584 },
        { 0.898192, 0.358911, 0.188860 },
        { 0.902003, 0.364492, 0.184116 },
        { 0.905735, 0.370140, 0.179350 },
        { 0.909390, 0.375856, 0.174563 },
        { 0.912966, 0.381636, 0.169755 },
        { 0.916462, 0.387481, 0.164924 },
        { 0.919879, 0.393389, 0.160070 },
        { 0.923215, 0.399359, 0.155193 },
        { 0.926470, 0.405389, 0.150292 },
        { 0.929644, 0.411479, 0.145367 },
        { 0.932737, 0.417627, 0.140417 },
        { 0.935747, 0.423831, 0.135440 },
        { 0.938675, 0.430091, 0.130438 },
        { 0.941521, 0.436405, 0.125409 },
        { 0.944285, 0.442772, 0.120354 },
        { 0.946965, 0.449191, 0.115272 },
        { 0.949562, 0.455660, 0.110164 },
        { 0.952075, 0.462178, 0.105031 },
        { 0.954506, 0.468744, 0.099874 },
        { 0.956852, 0.475356, 0.094695 },
        { 0.959114, 0.482014, 0.089499 },
        { 0.961293, 0.488716, 0.084289 },
        { 0.963387, 0.495462, 0.079073 },
        { 0.965397, 0.502249, 0.073859 },
        { 0.967322, 0.509078, 0.068659 },
        { 0.969163, 0.515946, 0.063488 },
        { 0.970919, 0.522853, 0.058367 },
        { 0.972590, 0.529798, 0.053324 },
        { 0.974176, 0.536780, 0.048392 },
        { 0.975677, 0.543798, 0.043618 },
        { 0.977092, 0.550850, 0.039050 },
        { 0.978422, 0.557937, 0.034931 },
        { 0.979666, 0.565057, 0.031409 },
        { 0.980824, 0.572209, 0.028508 },
        { 0.981895, 0.579392, 0.026250 },
        { 0.982881, 0.586606, 0.024661 },
        { 0.983779, 0.593849, 0.023770 },
        { 0.984591, 0.601122, 0.023606 },
        { 0.985315, 0.608422, 0.024202 },
        { 0.985952, 0.615750, 0.025592 },
        { 0.986502, 0.623105, 0.027814 },
        { 0.986964, 0.630485, 0.030908 },
        { 0.987337, 0.637890, 0.034916 },
        { 0.987622, 0.645320, 0.039886 },
        { 0.987819, 0.652773, 0.045581 },
        { 0.987926, 0.660250, 0.051750 },
        { 0.987945, 0.667748, 0.058329 },
        { 0.987874, 0.675267, 0.065257 },
        { 0.987714, 0.682807, 0.072489 },
        { 0.987464, 0.690366, 0.079990 },
        { 0.987124, 0.697944, 0.087731 },
        { 0.986694, 0.705540, 0.095694 },
        { 0.986175, 0.713153, 0.103863 },
        { 0.985566, 0.720782, 0.112229 },
        { 0.984865, 0.728427, 0.120785 },
        { 0.984075, 0.736087, 0.129527 },
        { 0.983196, 0.743758, 0.138453 },
        { 0.982228, 0.751442, 0.147565 },
        { 0.981173, 0.759135, 0.156863 },
        { 0.980032, 0.766837, 0.166353 },
        { 0.978806, 0.774545, 0.176037 },
        { 0.977497, 0.782258, 0.185923 },
        { 0.976108, 0.789974, 0.196018 },
        { 0.974638, 0.797692, 0.206332 },
        { 0.973088, 0.805409, 0.216877 },
        { 0.971468, 0.813122, 0.227658 },
        { 0.969783, 0.820825, 0.238686 },
        { 0.968041, 0.828515, 0.249972 },
        { 0.966243, 0.836191, 0.261534 },
        { 0.964394, 0.843848, 0.273391 },
        { 0.962517, 0.851476, 0.285546 },
        { 0.960626, 0.859069, 0.298010 },
        { 0.958720, 0.866624, 0.310820 },
        { 0.956834, 0.874129, 0.323974 },
        { 0.954997, 0.881569, 0.337475 },
        { 0.953215, 0.888942, 0.351369 },
        { 0.951546, 0.896226, 0.365627 },
        { 0.950018, 0.903409, 0.380271 },
        { 0.948683, 0.910473, 0.395289 },
        { 0.947594, 0.917399, 0.410665 },
        { 0.946809, 0.924168, 0.426373 },
        { 0.946392, 0.930761, 0.442367 },
        { 0.946403, 0.937159, 0.458592 },
        { 0.946903, 0.943348, 0.474970 },
        { 0.947937, 0.949318, 0.491426 },
        { 0.949545, 0.955063, 0.507860 },
        { 0.951740, 0.960587, 0.524203 },
        { 0.954529, 0.965896, 0.540361 },
        { 0.957896, 0.971003, 0.556275 },
        { 0.961812, 0.975924, 0.571925 },
        { 0.966249, 0.980678, 0.587206 },
        { 0.971162, 0.985282, 0.602154 },
        { 0.976511, 0.989753, 0.616760 },
        { 0.982257, 0.994109, 0.631017 },
        { 0.988362, 0.998364, 0.644924 }
    };
    return internal::CalcLerp(x, data);
}
// Returns a color from Matplotlib's "plasma" colormap.
//
// \param x position in the colormap, expected in [0, 1]
//          (out-of-range handling is delegated to internal::CalcLerp —
//          presumably clamped; confirm in that helper's definition).
// \return a Color linearly interpolated between the two nearest of the
//         256 precomputed table entries below.
inline Color GetPlasmaColor(double x)
{
    // 256-sample RGB lookup table for the plasma colormap,
    // each component in [0, 1].
    constexpr Color data[] =
    {
        { 0.050383, 0.029803, 0.527975 },
        { 0.063536, 0.028426, 0.533124 },
        { 0.075353, 0.027206, 0.538007 },
        { 0.086222, 0.026125, 0.542658 },
        { 0.096379, 0.025165, 0.547103 },
        { 0.105980, 0.024309, 0.551368 },
        { 0.115124, 0.023556, 0.555468 },
        { 0.123903, 0.022878, 0.559423 },
        { 0.132381, 0.022258, 0.563250 },
        { 0.140603, 0.021687, 0.566959 },
        { 0.148607, 0.021154, 0.570562 },
        { 0.156421, 0.020651, 0.574065 },
        { 0.164070, 0.020171, 0.577478 },
        { 0.171574, 0.019706, 0.580806 },
        { 0.178950, 0.019252, 0.584054 },
        { 0.186213, 0.018803, 0.587228 },
        { 0.193374, 0.018354, 0.590330 },
        { 0.200445, 0.017902, 0.593364 },
        { 0.207435, 0.017442, 0.596333 },
        { 0.214350, 0.016973, 0.599239 },
        { 0.221197, 0.016497, 0.602083 },
        { 0.227983, 0.016007, 0.604867 },
        { 0.234715, 0.015502, 0.607592 },
        { 0.241396, 0.014979, 0.610259 },
        { 0.248032, 0.014439, 0.612868 },
        { 0.254627, 0.013882, 0.615419 },
        { 0.261183, 0.013308, 0.617911 },
        { 0.267703, 0.012716, 0.620346 },
        { 0.274191, 0.012109, 0.622722 },
        { 0.280648, 0.011488, 0.625038 },
        { 0.287076, 0.010855, 0.627295 },
        { 0.293478, 0.010213, 0.629490 },
        { 0.299855, 0.009561, 0.631624 },
        { 0.306210, 0.008902, 0.633694 },
        { 0.312543, 0.008239, 0.635700 },
        { 0.318856, 0.007576, 0.637640 },
        { 0.325150, 0.006915, 0.639512 },
        { 0.331426, 0.006261, 0.641316 },
        { 0.337683, 0.005618, 0.643049 },
        { 0.343925, 0.004991, 0.644710 },
        { 0.350150, 0.004382, 0.646298 },
        { 0.356359, 0.003798, 0.647810 },
        { 0.362553, 0.003243, 0.649245 },
        { 0.368733, 0.002724, 0.650601 },
        { 0.374897, 0.002245, 0.651876 },
        { 0.381047, 0.001814, 0.653068 },
        { 0.387183, 0.001434, 0.654177 },
        { 0.393304, 0.001114, 0.655199 },
        { 0.399411, 0.000859, 0.656133 },
        { 0.405503, 0.000678, 0.656977 },
        { 0.411580, 0.000577, 0.657730 },
        { 0.417642, 0.000564, 0.658390 },
        { 0.423689, 0.000646, 0.658956 },
        { 0.429719, 0.000831, 0.659425 },
        { 0.435734, 0.001127, 0.659797 },
        { 0.441732, 0.001540, 0.660069 },
        { 0.447714, 0.002080, 0.660240 },
        { 0.453677, 0.002755, 0.660310 },
        { 0.459623, 0.003574, 0.660277 },
        { 0.465550, 0.004545, 0.660139 },
        { 0.471457, 0.005678, 0.659897 },
        { 0.477344, 0.006980, 0.659549 },
        { 0.483210, 0.008460, 0.659095 },
        { 0.489055, 0.010127, 0.658534 },
        { 0.494877, 0.011990, 0.657865 },
        { 0.500678, 0.014055, 0.657088 },
        { 0.506454, 0.016333, 0.656202 },
        { 0.512206, 0.018833, 0.655209 },
        { 0.517933, 0.021563, 0.654109 },
        { 0.523633, 0.024532, 0.652901 },
        { 0.529306, 0.027747, 0.651586 },
        { 0.534952, 0.031217, 0.650165 },
        { 0.540570, 0.034950, 0.648640 },
        { 0.546157, 0.038954, 0.647010 },
        { 0.551715, 0.043136, 0.645277 },
        { 0.557243, 0.047331, 0.643443 },
        { 0.562738, 0.051545, 0.641509 },
        { 0.568201, 0.055778, 0.639477 },
        { 0.573632, 0.060028, 0.637349 },
        { 0.579029, 0.064296, 0.635126 },
        { 0.584391, 0.068579, 0.632812 },
        { 0.589719, 0.072878, 0.630408 },
        { 0.595011, 0.077190, 0.627917 },
        { 0.600266, 0.081516, 0.625342 },
        { 0.605485, 0.085854, 0.622686 },
        { 0.610667, 0.090204, 0.619951 },
        { 0.615812, 0.094564, 0.617140 },
        { 0.620919, 0.098934, 0.614257 },
        { 0.625987, 0.103312, 0.611305 },
        { 0.631017, 0.107699, 0.608287 },
        { 0.636008, 0.112092, 0.605205 },
        { 0.640959, 0.116492, 0.602065 },
        { 0.645872, 0.120898, 0.598867 },
        { 0.650746, 0.125309, 0.595617 },
        { 0.655580, 0.129725, 0.592317 },
        { 0.660374, 0.134144, 0.588971 },
        { 0.665129, 0.138566, 0.585582 },
        { 0.669845, 0.142992, 0.582154 },
        { 0.674522, 0.147419, 0.578688 },
        { 0.679160, 0.151848, 0.575189 },
        { 0.683758, 0.156278, 0.571660 },
        { 0.688318, 0.160709, 0.568103 },
        { 0.692840, 0.165141, 0.564522 },
        { 0.697324, 0.169573, 0.560919 },
        { 0.701769, 0.174005, 0.557296 },
        { 0.706178, 0.178437, 0.553657 },
        { 0.710549, 0.182868, 0.550004 },
        { 0.714883, 0.187299, 0.546338 },
        { 0.719181, 0.191729, 0.542663 },
        { 0.723444, 0.196158, 0.538981 },
        { 0.727670, 0.200586, 0.535293 },
        { 0.731862, 0.205013, 0.531601 },
        { 0.736019, 0.209439, 0.527908 },
        { 0.740143, 0.213864, 0.524216 },
        { 0.744232, 0.218288, 0.520524 },
        { 0.748289, 0.222711, 0.516834 },
        { 0.752312, 0.227133, 0.513149 },
        { 0.756304, 0.231555, 0.509468 },
        { 0.760264, 0.235976, 0.505794 },
        { 0.764193, 0.240396, 0.502126 },
        { 0.768090, 0.244817, 0.498465 },
        { 0.771958, 0.249237, 0.494813 },
        { 0.775796, 0.253658, 0.491171 },
        { 0.779604, 0.258078, 0.487539 },
        { 0.783383, 0.262500, 0.483918 },
        { 0.787133, 0.266922, 0.480307 },
        { 0.790855, 0.271345, 0.476706 },
        { 0.794549, 0.275770, 0.473117 },
        { 0.798216, 0.280197, 0.469538 },
        { 0.801855, 0.284626, 0.465971 },
        { 0.805467, 0.289057, 0.462415 },
        { 0.809052, 0.293491, 0.458870 },
        { 0.812612, 0.297928, 0.455338 },
        { 0.816144, 0.302368, 0.451816 },
        { 0.819651, 0.306812, 0.448306 },
        { 0.823132, 0.311261, 0.444806 },
        { 0.826588, 0.315714, 0.441316 },
        { 0.830018, 0.320172, 0.437836 },
        { 0.833422, 0.324635, 0.434366 },
        { 0.836801, 0.329105, 0.430905 },
        { 0.840155, 0.333580, 0.427455 },
        { 0.843484, 0.338062, 0.424013 },
        { 0.846788, 0.342551, 0.420579 },
        { 0.850066, 0.347048, 0.417153 },
        { 0.853319, 0.351553, 0.413734 },
        { 0.856547, 0.356066, 0.410322 },
        { 0.859750, 0.360588, 0.406917 },
        { 0.862927, 0.365119, 0.403519 },
        { 0.866078, 0.369660, 0.400126 },
        { 0.869203, 0.374212, 0.396738 },
        { 0.872303, 0.378774, 0.393355 },
        { 0.875376, 0.383347, 0.389976 },
        { 0.878423, 0.387932, 0.386600 },
        { 0.881443, 0.392529, 0.383229 },
        { 0.884436, 0.397139, 0.379860 },
        { 0.887402, 0.401762, 0.376494 },
        { 0.890340, 0.406398, 0.373130 },
        { 0.893250, 0.411048, 0.369768 },
        { 0.896131, 0.415712, 0.366407 },
        { 0.898984, 0.420392, 0.363047 },
        { 0.901807, 0.425087, 0.359688 },
        { 0.904601, 0.429797, 0.356329 },
        { 0.907365, 0.434524, 0.352970 },
        { 0.910098, 0.439268, 0.349610 },
        { 0.912800, 0.444029, 0.346251 },
        { 0.915471, 0.448807, 0.342890 },
        { 0.918109, 0.453603, 0.339529 },
        { 0.920714, 0.458417, 0.336166 },
        { 0.923287, 0.463251, 0.332801 },
        { 0.925825, 0.468103, 0.329435 },
        { 0.928329, 0.472975, 0.326067 },
        { 0.930798, 0.477867, 0.322697 },
        { 0.933232, 0.482780, 0.319325 },
        { 0.935630, 0.487712, 0.315952 },
        { 0.937990, 0.492667, 0.312575 },
        { 0.940313, 0.497642, 0.309197 },
        { 0.942598, 0.502639, 0.305816 },
        { 0.944844, 0.507658, 0.302433 },
        { 0.947051, 0.512699, 0.299049 },
        { 0.949217, 0.517763, 0.295662 },
        { 0.951344, 0.522850, 0.292275 },
        { 0.953428, 0.527960, 0.288883 },
        { 0.955470, 0.533093, 0.285490 },
        { 0.957469, 0.538250, 0.282096 },
        { 0.959424, 0.543431, 0.278701 },
        { 0.961336, 0.548636, 0.275305 },
        { 0.963203, 0.553865, 0.271909 },
        { 0.965024, 0.559118, 0.268513 },
        { 0.966798, 0.564396, 0.265118 },
        { 0.968526, 0.569700, 0.261721 },
        { 0.970205, 0.575028, 0.258325 },
        { 0.971835, 0.580382, 0.254931 },
        { 0.973416, 0.585761, 0.251540 },
        { 0.974947, 0.591165, 0.248151 },
        { 0.976428, 0.596595, 0.244767 },
        { 0.977856, 0.602051, 0.241387 },
        { 0.979233, 0.607532, 0.238013 },
        { 0.980556, 0.613039, 0.234646 },
        { 0.981826, 0.618572, 0.231287 },
        { 0.983041, 0.624131, 0.227937 },
        { 0.984199, 0.629718, 0.224595 },
        { 0.985301, 0.635330, 0.221265 },
        { 0.986345, 0.640969, 0.217948 },
        { 0.987332, 0.646633, 0.214648 },
        { 0.988260, 0.652325, 0.211364 },
        { 0.989128, 0.658043, 0.208100 },
        { 0.989935, 0.663787, 0.204859 },
        { 0.990681, 0.669558, 0.201642 },
        { 0.991365, 0.675355, 0.198453 },
        { 0.991985, 0.681179, 0.195295 },
        { 0.992541, 0.687030, 0.192170 },
        { 0.993032, 0.692907, 0.189084 },
        { 0.993456, 0.698810, 0.186041 },
        { 0.993814, 0.704741, 0.183043 },
        { 0.994103, 0.710698, 0.180097 },
        { 0.994324, 0.716681, 0.177208 },
        { 0.994474, 0.722691, 0.174381 },
        { 0.994553, 0.728728, 0.171622 },
        { 0.994561, 0.734791, 0.168938 },
        { 0.994495, 0.740880, 0.166335 },
        { 0.994355, 0.746995, 0.163821 },
        { 0.994141, 0.753137, 0.161404 },
        { 0.993851, 0.759304, 0.159092 },
        { 0.993482, 0.765499, 0.156891 },
        { 0.993033, 0.771720, 0.154808 },
        { 0.992505, 0.777967, 0.152855 },
        { 0.991897, 0.784239, 0.151042 },
        { 0.991209, 0.790537, 0.149377 },
        { 0.990439, 0.796859, 0.147870 },
        { 0.989587, 0.803205, 0.146529 },
        { 0.988648, 0.809579, 0.145357 },
        { 0.987621, 0.815978, 0.144363 },
        { 0.986509, 0.822401, 0.143557 },
        { 0.985314, 0.828846, 0.142945 },
        { 0.984031, 0.835315, 0.142528 },
        { 0.982653, 0.841812, 0.142303 },
        { 0.981190, 0.848329, 0.142279 },
        { 0.979644, 0.854866, 0.142453 },
        { 0.977995, 0.861432, 0.142808 },
        { 0.976265, 0.868016, 0.143351 },
        { 0.974443, 0.874622, 0.144061 },
        { 0.972530, 0.881250, 0.144923 },
        { 0.970533, 0.887896, 0.145919 },
        { 0.968443, 0.894564, 0.147014 },
        { 0.966271, 0.901249, 0.148180 },
        { 0.964021, 0.907950, 0.149370 },
        { 0.961681, 0.914672, 0.150520 },
        { 0.959276, 0.921407, 0.151566 },
        { 0.956808, 0.928152, 0.152409 },
        { 0.954287, 0.934908, 0.152921 },
        { 0.951726, 0.941671, 0.152925 },
        { 0.949151, 0.948435, 0.152178 },
        { 0.946602, 0.955190, 0.150328 },
        { 0.944152, 0.961916, 0.146861 },
        { 0.941896, 0.968590, 0.140956 },
        { 0.940015, 0.975158, 0.131326 }
    };
    return internal::CalcLerp(x, data);
}
// Returns a color from Matplotlib's "viridis" colormap.
//
// \param x position in the colormap, expected in [0, 1]
//          (out-of-range handling is delegated to internal::CalcLerp —
//          presumably clamped; confirm in that helper's definition).
// \return a Color linearly interpolated between the two nearest of the
//         256 precomputed table entries below.
inline Color GetViridisColor(double x)
{
    // 256-sample RGB lookup table for the viridis colormap,
    // each component in [0, 1].
    constexpr Color data[] =
    {
        { 0.267004, 0.004874, 0.329415 },
        { 0.268510, 0.009605, 0.335427 },
        { 0.269944, 0.014625, 0.341379 },
        { 0.271305, 0.019942, 0.347269 },
        { 0.272594, 0.025563, 0.353093 },
        { 0.273809, 0.031497, 0.358853 },
        { 0.274952, 0.037752, 0.364543 },
        { 0.276022, 0.044167, 0.370164 },
        { 0.277018, 0.050344, 0.375715 },
        { 0.277941, 0.056324, 0.381191 },
        { 0.278791, 0.062145, 0.386592 },
        { 0.279566, 0.067836, 0.391917 },
        { 0.280267, 0.073417, 0.397163 },
        { 0.280894, 0.078907, 0.402329 },
        { 0.281446, 0.084320, 0.407414 },
        { 0.281924, 0.089666, 0.412415 },
        { 0.282327, 0.094955, 0.417331 },
        { 0.282656, 0.100196, 0.422160 },
        { 0.282910, 0.105393, 0.426902 },
        { 0.283091, 0.110553, 0.431554 },
        { 0.283197, 0.115680, 0.436115 },
        { 0.283229, 0.120777, 0.440584 },
        { 0.283187, 0.125848, 0.444960 },
        { 0.283072, 0.130895, 0.449241 },
        { 0.282884, 0.135920, 0.453427 },
        { 0.282623, 0.140926, 0.457517 },
        { 0.282290, 0.145912, 0.461510 },
        { 0.281887, 0.150881, 0.465405 },
        { 0.281412, 0.155834, 0.469201 },
        { 0.280868, 0.160771, 0.472899 },
        { 0.280255, 0.165693, 0.476498 },
        { 0.279574, 0.170599, 0.479997 },
        { 0.278826, 0.175490, 0.483397 },
        { 0.278012, 0.180367, 0.486697 },
        { 0.277134, 0.185228, 0.489898 },
        { 0.276194, 0.190074, 0.493001 },
        { 0.275191, 0.194905, 0.496005 },
        { 0.274128, 0.199721, 0.498911 },
        { 0.273006, 0.204520, 0.501721 },
        { 0.271828, 0.209303, 0.504434 },
        { 0.270595, 0.214069, 0.507052 },
        { 0.269308, 0.218818, 0.509577 },
        { 0.267968, 0.223549, 0.512008 },
        { 0.266580, 0.228262, 0.514349 },
        { 0.265145, 0.232956, 0.516599 },
        { 0.263663, 0.237631, 0.518762 },
        { 0.262138, 0.242286, 0.520837 },
        { 0.260571, 0.246922, 0.522828 },
        { 0.258965, 0.251537, 0.524736 },
        { 0.257322, 0.256130, 0.526563 },
        { 0.255645, 0.260703, 0.528312 },
        { 0.253935, 0.265254, 0.529983 },
        { 0.252194, 0.269783, 0.531579 },
        { 0.250425, 0.274290, 0.533103 },
        { 0.248629, 0.278775, 0.534556 },
        { 0.246811, 0.283237, 0.535941 },
        { 0.244972, 0.287675, 0.537260 },
        { 0.243113, 0.292092, 0.538516 },
        { 0.241237, 0.296485, 0.539709 },
        { 0.239346, 0.300855, 0.540844 },
        { 0.237441, 0.305202, 0.541921 },
        { 0.235526, 0.309527, 0.542944 },
        { 0.233603, 0.313828, 0.543914 },
        { 0.231674, 0.318106, 0.544834 },
        { 0.229739, 0.322361, 0.545706 },
        { 0.227802, 0.326594, 0.546532 },
        { 0.225863, 0.330805, 0.547314 },
        { 0.223925, 0.334994, 0.548053 },
        { 0.221989, 0.339161, 0.548752 },
        { 0.220057, 0.343307, 0.549413 },
        { 0.218130, 0.347432, 0.550038 },
        { 0.216210, 0.351535, 0.550627 },
        { 0.214298, 0.355619, 0.551184 },
        { 0.212395, 0.359683, 0.551710 },
        { 0.210503, 0.363727, 0.552206 },
        { 0.208623, 0.367752, 0.552675 },
        { 0.206756, 0.371758, 0.553117 },
        { 0.204903, 0.375746, 0.553533 },
        { 0.203063, 0.379716, 0.553925 },
        { 0.201239, 0.383670, 0.554294 },
        { 0.199430, 0.387607, 0.554642 },
        { 0.197636, 0.391528, 0.554969 },
        { 0.195860, 0.395433, 0.555276 },
        { 0.194100, 0.399323, 0.555565 },
        { 0.192357, 0.403199, 0.555836 },
        { 0.190631, 0.407061, 0.556089 },
        { 0.188923, 0.410910, 0.556326 },
        { 0.187231, 0.414746, 0.556547 },
        { 0.185556, 0.418570, 0.556753 },
        { 0.183898, 0.422383, 0.556944 },
        { 0.182256, 0.426184, 0.557120 },
        { 0.180629, 0.429975, 0.557282 },
        { 0.179019, 0.433756, 0.557430 },
        { 0.177423, 0.437527, 0.557565 },
        { 0.175841, 0.441290, 0.557685 },
        { 0.174274, 0.445044, 0.557792 },
        { 0.172719, 0.448791, 0.557885 },
        { 0.171176, 0.452530, 0.557965 },
        { 0.169646, 0.456262, 0.558030 },
        { 0.168126, 0.459988, 0.558082 },
        { 0.166617, 0.463708, 0.558119 },
        { 0.165117, 0.467423, 0.558141 },
        { 0.163625, 0.471133, 0.558148 },
        { 0.162142, 0.474838, 0.558140 },
        { 0.160665, 0.478540, 0.558115 },
        { 0.159194, 0.482237, 0.558073 },
        { 0.157729, 0.485932, 0.558013 },
        { 0.156270, 0.489624, 0.557936 },
        { 0.154815, 0.493313, 0.557840 },
        { 0.153364, 0.497000, 0.557724 },
        { 0.151918, 0.500685, 0.557587 },
        { 0.150476, 0.504369, 0.557430 },
        { 0.149039, 0.508051, 0.557250 },
        { 0.147607, 0.511733, 0.557049 },
        { 0.146180, 0.515413, 0.556823 },
        { 0.144759, 0.519093, 0.556572 },
        { 0.143343, 0.522773, 0.556295 },
        { 0.141935, 0.526453, 0.555991 },
        { 0.140536, 0.530132, 0.555659 },
        { 0.139147, 0.533812, 0.555298 },
        { 0.137770, 0.537492, 0.554906 },
        { 0.136408, 0.541173, 0.554483 },
        { 0.135066, 0.544853, 0.554029 },
        { 0.133743, 0.548535, 0.553541 },
        { 0.132444, 0.552216, 0.553018 },
        { 0.131172, 0.555899, 0.552459 },
        { 0.129933, 0.559582, 0.551864 },
        { 0.128729, 0.563265, 0.551229 },
        { 0.127568, 0.566949, 0.550556 },
        { 0.126453, 0.570633, 0.549841 },
        { 0.125394, 0.574318, 0.549086 },
        { 0.124395, 0.578002, 0.548287 },
        { 0.123463, 0.581687, 0.547445 },
        { 0.122606, 0.585371, 0.546557 },
        { 0.121831, 0.589055, 0.545623 },
        { 0.121148, 0.592739, 0.544641 },
        { 0.120565, 0.596422, 0.543611 },
        { 0.120092, 0.600104, 0.542530 },
        { 0.119738, 0.603785, 0.541400 },
        { 0.119512, 0.607464, 0.540218 },
        { 0.119423, 0.611141, 0.538982 },
        { 0.119483, 0.614817, 0.537692 },
        { 0.119699, 0.618490, 0.536347 },
        { 0.120081, 0.622161, 0.534946 },
        { 0.120638, 0.625828, 0.533488 },
        { 0.121380, 0.629492, 0.531973 },
        { 0.122312, 0.633153, 0.530398 },
        { 0.123444, 0.636809, 0.528763 },
        { 0.124780, 0.640461, 0.527068 },
        { 0.126326, 0.644107, 0.525311 },
        { 0.128087, 0.647749, 0.523491 },
        { 0.130067, 0.651384, 0.521608 },
        { 0.132268, 0.655014, 0.519661 },
        { 0.134692, 0.658636, 0.517649 },
        { 0.137339, 0.662252, 0.515571 },
        { 0.140210, 0.665859, 0.513427 },
        { 0.143303, 0.669459, 0.511215 },
        { 0.146616, 0.673050, 0.508936 },
        { 0.150148, 0.676631, 0.506589 },
        { 0.153894, 0.680203, 0.504172 },
        { 0.157851, 0.683765, 0.501686 },
        { 0.162016, 0.687316, 0.499129 },
        { 0.166383, 0.690856, 0.496502 },
        { 0.170948, 0.694384, 0.493803 },
        { 0.175707, 0.697900, 0.491033 },
        { 0.180653, 0.701402, 0.488189 },
        { 0.185783, 0.704891, 0.485273 },
        { 0.191090, 0.708366, 0.482284 },
        { 0.196571, 0.711827, 0.479221 },
        { 0.202219, 0.715272, 0.476084 },
        { 0.208030, 0.718701, 0.472873 },
        { 0.214000, 0.722114, 0.469588 },
        { 0.220124, 0.725509, 0.466226 },
        { 0.226397, 0.728888, 0.462789 },
        { 0.232815, 0.732247, 0.459277 },
        { 0.239374, 0.735588, 0.455688 },
        { 0.246070, 0.738910, 0.452024 },
        { 0.252899, 0.742211, 0.448284 },
        { 0.259857, 0.745492, 0.444467 },
        { 0.266941, 0.748751, 0.440573 },
        { 0.274149, 0.751988, 0.436601 },
        { 0.281477, 0.755203, 0.432552 },
        { 0.288921, 0.758394, 0.428426 },
        { 0.296479, 0.761561, 0.424223 },
        { 0.304148, 0.764704, 0.419943 },
        { 0.311925, 0.767822, 0.415586 },
        { 0.319809, 0.770914, 0.411152 },
        { 0.327796, 0.773980, 0.406640 },
        { 0.335885, 0.777018, 0.402049 },
        { 0.344074, 0.780029, 0.397381 },
        { 0.352360, 0.783011, 0.392636 },
        { 0.360741, 0.785964, 0.387814 },
        { 0.369214, 0.788888, 0.382914 },
        { 0.377779, 0.791781, 0.377939 },
        { 0.386433, 0.794644, 0.372886 },
        { 0.395174, 0.797475, 0.367757 },
        { 0.404001, 0.800275, 0.362552 },
        { 0.412913, 0.803041, 0.357269 },
        { 0.421908, 0.805774, 0.351910 },
        { 0.430983, 0.808473, 0.346476 },
        { 0.440137, 0.811138, 0.340967 },
        { 0.449368, 0.813768, 0.335384 },
        { 0.458674, 0.816363, 0.329727 },
        { 0.468053, 0.818921, 0.323998 },
        { 0.477504, 0.821444, 0.318195 },
        { 0.487026, 0.823929, 0.312321 },
        { 0.496615, 0.826376, 0.306377 },
        { 0.506271, 0.828786, 0.300362 },
        { 0.515992, 0.831158, 0.294279 },
        { 0.525776, 0.833491, 0.288127 },
        { 0.535621, 0.835785, 0.281908 },
        { 0.545524, 0.838039, 0.275626 },
        { 0.555484, 0.840254, 0.269281 },
        { 0.565498, 0.842430, 0.262877 },
        { 0.575563, 0.844566, 0.256415 },
        { 0.585678, 0.846661, 0.249897 },
        { 0.595839, 0.848717, 0.243329 },
        { 0.606045, 0.850733, 0.236712 },
        { 0.616293, 0.852709, 0.230052 },
        { 0.626579, 0.854645, 0.223353 },
        { 0.636902, 0.856542, 0.216620 },
        { 0.647257, 0.858400, 0.209861 },
        { 0.657642, 0.860219, 0.203082 },
        { 0.668054, 0.861999, 0.196293 },
        { 0.678489, 0.863742, 0.189503 },
        { 0.688944, 0.865448, 0.182725 },
        { 0.699415, 0.867117, 0.175971 },
        { 0.709898, 0.868751, 0.169257 },
        { 0.720391, 0.870350, 0.162603 },
        { 0.730889, 0.871916, 0.156029 },
        { 0.741388, 0.873449, 0.149561 },
        { 0.751884, 0.874951, 0.143228 },
        { 0.762373, 0.876424, 0.137064 },
        { 0.772852, 0.877868, 0.131109 },
        { 0.783315, 0.879285, 0.125405 },
        { 0.793760, 0.880678, 0.120005 },
        { 0.804182, 0.882046, 0.114965 },
        { 0.814576, 0.883393, 0.110347 },
        { 0.824940, 0.884720, 0.106217 },
        { 0.835270, 0.886029, 0.102646 },
        { 0.845561, 0.887322, 0.099702 },
        { 0.855810, 0.888601, 0.097452 },
        { 0.866013, 0.889868, 0.095953 },
        { 0.876168, 0.891125, 0.095250 },
        { 0.886271, 0.892374, 0.095374 },
        { 0.896320, 0.893616, 0.096335 },
        { 0.906311, 0.894855, 0.098125 },
        { 0.916242, 0.896091, 0.100717 },
        { 0.926106, 0.897330, 0.104071 },
        { 0.935904, 0.898570, 0.108131 },
        { 0.945636, 0.899815, 0.112838 },
        { 0.955300, 0.901065, 0.118128 },
        { 0.964894, 0.902323, 0.123941 },
        { 0.974417, 0.903590, 0.130215 },
        { 0.983868, 0.904867, 0.136897 },
        { 0.993248, 0.906157, 0.143936 }
    };
    return internal::CalcLerp(x, data);
}
inline Color GetCividisColor(double x)
{
constexpr Color data[] =
{
{ 0.0000, 0.1262, 0.3015 },
{ 0.0000, 0.1292, 0.3077 },
{ 0.0000, 0.1321, 0.3142 },
{ 0.0000, 0.1350, 0.3205 },
{ 0.0000, 0.1379, 0.3269 },
{ 0.0000, 0.1408, 0.3334 },
{ 0.0000, 0.1437, 0.3400 },
{ 0.0000, 0.1465, 0.3467 },
{ 0.0000, 0.1492, 0.3537 },
{ 0.0000, 0.1519, 0.3606 },
{ 0.0000, 0.1546, 0.3676 },
{ 0.0000, 0.1574, 0.3746 },
{ 0.0000, 0.1601, 0.3817 },
{ 0.0000, 0.1629, 0.3888 },
{ 0.0000, 0.1657, 0.3960 },
{ 0.0000, 0.1685, 0.4031 },
{ 0.0000, 0.1714, 0.4102 },
{ 0.0000, 0.1743, 0.4172 },
{ 0.0000, 0.1773, 0.4241 },
{ 0.0000, 0.1798, 0.4307 },
{ 0.0000, 0.1817, 0.4347 },
{ 0.0000, 0.1834, 0.4363 },
{ 0.0000, 0.1852, 0.4368 },
{ 0.0000, 0.1872, 0.4368 },
{ 0.0000, 0.1901, 0.4365 },
{ 0.0000, 0.1930, 0.4361 },
{ 0.0000, 0.1958, 0.4356 },
{ 0.0000, 0.1987, 0.4349 },
{ 0.0000, 0.2015, 0.4343 },
{ 0.0000, 0.2044, 0.4336 },
{ 0.0000, 0.2073, 0.4329 },
{ 0.0055, 0.2101, 0.4322 },
{ 0.0236, 0.2130, 0.4314 },
{ 0.0416, 0.2158, 0.4308 },
{ 0.0576, 0.2187, 0.4301 },
{ 0.0710, 0.2215, 0.4293 },
{ 0.0827, 0.2244, 0.4287 },
{ 0.0932, 0.2272, 0.4280 },
{ 0.1030, 0.2300, 0.4274 },
{ 0.1120, 0.2329, 0.4268 },
{ 0.1204, 0.2357, 0.4262 },
{ 0.1283, 0.2385, 0.4256 },
{ 0.1359, 0.2414, 0.4251 },
{ 0.1431, 0.2442, 0.4245 },
{ 0.1500, 0.2470, 0.4241 },
{ 0.1566, 0.2498, 0.4236 },
{ 0.1630, 0.2526, 0.4232 },
{ 0.1692, 0.2555, 0.4228 },
{ 0.1752, 0.2583, 0.4224 },
{ 0.1811, 0.2611, 0.4220 },
{ 0.1868, 0.2639, 0.4217 },
{ 0.1923, 0.2667, 0.4214 },
{ 0.1977, 0.2695, 0.4212 },
{ 0.2030, 0.2723, 0.4209 },
{ 0.2082, 0.2751, 0.4207 },
{ 0.2133, 0.2780, 0.4205 },
{ 0.2183, 0.2808, 0.4204 },
{ 0.2232, 0.2836, 0.4203 },
{ 0.2281, 0.2864, 0.4202 },
{ 0.2328, 0.2892, 0.4201 },
{ 0.2375, 0.2920, 0.4200 },
{ 0.2421, 0.2948, 0.4200 },
{ 0.2466, 0.2976, 0.4200 },
{ 0.2511, 0.3004, 0.4201 },
{ 0.2556, 0.3032, 0.4201 },
{ 0.2599, 0.3060, 0.4202 },
{ 0.2643, 0.3088, 0.4203 },
{ 0.2686, 0.3116, 0.4205 },
{ 0.2728, 0.3144, 0.4206 },
{ 0.2770, 0.3172, 0.4208 },
{ 0.2811, 0.3200, 0.4210 },
{ 0.2853, 0.3228, 0.4212 },
{ 0.2894, 0.3256, 0.4215 },
{ 0.2934, 0.3284, 0.4218 },
{ 0.2974, 0.3312, 0.4221 },
{ 0.3014, 0.3340, 0.4224 },
{ 0.3054, 0.3368, 0.4227 },
{ 0.3093, 0.3396, 0.4231 },
{ 0.3132, 0.3424, 0.4236 },
{ 0.3170, 0.3453, 0.4240 },
{ 0.3209, 0.3481, 0.4244 },
{ 0.3247, 0.3509, 0.4249 },
{ 0.3285, 0.3537, 0.4254 },
{ 0.3323, 0.3565, 0.4259 },
{ 0.3361, 0.3593, 0.4264 },
{ 0.3398, 0.3622, 0.4270 },
{ 0.3435, 0.3650, 0.4276 },
{ 0.3472, 0.3678, 0.4282 },
{ 0.3509, 0.3706, 0.4288 },
{ 0.3546, 0.3734, 0.4294 },
{ 0.3582, 0.3763, 0.4302 },
{ 0.3619, 0.3791, 0.4308 },
{ 0.3655, 0.3819, 0.4316 },
{ 0.3691, 0.3848, 0.4322 },
{ 0.3727, 0.3876, 0.4331 },
{ 0.3763, 0.3904, 0.4338 },
{ 0.3798, 0.3933, 0.4346 },
{ 0.3834, 0.3961, 0.4355 },
{ 0.3869, 0.3990, 0.4364 },
{ 0.3905, 0.4018, 0.4372 },
{ 0.3940, 0.4047, 0.4381 },
{ 0.3975, 0.4075, 0.4390 },
{ 0.4010, 0.4104, 0.4400 },
{ 0.4045, 0.4132, 0.4409 },
{ 0.4080, 0.4161, 0.4419 },
{ 0.4114, 0.4189, 0.4430 },
{ 0.4149, 0.4218, 0.4440 },
{ 0.4183, 0.4247, 0.4450 },
{ 0.4218, 0.4275, 0.4462 },
{ 0.4252, 0.4304, 0.4473 },
{ 0.4286, 0.4333, 0.4485 },
{ 0.4320, 0.4362, 0.4496 },
{ 0.4354, 0.4390, 0.4508 },
{ 0.4388, 0.4419, 0.4521 },
{ 0.4422, 0.4448, 0.4534 },
{ 0.4456, 0.4477, 0.4547 },
{ 0.4489, 0.4506, 0.4561 },
{ 0.4523, 0.4535, 0.4575 },
{ 0.4556, 0.4564, 0.4589 },
{ 0.4589, 0.4593, 0.4604 },
{ 0.4622, 0.4622, 0.4620 },
{ 0.4656, 0.4651, 0.4635 },
{ 0.4689, 0.4680, 0.4650 },
{ 0.4722, 0.4709, 0.4665 },
{ 0.4756, 0.4738, 0.4679 },
{ 0.4790, 0.4767, 0.4691 },
{ 0.4825, 0.4797, 0.4701 },
{ 0.4861, 0.4826, 0.4707 },
{ 0.4897, 0.4856, 0.4714 },
{ 0.4934, 0.4886, 0.4719 },
{ 0.4971, 0.4915, 0.4723 },
{ 0.5008, 0.4945, 0.4727 },
{ 0.5045, 0.4975, 0.4730 },
{ 0.5083, 0.5005, 0.4732 },
{ 0.5121, 0.5035, 0.4734 },
{ 0.5158, 0.5065, 0.4736 },
{ 0.5196, 0.5095, 0.4737 },
{ 0.5234, 0.5125, 0.4738 },
{ 0.5272, 0.5155, 0.4739 },
{ 0.5310, 0.5186, 0.4739 },
{ 0.5349, 0.5216, 0.4738 },
{ 0.5387, 0.5246, 0.4739 },
{ 0.5425, 0.5277, 0.4738 },
{ 0.5464, 0.5307, 0.4736 },
{ 0.5502, 0.5338, 0.4735 },
{ 0.5541, 0.5368, 0.4733 },
{ 0.5579, 0.5399, 0.4732 },
{ 0.5618, 0.5430, 0.4729 },
{ 0.5657, 0.5461, 0.4727 },
{ 0.5696, 0.5491, 0.4723 },
{ 0.5735, 0.5522, 0.4720 },
{ 0.5774, 0.5553, 0.4717 },
{ 0.5813, 0.5584, 0.4714 },
{ 0.5852, 0.5615, 0.4709 },
{ 0.5892, 0.5646, 0.4705 },
{ 0.5931, 0.5678, 0.4701 },
{ 0.5970, 0.5709, 0.4696 },
{ 0.6010, 0.5740, 0.4691 },
{ 0.6050, 0.5772, 0.4685 },
{ 0.6089, 0.5803, 0.4680 },
{ 0.6129, 0.5835, 0.4673 },
{ 0.6168, 0.5866, 0.4668 },
{ 0.6208, 0.5898, 0.4662 },
{ 0.6248, 0.5929, 0.4655 },
{ 0.6288, 0.5961, 0.4649 },
{ 0.6328, 0.5993, 0.4641 },
{ 0.6368, 0.6025, 0.4632 },
{ 0.6408, 0.6057, 0.4625 },
{ 0.6449, 0.6089, 0.4617 },
{ 0.6489, 0.6121, 0.4609 },
{ 0.6529, 0.6153, 0.4600 },
{ 0.6570, 0.6185, 0.4591 },
{ 0.6610, 0.6217, 0.4583 },
{ 0.6651, 0.6250, 0.4573 },
{ 0.6691, 0.6282, 0.4562 },
{ 0.6732, 0.6315, 0.4553 },
{ 0.6773, 0.6347, 0.4543 },
{ 0.6813, 0.6380, 0.4532 },
{ 0.6854, 0.6412, 0.4521 },
{ 0.6895, 0.6445, 0.4511 },
{ 0.6936, 0.6478, 0.4499 },
{ 0.6977, 0.6511, 0.4487 },
{ 0.7018, 0.6544, 0.4475 },
{ 0.7060, 0.6577, 0.4463 },
{ 0.7101, 0.6610, 0.4450 },
{ 0.7142, 0.6643, 0.4437 },
{ 0.7184, 0.6676, 0.4424 },
{ 0.7225, 0.6710, 0.4409 },
{ 0.7267, 0.6743, 0.4396 },
{ 0.7308, 0.6776, 0.4382 },
{ 0.7350, 0.6810, 0.4368 },
{ 0.7392, 0.6844, 0.4352 },
{ 0.7434, 0.6877, 0.4338 },
{ 0.7476, 0.6911, 0.4322 },
{ 0.7518, 0.6945, 0.4307 },
{ 0.7560, 0.6979, 0.4290 },
{ 0.7602, 0.7013, 0.4273 },
{ 0.7644, 0.7047, 0.4258 },
{ 0.7686, 0.7081, 0.4241 },
{ 0.7729, 0.7115, 0.4223 },
{ 0.7771, 0.7150, 0.4205 },
{ 0.7814, 0.7184, 0.4188 },
{ 0.7856, 0.7218, 0.4168 },
{ 0.7899, 0.7253, 0.4150 },
{ 0.7942, 0.7288, 0.4129 },
{ 0.7985, 0.7322, 0.4111 },
{ 0.8027, 0.7357, 0.4090 },
{ 0.8070, 0.7392, 0.4070 },
{ 0.8114, 0.7427, 0.4049 },
{ 0.8157, 0.7462, 0.4028 },
{ 0.8200, 0.7497, 0.4007 },
{ 0.8243, 0.7532, 0.3984 },
{ 0.8287, 0.7568, 0.3961 },
{ 0.8330, 0.7603, 0.3938 },
{ 0.8374, 0.7639, 0.3915 },
{ 0.8417, 0.7674, 0.3892 },
{ 0.8461, 0.7710, 0.3869 },
{ 0.8505, 0.7745, 0.3843 },
{ 0.8548, 0.7781, 0.3818 },
{ 0.8592, 0.7817, 0.3793 },
{ 0.8636, 0.7853, 0.3766 },
{ 0.8681, 0.7889, 0.3739 },
{ 0.8725, 0.7926, 0.3712 },
{ 0.8769, 0.7962, 0.3684 },
{ 0.8813, 0.7998, 0.3657 },
{ 0.8858, 0.8035, 0.3627 },
{ 0.8902, 0.8071, 0.3599 },
{ 0.8947, 0.8108, 0.3569 },
{ 0.8992, 0.8145, 0.3538 },
{ 0.9037, 0.8182, 0.3507 },
{ 0.9082, 0.8219, 0.3474 },
{ 0.9127, 0.8256, 0.3442 },
{ 0.9172, 0.8293, 0.3409 },
{ 0.9217, 0.8330, 0.3374 },
{ 0.9262, 0.8367, 0.3340 },
{ 0.9308, 0.8405, 0.3306 },
{ 0.9353, 0.8442, 0.3268 },
{ 0.9399, 0.8480, 0.3232 },
{ 0.9444, 0.8518, 0.3195 },
{ 0.9490, 0.8556, 0.3155 },
{ 0.9536, 0.8593, 0.3116 },
{ 0.9582, 0.8632, 0.3076 },
{ 0.9628, 0.8670, 0.3034 },
{ 0.9674, 0.8708, 0.2990 },
{ 0.9721, 0.8746, 0.2947 },
{ 0.9767, 0.8785, 0.2901 },
{ 0.9814, 0.8823, 0.2856 },
{ 0.9860, 0.8862, 0.2807 },
{ 0.9907, 0.8901, 0.2759 },
{ 0.9954, 0.8940, 0.2708 },
{ 1.0000, 0.8979, 0.2655 },
{ 1.0000, 0.9018, 0.2600 },
{ 1.0000, 0.9057, 0.2593 },
{ 1.0000, 0.9094, 0.2634 },
{ 1.0000, 0.9131, 0.2680 },
{ 1.0000, 0.9169, 0.2731 }
};
return internal::CalcLerp(x, data);
}
// GitHub "contribution graph" colormap: five control colors from light gray
// through progressively darker greens; intermediate x values are linearly
// interpolated between neighboring table entries (clamping, if any, is
// handled inside internal::CalcLerp).
inline Color GetGithubColor(double x)
{
    // Lookup table kept byte-identical to the published palette.
    constexpr Color data[] =
    {
        { 0.933333, 0.933333, 0.933333 },
        { 0.776470, 0.894117, 0.545098 },
        { 0.482352, 0.788235, 0.435294 },
        { 0.137254, 0.603921, 0.231372 },
        { 0.098039, 0.380392, 0.152941 }
    };
    return internal::CalcLerp(x, data);
}
// Cubehelix colormap (Green, 2011): monotonically increasing perceived
// brightness from black to white with a rotating hue, making it suitable for
// both color and grayscale reproduction. The 256-entry table below is
// precomputed; x in [0, 1] is linearly interpolated via internal::CalcLerp.
inline Color GetCubehelixColor(double x)
{
    // Precomputed 256-entry RGB lookup table; do not edit values by hand.
    constexpr Color data[] =
    {
        { 0.000000, 0.000000, 0.000000 },
        { 0.006716, 0.002119, 0.005970 },
        { 0.013252, 0.004287, 0.012162 },
        { 0.019599, 0.006514, 0.018563 },
        { 0.025748, 0.008803, 0.025162 },
        { 0.031691, 0.011164, 0.031946 },
        { 0.037421, 0.013600, 0.038902 },
        { 0.042932, 0.016118, 0.046016 },
        { 0.048217, 0.018724, 0.053275 },
        { 0.053271, 0.021423, 0.060666 },
        { 0.058090, 0.024220, 0.068173 },
        { 0.062670, 0.027119, 0.075781 },
        { 0.067008, 0.030126, 0.083478 },
        { 0.071101, 0.033243, 0.091246 },
        { 0.074947, 0.036475, 0.099072 },
        { 0.078546, 0.039824, 0.106939 },
        { 0.081898, 0.043295, 0.114834 },
        { 0.085002, 0.046889, 0.122740 },
        { 0.087860, 0.050609, 0.130643 },
        { 0.090474, 0.054457, 0.138527 },
        { 0.092845, 0.058434, 0.146378 },
        { 0.094978, 0.062542, 0.154180 },
        { 0.096875, 0.066781, 0.161918 },
        { 0.098542, 0.071152, 0.169579 },
        { 0.099984, 0.075655, 0.177147 },
        { 0.101205, 0.080290, 0.184609 },
        { 0.102212, 0.085055, 0.191951 },
        { 0.103013, 0.089951, 0.199159 },
        { 0.103615, 0.094975, 0.206221 },
        { 0.104025, 0.100126, 0.213124 },
        { 0.104252, 0.105403, 0.219855 },
        { 0.104305, 0.110801, 0.226402 },
        { 0.104194, 0.116320, 0.232755 },
        { 0.103929, 0.121956, 0.238903 },
        { 0.103519, 0.127705, 0.244834 },
        { 0.102976, 0.133564, 0.250541 },
        { 0.102310, 0.139529, 0.256012 },
        { 0.101534, 0.145596, 0.261240 },
        { 0.100659, 0.151759, 0.266217 },
        { 0.099697, 0.158016, 0.270935 },
        { 0.098661, 0.164359, 0.275388 },
        { 0.097563, 0.170785, 0.279569 },
        { 0.096415, 0.177287, 0.283474 },
        { 0.095232, 0.183860, 0.287097 },
        { 0.094026, 0.190498, 0.290434 },
        { 0.092810, 0.197194, 0.293483 },
        { 0.091597, 0.203943, 0.296240 },
        { 0.090402, 0.210739, 0.298703 },
        { 0.089237, 0.217573, 0.300873 },
        { 0.088115, 0.224441, 0.302747 },
        { 0.087051, 0.231334, 0.304327 },
        { 0.086056, 0.238247, 0.305612 },
        { 0.085146, 0.245171, 0.306606 },
        { 0.084331, 0.252101, 0.307310 },
        { 0.083626, 0.259028, 0.307728 },
        { 0.083043, 0.265946, 0.307863 },
        { 0.082594, 0.272848, 0.307720 },
        { 0.082291, 0.279726, 0.307304 },
        { 0.082148, 0.286573, 0.306621 },
        { 0.082174, 0.293383, 0.305677 },
        { 0.082381, 0.300147, 0.304480 },
        { 0.082780, 0.306860, 0.303037 },
        { 0.083383, 0.313514, 0.301356 },
        { 0.084198, 0.320102, 0.299448 },
        { 0.085235, 0.326618, 0.297320 },
        { 0.086504, 0.333055, 0.294984 },
        { 0.088014, 0.339406, 0.292449 },
        { 0.089772, 0.345666, 0.289728 },
        { 0.091787, 0.351829, 0.286831 },
        { 0.094066, 0.357887, 0.283771 },
        { 0.096615, 0.363836, 0.280560 },
        { 0.099441, 0.369671, 0.277211 },
        { 0.102549, 0.375385, 0.273736 },
        { 0.105944, 0.380974, 0.270151 },
        { 0.109630, 0.386433, 0.266468 },
        { 0.113611, 0.391757, 0.262703 },
        { 0.117891, 0.396943, 0.258868 },
        { 0.122472, 0.401985, 0.254979 },
        { 0.127356, 0.406881, 0.251051 },
        { 0.132543, 0.411627, 0.247099 },
        { 0.138035, 0.416220, 0.243137 },
        { 0.143832, 0.420656, 0.239182 },
        { 0.149933, 0.424934, 0.235247 },
        { 0.156336, 0.429052, 0.231350 },
        { 0.163040, 0.433007, 0.227504 },
        { 0.170042, 0.436798, 0.223726 },
        { 0.177339, 0.440423, 0.220029 },
        { 0.184927, 0.443882, 0.216431 },
        { 0.192802, 0.447175, 0.212944 },
        { 0.200958, 0.450301, 0.209585 },
        { 0.209391, 0.453260, 0.206367 },
        { 0.218092, 0.456053, 0.203306 },
        { 0.227057, 0.458680, 0.200415 },
        { 0.236277, 0.461144, 0.197707 },
        { 0.245744, 0.463444, 0.195197 },
        { 0.255451, 0.465584, 0.192898 },
        { 0.265388, 0.467565, 0.190822 },
        { 0.275545, 0.469391, 0.188982 },
        { 0.285913, 0.471062, 0.187389 },
        { 0.296481, 0.472584, 0.186055 },
        { 0.307239, 0.473959, 0.184992 },
        { 0.318175, 0.475191, 0.184208 },
        { 0.329277, 0.476285, 0.183716 },
        { 0.340534, 0.477243, 0.183523 },
        { 0.351934, 0.478072, 0.183638 },
        { 0.363463, 0.478776, 0.184071 },
        { 0.375109, 0.479360, 0.184829 },
        { 0.386858, 0.479829, 0.185918 },
        { 0.398697, 0.480190, 0.187345 },
        { 0.410613, 0.480448, 0.189115 },
        { 0.422591, 0.480609, 0.191235 },
        { 0.434618, 0.480679, 0.193708 },
        { 0.446680, 0.480665, 0.196538 },
        { 0.458762, 0.480574, 0.199728 },
        { 0.470850, 0.480412, 0.203280 },
        { 0.482930, 0.480186, 0.207197 },
        { 0.494987, 0.479903, 0.211478 },
        { 0.507008, 0.479572, 0.216124 },
        { 0.518978, 0.479198, 0.221136 },
        { 0.530883, 0.478789, 0.226510 },
        { 0.542708, 0.478353, 0.232247 },
        { 0.554441, 0.477898, 0.238342 },
        { 0.566067, 0.477430, 0.244794 },
        { 0.577573, 0.476958, 0.251597 },
        { 0.588945, 0.476490, 0.258747 },
        { 0.600171, 0.476032, 0.266239 },
        { 0.611237, 0.475592, 0.274067 },
        { 0.622132, 0.475178, 0.282223 },
        { 0.632842, 0.474798, 0.290702 },
        { 0.643357, 0.474459, 0.299495 },
        { 0.653665, 0.474168, 0.308593 },
        { 0.663755, 0.473933, 0.317987 },
        { 0.673616, 0.473761, 0.327668 },
        { 0.683239, 0.473658, 0.337626 },
        { 0.692613, 0.473632, 0.347849 },
        { 0.701729, 0.473690, 0.358327 },
        { 0.710579, 0.473838, 0.369047 },
        { 0.719155, 0.474083, 0.379998 },
        { 0.727448, 0.474430, 0.391167 },
        { 0.735453, 0.474886, 0.402541 },
        { 0.743162, 0.475457, 0.414106 },
        { 0.750569, 0.476148, 0.425849 },
        { 0.757669, 0.476964, 0.437755 },
        { 0.764458, 0.477911, 0.449811 },
        { 0.770932, 0.478994, 0.462001 },
        { 0.777086, 0.480216, 0.474310 },
        { 0.782918, 0.481583, 0.486725 },
        { 0.788426, 0.483098, 0.499228 },
        { 0.793609, 0.484765, 0.511805 },
        { 0.798465, 0.486587, 0.524441 },
        { 0.802993, 0.488567, 0.537119 },
        { 0.807196, 0.490708, 0.549824 },
        { 0.811072, 0.493013, 0.562540 },
        { 0.814625, 0.495483, 0.575253 },
        { 0.817855, 0.498121, 0.587945 },
        { 0.820767, 0.500927, 0.600602 },
        { 0.823364, 0.503903, 0.613208 },
        { 0.825649, 0.507050, 0.625748 },
        { 0.827628, 0.510368, 0.638207 },
        { 0.829305, 0.513857, 0.650570 },
        { 0.830688, 0.517516, 0.662822 },
        { 0.831781, 0.521346, 0.674949 },
        { 0.832593, 0.525345, 0.686938 },
        { 0.833130, 0.529511, 0.698773 },
        { 0.833402, 0.533844, 0.710443 },
        { 0.833416, 0.538342, 0.721933 },
        { 0.833181, 0.543001, 0.733232 },
        { 0.832708, 0.547820, 0.744327 },
        { 0.832006, 0.552795, 0.755206 },
        { 0.831086, 0.557924, 0.765859 },
        { 0.829958, 0.563202, 0.776274 },
        { 0.828633, 0.568627, 0.786443 },
        { 0.827124, 0.574193, 0.796354 },
        { 0.825442, 0.579897, 0.805999 },
        { 0.823599, 0.585733, 0.815370 },
        { 0.821608, 0.591698, 0.824459 },
        { 0.819482, 0.597785, 0.833258 },
        { 0.817233, 0.603990, 0.841761 },
        { 0.814875, 0.610307, 0.849963 },
        { 0.812421, 0.616730, 0.857858 },
        { 0.809884, 0.623252, 0.865441 },
        { 0.807278, 0.629869, 0.872709 },
        { 0.804617, 0.636573, 0.879658 },
        { 0.801914, 0.643359, 0.886286 },
        { 0.799183, 0.650218, 0.892592 },
        { 0.796438, 0.657146, 0.898574 },
        { 0.793692, 0.664134, 0.904231 },
        { 0.790959, 0.671176, 0.909565 },
        { 0.788253, 0.678264, 0.914576 },
        { 0.785586, 0.685392, 0.919267 },
        { 0.782973, 0.692553, 0.923639 },
        { 0.780425, 0.699738, 0.927695 },
        { 0.777957, 0.706942, 0.931441 },
        { 0.775579, 0.714157, 0.934879 },
        { 0.773305, 0.721375, 0.938016 },
        { 0.771147, 0.728589, 0.940857 },
        { 0.769116, 0.735793, 0.943409 },
        { 0.767224, 0.742979, 0.945678 },
        { 0.765481, 0.750140, 0.947673 },
        { 0.763898, 0.757269, 0.949402 },
        { 0.762485, 0.764360, 0.950874 },
        { 0.761251, 0.771405, 0.952098 },
        { 0.760207, 0.778399, 0.953084 },
        { 0.759360, 0.785335, 0.953843 },
        { 0.758718, 0.792207, 0.954386 },
        { 0.758290, 0.799008, 0.954724 },
        { 0.758082, 0.805734, 0.954869 },
        { 0.758101, 0.812378, 0.954833 },
        { 0.758353, 0.818934, 0.954629 },
        { 0.758842, 0.825399, 0.954270 },
        { 0.759575, 0.831767, 0.953769 },
        { 0.760554, 0.838033, 0.953140 },
        { 0.761784, 0.844192, 0.952397 },
        { 0.763267, 0.850242, 0.951554 },
        { 0.765006, 0.856178, 0.950625 },
        { 0.767001, 0.861997, 0.949624 },
        { 0.769255, 0.867695, 0.948567 },
        { 0.771766, 0.873270, 0.947467 },
        { 0.774535, 0.878718, 0.946340 },
        { 0.777561, 0.884039, 0.945201 },
        { 0.780841, 0.889230, 0.944063 },
        { 0.784374, 0.894289, 0.942942 },
        { 0.788156, 0.899216, 0.941853 },
        { 0.792184, 0.904010, 0.940809 },
        { 0.796453, 0.908669, 0.939825 },
        { 0.800958, 0.913194, 0.938916 },
        { 0.805694, 0.917586, 0.938095 },
        { 0.810654, 0.921845, 0.937376 },
        { 0.815832, 0.925971, 0.936772 },
        { 0.821221, 0.929967, 0.936297 },
        { 0.826811, 0.933833, 0.935962 },
        { 0.832595, 0.937572, 0.935781 },
        { 0.838565, 0.941187, 0.935766 },
        { 0.844709, 0.944679, 0.935927 },
        { 0.851018, 0.948053, 0.936275 },
        { 0.857482, 0.951311, 0.936822 },
        { 0.864090, 0.954457, 0.937578 },
        { 0.870830, 0.957495, 0.938550 },
        { 0.877690, 0.960430, 0.939749 },
        { 0.884659, 0.963266, 0.941183 },
        { 0.891723, 0.966009, 0.942858 },
        { 0.898871, 0.968662, 0.944783 },
        { 0.906088, 0.971233, 0.946962 },
        { 0.913362, 0.973726, 0.949402 },
        { 0.920679, 0.976147, 0.952108 },
        { 0.928026, 0.978504, 0.955083 },
        { 0.935387, 0.980802, 0.958331 },
        { 0.942750, 0.983048, 0.961854 },
        { 0.950101, 0.985249, 0.965654 },
        { 0.957424, 0.987412, 0.969733 },
        { 0.964706, 0.989543, 0.974090 },
        { 0.971932, 0.991652, 0.978724 },
        { 0.979088, 0.993744, 0.983635 },
        { 0.986161, 0.995828, 0.988820 },
        { 0.993136, 0.997910, 0.994276 },
        { 1.000000, 1.000000, 1.000000 }
    };
    return internal::CalcLerp(x, data);
}
#if defined(TINYCOLORMAP_WITH_QT5) && defined(TINYCOLORMAP_WITH_EIGEN)
// Render a matrix as a heat-map image: entries are min-max normalized to
// [0, 1] and mapped through tinycolormap's default colormap, one pixel per
// matrix entry (row -> y, column -> x).
// @param matrix  values to visualize; must be non-empty
// @return ARGB32 QImage of size (cols x rows)
inline QImage CreateMatrixVisualization(const Eigen::MatrixXd& matrix)
{
    const int w = matrix.cols();
    const int h = matrix.rows();
    const double max_coeff = matrix.maxCoeff();
    const double min_coeff = matrix.minCoeff();
    const double range     = max_coeff - min_coeff;
    // Fix: a constant matrix previously divided by zero here, producing NaN
    // values and undefined pixel colors; map that case to all-zeros instead.
    Eigen::MatrixXd normalized;
    if (range > 0.0)
    {
        normalized = (1.0 / range) * (matrix - Eigen::MatrixXd::Constant(h, w, min_coeff));
    }
    else
    {
        normalized = Eigen::MatrixXd::Zero(h, w);
    }
    QImage image(w, h, QImage::Format_ARGB32);
    for (int x = 0; x < w; ++ x)
    {
        for (int y = 0; y < h; ++ y)
        {
            const QColor color = tinycolormap::GetColor(normalized(y, x)).ConvertToQColor();
            image.setPixel(x, y, color.rgb());
        }
    }
    return image;
}
// Render `matrix` via CreateMatrixVisualization and save it to `path`
// (the image format is deduced by Qt from the file extension).
inline void ExportMatrixVisualization(const Eigen::MatrixXd& matrix, const std::string& path)
{
    CreateMatrixVisualization(matrix).save(QString::fromStdString(path));
}
#endif
}
#endif
| Unknown |
3D | hku-mars/ImMesh | src/meshing/mesh_rec_display.cpp | .cpp | 12,849 | 327 | #define STB_IMAGE_WRITE_IMPLEMENTATION
#define STBI_MSC_SECURE_CRT
#include "mesh_rec_display.hpp"
#include "tools/openGL_libs/gl_draw_founction.hpp"
#include "tools_timer.hpp"
#include "tinycolormap.hpp"
#include "tools/openGL_libs/openGL_camera.hpp"
extern Global_map g_map_rgb_pts_mesh;
extern Triangle_manager g_triangles_manager;
extern LiDAR_frame_pts_and_pose_vec g_eigen_vec_vec;
extern Eigen::Matrix3d g_camera_K;
// extern Eigen::Matrix3d lidar_frame_to_camera_frame;
template < int M, int option = EIGEN_DATA_TYPE_DEFAULT_OPTION >
using eigen_vec_uc = Eigen::Matrix< unsigned char, M, 1, option >;
extern GL_camera g_gl_camera;
Common_tools::Point_cloud_shader g_path_shader;
Common_tools::Point_cloud_shader g_LiDAR_point_shader;
Common_tools::Triangle_facet_shader g_triangle_facet_shader;
Common_tools::Axis_shader g_axis_shader;
Common_tools::Ground_plane_shader g_ground_plane_shader;
Common_tools::Camera_pose_shader g_camera_pose_shader;
// std::map< Visibility_region_ptr, Common_tools::Triangle_facet_shader > g_map_region_triangle_shader;
// ANCHOR - draw_triangle
#include <chrono>
#include <thread>
std::mutex mutex_triangle_vec;
extern unsigned int vbo;
extern unsigned int vbo_color;
extern int g_current_frame;
#define ENABLE_BUFFER 0
extern bool g_display_mesh;
extern float g_ply_smooth_factor;
extern int g_ply_smooth_k;
extern double g_kd_tree_accept_pt_dis;
extern bool g_force_refresh_triangle;
vec_3f g_axis_min_max[ 2 ];
struct Region_triangles_shader
{
std::vector< vec_3f > m_triangle_pt_vec;
Common_tools::Triangle_facet_shader m_triangle_facet_shader;
int m_need_init_shader = true;
int m_need_refresh_shader = true;
int m_if_set_color = false;
std::shared_ptr< std::mutex > m_mutex_ptr = nullptr;
Region_triangles_shader() { m_mutex_ptr = std::make_shared< std::mutex >(); }
void init_openGL_shader() { m_triangle_facet_shader.init( SHADER_DIR ); }
Common_tools::Triangle_facet_shader *get_shader_ptr() { return &m_triangle_facet_shader; }
void init_pointcloud()
{
std::unique_lock< std::mutex > lock( *m_mutex_ptr );
if ( m_if_set_color )
{
m_triangle_facet_shader.set_pointcloud( m_triangle_pt_vec, g_axis_min_max, 2 );
}
else
{
m_triangle_facet_shader.set_pointcloud( m_triangle_pt_vec );
}
}
void unparse_triangle_set_to_vector( const Triangle_set &tri_angle_set )
{
// TODO: synchronized data buffer here:
std::unique_lock< std::mutex > lock( *m_mutex_ptr );
m_triangle_pt_vec.resize( tri_angle_set.size() * 3 );
// cout << "Number of pt_size = " << m_triangle_pt_list.size() << endl;
int count = 0;
for ( Triangle_set::iterator it = tri_angle_set.begin(); it != tri_angle_set.end(); it++ )
{
for ( size_t pt_idx = 0; pt_idx < 3; pt_idx++ )
{
if ( g_map_rgb_pts_mesh.m_rgb_pts_vec[ ( *it )->m_tri_pts_id[ pt_idx ] ]->m_smoothed == false )
{
g_map_rgb_pts_mesh.smooth_pts( g_map_rgb_pts_mesh.m_rgb_pts_vec[ ( *it )->m_tri_pts_id[ pt_idx ] ], g_ply_smooth_factor,
g_ply_smooth_k, g_kd_tree_accept_pt_dis );
}
}
vec_3 pt_a = g_map_rgb_pts_mesh.m_rgb_pts_vec[ ( *it )->m_tri_pts_id[ 0 ] ]->get_pos( 1 );
vec_3 pt_b = g_map_rgb_pts_mesh.m_rgb_pts_vec[ ( *it )->m_tri_pts_id[ 1 ] ]->get_pos( 1 );
vec_3 pt_c = g_map_rgb_pts_mesh.m_rgb_pts_vec[ ( *it )->m_tri_pts_id[ 2 ] ]->get_pos( 1 );
m_triangle_pt_vec[ count ] = pt_a.cast< float >();
m_triangle_pt_vec[ count + 1 ] = pt_b.cast< float >();
m_triangle_pt_vec[ count + 2 ] = pt_c.cast< float >();
count = count + 3;
}
}
void get_axis_min_max( vec_3f *axis_min_max = nullptr )
{
if ( axis_min_max != nullptr )
{
for ( int i = 0; i < m_triangle_pt_vec.size(); i++ )
{
if ( axis_min_max[ 0 ]( 0 ) > m_triangle_pt_vec[ i ]( 0 ) )
{
axis_min_max[ 0 ]( 0 ) = m_triangle_pt_vec[ i ]( 0 );
}
if ( axis_min_max[ 0 ]( 1 ) > m_triangle_pt_vec[ i ]( 1 ) )
{
axis_min_max[ 0 ]( 1 ) = m_triangle_pt_vec[ i ]( 1 );
}
if ( axis_min_max[ 0 ]( 2 ) > m_triangle_pt_vec[ i ]( 2 ) )
{
axis_min_max[ 0 ]( 2 ) = m_triangle_pt_vec[ i ]( 2 );
}
if ( axis_min_max[ 1 ]( 0 ) < m_triangle_pt_vec[ i ]( 0 ) )
{
axis_min_max[ 1 ]( 0 ) = m_triangle_pt_vec[ i ]( 0 );
}
if ( axis_min_max[ 1 ]( 1 ) < m_triangle_pt_vec[ i ]( 1 ) )
{
axis_min_max[ 1 ]( 1 ) = m_triangle_pt_vec[ i ]( 1 );
}
if ( axis_min_max[ 1 ]( 2 ) < m_triangle_pt_vec[ i ]( 2 ) )
{
axis_min_max[ 1 ]( 2 ) = m_triangle_pt_vec[ i ]( 2 );
}
}
}
}
void synchronized_from_region( Sync_triangle_set *sync_triangle_set, vec_3f *axis_min_max = nullptr )
{
if ( sync_triangle_set == nullptr )
{
cout << "sync_triangle_set == nullptr" << endl;
return;
}
if ( sync_triangle_set->m_if_required_synchronized )
{
Triangle_set triangle_set;
sync_triangle_set->get_triangle_set( triangle_set, true );
unparse_triangle_set_to_vector( triangle_set );
get_axis_min_max( axis_min_max );
std::this_thread::sleep_for( std::chrono::microseconds( 100 ) );
m_need_refresh_shader = true;
}
}
void draw( const Cam_view &gl_cam )
{
if ( m_need_init_shader )
{
init_openGL_shader();
m_need_init_shader = false;
}
if ( m_triangle_pt_vec.size() < 3 )
{
return;
}
if ( m_need_refresh_shader )
{
init_pointcloud();
m_need_refresh_shader = false;
}
m_triangle_facet_shader.draw( gl_cam.m_glm_projection_mat, Common_tools::eigen2glm( gl_cam.m_camera_pose_mat44_inverse ) );
}
};
void display_current_LiDAR_pts( int current_frame_idx, double pts_size, vec_4f color )
{
if ( current_frame_idx < 1 )
{
return;
}
g_LiDAR_point_shader.set_point_attr( pts_size );
g_LiDAR_point_shader.set_pointcloud( g_eigen_vec_vec[ current_frame_idx ].first, vec_3( 1.0, 1.0, 1.0 ) );
g_LiDAR_point_shader.draw( g_gl_camera.m_gl_cam.m_glm_projection_mat,
Common_tools::eigen2glm( g_gl_camera.m_gl_cam.m_camera_pose_mat44_inverse ) );
}
void display_reinforced_LiDAR_pts( std::vector< vec_3f > &pt_vec, double pts_size, vec_3f color )
{
g_LiDAR_point_shader.set_point_attr( pts_size );
g_LiDAR_point_shader.set_pointcloud( pt_vec, color.cast< double >() );
g_LiDAR_point_shader.draw( g_gl_camera.m_gl_cam.m_glm_projection_mat,
Common_tools::eigen2glm( g_gl_camera.m_gl_cam.m_camera_pose_mat44_inverse ) );
}
// One-time construction of every OpenGL shader program used by the viewer.
// All shaders load their GLSL sources from SHADER_DIR; must be called on the
// thread that owns the GL context.
void init_openGL_shader()
{
    g_LiDAR_point_shader.init( SHADER_DIR );
    g_path_shader.init( SHADER_DIR );
    // Init axis buffer
    g_axis_shader.init( SHADER_DIR, 1 );
    // Init ground shader
    g_ground_plane_shader.init( SHADER_DIR, 10, 10 );   // presumably a 10 x 10 grid — confirm
    g_camera_pose_shader.init( SHADER_DIR );
    g_triangle_facet_shader.init( SHADER_DIR );
}
std::mutex g_region_triangle_shader_mutex;
std::vector< std::shared_ptr< Region_triangles_shader > > g_region_triangles_shader_vec;
std::map< Sync_triangle_set *, std::shared_ptr< Region_triangles_shader > > g_map_region_triangles_shader;
extern bool g_mesh_if_color;
extern float g_wireframe_width;
extern bool g_display_face;
std::vector< vec_3 > pt_camera_traj;
// ANCHOR - synchronize_triangle_list_for_disp
// Pull freshly meshed triangles from g_triangles_manager into the per-region
// shader objects consumed by the render thread. Runs periodically on the
// synchronization service thread (service_refresh_and_synchronize_triangle).
void synchronize_triangle_list_for_disp()
{
    int region_size = g_triangles_manager.m_triangle_set_vector.size();
    // Latch the global force-refresh request once at entry; if it flips to
    // true while we are mid-scan, the loop restarts from the beginning so
    // every region is refreshed.
    bool if_force_refresh = g_force_refresh_triangle;
    for ( int region_idx = 0; region_idx < region_size; region_idx++ )
    {
        Sync_triangle_set * sync_triangle_set_ptr = g_triangles_manager.m_triangle_set_vector[ region_idx ];
        std::shared_ptr< Region_triangles_shader > region_triangles_shader_ptr = nullptr;
        if ( g_map_region_triangles_shader.find( sync_triangle_set_ptr ) == g_map_region_triangles_shader.end() )
        {
            // First time this region is seen: create its shader and register
            // it in both the lookup map and the flat draw list.
            region_triangles_shader_ptr = std::make_shared< Region_triangles_shader >();
            g_region_triangle_shader_mutex.lock();
            g_map_region_triangles_shader.insert( std::make_pair( sync_triangle_set_ptr, region_triangles_shader_ptr ) );
            g_region_triangles_shader_vec.push_back( region_triangles_shader_ptr );
            g_region_triangle_shader_mutex.unlock();
        }
        else
        {
            region_triangles_shader_ptr = g_map_region_triangles_shader[ sync_triangle_set_ptr ];
        }
        if ( region_triangles_shader_ptr != nullptr )
        {
            if ( g_force_refresh_triangle && if_force_refresh == false )
            {
                // A force-refresh arrived mid-scan: restart the whole pass.
                if_force_refresh = true;
                region_idx = -1; // refresh from the start
            }
            if(if_force_refresh)
            {
                // Mark the region dirty so synchronized_from_region re-flattens it.
                sync_triangle_set_ptr->m_if_required_synchronized = true;
            }
            region_triangles_shader_ptr->synchronized_from_region( sync_triangle_set_ptr, g_axis_min_max );
        }
    }
    // Acknowledge the force-refresh request after the full pass completes.
    if ( g_force_refresh_triangle )
    {
        g_force_refresh_triangle = false;
    }
}
// Background service loop: periodically synchronizes the display triangle
// buffers. `sleep_time` is the period in milliseconds. Never returns.
void service_refresh_and_synchronize_triangle( double sleep_time )
{
    // Start with an inverted bounding box so the first synchronization pass
    // always tightens it.
    g_axis_min_max[ 0 ] = vec_3f( 1e8, 1e8, 1e8 );
    g_axis_min_max[ 1 ] = vec_3f( -1e8, -1e8, -1e8 );
    const auto period = std::chrono::milliseconds( static_cast< int >( sleep_time ) );
    for ( ;; )
    {
        std::this_thread::sleep_for( period );
        synchronize_triangle_list_for_disp();
    }
}
// Render every per-region triangle shader with the current display settings.
void draw_triangle( const Cam_view &gl_cam )
{
    // Snapshot the count once; regions appended concurrently are picked up on
    // the next frame.
    const int num_regions = g_region_triangles_shader_vec.size();
    for ( int idx = 0; idx < num_regions; idx++ )
    {
        auto &region_shader = g_region_triangles_shader_vec[ idx ];
        region_shader->m_triangle_facet_shader.m_if_draw_face = g_display_face;
        region_shader->m_if_set_color = g_mesh_if_color;
        region_shader->draw( gl_cam );
    }
}
// Draw the recorded camera trajectory as a connected line strip.
void display_camera_traj( float display_size )
{
    // Nothing recorded yet — skip rendering entirely.
    if ( pt_camera_traj.empty() )
    {
        return;
    }
    g_path_shader.set_pointcloud( pt_camera_traj );
    g_path_shader.set_point_attr( display_size + 2, 0, 1.0 );
    g_path_shader.m_draw_points_number = pt_camera_traj.size();
    g_path_shader.draw( g_gl_camera.m_gl_cam.m_glm_projection_mat,
                        Common_tools::eigen2glm( g_gl_camera.m_gl_cam.m_camera_pose_mat44_inverse ),
                        GL_LINE_STRIP );
}
// Draw the camera-pose glyph for the given frame. The stored pose vector is
// interpreted as [quaternion(4) | translation(3) | ...]; the rotation is taken
// from the LiDAR frame into the camera frame, then the whole pose is inverted
// for the shader.
// NOTE(review): pt_disp_size is currently unused.
void draw_camera_pose( int current_frame_idx, float pt_disp_size, float display_cam_size )
{
    // First 4 entries: orientation quaternion coefficients; entries 4..6: translation.
    Eigen::Quaterniond pose_q( g_eigen_vec_vec[ current_frame_idx ].second.head< 4 >() );
    vec_3 pose_t = g_eigen_vec_vec[ current_frame_idx ].second.block( 4, 0, 3, 1 );
    // Fixed axis-convention rotation between the LiDAR and camera frames.
    mat_3_3 lidar_frame_to_camera_frame;
    lidar_frame_to_camera_frame << 0, 0, 1, -1, 0, 0, 0, -1, 0;
    pose_q = Eigen::Quaterniond( pose_q.toRotationMatrix() * lidar_frame_to_camera_frame );
    // Invert the pose (world->camera) before handing it to the shader.
    pose_t = pose_q.inverse() * ( pose_t * -1.0 );
    pose_q = pose_q.inverse();
    g_camera_pose_shader.set_camera_pose_and_scale( pose_q, pose_t, display_cam_size );
    g_camera_pose_shader.set_point_attr( 5, 0, 1.0 );
    g_camera_pose_shader.draw( g_gl_camera.m_gl_cam.m_glm_projection_mat, Common_tools::eigen2glm( g_gl_camera.m_gl_cam.m_camera_pose_mat44_inverse ),
                               -1 );
}
// Rebuild the trajectory polyline from every recorded pose up to (excluding)
// current_frame_idx, then draw it.
void draw_camera_trajectory( int current_frame_idx, float pt_disp_size )
{
    pt_camera_traj.clear();
    for ( int frame = 0; frame < current_frame_idx; frame++ )
    {
        const auto &pose_vec = g_eigen_vec_vec[ frame ].second;
        // Entries 4..6 hold the translation; require a fully populated pose.
        if ( pose_vec.size() >= 7 )
        {
            pt_camera_traj.push_back( pose_vec.block( 4, 0, 3, 1 ) );
        }
    }
    display_camera_traj( pt_disp_size );
}
| C++ |
3D | hku-mars/ImMesh | src/meshing/mesh_rec_geometry.cpp | .cpp | 16,170 | 435 | #include "mesh_rec_geometry.hpp"
#include "tinycolormap.hpp"
#include <pcl/io/ply_io.h>
#include <tbb/tbb.h>
#include <tbb/blocked_range.h>
#include <tbb/parallel_for.h>
#include <pcl/kdtree/kdtree_flann.h>
extern double g_color_val_min, g_color_val_max;
extern int g_force_update_flag;
extern Global_map g_map_rgb_pts_mesh;
extern Triangle_manager g_triangles_manager;
// extern Delaunay g_delaunay;
extern std::vector< std::pair< std::vector< vec_4 >, Eigen::Matrix< double, NUMBER_OF_POSE_SIZE, 1 > > > g_eigen_vec_vec;
double minimum_cell_volume = 0.2 * 0.2 * 0.2 * 0.0;
double minimum_height = 0.000;
double hit_scale = 0.10;
int skip_count = 0;
// Compute angle of vector(pb-pa) and (pc-pa)
// Compute the angle (in degrees) at vertex pa between vectors (pb - pa) and (pc - pa).
double compute_angle( vec_2 &pa, vec_2 &pb, vec_2 &pc )
{
    // Fix: use the exact radian-to-degree factor (180 / pi) instead of the
    // truncated 57.3 approximation used previously.
    constexpr double rad_to_deg = 57.29577951308232; // 180 / pi
    const vec_2 vec_ab = pb - pa;
    const vec_2 vec_ac = pc - pa;
    return acos( ( vec_ab.dot( vec_ac ) ) / ( vec_ab.norm() * vec_ac.norm() ) ) * rad_to_deg;
}
// Reject sliver faces: returns false when any interior angle of the 2D
// Delaunay face exceeds the maximum allowed angle (degrees).
bool is_face_is_ok( Common_tools::Delaunay2::Face &face, double maximum_angle )
{
    // NOTE(review): the incoming maximum_angle is deliberately(?) overridden
    // to 150 degrees, making the parameter effectively unused — confirm intent.
    maximum_angle = 150;
    if ( maximum_angle == 180 || maximum_angle <= 0 )
    {
        return true; // angle filtering disabled
    }
    vec_2 corner[ 3 ];
    for ( int idx = 0; idx < 3; idx++ )
    {
        corner[ idx ] = vec_2( face.vertex( idx )->point().x(), face.vertex( idx )->point().y() );
    }
    // Check the interior angle at each of the three vertices.
    for ( int apex = 0; apex < 3; apex++ )
    {
        const int other_a = ( apex + 1 ) % 3;
        const int other_b = ( apex + 2 ) % 3;
        if ( compute_angle( corner[ apex ], corner[ other_a ], corner[ other_b ] ) > maximum_angle )
        {
            return false;
        }
    }
    return true;
}
extern double g_kd_tree_accept_pt_dis;
// Smooth every point in the global map in parallel.
// @param smooth_factor blending weight forwarded to Global_map::smooth_pts
// @param knn           neighbor count used by the smoother
void smooth_all_pts( double smooth_factor, double knn )
{
    const long num_of_pt_size = g_map_rgb_pts_mesh.m_rgb_pts_vec.size();
    // Fix: keep the TBB grain size >= 1 — the previous num/12 underflows to 0
    // for maps with fewer than 12 points, which is an invalid grain size.
    const size_t grain_size = num_of_pt_size > 12 ? ( size_t ) ( num_of_pt_size / 12 ) : 1;
    tbb::parallel_for( tbb::blocked_range< size_t >( 0, num_of_pt_size, grain_size ), [&]( const tbb::blocked_range< size_t > &r ) {
        for ( long i = r.begin(); i != r.end(); i++ )
        {
            g_map_rgb_pts_mesh.smooth_pts( g_map_rgb_pts_mesh.m_rgb_pts_vec[ i ], smooth_factor, knn, g_kd_tree_accept_pt_dis );
        }
    } );
}
void save_to_ply_file( std::string ply_file, double smooth_factor, double knn )
{
// std::string ply_file = std::string("/home/ziv/temp/ply/rec_mesh_smooth.ply");
pcl::PolygonMesh mesh_obj;
cout << "Save to file " << ply_file << endl;
long num_of_pt_size = g_map_rgb_pts_mesh.m_rgb_pts_vec.size();
pcl::PointCloud< pcl::PointXYZ > rgb_cloud;
rgb_cloud.points.resize( num_of_pt_size );
tbb::parallel_for( tbb::blocked_range< size_t >( 0, num_of_pt_size, num_of_pt_size / 12 ), [&]( const tbb::blocked_range< size_t > &r ) {
for ( long i = r.begin(); i != r.end(); i++ )
{
vec_3 pt_vec_smoothed;
if(smooth_factor!= 0)
{
pt_vec_smoothed = g_map_rgb_pts_mesh.smooth_pts( g_map_rgb_pts_mesh.m_rgb_pts_vec[ i ], smooth_factor, knn, g_kd_tree_accept_pt_dis );
}
else
{
pt_vec_smoothed = g_map_rgb_pts_mesh.m_rgb_pts_vec[ i ]->get_pos(0);
}
rgb_cloud.points[ i ].x = pt_vec_smoothed( 0 );
rgb_cloud.points[ i ].y = pt_vec_smoothed( 1 );
rgb_cloud.points[ i ].z = pt_vec_smoothed( 2 );
}
} );
int pt_idx = 0;
std::vector< Triangle_set > triangle_set_vec;
int total_size = g_triangles_manager.get_all_triangle_list( triangle_set_vec, nullptr, 0 );
mesh_obj.polygons.reserve( total_size );
for ( int vec_idx = 0; vec_idx < triangle_set_vec.size(); vec_idx++ )
{
for ( Triangle_set::iterator it = triangle_set_vec[ vec_idx ].begin(); it != triangle_set_vec[ vec_idx ].end(); it++ )
{
pcl::Vertices face_pcl;
if ( ( *it )->m_vis_score < 0 )
{
continue;
}
if ( !( *it )->m_index_flip == 0 )
{
face_pcl.vertices.push_back( ( *it )->m_tri_pts_id[ 0 ] );
face_pcl.vertices.push_back( ( *it )->m_tri_pts_id[ 1 ] );
face_pcl.vertices.push_back( ( *it )->m_tri_pts_id[ 2 ] );
}
else
{
face_pcl.vertices.push_back( ( *it )->m_tri_pts_id[ 0 ] );
face_pcl.vertices.push_back( ( *it )->m_tri_pts_id[ 2 ] );
face_pcl.vertices.push_back( ( *it )->m_tri_pts_id[ 1 ] );
}
mesh_obj.polygons.push_back( face_pcl );
}
}
pcl::toPCLPointCloud2( rgb_cloud, mesh_obj.cloud );
// int ok = CGAL::IO::write_PLY( ply_file, pts_vec, polygons );
pcl::io::savePLYFileBinary( ply_file, mesh_obj );
pcl::io::savePCDFileBinary( std::string(ply_file).append(".pcd"), rgb_cloud );
cout << "=== Save to " << ply_file << ", finish !!! === " << endl;
}
extern int g_current_frame;
extern std::vector< vec_3 > dbg_line_vec;
extern std::mutex dbg_line_mutex;
void triangle_compare( const Triangle_set &remove_triangles, const std::vector< long > &add_triangles, Triangle_set &res_remove_triangles,
Triangle_set &res_add_triangles, Triangle_set *exist_triangles )
{
Hash_map_3d< long, std::pair< Triangle_ptr, bool > > all_remove_triangles_list;
for ( const Triangle_ptr &tri_ptr : remove_triangles )
{
all_remove_triangles_list.insert( tri_ptr->m_tri_pts_id[ 0 ], tri_ptr->m_tri_pts_id[ 1 ], tri_ptr->m_tri_pts_id[ 2 ],
std::make_pair( tri_ptr, true ) );
}
for ( int i = 0; i < add_triangles.size(); i += 3 )
{
Triangle tri( add_triangles[ i ], add_triangles[ i + 1 ], add_triangles[ i + 2 ] );
std::pair< Triangle_ptr, bool > *temp_pair_ptr =
all_remove_triangles_list.get_data( tri.m_tri_pts_id[ 0 ], tri.m_tri_pts_id[ 1 ], tri.m_tri_pts_id[ 2 ] );
if ( temp_pair_ptr != nullptr )
{
temp_pair_ptr->second = false;
if ( exist_triangles != nullptr )
{
exist_triangles->insert( temp_pair_ptr->first );
}
}
else
{
res_add_triangles.insert( std::make_shared< Triangle >( tri ) );
}
}
for ( auto &it : all_remove_triangles_list.m_map_3d_hash_map )
{
if ( it.second.second )
{
res_remove_triangles.insert( it.second.first );
}
}
}
// Project a set of 3D map points onto their best-fit plane and run a 2D
// Delaunay triangulation (CGAL) in that plane.
// @param rgb_pt_vec         points to triangulate (fewer than 3 -> empty result)
// @param long_axis/mid_axis/short_axis  plane frame; if short_axis is zero, all
//        three axes are estimated here by PCA and written back
// @param convex_hull_index  [out] global indices of points on the 2D convex hull
// @param inner_hull_index   [out] global indices of the remaining points
// @return flat list of global point indices, 3 per accepted triangle
std::vector< long > delaunay_triangulation( std::vector< RGB_pt_ptr > &rgb_pt_vec, vec_3 &long_axis, vec_3 &mid_axis, vec_3 &short_axis,
                                            std::set< long > &convex_hull_index, std::set< long > &inner_hull_index )
{
    std::vector< int > triangle_indices;
    std::vector< long > tri_rgb_pt_indices;
    Common_tools::Timer tim;
    tim.tic();
    int pt_size = rgb_pt_vec.size();
    Eigen::MatrixXd pc_mat;
    pc_mat.resize( pt_size, 3 );
    if ( rgb_pt_vec.size() < 3 )
    {
        return tri_rgb_pt_indices;
    }
    for ( int i = 0; i < rgb_pt_vec.size(); i++ )
    {
        pc_mat.row( i ) = rgb_pt_vec[ i ]->get_pos();
    }
    // Center the cloud, then (if no frame was supplied) estimate the plane
    // frame by PCA of the covariance: smallest eigenvector = plane normal.
    vec_3 pc_center = pc_mat.colwise().mean().transpose();
    Eigen::MatrixXd pt_sub_center = pc_mat.rowwise() - pc_center.transpose();
    if ( short_axis.norm() == 0 )
    {
        Eigen::Matrix3d cov = ( pt_sub_center.transpose() * pt_sub_center ) / double( pc_mat.rows() );
        Eigen::SelfAdjointEigenSolver< Eigen::Matrix3d > eigen_solver;
        eigen_solver.compute( cov );
        short_axis = eigen_solver.eigenvectors().col( 0 );
        mid_axis = eigen_solver.eigenvectors().col( 1 );
        // vec_3 long_axis = es.eigenvectors().col(2);
        // Orient the axes consistently using the first two points.
        if ( pt_sub_center.row( 0 ).dot( short_axis ) < 0 )
        {
            short_axis *= -1;
        }
        if ( pt_sub_center.row( 1 ).dot( mid_axis ) < 0 )
        {
            mid_axis *= -1;
        }
        long_axis = short_axis.cross( mid_axis );
    }
    tim.tic();
    // Project each point into 2D plane coordinates (long_axis, mid_axis).
    std::vector< std::pair< Common_tools::D2_Point, long > > points;
    points.resize( rgb_pt_vec.size() );
    std::vector< Common_tools::D2_Point > pts_for_hull( rgb_pt_vec.size() );
    // NOTE(review): `indices` is sized BEFORE the (currently disabled)
    // off-plane filter below can shrink pts_for_hull; if that filter is ever
    // re-enabled, `indices` must be resized to avail_idx too, or convex_hull_2
    // will read out of bounds.
    std::vector< std::size_t > indices( pts_for_hull.size() );
    int avail_idx = 0;
    for ( int i = 0; i < rgb_pt_vec.size(); i++ )
    {
        // ANCHOR - remove off plane points
        // if(pt_sub_center.row( i ).dot( short_axis ) > 0.1 )
        // {
        //     continue;
        // }
        Common_tools::D2_Point cgal_pt = Common_tools::D2_Point( pt_sub_center.row( i ).dot( long_axis ), pt_sub_center.row( i ).dot( mid_axis ) );
        points[ avail_idx ] = std::make_pair( cgal_pt, rgb_pt_vec[ i ]->m_pt_index );
        pts_for_hull[ avail_idx ] = cgal_pt;
        avail_idx++;
    }
    points.resize( avail_idx );
    pts_for_hull.resize( avail_idx );
    std::iota( indices.begin(), indices.end(), 0 );
    std::vector< std::size_t > out;
    // Split the points into convex-hull and interior sets (by global index).
    if ( 1 )
    {
        CGAL::convex_hull_2( indices.begin(), indices.end(), std::back_inserter( out ),
                             Common_tools::Convex_hull_traits_2( CGAL::make_property_map( pts_for_hull ) ) );
        for ( auto p : out )
        {
            convex_hull_index.insert( points[ p ].second );
        }
        for ( auto p : points )
        {
            if ( convex_hull_index.find( p.second ) == convex_hull_index.end() )
            {
                inner_hull_index.insert( p.second );
            }
        }
    }
    // 2D Delaunay triangulation; each vertex carries the global point index.
    Common_tools::Delaunay2 T;
    T.insert( points.begin(), points.end() );
    Common_tools::Delaunay2::Finite_faces_iterator fit;
    Common_tools::Delaunay2::Face face;
    if ( T.number_of_faces() == 0 )
    {
        return tri_rgb_pt_indices;
    }
    tri_rgb_pt_indices.resize( T.number_of_faces() * 3 );
    long idx = 0;
    for ( fit = T.finite_faces_begin(); fit != T.finite_faces_end(); fit++ )
    {
        face = *fit;
        double max_angle = 180;
        int hull_count = 0;
        for ( int pt_idx = 0; pt_idx < 3; pt_idx++ )
        {
            if ( convex_hull_index.find( face.vertex( pt_idx )->info() ) != convex_hull_index.end() )
            {
                hull_count++;
            }
        }
        // NOTE(review): this branch re-assigns the value max_angle already
        // holds, so the hull_count test is currently a no-op — confirm whether
        // a stricter angle for hull faces was intended.
        if ( hull_count >= 1 )
        {
            max_angle = 180;
        }
        if ( !is_face_is_ok( face, max_angle ) )
        {
            continue;
        }
        tri_rgb_pt_indices[ idx + 0 ] = face.vertex( 0 )->info();
        tri_rgb_pt_indices[ idx + 1 ] = face.vertex( 1 )->info();
        tri_rgb_pt_indices[ idx + 2 ] = face.vertex( 2 )->info();
        idx += 3;
    }
    tri_rgb_pt_indices.resize( idx );
    char dbg_str[ 1024 ];
    sprintf( dbg_str, "Cost time=%.2f ms, pts=%d, tri=%d", tim.toc(), ( int ) tri_rgb_pt_indices.size(), ( int ) tri_rgb_pt_indices.size() / 3 );
    // g_debug_string = std::string( dbg_str );
    // cout << g_debug_string << endl;
    return tri_rgb_pt_indices;
}
// Configuration / state shared with other translation units.
extern std::string bin_file_name;
extern double minimum_pts;
extern double g_meshing_voxel_size;
// Log-file handles; opened elsewhere, nullptr until then.
FILE * g_fp_cost_time = nullptr;
FILE * g_fp_lio_state = nullptr;
extern bool g_flag_pause;
extern const int number_of_frame;
extern int appending_pts_frame;
// ANCHOR - mesh_reconstruction
// Global container of triangles currently accepted into the mesh.
Triangle_manager legal_triangles;
std::vector< RGB_pt_ptr > retrieve_neighbor_pts( const std::vector< RGB_pt_ptr > &rgb_pts_vec )
{
std::vector< RGB_pt_ptr > res_pts_vec;
std::set< long > neighbor_indices;
for ( int i = 0; i < rgb_pts_vec.size(); i++ )
{
int idx_a = rgb_pts_vec[ i ]->m_pt_index;
neighbor_indices.insert( idx_a );
for ( Triangle_set::iterator it = legal_triangles.m_map_pt_triangle[ idx_a ].begin(); it != legal_triangles.m_map_pt_triangle[ idx_a ].end();
it++ )
{
neighbor_indices.insert( ( *it )->m_tri_pts_id[ 0 ] );
neighbor_indices.insert( ( *it )->m_tri_pts_id[ 1 ] );
neighbor_indices.insert( ( *it )->m_tri_pts_id[ 2 ] );
}
}
res_pts_vec.reserve( neighbor_indices.size() );
for ( std::set< long >::iterator it = neighbor_indices.begin(); it != neighbor_indices.end(); it++ )
{
res_pts_vec.push_back( g_map_rgb_pts_mesh.m_rgb_pts_vec[ *it ] );
}
// cout << "Before retrieve: " << rgb_pts_vec.size() << ", appended = " <<res_pts_vec.size() << endl;
return res_pts_vec;
}
// ANCHOR - retrieve_neighbor_pts_kdtree
float smooth_factor = 1.0;
double g_kd_tree_accept_pt_dis = 0.32;
// Query the global kd-tree around every input point, collect all map points
// closer than g_kd_tree_accept_pt_dis, and refresh each input point's
// smoothed position with the smooth_factor-blended centroid of the neighbors
// found within twice that radius. Returns the de-duplicated neighbor points
// fetched from the global map.
//
// Fix: previously `smooth_vec /= smooth_count` executed unconditionally, so a
// query with no neighbor inside 2x the accept radius (e.g. a sparse or empty
// kd-tree) divided by zero and stored a NaN smoothed position.
std::vector< RGB_pt_ptr > retrieve_neighbor_pts_kdtree( const std::vector< RGB_pt_ptr > &rgb_pts_vec )
{
    std::vector< RGB_pt_ptr > res_pt_vec;
    std::set< long >          new_pts_index;
    KDtree_pt_vector          kdtree_pt_vector;
    std::vector< float >      pt_dis_vector;
    // Accept radius scales with the meshing voxel size.
    // g_kd_tree_accept_pt_dis = g_meshing_voxel_size * 0.8;
    g_kd_tree_accept_pt_dis = g_meshing_voxel_size * 1.25;
    for ( int i = 0; i < rgb_pts_vec.size(); i++ )
    {
        std::vector< int >   indices;
        std::vector< float > distances;
        vec_3                pt_vec = rgb_pts_vec[ i ]->get_pos();
        KDtree_pt            kdtree_pt( pt_vec );
        g_map_rgb_pts_mesh.m_kdtree.Nearest_Search( kdtree_pt, 20, kdtree_pt_vector, pt_dis_vector );
        int   size = kdtree_pt_vector.size();
        vec_3 smooth_vec = vec_3( 0, 0, 0 );
        int   smooth_count = 0;
        for ( int k = 0; k < size; k++ )
        {
            // pt_dis_vector appears to hold squared distances (sqrt before
            // comparing) — TODO confirm against the kd-tree implementation.
            if ( sqrt( pt_dis_vector[ k ] ) < g_kd_tree_accept_pt_dis )
            {
                new_pts_index.insert( kdtree_pt_vector[ k ].m_pt_idx );
            }
            if ( sqrt( pt_dis_vector[ k ] ) < g_kd_tree_accept_pt_dis * 2 )
            {
                smooth_count++;
                smooth_vec += g_map_rgb_pts_mesh.m_rgb_pts_vec[ kdtree_pt_vector[ k ].m_pt_idx ]->get_pos();
            }
        }
        if ( smooth_count > 0 )
        {
            smooth_vec /= smooth_count;
            smooth_vec = smooth_vec * ( smooth_factor ) + pt_vec * ( 1 - smooth_factor );
        }
        else
        {
            // No neighbor inside 2x the accept radius: keep the original
            // position instead of producing a 0/0 NaN.
            smooth_vec = pt_vec;
        }
        rgb_pts_vec[ i ]->set_smooth_pos( smooth_vec );
    }
    for ( auto p : new_pts_index )
    {
        res_pt_vec.push_back( g_map_rgb_pts_mesh.m_rgb_pts_vec[ p ] );
    }
    return res_pt_vec;
}
// Intended to drop "inner" points whose parent voxel differs from voxel_ptr.
// NOTE(review): the final `res_pt_vec = rgb_pts_vec;` overwrites the filtered
// result, so the rejection loop above is effectively disabled and the input
// vector is returned unchanged — confirm whether this is intentional.
std::vector< RGB_pt_ptr > remove_outlier_pts( const std::vector< RGB_pt_ptr > &rgb_pts_vec, const RGB_voxel_ptr &voxel_ptr )
{
    int remove_count = 0;
    std::vector< RGB_pt_ptr > res_pt_vec;
    for ( int i = 0; i < rgb_pts_vec.size(); i++ )
    {
        if ( rgb_pts_vec[ i ]->m_is_inner_pt == 1 )
        {
            // Inner points belonging to a different voxel are treated as outliers.
            if ( rgb_pts_vec[ i ]->m_parent_voxel != voxel_ptr )
            {
                remove_count++;
                continue;
            }
        }
        res_pt_vec.push_back( rgb_pts_vec[ i ] );
    }
    res_pt_vec = rgb_pts_vec; // Disables the filtering above: all input points are returned.
    return res_pt_vec;
}
// Recompute a triangle's normal and decide its vertex winding.
// The normal is the cross product of two edges (degenerate triangles fall
// back to +z). The winding flag is chosen so the face is oriented toward the
// camera along the plane's short axis, and the stored normal is finally
// forced into the +z hemisphere.
void correct_triangle_index( Triangle_ptr &ptr, const vec_3 &camera_center, const vec_3 &_short_axis )
{
    const vec_3 vtx_a = g_map_rgb_pts_mesh.m_rgb_pts_vec[ ptr->m_tri_pts_id[ 0 ] ]->get_pos( 1 );
    const vec_3 vtx_b = g_map_rgb_pts_mesh.m_rgb_pts_vec[ ptr->m_tri_pts_id[ 1 ] ]->get_pos( 1 );
    const vec_3 vtx_c = g_map_rgb_pts_mesh.m_rgb_pts_vec[ ptr->m_tri_pts_id[ 2 ] ]->get_pos( 1 );
    const vec_3 edge_ab = vtx_b - vtx_a;
    const vec_3 edge_ac = vtx_c - vtx_a;
    const vec_3 to_camera = camera_center - vtx_a;

    // Face normal; a zero-area triangle gets the default +z normal.
    ptr->m_normal = edge_ab.cross( edge_ac );
    if ( ptr->m_normal.norm() == 0 )
    {
        ptr->m_normal = vec_3( 0, 0, 1 );
    }
    else
    {
        ptr->m_normal.normalize();
    }

    // Point the short axis toward the camera, then compare against the normal
    // to decide whether the triangle's index order must be flipped.
    vec_3 axis_to_cam = _short_axis;
    if ( axis_to_cam.dot( to_camera ) < 0 )
    {
        axis_to_cam = -axis_to_cam;
    }
    ptr->m_index_flip = ( axis_to_cam.dot( ptr->m_normal ) < 0 ) ? 0 : 1;

    // Keep the stored normal in the upper (+z) hemisphere.
    if ( ptr->m_normal( 2 ) < 0 )
    {
        ptr->m_normal = -ptr->m_normal;
    }
}
| C++ |
3D | hku-mars/ImMesh | src/meshing/delaunay/openCV_subdiv2d_index.hpp | .hpp | 1,560 | 52 | #pragma once
#include <opencv2/opencv.hpp>
// cv::Subdiv2D subclass that exposes triangle *vertex indices* (relative to
// the insertion order of the user's points) instead of vertex coordinates, so
// a Delaunay triangulation can be mapped back to the original point set.
class Subdiv2DIndex : public cv::Subdiv2D
{
  public :
  // Same contract as cv::Subdiv2D: `rectangle` bounds all points to insert.
  Subdiv2DIndex(cv::Rect rectangle);
  //Source code of Subdiv2D: https://github.com/opencv/opencv/blob/master/modules/imgproc/src/subdivision2d.cpp#L762
  //The implementation tweaks getTrianglesList() so that only the indice of the triangle inside the image are returned
  void getTrianglesIndices(std::vector<int> &ind) const;
};
// Forward the bounding rectangle straight to the cv::Subdiv2D base.
Subdiv2DIndex::Subdiv2DIndex(cv::Rect rectangle) : cv::Subdiv2D{rectangle}
{
}
// Collect the vertex indices of every Delaunay triangle whose three corners
// lie inside the subdivision rectangle. edgeOrg() returns internal vertex
// ids; the first 4 ids belong to Subdiv2D's bounding "virtual" vertices,
// hence the `- 4` offset (see the upstream subdivision2d.cpp link in the
// class comment — TODO confirm against the exact OpenCV version in use).
// Triangles touching virtual vertices are dropped by the rect.contains()
// filter.
void Subdiv2DIndex::getTrianglesIndices(std::vector<int> &triangleList) const
{
    triangleList.clear();
    int i, total = (int)(qedges.size() * 4);
    std::vector<bool> edgemask(total, false);
    const bool filterPoints = true;
    cv::Rect2f rect(topLeft.x, topLeft.y, bottomRight.x - topLeft.x, bottomRight.y - topLeft.y);
    // Quad-edge ids start at 4; step 2 visits each directed edge once.
    for (i = 4; i < total; i += 2)
    {
        if (edgemask[i])
            continue;
        cv::Point2f a, b, c;
        int edge_a = i;
        int indexA = edgeOrg(edge_a, &a) -4;
        if (filterPoints && !rect.contains(a))
            continue;
        // Walk around the left face of the edge to reach the other corners.
        int edge_b = getEdge(edge_a, NEXT_AROUND_LEFT);
        int indexB = edgeOrg(edge_b, &b) - 4;
        if (filterPoints && !rect.contains(b))
            continue;
        int edge_c = getEdge(edge_b, NEXT_AROUND_LEFT);
        int indexC = edgeOrg(edge_c, &c) - 4;
        if (filterPoints && !rect.contains(c))
            continue;
        // Mark all three edges so this triangle is emitted only once.
        edgemask[edge_a] = true;
        edgemask[edge_b] = true;
        edgemask[edge_c] = true;
        triangleList.push_back(indexA);
        triangleList.push_back(indexB);
        triangleList.push_back(indexC);
    }
}
| Unknown |
3D | hku-mars/ImMesh | src/meshing/optical_flow/lkpyramid.cpp | .cpp | 34,391 | 821 | /*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2000, Intel Corporation, all rights reserved.
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include <float.h>
#include <stdio.h>
#include "lkpyramid.hpp"
#include "tools_logger.hpp"
#include <omp.h>
#define CV_DESCALE(x, n) (((x) + (1 << ((n)-1))) >> (n))
using namespace cv;
using std::cout;
using std::endl;
typedef float acctype;
typedef float itemtype;
// Compute per-pixel spatial derivatives of an 8-bit image with the 3/10/3
// Scharr-like separable kernel used by OpenCV's pyramidal LK tracker.
// `dst` stores x- and y-derivatives interleaved: 2 deriv_type (short)
// channels per input channel. Two passes per row: a vertical
// smooth/difference pass into two temp rows, then a horizontal pass that
// interleaves Ix and Iy. SIMD universal intrinsics handle the bulk of each
// row when SSE2/NEON is available.
//
// Fix: the lazy (re)allocation previously used CV_MAKETYPE(depth, colsn * 2)
// — i.e. cols*cn*2 channels — which exceeds OpenCV's channel limit for any
// realistic image. The correct per-pixel channel count is cn * 2, matching
// the buffers pre-allocated by allocate_img_deriv_memory and upstream
// OpenCV's calcSharrDeriv.
inline void calc_sharr_deriv(const cv::Mat &src, cv::Mat &dst)
{
    // printf_line;
    int rows = src.rows, cols = src.cols, cn = src.channels(), colsn = cols * cn, depth = src.depth();
    CV_Assert(depth == CV_8U);
    if (dst.rows != rows || dst.cols != cols)
    {
        // 2 derivative channels (Ix, Iy) per source channel.
        dst.create(rows, cols, CV_MAKETYPE(DataType<deriv_type>::depth, cn * 2));
    }
    // Two aligned temp rows (vertical-pass output), padded by cn on each side.
    int x, y, delta = (int)alignSize((cols + 2) * cn, 16);
    AutoBuffer<deriv_type> _tempBuf(delta * 2 + 64);
    deriv_type *trow0 = alignPtr(_tempBuf + cn, 16), *trow1 = alignPtr(trow0 + delta, 16);
    // #if CV_SIMD128
    v_int16x8 c3 = v_setall_s16(3), c10 = v_setall_s16(10);
    bool haveSIMD = checkHardwareSupport(CV_CPU_SSE2) || checkHardwareSupport(CV_CPU_NEON);
    // #endif
    for (y = 0; y < rows; y++)
    {
        // Clamped row neighbors (replicate border at top/bottom).
        const uchar *srow0 = src.ptr<uchar>(y > 0 ? y - 1 : (rows > 1 ? 1 : 0));
        const uchar *srow1 = src.ptr<uchar>(y);
        const uchar *srow2 = src.ptr<uchar>(y < rows - 1 ? y + 1 : (rows > 1 ? rows - 2 : 0));
        deriv_type *drow = dst.ptr<deriv_type>(y);
        // Vertical convolution: trow0 = 3/10/3 smoothing, trow1 = difference.
        x = 0;
        // #if CV_SIMD128
        if (haveSIMD)
        {
            for (; x <= colsn - 8; x += 8)
            {
                v_int16x8 s0 = v_reinterpret_as_s16(v_load_expand(srow0 + x));
                v_int16x8 s1 = v_reinterpret_as_s16(v_load_expand(srow1 + x));
                v_int16x8 s2 = v_reinterpret_as_s16(v_load_expand(srow2 + x));
                v_int16x8 t1 = s2 - s0;
                v_int16x8 t0 = (s0 + s2) * c3 + s1 * c10;
                v_store(trow0 + x, t0);
                v_store(trow1 + x, t1);
            }
        }
        // #endif
        for (; x < colsn; x++)
        {
            int t0 = (srow0[x] + srow2[x]) * 3 + srow1[x] * 10;
            int t1 = srow2[x] - srow0[x];
            trow0[x] = (deriv_type)t0;
            trow1[x] = (deriv_type)t1;
        }
        // Replicate a one-pixel border on both ends of the temp rows.
        int x0 = (cols > 1 ? 1 : 0) * cn, x1 = (cols > 1 ? cols - 2 : 0) * cn;
        for (int k = 0; k < cn; k++)
        {
            trow0[-cn + k] = trow0[x0 + k];
            trow0[colsn + k] = trow0[x1 + k];
            trow1[-cn + k] = trow1[x0 + k];
            trow1[colsn + k] = trow1[x1 + k];
        }
        // Horizontal convolution, interleave (Ix, Iy) and store to dst.
        x = 0;
        // #if CV_SIMD128
        if (haveSIMD)
        {
            for (; x <= colsn - 8; x += 8)
            {
                v_int16x8 s0 = v_load(trow0 + x - cn);
                v_int16x8 s1 = v_load(trow0 + x + cn);
                v_int16x8 s2 = v_load(trow1 + x - cn);
                v_int16x8 s3 = v_load(trow1 + x);
                v_int16x8 s4 = v_load(trow1 + x + cn);
                v_int16x8 t0 = s1 - s0;
                v_int16x8 t1 = ((s2 + s4) * c3) + (s3 * c10);
                v_store_interleave((drow + x * 2), t0, t1);
            }
        }
        // #endif
        for (; x < colsn; x++)
        {
            deriv_type t0 = (deriv_type)(trow0[x + cn] - trow0[x - cn]);
            deriv_type t1 = (deriv_type)((trow1[x + cn] + trow1[x - cn]) * 3 + trow1[x] * 10);
            drow[x * 2] = t0;
            drow[x * 2 + 1] = t1;
        }
    }
}
// Store every tracking parameter for a later per-range invocation of the
// pyramidal LK kernel (see operator() / calculate below).
opencv_LKTrackerInvoker::opencv_LKTrackerInvoker(
    const Mat *_prevImg, const Mat *_prevDeriv, const Mat *_nextImg,
    const Point2f *_prevPts, Point2f *_nextPts,
    uchar *_status, float *_err,
    Size _winSize, TermCriteria _criteria,
    int _level, int _maxLevel, int _flags, float _minEigThreshold)
    : prevImg(_prevImg),
      prevDeriv(_prevDeriv),
      nextImg(_nextImg),
      prevPts(_prevPts),
      nextPts(_nextPts),
      status(_status),
      err(_err),
      winSize(_winSize),
      criteria(_criteria),
      level(_level),
      maxLevel(_maxLevel),
      flags(_flags),
      minEigThreshold(_minEigThreshold)
{
}
// Single-level pyramidal Lucas-Kanade tracking for the point indices in
// `range`. Per point: (1) sample the prevImg patch and its derivatives around
// the point at this level, accumulating the 2x2 spatial-gradient matrix A;
// (2) reject the point when A is ill-conditioned (minimum eigenvalue below
// minEigThreshold, or near-zero determinant); (3) iterate the LK update until
// criteria.maxCount iterations or the step falls below criteria.epsilon;
// (4) on level 0, optionally fill `err` with either the minimum eigenvalue or
// the mean absolute patch difference. Uses W_BITS fixed-point bilinear
// interpolation with an SSE2 fast path throughout.
inline void calculate_LK_optical_flow(const cv::Range &range, const Mat *prevImg, const Mat *prevDeriv, const Mat *nextImg,
                                      const Point2f *prevPts, Point2f *nextPts,
                                      uchar *status, float *err,
                                      Size winSize, TermCriteria criteria,
                                      int level, int maxLevel, int flags, float minEigThreshold)
{
    Point2f halfWin((winSize.width - 1) * 0.5f, (winSize.height - 1) * 0.5f);
    const Mat &I = *prevImg;
    const Mat &J = *nextImg;
    const Mat &derivI = *prevDeriv;
    int j, cn = I.channels(), cn2 = cn * 2;
    // Scratch: patch of I plus its interleaved (Ix, Iy) derivatives.
    cv::AutoBuffer<deriv_type> _buf(winSize.area() * (cn + cn2));
    int derivDepth = DataType<deriv_type>::depth;
    Mat IWinBuf(winSize, CV_MAKETYPE(derivDepth, cn), (deriv_type *)_buf);
    Mat derivIWinBuf(winSize, CV_MAKETYPE(derivDepth, cn2), (deriv_type *)_buf + winSize.area() * cn);
    for (int ptidx = range.start; ptidx < range.end; ptidx++)
    {
        // Scale the point into this pyramid level.
        Point2f prevPt = prevPts[ptidx] * (float)(1. / (1 << level));
        Point2f nextPt;
        if (level == maxLevel)
        {
            if (flags & OPTFLOW_USE_INITIAL_FLOW)
                nextPt = nextPts[ptidx] * (float)(1. / (1 << level));
            else
                nextPt = prevPt;
        }
        else
            nextPt = nextPts[ptidx] * 2.f;
        nextPts[ptidx] = nextPt;
        Point2i iprevPt, inextPt;
        prevPt -= halfWin;
        iprevPt.x = cvFloor(prevPt.x);
        iprevPt.y = cvFloor(prevPt.y);
        // Patch (partially) outside the image: mark lost on level 0.
        if (iprevPt.x < -winSize.width || iprevPt.x >= derivI.cols ||
            iprevPt.y < -winSize.height || iprevPt.y >= derivI.rows)
        {
            if (level == 0)
            {
                if (status)
                    status[ptidx] = false;
                if (err)
                    err[ptidx] = 0;
            }
            continue;
        }
        // Fixed-point bilinear interpolation weights (W_BITS fractional bits).
        float a = prevPt.x - iprevPt.x;
        float b = prevPt.y - iprevPt.y;
        const int W_BITS = 14, W_BITS1 = 14;
        const float FLT_SCALE = 1.f / (1 << 20);
        int iw00 = cvRound((1.f - a) * (1.f - b) * (1 << W_BITS));
        int iw01 = cvRound(a * (1.f - b) * (1 << W_BITS));
        int iw10 = cvRound((1.f - a) * b * (1 << W_BITS));
        int iw11 = (1 << W_BITS) - iw00 - iw01 - iw10;
        int dstep = (int)(derivI.step / derivI.elemSize1());
        int stepI = (int)(I.step / I.elemSize1());
        int stepJ = (int)(J.step / J.elemSize1());
        acctype iA11 = 0, iA12 = 0, iA22 = 0;
        float A11, A12, A22;
        // #if CV_SSE2
        __m128i qw0 = _mm_set1_epi32(iw00 + (iw01 << 16));
        __m128i qw1 = _mm_set1_epi32(iw10 + (iw11 << 16));
        __m128i z = _mm_setzero_si128();
        __m128i qdelta_d = _mm_set1_epi32(1 << (W_BITS1 - 1));
        __m128i qdelta = _mm_set1_epi32(1 << (W_BITS1 - 5 - 1));
        __m128 qA11 = _mm_setzero_ps(), qA12 = _mm_setzero_ps(), qA22 = _mm_setzero_ps();
        // #endif
        // extract the patch from the first image, compute covariation matrix of derivatives
        int x, y;
        for (y = 0; y < winSize.height; y++)
        {
            const uchar *src = I.ptr() + (y + iprevPt.y) * stepI + iprevPt.x * cn;
            const deriv_type *dsrc = derivI.ptr<deriv_type>() + (y + iprevPt.y) * dstep + iprevPt.x * cn2;
            deriv_type *Iptr = IWinBuf.ptr<deriv_type>(y);
            deriv_type *dIptr = derivIWinBuf.ptr<deriv_type>(y);
            x = 0;
            // #if CV_SSE2
            for (; x <= winSize.width * cn - 4; x += 4, dsrc += 4 * 2, dIptr += 4 * 2)
            {
                __m128i v00, v01, v10, v11, t0, t1;
                v00 = _mm_unpacklo_epi8(_mm_cvtsi32_si128(*(const int *)(src + x)), z);
                v01 = _mm_unpacklo_epi8(_mm_cvtsi32_si128(*(const int *)(src + x + cn)), z);
                v10 = _mm_unpacklo_epi8(_mm_cvtsi32_si128(*(const int *)(src + x + stepI)), z);
                v11 = _mm_unpacklo_epi8(_mm_cvtsi32_si128(*(const int *)(src + x + stepI + cn)), z);
                t0 = _mm_add_epi32(_mm_madd_epi16(_mm_unpacklo_epi16(v00, v01), qw0),
                                   _mm_madd_epi16(_mm_unpacklo_epi16(v10, v11), qw1));
                t0 = _mm_srai_epi32(_mm_add_epi32(t0, qdelta), W_BITS1 - 5);
                _mm_storel_epi64((__m128i *)(Iptr + x), _mm_packs_epi32(t0, t0));
                v00 = _mm_loadu_si128((const __m128i *)(dsrc));
                v01 = _mm_loadu_si128((const __m128i *)(dsrc + cn2));
                v10 = _mm_loadu_si128((const __m128i *)(dsrc + dstep));
                v11 = _mm_loadu_si128((const __m128i *)(dsrc + dstep + cn2));
                t0 = _mm_add_epi32(_mm_madd_epi16(_mm_unpacklo_epi16(v00, v01), qw0),
                                   _mm_madd_epi16(_mm_unpacklo_epi16(v10, v11), qw1));
                t1 = _mm_add_epi32(_mm_madd_epi16(_mm_unpackhi_epi16(v00, v01), qw0),
                                   _mm_madd_epi16(_mm_unpackhi_epi16(v10, v11), qw1));
                t0 = _mm_srai_epi32(_mm_add_epi32(t0, qdelta_d), W_BITS1);
                t1 = _mm_srai_epi32(_mm_add_epi32(t1, qdelta_d), W_BITS1);
                v00 = _mm_packs_epi32(t0, t1); // Ix0 Iy0 Ix1 Iy1 ...
                _mm_storeu_si128((__m128i *)dIptr, v00);
                t0 = _mm_srai_epi32(v00, 16);                     // Iy0 Iy1 Iy2 Iy3
                t1 = _mm_srai_epi32(_mm_slli_epi32(v00, 16), 16); // Ix0 Ix1 Ix2 Ix3
                __m128 fy = _mm_cvtepi32_ps(t0);
                __m128 fx = _mm_cvtepi32_ps(t1);
                qA22 = _mm_add_ps(qA22, _mm_mul_ps(fy, fy));
                qA12 = _mm_add_ps(qA12, _mm_mul_ps(fx, fy));
                qA11 = _mm_add_ps(qA11, _mm_mul_ps(fx, fx));
            }
            // #endif
            for (; x < winSize.width * cn; x++, dsrc += 2, dIptr += 2)
            {
                int ival = CV_DESCALE(src[x] * iw00 + src[x + cn] * iw01 +
                                          src[x + stepI] * iw10 + src[x + stepI + cn] * iw11,
                                      W_BITS1 - 5);
                int ixval = CV_DESCALE(dsrc[0] * iw00 + dsrc[cn2] * iw01 +
                                           dsrc[dstep] * iw10 + dsrc[dstep + cn2] * iw11,
                                       W_BITS1);
                int iyval = CV_DESCALE(dsrc[1] * iw00 + dsrc[cn2 + 1] * iw01 + dsrc[dstep + 1] * iw10 +
                                           dsrc[dstep + cn2 + 1] * iw11,
                                       W_BITS1);
                Iptr[x] = (short)ival;
                dIptr[0] = (short)ixval;
                dIptr[1] = (short)iyval;
                iA11 += (itemtype)(ixval * ixval);
                iA12 += (itemtype)(ixval * iyval);
                iA22 += (itemtype)(iyval * iyval);
            }
        }
        // Horizontal reduction of the SSE accumulators into iA11/iA12/iA22.
        // #if CV_SSE2
        float CV_DECL_ALIGNED(16) A11buf[4], A12buf[4], A22buf[4];
        _mm_store_ps(A11buf, qA11);
        _mm_store_ps(A12buf, qA12);
        _mm_store_ps(A22buf, qA22);
        iA11 += A11buf[0] + A11buf[1] + A11buf[2] + A11buf[3];
        iA12 += A12buf[0] + A12buf[1] + A12buf[2] + A12buf[3];
        iA22 += A22buf[0] + A22buf[1] + A22buf[2] + A22buf[3];
        // #endif
        A11 = iA11 * FLT_SCALE;
        A12 = iA12 * FLT_SCALE;
        A22 = iA22 * FLT_SCALE;
        // Conditioning check on the 2x2 gradient matrix (Shi-Tomasi style).
        float D = A11 * A22 - A12 * A12;
        float minEig = (A22 + A11 - std::sqrt((A11 - A22) * (A11 - A22) + 4.f * A12 * A12)) / (2 * winSize.width * winSize.height);
        if (err && (flags & OPTFLOW_LK_GET_MIN_EIGENVALS) != 0)
            err[ptidx] = (float)minEig;
        if (minEig < minEigThreshold || D < FLT_EPSILON)
        {
            if (level == 0 && status)
                status[ptidx] = false;
            continue;
        }
        D = 1.f / D;
        // Iterative LK refinement of nextPt.
        nextPt -= halfWin;
        Point2f prevDelta;
        for (j = 0; j < criteria.maxCount; j++)
        {
            inextPt.x = cvFloor(nextPt.x);
            inextPt.y = cvFloor(nextPt.y);
            if (inextPt.x < -winSize.width || inextPt.x >= J.cols ||
                inextPt.y < -winSize.height || inextPt.y >= J.rows)
            {
                if (level == 0 && status)
                    status[ptidx] = false;
                break;
            }
            a = nextPt.x - inextPt.x;
            b = nextPt.y - inextPt.y;
            iw00 = cvRound((1.f - a) * (1.f - b) * (1 << W_BITS));
            iw01 = cvRound(a * (1.f - b) * (1 << W_BITS));
            iw10 = cvRound((1.f - a) * b * (1 << W_BITS));
            iw11 = (1 << W_BITS) - iw00 - iw01 - iw10;
            acctype ib1 = 0, ib2 = 0;
            float b1, b2;
            // #if CV_SSE2
            qw0 = _mm_set1_epi32(iw00 + (iw01 << 16));
            qw1 = _mm_set1_epi32(iw10 + (iw11 << 16));
            __m128 qb0 = _mm_setzero_ps(), qb1 = _mm_setzero_ps();
            // #endif
            for (y = 0; y < winSize.height; y++)
            {
                const uchar *Jptr = J.ptr() + (y + inextPt.y) * stepJ + inextPt.x * cn;
                const deriv_type *Iptr = IWinBuf.ptr<deriv_type>(y);
                const deriv_type *dIptr = derivIWinBuf.ptr<deriv_type>(y);
                x = 0;
                // #if CV_SSE2
                for (; x <= winSize.width * cn - 8; x += 8, dIptr += 8 * 2)
                {
                    __m128i diff0 = _mm_loadu_si128((const __m128i *)(Iptr + x)), diff1;
                    __m128i v00 = _mm_unpacklo_epi8(_mm_loadl_epi64((const __m128i *)(Jptr + x)), z);
                    __m128i v01 = _mm_unpacklo_epi8(_mm_loadl_epi64((const __m128i *)(Jptr + x + cn)), z);
                    __m128i v10 = _mm_unpacklo_epi8(_mm_loadl_epi64((const __m128i *)(Jptr + x + stepJ)), z);
                    __m128i v11 = _mm_unpacklo_epi8(_mm_loadl_epi64((const __m128i *)(Jptr + x + stepJ + cn)), z);
                    __m128i t0 = _mm_add_epi32(_mm_madd_epi16(_mm_unpacklo_epi16(v00, v01), qw0),
                                               _mm_madd_epi16(_mm_unpacklo_epi16(v10, v11), qw1));
                    __m128i t1 = _mm_add_epi32(_mm_madd_epi16(_mm_unpackhi_epi16(v00, v01), qw0),
                                               _mm_madd_epi16(_mm_unpackhi_epi16(v10, v11), qw1));
                    t0 = _mm_srai_epi32(_mm_add_epi32(t0, qdelta), W_BITS1 - 5);
                    t1 = _mm_srai_epi32(_mm_add_epi32(t1, qdelta), W_BITS1 - 5);
                    diff0 = _mm_subs_epi16(_mm_packs_epi32(t0, t1), diff0);
                    diff1 = _mm_unpackhi_epi16(diff0, diff0);
                    diff0 = _mm_unpacklo_epi16(diff0, diff0);        // It0 It0 It1 It1 ...
                    v00 = _mm_loadu_si128((const __m128i *)(dIptr)); // Ix0 Iy0 Ix1 Iy1 ...
                    v01 = _mm_loadu_si128((const __m128i *)(dIptr + 8));
                    v10 = _mm_unpacklo_epi16(v00, v01);
                    v11 = _mm_unpackhi_epi16(v00, v01);
                    v00 = _mm_unpacklo_epi16(diff0, diff1);
                    v01 = _mm_unpackhi_epi16(diff0, diff1);
                    v00 = _mm_madd_epi16(v00, v10);
                    v11 = _mm_madd_epi16(v01, v11);
                    qb0 = _mm_add_ps(qb0, _mm_cvtepi32_ps(v00));
                    qb1 = _mm_add_ps(qb1, _mm_cvtepi32_ps(v11));
                }
                // #endif
                for (; x < winSize.width * cn; x++, dIptr += 2)
                {
                    int diff = CV_DESCALE(Jptr[x] * iw00 + Jptr[x + cn] * iw01 +
                                              Jptr[x + stepJ] * iw10 + Jptr[x + stepJ + cn] * iw11,
                                          W_BITS1 - 5) -
                               Iptr[x];
                    ib1 += (itemtype)(diff * dIptr[0]);
                    ib2 += (itemtype)(diff * dIptr[1]);
                }
            }
#if CV_SSE2
            float CV_DECL_ALIGNED(16) bbuf[4];
            _mm_store_ps(bbuf, _mm_add_ps(qb0, qb1));
            ib1 += bbuf[0] + bbuf[2];
            ib2 += bbuf[1] + bbuf[3];
#endif
            b1 = ib1 * FLT_SCALE;
            b2 = ib2 * FLT_SCALE;
            // Solve the 2x2 system A * delta = b (D is 1/det here).
            Point2f delta((float)((A12 * b2 - A22 * b1) * D),
                          (float)((A12 * b1 - A11 * b2) * D));
            //delta = -delta;
            nextPt += delta;
            nextPts[ptidx] = nextPt + halfWin;
            if (delta.ddot(delta) <= criteria.epsilon)
                break;
            // Oscillation detection: two consecutive near-opposite steps.
            if (j > 0 && std::abs(delta.x + prevDelta.x) < 0.01 &&
                std::abs(delta.y + prevDelta.y) < 0.01)
            {
                nextPts[ptidx] -= delta * 0.5f;
                break;
            }
            prevDelta = delta;
        }
        CV_Assert(status != NULL);
        // On the finest level, compute the mean absolute patch difference as
        // the error measure (unless min-eigenvalue output was requested).
        if (status[ptidx] && err && level == 0 && (flags & OPTFLOW_LK_GET_MIN_EIGENVALS) == 0)
        {
            Point2f nextPoint = nextPts[ptidx] - halfWin;
            Point inextPoint;
            inextPoint.x = cvFloor(nextPoint.x);
            inextPoint.y = cvFloor(nextPoint.y);
            if (inextPoint.x < -winSize.width || inextPoint.x >= J.cols ||
                inextPoint.y < -winSize.height || inextPoint.y >= J.rows)
            {
                if (status)
                    status[ptidx] = false;
                continue;
            }
            float aa = nextPoint.x - inextPoint.x;
            float bb = nextPoint.y - inextPoint.y;
            iw00 = cvRound((1.f - aa) * (1.f - bb) * (1 << W_BITS));
            iw01 = cvRound(aa * (1.f - bb) * (1 << W_BITS));
            iw10 = cvRound((1.f - aa) * bb * (1 << W_BITS));
            iw11 = (1 << W_BITS) - iw00 - iw01 - iw10;
            float errval = 0.f;
            for (y = 0; y < winSize.height; y++)
            {
                const uchar *Jptr = J.ptr() + (y + inextPoint.y) * stepJ + inextPoint.x * cn;
                const deriv_type *Iptr = IWinBuf.ptr<deriv_type>(y);
                for (x = 0; x < winSize.width * cn; x++)
                {
                    int diff = CV_DESCALE(Jptr[x] * iw00 + Jptr[x + cn] * iw01 +
                                              Jptr[x + stepJ] * iw10 + Jptr[x + stepJ + cn] * iw11,
                                          W_BITS1 - 5) -
                               Iptr[x];
                    errval += std::abs((float)diff);
                }
            }
            err[ptidx] = errval * 1.f / (32 * winSize.width * cn * winSize.height);
        }
    }
}
// cv::parallel_for_ entry point: track the points whose indices lie in `range`.
void opencv_LKTrackerInvoker::operator()(const cv::Range &range) const
{
    calculate_LK_optical_flow(range, prevImg, prevDeriv, nextImg,
                              prevPts, nextPts, status, err, winSize, criteria,
                              level, maxLevel, flags, minEigThreshold);
}
// Same work as operator(), exposed for direct/thread-pool invocation.
// Always returns true; per-point success is reported through `status`.
bool opencv_LKTrackerInvoker::calculate(cv::Range range) const
{
    calculate_LK_optical_flow(range, prevImg, prevDeriv, nextImg,
                              prevPts, nextPts, status, err, winSize, criteria,
                              level, maxLevel, flags, minEigThreshold);
    return true;
}
// Build a Gaussian image pyramid (optionally with interleaved Scharr
// derivative levels) padded by winSize on every side, mirroring OpenCV's
// buildOpticalFlowPyramid. Each stored level is an ROI into a larger padded
// Mat so the LK patch reads never fall outside allocated memory. Returns the
// number of the deepest level actually built, which may be smaller than
// maxLevel when a level becomes no larger than the LK window.
inline int opencv_buildOpticalFlowPyramid(InputArray _img, OutputArrayOfArrays pyramid, Size winSize, int maxLevel, bool withDerivatives,
                                          int pyrBorder, int derivBorder, bool tryReuseInputImage)
{
    Mat img = _img.getMat();
    CV_Assert(img.depth() == CV_8U && winSize.width > 2 && winSize.height > 2);
    // With derivatives, image and derivative levels alternate in `pyramid`.
    int pyrstep = withDerivatives ? 2 : 1;
#if (CV_MAJOR_VERSION==4)
    pyramid.create(1, (maxLevel + 1) * pyrstep, 0 /*type*/, -1, true);
#else
    pyramid.create(1, (maxLevel + 1) * pyrstep, 0 /*type*/, -1, true, 0);
#endif
    int derivType = CV_MAKETYPE(DataType<deriv_type>::depth, img.channels() * 2);
    //level 0
    // If the input is a submatrix already surrounded by enough border pixels,
    // reuse it directly instead of copying.
    bool lvl0IsSet = false;
    if (tryReuseInputImage && img.isSubmatrix() && (pyrBorder & BORDER_ISOLATED) == 0)
    {
        Size wholeSize;
        Point ofs;
        img.locateROI(wholeSize, ofs);
        if (ofs.x >= winSize.width && ofs.y >= winSize.height && ofs.x + img.cols + winSize.width <= wholeSize.width && ofs.y + img.rows + winSize.height <= wholeSize.height)
        {
            pyramid.getMatRef(0) = img;
            lvl0IsSet = true;
        }
    }
    if (!lvl0IsSet)
    {
        Mat &temp = pyramid.getMatRef(0);
        // Grow an existing ROI back to its padded extent before size checks.
        if (!temp.empty())
            temp.adjustROI(winSize.height, winSize.height, winSize.width, winSize.width);
        if (temp.type() != img.type() || temp.cols != winSize.width * 2 + img.cols || temp.rows != winSize.height * 2 + img.rows)
        {
            // printf_line;
            temp.create(img.rows + winSize.height * 2, img.cols + winSize.width * 2, img.type());
        }
        if (pyrBorder == BORDER_TRANSPARENT)
            img.copyTo(temp(Rect(winSize.width, winSize.height, img.cols, img.rows)));
        else
            copyMakeBorder(img, temp, winSize.height, winSize.height, winSize.width, winSize.width, pyrBorder);
        // Shrink back so the stored level is the un-padded image view.
        temp.adjustROI(-winSize.height, -winSize.height, -winSize.width, -winSize.width);
    }
    Size sz = img.size();
    Mat prevLevel = pyramid.getMatRef(0);
    Mat thisLevel = prevLevel;
    for (int level = 0; level <= maxLevel; ++level)
    {
        if (level != 0)
        {
            Mat &temp = pyramid.getMatRef(level * pyrstep);
            if (!temp.empty())
            {
                temp.adjustROI(winSize.height, winSize.height, winSize.width, winSize.width);
            }
            if (temp.type() != img.type() || temp.cols != winSize.width * 2 + sz.width || temp.rows != winSize.height * 2 + sz.height)
            {
                temp.create(sz.height + winSize.height * 2, sz.width + winSize.width * 2, img.type());
            }
            thisLevel = temp(Rect(winSize.width, winSize.height, sz.width, sz.height));
            // Downsample the previous level into this one.
            pyrDown(prevLevel, thisLevel, sz);
            if (pyrBorder != BORDER_TRANSPARENT)
                copyMakeBorder(thisLevel, temp, winSize.height, winSize.height, winSize.width, winSize.width, pyrBorder | BORDER_ISOLATED);
            temp.adjustROI(-winSize.height, -winSize.height, -winSize.width, -winSize.width);
        }
        if (withDerivatives)
        {
            Mat &deriv = pyramid.getMatRef(level * pyrstep + 1);
            if (!deriv.empty())
                deriv.adjustROI(winSize.height, winSize.height, winSize.width, winSize.width);
            if (deriv.type() != derivType || deriv.cols != winSize.width * 2 + sz.width || deriv.rows != winSize.height * 2 + sz.height)
                deriv.create(sz.height + winSize.height * 2, sz.width + winSize.width * 2, derivType);
            Mat derivI = deriv(Rect(winSize.width, winSize.height, sz.width, sz.height));
            calc_sharr_deriv(thisLevel, derivI);
            if (derivBorder != BORDER_TRANSPARENT)
                copyMakeBorder(derivI, deriv, winSize.height, winSize.height, winSize.width, winSize.width, derivBorder | BORDER_ISOLATED);
            deriv.adjustROI(-winSize.height, -winSize.height, -winSize.width, -winSize.width);
        }
        sz = Size((sz.width + 1) / 2, (sz.height + 1) / 2);
        // Stop early once the next level would not fit the LK window.
        if (sz.width <= winSize.width || sz.height <= winSize.height)
        {
#if (CV_MAJOR_VERSION==4)
            pyramid.create(1, (level + 1) * pyrstep, 0 /*type*/, -1, true); //check this
#else
            pyramid.create(1, (level + 1) * pyrstep, 0 /*type*/, -1, true, 0); //check this
#endif
            return level;
        }
        prevLevel = thisLevel;
    }
    return maxLevel;
}
void LK_optical_flow_kernel::allocate_img_deriv_memory(std::vector<Mat> &img_pyr,
std::vector<Mat> &img_pyr_deriv_I,
std::vector<Mat> &img_pyr_deriv_I_buff)
{
int derivDepth = cv::DataType<deriv_type>::depth;
img_pyr_deriv_I.resize(img_pyr.size());
img_pyr_deriv_I_buff.resize(img_pyr.size());
for (int level = m_maxLevel; level >= 0; level--)
{
if (img_pyr_deriv_I_buff[level].cols == 0)
{
// dI/dx ~ Ix, dI/dy ~ Iy
// Create the pyramid mat with add the padding.
img_pyr_deriv_I_buff[level].create(img_pyr[level].rows + m_lk_win_size.height * 2,
img_pyr[level].cols + m_lk_win_size.width * 2,
CV_MAKETYPE(derivDepth, img_pyr[level].channels() * 2));
}
}
}
// Compute the Scharr derivative image for every pyramid level. Each level's
// derivative is written into the interior ROI of the pre-allocated padded
// buffer, then the border region is filled so LK patch reads near the edge
// stay in allocated memory. Buffers are lazily allocated on first use.
void LK_optical_flow_kernel::calc_image_deriv_Sharr(std::vector<cv::Mat> &img_pyr,
                                                    std::vector<cv::Mat> &img_pyr_deriv_I,
                                                    std::vector<cv::Mat> &img_pyr_deriv_I_buff)
{
    if (img_pyr_deriv_I_buff.size() == 0 ||
        img_pyr_deriv_I_buff[0].size().width == 0 ||
        img_pyr_deriv_I_buff[0].size().height == 0)
    {
        allocate_img_deriv_memory(img_pyr, img_pyr_deriv_I, img_pyr_deriv_I_buff);
    }
    // Calculate Image derivative
    for (int level = m_maxLevel; level >= 0; level--)
    {
        cv::Size imgSize = img_pyr[level].size();
        // Wrap the raw buffer memory as a padded Mat, then expose its interior
        // as the derivative image for this level.
        cv::Mat _derivI(imgSize.height + m_lk_win_size.height * 2,
                        imgSize.width + m_lk_win_size.width * 2, img_pyr_deriv_I_buff[level].type(), img_pyr_deriv_I_buff[level].ptr());
        img_pyr_deriv_I[level] = _derivI(cv::Rect(m_lk_win_size.width, m_lk_win_size.height, imgSize.width, imgSize.height));
        calc_sharr_deriv(img_pyr[level], img_pyr_deriv_I[level]);
        cv::copyMakeBorder(img_pyr_deriv_I[level], _derivI, m_lk_win_size.height, m_lk_win_size.height, m_lk_win_size.width, m_lk_win_size.width, cv::BORDER_CONSTANT | cv::BORDER_ISOLATED);
    }
}
// Sanitize the member termination criteria: maxCount defaults to 30 when the
// COUNT flag is unset (otherwise clamped to [0, 100]); epsilon defaults to
// 0.01 when the EPS flag is unset (otherwise clamped to [0, 10]).
// NOTE(review): the `crit` parameter is never read — only
// m_terminate_criteria is sanitized. Confirm whether `crit` was meant to be
// assigned to the member first.
void LK_optical_flow_kernel::set_termination_criteria(cv::TermCriteria &crit)
{
    // Set the criteria of termination.
    if ((m_terminate_criteria.type & TermCriteria::COUNT) == 0)
    {
        m_terminate_criteria.maxCount = 30;
    }
    else
    {
        m_terminate_criteria.maxCount = std::min(std::max(m_terminate_criteria.maxCount, 0), 100);
    }
    if ((m_terminate_criteria.type & TermCriteria::EPS) == 0)
    {
        m_terminate_criteria.epsilon = 0.01;
    }
    else
    {
        m_terminate_criteria.epsilon = std::min(std::max(m_terminate_criteria.epsilon, 0.), 10.);
    }
    // m_terminate_criteria.epsilon *= m_terminate_criteria.epsilon;
}
// Full pyramidal LK pass: validate inputs, build (or rebuild) both image
// pyramids and the previous image's derivative pyramid, then refine each
// tracked point coarse-to-fine with a parallel per-point tracker.
// _prevPts must be a CV_32F 2-channel vector; _status/_err are allocated here.
void LK_optical_flow_kernel::calc(InputArray _prevImg, InputArray _nextImg,
                                  InputArray _prevPts, InputOutputArray _nextPts,
                                  OutputArray _status, OutputArray _err)
{
    Mat prevPtsMat = _prevPts.getMat();
    const int derivDepth = DataType<deriv_type>::depth;
    CV_Assert(m_maxLevel >= 0 && m_lk_win_size.width > 2 && m_lk_win_size.height > 2);
    int level = 0, i, n_points;
    CV_Assert((n_points = prevPtsMat.checkVector(2, CV_32F, true)) >= 0);
    if (n_points == 0)
    {
        // Nothing to track: clear the outputs and bail out.
        _nextPts.release();
        _status.release();
        _err.release();
        return;
    }
    if (!(flags & cv_OPTFLOW_USE_INITIAL_FLOW))
    {
        _nextPts.create(prevPtsMat.size(), prevPtsMat.type(), -1, true);
    }
    Mat nextPtsMat = _nextPts.getMat();
    CV_Assert(nextPtsMat.checkVector(2, CV_32F, true) == n_points);
    const Point2f *prevPts = prevPtsMat.ptr<Point2f>();
    Point2f *nextPts = nextPtsMat.ptr<Point2f>();
    _status.create((int)n_points, 1, CV_8U, -1, true);
    Mat statusMat = _status.getMat(), errMat;
    CV_Assert(statusMat.isContinuous());
    uchar *status = statusMat.ptr();
    float *err = 0;
    // All points start as successfully tracked; the kernel clears failures.
    for (i = 0; i < n_points; i++)
    {
        status[i] = true;
    }
    if (_err.needed())
    {
        _err.create((int)n_points, 1, CV_32F, -1, true);
        errMat = _err.getMat();
        CV_Assert(errMat.isContinuous());
        err = errMat.ptr<float>();
    }
    // Build both pyramids (m_maxLevel may shrink if levels get too small);
    // only the previous image needs spatial derivatives.
    m_maxLevel = opencv_buildOpticalFlowPyramid(_prevImg, m_prev_img_pyr, m_lk_win_size, m_maxLevel, false);
    m_maxLevel = opencv_buildOpticalFlowPyramid(_nextImg, m_curr_img_pyr, m_lk_win_size, m_maxLevel, false);
    calc_image_deriv_Sharr(m_prev_img_pyr, m_prev_img_deriv_I, m_prev_img_deriv_I_buff);
    // Coarse-to-fine refinement, parallelized over points at each level.
    for (level = m_maxLevel; level >= 0; level--)
    {
        // cout << "Image size = " << prevPyr[level * lvlStep1].size() << ", level = " << level << endl;
        CV_Assert(m_prev_img_pyr[level].size() == m_curr_img_pyr[level].size());
        CV_Assert(m_prev_img_pyr[level].type() == m_curr_img_pyr[level].type());
        parallel_for_(Range(0, n_points), opencv_LKTrackerInvoker(&m_prev_img_pyr[level], &m_prev_img_deriv_I[level],
                                                                  &m_curr_img_pyr[level], prevPts, nextPts,
                                                                  status, err,
                                                                  m_lk_win_size, m_terminate_criteria, level, m_maxLevel,
                                                                  flags, (float)minEigThreshold));
    }
}
// Exchange the "previous" and "current" pyramid / derivative buffers level by
// level so the next frame can reuse already-allocated memory (no realloc).
void LK_optical_flow_kernel::swap_image_buffer()
{
    for (int level = 0; level <= m_maxLevel; ++level)
    {
        std::swap(m_prev_img_pyr[level], m_curr_img_pyr[level]);
        std::swap(m_prev_img_deriv_I[level], m_curr_img_deriv_I[level]);
        std::swap(m_prev_img_deriv_I_buff[level], m_curr_img_deriv_I_buff[level]);
    }
}
// Toy worker used to exercise the thread pool: prints a greeting, naps for
// one second, prints again, and returns i squared.
int test_fun(int i, int j)
{
    const int tag = i + j;
    std::cout << "Way 0 hello " << tag << std::endl;
    std::this_thread::sleep_for(std::chrono::seconds(1));
    std::cout << "Way 0 world " << tag << std::endl;
    return i * i;
}
// Track `last_tracked_pts` from the previously stored frame into `curr_img`.
// Builds the current pyramid and derivatives, runs the LK kernel over all
// levels in parallel over points, then swaps the prev/curr buffers for the
// next call. On the very first frame the buffers are only initialized and the
// input points are returned unchanged. Returns the number of successfully
// tracked points (0 on the first frame). `opm_method` is currently unused.
int LK_optical_flow_kernel::track_image(const cv::Mat &curr_img, const std::vector<cv::Point2f> &last_tracked_pts,
                                        std::vector<cv::Point2f> &curr_tracked_pts,
                                        std::vector<uchar> &status, int opm_method)
{
    // Common_tools::Timer tim;
    // tim.tic();
    // printf_line;
    m_maxLevel = opencv_buildOpticalFlowPyramid(curr_img, m_curr_img_pyr, m_lk_win_size, m_maxLevel, false);
    calc_image_deriv_Sharr(m_curr_img_pyr, m_curr_img_deriv_I, m_curr_img_deriv_I_buff);
    if (m_prev_img_pyr.size() == 0 || (m_prev_img_pyr[0].cols == 0)) // The first img
    {
        // No previous frame yet: set up the buffers and pass points through.
        m_prev_img_pyr.resize(m_curr_img_pyr.size());
        allocate_img_deriv_memory(m_curr_img_pyr, m_prev_img_deriv_I, m_prev_img_deriv_I_buff);
        swap_image_buffer();
        curr_tracked_pts = last_tracked_pts;
        return 0;
    }
    // Seed the search with the previous positions; mark all as tracked.
    curr_tracked_pts = last_tracked_pts;
    status.resize(last_tracked_pts.size());
    for (int i = 0; i < last_tracked_pts.size(); i++)
    {
        status[i] = 1;
    }
    // Parallel over points; each worker runs all pyramid levels coarse-to-fine
    // for its point range (err output disabled: nullptr).
    cv::parallel_for_(
        Range(0, last_tracked_pts.size()), [&](const Range &range) {
            // cout << "Range " << range.start << ", " << range.end << endl;
            for (int level = m_maxLevel; level >= 0; level--) {
                calculate_LK_optical_flow(
                    range, &m_prev_img_pyr[level], &m_prev_img_deriv_I[level],
                    &m_curr_img_pyr[level], last_tracked_pts.data(),
                    curr_tracked_pts.data(), status.data(), 0, m_lk_win_size,
                    m_terminate_criteria, level, m_maxLevel, flags,
                    minEigThreshold);
            }
        });
    swap_image_buffer();
    // status holds 0/1 per point; the sum is the tracked-point count.
    return std::accumulate(status.begin(), status.end(), 0);
}
// Convenience wrapper: run one sparse pyramidal-LK pass with a freshly
// constructed kernel (parameter semantics match cv::calcOpticalFlowPyrLK).
void calculate_optical_flow(InputArray _prevImg, InputArray _nextImg,
                            InputArray _prevPts, InputOutputArray _nextPts,
                            OutputArray _status, OutputArray _err,
                            Size winSize, int maxLevel,
                            TermCriteria criteria,
                            int flags, double minEigThreshold)
{
    auto lk_kernel = cv::makePtr<LK_optical_flow_kernel>(winSize, maxLevel, criteria, flags, minEigThreshold);
    lk_kernel->calc(_prevImg, _nextImg, _prevPts, _nextPts, _status, _err);
}
| C++ |
3D | hku-mars/ImMesh | src/meshing/optical_flow/lkpyramid.hpp | .hpp | 6,321 | 135 | // This file is modified from lkpyramid.hpp of openCV
#pragma once
#include "opencv2/core.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/video/tracking.hpp"
#include "opencv2/imgproc.hpp"
#define CV_CPU_HAS_SUPPORT_SSE2 1
#define USING_OPENCV_TBB 1
#include "opencv2/core/hal/intrin.hpp"
#include "tools_logger.hpp"
#include "tools_timer.hpp"
#include <numeric>
#include <future>
#include "tools_thread_pool.hpp"
// Optical-flow option flags; values mirror OpenCV's cv::OPTFLOW_* constants.
enum
{
    cv_OPTFLOW_USE_INITIAL_FLOW = 4,     // use the content of nextPts as the initial flow estimate
    cv_OPTFLOW_LK_GET_MIN_EIGENVALS = 8, // return minimum eigenvalues as the per-point error measure
    cv_OPTFLOW_FARNEBACK_GAUSSIAN = 256  // Farneback: use a Gaussian window filter
};
// Element type of the Sharr image-derivative buffers.
typedef short deriv_type;
// Build an image pyramid suitable for the pyramidal LK tracker.
// Returns the index of the top level actually built (may be < maxLevel).
inline int opencv_buildOpticalFlowPyramid(cv::InputArray img, cv::OutputArrayOfArrays pyramid,
                                          cv::Size winSize, int maxLevel, bool withDerivatives = true,
                                          int pyrBorder = cv::BORDER_REFLECT_101,
                                          int derivBorder = cv::BORDER_CONSTANT,
                                          bool tryReuseInputImage = true);
// Compute the Sharr x/y derivatives of src into dst (interleaved deriv_type).
inline void calc_sharr_deriv(const cv::Mat &src, cv::Mat &dst);
// One-shot sparse LK optical flow (drop-in analogue of cv::calcOpticalFlowPyrLK).
void calculate_optical_flow(cv::InputArray prevImg, cv::InputArray nextImg,
                            cv::InputArray prevPts, cv::InputOutputArray nextPts,
                            cv::OutputArray status, cv::OutputArray err,
                            cv::Size winSize = cv::Size(21, 21), int maxLevel = 3,
                            cv::TermCriteria criteria = cv::TermCriteria(cv::TermCriteria::COUNT + cv::TermCriteria::EPS, 30, 0.01),
                            int flags = 0, double minEigThreshold = 1e-4);
// Refine the points in [range) at one pyramid level; writes nextPts/status/err.
inline int calculate_LK_optical_flow(const cv::Range &range, const cv::Mat &_prevImg, const cv::Mat &_prevDeriv, const cv::Mat &_nextImg,
                                     const cv::Point2f *_prevPts, cv::Point2f *_nextPts,
                                     uchar *_status, float *_err,
                                     cv::Size _winSize, cv::TermCriteria _criteria,
                                     int _level, int _maxLevel, int _flags, float _minEigThreshold);
// Parallel loop body that refines a range of tracked points at one pyramid
// level (adapted from OpenCV's LKTrackerInvoker). All pointers are
// non-owning views into caller-managed storage.
struct opencv_LKTrackerInvoker : cv::ParallelLoopBody
{
    opencv_LKTrackerInvoker(const cv::Mat *_prevImg, const cv::Mat *_prevDeriv, const cv::Mat *_nextImg,
                            const cv::Point2f *_prevPts, cv::Point2f *_nextPts,
                            uchar *_status, float *_err,
                            cv::Size _winSize, cv::TermCriteria _criteria,
                            int _level, int _maxLevel, int _flags, float _minEigThreshold);
    // Entry point invoked by cv::parallel_for_.
    void operator()(const cv::Range &range) const;
    // Does the actual per-point LK iteration for the given range.
    bool calculate( cv::Range range) const;
    const cv::Mat *prevImg;        // previous frame at this pyramid level
    const cv::Mat *nextImg;        // current frame at this pyramid level
    const cv::Mat *prevDeriv;      // Sharr derivatives of prevImg
    const cv::Point2f *prevPts;    // input point positions
    cv::Point2f *nextPts;          // output refined positions
    uchar *status;                 // per-point success flag (1 = tracked)
    float *err;                    // optional per-point error (may be null)
    cv::Size winSize;              // LK window size
    cv::TermCriteria criteria;     // iteration termination criteria
    int level;                     // current pyramid level
    int maxLevel;                  // top pyramid level
    int flags;                     // cv_OPTFLOW_* flags
    float minEigThreshold;         // reject points with smaller min eigenvalue
};
// Sparse pyramidal Lucas-Kanade tracker that caches the image pyramids and
// derivative buffers of the previous frame, so successive frames can be
// tracked without re-allocating memory (see track_image / swap_image_buffer).
class LK_optical_flow_kernel
{
  public:
    // --- Accessors for the tracker parameters ---
    cv::Size get_win_size() const { return m_lk_win_size; }
    void set_win_size(cv::Size winSize_) { m_lk_win_size = winSize_; }
    int get_max_level() const { return m_maxLevel; }
    void set_max_level(int maxLevel_) { m_maxLevel = maxLevel_; }
    cv::TermCriteria get_term_criteria() const { return m_terminate_criteria; }
    void set_term_criteria(cv::TermCriteria &crit_) { m_terminate_criteria = crit_; }
    int get_flags() const { return flags; }
    void set_flags(int flags_) { flags = flags_; }
    double get_min_eig_threshold() const { return minEigThreshold; }
    void set_min_eig_threshold(double minEigThreshold_) { minEigThreshold = minEigThreshold_; }
    LK_optical_flow_kernel(cv::Size winSize_ = cv::Size(21, 21),
                           int maxLevel_ = 3,
                           cv::TermCriteria criteria_ = cv::TermCriteria(cv::TermCriteria::COUNT + cv::TermCriteria::EPS, 30, 0.01),
                           int flags_ = 0,
                           double minEigThreshold_ = 1e-4
                           ) : m_lk_win_size(winSize_), m_maxLevel(maxLevel_), m_terminate_criteria(criteria_), flags(flags_), minEigThreshold(minEigThreshold_)
    {
        set_termination_criteria(m_terminate_criteria);
    }
    /**
    @brief Calculates a sparse optical flow.
    @param prevImg First input image.
    @param nextImg Second input image of the same cv::Size and the same type as prevImg.
    @param prevPts Vector of 2D points for which the flow needs to be found.
    @param nextPts Output vector of 2D points containing the calculated new positions of input features in the second image.
    @param status Output status vector. Each element of the vector is set to 1 if the
    flow for the corresponding features has been found. Otherwise, it is set to 0.
    @param err Optional output vector that contains error response for each point (inverse confidence).
    **/
    void calc(cv::InputArray prevImg, cv::InputArray nextImg,
              cv::InputArray prevPts, cv::InputOutputArray nextPts,
              cv::OutputArray status,
              cv::OutputArray err = cv::noArray());
    // Allocate (if needed) the derivative buffers matching img_pyr's shapes.
    void allocate_img_deriv_memory( std::vector<cv::Mat> &img_pyr,
                                    std::vector<cv::Mat> &img_pyr_deriv_I,
                                    std::vector<cv::Mat> &img_pyr_deriv_I_buff);
    // Compute Sharr derivatives for every level of img_pyr.
    void calc_image_deriv_Sharr(std::vector<cv::Mat> &img_pyr,
                                std::vector<cv::Mat> &img_pyr_deriv_I,
                                std::vector<cv::Mat> &img_pyr_deriv_I_buff);
    void set_termination_criteria(cv::TermCriteria &crit);
  public:
    // Previous / current frame pyramids and their derivative (+scratch) buffers.
    std::vector<cv::Mat> m_prev_img_pyr, m_curr_img_pyr;
    std::vector<cv::Mat> m_prev_img_deriv_I, m_prev_img_deriv_I_buff;
    std::vector<cv::Mat> m_curr_img_deriv_I, m_curr_img_deriv_I_buff;
    cv::Size m_lk_win_size;                // LK search window size
    int m_maxLevel;                        // top pyramid level index
    cv::TermCriteria m_terminate_criteria; // per-point iteration stop criteria
    int flags;                             // cv_OPTFLOW_* flags
    double minEigThreshold;                // minimum-eigenvalue rejection threshold
    // Swap prev/current buffers so already-allocated memory is reused.
    void swap_image_buffer();
    // Track last_tracked_pts into curr_img; returns the number of survivors.
    int track_image(const cv::Mat & curr_img, const std::vector<cv::Point2f> & last_tracked_pts, std::vector<cv::Point2f> & curr_tracked_pts,
                    std::vector<uchar> & status, int opm_method = 3 ); // opm_method: [0] openCV parallel_body [1] openCV parallel for [2] Thread pool
}; | Unknown |
3D | hku-mars/ImMesh | src/meshing/r3live/triangle.cpp | .cpp | 3,178 | 80 | #include "triangle.hpp"
vec_3 Triangle_manager::get_triangle_center(const Triangle_ptr& tri_ptr)
{
    // Centroid = mean of the three vertex positions, looked up by point id
    // in the global RGB point cloud.
    vec_3 centroid( 0, 0, 0 );
    for ( int k = 0; k < 3; k++ )
    {
        centroid += m_pointcloud_map->m_rgb_pts_vec[ tri_ptr->m_tri_pts_id[ k ] ]->get_pos();
    }
    return centroid / 3.0;
}
int Triangle_manager::get_all_triangle_list(std::vector< Triangle_set > & triangle_list, std::mutex * mutex, int sleep_us_each_query)
{
int all_triangle_num = 0;
triangle_list.clear();
for ( auto &sync_triangle_set : m_triangle_set_in_region.m_map_3d_hash_map )
{
if ( mutex != nullptr )
mutex->lock();
Triangle_set tri_set;
sync_triangle_set.second.get_triangle_set(tri_set) ;
triangle_list.push_back( tri_set );
all_triangle_num += triangle_list.back().size();
if ( mutex != nullptr )
mutex->unlock();
if ( sleep_us_each_query != 0 )
{
// std::this_thread::yield();
std::this_thread::sleep_for( std::chrono::microseconds( sleep_us_each_query ) );
}
}
return all_triangle_num;
}
void Triangle_manager::insert_triangle_to_list( const Triangle_ptr& tri_ptr , const int& frame_idx)
{
vec_3 triangle_pos = get_triangle_center( tri_ptr );
int hash_3d_x = std::round( triangle_pos( 0 ) / m_region_size );
int hash_3d_y = std::round( triangle_pos( 1 ) / m_region_size );
int hash_3d_z = std::round( triangle_pos( 2 ) / m_region_size );
Sync_triangle_set* sync_triangle_set_ptr = m_triangle_set_in_region.get_data( hash_3d_x, hash_3d_y, hash_3d_z );
if ( sync_triangle_set_ptr == nullptr )
{
sync_triangle_set_ptr = new Sync_triangle_set();
sync_triangle_set_ptr->insert( tri_ptr );
m_triangle_set_in_region.insert( hash_3d_x, hash_3d_y, hash_3d_z, *sync_triangle_set_ptr );
m_triangle_set_vector.push_back( m_triangle_set_in_region.get_data( hash_3d_x, hash_3d_y, hash_3d_z ) );
}
else
{
sync_triangle_set_ptr->insert( tri_ptr );
}
}
void Triangle_manager::erase_triangle_from_list( const Triangle_ptr& tri_ptr , const int & frame_idx)
{
vec_3 triangle_pos = get_triangle_center( tri_ptr );
int hash_3d_x = std::round( triangle_pos( 0 ) / m_region_size );
int hash_3d_y = std::round( triangle_pos( 1 ) / m_region_size );
int hash_3d_z = std::round( triangle_pos( 2 ) / m_region_size );
Sync_triangle_set* triangle_set_ptr = m_triangle_set_in_region.get_data( hash_3d_x, hash_3d_y, hash_3d_z );
if ( triangle_set_ptr == nullptr )
{
return;
}
else
{
triangle_set_ptr->erase( tri_ptr );
}
}
int Triangle_manager::get_triangle_list_size()
{
    // Total triangle count summed over all per-region synchronized sets.
    int total = 0;
    for ( auto& region : m_triangle_set_in_region.m_map_3d_hash_map )
    {
        total += region.second.get_triangle_set_size();
    }
    return total;
}
3D | hku-mars/ImMesh | src/meshing/r3live/pointcloud_rgbd.cpp | .cpp | 39,836 | 958 | /*
This code is the implementation of our paper "R3LIVE: A Robust, Real-time, RGB-colored,
LiDAR-Inertial-Visual tightly-coupled state Estimation and mapping package".
Author: Jiarong Lin < ziv.lin.ljr@gmail.com >
If you use any code of this repo in your academic research, please cite at least
one of our papers:
[1] Lin, Jiarong, and Fu Zhang. "R3LIVE: A Robust, Real-time, RGB-colored,
LiDAR-Inertial-Visual tightly-coupled state Estimation and mapping package."
[2] Xu, Wei, et al. "Fast-lio2: Fast direct lidar-inertial odometry."
[3] Lin, Jiarong, et al. "R2LIVE: A Robust, Real-time, LiDAR-Inertial-Visual
tightly-coupled state Estimator and mapping."
[4] Xu, Wei, and Fu Zhang. "Fast-lio: A fast, robust lidar-inertial odometry
package by tightly-coupled iterated kalman filter."
[5] Cai, Yixi, Wei Xu, and Fu Zhang. "ikd-Tree: An Incremental KD Tree for
Robotic Applications."
[6] Lin, Jiarong, and Fu Zhang. "Loam-livox: A fast, robust, high-precision
LiDAR odometry and mapping package for LiDARs of small FoV."
For commercial use, please contact me < ziv.lin.ljr@gmail.com > and
Dr. Fu Zhang < fuzhang@hku.hk >.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
#include "pointcloud_rgbd.hpp"
#include "../optical_flow/lkpyramid.hpp"
extern Common_tools::Cost_time_logger g_cost_time_logger;
extern std::shared_ptr< Common_tools::ThreadPool > m_thread_pool_ptr;
cv::RNG g_rng = cv::RNG( 0 );
// std::atomic<long> g_pts_index(0);
double g_voxel_resolution = 0.1;
double g_global_map_minimum_dis = 0.01;
std::vector< RGB_pt_ptr >* g_rgb_pts_vec;
class Triangle;
void RGB_pts::set_pos( const vec_3& pos )
{
    // Initialize both the raw and the smoothed position from the same point.
    m_pos[ 0 ] = pos( 0 );
    m_pos[ 1 ] = pos( 1 );
    m_pos[ 2 ] = pos( 2 );
    m_pos_aft_smooth[ 0 ] = pos( 0 );
    m_pos_aft_smooth[ 1 ] = pos( 1 );
    m_pos_aft_smooth[ 2 ] = pos( 2 );
}
void RGB_pts::set_smooth_pos( const vec_3& pos )
{
    // Overwrite only the smoothed position and flag the point as smoothed.
    for ( int axis = 0; axis < 3; ++axis )
    {
        m_pos_aft_smooth[ axis ] = pos( axis );
    }
    m_smoothed = true;
}
vec_3 RGB_pts::get_pos( bool get_smooth )
{
    // Return the raw coordinates unless the smoothed ones were requested.
    if ( !get_smooth )
    {
        return vec_3( m_pos[ 0 ], m_pos[ 1 ], m_pos[ 2 ] );
    }
    return vec_3( m_pos_aft_smooth[ 0 ], m_pos_aft_smooth[ 1 ], m_pos_aft_smooth[ 2 ] );
}
mat_3_3 RGB_pts::get_rgb_cov()
{
    // Diagonal covariance built from the per-channel color sigmas.
    mat_3_3 cov_mat = mat_3_3::Zero();
    cov_mat( 0, 0 ) = m_cov_rgb[ 0 ];
    cov_mat( 1, 1 ) = m_cov_rgb[ 1 ];
    cov_mat( 2, 2 ) = m_cov_rgb[ 2 ];
    return cov_mat;
}
vec_3 RGB_pts::get_rgb()
{
    // Accumulated radiance normalized by the reference exposure time.
    const vec_3 radiance( m_rgb[ 0 ], m_rgb[ 1 ], m_rgb[ 2 ] );
    return radiance / m_first_obs_exposure_time;
}
// Raw accumulated (exposure-weighted) color, without exposure normalization
// (contrast with get_rgb(), which divides by m_first_obs_exposure_time).
vec_3 RGB_pts::get_radiance()
{
    return vec_3( m_rgb[ 0 ], m_rgb[ 1 ], m_rgb[ 2 ] );
}
pcl::PointXYZI RGB_pts::get_pt()
{
    // Export the raw (un-smoothed) position as a PCL point; the intensity
    // field is left default-initialized.
    const vec_3 raw_pos = get_pos( false );
    pcl::PointXYZI pt;
    pt.x = raw_pos( 0 );
    pt.y = raw_pos( 1 );
    pt.z = raw_pos( 2 );
    return pt;
}
// Measurement sigma of a single per-pixel color observation.
const double image_obs_cov = 1.5;
// const double process_noise_sigma = 1.5;
// Random-walk process noise added per unit time between two observations.
const double process_noise_sigma = 0.15;
// const double process_noise_sigma = 0.000;
// const double process_noise_sigma = 150000;
// const double process_noise_sigma = 0.0;
// A pixel with all three channels above this value is treated as over-exposed.
const int THRESHOLD_OVEREXPOSURE = 255;
// Fuse one new color observation into this point's radiance estimate with a
// per-channel 1D Bayesian (Kalman-style) update.
//   rgb                   observed color, 0-255 per channel
//   obs_dis               camera-to-point distance of this observation
//   obs_sigma             per-channel measurement sigma
//   obs_time              observation timestamp (drives process noise)
//   current_exposure_time inverse exposure used to convert color to radiance
// Returns 1 if the observation was fused, 0 if it was rejected (or if it was
// the very first observation, which only initializes the state).
int RGB_pts::update_rgb( const vec_3& rgb, const double obs_dis, const vec_3 obs_sigma, const double obs_time, const double current_exposure_time )
{
    if ( rgb.norm() == 0 ) // avoid less-exposure
    {
        return 0;
    }
    // Reject pixels saturated on all three channels.
    if ( rgb( 0 ) > THRESHOLD_OVEREXPOSURE && rgb( 1 ) > THRESHOLD_OVEREXPOSURE && rgb( 2 ) > THRESHOLD_OVEREXPOSURE ) // avoid the over-exposure
    {
        return 0;
    }
    // Reject observations taken from clearly farther away than the best one so far.
    if ( m_obs_dis != 0 && ( ( obs_dis > m_obs_dis * 1.1 ) ) )
    {
        return 0;
    }
    if ( m_N_rgb == 0 )
    {
        // For first time of observation.
        m_last_obs_time = obs_time;
        m_obs_dis = obs_dis;
        m_first_obs_exposure_time = current_exposure_time;
        for ( int i = 0; i < 3; i++ )
        {
            // Store exposure-weighted radiance, not raw color.
            m_rgb[ i ] = rgb( i ) * current_exposure_time;
            m_cov_rgb[ i ] = obs_sigma( i );
        }
        m_N_rgb = 1;
        return 0;
    }
    // State estimation for robotics, section 2.2.6, page 37-38
    for ( int i = 0; i < 3; i++ )
    {
        m_cov_rgb[ i ] = ( m_cov_rgb[ i ] + process_noise_sigma * ( obs_time - m_last_obs_time ) ); // Add process noise
        double old_sigma = m_cov_rgb[ i ];
        // Fuse prior and measurement sigmas (information form), then update the mean.
        m_cov_rgb[ i ] = sqrt( 1.0 / ( 1.0 / m_cov_rgb[ i ] / m_cov_rgb[ i ] + 1.0 / obs_sigma( i ) / obs_sigma( i ) ) );
        m_rgb[ i ] = m_cov_rgb[ i ] * m_cov_rgb[ i ] *
                     ( m_rgb[ i ] / old_sigma / old_sigma + rgb( i ) * current_exposure_time / obs_sigma( i ) / obs_sigma( i ) );
    }
    // Rescale so the exposure-normalized color stays below 255.
    vec_3 res_rgb_vec = vec_3( m_rgb[ 0 ], m_rgb[ 1 ], m_rgb[ 2 ] ) / m_first_obs_exposure_time;
    double max_rgb = res_rgb_vec.maxCoeff(); // Avoid overexposure.
    if ( max_rgb > 255 )
    {
        for ( int i = 0; i < 3; i++ )
        {
            m_rgb[ i ] = m_rgb[ i ] * 254.999 / max_rgb;
        }
    }
    // if(m_first_obs_exposure_time > 1.0 / g_camera_exp_tim_lower_bound)
    // {
    //     m_first_obs_exposure_time = 1.0 / g_camera_exp_tim_lower_bound;
    // }
    // Keep the closest observation distance seen so far.
    if ( obs_dis < m_obs_dis )
    {
        m_obs_dis = obs_dis;
    }
    m_last_obs_time = obs_time;
    m_N_rgb++;
    // if ( m_first_obs_exposure_time <= current_exposure_time )
    // {
    //     m_first_obs_exposure_time = current_exposure_time;
    // }
    // Running average of the reference exposure time.
    m_first_obs_exposure_time = ( m_first_obs_exposure_time * ( m_N_rgb ) + current_exposure_time ) / ( m_N_rgb + 1 );
    return 1;
}
// void RGB_Voxel::refresh_triangles()
// {
// int pts_size = m_pts_in_grid.size();
// m_2d_pts_vec.resize( m_triangle_list_in_voxel.size() );
// // for(int i = 0; i < pts_size; i++ )
// int tri_idx = 0;
// for ( Triangle_set::iterator it = m_triangle_list_in_voxel.begin(); it != m_triangle_list_in_voxel.end(); it++ )
// {
// vec_3 pt_3d_a = g_rgb_pts_vec->data()[ ( *it )->m_tri_pts_id[ 0 ] ]->get_pos();
// vec_3 pt_3d_b = g_rgb_pts_vec->data()[ ( *it )->m_tri_pts_id[ 1 ] ]->get_pos();
// vec_3 pt_3d_c = g_rgb_pts_vec->data()[ ( *it )->m_tri_pts_id[ 2 ] ]->get_pos();
// m_2d_pts_vec[ tri_idx ] = Common_tools::Triangle_2( Common_tools::Point_2( pt_3d_a.dot( m_long_axis ), pt_3d_a.dot( m_mid_axis ) ),
// Common_tools::Point_2( pt_3d_b.dot( m_long_axis ), pt_3d_b.dot( m_mid_axis ) ),
// Common_tools::Point_2( pt_3d_c.dot( m_long_axis ), pt_3d_c.dot( m_mid_axis ) ) );
// tri_idx++;
// }
// }
// int RGB_Voxel::insert_triangle( long& id_0, long& id_1, long& id_2 )
// {
// vec_3 pt_3d_a = g_rgb_pts_vec->data()[ id_0 ]->get_pos() - m_center;
// vec_3 pt_3d_b = g_rgb_pts_vec->data()[ id_1 ]->get_pos() - m_center;
// vec_3 pt_3d_c = g_rgb_pts_vec->data()[ id_2 ]->get_pos() - m_center;
// Common_tools::Triangle_2 tar_triangle =
// Common_tools::Triangle_2( Common_tools::Point_2( pt_3d_a.dot( m_long_axis ), pt_3d_a.dot( m_mid_axis ) ),
// Common_tools::Point_2( pt_3d_b.dot( m_long_axis ), pt_3d_b.dot( m_mid_axis ) ),
// Common_tools::Point_2( pt_3d_c.dot( m_long_axis ), pt_3d_c.dot( m_mid_axis ) ) );
// int res_intersection = 0;
// for ( Triangle_set::iterator it = m_triangle_list_in_voxel.begin(); it != m_triangle_list_in_voxel.end(); it++ )
// {
// vec_3 pt_3d_a = g_rgb_pts_vec->data()[ ( *it )->m_tri_pts_id[ 0 ] ]->get_pos() - m_center;
// vec_3 pt_3d_b = g_rgb_pts_vec->data()[ ( *it )->m_tri_pts_id[ 1 ] ]->get_pos() - m_center;
// vec_3 pt_3d_c = g_rgb_pts_vec->data()[ ( *it )->m_tri_pts_id[ 2 ] ]->get_pos() - m_center;
// Common_tools::Triangle_2 test_triangle =
// Common_tools::Triangle_2( Common_tools::Point_2( pt_3d_a.dot( m_long_axis ), pt_3d_a.dot( m_mid_axis ) ),
// Common_tools::Point_2( pt_3d_b.dot( m_long_axis ), pt_3d_b.dot( m_mid_axis ) ),
// Common_tools::Point_2( pt_3d_c.dot( m_long_axis ), pt_3d_c.dot( m_mid_axis ) ) );
// // Common_tools::printf_triangle_pair(tar_triangle, test_triangle);
// if ( ( res_intersection = Common_tools::triangle_intersect_triangle( tar_triangle, test_triangle, 0.01 ) ) >= 4 )
// {
// // cout << "Axis = " << m_long_axis.transpose() << ", " << m_mid_axis.transpose() << ", " << endl;
// scope_color( ANSI_COLOR_RED_BOLD );
// Common_tools::printf_triangle_pair( tar_triangle, test_triangle );
// cout << " is conflict! type = " << res_intersection - 4 << endl;
// return 1;
// }
// }
// Triangle_ptr triangle_ptr = std::make_shared< Triangle >( id_0, id_1, id_2 );
// m_triangle_list_in_voxel.insert( triangle_ptr );
// return 0;
// }
// Drop all global RGB points. NOTE(review): the hash maps / voxel vectors are
// intentionally left untouched here — confirm callers reset those separately.
void Global_map::clear()
{
    m_rgb_pts_vec.clear();
}
// Set the minimum spacing between stored points; changing it invalidates the
// 3D point hash grid, so that grid is cleared here.
void Global_map::set_minimum_dis( double minimum_dis )
{
    m_hashmap_3d_pts.clear();
    m_minimum_pts_size = minimum_dis;
    g_global_map_minimum_dis = minimum_dis;  // keep the file-level mirror in sync
}
// Set the voxel-grid resolution (member and file-level mirror).
void Global_map::set_voxel_resolution( double resolution )
{
    m_voxel_resolution = resolution;
    g_voxel_resolution = resolution;
}
// Construct the global map: create all mutexes, pre-reserve the point/voxel
// containers according to available physical RAM, and optionally start the
// background projection-refresh thread.
// @param if_start_service non-zero starts service_refresh_pts_for_projection.
Global_map::Global_map( int if_start_service )
{
    m_mutex_pts_vec = std::make_shared< std::mutex >();
    m_mutex_img_pose_for_projection = std::make_shared< std::mutex >();
    m_mutex_recent_added_list = std::make_shared< std::mutex >();
    m_mutex_rgb_pts_in_recent_hitted_boxes = std::make_shared< std::mutex >();
    m_mutex_m_box_recent_hitted = std::make_shared< std::mutex >();
    m_mutex_pts_last_visited = std::make_shared< std::mutex >();
    // Allocate memory for pointclouds
    if ( Common_tools::get_total_phy_RAM_size_in_GB() < 12 )
    {
        // Low-memory machine: warn the user (with pauses so the message is
        // visible) and reserve smaller containers.
        scope_color( ANSI_COLOR_RED_BOLD );
        std::this_thread::sleep_for( std::chrono::seconds( 1 ) );
        cout << "+++++++++++++++++++++++++++++++++++++++++++++++++++" << endl;
        cout << "I have detected your physical memory smaller than 12GB (currently: " << Common_tools::get_total_phy_RAM_size_in_GB()
             << "GB). I recommend you to add more physical memory for improving the overall performance of R3LIVE." << endl;
        cout << "+++++++++++++++++++++++++++++++++++++++++++++++++++" << endl;
        std::this_thread::sleep_for( std::chrono::seconds( 5 ) );
        m_rgb_pts_vec.reserve( 1e8 );
        m_voxel_vec.reserve( 1e6 );
    }
    else
    {
        m_rgb_pts_vec.reserve( 1e9 );
        m_voxel_vec.reserve( 1e7 );
    }
    // m_rgb_pts_in_recent_visited_voxels.reserve( 1e6 );
    if ( if_start_service )
    {
        m_thread_service = std::make_shared< std::thread >( &Global_map::service_refresh_pts_for_projection, this );
    }
    // Expose the point vector to the file-level helpers in this translation unit.
    g_rgb_pts_vec = &m_rgb_pts_vec;
}
// Default destructor (the service thread, if any, is owned via shared_ptr).
Global_map::~Global_map(){};
// Background service loop: whenever a new camera frame has been posted via
// update_pose_for_projection(), rebuild the subset of map points visible in
// that frame and publish it in m_pts_rgb_vec_for_projection. Runs forever.
void Global_map::service_refresh_pts_for_projection()
{
    eigen_q last_pose_q = eigen_q::Identity();
    Common_tools::Timer timer;
    std::shared_ptr< Image_frame > img_for_projection = std::make_shared< Image_frame >();
    g_voxel_resolution = m_voxel_resolution;
    while ( 1 )
    {
        std::this_thread::sleep_for( std::chrono::milliseconds( 1 ) );
        // Take a private copy of the latest projection frame under the lock.
        m_mutex_img_pose_for_projection->lock();
        *img_for_projection = m_img_for_projection;
        m_mutex_img_pose_for_projection->unlock();
        // No frame posted yet.
        if ( img_for_projection->m_img_cols == 0 || img_for_projection->m_img_rows == 0 )
        {
            continue;
        }
        // Already processed this frame index.
        if ( img_for_projection->m_frame_idx == m_updated_frame_index )
        {
            continue;
        }
        timer.tic( " " );
        std::shared_ptr< std::vector< std::shared_ptr< RGB_pts > > > pts_rgb_vec_for_projection =
            std::make_shared< std::vector< std::shared_ptr< RGB_pts > > >();
        if ( m_if_get_all_pts_in_boxes_using_mp )
        {
            // Snapshot the recently-hit voxel set (the point gathering itself
            // is currently disabled below).
            std::vector< std::shared_ptr< RGB_pts > > pts_in_recent_hitted_boxes;
            pts_in_recent_hitted_boxes.reserve( 1e6 );
            std::unordered_set< std::shared_ptr< RGB_Voxel > > boxes_recent_hitted;
            m_mutex_m_box_recent_hitted->lock();
            boxes_recent_hitted = m_voxels_recent_visited;
            m_mutex_m_box_recent_hitted->unlock();
            // get_all_pts_in_boxes(boxes_recent_hitted, pts_in_recent_hitted_boxes);
            m_mutex_rgb_pts_in_recent_hitted_boxes->lock();
            // m_rgb_pts_in_recent_visited_voxels = pts_in_recent_hitted_boxes;
            m_mutex_rgb_pts_in_recent_hitted_boxes->unlock();
        }
        // Select the points visible from this frame's pose.
        selection_points_for_projection( img_for_projection, pts_rgb_vec_for_projection.get(), nullptr, 10.0, 1 );
        // Publish the new selection atomically with the frame index.
        m_mutex_pts_vec->lock();
        m_pts_rgb_vec_for_projection = pts_rgb_vec_for_projection;
        m_updated_frame_index = img_for_projection->m_frame_idx;
        // cout << ANSI_COLOR_MAGENTA_BOLD << "Refresh pts_for_projection size = " << m_pts_rgb_vec_for_projection->size()
        //      << " | " << m_rgb_pts_vec.size()
        //      << ", cost time = " << timer.toc() << ANSI_COLOR_RESET << endl;
        m_mutex_pts_vec->unlock();
        last_pose_q = img_for_projection->m_pose_w2c_q;
    }
}
/**
 * @brief Colorize the currently selected projection subset with this frame.
 * @param img_ptr frame providing the pixel colors and pose.
 *
 * Uses std::lock_guard so the mutex is released even if rendering throws;
 * the previous manual lock()/unlock() pair leaked the lock on exceptions.
 */
void Global_map::render_points_for_projection( std::shared_ptr< Image_frame >& img_ptr )
{
    std::lock_guard< std::mutex > lock( *m_mutex_pts_vec );
    if ( m_pts_rgb_vec_for_projection != nullptr )
    {
        render_pts_in_voxels( img_ptr, *m_pts_rgb_vec_for_projection );
        // render_pts_in_voxels(img_ptr, m_rgb_pts_vec);
    }
    m_last_updated_frame_idx = img_ptr->m_frame_idx;
}
/**
 * @brief Post a new camera frame/pose for the background projection service.
 * @param img        source frame (intrinsics, pose, images are copied).
 * @param fov_margin field-of-view margin used when selecting points.
 *
 * Uses std::lock_guard for exception-safe locking (the previous manual
 * lock()/unlock() pair would leave the mutex held if a copy threw).
 */
void Global_map::update_pose_for_projection( std::shared_ptr< Image_frame >& img, double fov_margin )
{
    std::lock_guard< std::mutex > lock( *m_mutex_img_pose_for_projection );
    m_img_for_projection.set_intrinsic( img->m_cam_K );
    m_img_for_projection.m_img_cols = img->m_img_cols;
    m_img_for_projection.m_img_rows = img->m_img_rows;
    m_img_for_projection.m_fov_margin = fov_margin;
    m_img_for_projection.m_frame_idx = img->m_frame_idx;
    m_img_for_projection.m_pose_w2c_q = img->m_pose_w2c_q;
    m_img_for_projection.m_pose_w2c_t = img->m_pose_w2c_t;
    m_img_for_projection.m_img_gray = img->m_img_gray; // clone?
    m_img_for_projection.m_img = img->m_img;           // clone?
    m_img_for_projection.refresh_pose_for_projection();
}
// True while append_points_to_global_map() is running (m_in_appending_pts flag).
bool Global_map::is_busy()
{
    return m_in_appending_pts;
}
template int Global_map::append_points_to_global_map< pcl::PointXYZI >( pcl::PointCloud< pcl::PointXYZI >& pc_in, double added_time,
std::vector< std::shared_ptr< RGB_pts > >* pts_added_vec, int step,
int disable_append );
template int Global_map::append_points_to_global_map< pcl::PointXYZRGB >( pcl::PointCloud< pcl::PointXYZRGB >& pc_in, double added_time,
std::vector< std::shared_ptr< RGB_pts > >* pts_added_vec, int step,
int disable_append );
vec_3 g_current_lidar_position;
std::vector< RGB_pt_ptr > retrieve_pts_in_voxels( std::unordered_set< std::shared_ptr< RGB_Voxel > >& neighbor_voxels )
{
std::vector< RGB_pt_ptr > RGB_pt_ptr_vec;
for ( std::unordered_set< std::shared_ptr< RGB_Voxel > >::iterator it = neighbor_voxels.begin(); it != neighbor_voxels.end(); it++ )
{
auto it_s = ( *it )->m_pts_in_grid.begin();
auto it_e = ( *it )->m_pts_in_grid.end();
RGB_pt_ptr_vec.insert( RGB_pt_ptr_vec.end(), it_s, it_e );
}
return RGB_pt_ptr_vec;
}
std::unordered_set< std::shared_ptr< RGB_Voxel > > voxels_recent_visited;
// Insert a (possibly downsampled) LiDAR scan into the global map.
//   pc_in          input cloud (templated on PCL point type)
//   added_time     scan timestamp, used to age out stale voxels
//   pts_added_vec  optional out: the RGB points touched by this scan
//   step           take every step-th input point
//   disable_append non-zero: only refresh voxel bookkeeping, add no points
// Returns the number of voxels newly activated by this scan.
template < typename T >
int Global_map::append_points_to_global_map( pcl::PointCloud< T >& pc_in, double added_time, std::vector< std::shared_ptr< RGB_pts > >* pts_added_vec,
                                             int step, int disable_append )
{
    m_in_appending_pts = 1;  // flag read by is_busy()
    Common_tools::Timer tim;
    tim.tic();
    int acc = 0;  // accepted (newly added) points
    int rej = 0;  // rejected (already present) points
    if ( pts_added_vec != nullptr )
    {
        pts_added_vec->clear();
    }
    if ( m_recent_visited_voxel_activated_time == 0 )
    {
        // No aging window configured: start from an empty recent-voxel set.
        voxels_recent_visited.clear();
    }
    else
    {
        // Take ownership of the shared recent-voxel set (swapped back at the
        // end of this function), then drop voxels not visited recently enough.
        m_mutex_m_box_recent_hitted->lock();
        std::swap( voxels_recent_visited, m_voxels_recent_visited );
        m_mutex_m_box_recent_hitted->unlock();
        for ( Voxel_set_iterator it = voxels_recent_visited.begin(); it != voxels_recent_visited.end(); )
        {
            if ( added_time - ( *it )->m_last_visited_time > m_recent_visited_voxel_activated_time )
            {
                it = voxels_recent_visited.erase( it );
                continue;
            }
            if ( ( *it )->m_pts_in_grid.size() )
            {
                // Distance to the voxel (distance-based culling below is
                // currently disabled).
                double voxel_dis = ( g_current_lidar_position - vec_3( ( *it )->m_pts_in_grid[ 0 ]->get_pos() ) ).norm();
                // if ( voxel_dis > 30 )
                // {
                //     it = voxels_recent_visited.erase( it );
                //     continue;
                // }
            }
            it++;
        }
        // cout << "Restored voxel number = " << voxels_recent_visited.size() << endl;
    }
    int number_of_voxels_before_add = voxels_recent_visited.size();
    int pt_size = pc_in.points.size();
    // step = 4;
    KDtree_pt_vector pt_vec_vec;
    std::vector< float > dist_vec;
    RGB_voxel_ptr* temp_box_ptr_ptr;
    for ( long pt_idx = 0; pt_idx < pt_size; pt_idx += step )
    {
        int add = 1;
        // Cell indices in the fine (minimum-distance) grid ...
        int grid_x = std::round( pc_in.points[ pt_idx ].x / m_minimum_pts_size );
        int grid_y = std::round( pc_in.points[ pt_idx ].y / m_minimum_pts_size );
        int grid_z = std::round( pc_in.points[ pt_idx ].z / m_minimum_pts_size );
        // ... and in the coarser voxel grid.
        int box_x = std::round( pc_in.points[ pt_idx ].x / m_voxel_resolution );
        int box_y = std::round( pc_in.points[ pt_idx ].y / m_voxel_resolution );
        int box_z = std::round( pc_in.points[ pt_idx ].z / m_voxel_resolution );
        // A point already occupying this fine cell means: do not add, but
        // still report it to the caller.
        auto pt_ptr = m_hashmap_3d_pts.get_data( grid_x, grid_y, grid_z );
        if ( pt_ptr != nullptr )
        {
            add = 0;
            if ( pts_added_vec != nullptr )
            {
                pts_added_vec->push_back( *pt_ptr );
            }
        }
        // Fetch (or lazily create) the voxel for this point.
        RGB_voxel_ptr box_ptr;
        temp_box_ptr_ptr = m_hashmap_voxels.get_data( box_x, box_y, box_z );
        if ( temp_box_ptr_ptr == nullptr )
        {
            box_ptr = std::make_shared< RGB_Voxel >( box_x, box_y, box_z );
            m_hashmap_voxels.insert( box_x, box_y, box_z, box_ptr );
            m_voxel_vec.push_back( box_ptr );
        }
        else
        {
            box_ptr = *temp_box_ptr_ptr;
        }
        voxels_recent_visited.insert( box_ptr );
        box_ptr->m_last_visited_time = added_time;
        if ( add == 0 )
        {
            rej++;
            continue;
        }
        if ( disable_append )
        {
            continue;
        }
        acc++;
        // Second rejection stage: nearest-neighbor distance in the kd-tree.
        KDtree_pt kdtree_pt( vec_3( pc_in.points[ pt_idx ].x, pc_in.points[ pt_idx ].y, pc_in.points[ pt_idx ].z ), 0 );
        if ( m_kdtree.Root_Node != nullptr )
        {
            m_kdtree.Nearest_Search( kdtree_pt, 1, pt_vec_vec, dist_vec );
            if ( pt_vec_vec.size() )
            {
                if ( sqrt( dist_vec[ 0 ] ) < m_minimum_pts_size )
                {
                    continue;
                }
            }
        }
        // Create the new RGB point and register it everywhere.
        std::shared_ptr< RGB_pts > pt_rgb = std::make_shared< RGB_pts >();
        pt_rgb->set_pos( vec_3( pc_in.points[ pt_idx ].x, pc_in.points[ pt_idx ].y, pc_in.points[ pt_idx ].z ) );
        pt_rgb->m_pt_index = m_rgb_pts_vec.size();
        kdtree_pt.m_pt_idx = pt_rgb->m_pt_index;
        m_rgb_pts_vec.push_back( pt_rgb );
        m_hashmap_3d_pts.insert( grid_x, grid_y, grid_z, pt_rgb );
        if ( box_ptr != nullptr )
        {
            box_ptr->m_pts_in_grid.push_back( pt_rgb );
            // box_ptr->add_pt(pt_rgb);
            box_ptr->m_new_added_pts_count++;
            box_ptr->m_meshing_times = 0;  // force this voxel to be re-meshed
        }
        else
        {
            // Should be unreachable: box_ptr was created above if missing.
            scope_color( ANSI_COLOR_RED_BOLD );
            for ( int i = 0; i < 100; i++ )
            {
                cout << "box_ptr is nullptr!!!" << endl;
            }
        }
        // Add to kdtree
        m_kdtree.Add_Point( kdtree_pt, false );
        if ( pts_added_vec != nullptr )
        {
            pts_added_vec->push_back( pt_rgb );
        }
    }
    m_in_appending_pts = 0;
    // Hand the updated recent-voxel set back to the shared member.
    m_mutex_m_box_recent_hitted->lock();
    std::swap( m_voxels_recent_visited, voxels_recent_visited );
    // m_voxels_recent_visited = voxels_recent_visited ;
    m_mutex_m_box_recent_hitted->unlock();
    return ( m_voxels_recent_visited.size() - number_of_voxels_before_add );
}
// Project each candidate point into img_ptr and fuse the sampled pixel color
// into the point's radiance estimate (RGB_pts::update_rgb).
//   pts_for_render candidate points (typically the projection subset)
//   obs_time       observation timestamp forwarded to update_rgb
void Global_map::render_pts_in_voxels( std::shared_ptr< Image_frame >& img_ptr, std::vector< std::shared_ptr< RGB_pts > >& pts_for_render,
                                       double obs_time )
{
    Common_tools::Timer tim;
    tim.tic();
    double u, v;
    int hit_count = 0;
    int pt_size = pts_for_render.size();
    m_last_updated_frame_idx = img_ptr->m_frame_idx;
    // First pass: minimum depth along the camera view axis over all candidates.
    double min_voxel_dis = 3e8;
    for ( int i = 0; i < pt_size; i++ )
    {
        double pt_cam_dis = ( pts_for_render[ i ]->get_pos() - img_ptr->m_pose_w2c_t ).dot( img_ptr->m_image_norm );
        if ( pt_cam_dis < min_voxel_dis )
        {
            min_voxel_dis = pt_cam_dis;
        }
    }
    double allow_render_dis = std::max( 0.05, g_voxel_resolution * 0.1 );
    for ( int i = 0; i < pt_size; i++ )
    {
        vec_3 pt_w = pts_for_render[ i ]->get_pos();
        double pt_cam_dis = ( pt_w - img_ptr->m_pose_w2c_t ).dot( img_ptr->m_image_norm );
        ;
        // Skip well-observed points lying behind the closest depth slab
        // (likely occluded by nearer geometry).
        if ( ( pt_cam_dis - min_voxel_dis > allow_render_dis ) && ( pts_for_render[ i ]->m_N_rgb > 5 ) )
        {
            continue;
        }
        bool res = img_ptr->project_3d_point_in_this_img( pt_w, u, v, nullptr, 1.0 );
        if ( res == false )
        {
            continue;
        }
        hit_count++;
        // Temporarily disable gamma correction while sampling the pixel.
        vec_2 gama_bak = img_ptr->m_gama_para;
        img_ptr->m_gama_para = vec_2( 1.0, 0.0 ); // Render using normal value?
        double gray = img_ptr->get_grey_color( u, v, 0 );
        vec_3 rgb_color = img_ptr->get_rgb( u, v, 0 );
        if ( rgb_color.maxCoeff() > 255.0 )
        {
            cout << ANSI_COLOR_RED << "Error, render RGB = " << rgb_color.transpose() << ANSI_COLOR_RESET << endl;
        }
        // pts_for_render[i]->update_gray(gray, pt_cam.norm());
        pts_for_render[ i ]->update_rgb( rgb_color, pt_cam_dis, vec_3( image_obs_cov, image_obs_cov, image_obs_cov ), obs_time,
                                         img_ptr->m_image_inverse_exposure_time );
        img_ptr->m_gama_para = gama_bak;
        // m_rgb_pts_vec[i]->update_rgb( vec_3(gray, gray, gray) );
    }
    // cout << "Render cost time = " << tim.toc() << endl;
    // cout << "Total hit count = " << hit_count << endl;
}
Common_tools::Cost_time_logger cost_time_logger_render( "/home/ziv/temp/render_thr.log" );
// SECTION recored average photometric error
// ANCHOR - thread_render_pts_in_voxel
std::atomic< long > render_pts_count;
extern double g_maximum_pe_error;
// Worker for the multi-threaded renderer: colorize the points of voxels
// [pt_start, pt_end) from img_ptr and accumulate the frame's photometric
// error statistics. Returns the elapsed time (scaled by 100) for logging.
static inline double thread_render_pts_in_voxel( const int& pt_start, const int& pt_end, const std::shared_ptr< Image_frame >& img_ptr,
                                                 const std::vector< RGB_voxel_ptr >* voxels_for_render, const double obs_time )
{
    vec_3 pt_w;
    vec_3 rgb_color;
    double u, v;
    double pt_cam_norm;
    Common_tools::Timer tim;
    tim.tic();
    vec_3 pt_radiance;
    double allow_render_dis = std::max( 0.1, g_voxel_resolution * 0.2 );
    for ( int voxel_idx = pt_start; voxel_idx < pt_end; voxel_idx++ )
    {
        // continue;
        RGB_voxel_ptr voxel_ptr = ( *voxels_for_render )[ voxel_idx ];
        double min_voxel_dis = 3e8;  // per-voxel min-depth pre-pass is disabled below
        // for ( int i = 0; i < voxel_ptr->m_pts_in_grid.size(); i++ )
        // {
        //     double pt_cam_dis = ( voxel_ptr->m_pts_in_grid[i]->get_pos() - img_ptr->m_pose_w2c_t ).dot( img_ptr->m_image_norm );
        //     // double pt_cam_dis = ( voxel_ptr->m_pts_in_grid[i]->get_pos() - img_ptr->m_pose_w2c_t ).norm();
        //     if ( pt_cam_dis < min_voxel_dis )
        //     {
        //         min_voxel_dis = pt_cam_dis;
        //     }
        // }
        for ( int pt_idx = 0; pt_idx < voxel_ptr->m_pts_in_grid.size(); pt_idx++ )
        {
            pt_w = voxel_ptr->m_pts_in_grid[ pt_idx ]->get_pos();
            vec_3 pt_cam_view_vector = pt_w - img_ptr->m_pose_w2c_t;
            double view_dis = pt_cam_view_vector.norm();
            // Angle (degrees) between the view ray and the camera axis.
            double view_angle = acos( pt_cam_view_vector.dot( img_ptr->m_image_norm ) / ( view_dis + 0.0001 ) ) * 57.3;
            // Clamp from below so the observation covariance stays bounded away from zero.
            view_angle = std::max( view_angle, 5.0 );
            view_dis = std::max( view_dis, 1.0 );
            // Reject oblique observations.
            if ( view_angle > 30 )
            {
                continue;
            }
            if ( img_ptr->project_3d_point_in_this_img( pt_w, u, v, nullptr, 1.0 ) == false )
            {
                continue;
            }
            // Observation noise grows with distance and viewing angle.
            vec_3 obs_cov =
                vec_3( image_obs_cov * view_dis * view_angle, image_obs_cov * view_dis * view_angle, image_obs_cov * view_dis * view_angle );
            rgb_color = img_ptr->get_rgb( u, v, 0 );
            if ( voxel_ptr->m_pts_in_grid[ pt_idx ]->update_rgb( rgb_color, view_dis, obs_cov, obs_time, img_ptr->m_image_inverse_exposure_time ) )
            {
                render_pts_count++;
                // Skip (nearly) saturated points when accumulating the
                // photometric-error statistics.
                if ( voxel_ptr->m_pts_in_grid[ pt_idx ]->get_rgb().maxCoeff() > 254 )
                {
                    continue;
                }
                pt_radiance = voxel_ptr->m_pts_in_grid[ pt_idx ]->get_radiance();
                pt_radiance = ( pt_radiance / img_ptr->m_image_inverse_exposure_time );
                if ( pt_radiance.maxCoeff() > 245.0 )
                {
                    continue;
                }
                // double brightness_err = fabs(rgb_color.mean() - pt_radiance.mean());
                // Clamped brightness residual between observation and model.
                double brightness_err = fabs( rgb_color.norm() - pt_radiance.norm() );
                if ( brightness_err > g_maximum_pe_error )
                {
                    brightness_err = g_maximum_pe_error;
                    // continue;
                }
                img_ptr->m_acc_render_count++;
                img_ptr->m_acc_photometric_error = img_ptr->m_acc_photometric_error + brightness_err;
                // img_ptr->m_acc_photometric_error += std::atomic<double>((pt_color- rgb_color).norm());
            }
        }
    }
    double cost_time = tim.toc() * 100;
    return cost_time;
}
// Scratch list of voxels scheduled for rendering (rebuilt on every call below).
std::vector< RGB_voxel_ptr > g_voxel_for_render;
// Lazily-opened log of per-frame photometric error statistics.
FILE* photometric_fp = nullptr;
// Multi-threaded renderer: colorize the points inside the given voxel set with img_ptr,
// dispatching contiguous voxel ranges either to OpenCV's parallel_for_ (when
// USING_OPENCV_TBB) or to the project thread pool, then log the photometric error.
void render_pts_in_voxels_mp( std::shared_ptr< Image_frame >& img_ptr, std::unordered_set< RGB_voxel_ptr >* _voxels_for_render,
                              const double& obs_time )
{
    Common_tools::Timer tim;
    g_voxel_for_render.clear();
    for ( Voxel_set_iterator it = ( *_voxels_for_render ).begin(); it != ( *_voxels_for_render ).end(); it++ )
    {
        g_voxel_for_render.push_back( *it );
    }
    std::vector< std::future< double > > results;
    tim.tic( "Render_mp" );
    int numbers_of_voxels = g_voxel_for_render.size();
    g_cost_time_logger.record( "Pts_num", numbers_of_voxels );
    render_pts_count = 0;
    img_ptr->m_acc_render_count = 0;
    img_ptr->m_acc_photometric_error = 0;
    if ( USING_OPENCV_TBB )
    {
        cv::parallel_for_( cv::Range( 0, numbers_of_voxels ),
                           [&]( const cv::Range& r ) { thread_render_pts_in_voxel( r.start, r.end, img_ptr, &g_voxel_for_render, obs_time ); } );
    }
    else
    {
        int num_of_threads = std::min( 8 * 2, ( int ) numbers_of_voxels );
        results.resize( num_of_threads );
        tim.tic( "Com" );
        for ( int thr = 0; thr < num_of_threads; thr++ )
        {
            // Evenly partition the voxel range over the workers.
            int start = thr * numbers_of_voxels / num_of_threads;
            int end = ( thr + 1 ) * numbers_of_voxels / num_of_threads;
            results[ thr ] = m_thread_pool_ptr->commit_task( thread_render_pts_in_voxel, start, end, img_ptr, &g_voxel_for_render, obs_time );
        }
        g_cost_time_logger.record( tim, "Com" );
        tim.tic( "wait_Opm" );
        for ( int thr = 0; thr < num_of_threads; thr++ )
        {
            double cost_time = results[ thr ].get();
            cost_time_logger_render.record( std::string( "T_" ).append( std::to_string( thr ) ), cost_time );
        }
        g_cost_time_logger.record( tim, "wait_Opm" );
        cost_time_logger_render.record( tim, "wait_Opm" );
    }
    // ANCHOR - record photometric error
    if ( photometric_fp == nullptr )
    {
        photometric_fp = fopen( std::string( Common_tools::get_home_folder().c_str() ).append( "/r3live_output/photometric.log" ).c_str(), "w+" );
    }
    // Fix 1: guard against a failed fopen (e.g. missing output directory) -- the old code
    // called fprintf on a NULL stream. Fix 2: the render counter is a long, so it must be
    // printed with %ld; the old %d was undefined behavior on 64-bit targets.
    if ( ( photometric_fp != nullptr ) && ( long( img_ptr->m_acc_render_count ) != 0 ) )
    {
        fprintf( photometric_fp, "%f %d %ld %f %f\r\n", img_ptr->m_timestamp, img_ptr->m_frame_idx, long( img_ptr->m_acc_render_count ),
                 double( img_ptr->m_acc_photometric_error ) / long( img_ptr->m_acc_render_count ), double( img_ptr->m_acc_photometric_error ) );
        fflush( photometric_fp );
    }
    cost_time_logger_render.flush_d();
    g_cost_time_logger.record( tim, "Render_mp" );
    g_cost_time_logger.record( "Pts_num_r", render_pts_count );
}
//! SECTION
void Global_map::render_with_a_image( std::shared_ptr< Image_frame >& img_ptr, int if_select )
{
    // Colorize map points using this image. When if_select is nonzero, only the points
    // surviving the projection-based selection are rendered; otherwise all points are used.
    std::vector< std::shared_ptr< RGB_pts > > pts_for_render;
    if ( !if_select )
    {
        pts_for_render = m_rgb_pts_vec;
    }
    else
    {
        selection_points_for_projection( img_ptr, &pts_for_render, nullptr, 1.0 );
    }
    render_pts_in_voxels( img_ptr, pts_for_render );
}
void Global_map::selection_points_for_projection( std::shared_ptr< Image_frame >& image_pose, std::vector< std::shared_ptr< RGB_pts > >* pc_out_vec,
                                                  std::vector< cv::Point2f >* pc_2d_out_vec, double minimum_dis, int skip_step, int use_all_pts )
{
    // Select a depth-consistent subset of map points for projection into the image.
    // Projected points are binned on a 2D grid with cell size `minimum_dis` (pixels) and,
    // per cell, only the point closest to the camera is kept (simple occlusion handling).
    // Either output pointer may be nullptr: pc_out_vec receives the selected 3D points,
    // pc_2d_out_vec their sub-pixel image positions.
    Common_tools::Timer tim;
    tim.tic();
    if ( pc_out_vec != nullptr )
    {
        pc_out_vec->clear();
    }
    if ( pc_2d_out_vec != nullptr )
    {
        pc_2d_out_vec->clear();
    }
    Hash_map_2d< int, int >      mask_index;   // grid cell -> index of the kept point
    Hash_map_2d< int, float >    mask_depth;   // grid cell -> depth of the kept point
    std::map< int, cv::Point2f > map_idx_draw_center;
    std::map< int, cv::Point2f > map_idx_draw_center_raw_pose;
    int    u, v;
    double u_f, v_f;
    int    acc = 0;
    int    blk_rej = 0;
    std::vector< std::shared_ptr< RGB_pts > > pts_for_projection;
    // Snapshot the recently-hit voxel set under its mutex before iterating.
    m_mutex_m_box_recent_hitted->lock();
    std::unordered_set< std::shared_ptr< RGB_Voxel > > boxes_recent_hitted = m_voxels_recent_visited;
    m_mutex_m_box_recent_hitted->unlock();
    if ( ( !use_all_pts ) && boxes_recent_hitted.size() )
    {
        m_mutex_rgb_pts_in_recent_hitted_boxes->lock();
        for ( Voxel_set_iterator it = boxes_recent_hitted.begin(); it != boxes_recent_hitted.end(); it++ )
        {
            // One candidate point per voxel keeps the projection pass cheap.
            if ( ( *it )->m_pts_in_grid.size() )
            {
                pts_for_projection.push_back( ( *it )->m_pts_in_grid[ 0 ] );
            }
        }
        m_mutex_rgb_pts_in_recent_hitted_boxes->unlock();
    }
    else
    {
        pts_for_projection = m_rgb_pts_vec;
    }
    int pts_size = pts_for_projection.size();
    for ( int pt_idx = 0; pt_idx < pts_size; pt_idx += skip_step )
    {
        vec_3  pt = pts_for_projection[ pt_idx ]->get_pos();
        double depth = ( pt - image_pose->m_pose_w2c_t ).norm();
        if ( depth > m_maximum_depth_for_projection )
        {
            continue;
        }
        if ( depth < m_minimum_depth_for_projection )
        {
            continue;
        }
        bool res = image_pose->project_3d_point_in_this_img( pt, u_f, v_f, nullptr, 1.0 );
        if ( res == false )
        {
            continue;
        }
        // Quantize to the selection grid. NOTE(review): multiplying back by minimum_dis and
        // truncating to int reproduces the historical behavior (the old "Why can not work"
        // comment); the intent may have been just the cell index round(u_f / minimum_dis) --
        // confirm before changing, since it alters which points collide in a cell.
        u = std::round( u_f / minimum_dis ) * minimum_dis;
        v = std::round( v_f / minimum_dis ) * minimum_dis;
        if ( ( !mask_depth.if_exist( u, v ) ) || mask_depth.m_map_2d_hash_map[ u ][ v ] > depth )
        {
            acc++;
            if ( mask_index.if_exist( u, v ) )
            {
                // A farther point already occupies this cell -- evict it. Erase by key:
                // erasing the iterator returned by find() would be undefined behavior if
                // the key were ever absent, while erase(key) is a safe no-op in that case.
                int old_idx = mask_index.m_map_2d_hash_map[ u ][ v ];
                blk_rej++;
                map_idx_draw_center.erase( old_idx );
                map_idx_draw_center_raw_pose.erase( old_idx );
            }
            mask_index.m_map_2d_hash_map[ u ][ v ] = ( int ) pt_idx;
            mask_depth.m_map_2d_hash_map[ u ][ v ] = ( float ) depth;
            map_idx_draw_center[ pt_idx ] = cv::Point2f( v, u );
            map_idx_draw_center_raw_pose[ pt_idx ] = cv::Point2f( u_f, v_f );
        }
    }
    if ( pc_out_vec != nullptr )
    {
        for ( auto it = map_idx_draw_center.begin(); it != map_idx_draw_center.end(); it++ )
        {
            pc_out_vec->push_back( pts_for_projection[ it->first ] );
        }
    }
    if ( pc_2d_out_vec != nullptr )
    {
        for ( auto it = map_idx_draw_center.begin(); it != map_idx_draw_center.end(); it++ )
        {
            pc_2d_out_vec->push_back( map_idx_draw_center_raw_pose[ it->first ] );
        }
    }
}
void Global_map::save_to_pcd( std::string dir_name, std::string _file_name, int save_pts_with_views )
{
    // Dump every RGB map point observed at least `save_pts_with_views` times to
    // <dir_name><_file_name>.pcd (binary PCD). The stored per-point color channels are
    // indexed (2,1,0) into the r/g/b fields, i.e. the internal order is swapped on output.
    Common_tools::Timer tim;
    Common_tools::create_dir( dir_name );
    std::string file_name = std::string( dir_name ).append( _file_name );
    scope_color( ANSI_COLOR_BLUE_BOLD );
    cout << "Save Rgb points to " << file_name << endl;
    fflush( stdout );
    pcl::PointCloud< pcl::PointXYZRGB > pc_rgb;
    long pt_size = m_rgb_pts_vec.size();
    pc_rgb.resize( pt_size );
    long pt_count = 0;
    // Iterate from newest to oldest. Fix: the old condition (i > 0) silently skipped the
    // very first point (index 0), so one point was always missing from the saved map.
    for ( long i = pt_size - 1; i >= 0; i-- )
    {
        // The (pt_size > 1) guard avoids a division by zero in the percentage below.
        if ( ( i % 1000 == 0 ) && ( pt_size > 1 ) )
        {
            cout << ANSI_DELETE_CURRENT_LINE << "Saving offline map " << ( int ) ( ( pt_size - 1 - i ) * 100.0 / ( pt_size - 1 ) ) << " % ...";
            fflush( stdout );
        }
        if ( m_rgb_pts_vec[ i ]->m_N_rgb < save_pts_with_views )
        {
            continue;
        }
        vec_3 pt_rgb = m_rgb_pts_vec[ i ]->get_rgb();
        pc_rgb.points[ pt_count ].x = m_rgb_pts_vec[ i ]->m_pos[ 0 ];
        pc_rgb.points[ pt_count ].y = m_rgb_pts_vec[ i ]->m_pos[ 1 ];
        pc_rgb.points[ pt_count ].z = m_rgb_pts_vec[ i ]->m_pos[ 2 ];
        pc_rgb.points[ pt_count ].r = pt_rgb( 2 );
        pc_rgb.points[ pt_count ].g = pt_rgb( 1 );
        pc_rgb.points[ pt_count ].b = pt_rgb( 0 );
        pt_count++;
    }
    cout << ANSI_DELETE_CURRENT_LINE << "Saving offline map 100% ..." << endl;
    pc_rgb.resize( pt_count );
    cout << "Total have " << pt_count << " points." << endl;
    tim.tic();
    cout << "Now write to: " << file_name << endl;
    pcl::io::savePCDFileBinary( std::string( file_name ).append( ".pcd" ), pc_rgb );
    cout << "Save PCD cost time = " << tim.toc() << endl;
}
void Global_map::save_and_display_pointcloud( std::string dir_name, std::string file_name, int save_pts_with_views )
{
    // Write the colored point cloud to disk, then block on an external viewer so the
    // user can inspect it; mapping resumes once the viewer window is closed.
    save_to_pcd( dir_name, file_name, save_pts_with_views );
    scope_color( ANSI_COLOR_WHITE_BOLD );
    cout << "========================================================" << endl;
    cout << "Open pcl_viewer to display point cloud, close the viewer's window to continue mapping process ^_^" << endl;
    cout << "========================================================" << endl;
    std::string viewer_cmd = std::string( "r3live_viewer " ).append( dir_name ).append( "/rgb_pt.pcd" );
    system( viewer_cmd.c_str() );
}
vec_3 Global_map::smooth_pts( RGB_pt_ptr& rgb_pt, double smooth_factor, double knn, double maximum_smooth_dis )
{
    // Laplacian-style smoothing of a single point: blend its position with the centroid
    // of its k nearest neighbors that lie within maximum_smooth_dis. The result is
    // written back via set_smooth_pos() and also returned.
    // A non-positive maximum_smooth_dis selects the default radius (0.8 * voxel size).
    KDtree_pt_vector     kdtree_pt_vec;
    std::vector< float > search_pt_dis;
    vec_3 pt_vec = rgb_pt->get_pos();
    vec_3 pt_vec_neighbor = vec_3( 0, 0, 0 );
    if ( maximum_smooth_dis <= 0 )
    {
        maximum_smooth_dis = m_voxel_resolution * 0.8;
    }
    m_kdtree.Nearest_Search( KDtree_pt( pt_vec ), knn, kdtree_pt_vec, search_pt_dis );
    double valid_size = 0.0;
    int    size = search_pt_dis.size();
    // Start at k = 1: the nearest result is the query point itself.
    for ( int k = 1; k < size; k++ )
    {
        // Nearest_Search returns squared distances, hence the sqrt.
        if ( sqrt( search_pt_dis[ k ] ) < maximum_smooth_dis )
        {
            pt_vec_neighbor += m_rgb_pts_vec[ kdtree_pt_vec[ k ].m_pt_idx ]->get_pos();
            valid_size += 1.0;
        }
    }
    // Fix: with zero valid neighbors the old code computed (0,0,0) / 0, storing a NaN
    // position. Keep the original position in that case.
    vec_3 pt_vec_smoothed = pt_vec;
    if ( valid_size > 0.0 )
    {
        pt_vec_smoothed = pt_vec * ( 1.0 - smooth_factor ) + pt_vec_neighbor * smooth_factor / valid_size;
    }
    rgb_pt->set_smooth_pos( pt_vec_smoothed );
    return pt_vec_smoothed;
}
3D | hku-mars/ImMesh | src/meshing/r3live/image_frame.hpp | .hpp | 11,622 | 315 | /*
This code is the implementation of our paper "R3LIVE: A Robust, Real-time, RGB-colored,
LiDAR-Inertial-Visual tightly-coupled state Estimation and mapping package".
Author: Jiarong Lin < ziv.lin.ljr@gmail.com >
If you use any code of this repo in your academic research, please cite at least
one of our papers:
[1] Lin, Jiarong, and Fu Zhang. "R3LIVE: A Robust, Real-time, RGB-colored,
LiDAR-Inertial-Visual tightly-coupled state Estimation and mapping package."
[2] Xu, Wei, et al. "Fast-lio2: Fast direct lidar-inertial odometry."
[3] Lin, Jiarong, et al. "R2LIVE: A Robust, Real-time, LiDAR-Inertial-Visual
tightly-coupled state Estimator and mapping."
[4] Xu, Wei, and Fu Zhang. "Fast-lio: A fast, robust lidar-inertial odometry
package by tightly-coupled iterated kalman filter."
[5] Cai, Yixi, Wei Xu, and Fu Zhang. "ikd-Tree: An Incremental KD Tree for
Robotic Applications."
[6] Lin, Jiarong, and Fu Zhang. "Loam-livox: A fast, robust, high-precision
LiDAR odometry and mapping package for LiDARs of small FoV."
For commercial use, please contact me < ziv.lin.ljr@gmail.com > and
Dr. Fu Zhang < fuzhang@hku.hk >.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include "stdio.h"
#include "iostream"
#include "string"
#include <thread>
#include <mutex>
#include <atomic>
#include <Eigen/Eigen>
#include <opencv2/opencv.hpp>
#include <opencv2/core/eigen.hpp>
#include "opencv2/features2d.hpp"
// #include "opencv2/xfeatures2d.hpp"
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl/io/pcd_io.h>
#include "tools_eigen.hpp"
#include "tools_data_io.hpp"
#include "tools_timer.hpp"
#include "tools_logger.hpp"
#include "tools_mem_used.h"
#include "tools_openCV_3_to_4.hpp"
#include "tools_serialization.hpp"
#include "lib_sophus/so3.hpp"
#include "lib_sophus/se3.hpp"
//https://www.opencv-srf.com/2018/02/histogram-equalization.html
inline char cv_wait_key(int ms )
{
    // Thread-safe wrapper around cv::waitKey: serializes concurrent callers with a
    // function-local mutex. Pressing 'p', 'P' or space pauses until the next key press.
    // RAII lock_guard (instead of manual lock/unlock) guarantees the mutex is released
    // even if cv::waitKey throws.
    static std::mutex cv_wait_key_mutex;
    std::lock_guard< std::mutex > lock( cv_wait_key_mutex );
    char c = cv::waitKey(ms);
    if (c == 'p' || c == 'P' || c == ' ')
    {
        c = cv::waitKey(0);
    }
    return c;
}
inline cv::Mat equalize_color_image(cv::Mat &image)
{
    // Histogram-equalize a BGR image by equalizing only the luma (Y) channel in the
    // YCrCb color space, which stretches the contrast without shifting the chroma.
    cv::Mat ycrcb_image;
    cv::cvtColor(image, ycrcb_image, cv::COLOR_BGR2YCrCb);
    // Split into Y, Cr, Cb planes and equalize Y in place.
    std::vector<cv::Mat> channels;
    cv::split(ycrcb_image, channels);
    cv::equalizeHist(channels[0], channels[0]);
    // Recombine and convert back to BGR.
    cv::merge(channels, ycrcb_image);
    cv::cvtColor(ycrcb_image, ycrcb_image, cv::COLOR_YCrCb2BGR);
    return ycrcb_image;
}
inline cv::Mat colormap_depth_img(cv::Mat depth_mat)
{
    // Convert a depth image into a JET-colormapped visualization: the observed depth
    // range is scaled to [0, 255], inverted, and passed through cv::applyColorMap.
    cv::Mat cm_img0;
    cv::Mat adjMap;
    double min;
    double max;
    cv::minMaxIdx(depth_mat, &min, &max);
    // Fix: a constant-depth image made (max - min) zero and the convertTo scale infinite.
    double range = max - min;
    if (range <= 0)
    {
        range = 1.0;
    }
    // NOTE(review): the offset is -min rather than -min * (255 / range), reproducing the
    // original mapping; for min != 0 the output is not exactly normalized -- confirm intent.
    depth_mat.convertTo( adjMap, CV_8UC1, 255 / range, -min );
    cv::applyColorMap(255 - adjMap, cm_img0, cv::COLORMAP_JET);
    return cm_img0;
}
template <typename T = float>
inline cv::Mat img_merge_with_depth(cv::Mat raw_img, cv::Mat depth_mat, float raw_ratio = 0.75)
{
    // Overlay a colormapped depth image on top of the raw image. Pixels whose depth
    // value is zero are left untouched; the rest are alpha-blended with weight raw_ratio
    // for the raw image and (1 - raw_ratio) for the depth colormap.
    cv::Mat blended = raw_img.clone();
    cv::Mat depth_color = colormap_depth_img(depth_mat);
    const float depth_ratio = 1 - raw_ratio;
    for (int row = 0; row < depth_mat.rows; row++)
    {
        for (int col = 0; col < depth_mat.cols; col++)
        {
            if (depth_mat.at<T>(row, col) != 0)
            {
                blended.at<cv::Vec3b>(row, col) = blended.at<cv::Vec3b>(row, col) * raw_ratio + depth_color.at<cv::Vec3b>(row, col) * depth_ratio;
            }
        }
    }
    return blended;
}
template <typename T>
inline void reduce_vector(std::vector<T> &v, std::vector<uchar> status)
{
int j = 0;
for (int i = 0; i < int(v.size()); i++)
if (status[i])
v[j++] = v[i];
v.resize(j);
};
const int MAX_DS_LAY = 7;
// template<typename T>
// static std::atomic<T> &std::atomic<T>::operator=( const std::atomic<T> &other)
// {
// if ( this == &other )
// return *this;
// T temp_val = other;
// if(this == nullptr)
// {
// this = std::atomic<T>(temp_val);
// }
// else
// {
// *this = temp_val;
// }
// return *this;
// }
// A single camera frame: intrinsics, world<->camera poses, exposure, the image buffers
// (raw / processed / gray) and the photometric-error statistics accumulated while map
// points are rendered into it. Serialization stores only the lightweight metadata.
struct Image_frame
{
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW
    using data_type = double;
    using PointType = pcl::PointXYZI;
    int m_if_have_set_intrinsic = 0;
    Eigen::Matrix3d m_cam_K;
    double fx, fy, cx, cy;
    eigen_q m_pose_w2c_q = eigen_q::Identity();
    vec_3 m_pose_w2c_t = vec_3(0, 0, 0);
    eigen_q m_pose_c2w_q = eigen_q::Identity();
    vec_3 m_pose_c2w_t = vec_3(0, 0, 0);
    vec_3 m_image_norm = vec_3(1, 0, 0);
    int m_if_have_set_pose = 0;
    double m_timestamp = 0.0;
    double m_image_inverse_exposure_time = 0.01;
    int m_have_solved_pnp = 0;
    // NOTE(review): these two are written concurrently by the rendering workers (see
    // thread_render_pts_in_voxel); the atomic versions below were disabled at some point.
    // std::atomic<double> m_acc_photometric_error = {0.0};
    // std::atomic<long> m_acc_render_count = {0};
    double m_acc_photometric_error = 0;
    long m_acc_render_count = 0;
    eigen_q m_pnp_pose_w2c_q = eigen_q::Identity();
    vec_3 m_pnp_pose_w2c_t = vec_3(0,0,0);
    vec_3 m_pose_t;
    mat_3_3 m_pose_w2c_R;
    int m_img_rows = 0;
    int m_img_cols = 0;
    int m_frame_idx = 0;
    Eigen::Matrix<double, 2, 1> m_gama_para;
    double m_downsample_step[10] = {1.0, 0.5, 0.25, 1.0/8.0, 1.0/16.0, 1.0/24.0, 1.0/32.0, 1.0/64.0 };
    cv::Mat m_img;
    cv::Mat m_raw_img;
    cv::Mat m_img_gray;
    double m_fov_margin = 0.005;
    Image_frame &operator=( const Image_frame &other )
    {
        // Guard self assignment
        if ( this == &other )
            return *this;
        double acc_photometric_error = other.m_acc_photometric_error;
        this->m_acc_photometric_error = acc_photometric_error;
        // Fix: this previously read this->m_acc_render_count instead of other's, so the
        // render counter was never actually copied on assignment.
        long acc_render_count = other.m_acc_render_count;
        this->m_acc_render_count = acc_render_count;
        this->fx = other.fx;
        this->fy = other.fy;
        this->cx = other.cx;
        this->cy = other.cy;
        this->m_if_have_set_intrinsic = other.m_if_have_set_intrinsic;
        this->m_pose_w2c_q = other.m_pose_w2c_q;
        this->m_pose_w2c_t = other.m_pose_w2c_t;
        this->m_pose_c2w_q = other.m_pose_c2w_q;
        this->m_pose_c2w_t = other.m_pose_c2w_t;
        this->m_if_have_set_pose = other.m_if_have_set_pose;
        this->m_timestamp = other.m_timestamp;
        this->m_image_inverse_exposure_time = other.m_image_inverse_exposure_time;
        this->m_have_solved_pnp = other.m_have_solved_pnp;
        this->m_pose_t = other.m_pose_t;
        this->m_pose_w2c_R = other.m_pose_w2c_R;
        this->m_img_rows = other.m_img_rows;
        this->m_img_cols = other.m_img_cols;
        this->m_frame_idx = other.m_frame_idx;
        this->m_gama_para = other.m_gama_para;
        this->m_img = other.m_img;
        this->m_raw_img = other.m_raw_img;
        this->m_img_gray = other.m_img_gray;
        this->m_fov_margin = other.m_fov_margin;
        // NOTE(review): m_cam_K, m_image_norm, m_pnp_pose_w2c_q/t and m_downsample_step
        // are not copied here -- confirm whether that omission is intentional.
        return *this;
    }
    Image_frame();
    ~Image_frame();
    void refresh_pose_for_projection();
    void set_pose(const eigen_q & pose_w2c_q, const vec_3 & pose_w2c_t );
    int set_frame_idx(int frame_idx);
    void set_intrinsic(Eigen::Matrix3d & camera_K);
    Image_frame(Eigen::Matrix3d &camera_K);
    void init_cubic_interpolation();
    void inverse_pose();
    void release_image();
    bool project_3d_to_2d( const pcl::PointXYZI & in_pt, Eigen::Matrix3d & cam_K, double &u, double &v, const double & scale = 1.0);
    bool if_2d_points_available(const double &u, const double &v, const double &scale = 1.0, double fov_mar = -1.0);
    vec_3 get_rgb(double &u, double v, int layer = 0, vec_3 *rgb_dx = nullptr, vec_3 *rgb_dy = nullptr);
    double get_grey_color(double & u ,double & v, int layer= 0 );
    bool get_rgb( const double & u, const double & v, int & r, int & g, int & b );
    void display_pose();
    void image_equalize(cv::Mat &img, int amp = 10.0);
    void image_equalize();
    bool project_3d_point_in_this_img(const pcl::PointXYZI & in_pt, double &u, double &v, pcl::PointXYZRGB * rgb_pt = nullptr, double intrinsic_scale = 1.0);
    bool project_3d_point_in_this_img(const vec_3 & in_pt, double &u, double &v, pcl::PointXYZRGB *rgb_pt = nullptr, double intrinsic_scale = 1.0);
    void dump_pose_and_image( const std::string name_prefix );
    int load_pose_and_image( const std::string name_prefix, const double image_scale = 1.0, int if_load_image = 1 );
  private:
    friend class boost::serialization::access;
    // Serialize only the cheap metadata (intrinsics, timestamp, size, index, c2w pose);
    // image buffers are handled separately by dump/load_pose_and_image.
    template < typename Archive >
    void serialize( Archive &ar, const unsigned int version )
    {
        ar &fx;
        ar &fy;
        ar &cx;
        ar &cy;
        ar &m_if_have_set_intrinsic;
        ar &m_timestamp;
        ar &m_img_rows;
        ar &m_img_cols;
        ar &m_frame_idx;
        ar &m_pose_c2w_q;
        ar &m_pose_c2w_t;
    }
};
inline std::shared_ptr< Image_frame > soft_copy_image_frame( const std::shared_ptr< Image_frame > &img_ptr )
{
std::shared_ptr< Image_frame > res_img_ptr = std::make_shared<Image_frame>();
res_img_ptr->fx = img_ptr->fx;
res_img_ptr->fy = img_ptr->fy;
res_img_ptr->cx = img_ptr->cx;
res_img_ptr->cy = img_ptr->cy;
res_img_ptr->m_if_have_set_intrinsic = img_ptr->m_if_have_set_intrinsic;
res_img_ptr->m_timestamp = img_ptr->m_timestamp;
res_img_ptr->m_img_rows = img_ptr->m_img_rows;
res_img_ptr->m_img_cols = img_ptr->m_img_cols;
res_img_ptr->m_frame_idx = img_ptr->m_frame_idx;
res_img_ptr->m_pose_c2w_q = img_ptr->m_pose_c2w_q;
res_img_ptr->m_pose_c2w_t = img_ptr->m_pose_c2w_t;
return res_img_ptr;
} | Unknown |
3D | hku-mars/ImMesh | src/meshing/r3live/triangle.hpp | .hpp | 14,546 | 430 | #pragma once
#include <set>
#include <unordered_set>
#include "tools_kd_hash.hpp"
#include "pointcloud_rgbd.hpp"
// class RGB_pts;
// class RGB_Voxel;
class Global_map;
// A mesh facet referencing three global point indices, stored in canonical
// (ascending) order so every triangle has a unique representation.
class Triangle
{
  public:
    int m_tri_pts_id[ 3 ] = { 0 };
    vec_3 m_normal = vec_3( 0, 0, 0 );
    int m_projected_texture_id = 0;
    vec_2f m_texture_coor[ 3 ];
    double m_vis_score = 0;
    float last_texture_distance = 3e8;
    int m_index_flip = 0;

    // Re-establish the canonical ascending order of the vertex indices.
    void sort_id() { std::sort( std::begin( m_tri_pts_id ), std::end( m_tri_pts_id ) ); }

    Triangle() = default;
    ~Triangle() = default;
    Triangle( int id_0, int id_1, int id_2 ) : m_tri_pts_id{ id_0, id_1, id_2 } { sort_id(); }
};
using Triangle_ptr = std::shared_ptr< Triangle >;
// using Triangle_set = std::unordered_set<Triangle_ptr>;
using Triangle_set = std::set< Triangle_ptr >;
// A mutex-guarded set of triangles shared between the meshing and rendering threads.
// m_if_required_synchronized is raised on every mutation so consumers know to re-fetch.
// Internal members use RAII lock_guard (exception safe); the public lock()/unlock()
// pair is kept for callers that need to hold the lock across several operations.
struct Sync_triangle_set
{
    Triangle_set m_triangle_set;
    std::shared_ptr< std::mutex > m_mutex_ptr = nullptr;
    int m_frame_idx = 0;
    bool m_if_required_synchronized = true;
    Sync_triangle_set()
    {
        m_mutex_ptr = std::make_shared< std::mutex >();
    }
    void lock()
    {
        m_mutex_ptr->lock();
    }
    void unlock()
    {
        m_mutex_ptr->unlock();
    }
    void insert( const Triangle_ptr& tri_ptr )
    {
        std::lock_guard< std::mutex > guard( *m_mutex_ptr );
        m_triangle_set.insert( tri_ptr );
        m_if_required_synchronized = true;
    }
    void erase( const Triangle_ptr& tri_ptr )
    {
        std::lock_guard< std::mutex > guard( *m_mutex_ptr );
        auto it1 = m_triangle_set.find( tri_ptr );
        if ( it1 != m_triangle_set.end() )
        {
            m_triangle_set.erase( it1 );
        }
        m_if_required_synchronized = true;
    }
    void clear()
    {
        std::lock_guard< std::mutex > guard( *m_mutex_ptr );
        m_triangle_set.clear();
        m_if_required_synchronized = true;
    }
    int get_triangle_set_size()
    {
        std::lock_guard< std::mutex > guard( *m_mutex_ptr );
        return ( int ) m_triangle_set.size();
    }
    // Unsynchronized access to the underlying set; caller is responsible for locking.
    Triangle_set* get_triangle_set_ptr()
    {
        return &m_triangle_set;
    }
    // Copy out the set under the lock; optionally mark it as synchronized.
    void get_triangle_set( Triangle_set& ret_set, bool reset_status = false )
    {
        std::lock_guard< std::mutex > guard( *m_mutex_ptr );
        ret_set = m_triangle_set;
        if ( reset_status )
        {
            m_if_required_synchronized = false;
        }
    }
};
// Central bookkeeping for the reconstructed mesh triangles:
//  - m_triangle_hash:          canonical store, keyed by the sorted vertex-id triple
//  - m_triangle_set_in_region: triangles bucketed by spatial region (cell size m_region_size)
//  - m_map_pt_triangle:        point index -> triangles adjacent to that point
//  - m_map_edge_triangle:      (when m_enable_map_edge_triangle) edge -> triangles sharing it,
//                              with edges shared by > 2 triangles recorded as conflicted
// Mutating operations are serialized with m_mutex_triangle_hash.
class Triangle_manager
{
  public:
    Global_map* m_pointcloud_map = nullptr;
    Hash_map_3d< int, Triangle_ptr > m_triangle_hash;
    double m_region_size = 10.0;
    std::vector< Sync_triangle_set* > m_triangle_set_vector;
    // Hash_map_3d< int, Triangle_set > m_triangle_set_in_region;
    Hash_map_3d< int, Sync_triangle_set > m_triangle_set_in_region;
    std::unordered_map< int, Triangle_set > m_map_pt_triangle;
    // Triangle_set m_triangle_list;
    std::mutex m_mutex_triangle_hash;
    Hash_map_2d< int, Triangle_set > m_map_edge_triangle;
    Hash_map_2d< int, Triangle_set > m_map_edge_triangle_conflicted;
    int m_enable_map_edge_triangle = 0;
    int m_newest_rec_frame_idx = 0;
    // Drop every triangle and every auxiliary index (regional sets are cleared in place).
    void clear()
    {
        m_triangle_hash.clear();
        m_map_pt_triangle.clear();
        // m_triangle_list.clear();
        for ( auto& triangle_set : m_triangle_set_in_region.m_map_3d_hash_map )
        {
            triangle_set.second.clear();
        }
        m_map_edge_triangle.clear();
        m_map_edge_triangle_conflicted.clear();
    }
    Triangle_manager()
    {
        // Pre-size the hash containers for large maps to limit rehashing.
        m_triangle_hash.m_map_3d_hash_map.reserve( 1e7 );
        m_map_pt_triangle.reserve( 1e7 );
    };
    ~Triangle_manager() = default;
    vec_3 get_triangle_center( const Triangle_ptr& tri_ptr );
    void insert_triangle_to_list( const Triangle_ptr& tri_ptr, const int& frame_idx = 0 );
    void erase_triangle_from_list( const Triangle_ptr& tri_ptr, const int& frame_idx = 0 );
    int get_all_triangle_list( std::vector< Triangle_set >& triangle_list, std::mutex* mutex = nullptr, int sleep_us_each_query = 10 );
    int get_triangle_list_size();
    // void
    // Remove one triangle from the regional lists, the per-point adjacency and (if
    // enabled) the per-edge maps. Caller must hold m_mutex_triangle_hash (see
    // remove_triangle_list, which locks before invoking this).
    void erase_triangle( const Triangle_ptr& tri_ptr )
    {
        int idx[ 3 ];
        idx[ 0 ] = tri_ptr->m_tri_pts_id[ 0 ];
        idx[ 1 ] = tri_ptr->m_tri_pts_id[ 1 ];
        idx[ 2 ] = tri_ptr->m_tri_pts_id[ 2 ];
        // printf_line;
        // erase triangle in list
        erase_triangle_from_list( tri_ptr, m_newest_rec_frame_idx );
        // auto it1 = m_triangle_list.find( tri_ptr );
        // if ( it1 != m_triangle_list.end() )
        // {
        //     m_triangle_list.erase( m_triangle_list.find( tri_ptr ) );
        // }
        // Detach the triangle from all three of its vertices.
        for ( int tri_idx = 0; tri_idx < 3; tri_idx++ )
        {
            auto it3 = m_map_pt_triangle[ idx[ tri_idx ] ].find( tri_ptr );
            if ( it3 != m_map_pt_triangle[ idx[ tri_idx ] ].end() )
            {
                m_map_pt_triangle[ idx[ tri_idx ] ].erase( it3 );
            }
        }
        if ( m_enable_map_edge_triangle )
        {
            // printf_line;
            // erase triangle in edge-triangle list
            auto it2 = m_map_edge_triangle.m_map_2d_hash_map[ idx[ 0 ] ][ idx[ 1 ] ].find( tri_ptr );
            if ( it2 != m_map_edge_triangle.m_map_2d_hash_map[ idx[ 0 ] ][ idx[ 1 ] ].end() )
            {
                m_map_edge_triangle.m_map_2d_hash_map[ idx[ 0 ] ][ idx[ 1 ] ].erase( it2 );
            }
            it2 = m_map_edge_triangle.m_map_2d_hash_map[ idx[ 0 ] ][ idx[ 2 ] ].find( tri_ptr );
            if ( it2 != m_map_edge_triangle.m_map_2d_hash_map[ idx[ 0 ] ][ idx[ 2 ] ].end() )
            {
                m_map_edge_triangle.m_map_2d_hash_map[ idx[ 0 ] ][ idx[ 2 ] ].erase( it2 );
            }
            it2 = m_map_edge_triangle.m_map_2d_hash_map[ idx[ 1 ] ][ idx[ 2 ] ].find( tri_ptr );
            if ( it2 != m_map_edge_triangle.m_map_2d_hash_map[ idx[ 1 ] ][ idx[ 2 ] ].end() )
            {
                m_map_edge_triangle.m_map_2d_hash_map[ idx[ 1 ] ][ idx[ 2 ] ].erase( it2 );
            }
        }
        // printf_line;
    }
    // Remove a batch of triangles under the manager lock, recording the newest frame index.
    void remove_triangle_list( const Triangle_set& triangle_list, const int frame_idx = 0 )
    {
        m_mutex_triangle_hash.lock();
        m_newest_rec_frame_idx = std::max( frame_idx, m_newest_rec_frame_idx );
        for ( auto tri_ptr : triangle_list )
        {
            erase_triangle( tri_ptr );
        }
        m_mutex_triangle_hash.unlock();
    }
    // Collect every triangle whose three vertices all lie inside set_index.
    template < typename T >
    Triangle_set find_relative_triangulation_combination( std::set< T >& set_index )
    {
        // std::set< T >::iterator it;
        Triangle_set triangle_ptr_list;
        // m_mutex_triangle_hash.lock();
        for ( typename std::set< T >::iterator it = set_index.begin(); it != set_index.end(); it++ )
        {
            if ( m_map_pt_triangle.find( *it ) != m_map_pt_triangle.end() )
            {
                for ( Triangle_set::iterator tri_it = m_map_pt_triangle[ *it ].begin(); tri_it != m_map_pt_triangle[ *it ].end(); tri_it++ )
                {
                    if ( ( set_index.find( ( *tri_it )->m_tri_pts_id[ 0 ] ) != set_index.end() ) &&
                         ( set_index.find( ( *tri_it )->m_tri_pts_id[ 1 ] ) != set_index.end() ) &&
                         ( set_index.find( ( *tri_it )->m_tri_pts_id[ 2 ] ) != set_index.end() ) )
                    {
                        triangle_ptr_list.insert( *tri_it );
                    }
                }
            }
        }
        // m_mutex_triangle_hash.unlock();
        return triangle_ptr_list;
    }
    // Remove every triangle fully contained in the given vertex-index set.
    template < typename T >
    void remove_relative_triangulation_combination( std::set< T >& set_index )
    {
        // std::set< T >::iterator it;
        Triangle_set triangle_ptr_list = find_relative_triangulation_combination( set_index );
        // cout << ANSI_COLOR_YELLOW_BOLD << "In conflict triangle size = " << triangle_ptr_list.size() << ANSI_COLOR_RESET << endl;
        remove_triangle_list( triangle_ptr_list );
    }
    // Convenience overload: same as above but taking the indices as a vector.
    template < typename T >
    void remove_relative_triangulation_combination( std::vector< T >& vector_index )
    {
        std::set< T > index_set;
        for ( auto p : vector_index )
        {
            index_set.insert( p );
        }
        remove_relative_triangulation_combination( index_set );
    }
    // Collect every triangle touching ANY vertex in inner_hull_indices (at least one vertex,
    // unlike find_relative_triangulation_combination which requires all three).
    template < typename T >
    Triangle_set get_inner_hull_triangle_list( std::set< T >& inner_hull_indices )
    {
        Triangle_set triangle_list;
        for ( auto p : inner_hull_indices )
        {
            if ( m_map_pt_triangle.find( p ) != m_map_pt_triangle.end() )
            {
                for ( auto pp : m_map_pt_triangle[ p ] )
                {
                    triangle_list.insert( pp );
                }
            }
        }
        return triangle_list;
    }
    // Remove every triangle touching any vertex of the inner hull.
    template < typename T >
    void remove_inner_hull_triangle( std::set< T >& inner_hull_indices )
    {
        Triangle_set triangle_list = get_inner_hull_triangle_list( inner_hull_indices );
        remove_triangle_list( triangle_list );
    }
    // True if a triangle with these three vertex ids (any order) is already stored.
    bool if_triangle_exist( int& id_0, int& id_1, int& id_2 )
    {
        int ids[ 3 ];
        ids[ 0 ] = id_0;
        ids[ 1 ] = id_1;
        ids[ 2 ] = id_2;
        std::sort( std::begin( ids ), std::end( ids ) );
        if ( m_triangle_hash.if_exist( ids[ 0 ], ids[ 1 ], ids[ 2 ] ) )
        {
            // This triangle exist
            return true;
        }
        else
        {
            return false;
        }
    }
    // Look up a triangle by its three vertex ids (any order); nullptr when absent.
    Triangle_ptr find_triangle( int id_0, int id_1, int id_2 )
    {
        int ids[ 3 ];
        ids[ 0 ] = id_0;
        ids[ 1 ] = id_1;
        ids[ 2 ] = id_2;
        std::sort( std::begin( ids ), std::end( ids ) );
        if ( m_triangle_hash.if_exist( ids[ 0 ], ids[ 1 ], ids[ 2 ] ) )
        {
            // This triangle exist
            // return m_triangle_hash.m_map_3d_hash_map[ ids[ 0 ] ][ ids[ 1 ] ][ ids[ 2 ] ];
            return *m_triangle_hash.get_data( ids[ 0 ], ids[ 1 ], ids[ 2 ] );
        }
        else
        {
            return nullptr;
        }
    }
    // Insert (or fetch an existing) triangle for the given vertex ids. With
    // build_triangle_map set, also registers per-point and per-edge adjacency and records
    // any edge now shared by more than 2 triangles in the conflicted-edge map.
    Triangle_ptr insert_triangle( int id_0, int id_1, int id_2, int build_triangle_map = false, const int & frame_idx = 0 )
    {
        int ids[ 3 ];
        ids[ 0 ] = id_0;
        ids[ 1 ] = id_1;
        ids[ 2 ] = id_2;
        std::sort( std::begin( ids ), std::end( ids ) );
        Triangle_ptr triangle_ptr;
        if ( m_triangle_hash.if_exist( ids[ 0 ], ids[ 1 ], ids[ 2 ] ) )
        {
            // This triangle exist
            // triangle_ptr = m_triangle_hash.m_map_3d_hash_map[ ids[ 0 ] ][ ids[ 1 ] ][ ids[ 2 ] ];
            triangle_ptr = *m_triangle_hash.get_data( ids[ 0 ], ids[ 1 ], ids[ 2 ] );
        }
        else
        {
            // This triangle is not exist.
            // Step 1: new a triangle
            triangle_ptr = std::make_shared< Triangle >( ids[ 0 ], ids[ 1 ], ids[ 2 ] );
            triangle_ptr->m_vis_score = 1;
            m_mutex_triangle_hash.lock();
            m_triangle_hash.insert( ids[ 0 ], ids[ 1 ], ids[ 2 ], triangle_ptr );
            m_mutex_triangle_hash.unlock();
            // return m_map_pt_triangle.size();
            // return m_triangle_list.size();
            // return triangle_ptr;
        }
        m_mutex_triangle_hash.lock();
        // m_triangle_list.insert( triangle_ptr );
        insert_triangle_to_list( triangle_ptr, frame_idx );
        // ins
        if ( build_triangle_map )
        {
            // Step 2: add this triangle to points list:
            m_map_pt_triangle[ ids[ 0 ] ].insert( triangle_ptr );
            m_map_pt_triangle[ ids[ 1 ] ].insert( triangle_ptr );
            m_map_pt_triangle[ ids[ 2 ] ].insert( triangle_ptr );
            if ( m_enable_map_edge_triangle )
            {
                m_map_edge_triangle.m_map_2d_hash_map[ ids[ 0 ] ][ ids[ 1 ] ].insert( triangle_ptr );
                m_map_edge_triangle.m_map_2d_hash_map[ ids[ 0 ] ][ ids[ 2 ] ].insert( triangle_ptr );
                m_map_edge_triangle.m_map_2d_hash_map[ ids[ 1 ] ][ ids[ 2 ] ].insert( triangle_ptr );
                // Find conflict triangle
                if ( m_map_edge_triangle.m_map_2d_hash_map[ ids[ 0 ] ][ ids[ 1 ] ].size() > 2 )
                {
                    m_map_edge_triangle_conflicted.m_map_2d_hash_map[ ids[ 0 ] ][ ids[ 1 ] ] =
                        m_map_edge_triangle.m_map_2d_hash_map[ ids[ 0 ] ][ ids[ 1 ] ];
                }
                if ( m_map_edge_triangle.m_map_2d_hash_map[ ids[ 0 ] ][ ids[ 2 ] ].size() > 2 )
                {
                    m_map_edge_triangle_conflicted.m_map_2d_hash_map[ ids[ 0 ] ][ ids[ 2 ] ] =
                        m_map_edge_triangle.m_map_2d_hash_map[ ids[ 0 ] ][ ids[ 2 ] ];
                }
                if ( m_map_edge_triangle.m_map_2d_hash_map[ ids[ 1 ] ][ ids[ 2 ] ].size() > 2 )
                {
                    m_map_edge_triangle_conflicted.m_map_2d_hash_map[ ids[ 1 ] ][ ids[ 2 ] ] =
                        m_map_edge_triangle.m_map_2d_hash_map[ ids[ 1 ] ][ ids[ 2 ] ];
                }
            }
        }
        m_mutex_triangle_hash.unlock();
        return triangle_ptr;
    }
    // Gather the vertex indices of every triangle that participates in an edge conflict.
    std::set< int > get_conflict_triangle_pts()
    {
        std::set< int > conflict_triangle_pts;
        if ( m_enable_map_edge_triangle )
        {
            for ( auto it : m_map_edge_triangle_conflicted.m_map_2d_hash_map )
            {
                for ( auto it_it : it.second )
                {
                    Triangle_set triangle_list = it_it.second;
                    for ( auto tri : triangle_list )
                    {
                        // g_triangle_manager.erase_triangle( tri );
                        // conflict_triangle++;
                        conflict_triangle_pts.insert( tri->m_tri_pts_id[ 0 ] );
                        conflict_triangle_pts.insert( tri->m_tri_pts_id[ 1 ] );
                        conflict_triangle_pts.insert( tri->m_tri_pts_id[ 2 ] );
                    }
                    // printf_line;
                }
            }
        }
        return conflict_triangle_pts;
    }
    // Forget previously detected conflicts (does not remove the triangles themselves).
    void clear_conflicted_triangles_list()
    {
        if ( m_enable_map_edge_triangle )
        {
            m_map_edge_triangle_conflicted.clear();
        }
    }
};
| Unknown |
3D | hku-mars/ImMesh | src/meshing/r3live/image_frame.cpp | .cpp | 13,935 | 402 | /*
This code is the implementation of our paper "R3LIVE: A Robust, Real-time, RGB-colored,
LiDAR-Inertial-Visual tightly-coupled state Estimation and mapping package".
Author: Jiarong Lin < ziv.lin.ljr@gmail.com >
If you use any code of this repo in your academic research, please cite at least
one of our papers:
[1] Lin, Jiarong, and Fu Zhang. "R3LIVE: A Robust, Real-time, RGB-colored,
LiDAR-Inertial-Visual tightly-coupled state Estimation and mapping package."
[2] Xu, Wei, et al. "Fast-lio2: Fast direct lidar-inertial odometry."
[3] Lin, Jiarong, et al. "R2LIVE: A Robust, Real-time, LiDAR-Inertial-Visual
tightly-coupled state Estimator and mapping."
[4] Xu, Wei, and Fu Zhang. "Fast-lio: A fast, robust lidar-inertial odometry
package by tightly-coupled iterated kalman filter."
[5] Cai, Yixi, Wei Xu, and Fu Zhang. "ikd-Tree: An Incremental KD Tree for
Robotic Applications."
[6] Lin, Jiarong, and Fu Zhang. "Loam-livox: A fast, robust, high-precision
LiDAR odometry and mapping package for LiDARs of small FoV."
For commercial use, please contact me < ziv.lin.ljr@gmail.com > and
Dr. Fu Zhang < fuzhang@hku.hk >.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
#include "image_frame.hpp"
// Default constructor: identity world-to-camera pose and a neutral
// photometric gamma model (gain = 1, bias = 0).
Image_frame::Image_frame()
{
    m_gama_para( 0 ) = 1.0; // multiplicative gain of the photometric model
    m_gama_para( 1 ) = 0.0; // additive bias
    // m_acc_photometric_error = 0;
    // m_acc_render_count = 0;
    m_pose_w2c_q.setIdentity();
    m_pose_w2c_t.setZero();
};
// Destructor: release all cv::Mat buffers held by this frame.
Image_frame::~Image_frame()
{
    release_image();
}
// Free the raw, processed and grayscale image buffers. cv::Mat::release
// decrements the refcount; memory is freed once no other Mat shares it.
void Image_frame::release_image()
{
    m_raw_img.release();
    m_img.release();
    m_img_gray.release();
}
// ANCHOR - Image_frame::refresh_pose_for_projection()
// Derive the cached inverse-pose quantities and the viewing-direction normal
// from the stored world-to-camera pose, then mark the pose as ready for
// projection (checked by project_3d_to_2d).
// NOTE(review): despite the "c2w" names, these members hold the inverse of
// the w2c pose and are the quantities that map world points into the camera
// frame in project_3d_to_2d — the naming convention looks swapped; confirm
// against callers before renaming anything.
void Image_frame::refresh_pose_for_projection()
{
    m_pose_c2w_q = m_pose_w2c_q.inverse();
    m_pose_c2w_t = -(m_pose_w2c_q.inverse() * m_pose_w2c_t);
    m_image_norm = m_pose_w2c_q * vec_3(0,0,1); // camera optical axis expressed in world frame
    m_if_have_set_pose = 1;
}
// Store a new world-to-camera pose and immediately refresh the cached
// projection quantities.
void Image_frame::set_pose(const eigen_q &pose_w2c_q, const vec_3 &pose_w2c_t)
{
    m_pose_w2c_t = pose_w2c_t;
    m_pose_w2c_q = pose_w2c_q;
    refresh_pose_for_projection();
}
// Assign this frame's index and echo the stored value back to the caller.
int Image_frame::set_frame_idx(int frame_idx)
{
    return ( m_frame_idx = frame_idx );
}
// Cache the pinhole intrinsic matrix and unpack the focal lengths (fx, fy)
// and the principal point (cx, cy). Also resets the photometric gamma model
// to neutral (gain 1, bias 0) and flags the intrinsics as set.
void Image_frame::set_intrinsic(Eigen::Matrix3d &camera_K)
{
    m_cam_K = camera_K;
    m_if_have_set_intrinsic = 1;
    fx = camera_K(0, 0);
    fy = camera_K(1, 1);
    cx = camera_K(0, 2);
    cy = camera_K(1, 2);
    m_gama_para(0) = 1.0;
    m_gama_para(1) = 0.0;
}
// Construct with known camera intrinsics; the pose starts at identity and
// must be set later via set_pose().
Image_frame::Image_frame(Eigen::Matrix3d &camera_K)
{
    m_pose_w2c_q.setIdentity();
    m_pose_w2c_t.setZero();
    set_intrinsic(camera_K);
};
// Prepare the frame for sub-pixel color/intensity lookups: cache the
// rotation matrix and image dimensions, and build the grayscale image.
// NOTE(review): cv::COLOR_RGB2GRAY is used although cv::imread yields BGR;
// this only alters the R/B weighting of the gray value — confirm whether
// m_img is stored RGB or BGR before changing the conversion code.
void Image_frame::init_cubic_interpolation()
{
    m_pose_w2c_R = m_pose_w2c_q.toRotationMatrix();
    m_img_rows = m_img.rows;
    m_img_cols = m_img.cols;
#if (CV_MAJOR_VERSION >= 4)
    cv::cvtColor(m_img, m_img_gray, cv::COLOR_RGB2GRAY);
#else
    cv::cvtColor(m_img, m_img_gray, CV_RGB2GRAY);
#endif
}
// Invert the stored pose in place (w2c becomes c2w and vice versa).
// Statement order matters: the translation must be inverted with the OLD
// quaternion before the quaternion itself is replaced.
void Image_frame::inverse_pose()
{
    m_pose_w2c_t = -(m_pose_w2c_q.inverse() * m_pose_w2c_t); // uses the old rotation
    m_pose_w2c_q = m_pose_w2c_q.inverse();
    m_pose_w2c_R = m_pose_w2c_q.toRotationMatrix();
    // Re-derive the cached "c2w" quantities from the now-inverted pose.
    m_pose_c2w_q = m_pose_w2c_q.inverse();
    m_pose_c2w_t = -(m_pose_w2c_q.inverse() * m_pose_w2c_t);
}
/**
 * Project a 3D world point into this frame's pixel coordinates.
 * @param in_pt  Point in world coordinates.
 * @param cam_K  Unused (kept for interface compatibility); the member
 *               intrinsics fx/fy/cx/cy from set_intrinsic() are used instead.
 * @param u,v    Output pixel coordinates, multiplied by `scale`.
 * @param scale  Intrinsic scaling factor (e.g. for image pyramids).
 * @return true when the frame is initialized and the point lies in front of
 *         the camera; false otherwise.
 */
bool Image_frame::project_3d_to_2d(const pcl::PointXYZI & in_pt, Eigen::Matrix3d &cam_K, double &u, double &v, const double &scale)
{
    // The original code parked the whole process in an empty busy-wait loop
    // (`while (1) {}`) on these misuse paths — undefined behavior per C++11
    // (side-effect-free infinite loop) and a guaranteed hang. Report the
    // error and fail the projection instead, consistent with the bool
    // return contract that all callers already check.
    if (!m_if_have_set_pose)
    {
        cout << ANSI_COLOR_RED_BOLD << "You have not set the camera pose yet!" << ANSI_COLOR_RESET << endl;
        return false;
    }
    if (m_if_have_set_intrinsic == 0)
    {
        cout << ANSI_COLOR_RED_BOLD << "You have not set the intrinsic yet!!!" << ANSI_COLOR_RESET << endl;
        return false;
    }
    vec_3 pt_w(in_pt.x, in_pt.y, in_pt.z), pt_cam;
    // Transform into the camera frame using the cached inverse pose.
    pt_cam = (m_pose_c2w_q * pt_w + m_pose_c2w_t);
    if (pt_cam(2) < 0.001) // behind (or nearly on) the image plane
    {
        return false;
    }
    // Standard pinhole projection, optionally rescaled.
    u = (pt_cam(0) * fx / pt_cam(2) + cx) * scale;
    v = (pt_cam(1) * fy / pt_cam(2) + cy) * scale;
    return true;
}
bool Image_frame::if_2d_points_available(const double &u, const double &v, const double &scale, double fov_mar)
{
double used_fov_margin = m_fov_margin;
if (fov_mar > 0.0)
{
used_fov_margin = fov_mar;
}
if ((u / scale >= (used_fov_margin * m_img_cols + 1)) && (std::ceil(u / scale) < ((1 - used_fov_margin) * m_img_cols)) &&
(v / scale >= (used_fov_margin * m_img_rows + 1)) && (std::ceil(v / scale) < ((1 - used_fov_margin) * m_img_rows)))
{
return true;
}
else
{
return false;
}
}
/**
 * Bilinearly interpolate the value of `mat` at fractional position
 * (row, col). For pyramid layers > 0 the four sample corners are pushed
 * apart by 2^(layer-1) pixels to approximate sampling a coarser level.
 * No bounds checking is performed: the caller must guarantee the (possibly
 * biased) corner coordinates lie inside the image.
 */
template<typename T>
inline T getSubPixel(cv::Mat & mat, const double & row, const double & col, double pyramid_layer = 0)
{
    int floor_row = floor(row);
    int floor_col = floor(col);
    double frac_row = row - floor_row;
    double frac_col = col - floor_col;
    int ceil_row = floor_row + 1;
    int ceil_col = floor_col + 1;
    if (pyramid_layer != 0)
    {
        int pos_bias = pow(2, pyramid_layer - 1);
        floor_row -= pos_bias;
        floor_col -= pos_bias;
        ceil_row += pos_bias;
        // BUGFIX: ceil_row was incremented twice and ceil_col never widened,
        // skewing every pyramid-layer lookup toward the top-left.
        ceil_col += pos_bias;
    }
    return ((1.0 - frac_row) * (1.0 - frac_col) * (T)mat.ptr<T>(floor_row)[floor_col]) +
           (frac_row * (1.0 - frac_col) * (T)mat.ptr<T>(ceil_row)[floor_col]) +
           ((1.0 - frac_row) * frac_col * (T)mat.ptr<T>(floor_row)[ceil_col]) +
           (frac_row * frac_col * (T)mat.ptr<T>(ceil_row)[ceil_col]);
}
// Sample the (sub-pixel) RGB value at image position (u, v) on the given
// pyramid layer. When rgb_dx / rgb_dy are non-null, also estimate the
// horizontal / vertical color gradient by a symmetric finite difference
// averaged over offsets 1..ssd-1 (pixel_dif accumulates the total baseline
// so the result is a per-pixel gradient).
vec_3 Image_frame::get_rgb(double &u, double v, int layer, vec_3 *rgb_dx , vec_3 *rgb_dy )
{
    const int ssd = 5; // half-window: offsets 1..4 are averaged
    cv::Vec3b rgb = getSubPixel< cv::Vec3b >( m_img, v, u, layer );
    if ( rgb_dx != nullptr )
    {
        cv::Vec3f rgb_left(0,0,0) , rgb_right(0,0,0);
        float pixel_dif = 0;
        for ( int bias_idx = 1; bias_idx < ssd; bias_idx++ )
        {
            rgb_left += getSubPixel< cv::Vec3b >( m_img, v, u - bias_idx, layer );
            rgb_right += getSubPixel< cv::Vec3b >( m_img, v, u + bias_idx, layer );
            pixel_dif += 2*bias_idx; // total horizontal baseline of this pair
        }
        // printf("[%.5f, %.5f, %.5f] V.S. [%.5f, %.5f, %.5f] \r\n", rgb_left(0), rgb_left(1), rgb_left(2), rgb_right(0), rgb_right(1), rgb_right(2) );
        cv::Vec3f cv_rgb_dx = rgb_right - rgb_left;
        *rgb_dx = vec_3( cv_rgb_dx( 0 ), cv_rgb_dx( 1 ), cv_rgb_dx( 2 ) ) / pixel_dif ;
    }
    if ( rgb_dy != nullptr )
    {
        cv::Vec3f rgb_down(0,0,0) , rgb_up(0,0,0);
        float pixel_dif = 0;
        for ( int bias_idx = 1; bias_idx < ssd; bias_idx++ )
        {
            rgb_down += getSubPixel< cv::Vec3b >( m_img, v- bias_idx, u , layer );
            rgb_up += getSubPixel< cv::Vec3b >( m_img, v+ bias_idx, u , layer );
            pixel_dif += 2*bias_idx; // total vertical baseline of this pair
        }
        cv::Vec3f cv_rgb_dy = rgb_up - rgb_down;
        // printf("[%.5f, %.5f, %.5f] V.S. [%.5f, %.5f, %.5f] \r\n", rgb_down(0), rgb_down(1), rgb_down(2), rgb_up(0), rgb_up(1), rgb_up(2) );
        *rgb_dy = vec_3( cv_rgb_dy( 0 ), cv_rgb_dy( 1 ), cv_rgb_dy( 2 ) ) / pixel_dif ;
    }
    return vec_3( rgb( 0 ), rgb( 1 ), rgb( 2 ) );
}
// Sample a gray value at sub-pixel position (u, v). Only pyramid layer 0 is
// implemented; any other layer deliberately blocks in an endless loop as a
// loud "not implemented" marker.
// NOTE(review): the layer-0 path samples m_img with a uchar template —
// presumably m_img_gray was intended (m_img is a 3-channel color image);
// verify against callers. Also note `val` stays 0, so the final
// gamma-corrected return is effectively unreachable for layer 0.
double Image_frame::get_grey_color( double &u, double &v, int layer )
{
    double val = 0;
    if ( layer == 0 )
    {
        double gray_val = getSubPixel< uchar >( m_img, v, u );
        return gray_val;
    }
    else
    {
        // TODO
        while ( 1 )
        {
            cout << "To be process here" << __LINE__ << endl;
            std::this_thread::sleep_for( std::chrono::milliseconds( 1 ) );
        };
    }
    return m_gama_para( 0 ) * val + m_gama_para( 1 );
}
bool Image_frame::get_rgb(const double &u, const double &v, int &r, int &g, int &b)
{
r = m_img.at<cv::Vec3b>(v, u)[2];
g = m_img.at<cv::Vec3b>(v, u)[1];
b = m_img.at<cv::Vec3b>(v, u)[0];
return true;
}
// Print frame index, pose (quaternion coefficients | translation) and the
// four intrinsic parameters on a single line.
void Image_frame::display_pose()
{
    cout << "Frm [" << m_frame_idx << "], pose: " << m_pose_w2c_q.coeffs().transpose() << " | " << m_pose_w2c_t.transpose() << " | "
         << fx << ", " << cx << ", " << fy << ", " << cy << ", " << endl;
}
// Contrast-limited adaptive histogram equalization (CLAHE) of a single
// channel image, in place. `amp` is the CLAHE clip limit.
// NOTE(review): the tile grid uses img.cols for BOTH dimensions (32 tiles
// per 640 px of width, at least 4) — possibly img.rows was intended for the
// second dimension; confirm before changing.
void Image_frame::image_equalize(cv::Mat &img, int amp)
{
    cv::Mat img_temp;
    cv::Size eqa_img_size = cv::Size(std::max(img.cols * 32.0 / 640, 4.0), std::max(img.cols * 32.0 / 640, 4.0));
    cv::Ptr<cv::CLAHE> clahe = cv::createCLAHE(amp, eqa_img_size);
    // Equalize gray image.
    clahe->apply(img, img_temp);
    img = img_temp;
}
// Free-function duplicate of Image_frame::image_equalize for use by the
// color-equalization helper below: CLAHE on a single-channel image, in
// place, with `amp` as the clip limit. Same cols-for-both-dimensions tile
// grid as the member version (see the note there).
inline void image_equalize(cv::Mat &img, int amp)
{
    cv::Mat img_temp;
    cv::Size eqa_img_size = cv::Size(std::max(img.cols * 32.0 / 640, 4.0), std::max(img.cols * 32.0 / 640, 4.0));
    cv::Ptr<cv::CLAHE> clahe = cv::createCLAHE(amp, eqa_img_size);
    // Equalize gray image.
    clahe->apply(img, img_temp);
    img = img_temp;
}
// Equalize a BGR color image by converting to YCrCb, applying CLAHE to the
// luminance (Y) channel only, and converting back. Chrominance is left
// untouched so hues are preserved.
inline cv::Mat equalize_color_image_Ycrcb(cv::Mat &image)
{
    cv::Mat hist_equalized_image;
    cv::cvtColor(image, hist_equalized_image, cv::COLOR_BGR2YCrCb);
    //Split the image into 3 channels; Y, Cr and Cb channels respectively and store it in a std::vector
    std::vector<cv::Mat> vec_channels;
    cv::split(hist_equalized_image, vec_channels);
    //Equalize the histogram of only the Y channel
    // cv::equalizeHist(vec_channels[0], vec_channels[0]);
    image_equalize( vec_channels[0], 1 );
    cv::merge(vec_channels, hist_equalized_image);
    cv::cvtColor(hist_equalized_image, hist_equalized_image, cv::COLOR_YCrCb2BGR);
    return hist_equalized_image;
}
// Equalize both the cached grayscale image and the color image (the latter
// via its luminance channel in YCrCb space).
void Image_frame::image_equalize()
{
    image_equalize(m_img_gray, 3.0); // amp parameter is int: 3.0 narrows to 3
    // cv::imshow("before", m_img.clone());
    m_img = equalize_color_image_Ycrcb(m_img);
    // cv::imshow("After", m_img.clone());
}
// Project a 3D point into this frame; fails when the point is behind the
// camera or outside the FoV margin. When rgb_pt is supplied, it is filled
// with the point position and the sampled pixel color (alpha = 255).
bool Image_frame::project_3d_point_in_this_img(const pcl::PointXYZI & in_pt, double &u, double &v, pcl::PointXYZRGB *rgb_pt, double intrinsic_scale)
{
    if (!project_3d_to_2d(in_pt, m_cam_K, u, v, intrinsic_scale))
    {
        return false;
    }
    if (!if_2d_points_available(u, v, intrinsic_scale))
    {
        return false;
    }
    if (rgb_pt == nullptr)
    {
        return true;
    }
    int r = 0, g = 0, b = 0;
    get_rgb(u, v, r, g, b);
    rgb_pt->x = in_pt.x;
    rgb_pt->y = in_pt.y;
    rgb_pt->z = in_pt.z;
    rgb_pt->r = r;
    rgb_pt->g = g;
    rgb_pt->b = b;
    rgb_pt->a = 255;
    return true;
}
bool Image_frame::project_3d_point_in_this_img(const vec_3 & in_pt, double &u, double &v, pcl::PointXYZRGB *rgb_pt, double intrinsic_scale)
{
pcl::PointXYZI temp_pt;
temp_pt.x = in_pt(0);
temp_pt.y = in_pt(1);
temp_pt.z = in_pt(2);
return project_3d_point_in_this_img(temp_pt, u, v, rgb_pt, intrinsic_scale);
}
// Persist this frame to disk: "<prefix>.txt" holds the w2c pose as
// "qw qx qy qz tx ty tz", "<prefix>.png" holds the color image.
void Image_frame::dump_pose_and_image(const std::string name_prefix)
{
    const std::string txt_file_name = name_prefix + ".txt";
    const std::string image_file_name = name_prefix + ".png";
    if (FILE *fp = fopen(txt_file_name.c_str(), "w+"))
    {
        fprintf(fp, "%lf %lf %lf %lf %lf %lf %lf\r\n", m_pose_w2c_q.w(), m_pose_w2c_q.x(), m_pose_w2c_q.y(), m_pose_w2c_q.z(),
                m_pose_w2c_t(0), m_pose_w2c_t(1), m_pose_w2c_t(2));
        fclose(fp);
    }
    cv::imwrite(image_file_name, m_img);
}
// Load a frame dumped by dump_pose_and_image(): "<prefix>.txt" must hold at
// least 7 values (qw qx qy qz tx ty tz); "<prefix>.png" is read when
// if_load_image is nonzero and optionally resized by image_scale.
// Returns 1 on success, 0 when the pose file is missing/empty.
int Image_frame::load_pose_and_image(const std::string name_prefix, const double image_scale, int if_load_image)
{
    // cout << "Load data from " << name_prefix << ".X" << endl;
    std::string txt_file_name = std::string(name_prefix).append(".txt");
    std::string image_file_name = std::string(name_prefix).append(".png");
    Eigen::MatrixXd pose_data = Common_tools::load_mat_from_txt<double>(txt_file_name);
    if (pose_data.size() == 0)
    {
        // cout << "Load offline data return fail." << endl;
        return 0;
    }
    // cout << "Pose data = " << pose_data << endl;
    // Quaternion stored in (w, x, y, z) order, matching the dump format.
    m_pose_w2c_q = Eigen::Quaterniond(pose_data(0), pose_data(1), pose_data(2), pose_data(3));
    if (if_load_image)
    {
        m_img = cv::imread(image_file_name.c_str());
        if (image_scale != 1.0)
        {
            cv::resize(m_img, m_img, cv::Size(0, 0), image_scale, image_scale);
        }
        m_img_rows = m_img.rows;
        m_img_cols = m_img.cols;
    }
    // m_pose_w2c_q = Eigen::Map<Eigen::Quaterniond>(&pose_data.data()[0]);
    m_pose_w2c_R = m_pose_w2c_q.toRotationMatrix();
    // Translation is the last three of the seven stored values.
    m_pose_w2c_t = Eigen::Map<Eigen::Vector3d>(&pose_data.data()[4]);
    return 1;
}
| C++ |
3D | hku-mars/ImMesh | src/meshing/r3live/pointcloud_rgbd.hpp | .hpp | 15,825 | 375 | /*
This code is the implementation of our paper "R3LIVE: A Robust, Real-time, RGB-colored,
LiDAR-Inertial-Visual tightly-coupled state Estimation and mapping package".
Author: Jiarong Lin < ziv.lin.ljr@gmail.com >
If you use any code of this repo in your academic research, please cite at least
one of our papers:
[1] Lin, Jiarong, and Fu Zhang. "R3LIVE: A Robust, Real-time, RGB-colored,
LiDAR-Inertial-Visual tightly-coupled state Estimation and mapping package."
[2] Xu, Wei, et al. "Fast-lio2: Fast direct lidar-inertial odometry."
[3] Lin, Jiarong, et al. "R2LIVE: A Robust, Real-time, LiDAR-Inertial-Visual
tightly-coupled state Estimator and mapping."
[4] Xu, Wei, and Fu Zhang. "Fast-lio: A fast, robust lidar-inertial odometry
package by tightly-coupled iterated kalman filter."
[5] Cai, Yixi, Wei Xu, and Fu Zhang. "ikd-Tree: An Incremental KD Tree for
Robotic Applications."
[6] Lin, Jiarong, and Fu Zhang. "Loam-livox: A fast, robust, high-precision
LiDAR odometry and mapping package for LiDARs of small FoV."
For commercial use, please contact me < ziv.lin.ljr@gmail.com > and
Dr. Fu Zhang < fuzhang@hku.hk >.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include <atomic>
#include <unordered_set>
#include "tools_logger.hpp"
#include "opencv2/opencv.hpp"
#include "image_frame.hpp"
#include "tools_kd_hash.hpp"
#include "tools_thread_pool.hpp"
#include "tools_serialization.hpp"
#include "tools_graphics.hpp"
#include "triangle.hpp"
#include "ikd_Tree.h"
// #include "assert.h"
#define R3LIVE_MAP_MAJOR_VERSION 1
#define R3LIVE_MAP_MINOR_VERSION 0
#define IF_DBG_COLOR 0
extern cv::RNG g_rng;
extern double g_initial_camera_exp_tim;
extern double g_camera_exp_tim_lower_bound;
extern double g_maximum_radian;
extern double g_voxel_resolution;
// extern std::atomic< long > g_pts_index;
class Triangle;
class Global_map;
class RGB_Voxel;
using RGB_voxel_ptr = std::shared_ptr< RGB_Voxel >;
// A single colored map point: 3D position (raw and smoothed), a running RGB
// estimate with per-channel covariance, and bookkeeping for photometric
// observations (first-observation exposure, observation distance/time).
// Serialized via Boost.Serialization (position, color, index, covariance,
// observation count and exposure only).
class RGB_pts
{
  public:
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW
    vec_2 m_img_vel;                  // image-space velocity of the projected point
    vec_2 m_img_pt_in_last_frame;     // projection in the previous frame
    vec_2 m_img_pt_in_current_frame;  // projection in the current frame
#if 0
    std::atomic<double> m_pos[3];
    std::atomic<double> m_rgb[3];
    std::atomic<double> m_cov_rgb[3];
    std::atomic<int> m_N_rgb;
#else
    double m_pos[ 3 ] = { 0 };             // world position
    double m_pos_aft_smooth[ 3 ] = { 0 };  // position after smoothing (valid when m_smoothed)
    double m_rgb[ 3 ] = { 0 };             // running RGB estimate
    double m_cov_rgb[ 3 ] = { 0 };         // per-channel color variance
    int m_N_rgb = 0;                       // number of color observations fused so far
    int m_pt_index = 0;                    // index into the global point vector
    bool m_smoothed = false;               // whether m_pos_aft_smooth is populated
#endif
    int m_is_out_lier_count = 0;
    int m_is_inner_pt = 0 ;
    RGB_voxel_ptr m_parent_voxel = nullptr; // owning voxel in the voxel grid
#if IF_DBG_COLOR
    cv::Scalar m_dbg_color;
#endif
    // Inverse of the exposure time of the first observing camera frame.
    double m_first_obs_exposure_time = 1.0 / g_initial_camera_exp_tim;
    double m_obs_dis = 0;       // distance of the last color observation
    double m_last_obs_time = 0; // timestamp of the last color observation
    // Reset the color estimate and observation bookkeeping (position is kept).
    void clear()
    {
        m_rgb[ 0 ] = 0;
        m_rgb[ 1 ] = 0;
        m_rgb[ 2 ] = 0;
        m_N_rgb = 0;
        m_obs_dis = 0;
        m_last_obs_time = 0;
#if IF_DBG_COLOR
        int r = g_rng.uniform( 0, 256 );
        int g = g_rng.uniform( 0, 256 );
        int b = g_rng.uniform( 0, 256 );
        m_dbg_color = cv::Scalar( r, g, b );
#endif
        // m_rgb = vec_3(255, 255, 255);
    };
    RGB_pts()
    {
        // m_pt_index = g_pts_index++;
        clear();
    };
    ~RGB_pts()
    {
        // cout << "~RGB_pts: " ;
        // printf_line;
        // while ( 1 )
        // {
        //     std::this_thread::sleep_for( std::chrono::seconds( 1 ) );
        // }
    };
    void set_pos( const vec_3 &pos );
    void set_smooth_pos( const vec_3 &pos );
    vec_3 get_pos(bool get_smooth = false);
    vec_3 get_rgb();
    vec_3 get_radiance();
    mat_3_3 get_rgb_cov();
    pcl::PointXYZI get_pt();
    // void update_gray( const double gray, double obs_dis = 1.0 );
    int update_rgb( const vec_3 &rgb, const double obs_dis, const vec_3 obs_sigma, const double obs_time, const double current_exposure_time );
  private:
    friend class boost::serialization::access;
    // Boost.Serialization: only the fields needed to rebuild the map offline.
    template < typename Archive >
    void serialize( Archive &ar, const unsigned int version )
    {
        ar &m_pos;
        ar &m_rgb;
        ar &m_pt_index;
        ar &m_cov_rgb;
        ar &m_N_rgb;
        ar &m_first_obs_exposure_time;
    }
};
using RGB_pt_ptr = std::shared_ptr< RGB_pts >;
extern std::vector<RGB_pt_ptr> * g_rgb_pts_vec;
// One cell of the global voxel grid: integer grid coordinates, the metric
// cell center, the covariance axes of the contained points, and the list of
// RGB points assigned to this cell. Meshing helpers (refresh_triangles,
// insert_triangle) are defined elsewhere.
class RGB_Voxel
{
  public:
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW
    double m_last_visited_time = 0;
    long m_pos[3] = {0};  // integer voxel-grid coordinates
    vec_3 m_center;       // metric center: m_pos * g_voxel_resolution
    // Cov axis
    vec_3 m_long_axis;
    vec_3 m_mid_axis;
    vec_3 m_short_axis;
    long m_meshing_times = 0;
    long m_new_added_pts_count = 0;
    std::vector< RGB_pt_ptr > m_pts_in_grid;
    std::vector< Common_tools::Triangle_2 > m_2d_pts_vec;
    // Triangle_set m_triangle_list_in_voxel;
    // RGB_Voxel() = default;
    RGB_Voxel(long x, long y, long z)
    {
        m_long_axis.setZero();
        m_mid_axis.setZero();
        m_short_axis.setZero();
        m_pos[0] = x;
        m_pos[1] = y;
        m_pos[2] = z;
        m_center = vec_3( x * g_voxel_resolution, y * g_voxel_resolution, z * g_voxel_resolution );
    };
    void refresh_triangles();
    int insert_triangle( long & id_0, long & id_1, long & id_2);
    ~RGB_Voxel()
    {
        // cout << "~RGB_Voxel: " ;
        // printf_line;
        // while ( 1 )
        // {
        //     std::this_thread::sleep_for( std::chrono::seconds( 1 ) );
        // }
    };
    // Append a point to this cell (no duplicate or ownership checks here).
    void add_pt( const RGB_pt_ptr & rgb_pts )
    {
        m_pts_in_grid.push_back( rgb_pts );
    }
    // True when the point's position quantizes to this cell's grid coordinates.
    bool if_pts_belong_to_this_voxel( RGB_pt_ptr &rgb_pt )
    {
        // clang-format off
        if ( ( std::round( rgb_pt->m_pos[ 0 ] / g_voxel_resolution ) == m_pos[ 0 ] ) &&
             ( std::round( rgb_pt->m_pos[ 1 ] / g_voxel_resolution ) == m_pos[ 1 ] ) &&
             ( std::round( rgb_pt->m_pos[ 2 ] / g_voxel_resolution ) == m_pos[ 2 ] ) )
        {
            return true;
        }
        else
        {
            return false;
        }
        // clang-format on
    }
};
std::vector<RGB_pt_ptr> retrieve_pts_in_voxels(std::unordered_set< std::shared_ptr< RGB_Voxel > > & neighbor_voxels);
using Voxel_set_iterator = std::unordered_set< std::shared_ptr< RGB_Voxel > >::iterator;
using KDtree_pt = ikdTree_PointType;
using KDtree_pt_vector = KD_TREE<KDtree_pt>::PointVector;
// The global colored point map: all RGB points, the spatial hash of points
// and voxels, a KD-tree for neighbor queries, and the machinery for
// projecting/rendering points into camera frames. Method bodies live in the
// corresponding .cpp; serialization is split into the free save()/load()
// functions below via boost::serialization::split_free.
struct Global_map
{
    int m_map_major_version = R3LIVE_MAP_MAJOR_VERSION;
    int m_map_minor_version = R3LIVE_MAP_MINOR_VERSION;
    int m_if_get_all_pts_in_boxes_using_mp = 1; // use the thread pool when gathering box points
    std::vector< RGB_pt_ptr > m_rgb_pts_vec;    // all points, indexed by RGB_pts::m_pt_index
    std::vector< RGB_voxel_ptr > m_voxel_vec;
    // std::vector< RGB_pt_ptr > m_rgb_pts_in_recent_visited_voxels;
    std::shared_ptr< std::vector< RGB_pt_ptr > > m_pts_rgb_vec_for_projection = nullptr;
    // Mutexes guarding the shared containers above (held in shared_ptr so the
    // struct stays copyable/serializable).
    std::shared_ptr< std::mutex > m_mutex_pts_vec;
    std::shared_ptr< std::mutex > m_mutex_recent_added_list;
    std::shared_ptr< std::mutex > m_mutex_img_pose_for_projection;
    std::shared_ptr< std::mutex > m_mutex_rgb_pts_in_recent_hitted_boxes;
    std::shared_ptr< std::mutex > m_mutex_m_box_recent_hitted;
    std::shared_ptr< std::mutex > m_mutex_pts_last_visited;
    KD_TREE< KDtree_pt > m_kdtree;
    Image_frame m_img_for_projection;
    double m_recent_visited_voxel_activated_time = 0.0;
    bool m_in_appending_pts = 0;
    int m_updated_frame_index = 0;
    std::shared_ptr< std::thread > m_thread_service; // background projection-refresh thread
    int m_if_reload_init_voxel_and_hashed_pts = true; // rebuild hash/voxels when loading a map
    Hash_map_3d< long, RGB_pt_ptr > m_hashmap_3d_pts;                  // point grid (m_minimum_pts_size cells)
    Hash_map_3d< long, std::shared_ptr< RGB_Voxel > > m_hashmap_voxels; // voxel grid (m_voxel_resolution cells)
    std::unordered_set< std::shared_ptr< RGB_Voxel > > m_voxels_recent_visited;
    std::vector< std::shared_ptr< RGB_pts > > m_pts_last_hitted;
    double m_minimum_pts_size = 0.05; // 5cm minimum distance.
    double m_voxel_resolution = 0.1;
    double m_maximum_depth_for_projection = 200;
    double m_minimum_depth_for_projection = 3;
    int m_last_updated_frame_idx = -1;
    void clear();
    void set_minimum_dis( double minimum_dis );
    void set_voxel_resolution( double minimum_dis );
    Global_map( int if_start_service = 1 );
    ~Global_map();
    vec_3 smooth_pts(RGB_pt_ptr & rgb_pt, double smooth_factor, double knn = 20, double maximum_smooth_dis = 0 );
    void service_refresh_pts_for_projection();
    void render_points_for_projection( std::shared_ptr< Image_frame > &img_ptr );
    void update_pose_for_projection( std::shared_ptr< Image_frame > &img, double fov_margin = 0.0001 );
    bool is_busy();
    template < typename T >
    int append_points_to_global_map( pcl::PointCloud< T > &pc_in, double added_time, std::vector< RGB_pt_ptr > *pts_added_vec = nullptr,
                                     int step = 1, int disable_append = 0 );
    void render_with_a_image( std::shared_ptr< Image_frame > &img_ptr, int if_select = 1 );
    void selection_points_for_projection( std::shared_ptr< Image_frame > &image_pose, std::vector< std::shared_ptr< RGB_pts > > *pc_out_vec = nullptr,
                                          std::vector< cv::Point2f > *pc_2d_out_vec = nullptr, double minimum_dis = 5, int skip_step = 1,
                                          int use_all_pts = 0 );
    void save_to_pcd( std::string dir_name, std::string file_name = std::string( "/rgb_pt" ), int save_pts_with_views = 3 );
    void save_and_display_pointcloud( std::string dir_name = std::string( "/home/ziv/temp/" ), std::string file_name = std::string( "/rgb_pt" ),
                                      int save_pts_with_views = 3 );
    void render_pts_in_voxels( std::shared_ptr< Image_frame > &img_ptr, std::vector< std::shared_ptr< RGB_pts > > &voxels_for_render,
                               double obs_time = 0 );
  private:
    friend class boost::serialization::access;
    // Delegate to the non-intrusive free save()/load() functions.
    template < typename Archive >
    void serialize( Archive &ar, const unsigned int version )
    {
        boost::serialization::split_free( ar, *this, version );
    }
};
void render_pts_in_voxels_mp( std::shared_ptr< Image_frame > &img_ptr, std::unordered_set< RGB_voxel_ptr > *voxels_for_render,
const double &obs_time = 0 );
// Boost.Serialization save: writes the map version, the resolution
// parameters, and every RGB point, with a console progress read-out.
template < typename Archive >
inline void save( Archive &ar, const Global_map &global_map, const unsigned int /*version*/ )
{
    const int vector_size = global_map.m_rgb_pts_vec.size();
    ar << global_map.m_map_major_version;
    ar << global_map.m_map_minor_version;
    ar << global_map.m_minimum_pts_size;
    ar << global_map.m_voxel_resolution;
    ar << vector_size;
    // Progress denominator, guarded so a single-point map does not print
    // "nan %" (the original computed 0 * 100.0 / 0).
    const double progress_den = ( vector_size > 1 ) ? ( vector_size - 1 ) : 1;
    cout << ANSI_COLOR_YELLOW_BOLD;
    for ( int i = 0; i < vector_size; i++ )
    {
        ar << ( *global_map.m_rgb_pts_vec[ i ] );
        // Points must be stored in index order so load() can verify them.
        CV_Assert( global_map.m_rgb_pts_vec[ i ]->m_pt_index == i );
        if ( ( i % 10000 == 0 ) || ( i == vector_size - 1 ) )
        {
            cout << ANSI_DELETE_CURRENT_LINE << "Saving global map: " << ( i * 100.0 / progress_den ) << " %";
            ANSI_SCREEN_FLUSH;
        }
    }
    cout << endl;
}
// Boost.Serialization load: reads the map meta data and every RGB point,
// optionally rebuilding the point hash map and the voxel grid
// (m_if_reload_init_voxel_and_hashed_pts). Prints progress while loading.
template < typename Archive >
inline void load( Archive &ar, Global_map &global_map, const unsigned int /*version*/ )
{
    Common_tools::Timer tim;
    tim.tic();
    int vector_size;
    vector_size = global_map.m_rgb_pts_vec.size();
    ar >> global_map.m_map_major_version;
    ar >> global_map.m_map_minor_version;
    ar >> global_map.m_minimum_pts_size;
    ar >> global_map.m_voxel_resolution;
    ar >> vector_size;
    // Progress denominator, guarded so a single-point map does not print
    // "nan %" (the original computed 0 * 100.0 / 0).
    const double progress_den = ( vector_size > 1 ) ? ( vector_size - 1 ) : 1;
    int grid_x, grid_y, grid_z, box_x, box_y, box_z;
    scope_color( ANSI_COLOR_YELLOW_BOLD );
    for ( int i = 0; i < vector_size; i++ )
    {
        // printf_line;
        std::shared_ptr< RGB_pts > rgb_pt = std::make_shared< RGB_pts >();
        ar >> *rgb_pt;
        // Points were saved in index order (see save()); verify consistency.
        CV_Assert( rgb_pt->m_pt_index == global_map.m_rgb_pts_vec.size() );
        global_map.m_rgb_pts_vec.push_back( rgb_pt );
        grid_x = std::round( rgb_pt->m_pos[ 0 ] / global_map.m_minimum_pts_size );
        grid_y = std::round( rgb_pt->m_pos[ 1 ] / global_map.m_minimum_pts_size );
        grid_z = std::round( rgb_pt->m_pos[ 2 ] / global_map.m_minimum_pts_size );
        box_x = std::round( rgb_pt->m_pos[ 0 ] / global_map.m_voxel_resolution );
        box_y = std::round( rgb_pt->m_pos[ 1 ] / global_map.m_voxel_resolution );
        box_z = std::round( rgb_pt->m_pos[ 2 ] / global_map.m_voxel_resolution );
        if ( global_map.m_if_reload_init_voxel_and_hashed_pts )
        {
            // if init voxel and hashmap_3d_pts, comment this to save loading time if necessary.
            global_map.m_hashmap_3d_pts.insert( grid_x, grid_y, grid_z, rgb_pt );
            if ( !global_map.m_hashmap_voxels.if_exist( box_x, box_y, box_z ) )
            {
                std::shared_ptr< RGB_Voxel > box_rgb = std::make_shared< RGB_Voxel >(box_x, box_y, box_z);
                global_map.m_hashmap_voxels.insert( box_x, box_y, box_z, box_rgb );
            }
            (*global_map.m_hashmap_voxels.get_data(box_x, box_y, box_z))->add_pt( rgb_pt );
        }
        if ( ( i % 10000 == 0 ) || ( i == vector_size - 1 ) )
        {
            cout << ANSI_DELETE_CURRENT_LINE << "Loading global map: " << ( i * 100.0 / progress_den ) << " %";
            ANSI_SCREEN_FLUSH;
        }
    }
    cout << endl;
    // BUGFIX: log message typo "offine" -> "offline".
    cout << "Load offline global map cost: " << tim.toc() << " ms" << ANSI_COLOR_RESET << endl;
}
| Unknown |
3D | Aswendt-Lab/AIDAmri | AIDAmri_workshop.ipynb | .ipynb | 175,298 | 2,986 | {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Table of contents\n",
"* [Before we start](#intro)\n",
" - [Prerequisites](#prereq)\n",
" - [About the notebook](#aboutnb)\n",
" - [Test data](#testd)\n",
"* [From build to launch](#fbtl)\n",
" - [AIDAmri image build](#build)\n",
" - [Create a container](#contcreate)\n",
" - [(Re-)start the container](#contstart)\n",
" - [Basic usage](#usage)\n",
"* [Data processing with AIDAmri](#proc)\n",
" - [T2w](#t2w)\n",
" - [DTI](#dti)\n",
" - [fMRI](#frmi)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Before we start <a class=\"anchor\" id=\"intro\"></a>\n",
"## Prerequisites <a class=\"anchor\" id=\"prereq\"></a>\n",
"The following programs are **necessary** or recommended to use AIDAmri:\n",
"* **Docker**\n",
"* **ca. 12 GB free memory**\n",
"* **<span style=color:orange;>For Windows users:</span> Bash terminal or Linux subsystem** (we recommend [git BASH](https://gitforwindows.org/))\n",
"* Anaconda Navigator or local Jupyter notebook installment to execute the code cells directly in this script\n",
"\n",
"## About the notebook <a class=\"anchor\" id=\"aboutnb\"></a>\n",
    "You will notice that the code lines are mostly prefaced by an `!`. This designates the command to run on Bash, i.e. the Unix-shell system. Comments in code cells are prefaced by `#`.\n",
">🚩 A red flag will be used, whenever a difference is noted between the notebook and working in an interactive terminal like the Linux terminal or git BASH. There, the `!` is not necessary when typing in a command.\n",
"\n",
"<span style=color:orange;font-weight:bold;>For Windows users:</span> Windows does not use the Bash shell per default but the command line prompt (CMD). If you opened this notebook via the Anaconda Navigator, open git BASH or any other Bash terminal available and type in `. PATH\\TO\\ANACONDA\\Scripts\\activate` (replace `\\PATH\\TO\\ANACONDA` with your Anaconda install path) to let the terminal access and run the `jupyter notebook` command to re-open Jupyter, so the script uses the Bash shell instead of CMD.\n",
"\n",
"## Test data <a class=\"anchor\" id=\"testd\"></a>\n",
    "We provide test data for the purpose of this notebook [here](https://gin.g-node.org/pallastn/AIDA_dataset). Download the `testData.zip` and unpack it. Within the new folder, there will be another folder named `testData`. Move this folder into the same folder where this notebook is located. The data is acquired with Bruker 9.4T - cryo coil setup: adult C57BL/6 mouse, T2-weighted (anatomical scan), DTI (structural connectivity scan), rs-fMRI (functional connectivity scan)."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# From build to launch <a class=\"anchor\" id=\"fbtl\"></a>\n",
"## AIDAmri image build<a class=\"anchor\" id=\"build\"></a>\n",
"We provide a Dockerfile on GitHub that functions as a protocol to assemble an image. This image is the installation of AIDAmri as well as the environment to run AIDAmri commands and is based on Linux Ubuntu 18.04. To initiate the build, please check first if you are within the repository directory using `pwd`. It should end with `AIDAmri`."
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"/home/user/Documents/AIDA/AIDAmri\r\n"
]
}
],
"source": [
"!pwd"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"If right, also check whether the Dockerfile is present, as well as the `bin` and `lib` directory. Also, the testData folder should be present for later use."
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"AIDA_Logo.png\r\n",
"aidamri_v1.1.yml\r\n",
"AIDAmri_workshop.ipynb\r\n",
"ARA\r\n",
"bin\r\n",
"Dockerfile\r\n",
"Docker_manual.pdf\r\n",
"docker_runfile.sh\r\n",
"fslinstaller_mod.py\r\n",
"lib\r\n",
"LICENSE\r\n",
"manual.pdf\r\n",
"niftyreg-AIDA_verified.zip\r\n",
"README.md\r\n",
"requirements.txt\r\n",
"testData\r\n",
"tools\r\n"
]
}
],
"source": [
"!ls -1"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"After assuring that everything is in the right place, you may initiate the build.\n",
"\n",
"<span style=color:orange;font-weight:bold>For Windows and Mac:</span> The Docker engine may need to be started by opening Docker Desktop. This allows you to run the docker build command.\n",
"\n",
"**Issues and troubleshoot:**\n",
"* Some Linux operating systems do not start the Docker daemon automatically. Enter `sudo systemctl start docker` in a separate terminal to start it manually.\n",
"* FSL does not support certain CPU architecture. This may cause issues on M1/M2 macbooks. Unfortunately, a solution is still pending. We will update our software as soon as FSL architecture support is eventually expanded or we find another suitable solution.\n",
"\n",
    "<span style=\"color:red;font-weight:bold;\">Warning: </span>The initial building process is currently quite heavy and may take approximately 1.5 hours. Please keep this in mind when initiating the build. Furthermore, some of the steps currently output a lot of warnings, causing large amounts of text and possibly some confusion. We assure you that this does not affect the AIDAmri tools and data processing in any way. We will optimize the building process and remove the source of the warnings in the future. These issues will not occur when iterating the build after an update as long as the image stays installed."
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b[1A\u001b[1B\u001b[0G\u001b[?25l[+] Building 0.0s (0/1) docker:default\n",
"\u001b[?25h\u001b[1A\u001b[0G\u001b[?25l[+] Building 0.2s (0/2) docker:default\n",
" => [internal] load .dockerignore 0.2s\n",
" => [internal] load build definition from Dockerfile 0.2s\n",
"\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 0.3s (0/2) docker:default\n",
" => [internal] load .dockerignore 0.3s\n",
" => [internal] load build definition from Dockerfile 0.3s\n",
"\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 0.5s (0/2) docker:default\n",
" => [internal] load .dockerignore 0.5s\n",
" => [internal] load build definition from Dockerfile 0.5s\n",
"\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 0.6s (0/2) docker:default\n",
" => [internal] load .dockerignore 0.6s\n",
"\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m => [internal] load build definition from Dockerfile 0.6s\n",
"\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 0.8s (1/2) docker:default\n",
" => [internal] load .dockerignore 0.8s\n",
"\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 0.9s (2/2) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 1.0s (2/3) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m => [internal] load metadata for docker.io/library/ubuntu:18.04 0.1s\n",
"\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 1.2s (2/3) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m => [internal] load metadata for docker.io/library/ubuntu:18.04 0.3s\n",
"\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 1.3s (2/3) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m => [internal] load metadata for docker.io/library/ubuntu:18.04 0.4s\n",
"\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 1.5s (2/3) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m => [internal] load metadata for docker.io/library/ubuntu:18.04 0.6s\n",
"\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 1.6s (2/3) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m => [internal] load metadata for docker.io/library/ubuntu:18.04 0.7s\n",
"\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 1.8s (2/3) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m => [internal] load metadata for docker.io/library/ubuntu:18.04 0.9s\n",
"\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 1.9s (2/3) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m => [internal] load metadata for docker.io/library/ubuntu:18.04 1.0s\n",
"\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 2.1s (2/3) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m => [internal] load metadata for docker.io/library/ubuntu:18.04 1.2s\n",
"\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 2.2s (2/3) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m => [internal] load metadata for docker.io/library/ubuntu:18.04 1.3s\n",
"\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 2.4s (2/3) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m => [internal] load metadata for docker.io/library/ubuntu:18.04 1.5s\n",
"\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 2.5s (2/3) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m => [internal] load metadata for docker.io/library/ubuntu:18.04 1.6s\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 2.7s (2/3) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m => [internal] load metadata for docker.io/library/ubuntu:18.04 1.8s\n",
"\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 2.8s (2/3) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m => [internal] load metadata for docker.io/library/ubuntu:18.04 1.9s\n",
"\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 3.0s (4/28) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load metadata for docker.io/library/ubuntu:18.04 2.0s\n",
"\u001b[0m => [internal] load build context 0.1s\n",
"\u001b[34m => [ 1/24] FROM docker.io/library/ubuntu:18.04@sha256:152dc042452c496007 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 3.1s (4/28) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load metadata for docker.io/library/ubuntu:18.04 2.0s\n",
"\u001b[0m => [internal] load build context 0.2s\n",
"\u001b[34m => [ 1/24] FROM docker.io/library/ubuntu:18.04@sha256:152dc042452c496007 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 3.3s (4/28) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load metadata for docker.io/library/ubuntu:18.04 2.0s\n",
"\u001b[0m => [internal] load build context 0.4s\n",
" => => transferring context: 5.15MB 0.1s\n",
"\u001b[34m => [ 1/24] FROM docker.io/library/ubuntu:18.04@sha256:152dc042452c496007 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 3.4s (4/28) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load metadata for docker.io/library/ubuntu:18.04 2.0s\n",
"\u001b[0m => [internal] load build context 0.5s\n",
" => => transferring context: 24.31MB 0.3s\n",
"\u001b[34m => [ 1/24] FROM docker.io/library/ubuntu:18.04@sha256:152dc042452c496007 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 3.6s (4/28) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load metadata for docker.io/library/ubuntu:18.04 2.0s\n",
"\u001b[0m => [internal] load build context 0.7s\n",
" => => transferring context: 45.90MB 0.4s\n",
"\u001b[34m => [ 1/24] FROM docker.io/library/ubuntu:18.04@sha256:152dc042452c496007 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 3.7s (4/28) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load metadata for docker.io/library/ubuntu:18.04 2.0s\n",
"\u001b[0m => [internal] load build context 0.8s\n",
" => => transferring context: 89.43MB 0.5s\n",
"\u001b[34m => [ 1/24] FROM docker.io/library/ubuntu:18.04@sha256:152dc042452c496007 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 3.8s (4/28) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load metadata for docker.io/library/ubuntu:18.04 2.0s\n",
"\u001b[0m => [internal] load build context 0.9s\n",
" => => transferring context: 111.40MB 0.7s\n",
"\u001b[34m => [ 1/24] FROM docker.io/library/ubuntu:18.04@sha256:152dc042452c496007 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 4.0s (4/28) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load metadata for docker.io/library/ubuntu:18.04 2.0s\n",
"\u001b[0m => [internal] load build context 1.1s\n",
" => => transferring context: 133.93MB 0.8s\n",
"\u001b[34m => [ 1/24] FROM docker.io/library/ubuntu:18.04@sha256:152dc042452c496007 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 4.1s (4/28) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load metadata for docker.io/library/ubuntu:18.04 2.0s\n",
"\u001b[0m => [internal] load build context 1.2s\n",
" => => transferring context: 176.91MB 0.9s\n",
"\u001b[34m => [ 1/24] FROM docker.io/library/ubuntu:18.04@sha256:152dc042452c496007 0.0s\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 4.2s (4/28) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load metadata for docker.io/library/ubuntu:18.04 2.0s\n",
"\u001b[0m => [internal] load build context 1.3s\n",
" => => transferring context: 197.17MB 1.0s\n",
"\u001b[34m => [ 1/24] FROM docker.io/library/ubuntu:18.04@sha256:152dc042452c496007 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 4.3s (4/28) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load metadata for docker.io/library/ubuntu:18.04 2.0s\n",
"\u001b[0m => [internal] load build context 1.4s\n",
"\u001b[34m => => transferring context: 220.83MB 1.1s\n",
"\u001b[0m\u001b[34m => [ 1/24] FROM docker.io/library/ubuntu:18.04@sha256:152dc042452c496007 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 4.5s (4/28) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load metadata for docker.io/library/ubuntu:18.04 2.0s\n",
"\u001b[0m => [internal] load build context 1.5s\n",
"\u001b[34m => => transferring context: 220.83MB 1.1s\n",
"\u001b[0m\u001b[34m => [ 1/24] FROM docker.io/library/ubuntu:18.04@sha256:152dc042452c496007 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 4.6s (5/28) docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load metadata for docker.io/library/ubuntu:18.04 2.0s\n",
"\u001b[0m\u001b[34m => [internal] load build context 1.6s\n",
"\u001b[0m\u001b[34m => => transferring context: 220.83MB 1.1s\n",
"\u001b[0m\u001b[34m => [ 1/24] FROM docker.io/library/ubuntu:18.04@sha256:152dc042452c496007 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 4.7s (29/29) docker:default\n",
"\u001b[34m => CACHED [ 7/24] RUN python3 -m venv /opt/env 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 8/24] RUN PYTHON3 -m pip install --upgrade setuptools 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 9/24] COPY requirements.txt requirements.txt 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [10/24] RUN pip install --upgrade pip && pip install -r requir 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [11/24] COPY fslinstaller_mod.py ./ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [12/24] RUN python3 fslinstaller_mod.py -V 5.0.11 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [13/24] RUN . /usr/local/fsl/etc/fslconf/fsl.sh 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [14/24] RUN export FSLDIR PATHs 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [15/24] RUN mkdir -p NiftyReg/niftyreg_source/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [16/24] WORKDIR /aida/NiftyReg 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [17/24] RUN git clone git://git.code.sf.net/p/niftyreg/git nif 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [18/24] RUN export NIFTYRREG_INSTALL=../niftyreg_install 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [19/24] RUN export PATH && export LD_LIBRARY_PATH 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [20/24] WORKDIR /aida 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [21/24] RUN wget https://github.com/frankyeh/DSI-Studio/releas 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [22/24] COPY bin/ bin/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [23/24] COPY lib/ lib/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [24/24] RUN echo \"/aida/bin/dsi_studio_ubuntu_1804/dsi-studio/ 0.0s\n",
"\u001b[0m\u001b[34m => exporting to image 0.1s\n",
"\u001b[0m\u001b[34m => => exporting layers 0.0s\n",
"\u001b[0m\u001b[34m => => writing image sha256:6cea76a670c1e2a661bfa218f0f0b4434289b6266fe08 0.1s\n",
"\u001b[0m\u001b[34m => => naming to docker.io/library/aidamri:latest 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 4.9s (29/29) docker:default\n",
"\u001b[34m => CACHED [ 7/24] RUN python3 -m venv /opt/env 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 8/24] RUN PYTHON3 -m pip install --upgrade setuptools 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 9/24] COPY requirements.txt requirements.txt 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [10/24] RUN pip install --upgrade pip && pip install -r requir 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [11/24] COPY fslinstaller_mod.py ./ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [12/24] RUN python3 fslinstaller_mod.py -V 5.0.11 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [13/24] RUN . /usr/local/fsl/etc/fslconf/fsl.sh 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [14/24] RUN export FSLDIR PATHs 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [15/24] RUN mkdir -p NiftyReg/niftyreg_source/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [16/24] WORKDIR /aida/NiftyReg 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [17/24] RUN git clone git://git.code.sf.net/p/niftyreg/git nif 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [18/24] RUN export NIFTYRREG_INSTALL=../niftyreg_install 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [19/24] RUN export PATH && export LD_LIBRARY_PATH 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [20/24] WORKDIR /aida 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [21/24] RUN wget https://github.com/frankyeh/DSI-Studio/releas 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [22/24] COPY bin/ bin/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [23/24] COPY lib/ lib/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [24/24] RUN echo \"/aida/bin/dsi_studio_ubuntu_1804/dsi-studio/ 0.0s\n",
"\u001b[0m\u001b[34m => exporting to image 0.1s\n",
"\u001b[0m\u001b[34m => => exporting layers 0.0s\n",
"\u001b[0m\u001b[34m => => writing image sha256:6cea76a670c1e2a661bfa218f0f0b4434289b6266fe08 0.1s\n",
"\u001b[0m\u001b[34m => => naming to docker.io/library/aidamri:latest 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 5.0s (29/29) docker:default\n",
"\u001b[34m => CACHED [ 7/24] RUN python3 -m venv /opt/env 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 8/24] RUN PYTHON3 -m pip install --upgrade setuptools 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 9/24] COPY requirements.txt requirements.txt 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [10/24] RUN pip install --upgrade pip && pip install -r requir 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [11/24] COPY fslinstaller_mod.py ./ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [12/24] RUN python3 fslinstaller_mod.py -V 5.0.11 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [13/24] RUN . /usr/local/fsl/etc/fslconf/fsl.sh 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [14/24] RUN export FSLDIR PATHs 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [15/24] RUN mkdir -p NiftyReg/niftyreg_source/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [16/24] WORKDIR /aida/NiftyReg 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [17/24] RUN git clone git://git.code.sf.net/p/niftyreg/git nif 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [18/24] RUN export NIFTYRREG_INSTALL=../niftyreg_install 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [19/24] RUN export PATH && export LD_LIBRARY_PATH 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [20/24] WORKDIR /aida 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [21/24] RUN wget https://github.com/frankyeh/DSI-Studio/releas 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [22/24] COPY bin/ bin/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [23/24] COPY lib/ lib/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [24/24] RUN echo \"/aida/bin/dsi_studio_ubuntu_1804/dsi-studio/ 0.0s\n",
"\u001b[0m\u001b[34m => exporting to image 0.1s\n",
"\u001b[0m\u001b[34m => => exporting layers 0.0s\n",
"\u001b[0m\u001b[34m => => writing image sha256:6cea76a670c1e2a661bfa218f0f0b4434289b6266fe08 0.1s\n",
"\u001b[0m\u001b[34m => => naming to docker.io/library/aidamri:latest 0.0s\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 5.2s (29/29) docker:default\n",
"\u001b[34m => CACHED [ 7/24] RUN python3 -m venv /opt/env 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 8/24] RUN PYTHON3 -m pip install --upgrade setuptools 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 9/24] COPY requirements.txt requirements.txt 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [10/24] RUN pip install --upgrade pip && pip install -r requir 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [11/24] COPY fslinstaller_mod.py ./ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [12/24] RUN python3 fslinstaller_mod.py -V 5.0.11 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [13/24] RUN . /usr/local/fsl/etc/fslconf/fsl.sh 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [14/24] RUN export FSLDIR PATHs 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [15/24] RUN mkdir -p NiftyReg/niftyreg_source/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [16/24] WORKDIR /aida/NiftyReg 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [17/24] RUN git clone git://git.code.sf.net/p/niftyreg/git nif 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [18/24] RUN export NIFTYRREG_INSTALL=../niftyreg_install 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [19/24] RUN export PATH && export LD_LIBRARY_PATH 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [20/24] WORKDIR /aida 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [21/24] RUN wget https://github.com/frankyeh/DSI-Studio/releas 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [22/24] COPY bin/ bin/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [23/24] COPY lib/ lib/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [24/24] RUN echo \"/aida/bin/dsi_studio_ubuntu_1804/dsi-studio/ 0.0s\n",
"\u001b[0m\u001b[34m => exporting to image 0.1s\n",
"\u001b[0m\u001b[34m => => exporting layers 0.0s\n",
"\u001b[0m\u001b[34m => => writing image sha256:6cea76a670c1e2a661bfa218f0f0b4434289b6266fe08 0.1s\n",
"\u001b[0m\u001b[34m => => naming to docker.io/library/aidamri:latest 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 5.3s (29/29) docker:default\n",
"\u001b[34m => CACHED [ 7/24] RUN python3 -m venv /opt/env 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 8/24] RUN PYTHON3 -m pip install --upgrade setuptools 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 9/24] COPY requirements.txt requirements.txt 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [10/24] RUN pip install --upgrade pip && pip install -r requir 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [11/24] COPY fslinstaller_mod.py ./ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [12/24] RUN python3 fslinstaller_mod.py -V 5.0.11 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [13/24] RUN . /usr/local/fsl/etc/fslconf/fsl.sh 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [14/24] RUN export FSLDIR PATHs 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [15/24] RUN mkdir -p NiftyReg/niftyreg_source/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [16/24] WORKDIR /aida/NiftyReg 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [17/24] RUN git clone git://git.code.sf.net/p/niftyreg/git nif 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [18/24] RUN export NIFTYRREG_INSTALL=../niftyreg_install 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [19/24] RUN export PATH && export LD_LIBRARY_PATH 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [20/24] WORKDIR /aida 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [21/24] RUN wget https://github.com/frankyeh/DSI-Studio/releas 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [22/24] COPY bin/ bin/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [23/24] COPY lib/ lib/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [24/24] RUN echo \"/aida/bin/dsi_studio_ubuntu_1804/dsi-studio/ 0.0s\n",
"\u001b[0m\u001b[34m => exporting to image 0.1s\n",
"\u001b[0m\u001b[34m => => exporting layers 0.0s\n",
"\u001b[0m\u001b[34m => => writing image sha256:6cea76a670c1e2a661bfa218f0f0b4434289b6266fe08 0.1s\n",
"\u001b[0m\u001b[34m => => naming to docker.io/library/aidamri:latest 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 5.5s (29/29) docker:default\n",
"\u001b[34m => CACHED [ 7/24] RUN python3 -m venv /opt/env 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 8/24] RUN PYTHON3 -m pip install --upgrade setuptools 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 9/24] COPY requirements.txt requirements.txt 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [10/24] RUN pip install --upgrade pip && pip install -r requir 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [11/24] COPY fslinstaller_mod.py ./ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [12/24] RUN python3 fslinstaller_mod.py -V 5.0.11 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [13/24] RUN . /usr/local/fsl/etc/fslconf/fsl.sh 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [14/24] RUN export FSLDIR PATHs 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [15/24] RUN mkdir -p NiftyReg/niftyreg_source/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [16/24] WORKDIR /aida/NiftyReg 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [17/24] RUN git clone git://git.code.sf.net/p/niftyreg/git nif 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [18/24] RUN export NIFTYRREG_INSTALL=../niftyreg_install 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [19/24] RUN export PATH && export LD_LIBRARY_PATH 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [20/24] WORKDIR /aida 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [21/24] RUN wget https://github.com/frankyeh/DSI-Studio/releas 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [22/24] COPY bin/ bin/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [23/24] COPY lib/ lib/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [24/24] RUN echo \"/aida/bin/dsi_studio_ubuntu_1804/dsi-studio/ 0.0s\n",
"\u001b[0m\u001b[34m => exporting to image 0.1s\n",
"\u001b[0m\u001b[34m => => exporting layers 0.0s\n",
"\u001b[0m\u001b[34m => => writing image sha256:6cea76a670c1e2a661bfa218f0f0b4434289b6266fe08 0.1s\n",
"\u001b[0m\u001b[34m => => naming to docker.io/library/aidamri:latest 0.0s\n",
"\u001b[0m\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Building 5.5s (29/29) FINISHED docker:default\n",
"\u001b[34m => [internal] load .dockerignore 0.9s\n",
"\u001b[0m\u001b[34m => => transferring context: 2B 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load build definition from Dockerfile 0.7s\n",
"\u001b[0m\u001b[34m => => transferring dockerfile: 2.50kB 0.0s\n",
"\u001b[0m\u001b[34m => [internal] load metadata for docker.io/library/ubuntu:18.04 2.0s\n",
"\u001b[0m\u001b[34m => [internal] load build context 1.6s\n",
"\u001b[0m\u001b[34m => => transferring context: 220.83MB 1.1s\n",
"\u001b[0m\u001b[34m => [ 1/24] FROM docker.io/library/ubuntu:18.04@sha256:152dc042452c496007 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 2/24] RUN apt-get update -y && apt-get upgrade -y && apt-get 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 3/24] RUN wget https://github.com/Kitware/CMake/releases/dow 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 4/24] RUN mkdir aida/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 5/24] WORKDIR /aida/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 6/24] RUN apt install -y python3 python3-pip && python3 -m p 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 7/24] RUN python3 -m venv /opt/env 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 8/24] RUN PYTHON3 -m pip install --upgrade setuptools 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [ 9/24] COPY requirements.txt requirements.txt 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [10/24] RUN pip install --upgrade pip && pip install -r requir 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [11/24] COPY fslinstaller_mod.py ./ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [12/24] RUN python3 fslinstaller_mod.py -V 5.0.11 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [13/24] RUN . /usr/local/fsl/etc/fslconf/fsl.sh 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [14/24] RUN export FSLDIR PATHs 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [15/24] RUN mkdir -p NiftyReg/niftyreg_source/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [16/24] WORKDIR /aida/NiftyReg 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [17/24] RUN git clone git://git.code.sf.net/p/niftyreg/git nif 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [18/24] RUN export NIFTYRREG_INSTALL=../niftyreg_install 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [19/24] RUN export PATH && export LD_LIBRARY_PATH 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [20/24] WORKDIR /aida 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [21/24] RUN wget https://github.com/frankyeh/DSI-Studio/releas 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [22/24] COPY bin/ bin/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [23/24] COPY lib/ lib/ 0.0s\n",
"\u001b[0m\u001b[34m => CACHED [24/24] RUN echo \"/aida/bin/dsi_studio_ubuntu_1804/dsi-studio/ 0.0s\n",
"\u001b[0m\u001b[34m => exporting to image 0.1s\n",
"\u001b[0m\u001b[34m => => exporting layers 0.0s\n",
"\u001b[0m\u001b[34m => => writing image sha256:6cea76a670c1e2a661bfa218f0f0b4434289b6266fe08 0.1s\n",
"\u001b[0m\u001b[34m => => naming to docker.io/library/aidamri:latest 0.0s\n",
"\u001b[0m\u001b[?25h"
]
}
],
"source": [
"!docker build -t aidamri:latest -f Dockerfile ."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"You can check the installed images in Docker desktop or by using the following command."
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"REPOSITORY TAG IMAGE ID CREATED SIZE\r\n",
"aidamri latest 6cea76a670c1 15 minutes ago 11.7GB\r\n",
"hello-world latest 9c7a54a9a43c 4 months ago 13.3kB\r\n"
]
}
],
"source": [
"!docker images -a"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"It is possible that so-called dangling images exist that might appear as artifacts of the building process. They are not named or tagged. They will not cause any issues, but you can remove them with the following command."
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"WARNING! This will remove:\n",
" - all stopped containers\n",
" - all networks not used by at least one container\n",
" - all dangling images\n",
" - all dangling build cache\n",
"\n",
"Are you sure you want to continue? [y/N] Deleted build cache objects:\n",
"ivz6oojk5sxru7n6pbg0mlc6y\n",
"p8b36zjm3ft8fm2nwn2rgtfdk\n",
"zgczrjfwqdincdve2d0pzidjb\n",
"\n",
"Total reclaimed space: 220.7MB\n",
"yes: standard output: Broken pipe\n"
]
}
],
"source": [
"!yes | docker system prune"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
">🚩 When working with an interactive terminal, you will be asked for permission to prune the system. The `yes |` part of the command pipes the directive for permission to the pruning command since the code cells here are non-interactive. You can just type in `docker system prune` when using the shell normally."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Create a container<a class=\"anchor\" id=\"contcreate\"></a>\n",
"A container is a runnable instance of an image, comparable to an environment. This instance needs to be initiated and run. At the same time, it is possible to mount a volume (i.e. a directory accessible from the container and from the host). If available, you can open Docker Desktop and click the \"Run\" button of the image of choice. There, you can enter the host path of your volume where you have the data stored you wish to process with AIDAmri. You also can enter a path name in the container. We recommend to name the target path like the source path to make it easier to navigate within the container. You also can initiate the container via command.\\\n",
"<span style=color:red;font-weight:bold;>Attention:</span> Due to technical reasons, the following code lines differ according to the operating system. Run the cell corresponding to your system."
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"AIDA_Logo.png\t\tDocker_manual.pdf niftyreg-AIDA_verified.zip\n",
"aidamri_v1.1.yml\tdocker_runfile.sh README.md\n",
"AIDAmri_workshop.ipynb\tfslinstaller_mod.py requirements.txt\n",
"ARA\t\t\tlib\t\t testData\n",
"bin\t\t\tLICENSE\t\t tools\n",
"Dockerfile\t\tmanual.pdf\n",
"docker: Error response from daemon: Conflict. The container name \"/aidamri\" is already in use by container \"80868750fa2160e27a0b6f00f3f9f612d9132bbb036f0d4d62987214c658829f\". You have to remove (or rename) that container to be able to reuse that name.\n",
"See 'docker run --help'.\n"
]
}
],
"source": [
"# For Mac/Linux\n",
"!ls\n",
"!docker run -dit --name aidamri --mount type=bind,source=\"$(pwd)\"/testData,target=/testData aidamri:latest"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"docker: Error response from daemon: invalid mount config for type \"bind\": invalid mount path: '%cd%/testData' mount path must be absolute.\r\n",
"See 'docker run --help'.\r\n"
]
}
],
"source": [
"# For Windows\n",
"!docker run -dit --name aidamri --mount type=bind,source=%cd%/testData,target=/testData aidamri:latest"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The container will be active right away. Stopping the container will not delete it and it can be started again. Let us stop the container for a moment. We will launch it again in the next chapter."
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"aidamri\r\n"
]
}
],
"source": [
"!docker stop aidamri"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## (Re-)start the container <a class=\"anchor\" id=\"contstart\"></a>\n",
"Let us check first, what containers are present."
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\r\n",
"80868750fa21 aidamri:latest \"/bin/bash\" 11 minutes ago Up 11 minutes aidamri\r\n"
]
}
],
"source": [
"!docker ps -a"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The `-a` flag is necessary to list all containers, including exited and running containers (see the `STATUS` column). The aidamri container should be there but exited.\n",
"It is also possible to format this output (i.e. for displaying which volumes are mounted to the respective containers). See the [ps manual page](https://docs.docker.com/engine/reference/commandline/ps/) to specify your output. Use the `--no-trunc` flag to avoid clipping of the output. The following command gives you an example."
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"CONTAINERID 80868750fa2160e27a0b6f00f3f9f612d9132bbb036f0d4d62987214c658829f\r\n",
"\tName:\taidamri\r\n",
"\tMount:\t/home/user/Documents/AIDA/AIDAmri/testData\r\n"
]
}
],
"source": [
"!docker ps -a --no-trunc --format \"CONTAINERID {{$.ID}}\\n\\tName:\\t{{.Names}}\\n\\tMount:\\t{{.Mounts}}\""
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Since you already mounted a volume, you can just start the container. If you need to mount another volume, you need to run a new container."
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"aidamri\r\n"
]
}
],
"source": [
"!docker start aidamri"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
">🚩 To enter a started container in a terminal, use `docker attach aidamri`. Windows and Git Bash users may need to type in `winpty docker attach aidamri` if the simple attach command does not work."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Basic usage<a class=\"anchor\" id=\"usage\"></a>\n",
"Now, you can access the container. \n",
"> 🚩 When using a terminal, you may input the attach command\n",
"` docker attach aidamri` \n",
"to activate the interactive mode. You then will notice that the preface in the terminal changed from your user name to\n",
"```\n",
"root@<CONTAINER ID>:/aida#\n",
"```\n",
"Typing in `ls -1` would result into the following output:\n",
"```\n",
"NiftyReg\n",
"bin\n",
"data\n",
"dsi_studio_ubuntu1804\n",
"fslinstaller_mod.py\n",
"lib\n",
"requirements.txt\n",
"```\n",
"To exit the container you are attached to, type in `exit` (keep in mind that this stops the container and you would need to restart it when you wish to use it again). \n",
"\n",
"We will use `docker exec` to request an operation within the container. For the sake of readability the lines are broken by backslashes (`\\`)\n",
"Using `ls -1` in this way should generate the same output as if we typed the command in the terminal while attached to the container. Run the following cells for different directory lists. The `-1` flag arranges the output as a column. Later, the `-t` flag will be used instead to sort folder contents from newest to oldest."
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"NiftyReg\r\n",
"bin\r\n",
"dsi_studio_ubuntu1804\r\n",
"fslinstaller_mod.py\r\n",
"lib\r\n",
"requirements.txt\r\n"
]
}
],
"source": [
"#directory content\n",
"!docker exec aidamri \\\n",
"ls -1"
]
},
{
"cell_type": "code",
"execution_count": 40,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"MAT_0052\r\n",
"MAT_0017\r\n",
"dev\r\n",
"proc\r\n",
"sys\r\n",
"etc\r\n",
"aida\r\n",
"tmp\r\n",
"root\r\n",
"opt\r\n",
"cmake-3.23.2\r\n",
"lib\r\n",
"bin\r\n",
"run\r\n",
"sbin\r\n",
"var\r\n",
"lib64\r\n",
"media\r\n",
"mnt\r\n",
"srv\r\n",
"usr\r\n",
"testData\r\n",
"boot\r\n",
"home\r\n"
]
}
],
"source": [
"#aida directory content\n",
"!docker exec -w / aidamri \\\n",
"ls -1"
]
},
{
"cell_type": "code",
"execution_count": 39,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"3.3_fMRIActivity\r\n",
"2.3_fMRIPreProcessing\r\n",
"3.2_DTIConnectivity\r\n",
"2.2_DTIPreProcessing\r\n",
"2.1_T2PreProcessing\r\n",
"conv2Nifti_auto.py\r\n",
"groupMapping.csv\r\n",
"stat_result.json\r\n",
"4.1_ROI_analysis\r\n",
"AIDA_gui.py\r\n",
"AIDA_gui_support.py\r\n",
"batchProc.py\r\n",
"dsi_studio_ubuntu_1804\r\n",
"3.1_T2Processing\r\n",
"3.2.1_DTIdata_extract\r\n",
"1_PV2NIfTiConverter\r\n"
]
}
],
"source": [
"#bin directory content\n",
"!docker exec -w /aida/bin aidamri \\\n",
"ls -1"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The `-w` flag was used to set the working directories to `/` and `/aida/bin`, respectively. The former directory is the parent or root directory of the file system in the container. It contains our testData folder, as well as a `bin` folder. Keep in mind that this `bin` folder is **not** the directory containing the AIDAmri tools. Those are located in the `/aida/bin` directory.\n",
"\n",
"> 🚩 When working in an interactive terminal, you need to type in the change directory command (`cd PATH/TO/FOLDER`) to set your working directory. You then can type in the second command, e.g. `ls -1`.\n",
"\n",
"In the first list no working directory was specified. In this case, the default directory was targeted, called `aida`. You can check the working directory by the following command."
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"/aida\r\n"
]
}
],
"source": [
"!docker exec aidamri \\\n",
"pwd"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Data processing with AIDAmri<a class=\"anchor\" id=\"proc\"></a>\n",
"## T2-weighted MRI (T2w)<a class=\"anchor\" id=\"t2w\"></a>\n",
"Starting with pre-processing the T2w single file test data, first check if the data is complete. Given are two NIfTI files, the 5.1 test data, i.e. the base NIfTI file containing the brain image, as well as a segmented stroke mask."
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Stroke_mask.nii.gz\r\n",
"testData.5.1.nii.gz\r\n"
]
}
],
"source": [
"!docker exec -w /testData/T2w aidamri ls"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Now run the pre-processing script. Here, the image will be re-orientated from head supine to prone. The view direction is rostrad, i.e. the right side of the mouse at the right side at image layers of the coronal plane. Also, bias-field correction and brain extraction is performed in this process.\n",
"Set the working directory to the 2.1_T2PreProcessing folder. Alternatively, pass the full path to the python command directly (`python /aida/bin/2.1_T2PreProcessing/preProcessing_T2.py...`). This alternative is viable for every provided script.\n",
"This process might take a while. The output of the process will be visible in real-time when running the script in an interactive shell. Here, it may not be visible directly, but you can see the changes in the T2w folder."
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
" 0% | |\r",
" 2% |# |\r",
" 4% |### |\r",
" 6% |#### |\r",
" 8% |###### |\r",
" 10% |####### |\r",
" 12% |######### |\r",
" 14% |########## |\r",
" 16% |############ |\r",
" 18% |############# |\r",
" 20% |############### |\r",
" 22% |################ |\r",
" 25% |################## |\r",
" 27% |################### |\r",
" 29% |##################### |\r",
" 31% |###################### |\r",
" 33% |######################## |\r",
" 35% |######################### |\r",
" 37% |########################### |\r",
" 39% |############################ |\r",
" 41% |############################## |\r",
" 43% |############################### |\r",
" 45% |################################# |\r",
" 47% |################################## |\r",
" 50% |#################################### |\r",
" 52% |##################################### |\r",
" 54% |####################################### |\r",
" 56% |######################################## |\r",
" 58% |########################################## |\r",
" 60% |########################################### |\r",
" 62% |############################################# |\r",
" 64% |############################################## |\r",
" 66% |################################################ |\r",
" 68% |################################################# |\r",
" 70% |################################################### |\r",
" 72% |#################################################### |\r",
" 75% |###################################################### |\r",
" 77% |####################################################### |\r",
" 79% |######################################################### |\r",
" 81% |########################################################## |\r",
" 83% |############################################################ |\r",
" 85% |############################################################# |\r",
" 87% |############################################################### |\r",
" 89% |################################################################ |\r",
" 91% |################################################################## |\r",
" 93% |################################################################### |\r",
" 95% |##################################################################### |\r",
" 97% |###################################################################### |\r",
"100% |########################################################################|\r",
"\r\n",
"T2 Preprocessing \u001b[5m...\u001b[0m (wait!)\r",
"T2 Preprocessing \u001b[0;30;42m COMPLETED \u001b[0m\r\n"
]
}
],
"source": [
"!docker exec -w /aida/bin/2.1_T2PreProcessing aidamri \\\n",
"python preProcessing_T2.py -i /testData/T2w/testData.5.1.nii.gz"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The output should show that the process completed. The progress bar is a residual of the preceding output normally displayed in the shell.\n",
"Again, check the contents of the T2w folder. Apart from the already present files, it should include the following:\n",
"* testDataBias.nii.gz (Bias field file)\n",
"* testDataBiasBet.nii.gz (Bias field corrected and brain extracted file)\n",
"* testDataBiasBet_mask.nii.gz (Extracted brain mask)\n",
"* preprocess.log (basic processing log file)"
]
},
{
"cell_type": "code",
"execution_count": 18,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Stroke_mask.nii.gz\r\n",
"preprocess.log\r\n",
"testData.5.1.nii.gz\r\n",
"testDataBias.nii.gz\r\n",
"testDataBiasBet.nii.gz\r\n",
"testDataBiasBet_mask.nii.gz\r\n"
]
}
],
"source": [
"!docker exec -w /testData/T2w aidamri \\\n",
"ls -t"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Next, the registration function will be invoked. It requires the brain extracted file (`...BiasBet.nii.gz`)."
]
},
{
"cell_type": "code",
"execution_count": 19,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"[NiftyReg ALADIN] Command line:\n",
"\t reg_aladin -ref /testData/T2w/testDataBiasBet.nii.gz -flo /aida/lib/NP_template_sc0.nii.gz -res /testData/T2w/testDataBiasBet_TemplateAff.nii.gz -aff /testData/T2w/testDataBiasBetMatrixAff.txt\n",
"\n",
"[reg_aladin_sym] Parameters\n",
"[reg_aladin_sym] Reference image name: /testData/T2w/testDataBiasBet.nii.gz\n",
"[reg_aladin_sym] \t256x256x48 voxels\n",
"[reg_aladin_sym] \t0.0683594x0.0683594x0.3 mm\n",
"[reg_aladin_sym] Floating image name: /aida/lib/NP_template_sc0.nii.gz\n",
"[reg_aladin_sym] \t228x160x264 voxels\n",
"[reg_aladin_sym] \t0.05x0.05x0.05 mm\n",
"[reg_aladin_sym] Maximum iteration number: 5 (10 during the first level)\n",
"[reg_aladin_sym] Percentage of blocks: 50 %\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Current level 1 / 3\n",
"[reg_aladin_sym] reference image size: \t64x64x48 voxels\t0.273438x0.273438x0.3 mm\n",
"[reg_aladin_sym] floating image size: \t57x40x66 voxels\t0.2x0.2x0.2 mm\n",
"[reg_aladin_sym] Block size = [4 4 4]\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Forward Block number = [16 16 12]\n",
"[reg_aladin_sym] Backward Block number = [15 10 17]\n",
"[reg_aladin_sym] Initial forward transformation matrix::\n",
"1\t0\t0\t-8.725\n",
"0\t1\t0\t-11.325\n",
"0\t0\t1\t-4.575\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Initial backward transformation matrix::\n",
"1\t0\t0\t8.725\n",
"0\t1\t0\t11.325\n",
"0\t0\t1\t4.575\n",
"0\t0\t0\t1\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Final forward transformation matrix::\n",
"1.02586\t0.0164464\t0.0392102\t-9.243\n",
"-0.0197475\t1.12116\t-0.00452693\t-12.2456\n",
"-0.0443454\t0.031192\t0.948056\t-4.08053\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Final backward transformation matrix::\n",
"0.972794\t-0.0131489\t-0.0402961\t8.66609\n",
"0.0173157\t0.89158\t0.00354111\t11.0924\n",
"0.0449328\t-0.0299489\t1.05279\t4.34451\n",
"0\t0\t0\t1\n",
"- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n",
"[reg_aladin_sym] Current level 2 / 3\n",
"[reg_aladin_sym] reference image size: \t128x128x48 voxels\t0.136719x0.136719x0.3 mm\n",
"[reg_aladin_sym] floating image size: \t114x80x132 voxels\t0.1x0.1x0.1 mm\n",
"[reg_aladin_sym] Block size = [4 4 4]\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Forward Block number = [32 32 12]\n",
"[reg_aladin_sym] Backward Block number = [29 20 33]\n",
"[reg_aladin_sym] Initial forward transformation matrix::\n",
"1.02586\t0.0164464\t0.0392102\t-9.243\n",
"-0.0197475\t1.12116\t-0.00452693\t-12.2456\n",
"-0.0443454\t0.031192\t0.948056\t-4.08053\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Initial backward transformation matrix::\n",
"0.972794\t-0.0131489\t-0.0402961\t8.66609\n",
"0.0173157\t0.89158\t0.00354111\t11.0924\n",
"0.0449328\t-0.0299489\t1.05279\t4.34451\n",
"0\t0\t0\t1\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Final forward transformation matrix::\n",
"1.02483\t0.0241717\t0.0185933\t-9.21405\n",
"-0.00707995\t1.13079\t-0.0161562\t-12.3742\n",
"-0.0371059\t0.034859\t0.958463\t-4.19251\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Final backward transformation matrix::\n",
"0.974937\t-0.0202467\t-0.0192542\t8.65186\n",
"0.00663995\t0.883739\t0.0147678\t11.0587\n",
"0.0375022\t-0.0329251\t1.04205\t4.30694\n",
"0\t0\t0\t1\n",
"- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n",
"[reg_aladin_sym] Current level 3 / 3\n",
"[reg_aladin_sym] reference image size: \t256x256x48 voxels\t0.0683594x0.0683594x0.3 mm\n",
"[reg_aladin_sym] floating image size: \t228x160x264 voxels\t0.05x0.05x0.05 mm\n",
"[reg_aladin_sym] Block size = [4 4 4]\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Forward Block number = [64 64 12]\n",
"[reg_aladin_sym] Backward Block number = [57 40 66]\n",
"[reg_aladin_sym] Initial forward transformation matrix::\n",
"1.02483\t0.0241717\t0.0185933\t-9.21405\n",
"-0.00707995\t1.13079\t-0.0161562\t-12.3742\n",
"-0.0371059\t0.034859\t0.958463\t-4.19251\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Initial backward transformation matrix::\n",
"0.974937\t-0.0202467\t-0.0192542\t8.65186\n",
"0.00663995\t0.883739\t0.0147678\t11.0587\n",
"0.0375022\t-0.0329251\t1.04205\t4.30694\n",
"0\t0\t0\t1\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Final forward transformation matrix::\n",
"1.0258\t0.0270858\t0.021652\t-9.26662\n",
"-0.0150526\t1.12443\t-0.0192043\t-12.2354\n",
"-0.0363029\t0.0430287\t0.954655\t-4.24792\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Final backward transformation matrix::\n",
"0.973719\t-0.022593\t-0.0225388\t8.65091\n",
"0.0136569\t0.88834\t0.0175605\t11.0703\n",
"0.0364123\t-0.0408989\t1.04585\t4.27969\n",
"0\t0\t0\t1\n",
"- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n",
"Registration Performed in 0 min 49 sec\n",
"Have a good day !\n",
"\n",
"[NiftyReg ALADIN] Command line:\n",
"\t reg_aladin -ref /aida/lib/average_template_50.nii.gz -flo /testData/T2w/testDataBiasBet.nii.gz -res /testData/T2w/testDataBiasBet_IncidenceData.nii.gz -aff /testData/T2w/testDataBiasBetMatrixInv.txt\n",
"\n",
"[reg_aladin_sym] Parameters\n",
"[reg_aladin_sym] Reference image name: /aida/lib/average_template_50.nii.gz\n",
"[reg_aladin_sym] \t228x160x264 voxels\n",
"[reg_aladin_sym] \t0.05x0.05x0.05 mm\n",
"[reg_aladin_sym] Floating image name: /testData/T2w/testDataBiasBet.nii.gz\n",
"[reg_aladin_sym] \t256x256x48 voxels\n",
"[reg_aladin_sym] \t0.0683594x0.0683594x0.3 mm\n",
"[reg_aladin_sym] Maximum iteration number: 5 (10 during the first level)\n",
"[reg_aladin_sym] Percentage of blocks: 50 %\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Current level 1 / 3\n",
"[reg_aladin_sym] reference image size: \t57x40x66 voxels\t0.2x0.2x0.2 mm\n",
"[reg_aladin_sym] floating image size: \t64x64x48 voxels\t0.273438x0.273438x0.3 mm\n",
"[reg_aladin_sym] Block size = [4 4 4]\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Forward Block number = [15 10 17]\n",
"[reg_aladin_sym] Backward Block number = [16 16 12]\n",
"[reg_aladin_sym] Initial forward transformation matrix::\n",
"1\t0\t0\t8.725\n",
"0\t1\t0\t11.325\n",
"0\t0\t1\t4.575\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Initial backward transformation matrix::\n",
"1\t0\t0\t-8.725\n",
"0\t1\t0\t-11.325\n",
"0\t0\t1\t-4.575\n",
"0\t0\t0\t1\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Final forward transformation matrix::\n",
"1.00601\t-0.00649697\t-0.0279012\t8.6818\n",
"0.0089154\t0.926511\t0.00463073\t11.2134\n",
"0.0234342\t-0.0256979\t1.06076\t4.3197\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Final backward transformation matrix::\n",
"0.993351\t0.00768943\t0.0260945\t-8.82302\n",
"-0.00944774\t1.07911\t-0.00495935\t-11.9971\n",
"-0.0221738\t0.0259725\t0.942021\t-4.16798\n",
"0\t0\t0\t1\n",
"- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n",
"[reg_aladin_sym] Current level 2 / 3\n",
"[reg_aladin_sym] reference image size: \t114x80x132 voxels\t0.1x0.1x0.1 mm\n",
"[reg_aladin_sym] floating image size: \t128x128x48 voxels\t0.136719x0.136719x0.3 mm\n",
"[reg_aladin_sym] Block size = [4 4 4]\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Forward Block number = [29 20 33]\n",
"[reg_aladin_sym] Backward Block number = [32 32 12]\n",
"[reg_aladin_sym] Initial forward transformation matrix::\n",
"1.00601\t-0.00649697\t-0.0279012\t8.6818\n",
"0.0089154\t0.926511\t0.00463073\t11.2134\n",
"0.0234342\t-0.0256979\t1.06076\t4.3197\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Initial backward transformation matrix::\n",
"0.993351\t0.00768943\t0.0260945\t-8.82302\n",
"-0.00944774\t1.07911\t-0.00495935\t-11.9971\n",
"-0.0221738\t0.0259725\t0.942021\t-4.16798\n",
"0\t0\t0\t1\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Final forward transformation matrix::\n",
"1.00211\t-0.00624942\t-0.0241245\t8.68661\n",
"0.0196717\t0.929796\t0.0157848\t11.2179\n",
"0.0266633\t-0.0358128\t1.03354\t4.27152\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Final backward transformation matrix::\n",
"0.99713\t0.00759398\t0.0231585\t-8.84578\n",
"-0.0206474\t1.07472\t-0.0168955\t-11.8045\n",
"-0.0264393\t0.0370434\t0.966361\t-4.31371\n",
"0\t0\t0\t1\n",
"- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n",
"[reg_aladin_sym] Current level 3 / 3\n",
"[reg_aladin_sym] reference image size: \t228x160x264 voxels\t0.05x0.05x0.05 mm\n",
"[reg_aladin_sym] floating image size: \t256x256x48 voxels\t0.0683594x0.0683594x0.3 mm\n",
"[reg_aladin_sym] Block size = [4 4 4]\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Forward Block number = [57 40 66]\n",
"[reg_aladin_sym] Backward Block number = [64 64 12]\n",
"[reg_aladin_sym] Initial forward transformation matrix::\n",
"1.00211\t-0.00624942\t-0.0241245\t8.68661\n",
"0.0196717\t0.929796\t0.0157848\t11.2179\n",
"0.0266633\t-0.0358128\t1.03354\t4.27152\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Initial backward transformation matrix::\n",
"0.99713\t0.00759398\t0.0231585\t-8.84578\n",
"-0.0206474\t1.07472\t-0.0168955\t-11.8045\n",
"-0.0264393\t0.0370434\t0.966361\t-4.31371\n",
"0\t0\t0\t1\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"[reg_aladin_sym] Final forward transformation matrix::\n",
"0.993204\t-0.0120307\t-0.0282392\t8.68278\n",
"0.0145397\t0.917878\t0.0237863\t11.1678\n",
"0.0349271\t-0.0285382\t1.03592\t4.29998\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Final backward transformation matrix::\n",
"1.00568\t0.0140239\t0.027093\t-9.00525\n",
"-0.0150411\t1.08848\t-0.0254032\t-11.9161\n",
"-0.034322\t0.0295134\t0.963712\t-4.17553\n",
"0\t0\t0\t1\n",
"- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n",
"Registration Performed in 0 min 43 sec\n",
"Have a good day !\n",
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /aida/lib/average_template_50.nii.gz -flo /testData/T2w/Stroke_mask.nii.gz -trans /testData/T2w/testDataBiasBetMatrixInv.txt -res /testData/T2w/testDataBiasBet_IncidenceData_mask.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /aida/lib/average_template_50.nii.gz\n",
"\t228x160x264 voxels, 1 volumes\n",
"\t0.05x0.05x0.05 mm\n",
"Floating image name: /testData/T2w/Stroke_mask.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/T2w/testDataBiasBet_IncidenceData_mask.nii.gz\n",
"\n",
"[NiftyReg F3D] Command line:\n",
"\t reg_f3d -ref /testData/T2w/testDataBiasBet.nii.gz -flo /aida/lib/NP_template_sc0.nii.gz -sx 3 -sy 3 -sz 3 -jl 0.3 -res /testData/T2w/testDataBiasBet_Template.nii.gz -cpp /testData/T2w/testDataBiasBetMatrixBspline.nii -aff /testData/T2w/testDataBiasBetMatrixAff.txt\n",
"\n",
"[NiftyReg F3D] OpenMP is used with 8 thread(s)\n",
"[NiftyReg F3D] **************************************************\n",
"[NiftyReg F3D] INPUT PARAMETERS\n",
"[NiftyReg F3D] **************************************************\n",
"[NiftyReg F3D] Reference image:\n",
"[NiftyReg F3D] \t* name: /testData/T2w/testDataBiasBet.nii.gz\n",
"[NiftyReg F3D] \t* image dimension: 256 x 256 x 48 x 1\n",
"[NiftyReg F3D] \t* image spacing: 0.0683594 x 0.0683594 x 0.3 mm\n",
"[NiftyReg F3D] \t* intensity threshold for timepoint 1/1: [-3.4e+38 3.4e+38]\n",
"[NiftyReg F3D] \t* gaussian smoothing sigma: 0\n",
"[NiftyReg F3D]\n",
"[NiftyReg F3D] Floating image:\n",
"[NiftyReg F3D] \t* name: /aida/lib/NP_template_sc0.nii.gz\n",
"[NiftyReg F3D] \t* image dimension: 228 x 160 x 264 x 1\n",
"[NiftyReg F3D] \t* image spacing: 0.05 x 0.05 x 0.05 mm\n",
"[NiftyReg F3D] \t* intensity threshold for timepoint 1/1: [-3.4e+38 3.4e+38]\n",
"[NiftyReg F3D] \t* gaussian smoothing sigma: 0\n",
"[NiftyReg F3D]\n",
"[NiftyReg F3D] Warped image padding value: nan\n",
"[NiftyReg F3D]\n",
"[NiftyReg F3D] Level number: 3\n",
"[NiftyReg F3D]\n",
"[NiftyReg F3D] Maximum iteration number per level: 300\n",
"[NiftyReg F3D]\n",
"[NiftyReg F3D] Final spacing in mm: 3 3 3\n",
"[NiftyReg F3D]\n",
"[NiftyReg F3D] The NMI is used as a similarity measure.\n",
"[NiftyReg F3D] Similarity measure term weight: 0.695\n",
"[NiftyReg F3D]\n",
"[NiftyReg F3D] Bending energy penalty term weight: 0.005\n",
"[NiftyReg F3D]\n",
"[NiftyReg F3D] Linear energy penalty term weights: 0 0\n",
"[NiftyReg F3D]\n",
"[NiftyReg F3D] L2 norm of the displacement penalty term weights: 0\n",
"[NiftyReg F3D]\n",
"[NiftyReg F3D] Jacobian-based penalty term weight: 0.3\n",
"[NiftyReg F3D] \t* Jacobian-based penalty term is approximated\n",
"[NiftyReg F3D] **************************************************\n",
"[NiftyReg F3D] Current level: 1 / 3\n",
"[NiftyReg F3D] Current reference image\n",
"[NiftyReg F3D] \t* image dimension: 64 x 64 x 48 x 1\n",
"[NiftyReg F3D] \t* image spacing: 0.273438 x 0.273438 x 0.3 mm\n",
"[NiftyReg F3D] Current floating image\n",
"[NiftyReg F3D] \t* image dimension: 57 x 40 x 66 x 1\n",
"[NiftyReg F3D] \t* image spacing: 0.2 x 0.2 x 0.2 mm\n",
"[NiftyReg F3D] Current control point image\n",
"[NiftyReg F3D] \t* image dimension: 5 x 5 x 5\n",
"[NiftyReg F3D] \t* image spacing: 12 x 12 x 12 mm\n",
"[NiftyReg F3D] Initial objective function: 0.782466 = (wSIM)0.785363 - (wBE)1.62589e-15 - (wLE)0 - (wL2)0 - (wJAC)0.00289714\n",
"[NiftyReg F3D] [9] Current objective function: 0.782745 = (wSIM)0.785595 - (wBE)7.27e-08 - (wJAC)2.85e-03 [+ 0.3 mm]\n",
"[NiftyReg F3D] [17] Current objective function: 0.782745 = (wSIM)0.785595 - (wBE)7.27e-08 - (wJAC)2.85e-03 [+ 0 mm]\n",
"[NiftyReg F3D] Current registration level done\n",
"[NiftyReg F3D] --------------------------------------------------\n",
"[NiftyReg F3D] **************************************************\n",
"[NiftyReg F3D] Current level: 2 / 3\n",
"[NiftyReg F3D] Current reference image\n",
"[NiftyReg F3D] \t* image dimension: 128 x 128 x 48 x 1\n",
"[NiftyReg F3D] \t* image spacing: 0.136719 x 0.136719 x 0.3 mm\n",
"[NiftyReg F3D] Current floating image\n",
"[NiftyReg F3D] \t* image dimension: 114 x 80 x 132 x 1\n",
"[NiftyReg F3D] \t* image spacing: 0.1 x 0.1 x 0.1 mm\n",
"[NiftyReg F3D] Current control point image\n",
"[NiftyReg F3D] \t* image dimension: 6 x 6 x 6\n",
"[NiftyReg F3D] \t* image spacing: 6 x 6 x 6 mm\n",
"[NiftyReg F3D] Initial objective function: 0.778444 = (wSIM)0.781265 - (wBE)1.40679e-07 - (wLE)0 - (wL2)0 - (wJAC)0.00282145\n",
"[NiftyReg F3D] [9] Current objective function: 0.78145 = (wSIM)0.784275 - (wBE)7.48e-07 - (wJAC)2.82e-03 [+ 0.3 mm]\n",
"[NiftyReg F3D] [17] Current objective function: 0.78145 = (wSIM)0.784275 - (wBE)7.48e-07 - (wJAC)2.82e-03 [+ 0 mm]\n",
"[NiftyReg F3D] Current registration level done\n",
"[NiftyReg F3D] --------------------------------------------------\n",
"[NiftyReg F3D] **************************************************\n",
"[NiftyReg F3D] Current level: 3 / 3\n",
"[NiftyReg F3D] Current reference image\n",
"[NiftyReg F3D] \t* image dimension: 256 x 256 x 48 x 1\n",
"[NiftyReg F3D] \t* image spacing: 0.0683594 x 0.0683594 x 0.3 mm\n",
"[NiftyReg F3D] Current floating image\n",
"[NiftyReg F3D] \t* image dimension: 228 x 160 x 264 x 1\n",
"[NiftyReg F3D] \t* image spacing: 0.05 x 0.05 x 0.05 mm\n",
"[NiftyReg F3D] Current control point image\n",
"[NiftyReg F3D] \t* image dimension: 9 x 9 x 8\n",
"[NiftyReg F3D] \t* image spacing: 3 x 3 x 3 mm\n",
"[NiftyReg F3D] Initial objective function: 0.776861 = (wSIM)0.779707 - (wBE)1.21322e-06 - (wLE)0 - (wL2)0 - (wJAC)0.00284508\n",
"[NiftyReg F3D] [9] Current objective function: 0.777912 = (wSIM)0.780774 - (wBE)8.74e-06 - (wJAC)2.85e-03 [+ 0.3 mm]\n",
"[NiftyReg F3D] [18] Current objective function: 0.778025 = (wSIM)0.780923 - (wBE)2.76e-05 - (wJAC)2.87e-03 [+ 0.3 mm]\n",
"[NiftyReg F3D] [27] Current objective function: 0.780913 = (wSIM)0.78387 - (wBE)5.86e-05 - (wJAC)2.90e-03 [+ 0.3 mm]\n",
"[NiftyReg F3D] [36] Current objective function: 0.78097 = (wSIM)0.783961 - (wBE)7.48e-05 - (wJAC)2.92e-03 [+ 0.075 mm]\n",
"[NiftyReg F3D] [46] Current objective function: 0.781769 = (wSIM)0.784751 - (wBE)7.70e-05 - (wJAC)2.90e-03 [+ 0.202875 mm]\n",
"[NiftyReg F3D] [56] Current objective function: 0.781874 = (wSIM)0.784853 - (wBE)8.00e-05 - (wJAC)2.90e-03 [+ 0.0323332 mm]\n",
"[NiftyReg F3D] [64] Current objective function: 0.782299 = (wSIM)0.785275 - (wBE)8.01e-05 - (wJAC)2.90e-03 [+ 0.0874613 mm]\n",
"[NiftyReg F3D] [72] Current objective function: 0.782318 = (wSIM)0.785294 - (wBE)8.24e-05 - (wJAC)2.89e-03 [+ 0.0218653 mm]\n",
"[NiftyReg F3D] [80] Current objective function: 0.782465 = (wSIM)0.785441 - (wBE)8.50e-05 - (wJAC)2.89e-03 [+ 0.0723742 mm]\n",
"[NiftyReg F3D] [87] Current objective function: 0.782472 = (wSIM)0.785447 - (wBE)8.69e-05 - (wJAC)2.89e-03 [+ 0.00904678 mm]\n",
"[NiftyReg F3D] [95] Current objective function: 0.782579 = (wSIM)0.785552 - (wBE)8.81e-05 - (wJAC)2.88e-03 [+ 0.0419861 mm]\n",
"[NiftyReg F3D] [101] Current objective function: 0.782606 = (wSIM)0.78558 - (wBE)9.20e-05 - (wJAC)2.88e-03 [+ 0.0209931 mm]\n",
"[NiftyReg F3D] [108] Current objective function: 0.782698 = (wSIM)0.785672 - (wBE)9.30e-05 - (wJAC)2.88e-03 [+ 0.0440854 mm]\n",
"[NiftyReg F3D] [115] Current objective function: 0.782708 = (wSIM)0.785684 - (wBE)9.97e-05 - (wJAC)2.88e-03 [+ 0.0440854 mm]\n",
"[NiftyReg F3D] [123] Current objective function: 0.782894 = (wSIM)0.785872 - (wBE)1.08e-04 - (wJAC)2.87e-03 [+ 0.0925794 mm]\n",
"[NiftyReg F3D] [131] Current objective function: 0.782943 = (wSIM)0.78593 - (wBE)1.21e-04 - (wJAC)2.87e-03 [+ 0.0462897 mm]\n",
"[NiftyReg F3D] [139] Current objective function: 0.783085 = (wSIM)0.786073 - (wBE)1.21e-04 - (wJAC)2.87e-03 [+ 0.0590193 mm]\n",
"[NiftyReg F3D] [146] Current objective function: 0.783086 = (wSIM)0.78608 - (wBE)1.29e-04 - (wJAC)2.86e-03 [+ 0.0590193 mm]\n",
"[NiftyReg F3D] [156] Current objective function: 0.783393 = (wSIM)0.786416 - (wBE)1.61e-04 - (wJAC)2.86e-03 [+ 0.169467 mm]\n",
"[NiftyReg F3D] [164] Current objective function: 0.783458 = (wSIM)0.786691 - (wBE)3.13e-04 - (wJAC)2.92e-03 [+ 0.169467 mm]\n",
"[NiftyReg F3D] [172] Current objective function: 0.784017 = (wSIM)0.787272 - (wBE)3.45e-04 - (wJAC)2.91e-03 [+ 0.169467 mm]\n",
"[NiftyReg F3D] [180] Current objective function: 0.784022 = (wSIM)0.787279 - (wBE)3.49e-04 - (wJAC)2.91e-03 [+ 0.0105917 mm]\n",
"[NiftyReg F3D] [190] Current objective function: 0.784283 = (wSIM)0.787538 - (wBE)3.47e-04 - (wJAC)2.91e-03 [+ 0.0817212 mm]\n",
"[NiftyReg F3D] [199] Current objective function: 0.784291 = (wSIM)0.787543 - (wBE)3.47e-04 - (wJAC)2.91e-03 [+ 0.0158335 mm]\n",
"[NiftyReg F3D] [206] Current objective function: 0.78439 = (wSIM)0.787637 - (wBE)3.48e-04 - (wJAC)2.90e-03 [+ 0.0428296 mm]\n",
"[NiftyReg F3D] [212] Current objective function: 0.784397 = (wSIM)0.78764 - (wBE)3.52e-04 - (wJAC)2.89e-03 [+ 0.0214148 mm]\n",
"[NiftyReg F3D] [220] Current objective function: 0.78456 = (wSIM)0.787812 - (wBE)3.60e-04 - (wJAC)2.89e-03 [+ 0.0708829 mm]\n",
"[NiftyReg F3D] [227] Current objective function: 0.784609 = (wSIM)0.787874 - (wBE)3.80e-04 - (wJAC)2.88e-03 [+ 0.0708829 mm]\n",
"[NiftyReg F3D] [234] Current objective function: 0.784756 = (wSIM)0.788011 - (wBE)3.82e-04 - (wJAC)2.87e-03 [+ 0.0708829 mm]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"[NiftyReg F3D] [240] Current objective function: 0.784756 = (wSIM)0.788011 - (wBE)3.82e-04 - (wJAC)2.87e-03 [+ 0 mm]\n",
"[NiftyReg F3D] Current registration level done\n",
"[NiftyReg F3D] --------------------------------------------------\n",
"[NiftyReg F3D] Registration Performed in 0 min 55 sec\n",
"[NiftyReg F3D] Have a good day !\n",
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /testData/T2w/testDataBiasBet.nii.gz -flo /aida/lib/average_template_50.nii.gz -cpp /testData/T2w/testDataBiasBetMatrixBspline.nii -res /testData/T2w/testDataBiasBet_TemplateAllen.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /testData/T2w/testDataBiasBet.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"Floating image name: /aida/lib/average_template_50.nii.gz\n",
"\t228x160x264 voxels, 1 volumes\n",
"\t0.05x0.05x0.05 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/T2w/testDataBiasBet_TemplateAllen.nii.gz\n",
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /testData/T2w/testDataBiasBet.nii.gz -flo /aida/lib/annoVolume+2000_rsfMRI.nii.gz -inter 0 -cpp /testData/T2w/testDataBiasBetMatrixBspline.nii -res /testData/T2w/testDataBiasBet_AnnorsfMRI.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /testData/T2w/testDataBiasBet.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"Floating image name: /aida/lib/annoVolume+2000_rsfMRI.nii.gz\n",
"\t228x160x264 voxels, 1 volumes\n",
"\t0.05x0.05x0.05 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/T2w/testDataBiasBet_AnnorsfMRI.nii.gz\n",
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /testData/T2w/testDataBiasBet.nii.gz -flo /aida/lib/annotation_50CHANGEDanno.nii.gz -inter 0 -cpp /testData/T2w/testDataBiasBetMatrixBspline.nii -res /testData/T2w/testDataBiasBet_Anno.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /testData/T2w/testDataBiasBet.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"Floating image name: /aida/lib/annotation_50CHANGEDanno.nii.gz\n",
"\t228x160x264 voxels, 1 volumes\n",
"\t0.05x0.05x0.05 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/T2w/testDataBiasBet_Anno.nii.gz\n",
"T2 Registration \u001b[0;30;42m COMPLETED \u001b[0m\n"
]
}
],
"source": [
"!docker exec -w /aida/bin/2.1_T2PreProcessing aidamri \\\n",
"python registration_T2.py -i /testData/T2w/testDataBiasBet.nii.gz"
]
},
{
"cell_type": "code",
"execution_count": 20,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Stroke_mask.nii.gz\r\n",
"preprocess.log\r\n",
"reg.log\r\n",
"testData.5.1.nii.gz\r\n",
"testDataBias.nii.gz\r\n",
"testDataBiasBet.nii.gz\r\n",
"testDataBiasBetMatrixAff.txt\r\n",
"testDataBiasBetMatrixBspline.nii\r\n",
"testDataBiasBetMatrixInv.txt\r\n",
"testDataBiasBet_Anno.nii.gz\r\n",
"testDataBiasBet_AnnorsfMRI.nii.gz\r\n",
"testDataBiasBet_IncidenceData.nii.gz\r\n",
"testDataBiasBet_IncidenceData_mask.nii.gz\r\n",
"testDataBiasBet_Template.nii.gz\r\n",
"testDataBiasBet_TemplateAff.nii.gz\r\n",
"testDataBiasBet_TemplateAllen.nii.gz\r\n",
"testDataBiasBet_mask.nii.gz\r\n"
]
}
],
"source": [
"!docker exec -w /testData/T2w aidamri \\\n",
"ls -t"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"The new files are:\n",
"* testDataBiasBet_Anno.nii.gz (registered atlas file)\n",
"* testDataBiasBet_AnnorsfMRI.nii.gz (registered parent atlas file)\n",
"* testDataBiasBet_IncidenceData.nii.gz (file necessary to create the incidence map)\n",
"* testDataBiasBet_IncidenceData_mask.nii.gz (file necessary to create the incidence map)\n",
"* testDataBiasBet_Template.nii.gz (intermediate step in registration)\n",
"* testDataBiasBet_TemplateAff.nii.gz (intermediate step in registration (affine transformation))\n",
"* testDataBiasBet_TemplateAllen.nii.gz (intermediate step in registration)\n",
"* testDataBiasBetMatrixBspline.nii (intermediate step in registration)\n",
"* testDataBiasBetMatrixInv.txt (intermediate step in registration)\n",
"* testDataBiasBetMatrixAff.txt (intermediate step in registration)\n",
"* reg.log (basic registration log file)\n",
"\n",
"It is recommended to use ITK-Snap to check the registration results by superimposing the Atlas segmentation over the extracted brain, i.e. drag the bias bet file into ITK-Snap and load it as the main image, then load in the anno file as segmentation (see image below).\n",
"\n",
"\n",
"\n",
"Finally, extract the incidence size, the parental incidence size and the affected regions. To save some typing, the commands are put into the same docker exec directive by using the `bash -c` command. Basically, we pass consecutive commands as a string to the `bash -c` command, separated by a semicolon."
]
},
{
"cell_type": "code",
"execution_count": 21,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"'1' folder will be proccessed...\n",
"'1' folder will be proccessed...\n"
]
}
],
"source": [
"!docker exec -w /aida/bin/3.1_T2Processing aidamri \\\n",
"bash -c \"\\\n",
"python getIncidenceSize_par.py -i /testData/T2w ; \\\n",
"python getIncidenceSize.py -i /testData/T2w\""
]
},
{
"cell_type": "code",
"execution_count": 22,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Stroke_mask.nii.gz\r\n",
"affectedRegions.nii.gz\r\n",
"affectedRegions.txt\r\n",
"affectedRegions_Parental.nii.gz\r\n",
"affectedRegions_Parental.txt\r\n",
"labelCount.mat\r\n",
"labelCount_par.mat\r\n",
"preprocess.log\r\n",
"reg.log\r\n",
"testData.5.1.nii.gz\r\n",
"testDataBias.nii.gz\r\n",
"testDataBiasBet.nii.gz\r\n",
"testDataBiasBetMatrixAff.txt\r\n",
"testDataBiasBetMatrixBspline.nii\r\n",
"testDataBiasBetMatrixInv.txt\r\n",
"testDataBiasBet_Anno.nii.gz\r\n",
"testDataBiasBet_AnnorsfMRI.nii.gz\r\n",
"testDataBiasBet_IncidenceData.nii.gz\r\n",
"testDataBiasBet_IncidenceDataAnno_mask.nii.gz\r\n",
"testDataBiasBet_IncidenceDataAnno_parmask.nii.gz\r\n",
"testDataBiasBet_IncidenceData_mask.nii.gz\r\n",
"testDataBiasBet_Template.nii.gz\r\n",
"testDataBiasBet_TemplateAff.nii.gz\r\n",
"testDataBiasBet_TemplateAllen.nii.gz\r\n",
"testDataBiasBet_mask.nii.gz\r\n"
]
}
],
"source": [
"!docker exec -w /testData/T2w aidamri \\\n",
"ls -t"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The following files now should be present:\n",
"* testDataBiasBet_IncidenceDataAnno_parmask.nii.gz (file necessary to create the incidence map) \n",
"* affectedRegions_Parental.nii.gz (incidence map image with parental atlas regions)\n",
"* affectedRegions_Parental.txt (incidence map results with parental atlas regions)\n",
"* labelCount_par.mat (file necessary to create the incidence map) \n",
"* testDataBiasBet_IncidenceDataAnno_mask.nii.gz (file necessary to create the incidence map) \n",
"* affectedRegions.nii.gz (incidence map image with atlas regions)\n",
"* affectedRegions.txt (incidence map results with atlas regions)\n",
"* labelCount.mat (file necessary to create the incidence map) "
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## DTI<a class=\"anchor\" id=\"dti\"></a>\n",
"Similar to T2w, it begins with pre-processing. The test data is located in the DTI folder:"
]
},
{
"cell_type": "code",
"execution_count": 23,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"testData.7.1.nii.gz\r\n"
]
}
],
"source": [
"!docker exec -w /testData/DTI aidamri \\\n",
"ls"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The pre-processing includes dimension reduction, bias correction, threshold application and brain extraction."
]
},
{
"cell_type": "code",
"execution_count": 24,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"DTI Preprocessing \u001b[5m...\u001b[0m (wait!)\r",
"DTI Preprocessing \u001b[0;30;42m COMPLETED \u001b[0m\r\n",
" 0% | |\r",
" 5% |### |\r",
" 10% |####### |\r",
" 15% |########## |\r",
" 20% |############## |\r",
" 25% |################## |\r",
" 30% |##################### |\r",
" 35% |######################### |\r",
" 40% |############################ |\r",
" 45% |################################ |\r",
" 50% |#################################### |\r",
" 55% |####################################### |\r",
" 60% |########################################### |\r",
" 65% |############################################## |\r",
" 70% |################################################## |\r",
" 75% |###################################################### |\r",
" 80% |######################################################### |\r",
" 85% |############################################################# |\r",
" 90% |################################################################ |\r",
" 95% |#################################################################### |\r",
"100% |########################################################################|\r",
"\r\n"
]
}
],
"source": [
"!docker exec -w /aida/bin/2.2_DTIPreProcessing aidamri \\\n",
"python preProcessing_DTI.py -i /testData/DTI/testData.7.1.nii.gz"
]
},
{
"cell_type": "code",
"execution_count": 25,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"preprocess.log\r\n",
"testData.7.1.nii.gz\r\n",
"testDataDN.nii.gz\r\n",
"testDataDNSmooth.nii.gz\r\n",
"testDataDNSmoothMico.nii.gz\r\n",
"testDataDNSmoothMicoBet.nii.gz\r\n",
"testDataDNSmoothMicoBet_mask.nii.gz\r\n"
]
}
],
"source": [
"!docker exec -w /testData/DTI aidamri \\\n",
"ls -t"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Alongside the pre-process log file there are the following files after execution:\n",
"* testDataDN.nii.gz\n",
"* testDataDNSmooth.nii.gz\n",
"* testDataDNSmoothMico.nii.gz\n",
"* testDataDNSmoothMicoBet.nii.gz\n",
"* testDataDNSmoothMicoBet_mask.nii.gz\n",
"\n",
"Afterwards, proceed with registration using the brain extracted file. The reference stroke mask used is already located in the folder. It is possible to use another reference mask from other days or dataset by using the `-r [STROKE-MASK-FILENAME]` flag."
]
},
{
"cell_type": "code",
"execution_count": 26,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"[NiftyReg ALADIN] Command line:\n",
"\t reg_aladin -ref /testData/DTI/testDataDNSmoothMicoBet.nii.gz -flo /testData/T2w/testDataBiasBet.nii.gz -res /testData/DTI/testDataDNSmoothMicoBet_T2w.nii.gz -rigOnly -aff /testData/DTI/testDataDNSmoothMicoBettransMatrixAff.txt\n",
"\n",
"[reg_aladin_sym] Parameters\n",
"[reg_aladin_sym] Reference image name: /testData/DTI/testDataDNSmoothMicoBet.nii.gz\n",
"[reg_aladin_sym] \t128x128x20 voxels\n",
"[reg_aladin_sym] \t0.140625x0.140625x0.4 mm\n",
"[reg_aladin_sym] Floating image name: /testData/T2w/testDataBiasBet.nii.gz\n",
"[reg_aladin_sym] \t256x256x48 voxels\n",
"[reg_aladin_sym] \t0.0683594x0.0683594x0.3 mm\n",
"[reg_aladin_sym] Maximum iteration number: 5 (10 during the first level)\n",
"[reg_aladin_sym] Percentage of blocks: 50 %\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Current level 1 / 3\n",
"[reg_aladin_sym] reference image size: \t32x32x20 voxels\t0.5625x0.5625x0.4 mm\n",
"[reg_aladin_sym] floating image size: \t64x64x48 voxels\t0.273438x0.273438x0.3 mm\n",
"[reg_aladin_sym] Block size = [4 4 4]\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Forward Block number = [8 8 5]\n",
"[reg_aladin_sym] Backward Block number = [16 16 12]\n",
"[reg_aladin_sym] Initial forward transformation matrix::\n",
"1\t0\t0\t-0.25\n",
"0\t1\t0\t-0.25\n",
"0\t0\t1\t3.2\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Initial backward transformation matrix::\n",
"1\t0\t0\t0.25\n",
"0\t1\t0\t0.25\n",
"0\t0\t1\t-3.2\n",
"0\t0\t0\t1\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Final forward transformation matrix::\n",
"0.998172\t0.0275713\t0.0537913\t-0.118758\n",
"-0.0297107\t0.998782\t0.0393854\t0.111694\n",
"-0.0526399\t-0.0409116\t0.997775\t4.09616\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Final backward transformation matrix::\n",
"0.998172\t-0.0297107\t-0.0526399\t0.337481\n",
"0.0275713\t0.998782\t-0.0409116\t0.0592969\n",
"0.0537913\t0.0393854\t0.997775\t-4.08506\n",
"0\t0\t0\t1\n",
"- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n",
"[reg_aladin_sym] Current level 2 / 3\n",
"[reg_aladin_sym] reference image size: \t64x64x20 voxels\t0.28125x0.28125x0.4 mm\n",
"[reg_aladin_sym] floating image size: \t128x128x48 voxels\t0.136719x0.136719x0.3 mm\n",
"[reg_aladin_sym] Block size = [4 4 4]\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Forward Block number = [16 16 5]\n",
"[reg_aladin_sym] Backward Block number = [32 32 12]\n",
"[reg_aladin_sym] Initial forward transformation matrix::\n",
"0.998172\t0.0275713\t0.0537913\t-0.118758\n",
"-0.0297107\t0.998782\t0.0393854\t0.111694\n",
"-0.0526399\t-0.0409116\t0.997775\t4.09616\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Initial backward transformation matrix::\n",
"0.998172\t-0.0297107\t-0.0526399\t0.337481\n",
"0.0275713\t0.998782\t-0.0409116\t0.0592969\n",
"0.0537913\t0.0393854\t0.997775\t-4.08506\n",
"0\t0\t0\t1\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Final forward transformation matrix::\n",
"0.999048\t0.0108569\t0.0422583\t0.0920481\n",
"-0.013317\t0.998206\t0.0583748\t-0.194553\n",
"-0.0415487\t-0.058882\t0.9974\t4.13923\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Final backward transformation matrix::\n",
"0.999048\t-0.013317\t-0.0415487\t0.0774287\n",
"0.0108569\t0.998206\t-0.058882\t0.436931\n",
"0.0422583\t0.0583748\t0.9974\t-4.12101\n",
"0\t0\t0\t1\n",
"- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n",
"[reg_aladin_sym] Current level 3 / 3\n",
"[reg_aladin_sym] reference image size: \t128x128x20 voxels\t0.140625x0.140625x0.4 mm\n",
"[reg_aladin_sym] floating image size: \t256x256x48 voxels\t0.0683594x0.0683594x0.3 mm\n",
"[reg_aladin_sym] Block size = [4 4 4]\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Forward Block number = [32 32 5]\n",
"[reg_aladin_sym] Backward Block number = [64 64 12]\n",
"[reg_aladin_sym] Initial forward transformation matrix::\n",
"0.999048\t0.0108569\t0.0422583\t0.0920481\n",
"-0.013317\t0.998206\t0.0583748\t-0.194553\n",
"-0.0415487\t-0.058882\t0.9974\t4.13923\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Initial backward transformation matrix::\n",
"0.999048\t-0.013317\t-0.0415487\t0.0774287\n",
"0.0108569\t0.998206\t-0.058882\t0.436931\n",
"0.0422583\t0.0583748\t0.9974\t-4.12101\n",
"0\t0\t0\t1\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Final forward transformation matrix::\n",
"0.999877\t0.00773625\t0.013682\t0.195948\n",
"-0.00810399\t0.999602\t0.0270294\t-0.227389\n",
"-0.0134674\t-0.0271369\t0.999541\t3.59453\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Final backward transformation matrix::\n",
"0.999876\t-0.00810399\t-0.0134674\t-0.149358\n",
"0.00773625\t0.999602\t-0.0271369\t0.323327\n",
"0.013682\t0.0270294\t0.999541\t-3.58941\n",
"0\t0\t0\t1\n",
"- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n",
"Registration Performed in 0 min 4 sec\n",
"Have a good day !\n",
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /testData/T2w/testDataBiasBet_Anno.nii.gz -flo /aida/lib/ARA_annotationR+2000.nii.gz -trans /testData/T2w/testDataBiasBetMatrixBspline.nii -inter 0 -res /testData/DTI/testDataDNSmoothMicoBet_AnnoSplit.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /testData/T2w/testDataBiasBet_Anno.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"Floating image name: /aida/lib/ARA_annotationR+2000.nii.gz\n",
"\t228x160x264 voxels, 1 volumes\n",
"\t0.05x0.05x0.05 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/DTI/testDataDNSmoothMicoBet_AnnoSplit.nii.gz\n",
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /testData/DTI/testDataDNSmoothMicoBet.nii.gz -flo /testData/DTI/testDataDNSmoothMicoBet_AnnoSplit.nii.gz -trans /testData/DTI/testDataDNSmoothMicoBettransMatrixAff.txt -inter 0 -res /testData/DTI/testDataDNSmoothMicoBet_AnnoSplit.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /testData/DTI/testDataDNSmoothMicoBet.nii.gz\n",
"\t128x128x20 voxels, 1 volumes\n",
"\t0.140625x0.140625x0.4 mm\n",
"Floating image name: /testData/DTI/testDataDNSmoothMicoBet_AnnoSplit.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/DTI/testDataDNSmoothMicoBet_AnnoSplit.nii.gz\n",
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /testData/T2w/testDataBiasBet_Anno.nii.gz -flo /aida/lib/annoVolume+2000_rsfMRI.nii.gz -trans /testData/T2w/testDataBiasBetMatrixBspline.nii -inter 0 -res /testData/DTI/testDataDNSmoothMicoBet_AnnoSplit_rsfMRI.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /testData/T2w/testDataBiasBet_Anno.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"Floating image name: /aida/lib/annoVolume+2000_rsfMRI.nii.gz\n",
"\t228x160x264 voxels, 1 volumes\n",
"\t0.05x0.05x0.05 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/DTI/testDataDNSmoothMicoBet_AnnoSplit_rsfMRI.nii.gz\n",
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /testData/DTI/testDataDNSmoothMicoBet.nii.gz -flo /testData/DTI/testDataDNSmoothMicoBet_AnnoSplit_rsfMRI.nii.gz -trans /testData/DTI/testDataDNSmoothMicoBettransMatrixAff.txt -inter 0 -res /testData/DTI/testDataDNSmoothMicoBet_AnnoSplit_rsfMRI.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /testData/DTI/testDataDNSmoothMicoBet.nii.gz\n",
"\t128x128x20 voxels, 1 volumes\n",
"\t0.140625x0.140625x0.4 mm\n",
"Floating image name: /testData/DTI/testDataDNSmoothMicoBet_AnnoSplit_rsfMRI.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/DTI/testDataDNSmoothMicoBet_AnnoSplit_rsfMRI.nii.gz\n",
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /testData/T2w/testDataBiasBet_Anno.nii.gz -flo /aida/lib/annoVolume.nii.gz -trans /testData/T2w/testDataBiasBetMatrixBspline.nii -inter 0 -res /testData/DTI/testDataDNSmoothMicoBet_Anno_rsfMRI.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /testData/T2w/testDataBiasBet_Anno.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"Floating image name: /aida/lib/annoVolume.nii.gz\n",
"\t228x160x264 voxels, 1 volumes\n",
"\t0.05x0.05x0.05 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/DTI/testDataDNSmoothMicoBet_Anno_rsfMRI.nii.gz\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /testData/DTI/testDataDNSmoothMicoBet.nii.gz -flo /testData/DTI/testDataDNSmoothMicoBet_Anno_rsfMRI.nii.gz -trans /testData/DTI/testDataDNSmoothMicoBettransMatrixAff.txt -inter 0 -res /testData/DTI/testDataDNSmoothMicoBet_Anno_rsfMRI.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /testData/DTI/testDataDNSmoothMicoBet.nii.gz\n",
"\t128x128x20 voxels, 1 volumes\n",
"\t0.140625x0.140625x0.4 mm\n",
"Floating image name: /testData/DTI/testDataDNSmoothMicoBet_Anno_rsfMRI.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/DTI/testDataDNSmoothMicoBet_Anno_rsfMRI.nii.gz\n",
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /testData/DTI/testDataDNSmoothMicoBet.nii.gz -flo /testData/T2w/testDataBiasBet_TemplateAllen.nii.gz -cpp /testData/DTI/testDataDNSmoothMicoBettransMatrixAff.txt -res /testData/DTI/testDataDNSmoothMicoBet_Template.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /testData/DTI/testDataDNSmoothMicoBet.nii.gz\n",
"\t128x128x20 voxels, 1 volumes\n",
"\t0.140625x0.140625x0.4 mm\n",
"Floating image name: /testData/T2w/testDataBiasBet_TemplateAllen.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/DTI/testDataDNSmoothMicoBet_Template.nii.gz\n",
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /testData/DTI/testDataDNSmoothMicoBet.nii.gz -flo /testData/T2w/Stroke_mask.nii.gz -inter 0 -cpp /testData/DTI/testDataDNSmoothMicoBettransMatrixAff.txt -res /testData/DTI/testDataDNSmoothMicoBetStroke_mask.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /testData/DTI/testDataDNSmoothMicoBet.nii.gz\n",
"\t128x128x20 voxels, 1 volumes\n",
"\t0.140625x0.140625x0.4 mm\n",
"Floating image name: /testData/T2w/Stroke_mask.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/DTI/testDataDNSmoothMicoBetStroke_mask.nii.gz\n",
"DTI Registration \u001b[0;30;42m COMPLETED \u001b[0m\n"
]
}
],
"source": [
"!docker exec -w /aida/bin/2.2_DTIPreProcessing aidamri \\\n",
"python registration_DTI.py -i /testData/DTI/testDataDNSmoothMicoBet.nii.gz"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The results of the registration can be seen below. The rsfMRI segmentation was used in this case.\n",
""
]
},
{
"cell_type": "code",
"execution_count": 27,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"testDataDNSmoothMicoBetAllen_scaled.nii\r\n",
"testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.nii\r\n",
"testDataDNSmoothMicoBetAnno_scaled.nii\r\n",
"testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.txt\r\n",
"testDataDNSmoothMicoBetAnno_scaled.txt\r\n",
"testDataDNSmoothMicoBetMask_scaled.nii\r\n",
"testDataDNSmoothMicoBetrsfMRI_Mask_scaled.nii\r\n",
"testDataDNSmoothMicoBetrsfMRI_Mask_scaled.txt\r\n",
"testDataDNSmoothMicoBetStrokeMask_scaled.nii\r\n",
"testDataDNSmoothMicoBetStrokeMask_scaled.txt\r\n"
]
}
],
"source": [
"!docker exec -w /testData/DTI/DSI_studio aidamri ls -t "
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The following files and folders were added:\n",
"* registration.log\n",
"* DSI_studio\n",
" - testDataDNSmoothMicoBetAllen_scaled.nii\n",
" - testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.nii\n",
" - testDataDNSmoothMicoBetAnno_scaled.nii\n",
" - testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.txt\n",
" - testDataDNSmoothMicoBetAnno_scaled.txt\n",
" - testDataDNSmoothMicoBetMask_scaled.nii\n",
" - testDataDNSmoothMicoBetrsfMRI_Mask_scaled.txt\n",
" - testDataDNSmoothMicoBetrsfMRI_Mask_scaled.nii\n",
" - testDataDNSmoothMicoBetStrokeMask_scaled.txt\n",
" - testDataDNSmoothMicoBetStrokeMask_scaled.nii\n",
"* testDataDNSmoothMicoBetAnno_rsfMRI_mask.nii.gz\n",
"* testDataDNSmoothMicoBetAnno_mask.nii.gz\n",
"* testDataDNSmoothMicoBetStroke_mask.nii.gz\n",
"* testDataDNSmoothMicoBet_Template.nii.gz\n",
"* testDataDNSmoothMicoBet_Anno_rsfMRI.nii.gz\n",
"* testDataDNSmoothMicoBet_AnnoSplit_rsfMRI.nii.gz\n",
"* testDataDNSmoothMicoBet_AnnoSplit.nii.gz\n",
"* testDataDNSmoothMicoBettransMatrixAff.txt\n",
"* testDataDNSmoothMicoBet_T2w.nii.gz\n",
"\n",
"Connectivity can be calculated by using the DSI studio software, invoked via the dsi_main Python protocol."
]
},
{
"cell_type": "code",
"execution_count": 28,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"DSI Studio \"Chen\" Apr 14 2022\n",
"source=testData_mcf.nii.gz\n",
"action=src\n",
"reading testData_mcf.nii.gz\n",
"b_table=/aida/lib/DTI_Jones30.txt\n",
"b-table /aida/lib/DTI_Jones30.txt loaded\n",
"output=/testData/DTI/src/testData_mcf.nii.src.gz\n",
"output src to /testData/DTI/src/testData_mcf.nii.src.gz\n",
"sort_b_table=0\n",
"up_sampling=0\n",
"save testData_mcf.nii.src.gz\n",
"DSI Studio \"Chen\" Apr 14 2022\n",
"source=/testData/DTI/src/testData_mcf.nii.src.gz\n",
"action=rec\n",
"loading source...\n",
"SRC file loaded\n",
"src loaded\n",
"mask=/testData/DTI/DSI_studio/testDataDNSmoothMicoBetMask_scaled.nii\n",
"param0=16\n",
"method=1\n",
"template=1\n",
"odf_order=8\n",
"odf_resolving=0\n",
"record_odf=0\n",
"dti_no_high_b=0\n",
"check_btable=0\n",
"other_output=fa,ad,rd,md,nqa,iso,rdi,nrdi\n",
"num_fiber=5\n",
"r2_weighted=0\n",
"thread_count=8\n",
"half_sphere=1\n",
"scheme_balance=1\n",
"start reconstruction...\n",
"DTI\n",
"saving testData_mcf.nii.src.gz.dti.fib.gz\n",
"reconstruction finished.\n",
"DSI Studio \"Chen\" Apr 14 2022\n",
"source=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz\n",
"action=exp\n",
"export=fa\n",
"loading /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz...\n",
"FIB file loaded\n",
"fa.nii.gz saved \n",
"DSI Studio \"Chen\" Apr 14 2022\n",
"source=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz\n",
"action=exp\n",
"export=md\n",
"loading /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz...\n",
"FIB file loaded\n",
"md loaded\n",
"md.nii.gz saved \n",
"DSI Studio \"Chen\" Apr 14 2022\n",
"source=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz\n",
"action=exp\n",
"export=ad\n",
"loading /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz...\n",
"FIB file loaded\n",
"ad loaded\n",
"ad.nii.gz saved \n",
"DSI Studio \"Chen\" Apr 14 2022\n",
"source=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz\n",
"action=exp\n",
"export=rd\n",
"loading /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz...\n",
"FIB file loaded\n",
"rd loaded\n",
"rd.nii.gz saved \n",
"DSI Studio \"Chen\" Apr 14 2022\n",
"source=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz\n",
"action=trk\n",
"loading /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz...\n",
"FIB file loaded\n",
"otsu_threshold=0.6\n",
"fa_threshold=.02\n",
"dt_threshold=0.2\n",
"turning_angle=55\n",
"step_size=.5\n",
"smoothing=.1\n",
"min_length=.5\n",
"max_length=12.0\n",
"method=0\n",
"initial_dir=0\n",
"check_ending=0\n",
"tip_iteration=0\n",
"fiber_count=1000000\n",
"seed_count=0\n",
"thread_count=8\n",
"start tracking.\n",
"thread_count=8\n",
"finished tracking.\n",
"1000000 tracts are generated using 1033230 seeds.\n",
"0 tracts are removed by pruning.\n",
"The final analysis results in 1000000 tracts.\n",
"output=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz\n",
"saving testData_mcf.nii.src.gz.dti.fib.gz.trk.gz\n",
"Warning: --interpolation is not used. Please check command line syntax.\n",
"DSI Studio \"Chen\" Apr 14 2022\n",
"source=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz\n",
"action=ana\n",
"loading /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz...\n",
"FIB file loaded\n",
"tract=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz\n",
"loading testData_mcf.nii.src.gz.dti.fib.gz.trk.gz\n",
"A total of 1000000 tracks loaded\n",
"a total of 1 tract file(s) loaded\n",
"connectivity=/testData/DTI/DSI_studio/testDataDNSmoothMicoBetStrokeMask_scaled.nii\n",
"connectivity_type=pass,end\n",
"connectivity_value=qa\n",
"loading /testData/DTI/DSI_studio/testDataDNSmoothMicoBetStrokeMask_scaled.nii\n",
"reading /testData/DTI/DSI_studio/testDataDNSmoothMicoBetStrokeMask_scaled.nii as a NIFTI regioin file\n",
"DWI dimension=(128,128,20)\n",
"NIFTI dimension=(128,128,20)\n",
"nifti loaded as multiple ROI file\n",
"looking for region label file\n",
"label file loaded:/testData/DTI/DSI_studio/testDataDNSmoothMicoBetStrokeMask_scaled.txt\n",
"a total of 45 regions are loaded.\n",
"connectivity_threshold=0.001\n",
"count tracks by passing\n",
"calculate matrix using qa\n",
"connectivity calculation error:Cannot quantify matrix value using qa\n",
"DSI Studio \"Chen\" Apr 14 2022\n",
"source=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz\n",
"action=ana\n",
"loading /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz...\n",
"FIB file loaded\n",
"tract=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz\n",
"loading testData_mcf.nii.src.gz.dti.fib.gz.trk.gz\n",
"A total of 1000000 tracks loaded\n",
"a total of 1 tract file(s) loaded\n",
"connectivity=/testData/DTI/DSI_studio/testDataDNSmoothMicoBetStrokeMask_scaled.nii\n",
"connectivity_type=pass,end\n",
"connectivity_value=count\n",
"loading /testData/DTI/DSI_studio/testDataDNSmoothMicoBetStrokeMask_scaled.nii\n",
"reading /testData/DTI/DSI_studio/testDataDNSmoothMicoBetStrokeMask_scaled.nii as a NIFTI regioin file\n",
"DWI dimension=(128,128,20)\n",
"NIFTI dimension=(128,128,20)\n",
"nifti loaded as multiple ROI file\n",
"looking for region label file\n",
"label file loaded:/testData/DTI/DSI_studio/testDataDNSmoothMicoBetStrokeMask_scaled.txt\n",
"a total of 45 regions are loaded.\n",
"connectivity_threshold=0.001\n",
"count tracks by passing\n",
"calculate matrix using count\n",
"export connectivity matrix to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetStrokeMask_scaled.count.pass.connectivity.mat\n",
"export connectogram to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetStrokeMask_scaled.count.pass.connectogram.txt\n",
"export network measures to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetStrokeMask_scaled.count.pass.network_measures.txt\n",
"count tracks by ending\n",
"calculate matrix using count\n",
"export connectivity matrix to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetStrokeMask_scaled.count.end.connectivity.mat\n",
"export connectogram to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetStrokeMask_scaled.count.end.connectogram.txt\n",
"export network measures to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetStrokeMask_scaled.count.end.network_measures.txt\n",
"DSI Studio \"Chen\" Apr 14 2022\n",
"source=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz\n",
"action=ana\n",
"loading /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz...\n",
"FIB file loaded\n",
"tract=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz\n",
"loading testData_mcf.nii.src.gz.dti.fib.gz.trk.gz\n",
"A total of 1000000 tracks loaded\n",
"a total of 1 tract file(s) loaded\n",
"connectivity=/testData/DTI/DSI_studio/testDataDNSmoothMicoBetrsfMRI_Mask_scaled.nii\n",
"connectivity_type=pass,end\n",
"connectivity_value=qa\n",
"loading /testData/DTI/DSI_studio/testDataDNSmoothMicoBetrsfMRI_Mask_scaled.nii\n",
"reading /testData/DTI/DSI_studio/testDataDNSmoothMicoBetrsfMRI_Mask_scaled.nii as a NIFTI regioin file\n",
"DWI dimension=(128,128,20)\n",
"NIFTI dimension=(128,128,20)\n",
"nifti loaded as multiple ROI file\n",
"looking for region label file\n",
"label file loaded:/testData/DTI/DSI_studio/testDataDNSmoothMicoBetrsfMRI_Mask_scaled.txt\n",
"a total of 12 regions are loaded.\n",
"connectivity_threshold=0.001\n",
"count tracks by passing\n",
"calculate matrix using qa\n",
"connectivity calculation error:Cannot quantify matrix value using qa\n",
"DSI Studio \"Chen\" Apr 14 2022\n",
"source=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz\n",
"action=ana\n",
"loading /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz...\n",
"FIB file loaded\n",
"tract=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz\n",
"loading testData_mcf.nii.src.gz.dti.fib.gz.trk.gz\n",
"A total of 1000000 tracks loaded\n",
"a total of 1 tract file(s) loaded\n",
"connectivity=/testData/DTI/DSI_studio/testDataDNSmoothMicoBetrsfMRI_Mask_scaled.nii\n",
"connectivity_type=pass,end\n",
"connectivity_value=count\n",
"loading /testData/DTI/DSI_studio/testDataDNSmoothMicoBetrsfMRI_Mask_scaled.nii\n",
"reading /testData/DTI/DSI_studio/testDataDNSmoothMicoBetrsfMRI_Mask_scaled.nii as a NIFTI regioin file\n",
"DWI dimension=(128,128,20)\n",
"NIFTI dimension=(128,128,20)\n",
"nifti loaded as multiple ROI file\n",
"looking for region label file\n",
"label file loaded:/testData/DTI/DSI_studio/testDataDNSmoothMicoBetrsfMRI_Mask_scaled.txt\n",
"a total of 12 regions are loaded.\n",
"connectivity_threshold=0.001\n",
"count tracks by passing\n",
"calculate matrix using count\n",
"export connectivity matrix to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetrsfMRI_Mask_scaled.count.pass.connectivity.mat\n",
"export connectogram to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetrsfMRI_Mask_scaled.count.pass.connectogram.txt\n",
"export network measures to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetrsfMRI_Mask_scaled.count.pass.network_measures.txt\n",
"count tracks by ending\n",
"calculate matrix using count\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"export connectivity matrix to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetrsfMRI_Mask_scaled.count.end.connectivity.mat\n",
"export connectogram to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetrsfMRI_Mask_scaled.count.end.connectogram.txt\n",
"export network measures to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetrsfMRI_Mask_scaled.count.end.network_measures.txt\n",
"DSI Studio \"Chen\" Apr 14 2022\n",
"source=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz\n",
"action=ana\n",
"loading /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz...\n",
"FIB file loaded\n",
"tract=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz\n",
"loading testData_mcf.nii.src.gz.dti.fib.gz.trk.gz\n",
"A total of 1000000 tracks loaded\n",
"a total of 1 tract file(s) loaded\n",
"connectivity=/testData/DTI/DSI_studio/testDataDNSmoothMicoBetAnno_scaled.nii\n",
"connectivity_type=pass,end\n",
"connectivity_value=qa\n",
"loading /testData/DTI/DSI_studio/testDataDNSmoothMicoBetAnno_scaled.nii\n",
"reading /testData/DTI/DSI_studio/testDataDNSmoothMicoBetAnno_scaled.nii as a NIFTI regioin file\n",
"DWI dimension=(128,128,20)\n",
"NIFTI dimension=(128,128,20)\n",
"nifti loaded as multiple ROI file\n",
"looking for region label file\n",
"label file loaded:/testData/DTI/DSI_studio/testDataDNSmoothMicoBetAnno_scaled.txt\n",
"a total of 941 regions are loaded.\n",
"connectivity_threshold=0.001\n",
"count tracks by passing\n",
"calculate matrix using qa\n",
"connectivity calculation error:Cannot quantify matrix value using qa\n",
"DSI Studio \"Chen\" Apr 14 2022\n",
"source=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz\n",
"action=ana\n",
"loading /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz...\n",
"FIB file loaded\n",
"tract=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz\n",
"loading testData_mcf.nii.src.gz.dti.fib.gz.trk.gz\n",
"A total of 1000000 tracks loaded\n",
"a total of 1 tract file(s) loaded\n",
"connectivity=/testData/DTI/DSI_studio/testDataDNSmoothMicoBetAnno_scaled.nii\n",
"connectivity_type=pass,end\n",
"connectivity_value=count\n",
"loading /testData/DTI/DSI_studio/testDataDNSmoothMicoBetAnno_scaled.nii\n",
"reading /testData/DTI/DSI_studio/testDataDNSmoothMicoBetAnno_scaled.nii as a NIFTI regioin file\n",
"DWI dimension=(128,128,20)\n",
"NIFTI dimension=(128,128,20)\n",
"nifti loaded as multiple ROI file\n",
"looking for region label file\n",
"label file loaded:/testData/DTI/DSI_studio/testDataDNSmoothMicoBetAnno_scaled.txt\n",
"a total of 941 regions are loaded.\n",
"connectivity_threshold=0.001\n",
"count tracks by passing\n",
"calculate matrix using count\n",
"export connectivity matrix to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_scaled.count.pass.connectivity.mat\n",
"export connectogram to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_scaled.count.pass.connectogram.txt\n",
"export network measures to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_scaled.count.pass.network_measures.txt\n",
"count tracks by ending\n",
"calculate matrix using count\n",
"export connectivity matrix to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_scaled.count.end.connectivity.mat\n",
"export connectogram to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_scaled.count.end.connectogram.txt\n",
"export network measures to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_scaled.count.end.network_measures.txt\n",
"fslsplit /testData/DTI/fslScaleTemp.nii.gz testData -z\n",
"For all slices ... \n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0000.nii.gz -out /testData/DTI/mcf_Folder/testData0000.nii.gz -plots\n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0001.nii.gz -out /testData/DTI/mcf_Folder/testData0001.nii.gz -plots\n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0002.nii.gz -out /testData/DTI/mcf_Folder/testData0002.nii.gz -plots\n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0003.nii.gz -out /testData/DTI/mcf_Folder/testData0003.nii.gz -plots\n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0004.nii.gz -out /testData/DTI/mcf_Folder/testData0004.nii.gz -plots\n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0005.nii.gz -out /testData/DTI/mcf_Folder/testData0005.nii.gz -plots\n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0006.nii.gz -out /testData/DTI/mcf_Folder/testData0006.nii.gz -plots\n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0007.nii.gz -out /testData/DTI/mcf_Folder/testData0007.nii.gz -plots\n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0008.nii.gz -out /testData/DTI/mcf_Folder/testData0008.nii.gz -plots\n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0009.nii.gz -out /testData/DTI/mcf_Folder/testData0009.nii.gz -plots\n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0010.nii.gz -out /testData/DTI/mcf_Folder/testData0010.nii.gz -plots\n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0011.nii.gz -out /testData/DTI/mcf_Folder/testData0011.nii.gz -plots\n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0012.nii.gz -out /testData/DTI/mcf_Folder/testData0012.nii.gz -plots\n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0013.nii.gz -out /testData/DTI/mcf_Folder/testData0013.nii.gz -plots\n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0014.nii.gz -out /testData/DTI/mcf_Folder/testData0014.nii.gz -plots\n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0015.nii.gz -out /testData/DTI/mcf_Folder/testData0015.nii.gz -plots\n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0016.nii.gz -out /testData/DTI/mcf_Folder/testData0016.nii.gz -plots\n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0017.nii.gz -out /testData/DTI/mcf_Folder/testData0017.nii.gz -plots\n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0018.nii.gz -out /testData/DTI/mcf_Folder/testData0018.nii.gz -plots\n",
"mcflirt -in /aida/bin/3.2_DTIConnectivity/testData0019.nii.gz -out /testData/DTI/mcf_Folder/testData0019.nii.gz -plots\n",
"fslmerge -z /testData/DTI/testData_mcf.nii.gz /testData/DTI/mcf_Folder/testData0000.nii.gz /testData/DTI/mcf_Folder/testData0001.nii.gz /testData/DTI/mcf_Folder/testData0002.nii.gz /testData/DTI/mcf_Folder/testData0003.nii.gz /testData/DTI/mcf_Folder/testData0004.nii.gz /testData/DTI/mcf_Folder/testData0005.nii.gz /testData/DTI/mcf_Folder/testData0006.nii.gz /testData/DTI/mcf_Folder/testData0007.nii.gz /testData/DTI/mcf_Folder/testData0008.nii.gz /testData/DTI/mcf_Folder/testData0009.nii.gz /testData/DTI/mcf_Folder/testData0010.nii.gz /testData/DTI/mcf_Folder/testData0011.nii.gz /testData/DTI/mcf_Folder/testData0012.nii.gz /testData/DTI/mcf_Folder/testData0013.nii.gz /testData/DTI/mcf_Folder/testData0014.nii.gz /testData/DTI/mcf_Folder/testData0015.nii.gz /testData/DTI/mcf_Folder/testData0016.nii.gz /testData/DTI/mcf_Folder/testData0017.nii.gz /testData/DTI/mcf_Folder/testData0018.nii.gz /testData/DTI/mcf_Folder/testData0019.nii.gz\n",
"Create directory \"/testData/DTI/src\"\n",
"Create directory \"/testData/DTI/fib_map\"\n",
"Generate src-File /aida/bin/dsi_studio_ubuntu_1804/dsi-studio/dsi_studio --action=src --source=testData_mcf.nii.gz --output=/testData/DTI/src/testData_mcf.nii.src.gz --b_table=/aida/lib/DTI_Jones30.txt:\n",
"Generate fib-File /aida/bin/dsi_studio_ubuntu_1804/dsi-studio/dsi_studio --action=rec --source=/testData/DTI/src/testData_mcf.nii.src.gz --mask=/testData/DTI/DSI_studio/testDataDNSmoothMicoBetMask_scaled.nii --method=1 --param0=16 --check_btable=0 --half_sphere=1:\n",
"Move file \"/testData/DTI/src/testData_mcf.nii.src.gz.dti.fib.gz\" to directory \"/testData/DTI/fib_map\"\n",
"Generate two maps /aida/bin/dsi_studio_ubuntu_1804/dsi-studio/dsi_studio --action=exp --source=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz --export=fa:\n",
"Generate two maps /aida/bin/dsi_studio_ubuntu_1804/dsi-studio/dsi_studio --action=exp --source=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz --export=md:\n",
"Generate two maps /aida/bin/dsi_studio_ubuntu_1804/dsi-studio/dsi_studio --action=exp --source=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz --export=ad:\n",
"Generate two maps /aida/bin/dsi_studio_ubuntu_1804/dsi-studio/dsi_studio --action=exp --source=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz --export=rd:\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.fa.nii.gz\" to directory \"/testData/DTI/DSI_studio\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.md.nii.gz\" to directory \"/testData/DTI/DSI_studio\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.ad.nii.gz\" to directory \"/testData/DTI/DSI_studio\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.rd.nii.gz\" to directory \"/testData/DTI/DSI_studio\"\n",
"Track neuronal pathes /aida/bin/dsi_studio_ubuntu_1804/dsi-studio/dsi_studio --action=trk --source=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz --output=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz --fiber_count=1000000 --interpolation=0 --step_size=.5 --turning_angle=55 --check_ending=0 --fa_threshold=.02 --smoothing=.1 --min_length=.5 --max_length=12.0:\n",
"Create directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetStrokeMask_scaled.count.end.connectogram.txt\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetStrokeMask_scaled.count.end.network_measures.txt\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetStrokeMask_scaled.count.pass.connectogram.txt\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetStrokeMask_scaled.count.pass.network_measures.txt\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetStrokeMask_scaled.count.end.connectivity.mat\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetStrokeMask_scaled.count.pass.connectivity.mat\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetrsfMRI_Mask_scaled.count.end.connectogram.txt\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetrsfMRI_Mask_scaled.count.end.network_measures.txt\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetrsfMRI_Mask_scaled.count.pass.connectogram.txt\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetrsfMRI_Mask_scaled.count.pass.network_measures.txt\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetrsfMRI_Mask_scaled.count.end.connectivity.mat\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetrsfMRI_Mask_scaled.count.pass.connectivity.mat\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_scaled.count.end.connectogram.txt\" to directory \"/testData/DTI/connectivity\"\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_scaled.count.end.network_measures.txt\" to directory \"/testData/DTI/connectivity\"DSI Studio \"Chen\" Apr 14 2022\n",
"source=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz\n",
"action=ana\n",
"loading /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz...\n",
"FIB file loaded\n",
"tract=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz\n",
"loading testData_mcf.nii.src.gz.dti.fib.gz.trk.gz\n",
"A total of 1000000 tracks loaded\n",
"a total of 1 tract file(s) loaded\n",
"connectivity=/testData/DTI/DSI_studio/testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.nii\n",
"connectivity_type=pass,end\n",
"connectivity_value=qa\n",
"loading /testData/DTI/DSI_studio/testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.nii\n",
"reading /testData/DTI/DSI_studio/testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.nii as a NIFTI regioin file\n",
"DWI dimension=(128,128,20)\n",
"NIFTI dimension=(128,128,20)\n",
"nifti loaded as multiple ROI file\n",
"looking for region label file\n",
"label file loaded:/testData/DTI/DSI_studio/testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.txt\n",
"a total of 94 regions are loaded.\n",
"connectivity_threshold=0.001\n",
"count tracks by passing\n",
"calculate matrix using qa\n",
"connectivity calculation error:Cannot quantify matrix value using qa\n",
"DSI Studio \"Chen\" Apr 14 2022\n",
"source=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz\n",
"action=ana\n",
"loading /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz...\n",
"FIB file loaded\n",
"tract=/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz\n",
"loading testData_mcf.nii.src.gz.dti.fib.gz.trk.gz\n",
"A total of 1000000 tracks loaded\n",
"a total of 1 tract file(s) loaded\n",
"connectivity=/testData/DTI/DSI_studio/testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.nii\n",
"connectivity_type=pass,end\n",
"connectivity_value=count\n",
"loading /testData/DTI/DSI_studio/testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.nii\n",
"reading /testData/DTI/DSI_studio/testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.nii as a NIFTI regioin file\n",
"DWI dimension=(128,128,20)\n",
"NIFTI dimension=(128,128,20)\n",
"nifti loaded as multiple ROI file\n",
"looking for region label file\n",
"label file loaded:/testData/DTI/DSI_studio/testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.txt\n",
"a total of 94 regions are loaded.\n",
"connectivity_threshold=0.001\n",
"count tracks by passing\n",
"calculate matrix using count\n",
"export connectivity matrix to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.count.pass.connectivity.mat\n",
"export connectogram to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.count.pass.connectogram.txt\n",
"export network measures to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.count.pass.network_measures.txt\n",
"count tracks by ending\n",
"calculate matrix using count\n",
"export connectivity matrix to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.count.end.connectivity.mat\n",
"export connectogram to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.count.end.connectogram.txt\n",
"export network measures to /testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.count.end.network_measures.txt\n",
"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_scaled.count.pass.connectogram.txt\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_scaled.count.pass.network_measures.txt\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_scaled.count.end.connectivity.mat\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_scaled.count.pass.connectivity.mat\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.count.end.connectogram.txt\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.count.end.network_measures.txt\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.count.pass.connectogram.txt\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.count.pass.network_measures.txt\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.count.end.connectivity.mat\" to directory \"/testData/DTI/connectivity\"\n",
"Move file \"/testData/DTI/fib_map/testData_mcf.nii.src.gz.dti.fib.gz.trk.gz.testDataDNSmoothMicoBetAnno_rsfMRISplit_scaled.count.pass.connectivity.mat\" to directory \"/testData/DTI/connectivity\"\n",
"DTI Connectivity \u001b[0;30;42m COMPLETED \u001b[0m\n"
]
}
],
"source": [
"!docker exec -w /aida/bin/3.2_DTIConnectivity aidamri \\\n",
"python dsi_main.py -i /testData/DTI/testData.7.1.nii.gz"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"This process produced a folder structure packed with processed data, including fiber-mapping and connectivity data."
]
},
{
"cell_type": "code",
"execution_count": 29,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"connectivity\r\n",
"fib_map\r\n",
"DSI_studio\r\n",
"src\r\n",
"testData_mcf.nii.gz\r\n",
"mcf_Folder\r\n",
"registration.log\r\n",
"testDataDNSmoothMicoBetAnno_rsfMRI_mask.nii.gz\r\n",
"testDataDNSmoothMicoBetAnno_mask.nii.gz\r\n",
"testDataDNSmoothMicoBetStroke_mask.nii.gz\r\n",
"testDataDNSmoothMicoBet_Template.nii.gz\r\n",
"testDataDNSmoothMicoBet_Anno_rsfMRI.nii.gz\r\n",
"testDataDNSmoothMicoBet_AnnoSplit_rsfMRI.nii.gz\r\n",
"testDataDNSmoothMicoBet_AnnoSplit.nii.gz\r\n",
"testDataDNSmoothMicoBet_T2w.nii.gz\r\n",
"testDataDNSmoothMicoBettransMatrixAff.txt\r\n",
"preprocess.log\r\n",
"testDataDNSmoothMicoBet.nii.gz\r\n",
"testDataDNSmoothMicoBet_mask.nii.gz\r\n",
"testDataDNSmoothMico.nii.gz\r\n",
"testDataDNSmooth.nii.gz\r\n",
"testDataDN.nii.gz\r\n",
"testData.7.1.nii.gz\r\n"
]
}
],
"source": [
"!docker exec -w /testData/DTI aidamri ls -t"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Below is the new folder list. Use `ls` on the different folders to see the contents. Alternatively, directly click through them in your data folder.\n",
"* connectivity\n",
"* fib_map\n",
"* DSI_studio\n",
"* src\n",
"* mcf_Folder"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## fMRI<a class=\"anchor\" id=\"fmri\"></a>\n",
"Pre-processing is again started in the same way as for the other methods."
]
},
{
"cell_type": "code",
"execution_count": 30,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"rsfMRI Preprocessing \u001b[5m...\u001b[0m (wait!)\r",
"rsfMRI Preprocessing \u001b[0;30;42m COMPLETED \u001b[0m\r\n"
]
}
],
"source": [
"!docker exec -w /aida/bin/2.3_fMRIPreProcessing aidamri \\\n",
"python preProcessing_fMRI.py -i /testData/fMRI/testData.6.1.nii.gz"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The processed data will be similar to that produced by the previous pre-processing steps. Check below to see the file contents."
]
},
{
"cell_type": "code",
"execution_count": 32,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"preprocess.log\r\n",
"testDataSmoothBet.nii.gz\r\n",
"testDataSmoothBet_mask.nii.gz\r\n",
"testDataSmooth.nii.gz\r\n",
"testDataDN.nii.gz\r\n",
"testData.6.1.nii.gz\r\n"
]
}
],
"source": [
"!docker exec -w /testData/fMRI aidamri ls -t"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The next step is, again, registration, this time applied to the brain-extracted file."
]
},
{
"cell_type": "code",
"execution_count": 33,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"[NiftyReg ALADIN] Command line:\n",
"\t reg_aladin -ref /testData/fMRI/testDataSmoothBet.nii.gz -flo /testData/T2w/testDataBiasBet.nii.gz -res /testData/fMRI/testDataSmoothBet_T2w.nii.gz -aff /testData/fMRI/testDataSmoothBettransMatrixAff.txt\n",
"\n",
"[reg_aladin_sym] Parameters\n",
"[reg_aladin_sym] Reference image name: /testData/fMRI/testDataSmoothBet.nii.gz\n",
"[reg_aladin_sym] \t128x128x20 voxels\n",
"[reg_aladin_sym] \t0.140625x0.140625x0.4 mm\n",
"[reg_aladin_sym] Floating image name: /testData/T2w/testDataBiasBet.nii.gz\n",
"[reg_aladin_sym] \t256x256x48 voxels\n",
"[reg_aladin_sym] \t0.0683594x0.0683594x0.3 mm\n",
"[reg_aladin_sym] Maximum iteration number: 5 (10 during the first level)\n",
"[reg_aladin_sym] Percentage of blocks: 50 %\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Current level 1 / 3\n",
"[reg_aladin_sym] reference image size: \t32x32x20 voxels\t0.5625x0.5625x0.4 mm\n",
"[reg_aladin_sym] floating image size: \t64x64x48 voxels\t0.273438x0.273438x0.3 mm\n",
"[reg_aladin_sym] Block size = [4 4 4]\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Forward Block number = [8 8 5]\n",
"[reg_aladin_sym] Backward Block number = [16 16 12]\n",
"[reg_aladin_sym] Initial forward transformation matrix::\n",
"1\t0\t0\t-0.25\n",
"0\t1\t0\t-0.25\n",
"0\t0\t1\t3.2\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Initial backward transformation matrix::\n",
"1\t0\t0\t0.25\n",
"0\t1\t0\t0.25\n",
"0\t0\t1\t-3.2\n",
"0\t0\t0\t1\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Final forward transformation matrix::\n",
"1.0124\t0.116708\t0.0210427\t-0.910679\n",
"-0.0635667\t1.11402\t0.0561056\t-0.612335\n",
"0.0339436\t-0.391846\t1.37229\t4.14866\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Final backward transformation matrix::\n",
"0.981422\t-0.106577\t-0.0106917\t0.872856\n",
"0.0564118\t0.878797\t-0.0367942\t0.742138\n",
"-0.00816754\t0.253569\t0.718465\t-2.83284\n",
"0\t0\t0\t1\n",
"- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n",
"[reg_aladin_sym] Current level 2 / 3\n",
"[reg_aladin_sym] reference image size: \t64x64x20 voxels\t0.28125x0.28125x0.4 mm\n",
"[reg_aladin_sym] floating image size: \t128x128x48 voxels\t0.136719x0.136719x0.3 mm\n",
"[reg_aladin_sym] Block size = [4 4 4]\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Forward Block number = [16 16 5]\n",
"[reg_aladin_sym] Backward Block number = [32 32 12]\n",
"[reg_aladin_sym] Initial forward transformation matrix::\n",
"1.0124\t0.116708\t0.0210427\t-0.910679\n",
"-0.0635667\t1.11402\t0.0561056\t-0.612335\n",
"0.0339436\t-0.391846\t1.37229\t4.14866\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Initial backward transformation matrix::\n",
"0.981422\t-0.106577\t-0.0106917\t0.872856\n",
"0.0564118\t0.878797\t-0.0367942\t0.742138\n",
"-0.00816754\t0.253569\t0.718465\t-2.83284\n",
"0\t0\t0\t1\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Final forward transformation matrix::\n",
"1.05737\t-0.00110787\t-0.031221\t-0.0646289\n",
"-0.0266323\t1.196\t0.0469169\t-1.48408\n",
"0.0776352\t-0.399896\t1.39058\t4.1041\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Final backward transformation matrix::\n",
"0.944403\t0.00787561\t0.0209379\t-0.0132072\n",
"0.0228404\t0.826982\t-0.0273889\t1.34119\n",
"-0.0461572\t0.23738\t0.710081\t-2.56493\n",
"0\t0\t0\t1\n",
"- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n",
"[reg_aladin_sym] Current level 3 / 3\n",
"[reg_aladin_sym] reference image size: \t128x128x20 voxels\t0.140625x0.140625x0.4 mm\n",
"[reg_aladin_sym] floating image size: \t256x256x48 voxels\t0.0683594x0.0683594x0.3 mm\n",
"[reg_aladin_sym] Block size = [4 4 4]\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Forward Block number = [32 32 5]\n",
"[reg_aladin_sym] Backward Block number = [64 64 12]\n",
"[reg_aladin_sym] Initial forward transformation matrix::\n",
"1.05737\t-0.00110787\t-0.031221\t-0.0646289\n",
"-0.0266323\t1.196\t0.0469169\t-1.48408\n",
"0.0776352\t-0.399896\t1.39058\t4.1041\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Initial backward transformation matrix::\n",
"0.944403\t0.00787561\t0.0209379\t-0.0132072\n",
"0.0228404\t0.826982\t-0.0273889\t1.34119\n",
"-0.0461572\t0.23738\t0.710081\t-2.56493\n",
"0\t0\t0\t1\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"[reg_aladin_sym] Final forward transformation matrix::\n",
"1.07167\t-0.00307272\t-0.0206067\t-0.183218\n",
"-0.0135992\t1.13693\t0.0833942\t-1.34971\n",
"0.0614744\t-0.455814\t1.5141\t4.30872\n",
"0\t0\t0\t1\n",
"[reg_aladin_sym] Final backward transformation matrix::\n",
"0.93251\t0.00744401\t0.0122813\t0.127983\n",
"0.0136302\t0.860665\t-0.0472186\t1.3676\n",
"-0.0337579\t0.258798\t0.645745\t-2.43922\n",
"0\t0\t0\t1\n",
"- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n",
"Registration Performed in 0 min 7 sec\n",
"Have a good day !\n",
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /testData/fMRI/testDataSmoothBet.nii.gz -flo /testData/T2w/testDataBiasBet_Anno.nii.gz -cpp /testData/fMRI/testDataSmoothBettransMatrixAff.txt -inter 0 -res /testData/fMRI/testDataSmoothBet_Anno.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /testData/fMRI/testDataSmoothBet.nii.gz\n",
"\t128x128x20 voxels, 1 volumes\n",
"\t0.140625x0.140625x0.4 mm\n",
"Floating image name: /testData/T2w/testDataBiasBet_Anno.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/fMRI/testDataSmoothBet_Anno.nii.gz\n",
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /testData/T2w/testDataBiasBet_Anno.nii.gz -flo /aida/lib/ARA_annotationR+2000.nii.gz -trans /testData/T2w/testDataBiasBetMatrixBspline.nii -inter 0 -res /testData/fMRI/testDataSmoothBet_AnnoSplit.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /testData/T2w/testDataBiasBet_Anno.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"Floating image name: /aida/lib/ARA_annotationR+2000.nii.gz\n",
"\t228x160x264 voxels, 1 volumes\n",
"\t0.05x0.05x0.05 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/fMRI/testDataSmoothBet_AnnoSplit.nii.gz\n",
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /testData/fMRI/testDataSmoothBet.nii.gz -flo /testData/fMRI/testDataSmoothBet_AnnoSplit.nii.gz -trans /testData/fMRI/testDataSmoothBettransMatrixAff.txt -inter 0 -res /testData/fMRI/testDataSmoothBet_AnnoSplit.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /testData/fMRI/testDataSmoothBet.nii.gz\n",
"\t128x128x20 voxels, 1 volumes\n",
"\t0.140625x0.140625x0.4 mm\n",
"Floating image name: /testData/fMRI/testDataSmoothBet_AnnoSplit.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/fMRI/testDataSmoothBet_AnnoSplit.nii.gz\n",
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /testData/T2w/testDataBiasBet_Anno.nii.gz -flo /aida/lib/annoVolume+2000_rsfMRI.nii.gz -trans /testData/T2w/testDataBiasBetMatrixBspline.nii -inter 0 -res /testData/fMRI/testDataSmoothBet_AnnoSplit_rsfMRI.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /testData/T2w/testDataBiasBet_Anno.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"Floating image name: /aida/lib/annoVolume+2000_rsfMRI.nii.gz\n",
"\t228x160x264 voxels, 1 volumes\n",
"\t0.05x0.05x0.05 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/fMRI/testDataSmoothBet_AnnoSplit_rsfMRI.nii.gz\n",
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /testData/fMRI/testDataSmoothBet.nii.gz -flo /testData/fMRI/testDataSmoothBet_AnnoSplit_rsfMRI.nii.gz -trans /testData/fMRI/testDataSmoothBettransMatrixAff.txt -inter 0 -res /testData/fMRI/testDataSmoothBet_AnnoSplit_rsfMRI.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /testData/fMRI/testDataSmoothBet.nii.gz\n",
"\t128x128x20 voxels, 1 volumes\n",
"\t0.140625x0.140625x0.4 mm\n",
"Floating image name: /testData/fMRI/testDataSmoothBet_AnnoSplit_rsfMRI.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/fMRI/testDataSmoothBet_AnnoSplit_rsfMRI.nii.gz\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /testData/T2w/testDataBiasBet_Anno.nii.gz -flo /aida/lib/annoVolume.nii.gz -trans /testData/T2w/testDataBiasBetMatrixBspline.nii -inter 0 -res /testData/fMRI/testDataSmoothBet_Anno_rsfMRI.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /testData/T2w/testDataBiasBet_Anno.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"Floating image name: /aida/lib/annoVolume.nii.gz\n",
"\t228x160x264 voxels, 1 volumes\n",
"\t0.05x0.05x0.05 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/fMRI/testDataSmoothBet_Anno_rsfMRI.nii.gz\n",
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /testData/fMRI/testDataSmoothBet.nii.gz -flo /testData/fMRI/testDataSmoothBet_Anno_rsfMRI.nii.gz -trans /testData/fMRI/testDataSmoothBettransMatrixAff.txt -inter 0 -res /testData/fMRI/testDataSmoothBet_Anno_rsfMRI.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /testData/fMRI/testDataSmoothBet.nii.gz\n",
"\t128x128x20 voxels, 1 volumes\n",
"\t0.140625x0.140625x0.4 mm\n",
"Floating image name: /testData/fMRI/testDataSmoothBet_Anno_rsfMRI.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/fMRI/testDataSmoothBet_Anno_rsfMRI.nii.gz\n",
"\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"Command line:\n",
" reg_resample -ref /testData/fMRI/testDataSmoothBet.nii.gz -flo /testData/T2w/testDataBiasBet_TemplateAllen.nii.gz -cpp /testData/fMRI/testDataSmoothBettransMatrixAff.txt -res /testData/fMRI/testDataSmoothBet_Template.nii.gz\n",
"\n",
"Parameters\n",
"Reference image name: /testData/fMRI/testDataSmoothBet.nii.gz\n",
"\t128x128x20 voxels, 1 volumes\n",
"\t0.140625x0.140625x0.4 mm\n",
"Floating image name: /testData/T2w/testDataBiasBet_TemplateAllen.nii.gz\n",
"\t256x256x48 voxels, 1 volumes\n",
"\t0.0683594x0.0683594x0.3 mm\n",
"* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n",
"\n",
"[NiftyReg] Resampled image has been saved: /testData/fMRI/testDataSmoothBet_Template.nii.gz\n",
"rsfMRI Registration \u001b[0;30;42m COMPLETED \u001b[0m\n"
]
}
],
"source": [
"!docker exec -w /aida/bin/2.3_fMRIPreProcessing aidamri \\\n",
"python registration_rsfMRI.py -i /testData/fMRI/testDataSmoothBet.nii.gz"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
    "Check the contents. Again, the brain-extracted, registered brain image (rsfMRI) can be seen below.\n",
""
]
},
{
"cell_type": "code",
"execution_count": 34,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"registration.log\r\n",
"testDataSmoothBet_Template.nii.gz\r\n",
"testDataSmoothBet_Anno_rsfMRI.nii.gz\r\n",
"testDataSmoothBet_AnnoSplit_rsfMRI.nii.gz\r\n",
"testDataSmoothBet_AnnoSplit.nii.gz\r\n",
"testDataSmoothBet_Anno.nii.gz\r\n",
"testDataSmoothBet_T2w.nii.gz\r\n",
"testDataSmoothBettransMatrixAff.txt\r\n",
"preprocess.log\r\n",
"testDataSmoothBet.nii.gz\r\n",
"testDataSmoothBet_mask.nii.gz\r\n",
"testDataSmooth.nii.gz\r\n",
"testDataDN.nii.gz\r\n",
"testData.6.1.nii.gz\r\n"
]
}
],
"source": [
"!docker exec -w /testData/fMRI aidamri ls -t"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
    "You may calculate the adjacency matrix using the physiology data provided with the test data set. If no physiology data is present, this step can be omitted. You do not need to invoke the physiology data directly. Instead, call the function in fMRI activity as shown below and use it on the initial fMRI data file."
]
},
{
"cell_type": "code",
"execution_count": 37,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"fMRI Processing \u001b[5m...\u001b[0m (wait!)\r",
"fMRI Processing \u001b[0;30;42m COMPLETED \u001b[0m\r\n",
"Regression \u001b[5m...\u001b[0m (wait!)\r",
"Regression \u001b[0;30;42m COMPLETED \u001b[0m\r\n",
"sfrgr_file /testData/fMRI/regr/testData_mcf_f_st_SFRGR.nii.gz\r\n",
"Copy Atlas Data and generate seed ROIs\r\n",
"Output: /testData/fMRI/Seed_ROIs.nii.gz\r\n",
"Copy Atlas Data and generate seed ROIs\r\n",
"Output: /testData/fMRI/Seed_ROIs.nii.gz\r\n"
]
}
],
"source": [
"!docker exec -w /aida/bin/3.3_fMRIActivity aidamri \\\n",
"python process_fMRI.py -i /testData/fMRI/testData.6.1.nii.gz"
]
},
{
   "cell_type": "markdown",
   "metadata": {},
"source": [
"Now, check the contents of the folder again. Now, a regr folder should be present."
]
},
{
"cell_type": "code",
"execution_count": 38,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"regr\r\n",
"Seed_ROIs.nii.gz\r\n",
"rs-fMRI_niiData\r\n",
"process.log\r\n",
"rawMonData\r\n",
"rs-fMRI_mcf\r\n",
"registration.log\r\n",
"testDataSmoothBet_Template.nii.gz\r\n",
"testDataSmoothBet_Anno_rsfMRI.nii.gz\r\n",
"testDataSmoothBet_AnnoSplit_rsfMRI.nii.gz\r\n",
"testDataSmoothBet_AnnoSplit.nii.gz\r\n",
"testDataSmoothBet_Anno.nii.gz\r\n",
"testDataSmoothBet_T2w.nii.gz\r\n",
"testDataSmoothBettransMatrixAff.txt\r\n",
"preprocess.log\r\n",
"testDataSmoothBet.nii.gz\r\n",
"testDataSmoothBet_mask.nii.gz\r\n",
"testDataSmooth.nii.gz\r\n",
"testDataDN.nii.gz\r\n",
"testData.6.1.nii.gz\r\n"
]
}
],
"source": [
"!docker exec -w /testData/fMRI aidamri ls -t"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# "
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.0"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
| Unknown |
3D | Aswendt-Lab/AIDAmri | fslinstaller_mod.py | .py | 103,777 | 3,100 | #!/usr/bin/python
from __future__ import print_function
import collections
import csv
import errno
import getpass
import itertools
import json
import locale
import os
import platform
import threading
import time
import shlex
import socket
import sys
import readline
import tempfile
import re
import fileinput
# py3
try:
import urllib.request as urlrequest
import urllib.error as urlerror
# py2
except ImportError:
import urllib2 as urlrequest
import urllib2 as urlerror
from optparse import OptionParser, OptionGroup, SUPPRESS_HELP
from re import compile, escape, sub
from subprocess import Popen, call, PIPE, STDOUT
try:
from subprocess import DEVNULL # py3
except ImportError:
DEVNULL = open(os.devnull, 'wb')
# Use the system's default locale so text encoding matches the terminal.
locale.setlocale(locale.LC_ALL, '')
code = locale.getpreferredencoding()
PYVER = sys.version_info[:2]
# Status codes used when reporting install-step results.
fsli_C_FAILED = 1
fsli_C_OK = 2
fsli_C_SKIP = 4
fsli_C_WARN = 3
# Install states: already current, needs update, needs full upgrade.
CURRENT = 0
UPDATE = 1
UPGRADE = 2
# Shell families used when deciding which profile file to edit.
BOURNE_SHELLS = ('sh', 'bash', 'zsh', 'ksh', 'dash', )
C_SHELLS = ('csh', 'tcsh', )
class Version(object):
    '''Comparable dotted version number (major.minor.patch.hotfix).

    Accepts strings such as "3.2", "6.0.4" or "6.0.5.1"; anything after
    a ':' is discarded before parsing.  Missing components default to 0.
    Raises ValueError if any component is not a decimal integer.
    '''
    def __init__(self, version_string):
        # Discard any suffix after a ':' (e.g. "6.0.4:extra" -> "6.0.4").
        if ':' in version_string:
            version_string = version_string.split(':')[0]
        v_vals = version_string.split('.')
        for v in v_vals:
            if not v.isdigit():
                raise ValueError('Bad version string')
        self.major = int(v_vals[0])
        self.minor = int(v_vals[1]) if len(v_vals) > 1 else 0
        self.patch = int(v_vals[2]) if len(v_vals) > 2 else 0
        self.hotfix = int(v_vals[3]) if len(v_vals) > 3 else 0

    def _key(self):
        # Component tuple; Python's tuple comparison gives the correct
        # lexicographic ordering, replacing the previous field-by-field
        # comparison chains.
        return (self.major, self.minor, self.patch, self.hotfix)

    def __repr__(self):
        return "Version(%s,%s,%s,%s)" % self._key()

    def __str__(self):
        # Historical format: the hotfix component is hidden when zero.
        if self.hotfix == 0:
            return "%s.%s.%s" % (self.major, self.minor, self.patch)
        return "%s.%s.%s.%s" % self._key()

    def __lt__(self, other):
        if not isinstance(other, Version):
            return NotImplemented
        return self._key() < other._key()

    def __gt__(self, other):
        if not isinstance(other, Version):
            return NotImplemented
        return self._key() > other._key()

    def __le__(self, other):
        if not isinstance(other, Version):
            return NotImplemented
        return self._key() <= other._key()

    def __ge__(self, other):
        if not isinstance(other, Version):
            return NotImplemented
        return self._key() >= other._key()

    def __eq__(self, other):
        if not isinstance(other, Version):
            return NotImplemented
        return self._key() == other._key()

    def __ne__(self, other):
        if not isinstance(other, Version):
            return NotImplemented
        return self._key() != other._key()

    def __cmp__(self, other):
        # Python 2 compatibility: classic -1/0/1 three-way comparison.
        if not isinstance(other, Version):
            return NotImplemented
        return (self._key() > other._key()) - (self._key() < other._key())
version = Version('3.2.3')  # version of this installer script itself
def memoize(f):
    '''Decorator caching f's results, keyed on its arguments.

    The cache dict is exposed as the ``cache`` attribute of both the
    original and the wrapped function.  All arguments must be hashable.
    functools.wraps preserves f's name/docstring on the wrapper (the
    previous version returned an anonymous ``g``).
    '''
    from functools import wraps
    cache = f.cache = {}

    @wraps(f)
    def g(*args, **kwargs):
        key = (f, tuple(args), frozenset(list(kwargs.items())))
        if key not in cache:
            cache[key] = f(*args, **kwargs)
        return cache[key]
    return g
class InstallError(Exception):
    '''Raised when the installation cannot proceed.'''
    pass
class shell_colours(object):
    '''ANSI escape sequences for coloured terminal output.

    Naming scheme: <fg colour>fg_<bg colour>bg, e.g. rfg_kbg is a red
    foreground on the (default) black background.
    '''
    default = '\033[0m'      # reset all attributes
    rfg_kbg = '\033[91m'     # bright red foreground
    gfg_kbg = '\033[92m'     # bright green foreground
    yfg_kbg = '\033[93m'     # bright yellow foreground
    mfg_kbg = '\033[95m'     # bright magenta foreground
    yfg_bbg = '\033[104;93m' # yellow foreground on blue background
    bfg_kbg = '\033[34m'     # blue foreground
    bold = '\033[1m'
class MsgUser(object):
    '''Console messaging helpers with global quiet/debug switches.

    All user-facing output goes through these classmethods so the
    installer can globally silence informational messages (quiet mode)
    or enable diagnostics on stderr (debug mode).
    '''
    __debug = False
    __quiet = False

    @classmethod
    def debugOn(cls):
        cls.__debug = True

    @classmethod
    def debugOff(cls):
        cls.__debug = False

    @classmethod
    def quietOn(cls):
        cls.__quiet = True

    @classmethod
    def quietOff(cls):
        cls.__quiet = False

    @classmethod
    def isquiet(cls):
        return cls.__quiet

    @classmethod
    def isdebug(cls):
        return cls.__debug

    @classmethod
    def debug(cls, message, newline=True):
        '''Write message to stderr, only when debugging is enabled.'''
        if cls.__debug:
            mess = str(message)
            if newline:
                mess += "\n"
            sys.stderr.write(mess)

    @classmethod
    def message(cls, msg):
        '''Print an informational message unless quiet mode is on.'''
        if cls.__quiet:
            return
        print(msg)

    @classmethod
    def question(cls, msg):
        '''Print a prompt without a trailing newline (always shown).'''
        print(msg, end=' ')

    @classmethod
    def _tagged(cls, colour, tag, msg):
        # Shared formatter for status messages: coloured tag, colour
        # reset, then the message text.  Keeps byte output identical to
        # the previous per-method string building.
        print("".join((colour, tag, shell_colours.default, msg)))

    @classmethod
    def skipped(cls, msg):
        if cls.__quiet:
            return
        cls._tagged(shell_colours.mfg_kbg, "[Skipped] ", msg)

    @classmethod
    def ok(cls, msg):
        if cls.__quiet:
            return
        cls._tagged(shell_colours.gfg_kbg, "[OK] ", msg)

    @classmethod
    def failed(cls, msg):
        # Failures are always shown, even in quiet mode.
        cls._tagged(shell_colours.rfg_kbg, "[FAILED] ", msg)

    @classmethod
    def warning(cls, msg):
        if cls.__quiet:
            return
        # The leading space travels with the message so the reset code
        # lands before the space, matching the historical byte output.
        cls._tagged(shell_colours.bfg_kbg + shell_colours.bold,
                    "[Warning]", " " + msg)
class Progress_bar(object):
    '''In-place terminal progress indicator.

    Depending on the flags, update() redraws either a '#' bar, a
    "current/max - NN%" counter (numeric=True) or a bare percentage
    (percentage=True), using carriage returns to overwrite the line.
    NOTE(review): callers (sha256File/md5File) also set a dynamic
    ``position`` attribute on instances; it is not used by this class.
    '''
    def __init__(self, x=0, y=0, mx=1, numeric=False, percentage=False):
        # x/y are stored but not otherwise used by this class.
        self.x = x
        self.y = y
        self.width = 50  # nominal width; the '#' bar actually scales with percent
        self.current = 0
        self.max = mx  # maximum expected reading; used as the divisor below
        self.numeric = numeric
        self.percentage = percentage
    def update(self, reading):
        '''Redraw the indicator for an absolute reading in [0, max].'''
        if MsgUser.isquiet():
            return
        percent = int(round(reading * 100.0 / self.max))
        cr = '\r'
        if not self.numeric and not self.percentage:
            bar = '#' * int(percent)
        elif self.numeric:
            # "\033[K" clears to end of line so shrinking text leaves no residue
            bar = "/".join(
                (str(reading),
                 str(self.max))) + ' - ' + str(percent) + "%\033[K"
        elif self.percentage:
            bar = "%s%%" % (percent)
        sys.stdout.write(cr)
        sys.stdout.write(bar)
        sys.stdout.flush()
        self.current = percent
        if percent == 100:
            # Finished: blank out the line appropriately for each mode.
            sys.stdout.write(cr)
            if not self.numeric and not self.percentage:
                sys.stdout.write(" " * int(percent))
                sys.stdout.write(cr)
                sys.stdout.flush()
            elif self.numeric:
                sys.stdout.write(" " * (len(str(self.max))*2 + 8))
                sys.stdout.write(cr)
                sys.stdout.flush()
            elif self.percentage:
                sys.stdout.write("100%")
                sys.stdout.write(cr)
                sys.stdout.flush()
def temp_file_name(mode='r', close=False):
    '''Create a temporary file and return (file object, file name).

    Uses tempfile.mkstemp so the file exists on disk with a real path.
    Opens read-only unless mode says otherwise; when close is True the
    returned file object is already closed.  Note the object comes from
    os.fdopen and so lacks a usable .name attribute.
    '''
    fd, path = tempfile.mkstemp()
    handle = os.fdopen(fd, mode)
    if close:
        handle.close()
    return (handle, path)
class RunCommandError(Exception):
    '''Raised when an external command exits with a non-zero status.'''
    pass
class Spinner(object):
    '''Animated busy indicator drawing - \\ | / frames on stdout.

    start() launches a worker thread running spin_it(); stop() clears
    the busy flag and sleeps one delay period so the thread can exit.
    '''
    spinner = itertools.cycle(('-', '\\', '|', '/', ))  # shared frame sequence
    busy = False   # polled by the spinner thread; cleared by stop()
    delay = 0.2    # seconds between frames
    def __init__(self, delay=None, quiet=False):
        # Accept anything convertible to float; keep the default otherwise.
        if delay:
            try:
                self.delay = float(delay)
            except ValueError:
                pass
        self.quiet = quiet
    def spin_it(self):
        # Runs on the worker thread until busy is cleared.
        while self.busy:
            sys.stdout.write(next(self.spinner))
            sys.stdout.flush()
            time.sleep(self.delay)
            sys.stdout.write('\b')  # erase the frame just drawn
            sys.stdout.flush()
    def start(self):
        '''Begin spinning, unless quiet mode was requested.'''
        if not self.quiet:
            self.busy = True
            threading.Thread(target=self.spin_it).start()
    def stop(self):
        '''Signal the spinner thread to stop and wait one delay period.'''
        self.busy = False
        time.sleep(self.delay)
def run_cmd_dropstdout(command, as_root=False):
    '''Run command, discarding its stdout.

    command is a single string, tokenised with shlex.split.  When
    as_root is True and we are not already root, the command is run via
    sudo -S with the cached password written to stdin.
    Raises RunCommandError (with the command's stderr) on failure.
    '''
    command_line = shlex.split(command)
    if as_root and os.getuid() != 0:
        try:
            sudo_pwd = get_sudo_pwd()
        except SudoPasswordError:
            raise RunCommandError(
                "Unable to get valid administrator's password")
        command_line.insert(0, '-S')
        command_line.insert(0, 'sudo')
    else:
        sudo_pwd = ''
    # Create and start the spinner before the try block: previously it
    # was created inside the try, so a failure in Spinner() itself would
    # have made the finally clause hit an unbound name.  The redundant
    # 'except Exception: raise' clause has also been removed.
    my_spinner = Spinner(quiet=MsgUser.isquiet())
    my_spinner.start()
    try:
        cmd = Popen(command_line, stdin=PIPE, stdout=None, stderr=PIPE,
                    universal_newlines=True)
        if sudo_pwd:
            cmd.stdin.write(sudo_pwd + '\n')
            cmd.stdin.flush()
        (_, error) = cmd.communicate()
    finally:
        my_spinner.stop()
    if cmd.returncode:
        MsgUser.debug("An error occured (%s, %s)" % (cmd.returncode, error))
        raise RunCommandError(error)
def run_cmd(command, as_root=False):
    '''Run command and return its captured stdout.

    command is a single string, tokenised with shlex.split.  When
    as_root is True and we are not already root, the command is run via
    sudo -S with the cached password written to stdin.
    Raises RunCommandError (with the command's stderr) on failure.
    '''
    command_line = shlex.split(command)
    if as_root and os.getuid() != 0:
        try:
            sudo_pwd = get_sudo_pwd()
        except SudoPasswordError:
            raise RunCommandError(
                "Unable to get valid administrator's password")
        command_line.insert(0, '-S')
        command_line.insert(0, 'sudo')
    else:
        sudo_pwd = ''
    MsgUser.debug("Will call %s" % (command_line))
    # Spinner is created/started before the try block so the finally
    # clause can never reference an unbound name; the redundant
    # 'except Exception: raise' clause has been removed.
    my_spinner = Spinner(quiet=MsgUser.isquiet())
    my_spinner.start()
    try:
        cmd = Popen(command_line, stdin=PIPE, stdout=PIPE, stderr=PIPE,
                    universal_newlines=True)
        if sudo_pwd:
            cmd.stdin.write(sudo_pwd + '\n')
            cmd.stdin.flush()
        (output, error) = cmd.communicate()
    finally:
        my_spinner.stop()
    if cmd.returncode:
        MsgUser.debug("An error occured (%s, %s)" % (cmd.returncode, error))
        raise RunCommandError(error)
    MsgUser.debug("Command completed successfully (%s)" % (output))
    return output
def run_cmd_displayoutput(command, as_root=False):
    '''Run the command, streaming its output straight to this process's
    stdout/stderr.  Raises RunCommandError(returncode) on failure.'''
    command_line = shlex.split(command)
    if as_root and os.getuid() != 0:
        # Root requested but not root: prepend 'sudo -S' and feed the
        # cached password on stdin.
        try:
            sudo_pwd = get_sudo_pwd()
        except SudoPasswordError:
            raise RunCommandError(
                "Unable to get valid administrator's password")
        command_line.insert(0, '-S')
        command_line.insert(0, 'sudo')
        MsgUser.debug("Will call %s" % (command_line))
        cmd = Popen(
            command_line,
            stdin=PIPE, stdout=sys.stdout, stderr=sys.stderr,
            universal_newlines=True)
        if sudo_pwd:
            cmd.stdin.write(sudo_pwd + '\n')
            cmd.stdin.flush()
        cmd.communicate()
        return_code = cmd.returncode
    else:
        # Already root, or root not needed: plain subprocess call.
        return_code = call(command_line)
    if return_code:
        MsgUser.debug("An error occured (%s)" % (return_code))
        raise RunCommandError(return_code)
    MsgUser.debug("Command completed successfully")
def check_sudo(sudo_pwd):
    '''Return True if sudo accepts the given password.

    Runs 'sudo -S true' with the password on stdin, discarding all
    output; only the exit status matters.
    '''
    command_line = ['sudo', '-S', 'true']
    MsgUser.debug("Checking sudo password")
    cmd = Popen(
        command_line,
        stdin=PIPE,
        stdout=DEVNULL,
        stderr=DEVNULL,
        universal_newlines=True
    )
    cmd.stdin.write(sudo_pwd + '\n')
    cmd.stdin.flush()
    cmd.communicate()
    # Exit status 0 means the password was accepted.
    return cmd.returncode == 0
class SudoPasswordError(Exception):
    '''Raised when a valid sudo password could not be obtained.'''
    pass
@memoize
def get_sudo_pwd():
    '''Prompt the user (up to 3 attempts) for their sudo password.

    The @memoize decorator means the user is only ever asked once per
    run; subsequent calls return the cached password.
    Raises SudoPasswordError after three failed attempts.
    '''
    MsgUser.message("We require your password to continue...")
    attempts = 0
    valid = False
    while attempts < 3 and not valid:
        sudo_pwd = getpass.getpass('password: ')
        valid = check_sudo(sudo_pwd)
        if not valid:
            MsgUser.failed("Incorrect password")
            attempts += 1
    if not valid:
        raise SudoPasswordError()
    return sudo_pwd
class DeletionRefused(Exception):
    '''Raised when asked to delete a protected system path.'''
    pass
class SafeDeleteError(Exception):
    '''Raised when the underlying rm command fails.'''
    pass
def safe_delete(fs_object, as_root=False):
    '''Delete file/folder, becoming root if necessary.

    Refuses (DeletionRefused) to delete a list of well-known system
    directories; raises SafeDeleteError if rm fails.
    '''
    banned_items = ['/', '/usr', '/usr/bin', '/usr/local', '/bin',
                    '/sbin', '/opt', '/Library', '/System', '/System/Library',
                    '/var', '/tmp', '/var/tmp', '/lib', '/lib64', '/Users',
                    '/home', '/Applications', '/private', '/etc', '/dev',
                    '/Network', '/net', '/proc']
    if fs_object in banned_items:
        raise DeletionRefused('Will not delete %s!' % (fs_object))
    if os.path.isdir(fs_object):
        del_opts = "-rf"
    else:
        del_opts = '-f'
    # BUG FIX: quote the target (same convention as copy_file) so paths
    # containing spaces survive the shlex.split performed in run_cmd;
    # previously such paths were split into multiple rm arguments.
    command_line = " ".join(('rm', del_opts, '"%s"' % fs_object))
    try:
        result = run_cmd(command_line, as_root)
    except RunCommandError as e:
        raise SafeDeleteError(str(e))
    return result
class MoveError(Exception):
    '''Raised when the underlying mv command fails.'''
    pass
def move(source, target, as_root):
    # Thin wrapper over 'mv', optionally via sudo; converts
    # RunCommandError into MoveError.
    try:
        run_cmd_dropstdout(" ".join(('mv', source, target)), as_root)
    except RunCommandError as e:
        raise MoveError(str(e))
class IsDirectoryError(Exception):
    '''Raised when a file operation is given a directory as source.'''
    pass
class CopyFileError(Exception):
    '''Raised when the underlying cp command fails.'''
    pass
def copy_file(fname, destination, as_root):
    '''Copy a single file (optionally via sudo), preserving attributes.

    Raises IsDirectoryError if fname is a directory and CopyFileError
    if the cp command fails.  Returns run_cmd's captured output.
    '''
    MsgUser.debug("Copying %s to %s (as root? %s)" % (
        fname, destination, as_root))
    if os.path.isdir(fname):
        raise IsDirectoryError('Source (%s) is a directory!' % (fname))
    if os.path.isdir(destination):
        # Ensure that copying into a folder we have a terminating slash
        destination = destination.rstrip('/') + "/"
    # Quote both paths so they survive the shlex.split inside run_cmd.
    command_line = 'cp -p "%s" "%s"' % (fname, destination)
    try:
        return run_cmd(command_line, as_root)
    except RunCommandError as e:
        raise CopyFileError(str(e))
def file_contains(fname, search_for):
    '''Equivalent of grep: True if any line of fname contains the
    literal string search_for (regex metacharacters are escaped).'''
    regex = compile(escape(search_for))
    found = False
    MsgUser.debug("In file_contains.")
    MsgUser.debug("Looking for %s in %s." % (search_for, fname))
    # 'with' guarantees the handle is closed even if iteration raises
    # (the original leaked the handle on error).
    with open(fname, 'r') as f:
        for line in f:
            if regex.search(line):
                found = True
                break
    return found
def file_contains_1stline(fname, search_for):
    '''Equivalent of grep: return the first line of fname containing the
    literal string search_for, or '' when there is no match.'''
    regex = compile(escape(search_for))
    found = ''
    MsgUser.debug("In file_contains_1stline.")
    MsgUser.debug("Looking for %s in %s." % (search_for, fname))
    # 'with' guarantees the handle is closed even if iteration raises
    # (the original leaked the handle on error).
    with open(fname, 'r') as f:
        for line in f:
            if regex.search(line):
                found = line
                break
    return found
def line_string_replace(line, search_for, replace_with):
    # Literal substitution within a line; both the pattern and the
    # replacement are regex-escaped before re.sub is applied.
    pattern = escape(search_for)
    replacement = escape(replace_with)
    return sub(pattern, replacement, line)
def line_starts_replace(line, search_for, replace_with):
    # If the line begins with search_for, replace the entire line with
    # replace_with (newline-terminated); otherwise leave it untouched.
    if not line.startswith(search_for):
        return line
    return replace_with + '\n'
class MoveFileError(Exception):
    '''Raised when a file could not be moved into place.'''
    pass
def move_file(from_file, to_file, requires_root=False):
    '''Move a file, using /bin/cp via sudo if requested.
    Will work around known bugs in python.'''
    if requires_root:
        # Copy with sudo then remove the source; a plain mv could fail
        # across mount points or ownership boundaries.
        try:
            run_cmd_dropstdout(" ".join(
                ("/bin/cp", from_file, to_file)), as_root=True)
        except RunCommandError as e:
            MsgUser.debug(e)
            raise MoveFileError("Failed to move %s (%s)" % (from_file, str(e)))
        os.remove(from_file)
    else:
        try:
            move(from_file, to_file, requires_root)
        except OSError as e:
            # Handle bug in some python versions on OS X writing to NFS home
            # folders, Python tries to preserve file flags but NFS can't do
            # this. It fails to catch this error and ends up leaving the file
            # in the original and new locations!
            if e.errno == 45:
                # Check if new file has been created:
                if os.path.isfile(to_file):
                    # Check if original exists
                    if os.path.isfile(from_file):
                        # Destroy original and continue
                        os.remove(from_file)
                else:
                    # Copy failed entirely: fall back to a plain cp
                    # followed by removing the source.
                    try:
                        run_cmd_dropstdout("/bin/cp %s %s" % (
                            from_file, to_file), as_root=False)
                    except RunCommandError as e:
                        MsgUser.debug(e)
                        raise MoveFileError("Failed to copy from %s (%s)" % (
                            from_file, str(e)))
                    os.remove(from_file)
            else:
                raise
        except Exception:
            raise
class EditFileError(Exception):
    '''Raised when in-place editing of a file fails.'''
    pass
def edit_file(fname, edit_function, search_for, replace_with, requires_root):
    '''Rewrite fname line by line, applying edit_function(line,
    search_for, replace_with) to each line, then atomically replace the
    original via move_file.  Raises EditFileError on failure.'''
    try:
        (tmpfile, tmpfname) = temp_file_name(mode='w')
        # 'with' closes the source even if edit_function raises
        # (the original leaked the handle in that case).
        with open(fname) as src:
            for line in src:
                tmpfile.write(edit_function(line, search_for, replace_with))
        tmpfile.close()
        try:
            move_file(tmpfname, fname, requires_root)
        except MoveFileError as e:
            MsgUser.debug(e)
            os.remove(tmpfname)
            raise EditFileError("Failed to edit %s (%s)" % (fname, str(e)))
    except IOError as e:
        MsgUser.debug(e.strerror)
        raise EditFileError("Failed to edit %s (%s)" % (fname, str(e)))
    MsgUser.debug("Modified %s (search %s; replace %s)." % (
        fname, search_for, replace_with))
class AddToFileError(Exception):
    '''Raised when appending lines to a file fails.'''
    pass
def add_to_file(fname, add_lines, requires_root):
    '''Append lines (a string or a sequence of lines) to the end of
    fname, writing through a temp file + move_file so the operation is
    as atomic as possible.  Raises AddToFileError on failure.'''
    if isinstance(add_lines, str):
        add_lines = add_lines.split('\n')
    try:
        (tmpfile, tmpfname) = temp_file_name(mode='w')
        # 'with' closes the source even on error (original leaked it).
        with open(fname) as src:
            for line in src:
                tmpfile.write(line)
        # Blank separator line before the appended content.
        tmpfile.write('\n')
        for line in add_lines:
            tmpfile.write(line)
            tmpfile.write('\n')
        tmpfile.close()
        try:
            move_file(tmpfname, fname, requires_root)
        except MoveFileError as e:
            os.remove(tmpfname)
            MsgUser.debug(e)
            raise AddToFileError("Failed to add to file %s (%s)" % (
                fname, str(e)))
    except IOError as e:
        MsgUser.debug(e.strerror + tmpfname + fname)
        raise AddToFileError("Failed to add to file %s" % (fname))
    MsgUser.debug("Modified %s (added %s)" % (fname, '\n'.join(add_lines)))
class CreateFileError(Exception):
    '''Raised when a new file could not be created.'''
    pass
def create_file(fname, lines, requires_root):
    '''Create a new file containing the lines given (a string or a
    sequence of lines), via a temp file + move_file.
    Raises CreateFileError on failure.'''
    if isinstance(lines, str):
        lines = lines.split('\n')
    try:
        (tmpfile, tmpfname) = temp_file_name(mode='w')
        for line in lines:
            tmpfile.write(line)
            tmpfile.write('\n')
        tmpfile.close()
        try:
            move_file(tmpfname, fname, requires_root)
        # BUG FIX: move_file raises MoveFileError, but this handler used
        # to catch CreateFileError, so the temp file was never cleaned
        # up on a failed move (cf. edit_file/add_to_file which catch
        # MoveFileError correctly).
        except MoveFileError as e:
            os.remove(tmpfname)
            MsgUser.debug(e)
            raise CreateFileError("Failed to edit %s (%s)" % (fname, str(e)))
    except IOError as e:
        MsgUser.debug(e.strerror)
        raise CreateFileError("Failed to create %s" % (fname))
    MsgUser.debug("Created %s (added %s)" % (fname, '\n'.join(lines)))
class UnsupportedOs(Exception):
    '''Raised when the running platform is not supported.'''
    pass
class Host(object):
    '''Work out which platform we are running on.

    All detection runs once, at class-definition time; the results are
    exposed as class attributes (o_s, arch, vendor, version, glibc,
    bits, supported, ...).
    '''
    o_s = platform.system().lower()
    arch = platform.machine()
    applever = ''
    os_type = os.name
    supported = True
    if o_s == 'darwin':
        vendor = 'apple'
        version = Version(platform.release())
        (applever, _, _) = platform.mac_ver()
        glibc = ''
    elif o_s == 'linux':
        # default to this if we can't detect the linux distro
        fallback_vendor = 'centos'
        fallback_version = '7.8.2003'
        # python 2.6-3.7 has a linux_distribution function
        if hasattr(platform, 'linux_distribution'):
            (vendor, version, _) = platform.linux_distribution(
                full_distribution_name=0)
        # linux_distribution is not present in python >= 3.8
        else:
            vendor, version = fallback_vendor, fallback_version
        try:
            vendor = vendor.lower()
            version = Version(version)
        except ValueError:
            vendor = fallback_vendor
            version = Version(fallback_version)
        glibc = platform.libc_ver()[1]
    else:
        supported = False
    # Word size derived from the machine architecture.
    # BUG FIX: there was previously no fallback branch, so on unlisted
    # architectures (e.g. arm64/aarch64) 'bits' was never defined and
    # any later Host.bits access raised AttributeError.
    if arch == 'x86_64':
        bits = '64'
    elif arch in ('aarch64', 'arm64'):
        bits = '64'
    elif arch == 'i686':
        bits = '32'
    else:
        # 'Power Macintosh' and anything unrecognised
        bits = ''
def is_writeable(location):
    '''Return True when the current user can create a file in location.

    Probes by creating (and immediately discarding) a NamedTemporaryFile
    there.  Permission errors mean False; any other OS error is
    re-raised unchanged.
    '''
    try:
        probe = tempfile.NamedTemporaryFile(mode='w+b', dir=location)
        probe.close()
    except OSError as e:
        if e.errno in (errno.EACCES, errno.EPERM):
            return False
        raise
    return True
def is_writeable_as_root(location):
    '''Check if sudo can write to a given location'''
    # This requires us to use sudo: write a small marker file locally,
    # then try to cp it into the target location as root and remove it.
    (f, fname) = temp_file_name(mode='w')
    f.write("FSL")
    f.close()
    result = False
    tmptarget = '/'.join((location, os.path.basename(fname)))
    MsgUser.debug(" ".join(('/bin/cp', fname, tmptarget)))
    try:
        run_cmd_dropstdout(" ".join(('/bin/cp',
                                     fname, tmptarget)), as_root=True)
        result = True
        os.remove(fname)
        # Clean up the probe file we just copied into the target.
        run_cmd_dropstdout(" ".join(('/bin/rm',
                                     '-f', tmptarget)), as_root=True)
    except RunCommandError as e:
        # cp (or rm) failed: treat the location as not root-writeable.
        MsgUser.debug(e)
        os.remove(fname)
        result = False
    MsgUser.debug("Writeable as root? %s" % (result))
    return result
class ChecksumCalcError(Exception):
    '''Raised when a file checksum could not be computed.'''
    pass
def sha256File(filename, bs=1048576):
    '''Returns the sha256 sum of the given file.

    Reads in bs-byte chunks, driving a percentage Progress_bar.  Falls
    back to the external sha256sum tool on ancient Pythons without
    hashlib; raises ChecksumCalcError if that also fails.
    '''
    MsgUser.message("Checking FSL package")
    try:
        import hashlib
        f = open(filename, 'rb')
        pb = Progress_bar(mx=os.path.getsize(filename), percentage=True)
        # 'position' is a dynamic attribute used only by this function.
        pb.position = 0
        fhash = hashlib.sha256()
        data = f.read(bs)
        # A short read signals the final chunk; it is hashed after the loop.
        while len(data) == bs:
            fhash.update(data)
            data = f.read(bs)
            pb.position += len(data)
            pb.update(pb.position)
        fhash.update(data)
        f.close()
        return fhash.hexdigest()
    except ImportError:
        # No SHA256 support on python pre-2.5 so call the OS to do it.
        try:
            result = run_cmd(" ".join(('sha256sum', '-b', filename)))
            return parsesha256sumfile(result)
        except RunCommandError as e:
            MsgUser.debug("SHA256 calculation error %s" % (str(e)))
            raise ChecksumCalcError
def parsesha256sumfile(sha256string):
    '''Extract the checksum from `sha256sum -b` / `shasum -a 256`
    output, which has the form "<hex digest> *<filename>".'''
    # BUG FIX: split only on the first '*' - an unbounded split raised
    # ValueError (too many values to unpack) for filenames that
    # themselves contain a '*'.
    (sha256, _) = sha256string.split("*", 1)
    return sha256.strip()
def md5File(filename, bs=1048576):
    '''Returns the MD5 sum of the given file.

    Reads in bs-byte chunks, driving a percentage Progress_bar.  Uses
    hashlib where available, otherwise the legacy md5 module (py<2.5).
    '''
    MsgUser.message("Checking FSL package")
    try:
        import hashlib
        fhash = hashlib.md5()
    except ImportError:
        # Pre-2.5 Python: fall back to the old md5 module.
        import md5
        fhash = md5.new()
    f = open(filename, 'rb')
    pb = Progress_bar(mx=os.path.getsize(filename), percentage=True)
    # 'position' is a dynamic attribute used only by this function.
    pb.position = 0
    data = f.read(bs)
    # A short read signals the final chunk; it is hashed after the loop.
    while len(data) == bs:
        fhash.update(data)
        data = f.read(bs)
        pb.position += len(data)
        pb.update(pb.position)
    fhash.update(data)
    f.close()
    return fhash.hexdigest()
def file_checksum(filename, chktype='sha256'):
    '''Compute a checksum of filename using the named algorithm
    ('sha256' or 'md5'); raises ChecksumCalcError otherwise.'''
    dispatch = {'sha256': sha256File, 'md5': md5File}
    try:
        checker = dispatch[chktype]
    except KeyError:
        raise ChecksumCalcError('Unrecognised checksum type')
    return checker(filename)
class OpenUrlError(Exception):
    '''Raised when a URL cannot be opened.'''
    pass
def open_url(url, start=0, timeout=20):
    '''Open url and return the response object.

    url -- address to fetch
    start -- byte offset; when non-zero a Range header is sent so a
             partial download can be resumed
    timeout -- socket timeout in seconds (set process-wide)
    Raises OpenUrlError on HTTP errors, URL errors or timeouts.
    '''
    socket.setdefaulttimeout(timeout)
    MsgUser.debug("Attempting to download %s." % (url))
    try:
        req = urlrequest.Request(url)
        if start != 0:
            # Resume a partial download from the given offset
            req.headers['Range'] = 'bytes=%s-' % (start)
        rf = urlrequest.urlopen(req)
    except urlerror.HTTPError as e:
        MsgUser.debug("%s %s" % (url, e.msg))
        raise OpenUrlError("Cannot find file %s on server (%s). "
                           "Try again later." % (url, e.msg))
    except urlerror.URLError as e:
        # e.reason may be a string or an exception carrying (errno, msg)
        if type(e.reason) != str:
            errno = e.reason.args[0]
            if len(e.reason.args) > 1:
                message = e.reason.args[1]
                # give up on trying to identify both the errno and message
            else:
                message = e.reason.args
            if errno == 8:
                # Bad host name
                MsgUser.debug("%s %s" % (url,
                              "Unable to find FSL download "
                              "server in the DNS"))
            else:
                # Other error
                MsgUser.debug("%s %s" % (url, message))
        else:
            message = str(e.reason)
        raise OpenUrlError(
            "Cannot find %s (%s). Try again later." % (url, message))
    except socket.timeout as e:
        MsgUser.debug(e)
        raise OpenUrlError("Failed to contact FSL web site. Try again later.")
    return rf
class DownloadFileError(Exception):
    '''Raised when a file download fails after all retries.'''
    pass
def download_file(url, localf, timeout=20):
    '''Get a file from the url given storing it in the local file specified

    Retries up to five times, resuming from the bytes already written
    (HTTP Range request) on each retry.
    Raises DownloadFileError when the download cannot be completed.
    '''
    try:
        rf = open_url(url, 0, timeout)
    except OpenUrlError as e:
        raise DownloadFileError(str(e))
    metadata = rf.headers
    rf_size = int(metadata.get("Content-Length"))
    dl_size = 0
    block = 16384
    x = 0
    y = 0
    pb = Progress_bar(x, y, rf_size, numeric=True)
    for attempt in range(1, 6):
        # Attempt download 5 times before giving up
        pause = timeout
        try:
            try:
                # append mode so a resumed attempt continues the file
                lf = open(localf, 'ab')
            except Exception:
                raise DownloadFileError("Failed to create temporary file.")
            while True:
                buf = rf.read(block)
                if not buf:
                    break
                dl_size += len(buf)
                lf.write(buf)
                pb.update(dl_size)
            lf.close()
        except (IOError, socket.timeout) as e:
            # NOTE(review): 'lf' is left open on this path; the next
            # attempt re-opens the file in append mode — confirm intended.
            MsgUser.debug(e.strerror)
            MsgUser.message("\nDownload failed re-trying (%s)..." % attempt)
            pause = 0
        if dl_size != rf_size:
            time.sleep(pause)
            MsgUser.message("\nDownload failed re-trying (%s)..." % attempt)
            try:
                # re-open the URL starting from where we got to
                rf = open_url(url, dl_size, timeout)
            except OpenUrlError as e:
                MsgUser.debug(e)
        else:
            break
    if dl_size != rf_size:
        raise DownloadFileError("Failed to download file.")
def build_url_with_protocol(protocol, base, parts):
    '''Join a protocol, base host and path parts into a URL, stripping
    stray slashes from each component.'''
    segments = [protocol + '://' + base.strip('/')]
    for piece in parts:
        segments.append(piece.strip('/'))
    return '/'.join(segments)
def build_url(parts):
    '''Join URL parts with '/', stripping stray slashes from each part.
    The first element must exist (IndexError otherwise).'''
    head, tail = parts[0], parts[1:]
    segments = [head.strip('/')]
    segments.extend(piece.strip('/') for piece in tail)
    return '/'.join(segments)
class SiteNotResponding(Exception):
    '''Raised when a download mirror does not answer.'''
    pass
def fastest_mirror(main_mirrors, mirrors_file, timeout=20):
    '''Find the fastest mirror for FSL downloads.

    main_mirrors -- list of base URLs to query for the mirror list
    mirrors_file -- name of the mirror-list file on those servers
    timeout -- socket timeout in seconds (set process-wide)
    Returns the URL of the most responsive mirror (or the only one).
    Raises SiteNotResponding / ServerFailure on failure.
    '''
    MsgUser.debug("Calculating fastest mirror")
    socket.setdefaulttimeout(timeout)
    # Get the mirror list from the url
    fastestmirrors = {}
    mirrorlist = []
    for m in main_mirrors:
        MsgUser.debug("Trying %s" % (m))
        m_url = '/'.join((m.strip('/'), mirrors_file))
        MsgUser.debug("Attempting to open %s" % (m_url))
        try:
            response = urlrequest.urlopen(url=m_url)
        except urlerror.HTTPError as e:
            # NOTE(review): a single failing main mirror aborts the whole
            # loop rather than trying the next one — confirm intended.
            MsgUser.debug("%s %s" % (m_url, e.msg))
            raise SiteNotResponding(e.msg)
        except urlerror.URLError as e:
            if isinstance(e.reason, socket.timeout):
                MsgUser.debug("Time out trying %s" % (m_url))
                raise SiteNotResponding(m)
            else:
                MsgUser.debug(str(e.reason))
                raise SiteNotResponding(str(e.reason))
        except socket.timeout as e:
            MsgUser.debug(e)
            raise SiteNotResponding(str(e))
        except Exception as e:
            MsgUser.debug("Unhandled exception %s" % (str(e)))
            raise
        else:
            # Keep the mirror list from the last main mirror that answered
            mirrorlist = response.read().decode('utf-8').strip().split('\n')
            MsgUser.debug("Received the following "
                          "mirror list %s" % (mirrorlist))
            continue
    if len(mirrorlist) == 0:
        raise ServerFailure("Cannot find FSL download servers")
    # Check timings from the urls specified
    if len(mirrorlist) > 1:
        for mirror in mirrorlist:
            MsgUser.debug("Trying %s" % (mirror))
            then = time.time()
            if mirror.startswith('http:'):
                serverport = 80
            elif mirror.startswith('https:'):
                serverport = 443
            else:
                raise ServerFailure("Unrecognised protocol")
            try:
                # NOTE(review): 'mirror' is a full URL here but
                # socket.create_connection expects a bare host name —
                # verify against the real contents of the mirror list.
                mysock = socket.create_connection((mirror, serverport),
                                                  timeout)
                pingtime = time.time() - then
                mysock.close()
                fastestmirrors[pingtime] = mirror
                MsgUser.debug("Mirror responded in %s seconds" % (pingtime))
            except socket.gaierror as e:
                MsgUser.debug("%s can't be resolved" % (e))
            except socket.timeout as e:
                MsgUser.debug(e)
        if len(fastestmirrors) == 0:
            raise ServerFailure('Failed to contact any FSL download sites.')
        download_url = fastestmirrors[min(fastestmirrors.keys())]
    else:
        download_url = mirrorlist[0]
    return download_url
# Concept:
# Web app creates the following files:
# fslmirrorlist.txt - contains a list of mirror urls
# fslreleases.json - contains the available maps for oses
# mapping to a download url
# {'installer' {
# 'filename': 'fslinstaller.py',
# 'version': '3.0.0',
# 'date': '02/03/2017',
# 'checksum_type', 'sha256',
# 'checksum'},
# 'linux' : {
# 'centos' : {
# 'x86_64': {
# '6': {
# '5.0.9': {
# 'filename': 'fsl-5.0.9-centos6_64.tar.gz',
# 'version': '5.0.9',
# 'date': '01/02/2017',
# 'checksum_type', 'sha256',
# 'checksum': 'abf645662bcf4453235',
# },
# },
# },
# },
# 'rhel' : {'alias': 'centos'}},
# 'apple' : {
# 'darwin' : {
# 'x86_64': {
# '11': {
# ....
# },
# }
@memoize
def get_web_manifest(download_url, timeout=20):
    '''Download the FSL manifest from download_url'''
    socket.setdefaulttimeout(timeout)
    MsgUser.debug("Looking for manifest at %s." % (download_url))
    MsgUser.debug("Downloading JSON file")
    manifest_url = download_url + Settings.manifest_json
    return get_json(manifest_url)
class GetFslDirError(Exception):
    '''Raised when a usable FSL install folder cannot be determined.'''
    pass
@memoize
def get_fsldir(specified_dir=None, install=False):
    '''Find the installed version of FSL using FSLDIR
    or location of this script

    specified_dir -- user-supplied FSL folder; validated and returned
    install -- True when installing (the folder need not already
               contain an FSL install)
    Raises GetFslDirError when no suitable folder can be determined.
    '''
    def validate_fsldir(directory):
        # Reject the filesystem root, missing/non-directory parents,
        # and existing folders that don't look like an FSL install.
        parent = os.path.dirname(directory)
        if parent == directory:
            raise GetFslDirError(
                "%s appears to be the root folder" %
                parent)
        if not os.path.exists(parent):
            raise GetFslDirError(
                "%s doesn't exist" %
                parent)
        if not os.path.isdir(parent):
            raise GetFslDirError(
                "%s isn't a directory" %
                parent)
        if (os.path.exists(directory) and not
            os.path.exists(os.path.join(
                directory, 'etc', 'fslversion'
            ))):
            raise GetFslDirError(
                "%s exists and doesn't appear to be an installed FSL folder" %
                directory)
    if specified_dir:
        specified_dir = os.path.abspath(specified_dir)
        if install is False:
            if not check_fsl_install(specified_dir):
                raise GetFslDirError(
                    "%s isn't an 'fsl' folder" %
                    specified_dir)
        else:
            validate_fsldir(specified_dir)
        return specified_dir
    try:
        fsldir = os.environ['FSLDIR']
        try:
            validate_fsldir(fsldir)
        except GetFslDirError:
            # FSLDIR environment variable is incorrect!
            MsgUser.warning('FSLDIR environment variable '
                            'does not point at FSL install, ignoring...')
            MsgUser.debug('FSLDIR is set to %s - '
                          'this folder does not appear to exist' % (fsldir))
            fsldir = None
        else:
            fsldir = fsldir.rstrip('/')
            if MsgUser.isquiet():
                return fsldir
    except KeyError:
        # Look to see if I'm in an FSL install
        try:
            my_parent = os.path.dirname(
                os.path.dirname(os.path.realpath(__file__)))
        except NameError:
            # Running in debugger - __file__ not set, assume it's cwd
            my_parent = os.path.dirname(
                os.path.dirname(os.getcwd()))
        try:
            validate_fsldir(my_parent)
            fsldir = my_parent
        except GetFslDirError:
            fsldir = None
    if not install:
        MsgUser.debug("asking about %s" % (fsldir))
        valid_dir = False
        while not valid_dir:
            fsldir = Settings.inst_qus.ask_question(
                'inst_loc', default=fsldir)
            try:
                validate_fsldir(fsldir)
                valid_dir = True
            except GetFslDirError as e:
                # BUG FIX: was MsgUser.falied (typo); the install branch
                # below already uses MsgUser.failed.
                MsgUser.failed(str(e))
        return fsldir
    else:
        if not MsgUser.isquiet():
            valid_dir = False
            # NOTE(review): fsldir is hard-coded each iteration, so an
            # invalid /usr/local/fsl would loop forever printing the
            # hint; the registered 'location' question is never asked
            # here — confirm against upstream.
            while not valid_dir:
                fsldir = "/usr/local/fsl"
                try:
                    validate_fsldir(fsldir)
                    valid_dir = True
                except GetFslDirError as e:
                    MsgUser.failed(str(e))
                    MsgUser.message(
                        '''Hint - press Enter to select the default value '''
                        '''given in the square brackets.
If you are specifying a destination folder this needs to either be an existing
FSL install folder or a folder that doesn't already exist.''')
                    fsldir = None
        else:
            raise GetFslDirError(
                "I can't locate FSL, try again using '-d <FSLDIR>' "
                "to specify where to find the FSL install")
    return fsldir
def archive_version(archive):
    '''Takes the path to a FSL install file
    and works out what version it is.'''
    if not os.path.isfile(archive):
        raise NotAFslVersion("%s is not a file" % (archive))
    # file is of form: fsl-V.V.V-platform.extensions
    (_, vstring, _) = archive.strip().split('-', 2)
    try:
        return Version(vstring)
    except ValueError:
        raise NotAFslVersion(
            "%s doesn't look like "
            "a version number" % (vstring))
class NotAFslVersion(Exception):
    '''Raised when a string/file name does not parse as an FSL version.'''
    pass
class GetInstalledVersionError(Exception):
    '''Raised when the installed FSL version cannot be read.'''
    pass
def get_installed_version(fsldir):
    '''Takes path to FSLDIR and finds installed version details

    Reads <fsldir>/etc/fslversion and returns it as a Version.
    Raises NotAFslVersion when the contents do not parse and
    GetInstalledVersionError when the file is missing.
    '''
    MsgUser.debug("Looking for fsl in %s" % fsldir)
    v_file = os.path.join(fsldir, 'etc', 'fslversion')
    if os.path.exists(v_file):
        # 'with' ensures the file is closed even if reading fails
        with open(v_file) as f:
            v_string = f.readline()
        try:
            version = Version(v_string.strip())
        except ValueError:
            raise NotAFslVersion(
                "%s not a valid "
                "version string" % (v_string.strip()))
    else:
        MsgUser.debug(
            "No version information found - "
            "is this actually an FSL dir?")
        raise GetInstalledVersionError(
            "Cannot find the version information - "
            "is this actually an FSL dir?")
    MsgUser.debug("Found version %s" % (version))
    return version
def which_shell():
    '''Return the basename of the user's login shell (e.g. 'bash').

    Falls back to the empty string when SHELL is not set in the
    environment (previously os.path.basename(None) raised TypeError).
    '''
    return os.path.basename(os.getenv("SHELL", ""))
class SelfUpdateError(Exception):
    '''Raised when the installer cannot update itself.'''
    pass
def self_update(server_url):
    '''Check for and apply an update to myself

    When a newer installer is published, download it, verify its
    checksum and re-exec it with the same command-line arguments.
    Raises SelfUpdateError when the update cannot be fetched/verified.
    '''
    # See if there is a newer version available
    if 'fslinstaller' in sys.argv[0]:
        try:
            installer = get_installer(server_url)
        except GetInstallerError as e:
            MsgUser.debug("Failed to get installer version %s." % (str(e)))
            raise SelfUpdateError('Failed to get installer version. '
                                  'Please try again later.')
        MsgUser.debug("Server has version " + installer['version'])
        # 'version' is the module-level version of this running script
        if Version(installer['version']) <= version:
            MsgUser.debug("Installer is up-to-date.")
            return
        # There is a new version available - download it
        MsgUser.message("There is a newer version (%s) of the installer "
                        "(you have %s) updating..." % (
                            installer['version'], version))
        (_, tmpfname) = temp_file_name(mode='w', close=True)
        downloaded = False
        while downloaded is False:
            try:
                file_url = '/'.join(
                    (Settings.mirror.rstrip('/'), installer['filename']))
                download_file(
                    url=file_url,
                    localf=tmpfname)
                if (
                    file_checksum(tmpfname, installer['checksum_type']) !=
                    installer['checksum']):
                    raise SelfUpdateError(
                        "Found update to installer but download "
                        "was corrupt. Please try again later.")
            except DownloadFileError as e:
                # Fall back to the main download site once before giving up
                if Settings.mirror != Settings.main_mirror:
                    MsgUser.warning(
                        "Download from mirror failed, re-trying from "
                        "main FSL download site")
                    Settings.mirror = Settings.main_mirror
                else:
                    MsgUser.debug("Failed to update installer %s." % (str(e)))
                    raise SelfUpdateError(
                        'Found update to installer but unable to '
                        'download the new version. Please try again.')
            else:
                downloaded = True
        # Now run the new installer
        # EXEC new script with the options we were given
        os.chmod(tmpfname, 0o755)
        c_args = [sys.executable, tmpfname, ]
        c_args.extend(sys.argv[1:])
        MsgUser.debug(
            "Calling %s %s" % (sys.executable, c_args))
        os.execv(sys.executable, c_args)
    else:
        # We are now running the newly downloaded installer
        MsgUser.ok('Installer updated to latest version %s' % (str(version)))
        MsgUser.ok("Installer self update successful.")
class ServerFailure(Exception):
    '''Raised when the FSL download servers cannot be used.'''
    pass
class BadVersion(Exception):
    '''Raised for unusable version information.'''
    pass
class GetInstallerError(Exception):
    '''Raised when installer metadata cannot be fetched.'''
    pass
def get_installer(server_url):
    '''Return the 'installer' section of the web manifest.'''
    MsgUser.debug("Checking %s for "
                  "installer information" % (server_url))
    web_manifest = get_web_manifest(server_url)
    return web_manifest['installer']
@memoize
def get_releases(server_url):
    '''Return a hash with all information about available
    versions for this OS

    Walks the manifest for this host's OS/vendor/arch, following
    'alias' entries (e.g. a vendor aliased to another) and falling
    back to earlier major OS versions when the current one is absent.
    Raises UnsupportedOs when nothing suitable is found.
    '''
    computer = Host
    MsgUser.debug("Getting web manifest")
    manifest = get_web_manifest(server_url)
    try:
        os_definition = manifest[computer.o_s][computer.vendor]
    except KeyError:
        raise UnsupportedOs("%s %s not supported by this installer" % (
            computer.o_s, computer.vendor
        ))
    t_version = computer.version.major
    alias_t = 'alias'
    if alias_t in list(os_definition.keys()):
        if str(t_version) in os_definition[alias_t]:
            # This vendor is an alias for another; use the parent's
            # definition and the mapped version.
            os_parent = os_definition[alias_t][
                str(t_version)]['parent']
            t_version = os_definition[alias_t][
                str(t_version)]['version']
            os_definition = manifest[computer.o_s][os_parent]
    if computer.arch not in list(os_definition.keys()):
        raise UnsupportedOs("%s %s not supported" % (
            computer.vendor,
            computer.arch
        ))
    os_def = os_definition[computer.arch]
    # Fall back through older major versions until one is in the manifest
    while t_version > 0:
        MsgUser.debug("Trying version %s" % (t_version))
        if str(t_version) not in list(os_def.keys()):
            MsgUser.debug("...not found")
            t_version -= 1
        else:
            break
    if t_version == 0:
        raise UnsupportedOs("%s %s not supported" % (
            computer.vendor,
            computer.version.major
        ))
    elif t_version != computer.version.major:
        MsgUser.warning(
            "%s %s not officially supported "
            "- trying to locate support for an earlier "
            "version - this may not work" % (
                computer.vendor, computer.version.major))
    return os_definition[computer.arch][str(t_version)]
class ExtraDownloadError(Exception):
    '''Raised when an extra (sources/feeds) is not in the manifest.'''
    pass
@memoize
def get_extra(server_url, extra_type):
    '''Return a hash with all information about available
    versions of source code'''
    MsgUser.debug("Getting web manifest")
    manifest = get_web_manifest(server_url)
    if extra_type not in manifest:
        raise ExtraDownloadError("Unrecognised extra %s" % (extra_type))
    return manifest[extra_type]
class ImproperlyConfigured(Exception):
    '''Raised for invalid installer configuration.'''
    pass
def list_releases(url):
    '''Print the FSL versions available for this OS, newest first.'''
    releases = get_releases(url)
    MsgUser.message("Available FSL versions for this OS:")
    MsgUser.debug(releases)
    rels = []
    for ver, release in list(releases.items()):
        # Entries without a date come from third-party packaging
        rels.append((ver, release.get('date', "Third-party package")))
    for ver, rdate in sorted(rels, reverse=True):
        MsgUser.message("%s\t(%s)" % (ver, rdate))
def list_builds(url):
    '''Lists all available FSL builds. '''
    manifest = dict(get_web_manifest(url))
    MsgUser.message("All available FSL builds:")
    # NOTE(review): key order here is manifest[os][vendor]
    # ('linux'/'centos', 'darwin'/'apple'); the schema sketch earlier in
    # this file shows 'apple' -> 'darwin' — confirm against the live
    # manifest.
    centos = manifest['linux']['centos']['x86_64']
    macos = manifest['darwin']['apple']['x86_64']
    def get_platform(s):
        # Extract the platform suffix from 'fsl-<version>-<platform>.tar.gz'
        match = re.match(r'^fsl-(.+)-(.+).tar.gz$', s)
        plat = match.group(2)
        return plat
    # Map each FSL version to the set of platforms it was built for
    fslversions = collections.defaultdict(set)
    for builds in itertools.chain(list(centos.values()), list(macos.values())):
        for fslversion, info in list(builds.items()):
            fslversions[fslversion].add(get_platform(info['filename']))
    for fslversion, plats in list(fslversions.items()):
        MsgUser.message('%s - %s' % (fslversion, ', '.join(plats)))
def latest_release(url):
    '''Return the release details for the newest available version.'''
    releases = get_releases(url)
    MsgUser.debug("Got version information: %s" % (releases))
    versions = [Version(key) for key in list(releases.keys())]
    MsgUser.debug("Versions: %s" % (versions))
    newest = sorted(versions)[-1]
    return releases[str(newest)]
class InstallInstallerError(Exception):
    '''Raised when this script cannot be copied into $FSLDIR/etc.'''
    pass
def install_installer(fsldir):
    '''Install this script into $FSLDIR/etc'''
    targetfolder = os.path.join(fsldir, 'etc')
    as_root = False
    installer = os.path.abspath(__file__)
    MsgUser.debug(
        "Copying fslinstaller (%s) to %s" % (
            installer,
            targetfolder))
    # Fall back to copying as root when the folder isn't user-writeable
    if not is_writeable(targetfolder):
        if not is_writeable_as_root(targetfolder):
            raise InstallInstallerError("Cannot write to folder as root user.")
        else:
            as_root = True
    copy_file(
        installer, os.path.join(targetfolder, "fslinstaller.py"),
        as_root)
class InstallQuestions(object):
    '''Registry of interactive installer questions.

    Each question is registered under a key with its prompt text,
    default answer, answer type ('bool' questions are yes/no; other
    types are returned verbatim), a validation function and an optional
    pre-processing function. When self.defaults is True questions are
    answered automatically with their defaults (unattended installs).
    '''
    def __init__(self):
        self.questions = {}
        self.validators = {}
        self.preprocs = {}
        self.type = {}
        self.default = {}
        self.defaults = False
        # BUG FIX: answers cache was never initialised, so enabling
        # defaults raised AttributeError in ask_question.
        self.answers = {}

    def add_question(self, key, question, default, qtype, validation_f, preproc_f=None):
        '''Register a question under key.'''
        self.questions[key] = question
        self.default[key] = default
        self.type[key] = qtype
        self.validators[key] = validation_f
        self.preprocs[key] = preproc_f

    def ask_question(self, key, default=None):
        '''Ask (or auto-answer) the question registered under key and
        return the validated, type-converted answer.'''
        no_answer = True
        validator = self.validators[key]
        preproc = self.preprocs[key]

        def parse_answer(q_type, answer):
            # 'bool' questions: only the literal answer 'yes' is True
            if q_type == 'bool':
                if answer.lower() == 'yes':
                    return True
                else:
                    return False
            else:
                return answer
        if not default:
            default = self.default[key]
        if self.defaults:
            # Non-interactive mode: accept the default without prompting.
            MsgUser.debug(self.questions[key])
            MsgUser.debug("Automatically using the default %s" % (default))
            # BUG FIX: 'answer' was never assigned on this path, causing
            # a NameError at the debug/return below.
            answer = parse_answer(self.type[key], default)
            self.answers[key] = answer
            no_answer = False
        while no_answer:
            MsgUser.question(
                "%s? %s:" % (
                    self.questions[key],
                    '[%s]' % (default)))
            if PYVER[0] == 2: your_answer = raw_input()
            else: your_answer = input()
            MsgUser.debug("Your answer was %s" % (your_answer))
            if your_answer == '':
                MsgUser.debug("You want the default")
                your_answer = default
            elif preproc is not None:
                your_answer = preproc(your_answer)
            if validator(your_answer):
                answer = parse_answer(self.type[key], your_answer)
                no_answer = False
        MsgUser.debug("Returning the answer %s" % (answer))
        return answer
def yes_no(answer):
    '''Validator: accept only 'yes' or 'no' (case-insensitive).'''
    if answer.lower() in ('yes', 'no'):
        return True
    MsgUser.message("Please enter yes or no.")
    return False
def check_install_location(folder):
    '''Don't allow relative paths'''
    MsgUser.debug("Checking %s is an absolute path" % (folder))
    relative_prefixes = ('./', '../', '~')
    if folder in ('.', '..') or folder.startswith(relative_prefixes):
        MsgUser.message("Please enter an absolute path.")
        return False
    return True
def external_validate(what_to_check):
    '''We will validate elsewhere'''
    # Always accept; real validation happens at the call site.
    return True
def check_fsl_install(fsldir):
    '''Check if this folder contains FSL install'''
    MsgUser.debug("Checking %s is an FSL install" % (fsldir))
    version_file = os.path.join(fsldir, 'etc', 'fslversion')
    return os.path.isdir(fsldir) and os.path.exists(version_file)
def fsl_downloadname(suffix, version):
    '''Build the canonical FSL download file name for a version/platform.'''
    return 'fsl-%s-%s' % (version, suffix)
class Settings(object):
    '''Class-level configuration for the installer: download servers,
    manifest file names, default install location and the interactive
    questions asked during installation.'''
    version = version
    title = "--- FSL Installer - Version %s ---" % (version)
    main_server = 'fsl.fmrib.ox.ac.uk'
    mirrors = [build_url_with_protocol('https',
                                       main_server, ('fsldownloads',
                                                     '')), ]
    mirrors_file = 'fslmirrorlist.txt'
    manifest_json = 'manifest.json'
    manifest_csv = 'manifest.csv'
    # 'mirror' is mutated at runtime to fall back to main_mirror
    main_mirror = mirrors[0]
    mirror = main_mirror
    applications = ['bin/fslview.app', 'bin/assistant.app']
    x11 = {'bad_versions': [],
           'download_url': "http://xquartz.macosforge.org/landing/",
           'apps': ['XQuartz.app', 'X11.app', ],
           'location': "/Applications/Utilities"}
    default_location = '/usr/local/fsl'
    post_inst_dir = "etc/fslconf"
    # Questions asked (or auto-answered) during install
    inst_qus = InstallQuestions()
    inst_qus.add_question('version_match',
                          "The requested version matches the installed "
                          "version - do you wish to re-install FSL",
                          'no', 'bool', yes_no)
    inst_qus.add_question('location',
                          "Where would you like the FSL install to be "
                          "(including the FSL folder name)",
                          default_location, 'path', check_install_location, os.path.abspath)
    inst_qus.add_question('del_old',
                          "FSL exists in the current location, "
                          "would you like to keep a backup of the old "
                          "version (N.B. You will not be able to use the old "
                          "version)",
                          'no', 'bool', yes_no)
    inst_qus.add_question('create',
                          "Install location doesn't exist, should I create it",
                          'yes', 'bool', yes_no)
    inst_qus.add_question('inst_loc',
                          "Where is the FSL folder (e.g. /usr/local/fsl)",
                          default_location, 'path', check_fsl_install)
    inst_qus.add_question('skipmd5',
                          "I was unable to download the checksum of "
                          "the install file so cannot confirm it is correct. "
                          "Would you like to install anyway",
                          'no', 'bool', yes_no)
    inst_qus.add_question('overwrite',
                          "There is already a local copy of the file, would "
                          "you like to overwrite it",
                          "yes", 'bool', yes_no)
    inst_qus.add_question('upgrade',
                          "Would you like to install upgrade",
                          "yes", 'bool', yes_no)
    inst_qus.add_question('update',
                          "Would you like to install update",
                          "yes", 'bool', yes_no)
def get_json(web_url):
    '''Fetch web_url and parse the response body as JSON.
    Raises ServerFailure when the URL cannot be opened.'''
    MsgUser.debug("Opening "+web_url)
    try:
        response = open_url(web_url)
        body = response.read().decode('utf-8')
    except OpenUrlError as e:
        raise ServerFailure(str(e))
    return json.loads(body)
# [ linux, centos, x86_64, 6, filename, 'fname',
# version, 'version', date, 'date', checksum_type, 'checksum_type',
# checksum, 'checksum', supported, 'true/false', notes, 'notes',
# instructions, 'instructions']
# [ linux, redhat, alias, centos, supported, True/false, version, 'version' ]
# [ 'installer', filename, 'fname', version, 'version', date, 'date',
# checksum_type, 'checksum_type', checksum, 'checksum', supported,
# 'true/false', notes, 'notes', instructions, 'instructions']
# [ feeds, filename, 'fname', version, 'version',
# date, 'date', checksum_type, 'checksum_type', checksum, 'checksum',
# supported, 'true/false', notes, 'notes', instructions, 'instructions']
# [ sources, filename, 'fname', version, 'version',
# date, 'date', checksum_type, 'checksum_type', checksum, 'checksum',
# supported, 'true/false', notes, 'notes', instructions, 'instructions']
class AutoDict(dict):
    '''Automatically create a nested dict'''
    def __getitem__(self, item):
        try:
            return dict.__getitem__(self, item)
        except KeyError:
            # Missing key: insert a fresh nested AutoDict and return it
            child = type(self)()
            self[item] = child
            return child

    def freeze(self):
        '''Returns a dict representation of an AutoDict'''
        plain = {}
        for key, val in list(self.items()):
            plain[key] = val.freeze() if type(val) == type(self) else val
        return plain
def get_csv_dict(web_url):
    '''Download a CSV manifest from web_url and convert it to the same
    nested-dict structure as the JSON manifest (see the row layouts in
    the comments above).

    Leading columns key the nesting; the remaining columns are
    alternating key,value pairs. Raises ServerFailure when the URL
    cannot be opened.
    '''
    MsgUser.debug("Opening "+web_url)
    try:
        url = open_url(web_url)
        manifest_reader = csv.reader(
            url, delimiter=',', quoting=csv.QUOTE_MINIMAL)
        a_dict = AutoDict()
        for line in manifest_reader:
            MsgUser.debug(line)
            if line[0] == 'feeds':
                # columns after the tag are key,value,key,value,...
                items = iter(line[1:])
                base_dict = dict(list(zip(items, items)))
                a_dict[line[0]] = base_dict
            elif line[0] == 'sources':
                items = iter(line[1:])
                base_dict = dict(list(zip(items, items)))
                a_dict[line[0]] = base_dict
            elif line[0] == 'installer':
                items = iter(line[1:])
                base_dict = dict(list(zip(items, items)))
                a_dict[line[0]] = base_dict
            else:
                # Install package or alias
                if line[2] == 'alias':
                    # [os, vendor, 'alias', parent, k, v, ...]
                    items = iter(line[4:])
                    base_dict = dict(list(zip(items, items)))
                    a_dict[
                        str(line[0])][
                        str(line[1])][
                        str(line[2])][
                        str(line[3])] = base_dict
                else:
                    # [os, vendor, arch, os_version, fsl_version, k, v, ...]
                    items = iter(line[5:])
                    base_dict = dict(list(zip(items, items)))
                    MsgUser.debug(
                        ",".join(
                            (line[0], line[1], line[2], line[3], line[4])))
                    a_dict[
                        str(line[0])][
                        str(line[1])][
                        str(line[2])][
                        str(line[3])][
                        str(line[4])] = base_dict
    except OpenUrlError as e:
        raise ServerFailure(str(e))
    MsgUser.debug(a_dict)
    return a_dict.freeze()
class InvalidVersion(Exception):
    '''Raised for an unusable requested version.'''
    pass
def get_web_version_and_details(server_url, request_version=None):
    '''Return (Version, details-dict) for request_version, or for the
    latest release when request_version is None.
    Raises DownloadError when the version is unknown or unsupported.
    '''
    if request_version is None:
        details = latest_release(server_url)
        try:
            version = Version(details['version'])
        except KeyError:
            # No version entry - maybe the manifest redirects us elsewhere
            try:
                redirect = details['redirect']
                raise DownloadError(
                    "Installer not supported on this platform."
                    "Please visit %s for download instructions" % redirect)
            except KeyError:
                MsgUser.debug(
                    "Can't find version or redirect - %s" % details)
                raise DownloadError(
                    "Unsupported OS"
                )
    else:
        MsgUser.debug("Requested version %s" % request_version)
        releases = get_releases(server_url)
        try:
            version = Version(request_version)
        except ValueError:
            raise DownloadError(
                "%s doesn't look like a version" % request_version)
        if request_version not in list(releases.keys()):
            raise DownloadError(
                "%s isn't an available version" % request_version)
        details = releases[request_version]
    return (version, details)
def download_release(
        server_url, to_temp=False,
        request_version=None, skip_verify=False,
        keep=False, source_code=False, feeds=False):
    '''Download an FSL release (or its sources/FEEDS) from server_url.

    to_temp -- download to a temporary file (forced off for extras)
    request_version -- specific version, or None for the latest
    skip_verify -- skip the checksum check
    keep -- not referenced in this function; presumably handled by the
            caller - TODO confirm
    source_code/feeds -- download that extra instead of the release
    Returns (local_filename, version, details).
    Raises DownloadError on any failure.
    '''
    (version, details) = get_web_version_and_details(
        server_url, request_version)
    if request_version is None:
        request_version = str(version)
    if source_code or feeds:
        if source_code:
            extra_type = 'sources'
            MsgUser.message("Downloading source code")
        else:
            extra_type = 'feeds'
            MsgUser.message("Downloading FEEDS")
        try:
            releases = get_extra(server_url, extra_type)
        except ExtraDownloadError as e:
            raise DownloadError(
                "Unable to find details for %s" % (extra_type)
            )
        # Extras are always downloaded to a named local file
        to_temp = False
        try:
            details = releases[request_version]
        except KeyError:
            raise DownloadError(
                "%s %s isn't available" % (request_version, extra_type)
            )
    MsgUser.debug(details)
    if to_temp:
        try:
            (_, local_filename) = temp_file_name(close=True)
        except Exception as e:
            MsgUser.debug("Error getting temporary file name %s" % (str(e)))
            raise DownloadError("Unable to begin download")
    else:
        local_filename = details['filename']
        if os.path.exists(local_filename):
            if os.path.isfile(local_filename):
                # Ask before clobbering an existing download
                MsgUser.message("%s exists" % (local_filename))
                overwrite = Settings.inst_qus.ask_question('overwrite')
                if overwrite:
                    MsgUser.warning(
                        "Erasing existing file %s" % local_filename)
                    try:
                        os.remove(local_filename)
                    except Exception:
                        raise DownloadError(
                            "Unabled to remove local file %s - remove"
                            " it and try again" % local_filename)
                else:
                    raise DownloadError("Aborting download")
            else:
                raise DownloadError(
                    "There is a directory named %s "
                    "- cannot overwrite" % local_filename)
    MsgUser.debug(
        "Downloading to file %s "
        "(this may take some time)." % (local_filename))
    MsgUser.message(
        "Downloading...")
    downloaded = False
    while downloaded is False:
        try:
            file_url = '/'.join(
                (Settings.mirror.rstrip('/'), details['filename']))
            download_file(
                url=file_url,
                localf=local_filename)
            if (not skip_verify and
                    (details['checksum'] !=
                     file_checksum(local_filename, details['checksum_type']))):
                raise DownloadError('Downloaded file fails checksum')
            MsgUser.ok("File downloaded")
        except DownloadFileError as e:
            MsgUser.debug(str(e))
            # Fall back to the main download site once before giving up
            if Settings.mirror != Settings.main_mirror:
                MsgUser.warning(
                    "Download from mirror failed, re-trying from "
                    "main FSL download site")
                Settings.mirror = Settings.main_mirror
            else:
                raise DownloadError(str(e))
        else:
            downloaded = True
    return (local_filename, version, details)
class DownloadError(Exception):
    '''Raised when a release/extra download cannot be completed.'''
    pass
def shell_config(shell, fsldir, skip_root=False):
    '''Return (env_lines, match, replace) profile text for a shell.

    env_lines -- block of profile text with %s substituted by fsldir
    match -- line prefix used to locate an existing FSLDIR entry
    replace -- replacement line (with fsldir substituted)
    skip_root -- wrap the setup so it is skipped for the root user
    Raises ValueError for an unrecognised shell.
    '''
    MsgUser.debug("Building environment for %s" % (shell))
    env_lines = ''
    if shell in BOURNE_SHELLS:
        if skip_root:
            env_lines += '''if [ -x /usr/bin/id ]; then
if [ -z "$EUID" ]; then
# ksh and dash doesn't setup the EUID environment var
EUID=`id -u`
fi
fi
if [ "$EUID" != "0" ]; then
'''
        env_lines += '''
# FSL Setup
FSLDIR=%s
PATH=${FSLDIR}/bin:${PATH}
export FSLDIR PATH
. ${FSLDIR}/etc/fslconf/fsl.sh
'''
        if skip_root:
            env_lines += '''fi'''
        match = "FSLDIR="
        replace = "FSLDIR=%s"
    elif shell in C_SHELLS:
        if skip_root:
            env_lines += '''if ( $uid != 0 ) then
'''
        env_lines += '''
# FSL Setup
setenv FSLDIR %s
setenv PATH ${FSLDIR}/bin:${PATH}
source ${FSLDIR}/etc/fslconf/fsl.csh
'''
        if skip_root:
            env_lines += '''
endif'''
        match = "setenv FSLDIR"
        replace = "setenv FSLDIR %s"
    elif shell == 'matlab':
        env_lines = '''
%% FSL Setup
setenv( 'FSLDIR', '%s' );
setenv('FSLOUTPUTTYPE', 'NIFTI_GZ');
fsldir = getenv('FSLDIR');
fsldirmpath = sprintf('%%s/etc/matlab',fsldir);
path(path, fsldirmpath);
clear fsldir fsldirmpath;
'''
        match = "setenv( 'FSLDIR',"
        replace = "setenv( 'FSLDIR', '%s' );"
    else:
        raise ValueError("Unknown shell type %s" % shell)
    return (env_lines % (fsldir), match, replace % (fsldir))
def get_profile(shell):
    '''Return the path of the startup file FSL should edit for shell.
    Raises ValueError for an unsupported shell.'''
    home = os.path.expanduser("~")

    def in_home(fname):
        return os.path.join(home, fname)

    def prefer(primary, fallback):
        # Use the fallback when the primary is absent but the fallback exists
        if not os.path.isfile(primary) and os.path.isfile(fallback):
            return fallback
        return primary

    dotprofile = in_home('.profile')
    if shell == 'bash':
        return prefer(in_home('.bash_profile'), dotprofile)
    if shell == 'zsh':
        # ZSH will never source .profile
        return in_home('.zprofile')
    if shell == 'sh':
        return dotprofile
    cshprofile = in_home('.cshrc')
    if shell == 'csh':
        return cshprofile
    if shell == 'tcsh':
        return prefer(in_home('.tcshrc'), cshprofile)
    raise ValueError("Unsupported shell")
class FixFslDirError(Exception):
    '''Raised when an existing FSLDIR profile entry cannot be edited.'''
    pass
def fix_fsldir(shell, fsldir):
    '''Rewrite the FSLDIR line in the user's shell profile.'''
    config = shell_config(shell, fsldir)
    match = config[1]
    replace = config[2]
    profile = get_profile(shell)
    MsgUser.debug(
        "Editing %s, replacing line beginning:%s with %s." %
        (profile, match, replace))
    try:
        edit_file(profile, line_starts_replace, match, replace, False)
    except EditFileError as e:
        raise FixFslDirError(str(e))
class AddFslDirError(Exception):
    '''Raised when the FSL block cannot be appended to a profile.'''
    pass
def add_fsldir(shell, fsldir):
    '''Append the FSL environment block to the user's shell profile.'''
    env_lines = shell_config(shell, fsldir)[0]
    profile = get_profile(shell)
    MsgUser.debug("Adding %s to %s" % (env_lines, profile))
    try:
        add_to_file(profile, env_lines, False)
    except AddToFileError as e:
        raise AddFslDirError(str(e))
class ConfigureMatlabError(Exception):
    '''Raised when MATLAB's startup.m cannot be created/updated.'''
    pass
class ConfigureMatlabWarn(Exception):
    '''Non-fatal: MATLAB appears not to be installed.'''
    pass
def configure_matlab(fsldir, m_startup='', c_file=True):
    '''Setup your startup.m file to enable FSL MATLAB functions to work

    m_startup -- path to startup.m (defaults to ~/Documents/MATLAB/startup.m)
    c_file -- create startup.m when it does not exist
    Raises ConfigureMatlabError on failure, ConfigureMatlabWarn when
    MATLAB does not appear to be installed.
    '''
    (mlines, match, replace) = shell_config('matlab', fsldir)
    if m_startup == '':
        m_startup = os.path.join(
            os.path.expanduser('~'), 'Documents', 'MATLAB', 'startup.m')
    if os.path.exists(m_startup):
        # Check if already configured
        MsgUser.debug("Looking for %s in %s" % (match, m_startup))
        if file_contains(m_startup, match):
            try:
                MsgUser.debug('Updating MATLAB startup file.')
                edit_file(
                    m_startup, line_starts_replace,
                    match, replace, False)
            except EditFileError as e:
                raise ConfigureMatlabError(str(e))
        else:
            MsgUser.debug('Adding FSL settings to MATLAB.')
            try:
                add_to_file(m_startup, mlines, False)
            except AddToFileError as e:
                raise ConfigureMatlabError(str(e))
    elif c_file:
        # No startup.m file found. Create one
        try:
            MsgUser.debug('No MATLAB startup.m file found, creating one.')
            if not os.path.isdir(os.path.dirname(m_startup)):
                os.mkdir(os.path.dirname(m_startup))
            create_file(m_startup, mlines, False)
        except (OSError, CreateFileError) as e:
            # BUG FIX: the debug string used '(%).' which is an invalid
            # conversion specifier and raised ValueError; use '(%s)'.
            MsgUser.debug(
                'Unable to create ~/Documents/MATLAB/ folder or startup.m file,'
                ' cannot configure (%s).' % (str(e)))
            raise ConfigureMatlabError(
                "Unable to create your ~/Documents/MATLAB/ folder or startup.m, "
                "so cannot configure MATLAB for FSL.")
    else:
        MsgUser.debug('MATLAB may not be installed, doing nothing.')
        raise ConfigureMatlabWarn("I can't tell if you have MATLAB installed.")
class SetupEnvironmentError(Exception):
    '''Raised when the shell environment cannot be configured.'''
    pass
class SetupEnvironmentSkip(Exception):
    '''Non-fatal: environment files that were already configured.'''
    pass
def setup_system_environment(fsldir):
    '''Add a system-wide profile setting up FSL for all users.
    Only supported on Redhat/Centos

    Creates or fixes /etc/profile.d/fsl.{sh,csh}. Accumulated errors
    are raised as SetupEnvironmentError; already-configured files are
    reported via SetupEnvironmentSkip.
    '''
    profile_d = '/etc/profile.d'
    profile_files = ['fsl.sh', 'fsl.csh']
    exceptions = []
    skips = []
    # Not running as root: file edits will need sudo
    if os.getuid() != 0:
        sudo = True
    else:
        sudo = False
    if os.path.isdir(profile_d):
        for profile in profile_files:
            # shell name is the profile file's extension ('sh'/'csh')
            pf = profile.split('.')[1]
            (lines, match, replace) = shell_config(pf, fsldir)
            this_profile = os.path.join(profile_d, profile)
            if os.path.exists(this_profile):
                # Already has a profile file
                # Does it contain an exact match for current FSLDIR?
                match = file_contains_1stline(this_profile, replace)
                # NOTE(review): this branch looks inverted - an exact
                # match triggers an edit (a no-op?) while a missing
                # match is treated as 'already configured'; confirm.
                if match != '':
                    # If there is an fsl.(c)sh then just fix
                    # the entry for FSLDIR
                    MsgUser.debug(
                        "Fixing %s for FSLDIR location." % (this_profile))
                    try:
                        edit_file(
                            this_profile, line_starts_replace,
                            match, replace, sudo)
                    except EditFileError as e:
                        exceptions.append(str(e))
                else:
                    # No need to do anything
                    MsgUser.debug(
                        "%s already configured - skipping." %
                        (this_profile))
                    skips.append(profile)
            else:
                # Create the file
                try:
                    create_file(this_profile, lines, sudo)
                except CreateFileError as e:
                    exceptions.append(str(e))
    else:
        raise SetupEnvironmentError(
            "No system-wide configuration folder found - Skipped")
    if exceptions:
        raise SetupEnvironmentError(".".join(exceptions))
    if skips:
        raise SetupEnvironmentSkip(".".join(skips))
def setup_environment(fsldir=None, system=False, with_matlab=False):
    '''Setup the user's environment so that their
    terminal finds the FSL tools etc.

    fsldir -- FSL install folder (discovered via get_fsldir() if None)
    system -- not referenced in this function body - TODO confirm
    with_matlab -- also configure MATLAB's startup.m
    Raises SetupEnvironmentError on failure.
    '''
    # Check for presence of profile file:
    if fsldir is None:
        fsldir = get_fsldir()
    user_shell = which_shell()
    MsgUser.debug("User's shell is %s" % (user_shell))
    try:
        (profile_lines, _, _) = shell_config(user_shell, fsldir)
        profile = get_profile(user_shell)
    except ValueError as e:
        raise SetupEnvironmentError(str(e))
    cfile = False
    if not os.path.isfile(profile):
        MsgUser.debug("User is missing a shell setup file.")
        cfile = True
    if cfile:
        MsgUser.debug("Creating file %s" % (profile))
        try:
            create_file(profile, profile_lines, False)
        except CreateFileError as e:
            raise SetupEnvironmentError(
                "Unable to create profile %s" % (profile))
    else:
        # Check if user already has FSLDIR set
        MsgUser.message("Setting up FSL software...")
        try:
            if file_contains(profile, "FSLDIR"):
                MsgUser.debug("Updating FSLDIR entry.")
                fix_fsldir(user_shell, fsldir)
            else:
                MsgUser.debug("Adding FSLDIR entry.")
                add_fsldir(user_shell, fsldir)
        except (AddFslDirError, FixFslDirError) as e:
            raise SetupEnvironmentError(
                "Unable to update your profile %s"
                " with FSL settings" % (profile))
    if with_matlab:
        MsgUser.debug("Setting up MATLAB")
        try:
            configure_matlab(fsldir)
        except ConfigureMatlabError as e:
            MsgUser.debug(str(e))
            raise SetupEnvironmentError(str(e))
        except ConfigureMatlabWarn as e:
            # MATLAB missing is not fatal - report as skipped
            MsgUser.skipped(str(e))
class PostInstallError(Exception):
    """Raised when a post-installation task fails (see post_install)."""
    pass
class InstallArchiveError(Exception):
    """Raised when unpacking/installing an FSL archive fails (see install_archive)."""
    pass
class UnknownArchiveType(Exception):
    """Raised when an archive's file type cannot be determined (see archive_type)."""
    pass
def archive_type(archive):
    '''Determine file type based on extension and check
    that file looks like this file type.

    Runs the system `file` utility on the archive and returns a
    (unarchiver, option) tuple: ('tar', '-z'), ('tar', '-j') or
    ('zip', '').
    Raises UnknownArchiveType if `file` fails or reports none of the
    supported formats.
    '''
    archive_types = {
        'gzip': ('tar', '-z'),
        'bzip2': ('tar', '-j'),
        'zip': ('zip', ''), }
    try:
        # Quote the path so archives in folders containing spaces work
        file_type = run_cmd('file "%s"' % (archive))
    except RunCommandError as e:
        raise UnknownArchiveType(str(e))
    file_type = file_type.lower()
    # `file` output mentions the compression scheme by name
    for f_type in ('gzip', 'bzip2', 'zip', ):
        if f_type in file_type:
            return archive_types[f_type]
    raise UnknownArchiveType(archive)
def asl_gui_604_patch(fsldir, as_root=False):
    '''
    fsl 6.0.4 shipped with a broken fsleyes preview in asl_gui.
    This function applies the simple patch to any new installation
    that downloads FSL 6.0.4 using the fslinstaller.
    1. parse fsl version
    2. if version == 6.0.4 apply asl_gui patch, else do nothing and return
    to test this patch with an existing fsl 6.0.4:
    1. make a minimal $FSLDIR folder structure
    - cd ~
    - mkdir fsl_test
    - cd fsl_test
    - mkdir fsl
    - cp -r $FSLDIR/etc fsl/
    - cp -r $FSLDIR/python fsl/
    - mkdir fsl/bin
    2. tar it up
    - tar -czf fsl-6.0.4-centos7_64.tar.gz fsl
    - rm -r fsl # remove the fsl folder after tar-ing
    3. run a test python install from the tar file
    - be sure to use python 2.X (e.g. 2.7 works fine)
    - python fslinstaller.py -f ~/fsl_test/fsl-6.0.4-centos7_64.tar.gz -d ~/fsl_test/fsl -p -M -D
    '''
    # File that contains the broken fsleyes preview call
    asl_file = os.path.join(fsldir, 'python', 'oxford_asl', 'gui', 'preview_fsleyes.py') #$FSLDIR/python/oxford_asl/gui/preview_fsleyes.py
    # NOTE(review): assumes $FSLDIR/etc/fslversion exists — open() will
    # raise IOError/OSError otherwise; confirm callers guarantee this.
    vfile = os.path.join(fsldir, 'etc', 'fslversion')
    vstring = ''
    with open(vfile, 'r') as f:
        vstring = f.readline()
    v = vstring.split(':')[0] # e.g. 6.0.4:wkj2w3jh
    if v == '6.0.4':
        MsgUser.message("Patching asl_gui for fsl 6.0.4")
        # Work on a copy in a fresh temp folder; the folder itself is
        # never removed below (only tfile is) — known small leak.
        tfile = os.path.join(tempfile.mkdtemp(), "preview_fsleyes.py")
        # backup asl_file
        run_cmd_displayoutput('cp {} {}.bkup'.format(asl_file, asl_file), as_root=as_root)
        # copy asl_file to tempfile
        run_cmd_displayoutput('cp {} {}'.format(asl_file, tfile), as_root=as_root)
        # ensure script can open temp file
        run_cmd_displayoutput('chmod 775 {}'.format(tfile), as_root=as_root)
        # Rewrite the offending argument list in place; print() writes the
        # (possibly substituted) line back because inplace=True redirects stdout
        for line in fileinput.input(files=tfile, inplace=True):
            line = re.sub('parent=parent, ready=ready', 'ready=ready, raiseErrors=True', line.rstrip())
            print(line)
        # Copy the patched file back over the original
        run_cmd_displayoutput('cp {} {}'.format(tfile, asl_file), as_root=as_root)
        os.remove(tfile)
def post_install(
        fsldir, settings, script="post_install.sh", quiet=False,
        app_links=False, x11=False):
    '''Run the post-installation tasks for a freshly installed FSL tree.

    fsldir    -- the installed FSL folder.
    settings  -- installer Settings instance (x11 config, app link list).
    script    -- name of the optional post-install shell script shipped
                 inside the FSL tree.
    quiet     -- pass -q through to the post-install script.
    app_links -- create /Applications symlinks (macOS).
    x11       -- verify an X11 server is available (macOS GUIs).
    Raises PostInstallError if the target is unwritable or the script fails.
    '''
    MsgUser.message("Performing post install tasks")
    # Figure out whether later shell commands need sudo
    if is_writeable(fsldir):
        as_root = False
    elif is_writeable_as_root(fsldir):
        as_root = True
    else:
        raise PostInstallError(
            "Unable to write to target folder (%s)" % (fsldir))
    install_installer(fsldir)
    # apply asl_gui patch if fsl 6.0.4
    asl_gui_604_patch(fsldir, as_root=as_root)
    script_path = os.path.join(fsldir, Settings.post_inst_dir, script)
    if x11:
        try:
            check_X11(settings.x11)
        except CheckX11Warning as e:
            # Missing/bad X11 is a warning, not fatal — GUIs just won't run
            MsgUser.warning(str(e))
        else:
            MsgUser.ok("X11 (required for GUIs) found")
    if os.path.exists(script_path):
        MsgUser.debug("Found post-install script %s" % (script_path))
        if not os.access(script_path, os.X_OK):
            raise PostInstallError(
                "Unable to run post install script %s" % (script_path)
            )
        script_opts = '-f "%s"' % (fsldir)
        if quiet:
            script_opts += " -q"
        command_line = " ".join((script_path, script_opts))
        try:
            run_cmd_displayoutput(command_line, as_root=as_root)
        except RunCommandError as e:
            raise PostInstallError(
                "Error running post installation script (error %s)"
                " - check the install log" % (str(e))
            )
        # Work around for mistake in 5.0.10 post setup script
        mal = os.path.join(
            fsldir, Settings.post_inst_dir,
            'make_applications_links.sh')
        # Only keep app_links enabled when the 5.0.10 workaround applies:
        # the links script exists but the post-install script never calls it
        if (os.path.exists(mal) and
                not file_contains(script_path, "make_applications_links.sh")):
            MsgUser.debug(
                "Work around necessary for missing app link creation")
        else:
            app_links = False
    if app_links:
        try:
            make_applications_links(fsldir, settings.applications)
        except MakeApplicationLinksError as e:
            # Per-app failures are reported individually but not fatal
            for message in list(e.app_messages.values()):
                MsgUser.warning(message)
        else:
            MsgUser.ok("/Applications links created/updated")
    MsgUser.ok("Post installation setup complete")
def install_archive(archive, fsldir=None):
    '''Unpack an FSL archive into place, backing up or deleting any
    existing installation first.

    archive -- path to a pre-downloaded FSL tar/zip archive.
    fsldir  -- target install folder; prompted for if falsy.
    Returns the final fsldir. Raises InstallError/InstallArchiveError
    on failure.
    '''
    def clean_up_temp():
        # Best-effort removal of the temporary unpack folder; failure is
        # only logged because the install itself may already have succeeded
        try:
            safe_delete(tempfolder, as_root)
        except SafeDeleteError as sd_e:
            MsgUser.debug(
                "Unable to clean up temporary folder! "
                "%s" % (str(sd_e)))
    if not os.path.isfile(archive):
        raise InstallError("%s isn't a file" % (archive))
    if not fsldir:
        try:
            fsldir = get_fsldir(specified_dir=fsldir, install=True)
        except GetFslDirError as e:
            raise InstallError(str(e))
    MsgUser.debug("Requested install of %s as %s" % (archive, fsldir))
    if os.path.exists(fsldir):
        # move old one out of way
        MsgUser.debug("FSL version already installed")
        keep_old = Settings.inst_qus.ask_question('del_old')
    else:
        keep_old = False
    install_d = os.path.dirname(fsldir)
    MsgUser.debug("Checking %s is writeable." % (install_d))
    if is_writeable(install_d):
        as_root = False
    elif is_writeable_as_root(install_d):
        as_root = True
    else:
        raise InstallArchiveError(
            "Unable to write to target folder (%s), "
            "even as a super user." % (install_d))
    MsgUser.debug("Does %s require root for deletion? %s" % (
        install_d, as_root))
    try:
        unarchive, ua_option = archive_type(archive)
    except UnknownArchiveType as e:
        raise InstallArchiveError(str(e))
    # Generate a temporary name - eg fsl-<mypid>-date
    tempname = '-'.join(('fsl', str(os.getpid()), str(time.time())))
    tempfolder = os.path.join(install_d, tempname)
    try:
        run_cmd_dropstdout("mkdir %s" % (tempfolder), as_root=as_root)
    except RunCommandError as e:
        raise InstallArchiveError(
            "Unable to create folder to install into.")
    MsgUser.debug(
        "Unpacking %s into folder %s." % (archive, tempfolder))
    # Everything from unpack to final move runs under one try so that any
    # InstallError triggers temp-folder cleanup before re-raising.
    # NOTE(review): InstallArchiveError raised inside this block is NOT
    # caught below, so the temp folder leaks on unpack failure — confirm.
    try:
        if unarchive == 'tar':
            unpack_cmd = 'tar -C %s -x %s -o -f %s' % (
                tempfolder, ua_option, archive)
        elif unarchive == 'zip':
            MsgUser.debug(
                "Calling unzip %s %s" % (ua_option, archive)
            )
            unpack_cmd = 'unzip %s %s' % (ua_option, archive)
        try:
            run_cmd_dropstdout(unpack_cmd, as_root=as_root)
        except RunCommandError as e:
            raise InstallArchiveError("Unable to unpack FSL.")
        new_fsl = os.path.join(tempfolder, 'fsl')
        if os.path.exists(fsldir):
            # move old one out of way
            try:
                old_version = get_installed_version(fsldir)
            except (NotAFslVersion, GetInstalledVersionError) as e:
                if keep_old:
                    old_version = Version('0.0.0')
                    MsgUser.warning(
                        "The contents of %s doesn't look like an "
                        "FSL installation! - "
                        "moving to fsl-0.0.0" % (fsldir))
            # NOTE(review): if the version lookup raised and keep_old is
            # False, old_version is unbound here → NameError; confirm
            # whether that path is reachable in practice.
            old_fsl = '-'.join((fsldir, str(old_version)))
            if os.path.exists(old_fsl):
                MsgUser.debug(
                    "Looks like there is another copy of the "
                    "old version of FSL - deleting...")
                try:
                    safe_delete(old_fsl, as_root)
                except SafeDeleteError as e:
                    raise InstallError(
                        ";".join((
                            "Install location already has a "
                            "%s - I've tried to delete it but"
                            " failed" % (old_fsl), str(e))))
            if keep_old:
                try:
                    MsgUser.debug(
                        "Moving %s to %s" % (fsldir, old_fsl))
                    move(fsldir, old_fsl, as_root)
                    MsgUser.message(
                        '''You can find your archived version of FSL in %s.
If you wish to restore it, remove %s and rename %s to %s''' % (
                            old_fsl, fsldir, old_fsl, fsldir))
                except MoveError as mv_e:
                    # failed to move the old version
                    MsgUser.debug(
                        "Failed to move old version "
                        "- %s" % (str(mv_e)))
                    raise InstallError(
                        "Failed to backup old version (%s)" % (str(mv_e)))
            else:
                MsgUser.debug("Removing existing FSL install")
                try:
                    safe_delete(fsldir, as_root)
                    MsgUser.debug("Deleted %s." % (fsldir))
                except SafeDeleteError as e:
                    raise InstallError(
                        "Failed to delete %s - %s." % (fsldir, str(e)))
        else:
            old_fsl = ''
        try:
            MsgUser.debug("Moving %s to %s" % (new_fsl, fsldir))
            move(new_fsl, fsldir, as_root)
        except MoveError as e:
            # Unable to move new install into place
            MsgUser.debug(
                "Move failed - %s." % (str(e)))
            raise InstallError(
                'Failed to move new version into place.')
    except InstallError as e:
        clean_up_temp()
        raise InstallArchiveError(str(e))
    clean_up_temp()
    MsgUser.debug("Install complete")
    MsgUser.ok("FSL software installed.")
    return fsldir
def check_for_updates(url, fsldir, requested_v=None):
    '''Compare the installed FSL version against the latest (or a
    requested) release.

    Returns (UPGRADE, version) when a new major release exists,
    (UPDATE, version) for a same-major newer release, or
    (CURRENT, None) when already up to date.
    Raises InstallError if no install is found or requested_v is invalid.
    '''
    # Start an update
    MsgUser.message("Looking for new version.")
    try:
        this_version = get_installed_version(fsldir)
    except GetInstalledVersionError as e:
        # We can't find an installed version of FSL!
        raise InstallError(str(e))
    MsgUser.debug("You have version %s" % (this_version))
    # Work out which remote version to compare against
    if requested_v:
        try:
            available = Version(requested_v)
        except NotAFslVersion:
            raise InstallError(
                "%s doesn't look like a version" % requested_v)
    else:
        available = Version(latest_release(url)['version'])
    if available <= this_version:
        return (CURRENT, None)
    # Update Available: a major-version jump means patching is not
    # supported, so a fresh download is needed
    if available.major > this_version.major:
        return (UPGRADE, available)
    return (UPDATE, available)
class MakeApplicationLinksError(Exception):
    """Raised when one or more /Applications symlinks could not be made.

    app_messages maps each failed app path to a human-readable message.
    Callers iterate app_messages.values(), so the no-argument fallback
    must be a dict (the previous empty-list fallback would crash them).
    """
    def __init__(self, *args):
        super(MakeApplicationLinksError, self).__init__(*args)
        try:
            self.app_messages = args[0]
        except IndexError:
            # BUG FIX: was [], but consumers call .values() on this
            self.app_messages = {}
def make_applications_links(fsldir, apps):
    '''Create symlinks in /Applications for the given FSL apps.

    fsldir -- FSL install folder.
    apps   -- iterable of app paths relative to fsldir.
    Existing correct links are left alone; stale links are replaced;
    non-link files are never touched. Raises MakeApplicationLinksError
    carrying a dict of per-app failure messages if anything went wrong.
    '''
    MsgUser.message("Creating Application links...")
    results = {}
    for app in apps:
        app_location = os.path.join('/Applications', os.path.basename(app))
        app_target = os.path.join(fsldir, app)
        create_link = True
        MsgUser.debug("Looking for existing link %s" % (app_location))
        if os.path.lexists(app_location):
            MsgUser.debug(
                "Is a link: %s; realpath: %s" % (
                    os.path.islink(app_location),
                    os.path.realpath(app_location)))
            if os.path.islink(app_location):
                MsgUser.debug("A link already exists.")
                if os.path.realpath(app_location) != app_target:
                    # Link points at an old install - replace it
                    MsgUser.debug(
                        "Deleting old (incorrect) link %s" % (app_location))
                    try:
                        run_cmd_dropstdout("rm " + app_location, as_root=True)
                    except RunCommandError as e:
                        MsgUser.debug(
                            "Unable to remove broken"
                            " link to %s (%s)." % (app_target, str(e)))
                        results[app] = 'Unable to remove broken link to %s' % (
                            app_target)
                        create_link = False
                else:
                    MsgUser.debug("Link is correct, skipping.")
                    create_link = False
            else:
                # A real file/folder lives there - leave it untouched
                MsgUser.debug(
                    "%s doesn't look like a symlink, "
                    "so let's not delete it." % (app_location))
                results[app] = (
                    "%s is not a link so hasn't been updated to point at the "
                    "new FSL install.") % (app_location)
                create_link = False
        if create_link:
            MsgUser.debug('Create a link for %s' % (app))
            if os.path.exists(app_target):
                try:
                    run_cmd_dropstdout(
                        "ln -s %s %s" % (app_target, app_location),
                        as_root=True)
                except RunCommandError as e:
                    MsgUser.debug(
                        "Unable to create link to %s (%s)." % (
                            app_target, str(e)))
                    results[app] = (
                        'Unable to create link to %s.') % (app_target)
            else:
                # BUG FIX: the '%' operator was previously applied to the
                # *return value* of MsgUser.debug(...) (i.e. None % tuple),
                # raising a TypeError at runtime. Format inside the call.
                MsgUser.debug(
                    'Unable to find application'
                    ' %s to link to.' % (app_target))
    if results:
        raise MakeApplicationLinksError(results)
class CheckX11Warning(Exception):
    """Non-fatal warning raised when X11 is missing or unverifiable (see check_X11)."""
    pass
def check_X11(x11):
    '''Function to find X11 install on Mac OS X and confirm it is compatible.
    Advise user to download Xquartz if necessary.

    x11 -- dict with keys 'apps' (candidate app bundle names),
           'location' (folder to look in), 'bad_versions' (blacklist)
           and 'download_url' (where to get XQuartz).
    Raises CheckX11Warning (never fatal) when X11 is absent, the
    version cannot be read, or the version is blacklisted.
    '''
    MsgUser.message(
        "Checking for X11 windowing system (required for FSL GUIs).")
    xbin = ''
    # Take the last candidate app that exists on disk
    for x in x11['apps']:
        if os.path.exists(os.path.join(x11['location'], x)):
            xbin = x
    if xbin != '':
        # Find out what version is installed via Spotlight metadata
        x_v_cmd = [
            '/usr/bin/mdls', '-name',
            'kMDItemVersion', os.path.join(x11['location'], xbin)]
        try:
            cmd = Popen(x_v_cmd, stdout=PIPE, stderr=STDOUT, universal_newlines=True)
            (vstring, _) = cmd.communicate()
        except Exception as e:
            raise CheckX11Warning(
                "Unable to check X11 version (%s)" % (str(e)))
        if cmd.returncode:
            MsgUser.debug("Error finding the version of X11 (%s)" % (vstring))
            # App found, but can't tell version, warn the user
            raise CheckX11Warning(
                "X11 (required for FSL GUIs) is installed but I"
                " can't tell what the version is.")
        else:
            # Returns:
            # kMDItemVersion = "2.3.6"\n
            # so the third whitespace-separated token is the quoted version
            (_, _, version) = vstring.strip().split()
            if version.startswith('"'):
                version = version[1:-1]
            if version in x11['bad_versions']:
                raise CheckX11Warning(
                    "X11 (required for FSL GUIs) is a version that"
                    " is known to cause problems. We suggest you"
                    " upgrade to the latest XQuartz release from "
                    "%s" % (x11['download_url']))
            else:
                MsgUser.debug(
                    "X11 found and is not a bad version"
                    " (%s: %s)." % (xbin, version))
    else:
        # No X11 found, warn the user
        raise CheckX11Warning(
            "The FSL GUIs require the X11 window system which I can't"
            " find in the usual places. You can download a copy from %s"
            " - you will need to install this before the GUIs will"
            " function" % (x11['download_url']))
def do_install(options, settings):
    '''Main installer driver: dispatches on the parsed command-line
    options to list releases, download archives, update an existing
    install, or perform a full download-and-install.

    options  -- optparse Values from parse_options().
    settings -- installer Settings instance (mirrors, questions, title).
    Raises InstallError on any fatal problem.

    BUG FIXES vs previous revision: three `raise "<string>"` statements
    (download/source/FEEDS failure paths) were illegal in Python 3
    (TypeError: exceptions must derive from BaseException) and are now
    proper InstallError raises carrying the underlying error.
    '''
    MsgUser.message(
        shell_colours.bold + settings.title + shell_colours.default)
    if options.test_installer:
        settings.main_mirror = options.test_installer
    this_computer = Host
    if not this_computer.supported:
        MsgUser.debug("Unsupported host %s %s %s" % (
            this_computer.o_s,
            this_computer.arch,
            this_computer.os_type))
        raise InstallError(
            "Unsupported host - you could try building from source")
    # Per-OS feature switches
    if this_computer.o_s == "linux":
        system_environment = True
        with_matlab = False
        application_links = False
        x11 = False
    elif this_computer.o_s == "darwin":
        system_environment = False
        with_matlab = True
        application_links = True
        x11 = True
    else:
        MsgUser.debug("Unrecognised OS %s" % (this_computer.o_s))
        raise InstallError("Unrecognised OS")
    my_uid = os.getuid()

    def configure_environment(fsldir, env_all=False, skip=False, matlab=False):
        # Configure shell profiles: system-wide when env_all, otherwise
        # the current (non-root) user's profile. Failures are reported
        # but never raised - environment setup is best-effort.
        if skip:
            return
        if env_all:
            if system_environment:
                # Setup the system-wide environment
                try:
                    setup_system_environment(fsldir)
                except SetupEnvironmentError as e:
                    MsgUser.debug(str(e))
                    MsgUser.failed(
                        "Failed to configure system-wide profiles "
                        "with FSL settings: %s" % (str(e)))
                except SetupEnvironmentSkip as e:
                    MsgUser.skipped(
                        "Some shells already configured: %s" % (str(e)))
                else:
                    MsgUser.debug("System-wide profiles setup.")
                    MsgUser.ok("System-wide FSL configuration complete.")
            else:
                MsgUser.skipped(
                    "System-wide profiles not supported on this OS")
        elif my_uid != 0:
            # Setup the environment for the current user
            try:
                setup_environment(fsldir, with_matlab=matlab)
            except SetupEnvironmentError as e:
                MsgUser.debug(str(e))
                MsgUser.failed(str(e))
            else:
                MsgUser.ok(
                    "User profile updated with FSL settings, you will need "
                    "to log out and back in to use the FSL tools.")

    if my_uid != 0:
        if options.quiet:
            settings.inst_qus.defaults = True
            print('''
We may need administrator rights, but you have specified fully automated
mode - you may still be asked for an admin password if required.''')
            print('''
To install fully automatedly, either ensure this is running as the root
user (use sudo) or that you can write to the folder you wish to install
FSL in.''')
        elif (not options.download and
                not options.list_versions and
                not options.list_builds and
                not options.get_source and
                not options.get_feeds):
            MsgUser.warning(
                '''Some operations of the installer require administative rights,
for example installing into the default folder of /usr/local.
If your account is an 'Administrator' (you have 'sudo' rights)
then you will be prompted for your administrator password
when necessary.''')
    if not options.d_dir and options.quiet:
        raise InstallError(
            "Quiet mode requires you to specify the install location"
            " (e.g. /usr/local)")
    if not options.quiet and not (options.list_versions or options.list_builds):
        MsgUser.message(
            "When asked a question, the default answer is given in square "
            "brackets.\nHit the Enter key to accept this default answer.")
    if options.env_only and my_uid != 0:
        configure_environment(
            get_fsldir(specified_dir=options.d_dir),
            options.env_all)
        return
    # Install from a pre-downloaded archive (no network needed)
    if options.archive:
        if not options.skipchecksum:
            if not options.checksum:
                raise InstallError(
                    "No checksum provided and checking not disabled")
            else:
                # e.g. checksum_type 'sha256' -> helper 'sha256File'
                checksummer = globals()[options.checksum_type + 'File']
                if options.checksum != checksummer(options.archive):
                    raise InstallError("FSL archive doesn't match checksum")
                else:
                    MsgUser.ok("FSL Package looks good")
        arc_version = archive_version(options.archive)
        MsgUser.message(
            "Installing FSL software version %s..." % (arc_version))
        fsldir = install_archive(
            archive=options.archive, fsldir=options.d_dir)
        try:
            post_install(fsldir=fsldir, settings=settings, quiet=options.quiet)
        except PostInstallError as e:
            raise InstallError(str(e))
        configure_environment(
            fsldir=fsldir, env_all=options.env_all,
            skip=options.skip_env, matlab=with_matlab)
        return
    # All the following options require the Internet...
    try:
        settings.mirror = fastest_mirror(
            settings.mirrors, settings.mirrors_file)
    except SiteNotResponding as e:
        # We can't find the FSL site - possibly the internet is down
        raise InstallError(e)
    try:
        self_update(settings.mirror)
    except SelfUpdateError as e:
        MsgUser.debug("Self update error: %s" % (str(e)))
        MsgUser.warning("Error checking for updates to installer - continuing")
    if options.list_versions:
        # Download a list of available downloads from the webserver
        list_releases(settings.mirror)
        return
    if options.list_builds:
        # List all available builds
        list_builds(settings.mirror)
        return
    if options.download:
        MsgUser.debug("Attempting to download latest release")
        try:
            download_release(settings.mirror, request_version=options.requestversion,
                             skip_verify=options.skipchecksum)
        except DownloadFileError as e:
            # BUG FIX: was `raise "Unable to download release %s"`
            raise InstallError("Unable to download release %s" % (str(e)))
        return
    if options.update:
        fsldir = get_fsldir()
        status, new_v = check_for_updates(settings.mirror, fsldir=fsldir)
        if status == UPDATE:
            MsgUser.ok("Version %s available." % new_v)
            if not settings.inst_qus.ask_question('update'):
                return
        elif status == UPGRADE:
            MsgUser.ok("Version %s available." % new_v)
            if not settings.inst_qus.ask_question('upgrade'):
                return
        else:
            MsgUser.ok("FSL is up-to-date.")
            return
    if options.get_source:
        MsgUser.debug("Attempting to download source")
        try:
            download_release(
                settings.mirror,
                request_version=options.requestversion,
                skip_verify=options.skipchecksum,
                source_code=True)
        except DownloadFileError as e:
            # BUG FIX: was `raise "Unable to download source code %s"`
            raise InstallError("Unable to download source code %s" % (str(e)))
        return
    if options.get_feeds:
        MsgUser.debug("Attempting to download FEEDS")
        try:
            download_release(
                settings.mirror,
                request_version=options.requestversion,
                skip_verify=options.skipchecksum,
                feeds=True)
        except DownloadFileError as e:
            # BUG FIX: was `raise "Unable to download FEEDS %s"`
            raise InstallError("Unable to download FEEDS %s" % (str(e)))
        return
    # Default path: download the requested/latest release and install it
    try:
        (version, details) = get_web_version_and_details(
            Settings.mirror,
            request_version=options.requestversion)
        if 'redirect' in details:
            MsgUser.message("Please download FSL using the instructions here:")
            MsgUser.message("%s" % (details['redirect']))
            return
        fsldir = get_fsldir(specified_dir=options.d_dir, install=True)
        reinstall = True
        if os.path.exists(fsldir):
            inst_version = get_installed_version(fsldir)
            if inst_version == version:
                reinstall = Settings.inst_qus.ask_question('version_match')
        if reinstall:
            (fname, version, details) = download_release(
                Settings.mirror,
                to_temp=True,
                request_version=options.requestversion,
                skip_verify=options.skipchecksum)
            if not details['supported']:
                MsgUser.debug(
                    "This OS is not officially supported -"
                    " you may experience issues"
                )
            MsgUser.debug(
                "Installing %s from %s (details: %s)" % (
                    fname, version, details))
            MsgUser.message(
                "Installing FSL software version %s..." % (version))
            install_archive(
                archive=fname, fsldir=fsldir)
            try:
                safe_delete(fname)
            except SafeDeleteError as e:
                MsgUser.debug(
                    "Unable to delete downloaded package %s ; %s" % (
                        fname, str(e)))
            if details['notes']:
                MsgUser.message(details['notes'])
            try:
                post_install(
                    fsldir=fsldir, settings=settings,
                    quiet=options.quiet, x11=x11,
                    app_links=application_links)
            except PostInstallError as e:
                raise InstallError(str(e))
    except DownloadError as e:
        MsgUser.debug("Unable to download FSL %s" % (str(e)))
        raise InstallError("Unable to download FSL")
    except InstallArchiveError as e:
        MsgUser.debug("Unable to unpack FSL ; %s" % (str(e)))
        raise InstallError("Unable to unpack FSL - %s" % (str(e)))
    configure_environment(
        fsldir=fsldir, env_all=options.env_all,
        skip=options.skip_env, matlab=with_matlab)
    if details['notes']:
        MsgUser.message(details['notes'])
def parse_options(args):
    '''Build the installer's optparse parser and parse `args`.

    args -- list of command-line argument strings (sys.argv[1:]).
    Returns the (options, positional_args) pair from OptionParser.parse_args.
    '''
    usage = "usage: %prog [options]"
    # `version` here is the module-level installer version string
    ver = "%%prog %s" % (version)
    parser = OptionParser(usage=usage, version=ver)
    parser.add_option("-d", "--dest", dest="d_dir",
                      help="Install into folder given by DESTDIR - "
                      "e.g. /usr/local/fsl",
                      metavar="DESTDIR", action="store",
                      type="string")
    parser.add_option("-e", dest="env_only",
                      help="Only setup/update your environment",
                      action="store_true")
    parser.add_option("-E", dest="env_all",
                      help="Setup/update the environment for ALL users",
                      action="store_true")
    parser.add_option("-v", help="Print version number and exit",
                      action="version")
    parser.add_option("-c", "--checkupdate", dest='update',
                      help="Check for FSL updates -"
                      " needs an internet connection",
                      action="store_true")
    # Hidden option (-o): download-only mode
    parser.add_option("-o", "--downloadonly", dest="download",
                      help=SUPPRESS_HELP,
                      action="store_true")
    advanced_group = OptionGroup(
        parser, "Advanced Install Options",
        "These are advanced install options")
    advanced_group.add_option(
        "-l", "--listversions", dest="list_versions",
        help="List available versions of FSL",
        action="store_true")
    advanced_group.add_option(
        "-b", "--listbuilds", dest="list_builds",
        help="List available FSL builds",
        action="store_true")
    advanced_group.add_option(
        "-B", "--fslbuild", dest="requestbuild",
        help="Download the specific FSLBUILD of FSL",
        metavar="FSLBUILD", action="store",
        type="string")
    advanced_group.add_option(
        "-V", "--fslversion", dest="requestversion",
        help="Download the specific version FSLVERSION of FSL",
        metavar="FSLVERSION", action="store",
        type="string")
    advanced_group.add_option(
        "-s", "--source", dest="get_source",
        help="Download source code for FSL",
        action="store_true")
    advanced_group.add_option(
        "-F", "--feeds", dest="get_feeds",
        help="Download FEEDS",
        action="store_true")
    advanced_group.add_option(
        "-q", "--quiet", dest='quiet',
        help="Silence all messages - useful if scripting install",
        action="store_true")
    advanced_group.add_option(
        "-p", dest="skip_env",
        help="Don't setup the environment",
        action="store_true")
    parser.add_option_group(advanced_group)
    debug_group = OptionGroup(
        parser, "Debugging Options",
        "These are for use if you have a problem running this installer.")
    debug_group.add_option(
        "-f", "--file", dest="archive",
        help="Install a pre-downloaded copy of the FSL archive",
        metavar="ARCHIVEFILE", action="store",
        type="string")
    debug_group.add_option(
        "-C", "--checksum", dest="checksum",
        help="Supply the expected checksum for the pre-downloaded FSL archive",
        metavar="CHECKSUM", action="store",
        type="string")
    debug_group.add_option(
        "-T", "--checksum-type", dest="checksum_type",
        default="sha256",
        help="Specify the type of checksum",
        action="store",
        type="string")
    debug_group.add_option(
        "-M", "--nochecksum", dest="skipchecksum",
        help="Don't check the pre-downloaded FSL archive",
        action="store_true")
    debug_group.add_option(
        "-D", dest="verbose",
        help="Switch on debug messages",
        action="store_true")
    # Hidden option (-G): point the installer at a test mirror
    debug_group.add_option(
        "-G", dest="test_installer",
        help=SUPPRESS_HELP,
        action="store",
        type="string")
    parser.add_option_group(debug_group)
    return parser.parse_args(args)
def override_host(requestbuild):
    '''Overrides attributes of the Host class in the event that the user
    has requested a specific FSL build.

    Known build names set Host to the matching (o_s, arch, vendor,
    version, glibc) combination and mark it supported/64-bit. With no
    requested build, Apple-silicon Macs are switched to the x86_64
    build, which runs fine under emulation.
    '''
    # (o_s, arch, vendor, version-string, glibc) per known build name
    build_specs = {
        'centos7_64': ('linux', 'x86_64', 'centos', '7.8.2003', '2.2.5'),
        'centos6_64': ('linux', 'x86_64', 'centos', '6.10', '2.2.5'),
        'macOS_64': ('darwin', 'x86_64', 'apple', '19.6.0', ''),
    }
    spec = build_specs.get(requestbuild)
    if spec is not None:
        (Host.o_s, Host.arch, Host.vendor, version_string, Host.glibc) = spec
        Host.version = Version(version_string)
        Host.supported = True
        Host.bits = '64'
    # Download x86 version if running on Apple
    # M1, as it runs just fine under emulation
    elif (requestbuild is None and
            Host.o_s == 'darwin' and
            Host.arch == 'arm64'):
        Host.arch = 'x86_64'
def main(argv=None):
    '''Installer entry point: parse options, apply host overrides, run
    do_install, and translate known exceptions into exit status 1.

    argv -- argument list; defaults to sys.argv[1:].
    '''
    if argv is None:
        argv = sys.argv[1:]
    (options, args) = parse_options(argv)
    if options.verbose:
        MsgUser.debugOn()
        print(options)
    if options.quiet:
        MsgUser.quietOn()
    override_host(options.requestbuild)
    installer_settings = Settings()
    try:
        do_install(options, installer_settings)
    except BadVersion as e:
        MsgUser.debug(str(e))
        MsgUser.failed("Unable to find requested version!")
        sys.exit(1)
    except (InstallError, GetFslDirError, GetInstalledVersionError) as e:
        MsgUser.failed(str(e))
        sys.exit(1)
    except UnsupportedOs as e:
        MsgUser.failed(str(e))
        sys.exit(1)
    except KeyboardInterrupt as e:
        # Ctrl-C: report a clean abort instead of a traceback
        MsgUser.message('')
        MsgUser.failed("Install aborted.")
        sys.exit(1)
if __name__ == '__main__':
    main()
| Python |
3D | Aswendt-Lab/AIDAmri | bin/__init__.py | .py | 33 | 2 | from PV2NIfTiConverter import *
| Python |
3D | Aswendt-Lab/AIDAmri | bin/conv2Nifti_auto.py | .py | 20,037 | 478 | """
Created on 18/10/2023
@author: Marc Schneider
AG Neuroimaging and Neuroengineering of Experimental Stroke
Department of Neurology, University Hospital Cologne
This script automates the conversion from the raw bruker data format to the NIfTI
format for the whole dataset using brkraw. The raw
data needs to be stored in one folder.
All the data which is contained in the input folder will be converted to nifti. During the processing a new folder called proc_data is being
created in the same directory where the raw data folder is located. If you wish to save the output elsewhere you can specify the output directory with the -o flag when starting the script.
Example:
python conv2Nifti_auto.py -i /Volumes/Desktop/MRI/raw_data -o /Volumes/Desktop/MRI//proc_data
"""
import os
import csv
import json
import pandas as pd
import nibabel as nii
import glob as glob
from pathlib import Path
import numpy as np
import re
import concurrent.futures
from PV2NIfTiConverter import P2_IDLt2_mapping
import functools
import subprocess
import shlex
import logging
import shutil
import openpyxl
def create_slice_timings(method_file, scanid, out_file):
    """Parse a Bruker `method` file and write slice-timing metadata into
    the scan's JSON sidecar.

    method_file -- path to the Bruker `method` parameter file.
    scanid      -- scan identifier stored alongside the timings.
    out_file    -- expected JSON sidecar path; if absent, every *.json in
                   its folder is updated instead.

    BUG FIX: the ObjOrderScheme value kept its trailing newline, so the
    comparison with 'Sequential' could never be True and `interleaved`
    was always set - the value is now stripped before comparing.
    """
    # read in method file to search for parameters
    with open(method_file, "r") as infile:
        lines = infile.readlines()
    interleaved = False
    repetition_time = None
    slicepack_delay = None
    slice_order = []
    n_slices = 0
    reverse = False
    # iterate over line to find parameters
    for idx, line in enumerate(lines):
        if "RepetitionTime=" in line:
            # single int() suffices (previous code converted twice)
            repetition_time = int(float(line.split("=")[1]))
        if "PackDel=" in line:
            slicepack_delay = int(float(line.split("=")[1]))
        if "ObjOrderScheme=" in line:
            # strip the newline so the 'Sequential' comparison can match
            slice_order = line.split("=")[1].strip()
            if slice_order == 'Sequential':
                interleaved = False
            else:
                interleaved = True
        if "ObjOrderList=" in line:
            # the count is on this line, the order list on the next
            n_slices = re.findall(r'\d+', line)
            if len(n_slices) == 1:
                n_slices = int(n_slices[0])
            if lines[idx+1]:
                slice_order = [int(float(s)) for s in re.findall(r'\d+', lines[idx+1])]
                if slice_order[0] > slice_order[-1]:
                    reverse = True
    # calculate actual slice timings
    slice_timings = calculate_slice_timings(n_slices, repetition_time, slicepack_delay, slice_order, reverse)
    # adjust slice order to start at 1
    slice_order = [x+1 for x in slice_order]
    # save metadata
    mri_meta_data = {}
    mri_meta_data["RepetitionTime"] = repetition_time
    mri_meta_data["ObjOrderList"] = slice_order
    mri_meta_data["n_slices"] = n_slices
    mri_meta_data["costum_timings"] = slice_timings
    mri_meta_data["ScanID"] = scanid
    if os.path.exists(out_file):
        with open(out_file, "r") as outfile:
            content = json.load(outfile)
        # update brkraw content with own slice timings
        content.update(mri_meta_data)
        with open(out_file, "w") as outfile:
            json.dump(content, outfile)
    # if json has different naming than usual adjust path
    else:
        parent_path = Path(out_file).parent
        search_path = os.path.join(parent_path, "*.json")
        json_files = glob.glob(search_path)
        for json_file in json_files:
            if os.path.exists(json_file):
                with open(json_file, "r") as outfile:
                    content = json.load(outfile)
                # update brkraw content with own slice timings
                content.update(mri_meta_data)
                with open(json_file, "w") as outfile:
                    json.dump(content, outfile)
def calculate_slice_timings(n_slices, repetition_time, slicepack_delay, slice_order, reverse=False):
    """Compute per-slice acquisition timings as fractions of the TR.

    n_slices        -- number of slices in the volume.
    repetition_time -- TR (ms).
    slicepack_delay -- inter-slicepack delay (ms), subtracted from TR.
    slice_order     -- acquisition order indices into the timing ramp.
    reverse         -- when True, `slice_order` is reversed IN PLACE
                       (callers observe this mutation) before applying.
    Returns a list of timing offsets (spacing * centered slice index).
    """
    half = n_slices // 2
    spacing = float(repetition_time - slicepack_delay) / float(n_slices * repetition_time)
    # Build a symmetric, descending ramp centered on zero; even counts
    # are shifted by 0.5 so the ramp stays symmetric.
    if n_slices % 2 == 1:  # odd
        ramp = [float(i) for i in range(half, -half - 1, -1)]
    else:  # even
        ramp = [float(i) - 0.5 for i in range(half, -half, -1)]
    if reverse:
        slice_order.reverse()
    reordered = [ramp[i] for i in slice_order]
    return [spacing * t for t in reordered]
def get_visu_pars(path):
    """Extract the echo times (VisuAcqEchoTime) from a Bruker visu_pars file.

    path -- path to the visu_pars file; a missing file yields an empty array.
    Returns a numpy array of echo times (floats, in the file's units).

    BUG FIX: the previous pattern r'\\d+' split floating-point values at
    the decimal point (e.g. '10.5' became 10 and 5); a float-aware
    pattern now parses them correctly.
    """
    echotimes = []
    if os.path.exists(path):
        with open(path, 'r') as infile:
            lines = infile.readlines()
        for idx, line in enumerate(lines):
            if "VisuAcqEchoTime=" in line:
                # values are on the line after the parameter declaration
                if lines[idx+1]:
                    echotimes = [float(s) for s in re.findall(
                        r'[-+]?\d+(?:\.\d+)?(?:[eE][-+]?\d+)?', lines[idx+1])]
    echotimes = np.array(echotimes)
    return echotimes
def bids_convert(input_dir, output_dir):
    """Rearrange brkraw-converted data into BIDS layout.

    Runs `brkraw bids_helper` to generate the dataset csv/json templates,
    strips the EchoTime entry from the json (handled per-scan instead),
    then runs `brkraw bids_convert` into output_dir and removes the
    temporary folder.
    """
    ## rearrange proc data in BIDS-format
    temp_dir = os.path.join(input_dir, "temp")
    command = f"brkraw bids_helper {input_dir} dataset -j"
    command_args = shlex.split(command)
    # bids_helper writes its templates into the current working directory
    os.chdir(input_dir)
    try:
        result = subprocess.run(command_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
        logging.info(f"Output bids helper:\n{result.stdout}")
    except Exception as e:
        logging.error(f'Fehler bei der Ausführung des Befehls: {command_args}\nFehlermeldung: {str(e)}')
        raise
    # # adjust dataset.json template
    # NOTE(review): glob(...)[0] raises IndexError if bids_helper produced
    # no data*.json/csv — confirm that is an acceptable failure mode.
    dataset_json = glob.glob(os.path.join(os.getcwd(), "data*.json"))[0]
    dataset_csv = glob.glob(os.path.join(os.getcwd(), "data*.csv"))[0]
    if os.path.exists(dataset_json):
        with open(dataset_json, 'r') as infile:
            meta_data = json.load(infile)
        # remove the global EchoTime so per-scan values are used instead
        if meta_data["common"]["EchoTime"]:
            del meta_data["common"]["EchoTime"]
        with open(dataset_json, 'w') as outfile:
            json.dump(meta_data, outfile)
    ## convert to bids
    command = f"brkraw bids_convert {input_dir} {dataset_csv} -j {dataset_json} -o {output_dir}"
    command_args = shlex.split(command)
    try:
        result = subprocess.run(command_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
        logging.info(f"Output bids convert:\n{result.stdout}")
    except Exception as e:
        logging.error(f'Fehler bei der Ausführung des Befehls: {command_args}\nFehlermeldung: {str(e)}')
        raise
    shutil.rmtree(temp_dir)
def nifti_convert(input_dir, raw_data_list, output_dir):
    """Convert each raw Bruker dataset to NIfTI in parallel via brkraw.

    input_dir     -- folder holding the raw data; a 'temp' subfolder is
                     created and used as the working directory so brkraw
                     writes its output there.
    raw_data_list -- full paths of the raw datasets to convert.
    output_dir    -- accepted but not read in this body
                     (NOTE(review): appears unused — confirm callers).

    Restores the original working directory when done. The previously
    declared-but-unused local `list_of_paths` has been removed.
    """
    aidamri_dir = os.getcwd()
    temp_dir = os.path.join(input_dir, "temp")
    if not os.path.exists(temp_dir):
        os.mkdir(temp_dir)
    os.chdir(temp_dir)
    # one worker process per dataset; wait for all conversions to finish
    with concurrent.futures.ProcessPoolExecutor() as executor:
        futures = [executor.submit(brkraw_tonii, path) for path in raw_data_list]
        concurrent.futures.wait(futures)
    os.chdir(aidamri_dir)
def brkraw_tonii(input_path):
    """Convert a single Bruker raw dataset to NIfTI by invoking
    `brkraw tonii`; logs the tool's combined stdout/stderr and
    re-raises any execution failure after logging it."""
    cmd_parts = shlex.split(f"brkraw tonii {input_path}")
    try:
        proc = subprocess.run(
            cmd_parts,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            text=True,
        )
        logging.info(f"Output nifti conversion of dataset {os.path.basename(input_path)}:\n{proc.stdout}")
    except Exception as err:
        logging.error(f'Fehler bei der Ausführung des Befehls: {cmd_parts}\nFehlermeldung: {str(err)}')
        raise
def create_mems_and_map(mese_scan_ses, mese_scan_data, output_dir):
    """Stack the per-echo MESE niftis of one session into a 4D MEMS file and,
    if enough echoes exist, compute a T2 map from it.

    Runs as a ProcessPoolExecutor worker (see __main__).

    :param mese_scan_ses: path to one session folder (.../sub-XX/ses-YY)
    :param mese_scan_data: dict mapping subject number -> {"ScanID", "RawData"}
    :param output_dir: root of the processed (BIDS) output tree
    :return: 1 if the session contains no MESE files, otherwise None
    """
    # derive sub-XX and ses-YY names from the session path
    sub = os.path.basename(os.path.dirname(mese_scan_ses))
    ses = os.path.basename(mese_scan_ses)
    anat_data_path = os.path.join(mese_scan_ses, "anat", "*MESE.nii*")
    mese_data_paths = glob.glob(anat_data_path, recursive=True)
    # skip the subject if no MEMS files are found
    if not mese_data_paths:
        return 1
    # collect data of all individual MEMS files of one subject and session
    img_array_data = {}
    for m_d_p in mese_data_paths:
        # find slice number in path. e.g.: *echo-10_MESE.nii.gz, extract number 10
        slice_number = int(((Path(m_d_p).name).split('-')[-1]).split('_')[0])
        # load nifti image and save the array in a dict while key is the slice number
        data = nii.load(m_d_p)
        img_array = data.dataobj.get_unscaled()
        img_array_data[slice_number] = img_array
        # remove single mese file and its json sidecar; only the stacked
        # MEMS file is kept
        os.remove(m_d_p)
        os.remove(m_d_p.replace(".nii.gz", ".json"))
    # sort imgs into right order (ascending echo number)
    sorted_imgs = []
    for key in sorted(img_array_data):
        sorted_imgs.append(img_array_data[key])
    # stack all map related niftis along axis 2
    new_img = np.stack(sorted_imgs, axis=2)
    # remember the orientation of the last loaded echo, then strip it from
    # the header; it is re-applied by correct_orientation() after mapping
    qform = data.header.get_qform()
    sform = data.header.get_sform()
    data.header.set_qform(None)
    data.header.set_sform(None)
    nii_img = nii.Nifti1Image(new_img, None, data.header)
    # save nifti file in anat folder
    img_name = sub + "_" + ses + "_T2w_MEMS.nii.gz"
    t2_mems_path = os.path.join(output_dir, sub, ses, "anat", img_name)
    nii.save(nii_img, t2_mems_path)
    # create t2 map
    # NOTE(review): pathToRawData is a module-level global assigned in
    # __main__; worker processes inherit it on fork — verify on platforms
    # that spawn workers instead
    sub_num = sub.split("-")[1]
    visu_pars_path = os.path.join(pathToRawData, mese_scan_data[sub_num]["RawData"], str(mese_scan_data[sub_num]["ScanID"]), "visu_pars")
    # get echotimes of scan
    echotimes = get_visu_pars(visu_pars_path)
    # a T2 fit only makes sense with more than 3 echoes
    if len(echotimes) > 3:
        img_name = sub + "_" + ses + "_T2w_MAP.nii.gz"
        t2map_path = os.path.join(output_dir, sub, ses, "t2map", img_name)
        if not os.path.exists(os.path.join(output_dir, sub, ses, "t2map")):
            os.mkdir(os.path.join(output_dir, sub, ses, "t2map"))
        try:
            P2_IDLt2_mapping.getT2mapping(t2_mems_path, 'T2_2p', 100, 1.5, 'Brummer', echotimes, t2map_path)
            logging.info(f"Map created for: {os.path.basename(t2_mems_path)}")
        except Exception as e:
            logging.error(f"Error while computing T2w Map:\n{e}")
            raise
        # re-apply the stored orientation to both output files
        correct_orientation(qform,sform,t2_mems_path,t2map_path)
    # generate transposed MEMS img for later registration
    org_mems_scan = nii.load(t2_mems_path)
    mems_data = org_mems_scan.dataobj.get_unscaled()
    mems_data_transposed = np.transpose(mems_data, axes=(0,1,3,2))
    # NOTE(review): index 1 selects the SECOND volume along the last axis,
    # not the first — confirm this is intentional; every volume is then
    # overwritten with that one (index 1 assigns onto itself, a no-op)
    mems_data_first_slice = mems_data_transposed[:,:,:,1]
    for i in range(mems_data_transposed.shape[3]):
        mems_data_transposed[:,:,:,i] = mems_data_first_slice
    transposed_copied_img = nii.Nifti1Image(mems_data_transposed, org_mems_scan.affine)
    img_name = sub + "_" + ses + "_T2w_transposed_MEMS.nii.gz"
    t2_mems_transposed_path = os.path.join(output_dir, sub, ses, "t2map", img_name)
    if not os.path.exists(os.path.join(output_dir, sub, ses, "t2map")):
        os.mkdir(os.path.join(output_dir, sub, ses, "t2map"))
    nii.save(transposed_copied_img, t2_mems_transposed_path)
def correct_orientation(qform,sform, t2_mems_img, t2_map_img):
    """Re-apply the stored qform/sform affines to the MEMS and map images.

    Both files are loaded, their headers patched with the given affines and
    written back in place (the mapping step runs on headers stripped of
    their orientation; see create_mems_and_map).

    :param qform: qform affine taken from the original scan header
    :param sform: sform affine taken from the original scan header
    :param t2_mems_img: path of the stacked *_T2w_MEMS.nii.gz file
    :param t2_map_img: path of the computed *_T2w_MAP.nii.gz file
    """
    # the fix-up is identical for both images, so do it once in a loop
    # (the original body was the same code duplicated)
    for img_path in (t2_mems_img, t2_map_img):
        img = nii.load(img_path)
        # unscaled voxel data keeps the raw values untouched
        imgTemp = img.dataobj.get_unscaled()
        img.header.set_qform(qform)
        img.header.set_sform(sform)
        # affine=None -> orientation comes from the patched header
        new_img = nii.Nifti1Image(imgTemp, None, img.header)
        nii.save(new_img, img_path)
# This flattening is needed for the bids converter to work correctly:
# all raw subject folders must sit directly inside one parent folder.
def fileCopy(list_of_data, input_path):
    """Move every file in list_of_data directly into input_path.

    NOTE: despite the name, this MOVES files (shutil.move), it does not
    copy them. Files already located directly in input_path are skipped.

    :param list_of_data: full paths of the files to relocate
    :param input_path: flat destination folder
    """
    for ll in list_of_data:
        # skip files that already live directly in the destination
        if os.path.dirname(ll) != input_path:
            # Extract the filename from ll
            filename = os.path.basename(ll)
            # Create the destination path by combining input_path and the filename
            destination_path = os.path.join(input_path, filename)
            try:
                shutil.move(ll, destination_path)
                # fixed: messages previously printed the literal text
                # '(unknown)' instead of the actual file name
                print(f"File '{filename}' moved to '{input_path}' successfully.")
            except Exception as e:
                print(f"Error moving '{filename}' to '{input_path}': {str(e)}")
if __name__ == "__main__":
    # Entry point: convert a raw Bruker project folder to NIfTI + BIDS,
    # then compute slice timings for fMRI scans and T2 maps for MESE scans.
    import argparse
    parser = argparse.ArgumentParser(description='This script automates the conversion from the raw bruker data format to the NIfTI format using 1_PV2NIfTiConverter/pv_conv2Nifti.py. The raw data needs to be in the following structure: projectfolder/days/subjects/data/. For this script to work, the groupMapping.csv needs to be adjusted, where the group name of every subject''s folder in the raw data structure needs to be specified. This script computes the converison either for all data in the raw project folder or for certain days and/or groups specified through the optional arguments -d and -g. During the processing a new folder called proc_data is being created in the same directory where the raw data folder is located. Example: python conv2Nifti_auto.py -f /Volumes/Desktop/MRI/raw_data -d Baseline P1 P7 P14 P28')
    parser.add_argument('-i', '--input', required=True,
                        help='Path to the parent project folder of the dataset, e.g. raw_data, WARNING: all of the raw subjects have to be in one folder and not to have a subfolder structure. otherwise the conversion to bids wont work.', type=str)
    parser.add_argument('-s', '--sessions',
                        help='Select which sessions of your data should be processed, if no days are given all data will be used.', type=str, required=False)
    parser.add_argument('-o', '--output', type=str, required=False, help='Output directory where the results will be saved.')
    ## read out parameters
    args = parser.parse_args()
    pathToRawData = args.input
    # default output location is proc_data inside the raw data folder
    if args.output == None:
        output_dir = os.path.join(pathToRawData, "proc_data")
    else:
        output_dir = args.output
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)
    # Create a new workbook and select the active sheet
    workbook = openpyxl.Workbook()
    sheet = workbook.active
    # Enter data in Row 1 (header of the group-mapping template)
    sheet['A1'] = "Subject"
    sheet['B1'] = "Group"
    # Create sourcedata folder
    sourcedata_dir = os.path.join(output_dir, "Sourcedata")
    os.makedirs(sourcedata_dir, exist_ok=True)
    # Save the workbook
    workbook.save(os.path.join(sourcedata_dir,"GroupMapping.xlsx"))
    # Configure the logging module
    log_file_path = os.path.join(sourcedata_dir, "conv2nifti_log.txt")
    logging.basicConfig(filename=log_file_path, level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
    # get list of raw data in input folder
    #list_of_raw = sorted([d for d in os.listdir(pathToRawData) if os.path.isdir(os.path.join(pathToRawData, d)) \
    #    or (os.path.isfile(os.path.join(pathToRawData, d)) and (('zip' in d) or ('PvDataset' in d)))])
    #list_of_raw = glob.glob(os.path.join(pathToRawData,"**","subject"),recursive=True)
    #list_of_data = []
    #for path in list_of_raw:
    #    list_of_data.append(os.path.dirname(path))
    #fileCopy(list_of_data,pathToRawData)
    # every Bruker dataset is identified by the "subject" file it contains
    list_of_raw = glob.glob(os.path.join(pathToRawData,"**","subject"),recursive=True)
    list_of_data = []
    for path in list_of_raw:
        list_of_data.append(os.path.dirname(path))
    logging.info(f"Converting following datasets: {list_of_data}")
    print(f"Converting following datasets: {list_of_data}")
    # convert data into nifti format
    print("Paravision to nifti conversion running \33[5m...\33[0m (wait!)")
    #nifti_convert(output_dir, list_of_data)
    nifti_convert(pathToRawData, list_of_data, output_dir)
    print("\rNifti conversion \033[0;30;42m COMPLETED \33[0m ")
    # convert data into BIDS format
    print("BIDS conversion running \33[5m...\33[0m (wait!)")
    bids_convert(pathToRawData, output_dir)
    print("\rBIDS conversion \033[0;30;42m COMPLETED \33[0m ")
    # find MEMS and fmri files from the session csv brkraw left in the cwd
    mese_scan_data = {}
    mese_scan_ids = []
    fmri_scan_ids = {}
    # NOTE(review): glob(...)[0] raises IndexError when no data*.csv exists,
    # so the os.path.exists() check below can never catch that case
    dataset_csv = glob.glob(os.path.join(os.getcwd(), "data*.csv"))[0]
    if os.path.exists(dataset_csv):
        with open(dataset_csv, 'r') as csvfile:
            df = pd.read_csv(csvfile, delimiter=',')
        for index, row in df.iterrows():
            # save every sub which has MEMS scans
            if row["modality"] == "MESE":
                mese_scan_ids.append(row["SubjID"])
                mese_scan_data[row["SubjID"]] = {}
                mese_scan_data[row["SubjID"]]["ScanID"] = row["ScanID"]
                mese_scan_data[row["SubjID"]]["RawData"] = row["RawData"]
            # save every sub and scanid which is an fmri scan
            if row["DataType"] == "func":
                fmri_scan_ids[row["RawData"]] = {}
                fmri_scan_ids[row["RawData"]]["ScanID"] = row["ScanID"]
                fmri_scan_ids[row["RawData"]]["SessID"] = row["SessID"]
                fmri_scan_ids[row["RawData"]]["SubjID"] = row["SubjID"]
    # iterate over all fmri scans to calculate and save custom slice timings
    for sub, data in fmri_scan_ids.items():
        scanid = str(data["ScanID"])
        sessid = str(data["SessID"])
        subjid = str(data["SubjID"])
        # determine method file path
        fmri_scan_method_file = os.path.join(pathToRawData, sub, scanid, "method")
        # determine output json file path
        out_file = os.path.join(output_dir, "sub-" + subjid, "ses-" + sessid, "func", "sub-" + subjid + "_ses-" + sessid + "_EPI.json")
        # calculate slice timings
        create_slice_timings(fmri_scan_method_file, scanid, out_file)
    ## use parallel computing for a faster generation of t2maps
    mese_scan_sessions = []
    for id in mese_scan_ids:
        mese_scan_path = os.path.join(output_dir, "sub-" + id)
        sessions = os.listdir(mese_scan_path)
        for ses in sessions:
            mese_scan_ses = os.path.join(mese_scan_path, ses)
            if mese_scan_ses not in mese_scan_sessions:
                mese_scan_sessions.append(os.path.join(mese_scan_path, ses))
    print("T2 mapping running \33[5m...\33[0m (wait!)")
    logging.info(f"Creating T2w maps for following datasets:\n{mese_scan_ids}")
    # one worker per session; create_mems_and_map skips sessions without MESE
    with concurrent.futures.ProcessPoolExecutor() as executor:
        futures = [executor.submit(create_mems_and_map, mese_scan_ses, mese_scan_data, output_dir) for mese_scan_ses in mese_scan_sessions]
        concurrent.futures.wait(futures)
    print('\rT2 mapping \033[0;30;42m COMPLETED \33[0m ')
    logging.info(f"Finished creating T2w maps")
    # brkraw's session files; deletion is currently disabled
    dataset_csv = glob.glob(os.path.join(os.getcwd(), "data*.csv"))[0]
    dataset_json = glob.glob(os.path.join(os.getcwd(), "data*.json"))[0]
    #os.remove(dataset_csv)
    #os.remove(dataset_json)
    print("\n")
    print("###")
    print("Finished converting raw data into nifti format!")
    print("\n")
    print("###")
    print("For detailed information check logging file!")
    print("\n")
    print("###")
    print("Thank you for using AIDAmri!")
| Python |
3D | Aswendt-Lab/AIDAmri | bin/batchProc.py | .py | 21,057 | 443 | """
Created on 18/11/2020
@author: Marc Schneider
AG Neuroimaging and Neuroengineering of Experimental Stroke
Department of Neurology, University Hospital Cologne
This script runs every needed script for all (pre-)processing and registration
steps. The data needs to be ordered like after Bruker2NIfTI conversion:
project_folder/days/groups/subjects/.
For the script to work, it needs to be placed within the /bin folder of AIDAmri.
Example:
python batchProc.py -i /Volumes/Desktop/MRI/proc_data -t anat dwi func t2map
"""
import glob
import os
import fnmatch
import shutil
from pathlib import Path
import nibabel as nii
import concurrent.futures
import functools
import subprocess
from tqdm import tqdm
import multiprocessing
import logging
import shlex
import time
def findData(projectPath, sessions, dataTypes):
    """Collect the per-datatype session folders of a BIDS-like project tree.

    Scans projectPath for sub-* folders, gathers their ses-* subfolders,
    optionally restricts them to the requested sessions, and buckets the
    contained anat/dwi/func/t2map folders by datatype.

    :param projectPath: root of the processed data tree
    :param sessions: list of session names (without the 'ses-' prefix) or
        None/empty to keep every session
    :param dataTypes: datatype names to collect (subset of the four buckets)
    :return: dict with keys 'anat', 'dwi', 'func', 't2map' -> folder lists
    """
    ses_dirs = []
    for entry in os.listdir(projectPath):
        # subject folders contain 'sub'; ignore macOS metadata entries
        if "sub" in entry and ".DS_Store" not in entry:
            sub_root = os.path.join(projectPath, entry)
            for candidate in os.listdir(sub_root):
                if "ses" in candidate:
                    ses_dirs.append(os.path.join(sub_root, candidate))
    if sessions:
        # keep only the sessions the user asked for
        wanted_tags = ["ses-" + name for name in sessions]
        ses_dirs = [p for p in ses_dirs if any(tag in p for tag in wanted_tags)]
    # bucket the datatype subfolders; a bucket stays empty when its type
    # was not requested or no folder of that name exists
    buckets = {"anat": [], "dwi": [], "func": [], "t2map": []}
    for ses_dir in ses_dirs:
        for leaf in os.listdir(ses_dir):
            if leaf in buckets and leaf in dataTypes:
                buckets[leaf].append(os.path.join(ses_dir, leaf))
    return buckets
def run_subprocess(command,datatype,step,anat_process=False):
    """Run one processing script as a subprocess and tee its output to a log.

    :param command: full command line; split with shlex before execution
    :param datatype: 'anat', 'dwi', 'func' or 't2map'
    :param step: 'preprocess', 'registration' or 'process'
    :param anat_process: selects the log file name for the anat process
        step (False -> '<step>_par.log', True -> '<step>.log')
    :return: 0 on success; the tuple (sub, ses, datatype, step) on a
        non-zero exit code or timeout so the caller can report it
    """
    timeout = 3600 # set maximum time in seconds after which the subprocess will be terminated
    command_args = shlex.split(command)
    # the input file is normally the last CLI argument ...
    file = command_args[-1]
    if datatype == "func" and step =="process":
        # ... except for fMRI processing, where '-stc <flag>' comes last
        file = command_args[-3]
    # default log location: next to the input file
    log_file = os.path.join(os.path.dirname(file), step + ".log")
    if datatype == "anat" and step == "process":
        # anat process scripts take the session folder, so the log goes
        # into its anat/ subfolder instead
        log_file = os.path.join(os.path.dirname(file), datatype, step + ".log")
        if anat_process == False:
            # parallel variant (getIncidenceSize_par.py) gets its own log
            log_file = os.path.join(os.path.dirname(file), datatype, step + "_par" + ".log")
    # find current sub name (and session) from the path components
    normalized_path = os.path.normpath(file)
    directories = normalized_path.split(os.path.sep)
    sub = [directory for directory in directories if "sub-" in directory][0]
    ses = [directory for directory in directories if "ses-" in directory][0]
    try:
        logging.info(f"Running command: {command}.\nCheck {log_file} for further information.")
        # start each step with a fresh log file
        if os.path.exists(log_file):
            os.remove(log_file)
        with open(log_file, 'w') as outfile:
            time.sleep(2) # make sure logging file is created before starting the subprocess
            result = subprocess.run(command_args, stdout=outfile, stderr=outfile, text=True, timeout=timeout)
        if result.returncode != 0:
            return sub,ses,datatype,step
        else:
            return 0
    except subprocess.TimeoutExpired:
        logging.error(f'Timeout expired for command: {command_args}')
        return sub,ses,datatype,step
    except Exception as e:
        logging.error(f'Error while executing the command: {command_args} Errorcode: {str(e)}')
        raise
def executeScripts(currentPath_wData, dataFormat, step, stc=False, *optargs):
    """Run the AIDAmri script matching (dataFormat, step) on one session
    datatype folder and collect any failures.

    :param currentPath_wData: path projectfolder/sub/ses/<dataFormat>
    :param dataFormat: 'anat', 'func', 't2map' or 'dwi'
    :param step: 'preprocess', 'registration' or 'process'
    :param stc: slice-time-correction flag forwarded to fMRI processing
    :param optargs: unused; accepted for call compatibility
    :return: 0 on full success, otherwise a list of error tuples/messages
    """
    # For every datatype (T2w, fMRI, DTI), go in all days/group/subjects folders
    # and execute the respective (pre-)processing/registration-scripts.
    # If a certain file does not exist, a note will be created in the errorList.
    # cwd should contain the path of the /bin folder (the user needs to navigate to the /bin folder before executing this script)
    # KEEP IN MIND DUE TO PARALLEL COMPUTING NO ERRORS IN THIS FUNCTION WILL BE PRINTED OUT => GREY ZONE
    errorList = [];
    message = '';
    cwd = str(Path(__file__).resolve().parent)
    currentPath_wData = Path(currentPath_wData)
    # currentPath_wData = projectfolder/sub/ses/dataFormat (e.g. anat, func, dwi)
    # Find logging file (two levels up from the datatype folder = project root)
    root_path = Path(currentPath_wData).parents[2]
    log_file_path = os.path.join(root_path, "batchproc_log.txt")
    # Initialize logging only if no handler is active (worker processes
    # start without one)
    if not logging.getLogger().hasHandlers():
        logging.basicConfig(
            filename=log_file_path,
            level=logging.INFO,
            format='%(asctime)s - %(levelname)s - %(message)s'
        )
    if os.path.isdir(currentPath_wData):
        if dataFormat == 'anat':
            if step == "preprocess":
                os.chdir(os.path.join(cwd, '2.1_T2PreProcessing'))
                currentFile = list(currentPath_wData.glob("*T2w.nii.gz"))
                if len(currentFile) > 0:
                    command = f'python preProcessing_T2.py -i {currentFile[0]}'
                    result = run_subprocess(command, dataFormat, step)
                    if result != 0:
                        errorList.append(result)
                else:
                    message = f'Could not find *T2w.nii.gz in {str(currentPath_wData)}'
                    logging.error(message)
                    errorList.append(message)
                os.chdir(cwd)
            elif step == "registration":
                os.chdir(os.path.join(cwd, '2.1_T2PreProcessing'))
                currentFile = list(currentPath_wData.glob("*Bet.nii.gz"))
                if len(currentFile) > 0:
                    command = f'python registration_T2.py -i {currentFile[0]}'
                    result = run_subprocess(command, dataFormat, step)
                    # NOTE(review): the result of registration_T2.py is
                    # overwritten by the next call before it is checked —
                    # a failure there is never appended to errorList
                    command = f'python t2_value_extraction.py -i {currentFile[0]}'
                    result = run_subprocess(command, dataFormat, step)
                    if result != 0:
                        errorList.append(result)
                else:
                    message = f'Could not find *Bet.nii.gz in {str(currentPath_wData)}'
                    logging.error(message)
                    errorList.append(message)
                os.chdir(cwd)
            elif step == "process":
                # incidence maps require a stroke mask; skip silently otherwise
                has_stroke_mask = any(currentPath_wData.glob("**/*Stroke_mask.nii.gz"))
                if not has_stroke_mask:
                    message = f"No stroke mask found for {str(currentPath_wData)}, proceeding without mask."
                    logging.info(message) #write in log-file
                    #print(message, flush=True)
                    return 0
                os.chdir(os.path.join(cwd, '3.1_T2Processing'))
                command = f'python getIncidenceSize_par.py -i {str(currentPath_wData)}'
                result = run_subprocess(command, dataFormat, step)
                if isinstance(result, tuple) and len(result) == 4:
                    errorList.append(result)
                command = f'python getIncidenceSize.py -i {str(currentPath_wData)}'
                result = run_subprocess(command, dataFormat, step, anat_process=True)
                if isinstance(result, tuple) and len(result) == 4:
                    errorList.append(result)
                os.chdir(cwd)
        elif dataFormat == 'func':
            if step == "preprocess":
                os.chdir(os.path.join(cwd, '2.3_fMRIPreProcessing'))
                currentFile = list(currentPath_wData.glob("*EPI.nii.gz"))
                if len(currentFile)>0:
                    command = f'python preProcessing_fMRI.py -i {currentFile[0]}'
                    result = run_subprocess(command,dataFormat,step)
                    if result != 0:
                        errorList.append(result)
                else:
                    message = f'Could not find *EPI.nii.gz in {str(currentPath_wData)}';
                    logging.error(message)
                    errorList.append(message)
                os.chdir(cwd)
            elif step == "registration":
                os.chdir(os.path.join(cwd, '2.3_fMRIPreProcessing'))
                currentFile = list(currentPath_wData.glob("*SmoothBet.nii.gz"))
                if len(currentFile)>0:
                    command = f'python registration_rsfMRI.py -i {currentFile[0]}'
                    result = run_subprocess(command,dataFormat,step)
                    if result != 0:
                        errorList.append(result)
                else:
                    message = f'Could not find *SmoothBet.nii.gz in {str(currentPath_wData)}';
                    logging.error(message)
                    errorList.append(message)
                os.chdir(cwd)
            elif step == "process":
                currentFile = list(currentPath_wData.glob("*EPI.nii.gz"))
                if len(currentFile)>0:
                    os.chdir(os.path.join(cwd, '3.3_fMRIActivity'))
                    # stc flag is forwarded; run_subprocess knows -stc is last
                    command = f'python process_fMRI.py -i {currentFile[0]} -stc {stc}'
                    result = run_subprocess(command,dataFormat,step)
                    if result != 0:
                        errorList.append(result)
                    os.chdir(cwd)
        elif dataFormat == 't2map':
            if step == "preprocess":
                os.chdir(os.path.join(cwd, '4.1_T2mapPreProcessing'))
                currentFile = list(currentPath_wData.glob("*MEMS.nii.gz"))
                if len(currentFile)>0:
                    command = f'python preProcessing_T2MAP.py -i {currentFile[0]}'
                    result = run_subprocess(command,dataFormat,step)
                    if result != 0:
                        errorList.append(result)
                else:
                    message = f'Could not find *MEMS.nii.gz in {str(currentPath_wData)}';
                    logging.error(message)
                    errorList.append(message)
                os.chdir(cwd)
            elif step == "registration":
                os.chdir(os.path.join(cwd, '4.1_T2mapPreProcessing'))
                currentFile = list(currentPath_wData.glob("*SmoothMicoBet.nii.gz"))
                if len(currentFile)>0:
                    command = f'python registration_T2MAP.py -i {currentFile[0]}'
                    result = run_subprocess(command,dataFormat,step)
                    if result != 0:
                        errorList.append(result)
                else:
                    message = f'Could not find *SmoothMicoBet.nii.gz in {str(currentPath_wData)}';
                    # NOTE(review): uses print instead of logging.error —
                    # inconsistent with every other branch
                    print(message)
                    errorList.append(message)
                os.chdir(cwd)
            elif step == "process":
                currentFile = list(currentPath_wData.glob("*T2w_MAP.nii.gz"))
                # NOTE(review): unlike the other branches there is no chdir
                # into the script folder here — t2map_data_extract.py is
                # resolved relative to the current cwd; confirm intended
                if len(currentFile)>0:
                    command = f'python t2map_data_extract.py -i {currentFile[0]}'
                    result = run_subprocess(command,dataFormat,step)
                    if result != 0:
                        errorList.append(result)
                else:
                    message = f'Could not find *T2w_MAP.nii.gz in {str(currentPath_wData)}';
                    logging.error(message)
                    errorList.append(message)
                os.chdir(cwd)
        elif dataFormat == 'dwi':
            if step == "preprocess":
                os.chdir(os.path.join(cwd, '2.2_DTIPreProcessing'))
                currentFile = list(currentPath_wData.glob("*dwi.nii.gz"))
                if len(currentFile)>0:
                    command = f'python preProcessing_DTI.py -i {currentFile[0]}'
                    result = run_subprocess(command,dataFormat,step)
                    if result != 0:
                        errorList.append(result)
                else:
                    message = f'Could not find *dwi.nii.gz in {str(currentPath_wData)}';
                    logging.error(message)
                    errorList.append(message)
                os.chdir(cwd)
            elif step == "registration":
                os.chdir(os.path.join(cwd, '2.2_DTIPreProcessing'))
                currentFile = list(currentPath_wData.glob("*SmoothMicoBet.nii.gz"))
                if len(currentFile)>0:
                    command = f'python registration_DTI.py -i {currentFile[0]}'
                    result = run_subprocess(command,dataFormat,step)
                    if result != 0:
                        errorList.append(result)
                else:
                    message = f'Could not find *SmoothMicoBet.nii.gz in {str(currentPath_wData)}';
                    logging.error(message)
                    errorList.append(message)
                os.chdir(cwd)
            elif step == "process":
                currentFile = list(currentPath_wData.glob("*dwi.nii.gz"))
                # Appends optional (fa0, nii_gz) flags to DTI main process if passed
                if len(currentFile)>0:
                    cli_str = f'dsi_main.py -i {currentFile[0]}'
                    os.chdir(os.path.join(cwd, '3.2_DTIConnectivity'))
                    command = f'python {cli_str}'
                    result = run_subprocess(command,dataFormat,step)
                    if result != 0:
                        errorList.append(result)
                    os.chdir(cwd)
        else:
            message = 'The data folders'' names do not match anat, dwi, func or t2map';
            logging.error(message);
            errorList.append(message)
    else:
        message = f"The folder {dataFormat} does not exist in {str(currentPath_wData)}"
        logging.error(message)
        errorList.append(message)
    # 0 signals success to the batch driver; otherwise hand back the errors
    if errorList:
        return errorList
    else:
        return 0
def find(pattern, path):
    """Recursively collect all files under `path` whose name matches the
    shell-style `pattern` (fnmatch semantics, e.g. '*.nii.gz').

    :param pattern: fnmatch pattern applied to file basenames
    :param path: root directory of the search
    :return: list of full paths of the matching files
    """
    matches = []
    for root, _dirs, filenames in os.walk(path):
        matches.extend(
            os.path.join(root, name)
            for name in filenames
            if fnmatch.fnmatch(name, pattern)
        )
    return matches
if __name__ == "__main__":
    # Entry point: discover all datatype folders, then run each processing
    # step over them with a pool of worker processes.
    import argparse
    parser = argparse.ArgumentParser(description='Batch processing of all data. This script runs every needed script for all registration and processing steps. The data needs to be ordered like after Bruker2NIfTI conversion: project_folder/days/groups/subjects/. For the script to work, it needs to be placed within the /bin folder of AIDAmri. Example: python batchProc.py -f /Volumes/Desktop/MRI/proc_data -g Treatment_C3a Treatment_PBS -d Baseline P7 P14 P28 P42 P56 -t T2w fMRI DTI')
    requiredNamed = parser.add_argument_group('required arguments')
    requiredNamed.add_argument('-i', '--input', required=True,
                               help='Path to the parent project folder of the dataset, e.g. proc_data')
    optionalNamed = parser.add_argument_group('optional arguments')
    optionalNamed.add_argument('-s', '--sessions', required=False,
                               help='Select which sessions of your data should be processed, if no days are given all data will be used.', nargs='+')
    optionalNamed.add_argument('-stc', '--slicetimecorrection', default = "False", type=str,
                               help='Set True or False if a slice time correction should be performed. Only set true if you converted raw bruker data with conv2nifti.py from aidamri beforehand. Otherwise choose False')
    optionalNamed.add_argument('-t', '--dataTypes', required=False, nargs='+', help='Data types to be processed e.g. anat, dwi and/or func. Multiple specifications are possible.')
    optionalNamed.add_argument('-ds', '--debug_steps', required=False, nargs='+', help='Define which steps of the processing should be done. Default = [preprocess, registration, process]')
    optionalNamed.add_argument('-cpu', '--cpu_cores', required=False, default = "Half", help='Define how many parallel processes should be use to process your data. CAUTION: Too many processes will slow down your computer noticeably. Select between: ["Min", "Half", "Max"]')
    optionalNamed.add_argument('-e_cpu', '--expert_cpu', required=False, help='Define precisely how many parallel processes should be used. Enter a number.')
    args = parser.parse_args()
    pathToData = args.input
    sessions = args.sessions
    # configure the logging module (one log file at the project root)
    log_file_path = os.path.join(pathToData, "batchproc_log.txt")
    logging.basicConfig(filename=log_file_path, level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
    # NOTE(review): argparse applies default "False", so this is never None;
    # stc ends up holding the string value either way
    if args.slicetimecorrection is None:
        stc = False
    else:
        stc = args.slicetimecorrection
    if args.dataTypes is None:
        dataTypes = ["anat", "dwi", "func", "t2map"]
    else:
        dataTypes = args.dataTypes
    if args.debug_steps is None:
        steps = ["preprocess","registration","process"]
    else:
        steps = args.debug_steps
    print('Entered information:')
    print(pathToData)
    print('dataTypes %s' % dataTypes)
    print('Slice time correction [%s]' % stc)
    print('Steps %s' % steps)
    print()
    all_files = findData(pathToData, sessions, dataTypes)
    # translate the -cpu choice into a worker count
    # NOTE(review): an unrecognized -cpu value (anything but Min/Half/Max)
    # leaves num_processes unbound unless -e_cpu is given -> NameError below
    if args.cpu_cores.upper() == "MIN":
        num_processes = 1
    elif args.cpu_cores.upper() == "HALF":
        num_processes = int(multiprocessing.cpu_count() / 2)
    elif args.cpu_cores.upper() == "MAX":
        num_processes = multiprocessing.cpu_count()
    # -e_cpu overrides the coarse -cpu setting
    if args.expert_cpu:
        num_processes = int(args.expert_cpu)
    print(f"Running with {num_processes} parallel processes!")
    logging.info(f"Entered information:\n{pathToData}\n dataTypes {dataTypes}\n Slice time correction [{stc}]")
    logging.info(f"Using {num_processes} CPUs for the parallelization")
    logging.info(f"Processing following datasets:\n{all_files}")
    # run the steps per datatype; each step is parallelized over sessions
    for key, value in all_files.items():
        if value:
            error_list_all = []
            print()
            print(f"Entered {key} data: \n{value}")
            print()
            print(f"\n{key} processing \33[5m...\33[0m (wait!)")
            print()
            for step in steps:
                error_list_step = []
                progress_bar = tqdm(total=len(value), desc=f"{step} {key} data")
                with concurrent.futures.ProcessPoolExecutor(max_workers=num_processes) as executor:
                    futures = [executor.submit(executeScripts, path, key, step, stc) for path in value]
                    for future in concurrent.futures.as_completed(futures):
                        progress_bar.update(1)
                        # executeScripts returns 0 or a list of errors
                        errorList = future.result()
                        if errorList != 0:
                            if isinstance(errorList, list):
                                error_list_step.extend(errorList)
                            else:
                                error_list_step.append(errorList)
                    concurrent.futures.wait(futures)
                progress_bar.close()
                error_list_all.extend(error_list_step)
                if not error_list_step:
                    print(f"{key} {step} \033[0;30;42m COMPLETED \33[0m")
                else:
                    print(f"{key} {step} \033[0;30;41m INCOMPLETE \33[0m")
                logging.info(f"{key} {step} processing completed")
                logging.error(f"Following errors were occuring {error_list_all}")
            logging.info(f"{key} processing completed")
            if not error_list_all:
                print(f"\n{key} processing \033[0;30;42m COMPLETED \33[0m")
            else:
                print(f"\n{key} processing \033[0;30;41m INCOMPLETE \33[0m")
            # summarize the collected failures for the user
            if error_list_all:
                print()
                for error in error_list_all:
                    if isinstance(error, tuple) and len(error) == 4:
                        sub, ses, datatype, step = error
                        print(
                            f"Error in sub: {sub} in session: {ses} in datatype: {datatype} and step: {step}. Check logging file for further information")
                    else:
                        print(f"Unrecognized error format: {error}")
| Python |
3D | Aswendt-Lab/AIDAmri | bin/5.1_ROI_analysis/proc_tools.py | .py | 11,672 | 257 | '''
Created on 29.09.2020
Author:
Michael Diedenhofen
Max Planck Institute for Metabolism Research, Cologne
'''
from __future__ import print_function
import csv
import os
import sys
import nibabel as nib
from datetime import datetime
# directories (Windows paths of the local analysis machine)
lib_in_dir = r'C:\Users\Public\Linux\shared_folder\AIDAmri\lib'
proc_in_dir = r'C:\Users\Public\Linux\shared_folder\proc_data'
#proc_out_dir = r'C:\Users\Public\Linux\shared_folder\proc_data'
proc_out_dir = r'C:\Users\Michael\Projects\Markus\Goeteborg\processed_data'
raw_in_dir = r'C:\Users\Public\Linux\shared_folder\raw_data'
# Input labels text file with atlas index and seed regions (labels) in each line
# Atlas (1 or 2), Label 1, Label 2, ...
path_label_names_2000 = os.path.join(lib_in_dir, 'annoVolume+2000_rsfMRI.nii.txt')
path_labels = os.path.join(lib_in_dir, 'annotation_50CHANGEDanno_label_IDs+2000.txt')
path_labels_1 = r'C:\Users\Michael\Projects\Markus\Goeteborg\processed_data\cortex_labels_1.txt'
path_labels_2 = r'C:\Users\Michael\Projects\Markus\Goeteborg\processed_data\cortex_labels_2.txt'
# Enter all time points of the experiment
timepoints = ['Baseline', 'P7', 'P14', 'P28', 'P42', 'P56']
# Enter all experimental groups
groups = ['Treatment_C3a', 'Treatment_PBS']
# Enter subject name (same as in the Bruker folder structure) in the order of animals per time point for each group.
# For example: [[['subject1-group1-time point1', 'subject1-group1-time point1', ...]],
# [['subject1-group2-time point1', 'subject1-group2-time point1', ...]],
# [['subject1-group1-time point2', 'subject1-group1-time point2', ...]],
# [['subject1-group2-time point2', 'subject1-group2-time point2', ...]]]
study = [[['GV_T3_12_1_1_3_20190808_093354', 'GV_T3_12_2_1_2_20190808_110831', 'GV_T3_12_3_1_2_20190808_120901', 'GV_T3_13_1_1_2_20190808_133527',
'GV_T3_13_2_1_3_20190809_100309', 'GV_T3_13_3_1_2_20190808_144158', 'GV_T3_13_4_1_2_20190809_105931', 'GV_T3_16_1_1_1_20190903_123147',
'GV_T3_16_2_1_1_20190903_131430', 'GV_T3_16_3_1_1_20190903_142337'],
['GV_T3_14_1_1_2_20190809_134721', 'GV_T3_14_2_1_1_20190809_145138', 'GV_T3_14_3_1_1_20190809_154126', 'GV_T3_14_4_1_1_20190812_103755',
'GV_T3_16_4_1_1_20190904_080859', 'GV_T3_17_3_1_1_20190904_101208', 'GV_T3_17_4_1_1_20190904_105336']],
[['GV_T3_12_1_1_4_20190820_090634', 'GV_T3_12_2_1_3_20190820_104019', 'GV_T3_12_3_1_3_20190820_112855', 'GV_T3_13_1_1_3_20190820_130848',
'GV_T3_13_2_1_4_20190820_121817', 'GV_T3_13_3_1_3_20190820_141137', 'GV_T3_13_4_1_3_20190820_152016', 'GV_T3_16_1_1_2_20190923_111155',
'GV_T3_16_2_1_2_20190923_144608', 'GV_T3_16_3_1_2_20190923_162928'],
['GV_T3_14_1_1_3_20190821_095620', 'GV_T3_14_2_1_2_20190821_105807', 'GV_T3_14_3_1_2_20190821_130012', 'GV_T3_14_4_1_2_20190821_134908',
'GV_T3_16_4_1_2_20190924_112719', 'GV_T3_17_3_1_2_20190924_125208', 'GV_T3_17_4_1_2_20190924_133317']],
[['GV_T3_12_1_1_5_20190829_091927', 'GV_T3_12_2_1_4_20190829_103718', 'GV_T3_12_3_1_4_20190829_112627', 'GV_T3_13_1_1_4_20190828_160508',
'GV_T3_13_2_1_5_20190828_170236', 'GV_T3_13_3_1_4_20190828_174327', 'GV_T3_13_4_1_4_20190828_182452', 'GV_T3_16_1_1_3_20191001_091617',
'GV_T3_16_2_1_3_20191001_100959', 'GV_T3_16_3_1_3_20191001_105158'],
['GV_T3_14_1_1_4_20190829_121905', 'GV_T3_14_2_1_3_20190829_130307', 'GV_T3_14_3_1_3_20190829_134612', 'GV_T3_14_4_1_3_20190829_143623',
'GV_T3_16_4_1_3_20191002_083801', 'GV_T3_17_3_1_3_20191002_100801', 'GV_T3_17_4_1_3_20191002_105022']],
[['GV_T3_12_1_1_6_20190910_084053', 'GV_T3_12_2_1_5_20190910_093051', 'GV_T3_12_3_1_5_20190910_101439', 'GV_T3_13_1_1_5_20190910_105531',
'GV_T3_13_2_1_6_20190910_120953', 'GV_T3_13_3_1_5_20190910_125559', 'GV_T3_13_4_1_5_20190910_134736', 'GV_T3_16_1_1_4_20191015_092459',
'GV_T3_16_2_1_5_20191015_151515', 'GV_T3_16_3_1_4_20191015_105651'],
['GV_T3_14_1_1_5_20190911_085005', 'GV_T3_14_2_1_4_20190911_094801', 'GV_T3_14_3_1_4_20190911_103054', 'GV_T3_14_4_1_4_20190911_111332',
'GV_T3_16_4_1_4_20191015_114108', 'GV_T3_17_3_1_4_20191015_131043', 'GV_T3_17_4_1_4_20191015_135026']],
[[None , 'GV_T3_12_2_1_6_20190924_163833', 'GV_T3_12_3_1_7_20190924_171919', 'GV_T3_13_1_1_6_20190925_082850',
'GV_T3_13_2_1_7_20190925_091435', 'GV_T3_13_3_1_6_20190925_095841', 'GV_T3_13_4_1_6_20190925_104204', 'GV_T3_16_1_1_5_20191029_092504',
'GV_T3_16_2_1_6_20191029_101500', 'GV_T3_16_3_1_5_20191029_110341'],
['GV_T3_14_1_1_6_20190925_113349', 'GV_T3_14_2_1_5_20190925_122019', 'GV_T3_14_3_1_5_20190925_132917', 'GV_T3_14_4_1_5_20190925_141548',
'GV_T3_16_4_1_5_20191029_125150', 'GV_T3_17_3_1_5_20191029_133548', 'GV_T3_17_4_1_5_20191029_144222']],
[['GV_T3_12_1_1_8_20191008_102322', 'GV_T3_12_2_1_7_20191008_125211', 'GV_T3_12_3_1_8_20191008_150900', 'GV_T3_13_1_1_7_20191008_161218',
'GV_T3_13_2_1_8_20191009_084507', 'GV_T3_13_3_1_7_20191009_101526', 'GV_T3_13_4_1_7_20191009_112021', 'GV_T3_16_1_1_6_20191112_100410',
'GV_T3_16_2_1_7_20191112_111819', 'GV_T3_16_3_1_6_20191112_121307'],
['GV_T3_14_1_1_8_20191009_121103', 'GV_T3_14_2_1_6_20191009_130701', 'GV_T3_14_3_1_6_20191009_140516', 'GV_T3_14_4_1_6_20191009_150737',
'GV_T3_16_4_1_6_20191112_131231', 'GV_T3_17_3_1_6_20191112_141745', 'GV_T3_17_4_1_6_20191112_150712']]]
#study = [[['GV_T3_12_1_1_3_20190808_093354', 'GV_T3_13_3_1_2_20190808_144158', 'GV_T3_16_3_1_1_20190903_142337'], []], [['GV_T3_12_1_1_4_20190820_090634', 'GV_T3_13_3_1_3_20190820_141137', 'GV_T3_16_3_1_2_20190923_162928'], []], [[], []], [[], []], [[None, 'GV_T3_13_3_1_6_20190925_095841', 'GV_T3_16_3_1_5_20191029_110341'], []], [['GV_T3_12_1_1_8_20191008_102322', 'GV_T3_13_3_1_7_20191009_101526', 'GV_T3_16_3_1_6_20191112_121307'], []]]
# experiment number T2w
# Layout of each expno_* table: expno[timepoint][group][subject], parallel to
# the `study` subject table above.  None marks a subject without that scan.
expno_T2w = [[[10, 8, 5, 5, 6, 6, 8, 6, 7, 10],
              [6, 6, 6, 6, 5, 6, 5]],
             [[7, 7, 6, 11, 5, 18, 6, 8, 5, 6],
              [11, 8, 6, 6, 5, 5, 10]],
             [[6, 5, 6, 5, 5, 5, 5, 6, 5, 8],
              [6, 5, 5, 6, 6, 5, 5]],
             [[6, 6, 5, 5, 8, 10, 5, 5, 5, 5],
              [10, 6, 5, 6, 5, 5, 5]],
             [[None, 6, 6, 7, 7, 6, 9, 7, 9, 7],
              [8, 10, 9, 6, 6, 15, 11]],
             [[6, 5, 10, 5, 10, 8, 5, 6, 6, 7],
              [5, 5, 6, 5, 10, 5, 6]]]
#expno_T2w = [[[10, 6, 10], []], [[7, 18, 6], []], [[], []], [[], []], [[None, 6, 7], []], [[6, 8, 7], []]]
# experiment number rsfMRI
expno_rsfMRI = [[[11, 9, 6, 6, 7, 7, 9, 7, 8, 11],
                 [7, 7, 9, 7, 6, 7, 6]],
                [[8, 10, 9, 12, 6, 19, 8, 9, 6, 7],
                 [12, 9, 7, 7, 6, 6, 11]],
                [[7, 6, 7, 6, 6, 6, 6, 7, 6, 9],
                 [7, 6, 6, 7, 7, 6, 6]],
                [[7, 7, 6, 6, 9, 11, 6, 6, 6, 6],
                 [11, 7, 6, 7, 6, 6, 6]],
                [[None, 7, 7, 8, 8, 7, 10, 8, 10, 8],
                 [9, 11, 10, 7, 7, 16, 12]],
                [[10, 6, 11, 6, 14, 9, 6, 7, 7, 10],
                 [6, 6, 7, 6, 11, 6, 7]]]
#expno_rsfMRI = [[[11, 7, 11], []], [[8, 19, 7], []], [[], []], [[], []], [[None, 7, 8], []], [[10, 9, 10], []]]
# experiment number DTI
expno_DTI = [[[12, 10, 7, 7, 8, 8, 10, 8, 9, 12],
              [8, 8, 8, 8, 7, 8, 7]],
             [[9, 9, 8, 13, 7, 20, 7, 10, 7, 8],
              [13, 10, 8, 8, 7, 7, 12]],
             [[8, 7, 8, 7, 7, 7, 7, 8, 7, 10],
              [8, 7, 7, 8, 8, 7, 7]],
             [[8, 8, 7, 7, 10, 12, 7, 8, 7, 7],
              [12, 8, 7, 8, 7, 7, 7]],
             [[None, 8, 8, 9, 9, 8, 11, 9, 11, 9],
              [10, 12, 11, 8, 8, 17, 13]],
             [[9, 8, 13, 8, 13, None, 8, 9, 9, 11],
              [8, 8, 9, 8, 13, 8, 9]]]
#expno_DTI = [[[12, 8, 12], []], [[9, 20, 8], []], [[], []], [[], []], [[None, 8, 9], []], [[9, None, 11], []]]
# processed images number
# (ParaVision procno: which reconstruction of each experiment to use)
procno = 1
# Fail fast at import time when any required directory or label file is missing.
for _dir in (lib_in_dir, proc_in_dir, proc_out_dir, raw_in_dir):
    if not os.path.isdir(_dir):
        sys.exit("Error: '%s' is not an existing directory." % (_dir,))
for _path in (path_label_names_2000, path_labels, path_labels_1, path_labels_2):
    if not os.path.isfile(_path):
        sys.exit("Error: '%s' is not a regular file." % (_path,))
def get_date():
    """Return the current local date and time as 'Www dd Mon yyyy HH:MM:SS'."""
    stamp = datetime.now()
    return '%s %s' % (stamp.strftime("%a %d %b %Y"), stamp.strftime("%H:%M:%S"))
def read_csv(filename):
    """Read a comma-separated file, skipping the header row.

    Returns the remaining rows as lists of strings, or None if the file
    does not exist.
    """
    if not os.path.isfile(filename):
        return None
    with open(filename, 'r') as handle:
        rows = csv.reader(handle, delimiter=',', dialect='excel', skipinitialspace=True)
        next(rows, None)
        return list(rows)
def save_csv(filename, data):
    """Write `data` rows to a semicolon-separated text file and echo its name."""
    with open(filename, 'w') as handle:
        writer = csv.writer(handle, delimiter=';', dialect='excel', lineterminator='\n')
        writer.writerows(data)
    print(filename)
def read_labels(filename):
    """Parse a labels CSV: first column is an atlas index, the rest are label IDs.

    Returns (iatlas, labels), two parallel lists: int atlas indices and
    lists of int label IDs.
    """
    atlas_indices = []
    label_lists = []
    for row in read_csv(filename):
        atlas_indices.append(int(row[0].strip()))
        label_lists.append([int(col.strip()) for col in row[1:]])
    return (atlas_indices, label_lists)
def save_matrix(filename, matrix):
    """Write a 2-D matrix as plain text with Unix newlines.

    Each row becomes one line of '%.12g'-formatted values separated by
    single spaces; the trailing space per line is kept for backward
    compatibility with the original output format.
    """
    # Context manager replaces the manual open/close; writing rows directly
    # replaces the original join-then-splitlines round trip.
    with open(filename, 'w') as fid:
        for row in matrix:
            fid.write(' '.join('%.12g' % (value,) for value in row) + ' \n')
    print(filename)
def read_text(filename):
    """Return the lines of a text file (newline characters kept), or None if absent."""
    if not os.path.isfile(filename):
        return None
    # Context manager guarantees the handle is closed even if readlines() fails.
    with open(filename, 'r') as fid:
        return fid.readlines()
def save_text(filename, lines):
    """Write each sequence in `lines` as one space-joined text line, then echo the name."""
    with open(filename, 'w') as handle:
        for row in lines:
            handle.write(' '.join(row) + '\n')
    print(filename)
def read_data(path_data):
    """Load a NIfTI file and return (data array, voxel dimensions).

    NOTE(review): image.get_data()/get_header() are deprecated nibabel APIs
    (removed in nibabel >= 5).  get_fdata()/image.header are the modern
    replacements, but get_fdata() always returns float64 — confirm callers
    can take that before switching.
    """
    image = nib.load(path_data)
    data = image.get_data()
    header = image.get_header()
    voxel_dims = header.get_zooms()
    #print("header.get_data_shape():", header.get_data_shape())
    #print("header.get_data_dtype():", header.get_data_dtype())
    #print("header.get_zooms():", header.get_zooms())
    #print("header.get_data_offset():", header.get_data_offset())
    #print("header.get_xyzt_units():", header.get_xyzt_units())
    return (data, voxel_dims)
def save_data(data, voxel_dims, path_data, dtype='float32'):
    """Save `data` as a NIfTI-1 file with the given voxel dimensions (mm).

    dtype selects the on-disk data type; None keeps the array's own dtype.
    Prints the written file name.
    """
    image = nib.Nifti1Image(data, None)
    header = image.get_header()
    if dtype is not None:
        header.set_data_dtype(dtype)
    if data.ndim == 3:
        header.set_zooms(voxel_dims)
    else:
        # non-3D data (e.g. 4D time series): pad the zooms with 1.0 for the extra axis
        header.set_zooms(voxel_dims + (1.0,))
    header.set_xyzt_units(xyz='mm', t=None)
    image.to_filename(path_data)
    print(image.get_filename())
# proc_tools is a shared settings/helpers module; running it directly is a no-op.
if __name__ == '__main__':
    pass
| Python |
3D | Aswendt-Lab/AIDAmri | bin/5.1_ROI_analysis/04_examine_rois.py | .py | 1,858 | 62 | '''
Created on 25.08.2020
Author:
Michael Diedenhofen
Max Planck Institute for Metabolism Research, Cologne
Description:
Helper tool to compare the number of voxels included in the peri-infarct region for each subject.
'''
from __future__ import print_function
# Python 2/3 compatibility: expose the lazy integer-range iterator as
# `zrange` (xrange on Python 2, range on Python 3).
try:
    zrange = xrange
except NameError:
    zrange = range
import os
import sys
import proc_tools as pt
def count_voxels(rois):
    """Return, for each 4th-dimension volume of `rois`, the number of voxels > 0."""
    counts = []
    for idx in zrange(rois.shape[3]):
        volume = rois[:, :, :, idx]
        counts.append(volume[volume > 0].size)
    return counts
def main():
    """Count the voxels of every peri-infarct seed ROI for each subject.

    Walks the study table (timepoint x group x subject), loads each subject's
    Seed_ROIs_peri.nii.gz from the fMRI subfolder and appends one CSV row per
    subject with the per-ROI voxel counts; each timepoint gets a header row
    carrying the label IDs.  Writes everything to ROIs_count_voxels.txt.
    """
    # output text file
    path_data = os.path.join(pt.proc_out_dir, 'ROIs_count_voxels.txt')
    # read label IDs
    _, labels = pt.read_labels(pt.path_labels_1)
    data = []
    for index_t, timepoint in enumerate(pt.timepoints):
        # header row for this timepoint: name followed by the label IDs
        data.append([timepoint] + labels[0])
        for index_g, group in enumerate(pt.groups):
            for subject in pt.study[index_t][index_g]:
                if subject is not None:
                    in_dir = os.path.join(pt.proc_out_dir, timepoint, group, subject, 'fMRI')
                    if not os.path.isdir(in_dir):
                        sys.exit("Error: '%s' is not an existing directory." % (in_dir,))
                    # input ROIs file (NIfTI)
                    #path_in_rois = os.path.join(in_dir, subject + '_cortex_rois_2.nii.gz')
                    path_in_rois = os.path.join(in_dir, 'Seed_ROIs_peri.nii.gz')
                    if not os.path.isfile(path_in_rois):
                        sys.exit("Error: '%s' is not a regular file." % (path_in_rois,))
                    # read ROIs hyperstack (4D)
                    rois, _ = pt.read_data(path_in_rois)
                    values = count_voxels(rois)
                    data.append([subject] + values)
    pt.save_csv(path_data, data)
if __name__ == '__main__':
    main()
| Python |
3D | Aswendt-Lab/AIDAmri | bin/5.1_ROI_analysis/02_apply_xfm_process.py | .py | 17,479 | 346 | '''
Created on 19.10.2020
Author:
Michael Diedenhofen
Max Planck Institute for Metabolism Research, Cologne
Description:
Pre-requisite: 01_dilate_mask_process.py
Result: for all time points the peri-infarct masks will be aligned in the rsfMRI and DTI space
Two scans of the same session can be aligned to each other without image registration if there was only very limited movement (otherwise image registration is necessary, e.g. between T2w and DTI).
The ParaVision visu_pars file provides a mapping (VisuCoreOrientation, VisuCorePosition) from subject (LPS) into the image coordinate system.
rsfMRI:
- For all time points and for each subject of the two groups a T2w peri-infarct mask is transformed to rsfMRI.
- The input peri-infarct mask <subject>_peri_mask_m3_n15.nii.gz and atlas labels <subject>BiasBet_AnnorsfMRI.nii.gz are located in the T2w subfolder.
- The transformed peri-infarct mask <subject>_T2w_peri_mask_rsfMRI.nii.gz and atlas labels <subject>_T2w_Anno_rsfMRI.nii.gz are stored in the fMRI subfolder.
- Create the inverse of a rigid transformation matrix from the parameters of the T2w raw data visu_pars file.
- Create a rigid transformation matrix from the parameters of the rsfMRI raw data visu_pars file.
- Compose the inverse T2w matrix and the rsfMRI matrix to a rigid transformation matrix (rsfMRI -> T2w).
- The function xfm_serial() (in apply_xfm.py) applies the inverse of the matrix (rsfMRI -> T2w) to the T2w peri-infarct mask.
- The T2w peri-infarct mask is first flipped in x- and z-axis then transformed and then flipped back in x- and z-axis.
DTI:
- For all time points and for each subject of the two groups a T2w peri-infarct mask is transformed to DTI.
- The transformed peri-infarct mask <subject>_T2w_peri_mask_DTI.nii.gz and atlas labels <subject>_T2w_Anno_DTI.nii.gz are stored in the DTI subfolder.
- Create the inverse of a rigid transformation matrix from the parameters of the T2w raw data visu_pars file.
- Create a rigid transformation matrix from the parameters of the DTI raw data visu_pars file.
- Compose the inverse T2w matrix and the DTI matrix to a rigid transformation matrix (DTI -> T2w).
- The function xfm_serial() applies the inverse of the matrix (DTI -> T2w) to the T2w peri-infarct mask.
- The T2w peri-infarct mask is first flipped in x- and z-axis then transformed and then flipped back in x- and z-axis.
'''
from __future__ import print_function
import os
import sys
import numpy as np
import pv_reader as pvr
import proc_tools as pt
import apply_xfm as ax
def xfm_T2w_rsfMRI(raw_dir, timepoint_P7, timepoint, group, subject, expno_T2w, expno_rsfMRI, procno_T2w, procno_rsfMRI):
    """Transform one subject's T2w-space images into native rsfMRI space.

    Builds rigid transformation matrices from the ParaVision visu_pars
    geometry of the T2w and rsfMRI raw data sets, composes the mapping
    matrix_rsfMRI_T2w = inv(matrix_T2w) . matrix_rsfMRI, and lets
    ax.xfm_serial() apply its inverse to the T2w image, the atlas labels,
    the stroke mask (if present) and the peri-infarct mask.  Results and
    both composed matrices are written to the subject's fMRI output folder.

    Silently returns when any experiment/processing number is None
    (subject without both scans).
    """
    if (expno_T2w is None) or (expno_rsfMRI is None) or (procno_T2w is None) or (procno_rsfMRI is None):
        return
    in_dir = os.path.join(pt.proc_in_dir, timepoint, group, subject, 'T2w')
    mask_dir = os.path.join(pt.proc_out_dir, timepoint, group, subject, 'T2w')
    out_dir = os.path.join(pt.proc_out_dir, timepoint, group, subject, 'fMRI')
    if not os.path.isdir(in_dir):
        sys.exit("Error: '%s' is not an existing directory." % (in_dir,))
    if not os.path.isdir(mask_dir):
        sys.exit("Error: '%s' is not an existing directory." % (mask_dir,))
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    # input T2w atlas labels file
    path_in_anno = os.path.join(in_dir, subject + 'BiasBet_AnnorsfMRI.nii.gz')
    if not os.path.isfile(path_in_anno):
        sys.exit("Error: '%s' is not a regular file." % (path_in_anno,))
    # input T2w stroke mask file
    # (for the timepoint_P7 session the mask lives in the processed input
    #  tree, for all later sessions in the output tree)
    if timepoint == timepoint_P7:
        path_in_mask = os.path.join(in_dir, subject + 'Stroke_mask.nii.gz')
    else:
        path_in_mask = os.path.join(mask_dir, subject + 'Stroke_mask.nii.gz')
    # input T2w peri-infarct mask file
    path_in_peri = os.path.join(mask_dir, subject + '_peri_mask_m3_n15.nii.gz')
    if not os.path.isfile(path_in_peri):
        sys.exit("Error: '%s' is not a regular file." % (path_in_peri,))
    # output rsfMRI file
    #path_rsfMRI = os.path.join(out_dir, subject + '_rsfMRI.nii.gz')
    # output transformed T2w file
    path_T2w_rsfMRI = os.path.join(out_dir, subject + '_T2w_rsfMRI.nii.gz')
    # output transformed atlas labels file
    path_anno_rsfMRI = os.path.join(out_dir, subject + '_T2w_Anno_rsfMRI.nii.gz')
    # output transformed stroke mask file
    path_mask_rsfMRI = os.path.join(out_dir, subject + '_T2w_Stroke_mask_rsfMRI.nii.gz')
    # output transformed peri-infarct mask file
    path_peri_rsfMRI = os.path.join(out_dir, subject + '_T2w_peri_mask_rsfMRI.nii.gz')
    pvr.check_args(pt.proc_out_dir, raw_dir, subject, expno_T2w, procno_T2w)
    pvr.check_args(pt.proc_out_dir, raw_dir, subject, expno_rsfMRI, procno_rsfMRI)
    # T2w data
    pv = pvr.ParaVision(os.path.join(pt.proc_out_dir, timepoint, group), raw_dir, subject, expno_T2w, procno_T2w)
    pv.read_2dseq(map_raw=False, map_pv6=False, roll_fg=False, squeeze=False, compact=False, swap_vd=False, scale=1.0)
    #pv.save_nifti(ftype='NIFTI_GZ')
    matrix_T2w, matrix_T2w_inv = pv.get_matrix()
    data_T2w = pv.nifti_image.get_data()
    #data_dims_T2w = pv.data_dims[:3]
    #data_type_T2w = pv.data_type
    voxel_dims_T2w = pv.voxel_dims[:3]
    #voxel_unit_T2w = pv.voxel_unit
    # rsfMRI data
    pv = pvr.ParaVision(os.path.join(pt.proc_out_dir, timepoint, group), raw_dir, subject, expno_rsfMRI, procno_rsfMRI)
    pv.read_2dseq(map_raw=False, map_pv6=False, roll_fg=False, squeeze=False, compact=False, swap_vd=False, scale=1.0)
    #pv.save_nifti(ftype='NIFTI_GZ')
    matrix_rsfMRI, matrix_rsfMRI_inv = pv.get_matrix()
    #data_rsfMRI = np.mean(pv.nifti_image.get_data(), axis=3)
    data_dims_rsfMRI = pv.data_dims[:3]
    #data_type_rsfMRI = pv.data_type
    voxel_dims_rsfMRI = pv.voxel_dims[:3]
    #voxel_unit_rsfMRI = pv.voxel_unit
    # transformation matrix (both directions, saved for reference)
    matrix_T2w_rsfMRI = np.dot(matrix_rsfMRI_inv, matrix_T2w)
    matrix_rsfMRI_T2w = np.dot(matrix_T2w_inv, matrix_rsfMRI)
    pt.save_matrix(os.path.join(out_dir, subject + '_T2w_rsfMRI.mat'), matrix_T2w_rsfMRI)
    pt.save_matrix(os.path.join(out_dir, subject + '_rsfMRI_T2w.mat'), matrix_rsfMRI_T2w)
    # save rsfMRI data as NIfTI file
    #pt.save_data(np.rot90(data_rsfMRI, k=2, axes=(0, 2)), voxel_dims_rsfMRI, path_rsfMRI, dtype=None)
    # save transformed T2w data as NIfTI file (trilinear interpolation)
    data_T2w_rsfMRI = ax.xfm_serial(data_T2w, matrix_rsfMRI_T2w, data_dims_rsfMRI, voxel_dims_rsfMRI, voxel_dims_T2w, interp=1, inverse=True)
    pt.save_data(np.rot90(data_T2w_rsfMRI, k=2, axes=(0, 2)), voxel_dims_rsfMRI, path_T2w_rsfMRI, dtype=None)
    # save transformed T2w atlas labels as NIfTI file (nearest neighbour keeps label IDs intact)
    data_anno, voxel_dims_anno = pt.read_data(path_in_anno)
    data_anno_rsfMRI = ax.xfm_serial(np.rot90(data_anno, k=2, axes=(0, 2)), matrix_rsfMRI_T2w, data_dims_rsfMRI, voxel_dims_rsfMRI, voxel_dims_anno, interp=0, inverse=True)
    pt.save_data(np.rot90(data_anno_rsfMRI, k=2, axes=(0, 2)), voxel_dims_rsfMRI, path_anno_rsfMRI, dtype=None)
    # save transformed T2w stroke mask as NIfTI file (only present for stroked subjects)
    if os.path.isfile(path_in_mask):
        data_mask, voxel_dims_mask = pt.read_data(path_in_mask)
        data_mask_rsfMRI = ax.xfm_serial(np.rot90(data_mask, k=2, axes=(0, 2)), matrix_rsfMRI_T2w, data_dims_rsfMRI, voxel_dims_rsfMRI, voxel_dims_mask, interp=0, inverse=True)
        pt.save_data(np.rot90(data_mask_rsfMRI, k=2, axes=(0, 2)), voxel_dims_rsfMRI, path_mask_rsfMRI, dtype=None)
    # save transformed T2w peri-infarct mask as NIfTI file
    data_peri, voxel_dims_peri = pt.read_data(path_in_peri)
    data_peri_rsfMRI = ax.xfm_serial(np.rot90(data_peri, k=2, axes=(0, 2)), matrix_rsfMRI_T2w, data_dims_rsfMRI, voxel_dims_rsfMRI, voxel_dims_peri, interp=0, inverse=True)
    pt.save_data(np.rot90(data_peri_rsfMRI, k=2, axes=(0, 2)), voxel_dims_rsfMRI, path_peri_rsfMRI, dtype=None)
def xfm_T2w_DTI(raw_dir, timepoint_P7, timepoint, group, subject, expno_T2w, expno_DTI, procno_T2w, procno_DTI):
    """Transform one subject's T2w-space images into native DTI space.

    Mirror of xfm_T2w_rsfMRI(): builds matrix_DTI_T2w =
    inv(matrix_T2w) . matrix_DTI from the ParaVision visu_pars geometry of
    both raw data sets and applies its inverse (ax.xfm_serial) to the T2w
    image, atlas labels, stroke mask (if present) and peri-infarct mask.
    Results go to the subject's DTI output folder.

    Silently returns when any experiment/processing number is None.
    """
    if (expno_T2w is None) or (expno_DTI is None) or (procno_T2w is None) or (procno_DTI is None):
        return
    in_dir = os.path.join(pt.proc_in_dir, timepoint, group, subject, 'T2w')
    mask_dir = os.path.join(pt.proc_out_dir, timepoint, group, subject, 'T2w')
    out_dir = os.path.join(pt.proc_out_dir, timepoint, group, subject, 'DTI')
    if not os.path.isdir(in_dir):
        sys.exit("Error: '%s' is not an existing directory." % (in_dir,))
    if not os.path.isdir(mask_dir):
        sys.exit("Error: '%s' is not an existing directory." % (mask_dir,))
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    # input T2w atlas labels file
    path_in_anno = os.path.join(in_dir, subject + 'BiasBet_AnnorsfMRI.nii.gz')
    if not os.path.isfile(path_in_anno):
        sys.exit("Error: '%s' is not a regular file." % (path_in_anno,))
    # input T2w stroke mask file (input tree for the timepoint_P7 session, output tree otherwise)
    if timepoint == timepoint_P7:
        path_in_mask = os.path.join(in_dir, subject + 'Stroke_mask.nii.gz')
    else:
        path_in_mask = os.path.join(mask_dir, subject + 'Stroke_mask.nii.gz')
    # input T2w peri-infarct mask file
    path_in_peri = os.path.join(mask_dir, subject + '_peri_mask_m3_n15.nii.gz')
    if not os.path.isfile(path_in_peri):
        sys.exit("Error: '%s' is not a regular file." % (path_in_peri,))
    # output DTI file
    #path_DTI = os.path.join(out_dir, subject + '_DTI.nii.gz')
    # output transformed T2w file
    path_T2w_DTI = os.path.join(out_dir, subject + '_T2w_DTI.nii.gz')
    # output transformed atlas labels file
    path_anno_DTI = os.path.join(out_dir, subject + '_T2w_Anno_DTI.nii.gz')
    # output transformed stroke mask file
    path_mask_DTI = os.path.join(out_dir, subject + '_T2w_Stroke_mask_DTI.nii.gz')
    # output transformed peri-infarct mask file
    path_peri_DTI = os.path.join(out_dir, subject + '_T2w_peri_mask_DTI.nii.gz')
    pvr.check_args(pt.proc_out_dir, raw_dir, subject, expno_T2w, procno_T2w)
    pvr.check_args(pt.proc_out_dir, raw_dir, subject, expno_DTI, procno_DTI)
    # T2w data
    pv = pvr.ParaVision(os.path.join(pt.proc_out_dir, timepoint, group), raw_dir, subject, expno_T2w, procno_T2w)
    pv.read_2dseq(map_raw=False, map_pv6=False, roll_fg=False, squeeze=False, compact=False, swap_vd=False, scale=1.0)
    #pv.save_nifti(ftype='NIFTI_GZ')
    matrix_T2w, matrix_T2w_inv = pv.get_matrix()
    data_T2w = pv.nifti_image.get_data()
    #data_dims_T2w = pv.data_dims[:3]
    #data_type_T2w = pv.data_type
    voxel_dims_T2w = pv.voxel_dims[:3]
    #voxel_unit_T2w = pv.voxel_unit
    # DTI data
    pv = pvr.ParaVision(os.path.join(pt.proc_out_dir, timepoint, group), raw_dir, subject, expno_DTI, procno_DTI)
    pv.read_2dseq(map_raw=False, map_pv6=False, roll_fg=False, squeeze=False, compact=False, swap_vd=False, scale=1.0)
    #pv.save_nifti(ftype='NIFTI_GZ')
    matrix_DTI, matrix_DTI_inv = pv.get_matrix()
    #data_DTI = np.mean(pv.nifti_image.get_data(), axis=3)
    data_dims_DTI = pv.data_dims[:3]
    #data_type_DTI = pv.data_type
    voxel_dims_DTI = pv.voxel_dims[:3]
    #voxel_unit_DTI = pv.voxel_unit
    # transformation matrix (both directions, saved for reference)
    matrix_T2w_DTI = np.dot(matrix_DTI_inv, matrix_T2w)
    matrix_DTI_T2w = np.dot(matrix_T2w_inv, matrix_DTI)
    pt.save_matrix(os.path.join(out_dir, subject + '_T2w_DTI.mat'), matrix_T2w_DTI)
    pt.save_matrix(os.path.join(out_dir, subject + '_DTI_T2w.mat'), matrix_DTI_T2w)
    # save DTI data as NIfTI file
    #pt.save_data(np.rot90(data_DTI, k=2, axes=(0, 2)), voxel_dims_DTI, path_DTI, dtype=None)
    # save transformed T2w data as NIfTI file (trilinear interpolation)
    data_T2w_DTI = ax.xfm_serial(data_T2w, matrix_DTI_T2w, data_dims_DTI, voxel_dims_DTI, voxel_dims_T2w, interp=1, inverse=True)
    pt.save_data(np.rot90(data_T2w_DTI, k=2, axes=(0, 2)), voxel_dims_DTI, path_T2w_DTI, dtype=None)
    # save transformed T2w atlas labels as NIfTI file (nearest neighbour keeps label IDs intact)
    data_anno, voxel_dims_anno = pt.read_data(path_in_anno)
    data_anno_DTI = ax.xfm_serial(np.rot90(data_anno, k=2, axes=(0, 2)), matrix_DTI_T2w, data_dims_DTI, voxel_dims_DTI, voxel_dims_anno, interp=0, inverse=True)
    pt.save_data(np.rot90(data_anno_DTI, k=2, axes=(0, 2)), voxel_dims_DTI, path_anno_DTI, dtype=None)
    # save transformed T2w stroke mask as NIfTI file (only present for stroked subjects)
    if os.path.isfile(path_in_mask):
        data_mask, voxel_dims_mask = pt.read_data(path_in_mask)
        data_mask_DTI = ax.xfm_serial(np.rot90(data_mask, k=2, axes=(0, 2)), matrix_DTI_T2w, data_dims_DTI, voxel_dims_DTI, voxel_dims_mask, interp=0, inverse=True)
        pt.save_data(np.rot90(data_mask_DTI, k=2, axes=(0, 2)), voxel_dims_DTI, path_mask_DTI, dtype=None)
    # save transformed T2w peri-infarct mask as NIfTI file
    data_peri, voxel_dims_peri = pt.read_data(path_in_peri)
    data_peri_DTI = ax.xfm_serial(np.rot90(data_peri, k=2, axes=(0, 2)), matrix_DTI_T2w, data_dims_DTI, voxel_dims_DTI, voxel_dims_peri, interp=0, inverse=True)
    pt.save_data(np.rot90(data_peri_DTI, k=2, axes=(0, 2)), voxel_dims_DTI, path_peri_DTI, dtype=None)
def xfm_T2w_DTI_reg(timepoint_P7, timepoint, group, subject, expno_T2w, expno_DTI, procno_T2w, procno_DTI):
    """Resample T2w-space images into DTI space via a precomputed registration.

    Alternative to xfm_T2w_DTI(): instead of composing visu_pars matrices it
    calls NiftyReg's reg_resample with the affine matrix produced by the DTI
    registration step (<subject>SmoothMicoBettransMatrixAff.txt).

    Silently returns when any experiment/processing number is None.
    """
    if (expno_T2w is None) or (expno_DTI is None) or (procno_T2w is None) or (procno_DTI is None):
        return
    in_dir = os.path.join(pt.proc_in_dir, timepoint, group, subject, 'T2w')
    dti_dir = os.path.join(pt.proc_in_dir, timepoint, group, subject, 'DTI')
    mask_dir = os.path.join(pt.proc_out_dir, timepoint, group, subject, 'T2w')
    out_dir = os.path.join(pt.proc_out_dir, timepoint, group, subject, 'DTI')
    if not os.path.isdir(in_dir):
        sys.exit("Error: '%s' is not an existing directory." % (in_dir,))
    if not os.path.isdir(dti_dir):
        sys.exit("Error: '%s' is not an existing directory." % (dti_dir,))
    if not os.path.isdir(mask_dir):
        sys.exit("Error: '%s' is not an existing directory." % (mask_dir,))
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    # input T2w file
    path_in_T2w = os.path.join(in_dir, subject + 'BiasBet.nii.gz')
    if not os.path.isfile(path_in_T2w):
        sys.exit("Error: '%s' is not a regular file." % (path_in_T2w,))
    # input T2w atlas labels file
    path_in_anno = os.path.join(in_dir, subject + 'BiasBet_AnnorsfMRI.nii.gz')
    if not os.path.isfile(path_in_anno):
        sys.exit("Error: '%s' is not a regular file." % (path_in_anno,))
    # input T2w stroke mask file (input tree for the timepoint_P7 session, output tree otherwise)
    if timepoint == timepoint_P7:
        path_in_mask = os.path.join(in_dir, subject + 'Stroke_mask.nii.gz')
    else:
        path_in_mask = os.path.join(mask_dir, subject + 'Stroke_mask.nii.gz')
    # input T2w peri-infarct mask file
    path_in_peri = os.path.join(mask_dir, subject + '_peri_mask_m3_n15.nii.gz')
    if not os.path.isfile(path_in_peri):
        sys.exit("Error: '%s' is not a regular file." % (path_in_peri,))
    # DTI reference file
    path_ref = os.path.join(dti_dir, subject + 'SmoothMicoBet.nii.gz')
    if not os.path.isfile(path_ref):
        sys.exit("Error: '%s' is not a regular file." % (path_ref,))
    # transformation matrix
    path_xfm = os.path.join(dti_dir, subject + 'SmoothMicoBettransMatrixAff.txt')
    if not os.path.isfile(path_xfm):
        sys.exit("Error: '%s' is not a regular file." % (path_xfm,))
    # output transformed T2w file
    path_T2w_DTI = os.path.join(out_dir, subject + '_T2w_DTI.nii.gz')
    # output transformed atlas labels file
    path_anno_DTI = os.path.join(out_dir, subject + '_T2w_Anno_DTI.nii.gz')
    # output transformed stroke mask file
    path_mask_DTI = os.path.join(out_dir, subject + '_T2w_Stroke_mask_DTI.nii.gz')
    # output transformed peri-infarct mask file
    path_peri_DTI = os.path.join(out_dir, subject + '_T2w_peri_mask_DTI.nii.gz')
    # resample T2w (trilinear)
    command = 'reg_resample -ref %s -flo %s -res %s -trans %s -inter 1' % (path_ref, path_in_T2w, path_T2w_DTI, path_xfm)
    os.system(command)
    # resample atlas labels (nearest neighbour)
    command = 'reg_resample -ref %s -flo %s -res %s -trans %s -inter 0' % (path_ref, path_in_anno, path_anno_DTI, path_xfm)
    os.system(command)
    # resample stroke mask (nearest neighbour)
    # Fix: the mask exists only for stroked subjects, so guard like the
    # sibling functions do instead of handing a missing file to reg_resample.
    if os.path.isfile(path_in_mask):
        command = 'reg_resample -ref %s -flo %s -res %s -trans %s -inter 0' % (path_ref, path_in_mask, path_mask_DTI, path_xfm)
        os.system(command)
    # resample peri-infarct mask (nearest neighbour)
    command = 'reg_resample -ref %s -flo %s -res %s -trans %s -inter 0' % (path_ref, path_in_peri, path_peri_DTI, path_xfm)
    os.system(command)
def main():
    """Run the T2w -> rsfMRI and T2w -> DTI transforms for every study subject."""
    # second entry of pt.timepoints: the session whose stroke mask still
    # lives in the processed input tree (presumably P7 — name suggests so)
    timepoint_P7 = pt.timepoints[1]
    procno = pt.procno
    for index_t, timepoint in enumerate(pt.timepoints):
        for index_g, group in enumerate(pt.groups):
            # raw data directory
            raw_dir = os.path.join(pt.raw_in_dir, group, timepoint)
            for index_s, subject in enumerate(pt.study[index_t][index_g]):
                if subject is not None:
                    expno_T2w = pt.expno_T2w[index_t][index_g][index_s]
                    expno_rsfMRI = pt.expno_rsfMRI[index_t][index_g][index_s]
                    expno_DTI = pt.expno_DTI[index_t][index_g][index_s]
                    xfm_T2w_rsfMRI(raw_dir, timepoint_P7, timepoint, group, subject, expno_T2w, expno_rsfMRI, procno, procno)
                    xfm_T2w_DTI(raw_dir, timepoint_P7, timepoint, group, subject, expno_T2w, expno_DTI, procno, procno)
                    #xfm_T2w_DTI_reg(timepoint_P7, timepoint, group, subject, expno_T2w, expno_DTI, procno, procno)
if __name__ == '__main__':
    main()
| Python |
3D | Aswendt-Lab/AIDAmri | bin/5.1_ROI_analysis/create_seed_rois.py | .py | 6,520 | 182 | '''
Created on 20.08.2020
Author:
Michael Diedenhofen
Max Planck Institute for Metabolism Research, Cologne
'''
from __future__ import print_function
# Python 2/3 compatibility: expose the lazy integer-range iterator as
# `zrange` (xrange on Python 2, range on Python 3).
try:
    zrange = xrange
except NameError:
    zrange = range
import os
import sys
import numpy as np
import nibabel as nib
import proc_tools as pt
def create_rois_1(iatlas, labels, labels_hdr, labels_data, datatype=None, preserve=False):
    """Build a 4D ROI stack (one volume per iatlas entry) by per-label masking.

    datatype selects the output type (2: uint8, 4: int16, 8: int32,
    16: float32); any other value falls back to the first atlas header's
    dtype.  With preserve=True the label values are kept, otherwise the
    ROIs are binary (1 inside, 0 outside).
    """
    dtype_map = {2: np.uint8, 4: np.int16, 8: np.int32, 16: np.float32}
    rois_dtype = dtype_map.get(datatype, labels_hdr[0].get_data_dtype())
    base_shape = labels_hdr[0].get_data_shape()
    rois = np.zeros(base_shape + (len(iatlas),), dtype=rois_dtype)
    for k, index in enumerate(iatlas):
        volume = labels_data[index - 1]
        for label in labels[k]:
            rois[:, :, :, k][volume == label] = label if preserve else 1
    return rois
def create_rois_2(iatlas, labels, labels_hdr, labels_data, datatype=None, preserve=False):
    """Build a 4D ROI stack by collecting np.where() indices per label.

    Same contract as create_rois_1: datatype selects the output type
    (2: uint8, 4: int16, 8: int32, 16: float32, else header dtype);
    preserve=True keeps label values, otherwise ROIs are binary.
    """
    dtype_map = {2: np.uint8, 4: np.int16, 8: np.int32, 16: np.float32}
    rois_dtype = dtype_map.get(datatype, labels_hdr[0].get_data_dtype())
    base_shape = labels_hdr[0].get_data_shape()
    rois = np.zeros(base_shape + (len(iatlas),), dtype=rois_dtype)
    for k, index in enumerate(iatlas):
        volume = labels_data[index - 1]
        hits = [np.where(volume == label) for label in labels[k]]
        indices = tuple(np.hstack(hits))
        rois[:, :, :, k][indices] = volume[indices] if preserve else 1
    return rois
def create_rois_3(iatlas, labels, labels_hdr, labels_data, datatype=None, preserve=False):
    """Build a 4D ROI stack by accumulating a boolean mask per iatlas entry.

    Same contract as create_rois_1/2: datatype selects the output type
    (2: uint8, 4: int16, 8: int32, 16: float32, else the first header's
    dtype); preserve=True keeps label values, otherwise ROIs are binary.
    """
    if datatype == 2:
        labels_dtype = np.uint8
    elif datatype == 4:
        labels_dtype = np.int16
    elif datatype == 8:
        labels_dtype = np.int32
    elif datatype == 16:
        labels_dtype = np.float32
    else:
        labels_dtype = labels_hdr[0].get_data_dtype()
    labels_shape = labels_hdr[0].get_data_shape()
    # Fix: use the builtin bool — the np.bool alias was removed in NumPy >= 1.24.
    mask = np.zeros(labels_shape, dtype=bool)
    rois = np.zeros(labels_shape + (len(iatlas),), dtype=labels_dtype)
    if preserve:
        for k, index in enumerate(iatlas):
            data = labels_data[index-1]
            for label in labels[k]:
                mask = np.logical_or(mask, data == label)
            rois[:, :, :, k] = data * mask
            mask[:] = False
    else:
        for k, index in enumerate(iatlas):
            data = labels_data[index-1]
            for label in labels[k]:
                mask = np.logical_or(mask, data == label)
            rois[:, :, :, k] = mask
            mask[:] = False
    return rois
def create_rois(path_labels, list_atlas, datatype=None, preserve=False):
    """Load atlas label volumes and build the 4D seed-ROI stack.

    path_labels : labels text file (atlas index, label IDs per row)
    list_atlas  : paths of 3D atlas label volumes (NIfTI); all must share one shape
    datatype    : optional NIfTI datatype code for the output (2/4/8/16)
    preserve    : keep label values instead of writing binary ROIs

    Returns (labels_hdr, rois): the list of NIfTI headers and the 4D ROI array.
    Exits via sys.exit() on dimension/shape mismatches.
    """
    # read labels text file
    iatlas, labels = pt.read_labels(path_labels)
    # read 3D atlas labels files (NIfTI)
    labels_img = []
    labels_hdr = []
    labels_data = []
    labels_shape = []
    for k, path_atlas in enumerate(list_atlas):
        #print("Atlas%d:" % (k + 1,), path_atlas)
        labels_img.append(nib.load(path_atlas))
        labels_data.append(labels_img[k].get_data())
        #print("labels_data[%d].dtype:" % (k,), labels_data[k].dtype)
        #print("labels_data[%d].shape:" % (k,), labels_data[k].shape)
        labels_hdr.append(labels_img[k].get_header())
        labels_shape.append(labels_hdr[k].get_data_shape())
        #print("labels_shape[%d]:" % (k,), labels_shape[k])
        if len(labels_shape[k]) != 3:
            sys.exit("Error: Atlas%d labels %s don't have three dimensions." % (k, str(labels_shape[k])))
    # all atlases must match the first one's shape
    for k in zrange(1, len(labels_shape)):
        if labels_shape[0] != labels_shape[k]:
            sys.exit("Error: Atlas1 labels %s and Atlas%d labels %s don't have the same shape." % (str(labels_shape[0]), k, str(labels_shape[k])))
    # create atlas labels hyperstack (4D)
    rois = create_rois_1(iatlas, labels, labels_hdr, labels_data, datatype=datatype, preserve=preserve)
    return (labels_hdr, rois)
if __name__ == '__main__':
    import argparse
    # Command-line driver: build a 4D seed-ROI file from atlas label volumes.
    parser = argparse.ArgumentParser(description='Create atlas seed ROIs.')
    parser.add_argument('in_labels', help='input labels text file name')
    parser.add_argument('in_atlas', nargs='+', help='input 3D atlas labels file names (NIfTI)')
    parser.add_argument('-o', '--out_rois', help='output 4D seed ROIs file name')
    parser.add_argument('-p', '--preserve', action='store_true', help='preserve label values')
    parser.add_argument('-t', '--datatype', type=int, choices=[2, 4, 8, 16], help='data type (2: char, 4: short, 8: int, 16: float)')
    args = parser.parse_args()
    ext_text = '.txt'
    ext_nifti = '.nii.gz'
    file_name = 'Seed_ROIs'
    # input labels text file with atlas index and seed regions (labels) in each line
    # Atlas (1 or 2), Label 1, Label 2, ...
    path_labels = args.in_labels
    if not os.path.isfile(path_labels):
        sys.exit("Error: '%s' is not a regular file." % (path_labels,))
    # input atlas labels files (NIfTI)
    list_atlas = args.in_atlas
    for path_atlas in list_atlas:
        if not os.path.isfile(path_atlas):
            sys.exit("Error: '%s' is not a regular file." % (path_atlas,))
    # output seed ROIs file (defaults to Seed_ROIs next to the first atlas)
    path_rois = os.path.join(os.getcwd(), args.out_rois) if args.out_rois is not None else os.path.join(os.path.dirname(list_atlas[0]), file_name)
    # get date and time
    #print(pt.get_date())
    # create atlas labels hyperstack (4D)
    # Bug fix: pass the whole atlas list.  The original passed `path_atlas`,
    # the single path left over from the loop above, which made create_rois()
    # iterate over the characters of that one string.
    labels_hdr, rois = create_rois(path_labels, list_atlas, datatype=args.datatype, preserve=args.preserve)
    # save atlas labels file
    voxel_dims = labels_hdr[0].get_zooms()
    pt.save_data(rois, voxel_dims, path_rois, dtype=None)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/5.1_ROI_analysis/fsl_mean_ts.py | .py | 2,881 | 92 | '''
Created on 31.08.2020
Author:
Michael Diedenhofen
Max Planck Institute for Metabolism Research, Cologne
'''
from __future__ import print_function
# Python 2/3 compatibility: expose the lazy integer-range iterator as
# `zrange` (xrange on Python 2, range on Python 3).
try:
    zrange = xrange
except NameError:
    zrange = range
import os
import sys
import numpy as np
import scipy.io as sio
import proc_tools as pt
def mean_ts(path_data, path_mask, path_out, label_names):
    """Average the 4D data time course within each mask volume.

    Writes the (time x mask) matrix as a text file (path_out) and as a
    MATLAB .mat file (path_out + '.mat') holding the matrix and label names.

    path_data   : 4D NIfTI data (x, y, slc, rep)
    path_mask   : 4D NIfTI mask stack (x, y, slc, msk); voxels > 0 belong to a mask
    path_out    : output text file name
    label_names : region names stored alongside the matrix in the .mat file
    """
    # Read 4D data file (NIfTI)
    data, _ = pt.read_data(path_data)
    file_data = os.path.basename(path_data)
    if len(data.shape) != 4:
        sys.exit("Error: %s is not 4D shape %s." % (file_data, str(data.shape)))
    # Read 4D mask file (NIfTI)
    mask, _ = pt.read_data(path_mask)
    file_mask = os.path.basename(path_mask)
    if len(mask.shape) != 4:
        # Bug fix: the placeholder here was '$s', so the '%' formatting raised
        # "not all arguments converted" instead of printing this message.
        sys.exit("Error: %s is not 4D shape %s." % (file_mask, str(mask.shape)))
    if data.shape[:3] != mask.shape[:3]:
        sys.exit("Error: %s %s and %s %s are not the same shape." % (file_data, str(data.shape[:3]), file_mask, str(mask.shape[:3])))
    #path_out_mat = path_out + '.mat'
    path_out_mat = os.path.join(os.path.dirname(path_out), os.path.basename(path_out) + '.mat')
    # one mean time course per mask; empty masks stay all-zero
    m = np.zeros((mask.shape[3], data.shape[3]), dtype=data.dtype)
    for k in zrange(mask.shape[3]):
        msk = np.array(mask[:, :, :, k]) > 0
        if data[msk, :].size > 0:
            m[k] = np.mean(data[msk, :], 0)
    mT = np.transpose(m)
    # format each value with four decimals for the text output
    s = [['%.4f' % (x,) for x in line] for line in mT.tolist()]
    pt.save_text(path_out, s)
    sio.savemat(path_out_mat, dict([('matrix', mT), ('label', label_names)]))
    print(path_out_mat)
if __name__ == '__main__':
    import argparse
    # Command-line driver: extract per-mask mean time courses from 4D data.
    parser = argparse.ArgumentParser()
    parser.add_argument('in_data', help='input 4D data (x, y, slc, rep)')
    parser.add_argument('in_mask', help='input 4D mask (x, y, slc, msk)')
    parser.add_argument('-o', '--out_text', help='output text file', required=True)
    args = parser.parse_args()
    # input data (NIfTI)
    if not os.path.isfile(args.in_data):
        sys.exit("Error: '%s' is not a regular file." % (args.in_data,))
    # input mask (NIfTI)
    if not os.path.isfile(args.in_mask):
        sys.exit("Error: '%s' is not a regular file." % (args.in_mask,))
    # output text file
    # (--out_text is required, so the fallback branch is effectively dead code)
    path_out = args.out_text if args.out_text is not None else os.path.abspath(os.path.join(args.in_data, os.pardir, 'MasksTCs.' + os.path.split(args.in_data)[1]))
    # strip a NIfTI extension before appending '.txt'
    if path_out.endswith('.nii.gz'):
        path_out = path_out[:-7]
    elif path_out.endswith('.nii'):
        path_out = path_out[:-4]
    path_out = path_out + '.txt'
    label_names_2000 = pt.read_text(pt.path_label_names_2000)
    #print(get_date())
    mean_ts(args.in_data, args.in_mask, path_out, label_names_2000)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/5.1_ROI_analysis/apply_xfm.py | .py | 5,919 | 178 | '''
Created on 20.08.2020
Author:
Michael Diedenhofen
Max Planck Institute for Metabolism Research, Cologne
'''
from __future__ import print_function
# Python 2/3 compatibility: expose the lazy integer-range iterator as
# `zrange` (xrange on Python 2, range on Python 3).
try:
    zrange = xrange
except NameError:
    zrange = range
import os
import sys
import numpy as np
import proc_tools as pt
def get_mat_flip_x_z(data_dims, voxel_dims):
    """Return a 4x4 matrix that flips the x- and z-axes within the volume extent (mm)."""
    mat = np.diag([-1.0, 1.0, -1.0, 1.0])
    mat[0, 3] = (data_dims[0] - 1) * voxel_dims[0]
    mat[2, 3] = (data_dims[2] - 1) * voxel_dims[2]
    return mat
def get_mat_voxel_to_world(voxel_dims, origin=(0.0, 0.0, 0.0)):
    """Return a 4x4 affine mapping voxel indices to world coordinates.

    Scales by the voxel size and shifts so that `origin` (given in voxel
    units) maps to the world origin.
    """
    mat = np.diag(np.array([voxel_dims[0], voxel_dims[1], voxel_dims[2], 1.0], dtype=np.float64))
    for axis in range(3):
        mat[axis, 3] = -origin[axis] * voxel_dims[axis]
    return mat
def get_mat_world_to_voxel(voxel_dims, origin=(0.0, 0.0, 0.0)):
    """Return a 4x4 affine mapping world coordinates to voxel indices.

    This is the exact inverse of get_mat_voxel_to_world(): divide by the
    voxel size, then shift back to `origin` (given in voxel units).

    Bug fix: the translation used to be origin[i] * voxel_dims[i], which
    is not the inverse of get_mat_voxel_to_world() for a non-zero origin;
    the correct offset is origin[i].  All callers in this file use the
    default origin (0, 0, 0), so their results are unchanged.
    """
    mat = np.zeros((4, 4), dtype=np.float64)
    mat[0, 0] = 1.0 / voxel_dims[0]
    mat[1, 1] = 1.0 / voxel_dims[1]
    mat[2, 2] = 1.0 / voxel_dims[2]
    mat[0, 3] = origin[0]
    mat[1, 3] = origin[1]
    mat[2, 3] = origin[2]
    mat[3, 3] = 1
    return mat
def make_matrix(lines):
    """Parse the first four whitespace-separated text lines into a 4x4
    float64 matrix (one line per matrix row)."""
    mat = np.zeros((4, 4), dtype=np.float64)
    for row in range(4):
        tokens = lines[row].split()
        mat[row] = np.array(tokens, dtype=np.float64)
    return mat
def matrix_to_text(mat):
    """Format a matrix as text: one row per line, values separated by
    single spaces, each line keeping a trailing space."""
    rows = []
    for row in mat:
        rows.append(' '.join(str(value) for value in row) + ' ')
    return '\n'.join(rows)
def interp_nearest(data, v):
    """Nearest-neighbour interpolation of `data` at continuous voxel coords.

    data : 3D ndarray to sample from.
    v : (3, N) array of continuous voxel coordinates.
    Returns an (N,) array of sampled values; coordinates outside the valid
    range of `data` yield 0 (the in-bounds mask is multiplied into the
    result).

    Bug fix: np.bool was removed in NumPy 1.24 -- use the builtin bool.
    """
    vb = np.ones(v.shape[1], dtype=bool)
    v0 = np.int32(np.floor(v))
    for i in range(3):
        v1 = v0[i]
        n = data.shape[i]
        # Flag out-of-range coordinates, then clamp the index so the
        # fancy indexing below stays valid.
        vb[np.logical_or(v1 < 0, v1 >= n - 1)] = 0
        v0[i, v1 >= n - 1] = n - 2
    v0[v0 < 0] = 0
    # Round to the nearest voxel: step up where the fraction exceeds 0.5.
    d0 = v - v0
    v0 = v0 + np.int32(d0 > 0.5)
    return data[v0[0], v0[1], v0[2]] * vb
def interp_trilinear(data, v):
    """Trilinear interpolation of `data` at continuous voxel coordinates.

    data : 3D ndarray to sample from.
    v : (3, N) array of continuous voxel coordinates.
    Returns an (N,) array of interpolated values; coordinates outside the
    valid range of `data` yield 0.

    Bug fix: np.bool was removed in NumPy 1.24 -- use the builtin bool.
    """
    vb = np.ones(v.shape[1], dtype=bool)
    v0 = np.int32(np.floor(v))
    for i in range(3):
        v1 = v0[i]
        n = data.shape[i]
        # Flag out-of-range coordinates, then clamp so both v0 and v0+1
        # are valid indices for the corner lookups below.
        vb[np.logical_or(v1 < 0, v1 >= n - 1)] = 0
        v0[i, v1 >= n - 1] = n - 2
    v0[v0 < 0] = 0
    v1 = v0 + 1
    # processing x: blend the 8 corners pairwise along x.
    d0 = v[0] - v0[0]
    d1 = 1 - d0
    c00 = data[v0[0], v0[1], v0[2]] * d1 + data[v1[0], v0[1], v0[2]] * d0
    c10 = data[v0[0], v1[1], v0[2]] * d1 + data[v1[0], v1[1], v0[2]] * d0
    c01 = data[v0[0], v0[1], v1[2]] * d1 + data[v1[0], v0[1], v1[2]] * d0
    c11 = data[v0[0], v1[1], v1[2]] * d1 + data[v1[0], v1[1], v1[2]] * d0
    # processing y
    d0 = v[1] - v0[1]
    c00 = (c10 - c00) * d0 + c00
    c01 = (c11 - c01) * d0 + c01
    # processing z
    d0 = v[2] - v0[2]
    c00 = (c01 - c00) * d0 + c00
    return c00 * vb
def xfm_serial(data, matrix, xfm_dims, voxel_dims_xfm, voxel_dims, interp=0, inverse=False, origin=(0.0, 0.0, 0.0)):
    """Resample a 3D volume through a 4x4 world-space transformation.

    data : source volume to sample from.
    matrix : 4x4 transformation; unless `inverse` is True it is inverted
        first, because resampling maps each OUTPUT voxel back into the
        source volume (pull interpolation).
    xfm_dims : (nx, ny, nz) shape of the output volume.
    voxel_dims_xfm : voxel size of the output volume.
    voxel_dims : voxel size of the source volume.
    interp : 0 = nearest neighbour, 1 = trilinear.
    origin : voxel-space origin passed to the voxel<->world helpers.
    Returns the resampled volume (Fortran order, same dtype as `data`).
    """
    interpolation = [interp_nearest, interp_trilinear][interp]
    if not inverse:
        matrix = np.linalg.inv(matrix)
    #print("inverse matrix:", matrix.shape); print(matrix)
    nx, ny = (xfm_dims[0], xfm_dims[1])
    v2w = get_mat_voxel_to_world(voxel_dims_xfm, origin)
    w2v = get_mat_world_to_voxel(voxel_dims, origin)
    #print("v2w:", v2w.shape); print(v2w)
    #print("w2v:", w2v.shape); print(w2v)
    # Homogeneous voxel coordinates of one output slice; after flipud the
    # rows appear in (x, y, z, w) order -- z is set per slice, w to 1.
    v = np.flipud(np.indices((1, 1, ny, nx), dtype=np.float32)).reshape(4, nx * ny)
    v[3] = 1
    #print("v:", v.shape); print(v)
    #xfm = np.ndarray(shape=xfm_dims, dtype=np.float32)
    xfm = np.zeros(xfm_dims, dtype=data.dtype, order='F')
    # Process the output volume one z-slice at a time to bound memory use.
    for slc in zrange(xfm_dims[2]):
        #print("Slice:", slc)
        v[2] = slc
        w = np.dot(v2w, v) # voxel to world
        xfm_v = np.dot(w2v, np.dot(matrix, w)) # world to voxel
        #xfm[:, :, slc] = interpolation(data, xfm_v[:3]).reshape(ny, nx).T
        xfm[:, :, slc] = interpolation(data, xfm_v[:3]).reshape(nx, ny, order='F').astype(data.dtype)
    return xfm
if __name__ == '__main__':
    import argparse

    # Command-line interface: resample a NIfTI volume with a 4x4 matrix.
    parser = argparse.ArgumentParser()
    parser.add_argument('in_data', help='input data file name')
    parser.add_argument('in_matrix', help='input matrix file name')
    parser.add_argument('-o', '--out_data', help='output data file name', required=True)
    parser.add_argument('-s', '--out_shape', help='output shape (e.g. "256,256,48")')
    parser.add_argument('-d', '--out_dims', help='output voxel dimensions (in mm)')
    parser.add_argument('-n', '--interp', type=int, choices=[0, 1], help='interpolation method (0: nearestneighbour, 1: trilinear)')
    parser.add_argument('-i', '--inverse', action='store_true', help='inverse transformation')
    args = parser.parse_args()

    # Both input files (NIfTI volume and matrix text) must exist.
    for path_in in (args.in_data, args.in_matrix):
        if not os.path.isfile(path_in):
            sys.exit("Error: '%s' is not a regular file." % (path_in,))

    # Optional overrides; trilinear interpolation is the default.
    out_shape = args.out_shape if args.out_shape is not None else ''
    out_dims = args.out_dims if args.out_dims is not None else ''
    interp = 1 if args.interp is None else args.interp

    # read input data and the transformation matrix
    data, voxel_dims = pt.read_data(args.in_data)
    matrix = make_matrix(pt.read_text(args.in_matrix))

    # Output geometry defaults to the input geometry.
    if out_shape:
        data_dims_xfm = tuple(int(v) for v in out_shape.split(','))
    else:
        data_dims_xfm = data.shape[:]
    if out_dims:
        voxel_dims_xfm = tuple(float(v) for v in out_dims.split(','))
    else:
        voxel_dims_xfm = voxel_dims[:]

    # transform input data and save it as a NIfTI file
    data_xfm = xfm_serial(data, matrix, data_dims_xfm, voxel_dims_xfm, voxel_dims, interp=interp, inverse=args.inverse)
    pt.save_data(data_xfm, voxel_dims_xfm, args.out_data, dtype=None)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/5.1_ROI_analysis/pv_parser.py | .py | 13,157 | 417 | '''
Created on 20.08.2020
Author:
Michael Diedenhofen
Max Planck Institute for Metabolism Research, Cologne
Read Bruker ParaVision JCAMP parameter files (e.g. acqp, method, visu_pars).
'''
from __future__ import print_function
VERSION = 'pv_parser.py v 1.0.2 20200820'
import re
import sys
import collections
import numpy as np
def strfind(string, sub):
    """Return the start indices of all non-overlapping occurrences of
    `sub` in `string` (empty list if `sub` is empty or longer than
    `string`)."""
    width = len(sub)
    if width == 0 or width > len(string):
        return []
    positions = []
    offset = string.find(sub)
    while offset >= 0:
        positions.append(offset)
        offset = string.find(sub, offset + width)
    return positions
def strtok(string, delimiters=None):
    """Split off the first token of `string` (MATLAB strtok style).

    Returns (token, remainder): leading delimiters are skipped, the token
    runs up to the next delimiter, and the remainder starts at that
    delimiter.  Default delimiters are the ASCII whitespace characters.
    """
    if not string:
        return ('', '')
    if delimiters is None: # whitespace characters
        delimiters = list(map(chr, list(range(9, 14)) + [32]))
    length = len(string)
    pos = 0
    # Skip leading delimiters; all-delimiter input yields two empty strings.
    while string[pos] in delimiters:
        pos += 1
        if pos >= length:
            return ('', '')
    begin = pos
    # Consume token characters up to the next delimiter or end of string.
    while string[pos] not in delimiters:
        pos += 1
        if pos >= length:
            break
    return (string[begin:pos], string[pos:length])
def extract_jcamp_strings(string, get_all=True):
    """Extract text enclosed in <...> from a JCAMP value.

    With get_all=True return the list of all bracketed strings (possibly
    empty); otherwise return just the first one, or None if there is no
    match.  A None input yields None.
    """
    if string is None:
        return None
    if get_all:
        return re.findall(r'<(.*?)>', string)
    match = re.search(r'<(.*?)>', string)
    return None if match is None else match.group(1)
def extract_unit_string(string):
    """Return the text inside the first [...] of `string`; when there is
    no bracketed unit, return `string` unchanged (None stays None)."""
    if string is None:
        return None
    match = re.search(r'\[(.*?)\]', string)
    return string if match is None else match.group(1)
def replace_jcamp_strings(string):
    """Replace every <...> substring with a placeholder '<#k>'.

    Returns (new_string, originals) where originals[k] is the bracketed
    substring the placeholder '<#k>' stands for.  An unterminated '<' is
    copied through verbatim.
    """
    parts = []
    originals = []
    cursor = 0
    while True:
        open_pos = string.find('<', cursor)
        if open_pos < 0:
            # No more brackets: copy the tail and stop.
            parts.append(string[cursor:])
            break
        parts.append(string[cursor:open_pos])
        close_pos = string.find('>', open_pos + 1)
        if close_pos < 0:
            # Unterminated bracket: keep the rest as-is.
            parts.append(string[open_pos:])
            break
        cursor = close_pos + 1
        parts.append('<#%d>' % (len(originals),))
        originals.append(string[open_pos:cursor])
    return (''.join(parts), originals)
def check_struct_list(values, str_list):
    """Convert a list of token strings to int or float when possible.

    If every token parses as int (or as float), return (converted, 0).
    Otherwise restore the JCAMP placeholder tokens '<#k>' in `values`
    from `str_list` (in place) and return (values, number_restored).
    Exits if a single token contains more than one placeholder.
    """
    all_int = True
    all_float = True
    for token in values:
        if all_int:
            try:
                int(token)
            except ValueError:
                all_int = False
            else:
                continue
        try:
            float(token)
        except ValueError:
            all_float = False
            break
    if all_int:
        return (list(map(int, values)), 0)
    if all_float:
        return (list(map(float, values)), 0)
    # Not numeric: put back any replaced JCAMP strings.
    remaining = len(str_list)
    if remaining > 0:
        for index, token in enumerate(values):
            ids = re.findall(r'<#(.*?)>', token)
            if len(ids) == 1:
                values[index] = str_list[int(ids[0])]
                remaining -= 1
                if remaining == 0:
                    break
            elif len(ids) > 1:
                sys.exit("Found more than one ID string in a value: %s" % (token,))
    return (values, len(str_list) - remaining)
def create_struct_list(string, str_list, restored):
    """Split one struct body into its comma-separated parts.

    string : struct text without the surrounding parentheses.
    str_list : replaced JCAMP strings, used to restore '<#k>' placeholders.
    restored : running count of placeholders restored so far.
    Returns (items, restored); each item is a scalar or a list of scalars,
    converted to int/float where possible.
    """
    if len(string) < 1:
        return ([], restored)
    # Split one struct in its parts (commas, tolerating surrounding spaces).
    #items = re.split(r'^ +| *, *| +$', string)
    items = re.split(r'(?:^ +| *),(?: *| +$)', string)
    #items = [x.strip(' ') for x in string.split(',')]
    for index, item in enumerate(items):
        #values = re.findall(r'[^\s]+', item)
        values = item.split(' ')
        #values = item.split()
        # Convert tokens to int/float and restore JCAMP placeholders.
        values, number = check_struct_list(values, str_list)
        if len(values) == 1:
            # A single value is stored unwrapped (not as a one-element list).
            items[index] = values[0]
        else:
            items[index] = values
        restored += number
    return (items, restored)
def push_list(level, obj_list, obj):
    """Append `obj` to the list nested `level` levels deep inside
    `obj_list`, following the last element at each level."""
    target = obj_list
    for _ in range(level):
        target = target[-1]
    target.append(obj)
def parse_struct(string, str_list):
    """Parse a (possibly nested) JCAMP struct value into nested lists.

    string : value text containing '(' ... ')' groups; JCAMP strings have
        already been replaced by '<#k>' placeholders.
    str_list : the replaced strings, used to restore the placeholders.
    Returns (obj_list, restored) where `restored` counts how many
    placeholders were put back.
    """
    level = 0
    restored = 0
    obj_list = []
    pos_start = string.find('(')
    if pos_start < 0:
        return (obj_list, restored)
    # Scan left to right between parenthesis boundaries.  start_left /
    # start_right record whether the previous / next boundary is '('.
    pos_left, start_left = (pos_start + 1, True)
    pos_start = string.find('(', pos_left)
    pos_stop = string.find(')', pos_left)
    while True:
        # Next boundary is whichever of '(' / ')' comes first; if neither
        # is found the remainder of the string closes the scan.
        if (pos_start >= pos_left) and (pos_stop >= pos_left):
            pos_right, start_right = (pos_start, True) if pos_start < pos_stop else (pos_stop, False)
        elif pos_start >= pos_left:
            pos_right, start_right = (pos_start, True)
        elif pos_stop >= pos_left:
            pos_right, start_right = (pos_stop, False)
        else:
            pos_right, start_right = (len(string), False)
        # Trim the segment and at most one leading/trailing comma.
        sub = string[pos_left:pos_right].strip(' ')
        if sub.startswith(','):
            sub = sub[1:].lstrip(' ')
        if sub.endswith(','):
            sub = sub[:-1].rstrip(' ')
        #print("sub:%d:%s:" % (len(sub), sub))
        items, restored = create_struct_list(sub, str_list, restored)
        if start_left:
            # Segment follows '(' -> it starts a nested list; descend one
            # level if the next boundary opens yet another group.
            push_list(level, obj_list, items)
            if start_right:
                level += 1
        else:
            # Segment follows ')' -> append its items at the current level;
            # a second ')' in a row closes one nesting level.
            for item in items:
                push_list(level, obj_list, item)
            if not start_right:
                level -= 1
        if pos_right >= len(string):
            break
        pos_left, start_left = (pos_right + 1, start_right)
        if start_left:
            pos_start = string.find('(', pos_left)
        else:
            pos_stop = string.find(')', pos_left)
    return (obj_list, restored)
def check_array_list(values):
    """Convert a list of token strings to a NumPy array: int32 if every
    token is an integer, float64 if every token is numeric, otherwise an
    object array of the raw strings."""
    def _all_parse(cast):
        # True when every token is accepted by `cast`.
        for token in values:
            try:
                cast(token)
            except ValueError:
                return False
        return True
    if _all_parse(int):
        return np.array(values, dtype=np.int32)
    if _all_parse(float):
        return np.array(values, dtype=np.float64)
    return np.array(values, dtype=object)
def get_array_values(label, sizes, data):
    """Convert the data text of an array-valued LDR into Python values.

    label : parameter name, used only in warnings/error messages.
    sizes : dimension sizes parsed from the '( ... )' size prefix.
    data : concatenated value text of the LDR.
    Returns a string, an array of strings, a (nested) struct list, or a
    numeric numpy array reshaped to `sizes`.
    """
    # Removing whitespaces at the edge of strings
    #data = data.replace('< ', '<')
    #data = data.replace(' >', '>')
    if data.startswith('<'): # Checking if array is a single string or an array of strings ...
        #data = data.replace('> <', '><')
        #values = re.findall(r'<(.*?)>', data)
        values = re.findall(r'<.*?>', data)
        if len(sizes) > 1:
            # Multi-dimensional string arrays keep the last axis flattened.
            values = np.array(values, dtype=object)
            if np.prod(sizes[:-1]) == values.size:
                values = values.reshape(sizes[:-1])
        elif len(values) == 1:
            # A single bracketed string is returned unwrapped.
            values = values[0]
    elif data.startswith('('): # ... or a struct or an array of structs ...
        if len(sizes) > 1:
            print("Warning: The sizes dimension is greater than 1 for the %s array of structs." % (label,), file=sys.stderr)
        # Protect JCAMP strings from the struct parser, then restore them.
        data, str_list = replace_jcamp_strings(data)
        values, restored = parse_struct(data, str_list)
        if len(str_list) != restored:
            print("%s:" % (label,), values)
            sys.exit("Not all replaced JCAMP strings are restored (%d of %d)." % (restored, len(str_list)))
    else: # ... or a simple array (most frequently numeric)
        values = re.findall(r'[^\s]+', data)
        #values = data.split()
        values = np.reshape(check_array_list(values), sizes)
    return values
def read_param_file(filename):
    """Read a Bruker ParaVision JCAMP parameter file.

    filename : path to a JCAMP parameter file (e.g. acqp, method,
        visu_pars).
    Returns (header, params), two OrderedDicts: `header` maps the '##'
    header labels (plus Path/Process/Date/HeaderN comment entries) to
    strings; `params` maps each '##$' parameter label to an int, float,
    string, numpy array or nested struct list.
    Exits via sys.exit() on a missing file, a malformed header, an
    embedded '$$' comment inside an LDR, or a missing END statement.
    """
    # Open parameter file
    try:
        fid = open(filename, 'r')
    except IOError as V:
        if V.errno == 2: # ENOENT: file does not exist
            sys.exit("Cannot open parameter file %s" % (filename,))
        else:
            raise
    # Generate header information: scan lines until the first '##$' LDR.
    header = collections.OrderedDict()
    weekdays = ('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun')
    line = ''
    for index, line in enumerate(fid):
        line = line.lstrip(' \t').rstrip('\r\n')
        if line.startswith('##$'):
            break
        #print("line:%d:%s:" % (len(line), line))
        if line.startswith('##'): # It's a variable with ##
            # Retrieve the Labeled Data Record
            label, value = strtok(line, delimiters='=')
            label = strtok(label, delimiters='#')[0].strip()
            value = strtok(value, delimiters='=')[0].strip()
            # Save value without $
            #value = strtok(value, delimiters='$')[0].strip()
            header[label] = value
        elif line.startswith('$$'): # It's a comment
            comment = strtok(line, delimiters='$')[0].strip()
            if comment.startswith('/'):
                header['Path'] = comment
            elif comment.startswith('process'):
                header['Process'] = comment[8:]
            else:
                # Heuristic date detection: weekday prefix or 19xx/20xx
                # with two dashes in the first ten characters.
                pos = strfind(comment[:10], '-')
                if (comment[:3] in weekdays) or ((comment[:2] in ('19', '20')) and (len(pos) == 2)):
                    header['Date'] = comment
                else:
                    header['Header' + str(index + 1)] = comment
    # Check if using a supported version of JCAMP file format
    if 'JCAMPDX' in header:
        version = float(header['JCAMPDX'])
    elif 'JCAMP-DX' in header:
        version = float(header['JCAMP-DX'])
    else:
        sys.exit("The file header is not correct.")
    if (version != 4.24) and (version != 5):
        print("Warning: JCAMP version %s is not supported (%s)." % (version, filename), file=sys.stderr)
    params = collections.OrderedDict()
    # Loop for reading parameters; `line` holds the current '##' LDR line.
    while line.lstrip(' \t').startswith('##'):
        result = re.search(r'##(.*)=(.*)', line)
        result = [] if result is None else list(result.groups())
        # Checking if label present and removing proprietary tag
        try:
            label = result[0]
        except:
            label = None
        else:
            if label.startswith('$'):
                label = label[1:]
        #print("label:%d:%s:" % (len(label), label))
        # Checking if value present otherwise value is set to empty string
        try:
            value = result[1]
        except:
            value = ''
        #print("value:%d:%s:" % (len(value), value))
        flag_comment = True if '$$' in line else False
        line = ''
        data = []
        # Collect continuation lines of this LDR until the next '##' line.
        for line in fid:
            if line.lstrip(' \t').startswith('##'):
                break
            if not line.lstrip(' \t').startswith('$$'): # Skip comment line
                if (not flag_comment) and ('$$' in line):
                    flag_comment = True
                #data.append(line.rstrip('\\\r\n'))
                data.append(line.rstrip('\r\n'))
                #print("line:%d:%s:" % (len(data[-1]), data[-1]))
        # Create data string
        data = ''.join(data)
        #print("data:%d:%s:" % (len(data), data))
        if flag_comment:
            sys.exit("Found JCAMP comment ('$$') in LDR %s." % (label,))
        # Checking for END tag
        if (label is None) or (label == 'END'):
            break
        # Checking if value is a string or an array, a struct or a single value
        if value.startswith('( <'):
            print("Warning: The parsing of the LDR %s failed." % (label,), file=sys.stderr)
        elif value.startswith('( '): # A single string, an array of strings or structs or a simple array
            sizes = [int(x) for x in value.strip('( )').split(',')]
            params[label] = get_array_values(label, sizes, data)
        elif value.startswith('('): # A struct
            data = ''.join([value, data])
            params[label] = get_array_values(label, [1], data)[0]
        else: # A single value
            try:
                params[label] = int(value)
            except ValueError:
                try:
                    params[label] = float(value)
                except ValueError:
                    params[label] = value
    fid.close()
    # NOTE(review): `label` is only bound inside the loop above -- a file
    # whose header never reaches a '##' line would raise NameError here.
    if label != 'END':
        sys.exit("Unexpected end of file: Missing END Statement")
    return (header, params)
def main():
    """Command-line entry point: print every header entry and parameter
    of a single ParaVision parameter file."""
    import argparse
    parser = argparse.ArgumentParser(description='Read ParaVision parameter file')
    parser.add_argument('filename', help='ParaVision parameter file (acqp, method, visu_pars)')
    args = parser.parse_args()
    # read parameter file
    header, params = read_param_file(args.filename)
    for label, value in header.items():
        print("%s: %s" % (label, value))
    for label, value in params.items():
        if isinstance(value, np.ndarray):
            # Arrays are printed on their own line(s) below the label.
            print("%s:" % (label,))
            print(value)
        else:
            print("%s: %s" % (label, value))

if __name__ == '__main__':
    main()
| Python |
3D | Aswendt-Lab/AIDAmri | bin/5.1_ROI_analysis/01_dilate_mask_process.py | .py | 9,824 | 230 | '''
Created on 19.10.2020
Author:
Michael Diedenhofen
Max Planck Institute for Metabolism Research, Cologne
Description:
Pre-requisits: stroke mask was defined at post stroke day 7 (P7)
Result: for all time points the peri-infarct mask is created aligned to the individual T2w MRI data
1. Time point P7: For each subject of the two groups a peri-infarct mask is generated from the stroke mask.
- The input stroke mask <subject>Stroke_mask.nii.gz is located in the T2w subfolder.
- The output peri-infarct mask <subject>_peri_mask_m3_n15.nii.gz is stored in the T2w subfolder.
- The SciPy function binary_dilation() is called for each slice of the stroke mask with a parameter struct if at least one pixel value is greater than 0.
- struct is a mask of size [2*R+1, 2*R+1] filled with a circular disk (radius R=15 pixels).
- In order to obtain the peri-infarct mask the original stroke mask is subtracted from the dilated stroke mask.
2. Time point P7: For each subject of the two groups a non-rigid transformation (from template to T2w MRI) is inverted with NiftyReg.
- The non-rigid transformation <subject>BiasBetMatrixBspline.nii (-invNrr filename1) is inverted with NiftyReg.
NiftyReg command: reg_transform -ref <filename> -invNrr <filename1> <filename2> <filename3>
3. All time points except P7: For each subject of the two groups a peri-infarct mask from time point P7 is transformed with NiftyReg.
- Compose the non-rigid transformation (template -> T2w) with the inverse non-rigid transformation from time point P7 (step 2).
NiftyReg command: reg_transform -ref <filename> -ref2 <filename> -comp <filename1> <filename2> <filename3>
- Apply the combined non-rigid transformation to the peri-infarct mask from time point P7.
NiftyReg command: reg_resample -ref <filename> -flo <filename> -res <filename> -trans <filename> -inter 0
'''
from __future__ import print_function
try:
zrange = xrange
except NameError:
zrange = range
import os
import sys
import numpy as np
import create_seed_rois as csr
import dilate_mask as dm
import proc_tools as pt
def create_rois_1(path_labels, path_atlas, path_rois=None, mask=None):
    """Create a 3D atlas-labels ROI volume.

    path_labels : text file listing the labels to extract.
    path_atlas : atlas labels NIfTI file (must exist, else sys.exit).
    path_rois : optional output NIfTI path for the ROI volume.
    mask : optional volume multiplied element-wise into the ROIs.
    Returns (rois, voxel_dims).
    """
    if not os.path.isfile(path_atlas):
        sys.exit("Error: '%s' is not a regular file." % (path_atlas,))
    # Build the ROI volume from the atlas and drop singleton dimensions.
    labels_hdr, rois = csr.create_rois(path_labels, [path_atlas], datatype=16, preserve=True)
    voxel_dims = labels_hdr[0].get_zooms()
    rois = np.squeeze(rois)
    if mask is not None:
        # Restrict the ROIs to the masked region.
        rois = np.multiply(rois, mask)
    if path_rois is not None:
        pt.save_data(rois, voxel_dims, path_rois, dtype=None)
    return (rois, voxel_dims)
def create_peri_mask(timepoint, group, subject, na=(15,)):
    """Create peri-infarct masks for one subject by dilating its stroke mask.

    timepoint, group, subject : path components below the processing roots.
    na : iterable of dilation radii (in pixels); default unchanged (15).

    Reads <subject>Stroke_mask.nii.gz from the input T2w folder and writes
    <subject>_peri_mask_m<model>_n<n>.nii.gz to the output T2w folder.
    Dilation runs slice by slice, only on slices containing foreground:
    models 1/2 use dm.dilate_repeat with the model as connectivity,
    model 3 uses dm.dilate_struct with a circular structuring element.
    Only model 3 is currently enabled by the range(3, 4) loop.

    Fixes: np.bool was removed in NumPy 1.24 -- replaced with the builtin
    bool; the mutable default argument [15] became the tuple (15,).
    """
    in_dir = os.path.join(pt.proc_in_dir, timepoint, group, subject, 'T2w')
    out_dir = os.path.join(pt.proc_out_dir, timepoint, group, subject, 'T2w')
    if not os.path.isdir(in_dir):
        sys.exit("Error: '%s' is not an existing directory." % (in_dir,))
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    # input mask file (NIfTI)
    path_in_mask = os.path.join(in_dir, subject + 'Stroke_mask.nii.gz')
    if not os.path.isfile(path_in_mask):
        sys.exit("Error: '%s' is not a regular file." % (path_in_mask,))
    mask, mask_dims = pt.read_data(path_in_mask)
    for model in range(3, 4):
        for n in na:
            # output mask file (NIfTI)
            path_out_mask = os.path.join(out_dir, subject + '_peri_mask_m%d_n%d.nii.gz' % (model, n))
            peri = np.copy(mask)
            if (model == 1) or (model == 2):
                for k in zrange(mask.shape[2]):
                    image = mask[:, :, k]
                    if np.any(image.astype(bool)):
                        peri[:, :, k] = dm.dilate_repeat(image, connectivity=model, n=n)
            else:
                struct = dm.circle_mask(n=n)
                for k in zrange(mask.shape[2]):
                    image = mask[:, :, k]
                    if np.any(image.astype(bool)):
                        peri[:, :, k] = dm.dilate_struct(image, struct)
            pt.save_data(peri.astype(np.float32), mask_dims, path_out_mask, dtype=None)
def xfm_inv(timepoint, group, subject):
    """Invert a subject's template->T2w registration with NiftyReg.

    Writes <subject>BiasBetMatrixAff_inv.txt (inverted affine, via
    reg_transform -invAff) and <subject>BiasBetMatrixBspline_inv.nii.gz
    (inverted non-rigid transformation, via reg_transform -invNrr) into
    the output T2w folder.
    NOTE(review): `output_aff` is used without an existence check, and
    the os.system() return codes are ignored, so NiftyReg failures pass
    silently.
    """
    in_dir = os.path.join(pt.proc_in_dir, timepoint, group, subject, 'T2w')
    out_dir = os.path.join(pt.proc_out_dir, timepoint, group, subject, 'T2w')
    if not os.path.isdir(in_dir):
        sys.exit("Error: '%s' is not an existing directory." % (in_dir,))
    if not os.path.isdir(out_dir):
        sys.exit("Error: '%s' is not an existing directory." % (out_dir,))
    brain_template = os.path.join(pt.lib_in_dir, 'NP_template_sc0.nii.gz')
    input_volume = os.path.join(in_dir, subject + 'BiasBet.nii.gz')
    output_aff = os.path.join(in_dir, subject + 'BiasBetMatrixAff.txt')
    output_aff_inv = os.path.join(out_dir, subject + 'BiasBetMatrixAff_inv.txt')
    output_cpp = os.path.join(in_dir, subject + 'BiasBetMatrixBspline.nii')
    output_cpp_inv = os.path.join(out_dir, subject + 'BiasBetMatrixBspline_inv.nii.gz')
    if not os.path.isfile(brain_template):
        sys.exit("Error: '%s' is not a regular file." % (brain_template,))
    if not os.path.isfile(input_volume):
        sys.exit("Error: '%s' is not a regular file." % (input_volume,))
    if not os.path.isfile(output_cpp):
        sys.exit("Error: '%s' is not a regular file." % (output_cpp,))
    # inverse affine transformation
    command = 'reg_transform -invAff %s %s' % (output_aff, output_aff_inv)
    os.system(command)
    print(output_aff_inv)
    # inverse transformation (non-rigid; reference defines output geometry)
    command = 'reg_transform -ref %s -invNrr %s %s %s' % (input_volume, output_cpp, brain_template, output_cpp_inv)
    os.system(command)
    print(output_cpp_inv)
def xfm_peri_mask(timepoint_P7, timepoint, group, subject_P7, subject):
    """Transform a subject's P7 stroke and peri-infarct masks to another
    time point with NiftyReg.

    Composes the current time point's template->T2w B-spline with the
    inverted P7 transformation (reg_transform -comp), then resamples the
    P7 stroke mask and peri-infarct mask into the current T2w space with
    nearest-neighbour interpolation (reg_resample -inter 0), which keeps
    mask values binary.
    NOTE(review): os.system() return codes are not checked.
    """
    in_dir_P7 = os.path.join(pt.proc_in_dir, timepoint_P7, group, subject_P7, 'T2w')
    in_dir = os.path.join(pt.proc_in_dir, timepoint, group, subject, 'T2w')
    out_dir_P7 = os.path.join(pt.proc_out_dir, timepoint_P7, group, subject_P7, 'T2w')
    out_dir = os.path.join(pt.proc_out_dir, timepoint, group, subject, 'T2w')
    if not os.path.isdir(in_dir_P7):
        sys.exit("Error: '%s' is not an existing directory." % (in_dir_P7,))
    if not os.path.isdir(in_dir):
        sys.exit("Error: '%s' is not an existing directory." % (in_dir,))
    if not os.path.isdir(out_dir_P7):
        sys.exit("Error: '%s' is not an existing directory." % (out_dir_P7,))
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    brain_template = os.path.join(pt.lib_in_dir, 'NP_template_sc0.nii.gz')
    input_volume = os.path.join(in_dir, subject + 'BiasBet.nii.gz')
    output_cpp = os.path.join(in_dir, subject + 'BiasBetMatrixBspline.nii')
    output_cpp_inv = os.path.join(out_dir_P7, subject_P7 + 'BiasBetMatrixBspline_inv.nii.gz')
    output_cpp_comp = os.path.join(out_dir, subject + 'BiasBetMatrixBspline_comp.nii.gz')
    mask_P7 = os.path.join(in_dir_P7, subject_P7 + 'Stroke_mask.nii.gz')
    mask = os.path.join(out_dir, subject + 'Stroke_mask.nii.gz')
    peri_P7 = os.path.join(out_dir_P7, subject_P7 + '_peri_mask_m3_n15.nii.gz')
    peri = os.path.join(out_dir, subject + '_peri_mask_m3_n15.nii.gz')
    if not os.path.isfile(brain_template):
        sys.exit("Error: '%s' is not a regular file." % (brain_template,))
    if not os.path.isfile(input_volume):
        sys.exit("Error: '%s' is not a regular file." % (input_volume,))
    if not os.path.isfile(output_cpp):
        sys.exit("Error: '%s' is not a regular file." % (output_cpp,))
    if not os.path.isfile(mask_P7):
        sys.exit("Error: '%s' is not a regular file." % (mask_P7,))
    if not os.path.isfile(peri_P7):
        sys.exit("Error: '%s' is not a regular file." % (peri_P7,))
    # compose transformations (current B-spline after the inverted P7 one)
    command = 'reg_transform -ref %s -ref2 %s -comp %s %s %s' % (input_volume, brain_template, output_cpp, output_cpp_inv, output_cpp_comp)
    os.system(command)
    # resample stroke mask
    command = 'reg_resample -ref %s -flo %s -res %s -trans %s -inter 0' % (input_volume, mask_P7, mask, output_cpp_comp)
    os.system(command)
    # resample peri-infarct mask
    command = 'reg_resample -ref %s -flo %s -res %s -trans %s -inter 0' % (input_volume, peri_P7, peri, output_cpp_comp)
    os.system(command)
def main():
    """Process all subjects: build peri-infarct masks and inverted
    transformations at time point P7, then propagate the P7 masks to all
    other time points via composed NiftyReg transformations."""
    timepoint_P7 = pt.timepoints[1]
    # timepoint P7
    for index_g, group in enumerate(pt.groups):
        for subject in pt.study[1][index_g]:
            if subject is not None:
                create_peri_mask(timepoint_P7, group, subject)
                xfm_inv(timepoint_P7, group, subject)
    # all timepoints except P7
    for index_t, timepoint in enumerate(pt.timepoints):
        if index_t != 1:
            for index_g, group in enumerate(pt.groups):
                for index_s, subject in enumerate(pt.study[index_t][index_g]):
                    if subject is not None:
                        # Pair each subject with its P7 counterpart at the
                        # same group/subject index -- assumes the study
                        # lists are aligned across time points.
                        xfm_peri_mask(timepoint_P7, timepoint, group, pt.study[1][index_g][index_s], subject)

if __name__ == '__main__':
    main()
| Python |
3D | Aswendt-Lab/AIDAmri | bin/5.1_ROI_analysis/03_create_seed_rois_process.py | .py | 12,461 | 253 | '''
Created on 19.10.2020
Author:
Michael Diedenhofen
Max Planck Institute for Metabolism Research, Cologne
Description:
Pre-requisite: 02_apply_xfm_process.py
Result:
rsfMRI - a Matlab file which contains two text files: 1) for each region one column with the averaged rsfMRI time series and 2) the atlas labels names.
DTI - atlas labels file modified to include individually shaped peri-infarct brain regions which replace the original regions
The text file annotation_50CHANGEDanno_label_IDs+2000.txt contains all atlas labels and another text file contains selected cortical peri-infarct atlas labels.
rsfMRI:
1. For all time points and for each subject of the two groups a hyperstack with modified selected cortical regions is created.
- The peri-infarct mask <subject>_T2w_peri_mask_rsfMRI.nii.gz and the atlas labels <subject>_T2w_Anno_rsfMRI.nii.gz are located in the fMRI subfolder.
- The output hyperstack Seed_ROIs_all_mod_peri.nii.gz is stored in the fMRI subfolder.
- Create a first hyperstack with all regions (each region is one volume) from the atlas labels.
- Create a second hyperstack with selected cortical regions from the atlas labels and apply the rsfMRI peri-infarct mask.
- Create a third hyperstack with all regions but replaced selected cortical regions from the second hyperstack.
2. For each region of the modified hyperstack an averaged rsfMRI time series is computed and a text file with one column for each region is created.
- The atlas labels names are listed in the annoVolume+2000_rsfMRI.nii.txt text file.
- The input rsfMRI file <subject>_mcf_f_SFRGR.nii.gz is located in the fMRI/regr subfolder.
- The resulting text file MasksTCsSplit_GV_all_mod_peri.txt and Matlab file MasksTCsSplit_GV_all_mod_peri.txt.mat are stored in the fMRI/regr subfolder.
- The modified hyperstack is used with the rsfMRI data and the averaged time series of a region is computed from the voxels of the rsfMRI data which belong to this region.
- The resulting text file contains for each hyperstack region one column with the averaged rsfMRI time series.
- This text file is combined with the atlas labels names and stored as a Matlab file.
DTI:
1. For all time points and for each subject of the two groups a hyperstack and atlas labels with modified selected cortical regions are created.
- The peri-infarct mask <subject>_T2w_peri_mask_DTI.nii.gz and the atlas labels <subject>_T2w_Anno_DTI.nii.gz are located in the DTI subfolder.
- The output hyperstack Seed_ROIs_all_mod_peri.nii.gz is stored in the DTI subfolder.
- The output atlas labels file <subject>_T2w_Anno_DTI_mod_peri_scaled.nii.gz is stored in the DTI/DSI_studio subfolder.
- Create a first hyperstack with all regions (each region is one volume) from the atlas labels.
- Create a second hyperstack with selected cortical regions from the atlas labels and apply the DTI peri-infarct mask.
- Create a third hyperstack with all regions but replaced selected cortical regions from the second hyperstack.
- A maximum intensity projection of the third hyperstack generates atlas labels with all regions but replaced selected cortical regions.
- The voxel dimensions of the output atlas labels file are scaled by a factor of 10 which is required for DSI Studio.
'''
from __future__ import print_function
try:
zrange = xrange
except NameError:
zrange = range
import os
import sys
import shutil
import numpy as np
import create_seed_rois as csr
import fsl_mean_ts as mts
import proc_tools as pt
def read_mask(path_in_mask):
    """Load a mask NIfTI file; exits when the path is not a regular file.
    Returns (mask, voxel_dims)."""
    if not os.path.isfile(path_in_mask):
        sys.exit("Error: '%s' is not a regular file." % (path_in_mask,))
    # read mask file (NIfTI)
    return pt.read_data(path_in_mask)
def create_rois_1(path_labels, path_atlas, path_rois=None, mask=None):
    """Build a 3D atlas ROI volume; optionally mask it and write it out.

    Exits if `path_atlas` does not exist.  Returns (rois, voxel_dims).
    """
    if not os.path.isfile(path_atlas):
        sys.exit("Error: '%s' is not a regular file." % (path_atlas,))
    # create atlas labels ROIs and squeeze away the single-atlas axis
    headers, volume = csr.create_rois(path_labels, [path_atlas], datatype=16, preserve=True)
    dims = headers[0].get_zooms()
    volume = np.squeeze(volume)
    if mask is not None:
        # apply mask to ROIs
        volume = np.multiply(volume, mask)
    if path_rois is not None:
        # save ROIs file (NIfTI)
        pt.save_data(volume, dims, path_rois, dtype=None)
    return (volume, dims)
def create_rois_2(path_labels, path_atlas, path_rois=None, mask=None, preserve=False):
    """Create a 4D ROI hyperstack (one volume per label) from an atlas.

    path_labels : text file listing the labels to extract.
    path_atlas : atlas labels NIfTI file (must exist, else sys.exit).
    path_rois : optional output NIfTI path for the hyperstack.
    mask : optional 3D volume multiplied into every ROI volume.
    preserve : forwarded to csr.create_rois().
    Returns (rois, voxel_dims).
    """
    if not os.path.isfile(path_atlas):
        sys.exit("Error: '%s' is not a regular file." % (path_atlas,))
    labels_hdr, rois = csr.create_rois(path_labels, [path_atlas], datatype=16, preserve=preserve)
    voxel_dims = labels_hdr[0].get_zooms()
    if mask is not None:
        # Restrict every ROI volume to the masked region.
        for index in zrange(rois.shape[3]):
            rois[:, :, :, index] = np.multiply(rois[:, :, :, index], mask)
    if path_rois is not None:
        pt.save_data(rois, voxel_dims, path_rois, dtype=None)
    return (rois, voxel_dims)
def replace_rois(labels_1, labels_2, rois_1, rois_2):
    """For every label of labels_2 that also occurs in labels_1, overwrite
    the corresponding volume of rois_1 (in place) with the matching
    volume from rois_2."""
    # Map each label to its first position in labels_1.
    lookup = {}
    for index, label in enumerate(labels_1):
        lookup.setdefault(label, index)
    for index_2, label in enumerate(labels_2):
        index_1 = lookup.get(label)
        if index_1 is not None:
            rois_1[:, :, :, index_1] = rois_2[:, :, :, index_2]
def create_rois_rsfMRI(timepoint, group, subject, labels, labels_2, label_names_2000, label_names_peri):
    """Build rsfMRI seed-ROI hyperstacks and averaged region time series.

    labels / labels_2 : label lists for all regions and for the selected
        cortical (peri) regions; matching volumes of the full hyperstack
        are replaced by the peri-masked ones.
    label_names_2000 / label_names_peri : region name lists forwarded to
        mts.mean_ts() for the output text/Matlab files.
    Writes Seed_ROIs_all / Seed_ROIs_peri / Seed_ROIs_all_mod_peri NIfTI
    hyperstacks to the fMRI folder and the MasksTCsSplit_GV_* time-series
    files to fMRI/regr.
    """
    in_dir = os.path.join(pt.proc_in_dir, timepoint, group, subject, 'fMRI')
    out_dir = os.path.join(pt.proc_out_dir, timepoint, group, subject, 'fMRI')
    regr_dir = os.path.join(pt.proc_out_dir, timepoint, group, subject, 'fMRI', 'regr')
    if not os.path.isdir(in_dir):
        sys.exit("Error: '%s' is not an existing directory." % (in_dir,))
    if not os.path.isdir(out_dir):
        sys.exit("Error: '%s' is not an existing directory." % (out_dir,))
    if not os.path.exists(regr_dir):
        os.makedirs(regr_dir)
    # input atlas labels file (NIfTI)
    #path_atlas = os.path.join(in_dir, subject + 'SmoothBet_AnnoSplit_rsfMRI.nii.gz')
    path_atlas = os.path.join(out_dir, subject + '_T2w_Anno_rsfMRI.nii.gz')
    # input peri-infarct mask file (NIfTI)
    path_mask = os.path.join(out_dir, subject + '_T2w_peri_mask_rsfMRI.nii.gz')
    peri_mask, _ = read_mask(path_mask)
    # input rsfMRI file (NIfTI)
    path_rsfMRI = os.path.join(in_dir, 'regr', subject + '_mcf_f_SFRGR.nii.gz')
    if not os.path.isfile(path_rsfMRI):
        sys.exit("Error: '%s' is not a regular file." % (path_rsfMRI,))
    # output ROIs files (NIfTI)
    path_out_rois = os.path.join(out_dir, 'Seed_ROIs_all.nii.gz')
    path_out_rois_1 = os.path.join(out_dir, subject + '_rois_peri.nii.gz')
    path_out_rois_2 = os.path.join(out_dir, 'Seed_ROIs_peri.nii.gz')
    path_out_rois_x = os.path.join(out_dir, 'Seed_ROIs_all_mod_peri.nii.gz')
    # output time series text files
    path_out_ts = os.path.join(regr_dir, 'MasksTCsSplit_GV_all.txt')
    path_out_ts_2 = os.path.join(regr_dir, 'MasksTCsSplit_GV_peri.txt')
    path_out_ts_x = os.path.join(regr_dir, 'MasksTCsSplit_GV_all_mod_peri.txt')
    # Hyperstacks: all regions, peri-masked cortex (3D), peri-masked
    # selected regions (4D), and the merged all-regions stack.
    rois, rois_dims = create_rois_2(pt.path_labels, path_atlas, path_rois=path_out_rois)
    _, _ = create_rois_1(pt.path_labels_1, path_atlas, path_rois=path_out_rois_1, mask=peri_mask)
    rois_2, _ = create_rois_2(pt.path_labels_2, path_atlas, path_rois=path_out_rois_2, mask=peri_mask)
    rois_x = np.copy(rois)
    replace_rois(labels, labels_2, rois_x, rois_2)
    pt.save_data(rois_x, rois_dims, path_out_rois_x, dtype=None)
    # Average the rsfMRI time series over every region of each stack.
    mts.mean_ts(path_rsfMRI, path_out_rois, path_out_ts, label_names_2000)
    mts.mean_ts(path_rsfMRI, path_out_rois_2, path_out_ts_2, label_names_peri)
    mts.mean_ts(path_rsfMRI, path_out_rois_x, path_out_ts_x, label_names_2000)
def create_rois_DTI(timepoint, group, subject, labels, labels_2, scale=10.0):
    """Create seed-ROI volumes in DTI space for one subject and export scaled
    maximum-intensity projections plus label-name text files for DSI Studio.

    Parameters:
        timepoint, group, subject -- study coordinates used to build all paths
        labels    -- flattened full atlas label list (replaced by rois_2 values)
        labels_2  -- flattened peri-infarct atlas label subset
        scale     -- factor applied to the MIP voxel dimensions
                     (default 10.0; presumably to match DSI Studio's expected
                     spatial units -- TODO confirm)

    Returns None. Subjects without DTI input data or without a registered
    atlas are skipped silently; all outputs are written under the subject's
    DTI folder.
    """
    in_dir = os.path.join(pt.proc_in_dir, timepoint, group, subject, 'DTI')
    out_dir = os.path.join(pt.proc_out_dir, timepoint, group, subject, 'DTI')
    dti_dir = os.path.join(pt.proc_out_dir, timepoint, group, subject, 'DTI', 'DSI_studio')
    # Missing DTI input data is not an error: the subject is simply skipped.
    if not os.path.isdir(in_dir):
        #sys.exit("Error: '%s' is not an existing directory." % (in_dir,))
        return
    if not os.path.isdir(out_dir):
        sys.exit("Error: '%s' is not an existing directory." % (out_dir,))
    if not os.path.exists(dti_dir):
        os.makedirs(dti_dir)
    # input atlas labels file (NIfTI)
    #path_atlas = os.path.join(in_dir, subject + 'SmoothMicoBet_AnnoSplit_rsfMRI.nii.gz')
    path_atlas = os.path.join(out_dir, subject + '_T2w_Anno_DTI.nii.gz')
    # Skip subjects whose atlas has not been registered to DTI space yet.
    if not os.path.isfile(path_atlas):
        #sys.exit("Error: '%s' is not a regular file." % (path_atlas,))
        return
    # input peri-infarct mask file (NIfTI)
    path_mask = os.path.join(out_dir, subject + '_T2w_peri_mask_DTI.nii.gz')
    peri_mask, _ = read_mask(path_mask)
    # output ROIs files (NIfTI)
    #path_out_rois = os.path.join(out_dir, subject + '_seed_rois.nii.gz')
    #path_out_rois_1 = os.path.join(out_dir, subject + '_cortex_rois_1.nii.gz')
    #path_out_rois_2 = os.path.join(out_dir, subject + '_cortex_rois_2.nii.gz')
    #path_out_rois_x = os.path.join(out_dir, subject + '_seed_rois_mod.nii.gz')
    path_out_rois = os.path.join(out_dir, 'Seed_ROIs_all.nii.gz')
    path_out_rois_1 = os.path.join(out_dir, subject + '_rois_peri.nii.gz')
    path_out_rois_2 = os.path.join(out_dir, 'Seed_ROIs_peri.nii.gz')
    path_out_rois_x = os.path.join(out_dir, 'Seed_ROIs_all_mod_peri.nii.gz')
    # output maximum intensity projection files (NIfTI)
    path_out_mip_rois = os.path.join(dti_dir, subject + '_T2w_Anno_DTI_scaled.nii.gz')
    path_out_mip_rois_x = os.path.join(dti_dir, subject + '_T2w_Anno_DTI_mod_peri_scaled.nii.gz')
    # output maximum intensity projection label names text files
    path_out_label_names = os.path.join(dti_dir, subject + '_T2w_Anno_DTI_scaled.nii.txt')
    path_out_label_names_x = os.path.join(dti_dir, subject + '_T2w_Anno_DTI_mod_peri_scaled.nii.txt')
    # Build the full and peri-infarct ROI stacks (preserve=True keeps original
    # label values -- assumption based on the parameter name, TODO confirm).
    rois, rois_dims = create_rois_2(pt.path_labels, path_atlas, path_rois=path_out_rois, preserve=True)
    _, _ = create_rois_1(pt.path_labels_1, path_atlas, path_rois=path_out_rois_1, mask=peri_mask)
    rois_2, _ = create_rois_2(pt.path_labels_2, path_atlas, path_rois=path_out_rois_2, mask=peri_mask, preserve=True)
    # Substitute the peri-infarct ROIs into a copy of the full ROI stack.
    rois_x = np.copy(rois)
    replace_rois(labels, labels_2, rois_x, rois_2)
    pt.save_data(rois_x, rois_dims, path_out_rois_x, dtype=None)
    # Collapse the 4th (ROI) axis into a single labelled volume per stack.
    mip_rois = np.max(rois, axis=3)
    mip_rois_x = np.max(rois_x, axis=3)
    # Scale the voxel dimensions for DSI Studio output.
    mip_rois_dims = tuple((x * scale) for x in rois_dims)
    pt.save_data(mip_rois, mip_rois_dims, path_out_mip_rois, dtype=None)
    pt.save_data(mip_rois_x, mip_rois_dims, path_out_mip_rois_x, dtype=None)
    # Ship the label-name lookup tables alongside the projections.
    shutil.copyfile(pt.path_label_names_2000, path_out_label_names)
    shutil.copyfile(pt.path_label_names_2000, path_out_label_names_x)
    print(path_out_label_names)
    print(path_out_label_names_x)
def main():
    """Build ROI volumes and time series for every subject in the study table."""
    # Load the two label sets and flatten their nested lists.
    _, labels = pt.read_labels(pt.path_labels)
    _, labels_2 = pt.read_labels(pt.path_labels_2)
    labels = [lab for sub_list in labels for lab in sub_list]
    labels_2 = [lab for sub_list in labels_2 for lab in sub_list]
    # Label-name table: first tab-separated field of each line is the label id.
    label_names_2000 = pt.read_text(pt.path_label_names_2000)
    labels_2000 = [int(entry.split('\t')[0]) for entry in label_names_2000]
    # Restrict the name table to the peri-infarct label subset.
    peri_indices = np.where(np.in1d(labels_2000, labels_2))[0]
    label_names_peri = [label_names_2000[idx] for idx in peri_indices]
    # Process every subject of every group at every timepoint.
    for index_t, timepoint in enumerate(pt.timepoints):
        for index_g, group in enumerate(pt.groups):
            for subject in pt.study[index_t][index_g]:
                if subject is None:
                    continue
                create_rois_rsfMRI(timepoint, group, subject, labels, labels_2, label_names_2000, label_names_peri)
                create_rois_DTI(timepoint, group, subject, labels, labels_2)
if __name__ == '__main__':
    # Entry point: run the ROI/time-series extraction when executed as a script.
    main()
| Python |
3D | Aswendt-Lab/AIDAmri | bin/5.1_ROI_analysis/dilate_mask.py | .py | 1,916 | 73 | '''
Created on 20.08.2020
Author:
Michael Diedenhofen
Max Planck Institute for Metabolism Research, Cologne
'''
from __future__ import print_function
try:
    # Python 2/3 compatibility: prefer the lazy integer range (xrange on Py2).
    zrange = xrange
except NameError:
    # Python 3: range is already lazy.
    zrange = range
import os
import sys
import numpy as np
from scipy import ndimage
import proc_tools as pt
def circle_mask(n=8):
    """Return a (2n+1, 2n+1) boolean array containing a filled disc.

    The disc is centred in the array; a cell belongs to it when its squared
    distance from the centre is below n*n + 2 (slightly more than radius n,
    so the axis-aligned extremes are included).
    """
    # Open grids of centred row/column offsets broadcast to the full square.
    rows, cols = np.ogrid[-n:n + 1, -n:n + 1]
    #print(((rows * rows + cols * cols) < (n * n + 2)).astype(np.int16))
    return (rows * rows + cols * cols) < (n * n + 2)
def dilate_repeat(image, connectivity=1, n=8):
    """Binary-dilate a 2D image n times and return only the grown rim.

    Parameters:
        image        -- 2D integer/boolean array; nonzero pixels form the mask
        connectivity -- connectivity passed to generate_binary_structure (1 =
                        4-neighbourhood, 2 = 8-neighbourhood)
        n            -- number of dilation iterations

    Returns an array of image.dtype that is 1 on newly added pixels and 0 on
    the original mask (dilated minus original).
    """
    # BUG FIX: np.bool was removed in NumPy 1.24 -- use the builtin bool.
    dilated = np.copy(image).astype(bool)
    struct = ndimage.generate_binary_structure(2, connectivity)
    # range instead of the module-level py2 zrange shim (identical on Python 3)
    for _ in range(n):
        dilated = ndimage.binary_dilation(dilated, structure=struct)
    # Subtract the original so only the added border remains.
    dilated = np.subtract(dilated.astype(image.dtype), image)
    return dilated
def dilate_struct(image, struct):
    """Binary-dilate a 2D image once with a given structuring element.

    Parameters:
        image  -- 2D integer/boolean array; nonzero pixels form the mask
        struct -- boolean structuring element for ndimage.binary_dilation

    Returns an array of image.dtype that is 1 on newly added pixels and 0 on
    the original mask (dilated minus original).
    """
    # BUG FIX: np.bool was removed in NumPy 1.24 -- use the builtin bool.
    dilated = np.copy(image).astype(bool)
    dilated = ndimage.binary_dilation(dilated, structure=struct)
    # Subtract the original so only the added border remains.
    dilated = np.subtract(dilated.astype(image.dtype), image)
    return dilated
if __name__ == '__main__':
    # Command-line entry point: dilate every nonempty slice of the input
    # mask with a circular structuring element and save the result.
    import argparse
    parser = argparse.ArgumentParser(description='Dilate input mask.')
    parser.add_argument('in_mask', help='input mask file name')
    parser.add_argument('-o', '--out_mask', help='output mask file name', required=True)
    args = parser.parse_args()
    # input mask file
    if not os.path.isfile(args.in_mask):
        sys.exit("Error: '%s' is not a regular file." % (args.in_mask,))
    # read input mask data
    data, voxel_dims = pt.read_data(args.in_mask)
    struct = circle_mask()
    # Process the volume slice by slice along the third axis.
    for k in zrange(data.shape[2]):
        image = data[:, :, k]
        # BUG FIX: np.bool was removed in NumPy 1.24 -- use the builtin bool.
        if np.any(image.astype(bool)):
            #data[:, :, k] = dilate_repeat(image)
            data[:, :, k] = dilate_struct(image, struct)
    # save mask data as NIfTI file
    pt.save_data(data, voxel_dims, args.out_mask, dtype=None)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/5.1_ROI_analysis/pv_reader.py | .py | 19,580 | 491 | '''
Created on 19.10.2020
Author:
Michael Diedenhofen
Max Planck Institute for Metabolism Research, Cologne
Read Bruker ParaVision data (2dseq) and save as NIfTI file.
Create a b-table text file with b-values and directions for diffusion data.
'''
from __future__ import print_function
try:
    # Python 2/3 compatibility: prefer the lazy integer range (xrange on Py2).
    zrange = xrange
except NameError:
    # Python 3: range is already lazy.
    zrange = range
# Module version string.
VERSION = 'pv_reader.py v 1.1.2 20201019'
import os
import sys
import numpy as np
import nibabel as nib
import nibabel.nifti1 as nii
import pv_parser as par
class ParaVision:
    """
    Read ParaVision data and save as NIfTI file
    """
    def __init__(self, procfolder, rawfolder, study, expno, procno):
        """Identify one dataset by processed/raw folders, study name,
        experiment number and processed-images number."""
        self.procfolder = procfolder
        self.rawfolder = rawfolder
        self.study = study
        self.expno = int(expno)
        self.procno = int(procno)
        # Base name used for all output files: <study>.<expno>.<procno>
        self.name = '.'.join([study, str(expno), str(procno)])
    def __check_params(self, params_name, labels):
        """Exit with an error if any of *labels* is missing from the named
        parameter dictionary (e.g. self.visu_pars)."""
        misses = [label for label in labels if label not in getattr(self, params_name)]
        if len(misses) > 0:
            sys.exit("Missing labels in %s: %s" % (params_name, str(misses),))
    def __check_path(self, header_path):
        """Warn when the study/expno/procno encoded in the visu_pars header
        path disagree with the ones this object was constructed with."""
        header_path = header_path.split('/')
        study, expno, procno = (header_path[-5], int(header_path[-4]), int(header_path[-2]))
        if self.study != study:
            print("Warning: Study '%s' differs from '%s' in the visu_pars header." % (self.study, study), file=sys.stderr)
        if self.expno != expno:
            print("Warning: Experiment number %s differs from %s in the visu_pars header." % (self.expno, expno), file=sys.stderr)
        if self.procno != procno:
            print("Warning: Processed images number %s differs from %s in the visu_pars header." % (self.procno, procno), file=sys.stderr)
    def __get_data_dims(self):
        """Derive (data_dims, numpy dtype, dim description, FG_SLICE axis
        index, FG_SLICE name) from the visu_pars parameters."""
        labels_visu_pars = ['VisuCoreDim', 'VisuCoreSize', 'VisuCoreWordType', 'VisuCoreByteOrder']
        self.__check_params('visu_pars', labels_visu_pars)
        #VisuCoreFrameCount = self.visu_pars.get('VisuCoreFrameCount') # Number of frames
        VisuCoreDim = self.visu_pars.get('VisuCoreDim')
        VisuCoreSize = self.visu_pars.get('VisuCoreSize')
        VisuCoreDimDesc = self.visu_pars.get('VisuCoreDimDesc')
        VisuCoreWordType = self.visu_pars.get('VisuCoreWordType')
        #VisuCoreByteOrder = self.visu_pars.get('VisuCoreByteOrder')
        #VisuFGOrderDescDim = self.visu_pars.get('VisuFGOrderDescDim')
        VisuFGOrderDesc = self.visu_pars.get('VisuFGOrderDesc')
        dim_desc = None if VisuCoreDimDesc is None else VisuCoreDimDesc[0]
        # FrameGroup dimensions and names
        if (VisuFGOrderDesc is not None) and len(VisuFGOrderDesc) > 0:
            #fg_dims = list(map(lambda item: int(item[0]), VisuFGOrderDesc))
            #fg_names = list(map(lambda item: str(item[1]), VisuFGOrderDesc))
            fg_dims = [int(item[0]) for item in VisuFGOrderDesc]
            fg_names = [str(item[1]) for item in VisuFGOrderDesc]
            fg_names = [par.extract_jcamp_strings(item, get_all=False) for item in fg_names]
        else:
            fg_dims = []
            fg_names = []
        # Data dimensions
        data_dims = list(map(int, VisuCoreSize)) + fg_dims
        # FrameGroup FG_SLICE index
        fg_index, fg_slice = (None, None)
        if VisuCoreDim == 2:
            fg_slices = ('FG_SLICE', 'FG_IRMODE')
            fg_indices = [fg_names.index(x) for x in fg_slices if x in fg_names]
            if len(fg_indices) > 0:
                fg_index = fg_indices[0]
                # NOTE(review): fg_index is an index into fg_names but is used
                # to index the 2-tuple fg_slices here -- verify for datasets
                # where the slice FrameGroup is not the first FrameGroup.
                fg_slice = fg_slices[fg_index]
                fg_index += VisuCoreDim
        # ParaVision to NumPy data-type conversion
        if VisuCoreWordType == '_8BIT_UNSGN_INT':
            data_type = 'uint8'
        elif VisuCoreWordType == '_16BIT_SGN_INT':
            data_type = 'int16'
        elif VisuCoreWordType == '_32BIT_SGN_INT':
            data_type = 'int32'
        elif VisuCoreWordType == '_32BIT_FLOAT':
            data_type = 'float32'
        else:
            sys.exit("The data format is not correct specified.")
        return (data_dims, data_type, dim_desc, fg_index, fg_slice)
    def __get_voxel_dims(self, data_dims, scale=1.0):
        """Compute ([dx, dy, dz, dt], spatial unit) from acqp/method/visu_pars.
        Spatial extents are scaled by *scale*; dt is the repetition time in
        seconds (0.0 for single-volume data)."""
        labels_visu_pars = ['VisuCoreExtent']
        self.__check_params('visu_pars', labels_visu_pars)
        ACQ_slice_sepn = self.acqp.get('ACQ_slice_sepn')
        #PVM_SPackArrSliceGap = self.method.get('PVM_SPackArrSliceGap')
        PVM_SPackArrSliceDistance = self.method.get('PVM_SPackArrSliceDistance')
        VisuCoreExtent = self.visu_pars.get('VisuCoreExtent')
        VisuCoreFrameThickness = self.visu_pars.get('VisuCoreFrameThickness')
        VisuCoreUnits = self.visu_pars.get('VisuCoreUnits')
        VisuCoreSlicePacksSliceDist = self.visu_pars.get('VisuCoreSlicePacksSliceDist')
        VisuAcqRepetitionTime = self.visu_pars.get('VisuAcqRepetitionTime')
        nd = min(len(data_dims), 4)
        dims = [1] * 4
        dims[:nd] = data_dims
        nx, ny, nz, nt = dims
        # Voxel dimensions
        if len(VisuCoreExtent) > 1:
            dx = scale * float(VisuCoreExtent[0]) / nx
            dy = scale * float(VisuCoreExtent[1]) / ny
        else:
            dx = 1.0
            dy = 0.0
        # Slice dimension: prefer the true 3D extent, then the various
        # slice-separation parameters (thickness inclusive gap), then the
        # plain frame thickness.
        if len(VisuCoreExtent) > 2:
            dz = scale * float(VisuCoreExtent[2]) / nz
        elif ACQ_slice_sepn is not None: # Slice thickness inclusive gap
            dz = scale * float(ACQ_slice_sepn[0])
        elif PVM_SPackArrSliceDistance is not None: # Slice thickness inclusive gap
            dz = scale * float(PVM_SPackArrSliceDistance[0])
        elif VisuCoreSlicePacksSliceDist is not None: # Slice thickness inclusive gap (PV6)
            dz = scale * float(VisuCoreSlicePacksSliceDist[0])
        elif VisuCoreFrameThickness is not None: # Slice thickness
            dz = scale * float(VisuCoreFrameThickness[0])
        else:
            dz = 0.0
        # Temporal dimension: repetition time in seconds (ms in the header).
        if (VisuAcqRepetitionTime is not None) and (nt > 1):
            dt = float(VisuAcqRepetitionTime[0]) / 1000.0
        else:
            dt = 0.0
        #voxel_dims = [dx, dy, dz, dt][:nd]
        voxel_dims = [dx, dy, dz, dt]
        # Units of the voxel dimensions
        voxel_unit = par.extract_unit_string(par.extract_jcamp_strings(VisuCoreUnits[0], get_all=False))
        return (voxel_dims, voxel_unit)
    def __map_data(self, data, map_pv6):
        """Map stored integers back to real values using the per-frame offset
        and slope; PV6 divides by the slope, older versions multiply.
        Returns (float32 data, 'float32')."""
        VisuCoreExtent = self.visu_pars.get('VisuCoreExtent')
        VisuCoreDataOffs = self.visu_pars.get('VisuCoreDataOffs')
        VisuCoreDataSlope = self.visu_pars.get('VisuCoreDataSlope')
        n = min(len(VisuCoreExtent), 3)
        dims = data.shape[n:]
        # Offset/slope can be scalar or one value per frame.
        if VisuCoreDataOffs.size > 1:
            VisuCoreDataOffs = VisuCoreDataOffs.reshape(dims, order='F').astype(np.float32)
        else:
            VisuCoreDataOffs = np.float32(VisuCoreDataOffs[0])
        if VisuCoreDataSlope.size > 1:
            VisuCoreDataSlope = VisuCoreDataSlope.reshape(dims, order='F').astype(np.float32)
        else:
            VisuCoreDataSlope = np.float32(VisuCoreDataSlope[0])
        if map_pv6:
            data = data.astype(np.float32) / VisuCoreDataSlope
            data = data + VisuCoreDataOffs
        else:
            data = data.astype(np.float32) * VisuCoreDataSlope
            data = data + VisuCoreDataOffs
        return (data, 'float32')
    def __make_subfolder(self, subfolder=''):
        """Ensure <procfolder>/<study>[/<subfolder>] exists and return it."""
        procfolder = os.path.join(self.procfolder, self.study)
        if not os.path.isdir(procfolder):
            os.mkdir(procfolder)
        if len(subfolder) > 0:
            procfolder = os.path.join(self.procfolder, self.study, subfolder)
            if not os.path.isdir(procfolder):
                os.mkdir(procfolder)
        return procfolder
    def __get_matrix(self):
        """Build a 4x4 affine from VisuCoreOrientation (rotation) and the
        scaled VisuCorePosition (translation)."""
        VisuCoreOrientation = self.visu_pars.get('VisuCoreOrientation')
        VisuCoreOrientation = VisuCoreOrientation.flatten()[:9].astype(np.float32)
        VisuCoreOrientation = VisuCoreOrientation.reshape((3, 3), order='F')
        VisuCorePosition = self.visu_pars.get('VisuCorePosition')
        VisuCorePosition = VisuCorePosition.flatten()[:3].astype(np.float32)
        matrix = np.zeros((4, 4), dtype=np.float32)
        matrix[:3, :3] = VisuCoreOrientation
        matrix[:3, 3] = self.scale * VisuCorePosition
        matrix[3, 3] = 1
        return matrix
    def __save_matrix(self, matrix, procfolder, ext='mat'):
        """Write *matrix* as whitespace-separated text (Unix line endings)."""
        lines = '\n'.join((' '.join('%.12g' % (x,) for x in matrix[y]) + ' ') for y in range(matrix.shape[0]))
        fname = '.'.join([self.name, ext])
        fpath = os.path.join(procfolder, fname)
        print(fpath)
        # BUG FIX: on Python 3, print() into a binary ('wb') handle raises
        # TypeError. Open in text mode with newline='' so the explicit
        # chr(10) line endings are written untranslated (Unix format).
        fid = open(fpath, 'w', newline='')
        # Write text file
        for line in lines.splitlines():
            print(line, end=chr(10), file=fid)
        # Close text file
        fid.close()
    def read_2dseq(self, map_raw=False, map_pv6=False, roll_fg=False, squeeze=False, compact=False, swap_vd=False, scale=1.0):
        """Read the 2dseq binary plus acqp/method/visu_pars headers and build
        self.nifti_image.

        Parameters:
            map_raw  -- map stored integers to real values via offset/slope
            map_pv6  -- divide by the slope instead of multiplying (PV6 data)
            roll_fg  -- move the slice FrameGroup axis to position 2
            squeeze  -- drop data dimensions of size 1
            compact  -- collapse all dimensions beyond the 3rd into one
            swap_vd  -- swap third and fourth voxel dimension
            scale    -- scale factor applied to spatial voxel dimensions
        """
        self.scale = float(scale)
        # Get acqp and method parameters
        datadir = os.path.join(self.rawfolder, self.study, str(self.expno))
        _header, self.acqp = par.read_param_file(os.path.join(datadir, 'acqp'))
        _header, self.method = par.read_param_file(os.path.join(datadir, 'method'))
        # Get visu_pars parameters
        datadir = os.path.join(self.rawfolder, self.study, str(self.expno), 'pdata', str(self.procno))
        #_header, self.d3proc = par.read_param_file(os.path.join(datadir, 'd3proc')) # Removed for PV6
        header, self.visu_pars = par.read_param_file(os.path.join(datadir, 'visu_pars'))
        self.__check_path(header['Path'])
        # Remove selected parameters from the visu_pars dictionary
        #if 'VisuCoreDataMin' in self.visu_pars: del self.visu_pars['VisuCoreDataMin']
        #if 'VisuCoreDataMax' in self.visu_pars: del self.visu_pars['VisuCoreDataMax']
        #if 'VisuCoreDataOffs' in self.visu_pars: del self.visu_pars['VisuCoreDataOffs']
        #if 'VisuCoreDataSlope' in self.visu_pars: del self.visu_pars['VisuCoreDataSlope']
        #if 'VisuAcqImagePhaseEncDir' in self.visu_pars: del self.visu_pars['VisuAcqImagePhaseEncDir']
        #VisuCoreFrameType = self.visu_pars.get('VisuCoreFrameType')
        #VisuCoreDiskSliceOrder = self.visu_pars.get('VisuCoreDiskSliceOrder')
        # Get data dimensions
        data_dims, data_type, dim_desc, fg_index, fg_slice = self.__get_data_dims()
        # Open 2dseq file
        path_2dseq = os.path.join(datadir, '2dseq')
        try:
            fid = open(path_2dseq, 'rb')
        except IOError as V:
            if V.errno == 2:
                sys.exit("Cannot open 2dseq file %s" % (path_2dseq,))
            else:
                raise
        # Read 2dseq file (Fortran order, as stored by ParaVision)
        data = np.fromfile(fid, dtype=np.dtype(data_type)).reshape(data_dims, order='F')
        # Close 2dseq file
        fid.close()
        # Map to raw data range
        if map_raw:
            data, data_type = self.__map_data(data, map_pv6)
        # Move FrameGroup FG_SLICE axis to position 2
        self.roll_fg = False
        if roll_fg:
            if fg_index is None:
                print("Warning: Could not find FrameGroup.", file=sys.stderr)
            elif fg_index > 2:
                print("Warning: Move axis %d (FrameGroup %s) to position %d." % (fg_index, fg_slice, 2), file=sys.stderr)
                data = np.rollaxis(data, fg_index, 2)
                data_dims = list(data.shape)
                self.roll_fg = True
            else:
                print("Warning: Could not move FrameGroup %s." % (fg_slice,), file=sys.stderr)
        # Remove data dimensions of size 1
        if squeeze and (1 in data_dims):
            data = np.squeeze(data)
            data_dims = list(data.shape)
        # Reduce data dimensions to 4
        if compact and (len(data_dims) > 4):
            nt = int(np.prod(data_dims[3:]))
            data_dims[3:] = [nt]
            data = data.reshape(data_dims, order='F')
        # Get voxel dimensions
        voxel_dims, voxel_unit = self.__get_voxel_dims(data_dims, scale=self.scale)
        self.swap_vd = False
        if swap_vd:
            if (not self.roll_fg) and (fg_index != 2) and (len(data_dims) > 3):
                print("Warning: Swap third and fourth voxel dimension.", file=sys.stderr)
                voxel_dims[2:4] = voxel_dims[3:1:-1]
                self.swap_vd = True
            else:
                print("Warning: Could not swap third and fourth voxel dimension.", file=sys.stderr)
        pixdim = [0.0] * 8
        pixdim[0] = 1.0 # NIfTI qfac which is either -1 or 1
        pixdim[1:len(voxel_dims)+1] = voxel_dims
        # Info parameters
        self.data_dims = data_dims
        self.data_type = data_type
        self.voxel_dims = voxel_dims
        self.voxel_unit = voxel_unit
        # NIfTI image
        self.nifti_image = nii.Nifti1Image(data.reshape(data_dims, order='F'), None)
        # NIfTI header
        # BUG FIX: Nifti1Image.get_header() was removed in nibabel 5.0 --
        # use the .header property instead.
        header = self.nifti_image.header
        header.set_data_dtype(data.dtype)
        header.set_data_shape(data_dims)
        #header.set_zooms(voxel_dims)
        header['pixdim'] = pixdim
        if dim_desc != 'spectroscopic':
            header.set_xyzt_units(xyz=voxel_unit, t=None)
        #print("header:"); print(header)
    def save_nifti(self, ftype='NIFTI_GZ', subfolder=''):
        """Save self.nifti_image as NIFTI_GZ (default), NIFTI or ANALYZE."""
        if ftype == 'NIFTI_GZ':
            ext = 'nii.gz'
        elif ftype == 'NIFTI':
            ext = 'nii'
        elif ftype == 'ANALYZE':
            ext = 'img'
        else:
            ext = 'nii.gz'
        fproc = self.__make_subfolder(subfolder=subfolder)
        fname = '.'.join([self.name, ext])
        fpath = os.path.join(fproc, fname)
        # Write NIfTI file
        nib.save(self.nifti_image, fpath)
        #self.nifti_image.to_filename(fpath)
        print(self.nifti_image.get_filename())
    def get_matrix(self):
        """Return (affine, inverse affine) from the visu_pars header."""
        matrix = self.__get_matrix()
        return (matrix, np.linalg.inv(matrix))
    def save_matrix(self, subfolder=''):
        """Write the affine (.omat) and its inverse (.imat) as text files."""
        matrix = self.__get_matrix()
        fproc = self.__make_subfolder(subfolder=subfolder)
        self.__save_matrix(matrix, fproc, ext='omat')
        self.__save_matrix(np.linalg.inv(matrix), fproc, ext='imat')
    def save_table(self, eff_bval=False, subfolder=''):
        """Write diffusion tables: a DSI-Studio b-table (b-value + direction
        per volume, CRLF endings) plus FSL-style bvals/bvecs text files.
        With eff_bval=True the effective b-values are used."""
        DwAoImages = int(self.method.get('PVM_DwAoImages'))
        DwNDiffDir = int(self.method.get('PVM_DwNDiffDir'))
        DwNDiffExpEach = int(self.method.get('PVM_DwNDiffExpEach'))
        #DwNDiffExp = int(self.method.get('PVM_DwNDiffExp'))
        #print("DwAoImages:", DwAoImages)
        #print("DwNDiffDir:", DwNDiffDir)
        #print("DwNDiffExpEach:", DwNDiffExpEach)
        #print("DwNDiffExp:", DwNDiffExp)
        # Total volumes = A0 (b=0) images + one per direction and b-value.
        nd = DwAoImages + DwNDiffDir * DwNDiffExpEach
        bvals = np.zeros(nd, dtype=np.float64)
        dwdir = np.zeros((nd, 3), dtype=np.float64)
        if eff_bval:
            DwEffBval = self.method.get('PVM_DwEffBval').astype(np.float64)
            #print("DwEffBval:"); print(DwEffBval)
            bvals[DwAoImages:] = DwEffBval[DwAoImages:]
        else:
            DwBvalEach = self.method.get('PVM_DwBvalEach').astype(np.float64)
            #print("DwBvalEach:", DwBvalEach)
            bvals[DwAoImages:] = np.tile(DwBvalEach, DwNDiffDir)
        DwDir = self.method.get('PVM_DwDir').astype(np.float64)
        #DwDir = DwDir.reshape((DwNDiffDir * DwNDiffExpEach, 3))
        #print("DwDir:"); print(DwDir)
        dwdir[DwAoImages:] = np.repeat(DwDir, DwNDiffExpEach, axis=0)
        fproc = self.__make_subfolder(subfolder=subfolder)
        fname = '.'.join([self.name, 'btable', 'txt'])
        fpath = os.path.join(fproc, fname)
        print(fpath)
        # BUG FIX: on Python 3, print() into a binary ('wb') handle raises
        # TypeError. Text mode with newline='' writes the explicit "\r\n"
        # endings untranslated (Windows format).
        fid = open(fpath, 'w', newline='')
        for i in zrange(nd):
            print("%.4f" % (bvals[i],) + " %.8f %.8f %.8f" % tuple(dwdir[i]), end="\r\n", file=fid)
        # Close file
        fid.close()
        fname = '.'.join([self.name, 'bvals', 'txt'])
        fpath = os.path.join(fproc, fname)
        print(fpath)
        # Open bvals file to write with untranslated Unix line endings
        fid = open(fpath, 'w', newline='')
        print(" ".join("%.4f" % (bvals[i],) for i in zrange(nd)), end=chr(10), file=fid)
        # Close bvals file
        fid.close()
        fname = '.'.join([self.name, 'bvecs', 'txt'])
        fpath = os.path.join(fproc, fname)
        print(fpath)
        # Open bvecs file to write with untranslated Unix line endings
        fid = open(fpath, 'w', newline='')
        for k in range(3):
            print(" ".join("%.8f" % (dwdir[i, k],) for i in zrange(nd)), end=chr(10), file=fid)
        # Close bvecs file
        fid.close()
def check_args(proc_folder, raw_folder, study, expno, procno):
    """Validate the command-line arguments: every expected directory
    (processed folder, raw folder, study, expno, pdata/procno) must exist,
    otherwise exit with an error naming the first missing path."""
    expected_dirs = [
        proc_folder,                                                          # processed data folder
        raw_folder,                                                           # raw data folder
        os.path.join(raw_folder, study),                                      # study name
        os.path.join(raw_folder, study, str(expno)),                          # experiment number
        os.path.join(raw_folder, study, str(expno), 'pdata', str(procno)),    # processed images number
    ]
    for candidate in expected_dirs:
        if not os.path.isdir(candidate):
            sys.exit("Error: '%s' is not an existing directory." % (candidate,))
def main():
    """Command-line entry point: convert one ParaVision dataset to NIfTI,
    save the orientation matrices and optionally the diffusion b-table."""
    import argparse
    parser = argparse.ArgumentParser(description='Read ParaVision data and save as NIfTI file')
    parser.add_argument('proc_folder', help='processed data folder')
    parser.add_argument('raw_folder', help='raw data folder')
    parser.add_argument('study', help='study name')
    parser.add_argument('expno', help='experiment number')
    parser.add_argument('procno', help='processed (reconstructed) images number')
    # NOTE: --scale arrives as a string from argparse; read_2dseq() converts
    # it with float(), so both the 1.0 default and CLI values work.
    parser.add_argument('-s', '--scale', default=1.0, help='voxel dimensions scale factor')
    parser.add_argument('-m', '--map_raw', action='store_true', help='map the data to get the real values')
    parser.add_argument('-p', '--map_pv6', action='store_true', help='map the data by dividing (ParaVision 6)')
    parser.add_argument('-r', '--roll_fg', action='store_true', help='move slice framegroup to third dimension')
    parser.add_argument('-q', '--squeeze', action='store_true', help='remove data dimensions of size 1')
    parser.add_argument('-c', '--compact', action='store_true', help='reduce data dimensions to 4')
    parser.add_argument('-v', '--swap_vd', action='store_true', help='swap third and fourth voxel dimension')
    parser.add_argument('-t', '--table', action='store_true', help='save b-values and diffusion directions')
    args = parser.parse_args()
    # Fail early if any of the expected directories is missing.
    check_args(args.proc_folder, args.raw_folder, args.study, args.expno, args.procno)
    pv = ParaVision(args.proc_folder, args.raw_folder, args.study, args.expno, args.procno)
    pv.read_2dseq(map_raw=args.map_raw, map_pv6=args.map_pv6, roll_fg=args.roll_fg, squeeze=args.squeeze, compact=args.compact, swap_vd=args.swap_vd, scale=args.scale)
    pv.save_nifti(ftype='NIFTI_GZ')
    pv.save_matrix()
    # Diffusion table only on request (requires PVM_Dw* method parameters).
    if args.table:
        pv.save_table()
if __name__ == '__main__':
    # Run the command-line interface when invoked as a script.
    main()
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.3_fMRIActivity/getSingleRegTable.py | .py | 5,280 | 138 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import sys,os
import numpy as np
import glob
import shutil
import parReader
import i32Reader
def findData(path, addon):
    """Return all files under *path* matching the glob pattern *addon*,
    in the order glob yields them (unsorted)."""
    pattern = path + '/' + addon
    return list(glob.iglob(pattern, recursive=True))
def getRegrTable(file_name,physio_Folder,parPath_folder):
    """Merge motion parameters (.par), physiological monitoring data (.i32)
    and polynomial drift regressors into one nuisance-regressor text file
    per slice, written to <file_name>/txtRegrPython.

    Parameters:
        file_name      -- subject folder; must contain 'rs-fMRI_mcf' and
                          'rawMonData' subfolders
        physio_Folder  -- path to the physio .i32 file; if falsy, a default
                          file from ../../lib is substituted
        parPath_folder -- folder containing the per-slice .par files

    Returns 0 on success.

    NOTE(review): the __main__ block of this module calls this function with
    only two arguments -- callers must supply parPath_folder as well.
    """
    # proof par Folder
    par_Path = os.path.join(file_name,'rs-fMRI_mcf')
    if not os.path.exists(par_Path):
        sys.exit("Error: %s is not an existing directory or file." % (par_Path,))
    # get par folder info
    par_folder_path = parPath_folder
    # get par-Folder content
    cur_contentOfPar = findData(par_folder_path, '*.par')
    # One .par file per slice.
    numberOfSlices = len(cur_contentOfPar)
    # read the first par Table to get the real number of Repition
    cur_par_file_path = cur_contentOfPar[0]
    parTestTable = parReader.getPar(cur_par_file_path)
    # delete the first five measurements
    numberOfAllRepitionsParTable = len(parTestTable) - 5
    # proof i32 Folder
    i32_Path = os.path.join(file_name,'rawMonData')
    if not os.path.exists(i32_Path):
        sys.exit("Error: %s is not an existing directory or file." % (i32_Path,))
    # Fall back to a bundled default physio file matching the repetition count.
    if not physio_Folder:
        physio_Folder= os.path.abspath(os.path.join(os.getcwd(), os.pardir,os.pardir))+'/lib/physio%s.i32'%str(numberOfAllRepitionsParTable)
    # generate target Folder (recreated from scratch on every run)
    target_folder = os.path.join(file_name, 'txtRegrPython')
    if os.path.exists(target_folder):
        shutil.rmtree(target_folder)
    os.mkdir(target_folder)
    # get all file entries and compare the length
    #listofPar_names = findData(par_Path,'*mcf.mat')
    #if not len(listofPar_names) == len(listofI32_names):
    #    print('\x1b[00;37;43m' + 'Some Data of I32 have no corresponding par data!' + '\x1b[0m')
    # Column headers: 4 physio regressors, 6 motion parameters, 3 drift terms.
    headlineStr = ['#Resp. BLC(1)','Resp. Deriv.(2)','Card. BLC(3)',
                   'Card. Deriv.(4)','RotX(5)','RotY(6)','RotZ(7)',
                   'dX(8)','dY(9)','dZ(10)','1st Order Drift(11)',
                   '2nd Order Drift(12)','3rd Order Drift(13)']
    # get i32 - Data
    trigger,i32Table = i32Reader.getI32(physio_Folder,numberOfSlices,numberOfAllRepitionsParTable)
    numberOfAllRepitionsI32 = len(trigger)/numberOfSlices
    #print(numberOfAllRepitionsI32)
    # generate drifts: linear, quadratic and cubic trends over [-1, 1]
    driftTable = np.zeros([numberOfAllRepitionsParTable,3])
    x = np.linspace(-1,1,numberOfAllRepitionsParTable)
    driftTable[:,0] = x
    driftTable[:,1] = x**2
    driftTable[:,2] = x**3
    tempRgrName = os.path.basename(physio_Folder).split('.')[0]
    for j in range(len(cur_contentOfPar)):
        # Characters [-15:-11] of the .par filename encode the 4-digit slice
        # index -- assumes the fixed naming scheme of upstream steps (TODO
        # confirm against the .par writer).
        cur_slc = int(cur_contentOfPar[j][-15:-11])
        rgr_folder_name = tempRgrName + '_mcf_slice_' + cur_contentOfPar[j][-15:-11] + '.txt'
        rgr_folder_path = os.path.join(target_folder,rgr_folder_name)
        # get par - Data
        cur_par_file_path = cur_contentOfPar[j]
        parTable = parReader.getPar(cur_par_file_path)
        # get I32 entries for cur_slc (every numberOfSlices-th trigger)
        cur_slc_i32entries = i32Table[trigger[cur_slc::numberOfSlices]]
        # merge i32Table, parTable and driftTable
        fid = open(rgr_folder_path,'w')
        fid.write('%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n'
                  % (headlineStr[0],headlineStr[1],headlineStr[2],headlineStr[3],headlineStr[4],
                     headlineStr[5],headlineStr[6],headlineStr[7],headlineStr[8],headlineStr[9]
                     ,headlineStr[10],headlineStr[11],headlineStr[12]))
        # One tab-separated row of 13 regressor values per repetition.
        for repIndex in range(numberOfAllRepitionsParTable):
            fid.write('%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\n'
                      % (cur_slc_i32entries[repIndex,0],cur_slc_i32entries[repIndex,1],
                         cur_slc_i32entries[repIndex,2],cur_slc_i32entries[repIndex,3],
                         parTable[repIndex,0],parTable[repIndex,1],parTable[repIndex,2],
                         parTable[repIndex,3],parTable[repIndex,4],parTable[repIndex,5],
                         driftTable[repIndex,0],driftTable[repIndex,1],driftTable[repIndex,2]))
        fid.close()
    return 0
if __name__ == "__main__":
    # Command-line entry point: validate the inputs and build the per-slice
    # regression tables.
    import argparse
    parser = argparse.ArgumentParser(description='Generate Regression Table out of par and I32')
    requiredNamed = parser.add_argument_group('required named arguments')
    requiredNamed.add_argument('-i','--input', help='Path to input data',required=True)
    requiredNamed.add_argument('-p', '--physio_Folder', help='Path to the Physio folder', required=True)
    # BUG FIX: getRegrTable() requires the folder containing the .par files as
    # its third argument; previously it was never supplied, so the script
    # always crashed with a TypeError.
    requiredNamed.add_argument('-r', '--par_folder', help='Path to the folder containing the .par files', required=True)
    args = parser.parse_args()
    # BUG FIX: the old checks tested args.input twice and referenced a
    # nonexistent args.file inside the error messages (AttributeError).
    input = args.input
    if not os.path.exists(input):
        sys.exit("Error: '%s' is not an existing directory or file." % (input,))
    physio_Folder = args.physio_Folder
    if not os.path.exists(physio_Folder):
        sys.exit("Error: '%s' is not an existing directory or file." % (physio_Folder,))
    par_folder = args.par_folder
    if not os.path.exists(par_folder):
        sys.exit("Error: '%s' is not an existing directory or file." % (par_folder,))
    result = getRegrTable(input, physio_Folder, par_folder)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.3_fMRIActivity/correlate_matrix.py | .py | 914 | 23 | import numpy as np
import scipy.io as io
from scipy.stats import pearsonr
def calculate_p_corr_matrix(data, lines, output_paths):
    """Pearson-correlate every pair of columns in *data* and save three
    MATLAB files: correlation matrix, p-value matrix and the Fisher
    z-transformed correlation matrix (diagonals stay zero).

    Parameters:
        data         -- (observations x regions) array of time series
        lines        -- region labels stored under key 'label'
        output_paths -- [corr_path, pval_path, fisher_z_path] for savemat
    """
    n_cols = np.shape(data)[1]
    corr_mat = np.zeros((n_cols, n_cols))
    pval_mat = np.zeros((n_cols, n_cols))
    # Upper triangle only; mirror each value into the lower triangle.
    for col_a in range(n_cols):
        for col_b in range(col_a + 1, n_cols):
            r, p = pearsonr(data[:, col_a], data[:, col_b])
            corr_mat[col_a, col_b] = corr_mat[col_b, col_a] = r
            pval_mat[col_a, col_b] = pval_mat[col_b, col_a] = p
    # calculate fisher-transformation
    fisher_z = np.arctanh(corr_mat)
    io.savemat(output_paths[0], {'matrix': corr_mat, 'label': lines})
    io.savemat(output_paths[1], {'matrix': pval_mat, 'label': lines})
    io.savemat(output_paths[2], {'matrix': fisher_z, 'label': lines})
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.3_fMRIActivity/regress.py | .py | 11,859 | 330 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import sys,os
import nibabel as nii
import numpy as np
import nipype.interfaces.fsl as fsl
import glob
import shutil
from pathlib import Path
def scaleBy10(input_path, inv):
    """Scale the affine of a NIfTI file by 10 (inv=False) or 1/10 (inv=True).

    FSL tools assume roughly human-sized voxels; rodent data is therefore
    scaled up by 10 before FSL calls and scaled back afterwards.

    inv=False writes a '*_fslScaleTemp.nii.gz' copy next to the input and
    returns its path; inv=True overwrites *input_path* in place (units set
    to mm) and returns it. Any other value exits with an error.

    NOTE(review): 'data.affine * scale' is an elementwise product with a
    diagonal matrix, so all off-diagonal affine entries (rotations and
    translations) are zeroed -- presumably intentional for this pipeline,
    TODO confirm.
    """
    data = nii.load(input_path)
    # BUG FIX: Nifti1Image.get_data() was removed in nibabel 5.0;
    # np.asanyarray(dataobj) preserves the on-disk dtype (get_fdata would
    # silently convert to float64).
    imgTemp = np.asanyarray(data.dataobj)
    if inv == False:
        scale = np.eye(4) * 10
        scale[3][3] = 1
        scaledNiiData = nii.Nifti1Image(imgTemp, data.affine * scale)
        fslPath = os.path.join(os.path.dirname(input_path), os.path.basename(input_path).split('.')[0]+'_fslScaleTemp.nii.gz')
        nii.save(scaledNiiData, fslPath)
        return fslPath
    elif inv == True:
        scale = np.eye(4) / 10
        scale[3][3] = 1
        unscaledNiiData = nii.Nifti1Image(imgTemp, data.affine * scale)
        hdrOut = unscaledNiiData.header
        hdrOut.set_xyzt_units('mm')
        # hdrOut['sform_code'] = 1
        nii.save(unscaledNiiData, input_path)
        return input_path
    else:
        sys.exit("Error: inv - parameter should be a boolean.")
def findRegData(path):
    """Return all regression text files (*.txt) directly under *path*,
    sorted alphabetically."""
    return sorted(glob.iglob(path + '/*.txt', recursive=True))
def findSlicesData(path, pre):
    """Return all NIfTI files under *path* whose name starts with prefix
    *pre* (pattern <pre>*.nii.gz), sorted alphabetically."""
    return sorted(glob.iglob(path + '/' + pre + '*.nii.gz', recursive=True))
def delete5Slides(input_file,regr_Path):
    """Drop the first five volumes of a 4D NIfTI file using FSL's fslroi.

    The data is scaled x10 for FSL, trimmed, then unscaled again. Returns
    the path of the trimmed file ('<name>_f.nii.gz' next to the input).

    NOTE(review): the regr_Path parameter is unused in this function.
    """
    # scale Nifti data by factor 10
    fslPath = scaleBy10(input_file, inv=False)
    # delete 5 slides (t_min=5, t_size=-1 keeps everything from volume 5 on)
    output_file = os.path.join(os.path.dirname(input_file), os.path.basename(input_file).split('.')[0]) + '_f.nii.gz'
    myROI = fsl.ExtractROI(in_file=fslPath, roi_file=output_file, t_min=5, t_size=-1)
    print(myROI.cmdline)
    myROI.run()
    os.remove(fslPath)
    # unscale result data by factor 10ˆ(-1)
    output_file = scaleBy10(output_file, inv=True)
    return output_file
def fsl_RegrSliceWise(input_file,txtregr_Path,regr_Path):
    """Regress nuisance signals out of an fMRI volume slice by slice using
    FSL's fsl_regfilt (via nipype).

    Parameters:
        input_file  -- 4D fMRI NIfTI file
        txtregr_Path -- folder with one regressor .txt file per slice
        regr_Path    -- output folder for the regressed data

    The volume is split along z, each slice is filtered with the regressor
    columns [1,2,7,9,11,12,13] of its matching text file, and the slices are
    merged back into '<name>_RGR.nii.gz'. If no regressor files exist, the
    input is copied through unchanged. Returns the output file path.

    Side effect: temporarily changes the process working directory to the
    'temp' folder next to the input (assumed to exist -- TODO confirm).
    """
    # scale Nifti data by factor 10
    dataName = os.path.basename(input_file).split('.')[0]
    aidamri_dir = os.getcwd()
    temp_dir = os.path.join(os.path.dirname(input_file), "temp")
    os.chdir(temp_dir)
    # proof data existence
    regrTextFiles = findRegData(txtregr_Path)
    if len(regrTextFiles) == 0:
        print('No regression with physio data!')
        output_file = os.path.join(regr_Path,
                                   os.path.basename(input_file).split('.')[0]) + '_RGR.nii.gz'
        shutil.copyfile(input_file, output_file)
        # BUG FIX: restore the working directory before the early return,
        # matching the normal exit path below.
        os.chdir(aidamri_dir)
        return output_file
    fslPath = scaleBy10(input_file, inv=False)
    # split input_file in slices
    mySplit = fsl.Split(in_file=fslPath, dimension='z', out_base_name=dataName)
    print(mySplit.cmdline)
    mySplit.run()
    os.remove(fslPath)
    # separate ref and src volume in slices
    sliceFiles = findSlicesData(os.getcwd(), dataName)
    if not len(regrTextFiles) == len(sliceFiles):
        sys.exit('Error: Not enough .txt-Files in %s' % txtregr_Path)
    print('Start separate slice Regression ... ')
    # start to regression slice by slice
    print('For all slices ...')
    for i in range(len(sliceFiles)):
        slc = sliceFiles[i]
        regr = regrTextFiles[i]
        # only take the columns [1,2,7,9,11,12,13] of the reg-.txt Files
        output_file = os.path.join(regr_Path, os.path.basename(slc))
        myRegr = fsl.FilterRegressor(in_file=slc,design_file=regr,out_file=output_file,filter_columns=[1,2,7,9,11,12,13])
        print(myRegr.cmdline)
        myRegr.run()
        os.remove(slc)
    # merge slices to a single volume
    mcf_sliceFiles = findSlicesData(regr_Path, dataName)
    output_file = os.path.join(regr_Path,
                               os.path.basename(input_file).split('.')[0]) + '_RGR.nii.gz'
    myMerge = fsl.Merge(in_files=mcf_sliceFiles, dimension='z', merged_file=output_file)
    print(myMerge.cmdline)
    myMerge.run()
    for slc in mcf_sliceFiles: os.remove(slc)
    # unscale result data by factor 10ˆ(-1)
    output_file = scaleBy10(output_file, inv=True)
    os.chdir(aidamri_dir)
    return output_file
def getMask(input_file,threshold):
    """Create a binary mask with FSL fslmaths: threshold the temporal-minimum
    image ('-Tmin') and binarize ('-bin').

    The threshold is divided by 10 -- presumably to compensate for the x10
    intensity/voxel scaling used elsewhere in this pipeline (TODO confirm).
    Returns the path of 'mask.nii.gz' next to the input file.
    """
    threshold = threshold/10
    output_file = os.path.join(os.path.dirname(input_file), 'mask.nii.gz')
    thres = fsl.Threshold(in_file=input_file,thresh=threshold,out_file=output_file,output_datatype='char',args='-Tmin -bin')
    print(thres.cmdline)
    thres.run()
    return output_file
def dilF(input_file):
    """Dilate a mask with FSL fslmaths (maximum filtering).

    Writes/overwrites 'mask.nii.gz' next to the input and returns its path.
    """
    output_file = os.path.join(os.path.dirname(input_file), 'mask.nii.gz')
    mydilf = fsl.DilateImage(in_file=input_file, operation='max', out_file=output_file)
    print(mydilf.cmdline)
    mydilf.run()
    return output_file
def applyMask(input_file, mask_file, appendix):
    """Mask ``input_file`` with ``mask_file`` (FSL ApplyMask).

    The result is written next to the input as
    ``<basename><appendix>.nii.gz`` and its path returned.
    """
    base = os.path.basename(input_file).split('.')[0]
    output_file = os.path.join(os.path.dirname(input_file), base) + appendix + '.nii.gz'
    masker = fsl.ApplyMask(in_file=input_file, out_file=output_file, mask_file=mask_file)
    print(masker.cmdline)
    masker.run()
    return output_file
def getMean(input_file, appendix):
    """Compute the temporal mean image (FSL MeanImage).

    The output is ``<appendix>.nii.gz`` in the input's directory; the
    path is returned.
    """
    out_dir = os.path.dirname(input_file)
    output_file = os.path.join(out_dir, appendix + '.nii.gz')
    mean_if = fsl.MeanImage(in_file=input_file, out_file=output_file)
    print(mean_if.cmdline)
    mean_if.run()
    return output_file
def applySusan(input_file,meanintensity,FWHM,mean_func):
    """Run FSL SUSAN noise reduction on ``input_file``.

    ``meanintensity`` is the brightness threshold, ``FWHM`` the smoothing
    kernel, and ``mean_func`` the mean functional image used as USAN.
    Output is ``<prefix>SRGR.nii.gz`` next to the input; its path is
    returned.

    NOTE(review): both images are scaled via scaleBy10(); if that helper
    writes to a fixed temp filename in the same directory, fslPath and
    meanPath would collide — verify against the scaleBy10 defined in this
    module (not visible in this chunk).
    """
    # scale Nifti data by factor 10 so FSL treats the rodent voxels sensibly
    fslPath = scaleBy10(input_file, inv=False)
    meanPath = scaleBy10(mean_func, inv=False)
    # output name: everything before 'RGR' in the basename + 'SRGR.nii.gz'
    output_file = os.path.join(os.path.dirname(input_file),
                               os.path.basename(input_file).split('RGR')[0]) + 'SRGR.nii.gz'
    # 2D (slice-wise) smoothing with the mean image as a single USAN
    mySusan = fsl.SUSAN(in_file=fslPath, brightness_threshold=meanintensity, fwhm=FWHM, dimension=2,
                        use_median=1, usans=[(meanPath, meanintensity), ], out_file=output_file)
    print(mySusan.cmdline)
    mySusan.run()
    os.remove(fslPath)
    os.remove(meanPath)
    # unscale result data by factor 10^(-1)
    output_file = scaleBy10(output_file, inv=True)
    return output_file
def mathOperation(input_file, scale_factor):
    """Multiply every voxel of ``input_file`` by ``scale_factor``.

    Writes ``<basename>_intnorm.nii.gz`` beside the input (intensity
    normalisation step) and returns its path.
    """
    base = os.path.basename(input_file).split('.')[0]
    output_file = os.path.join(os.path.dirname(input_file), base) + '_intnorm.nii.gz'
    multiplier = fsl.BinaryMaths(
        in_file=input_file,
        operand_value=scale_factor,
        operation='mul',
        out_file=output_file,
    )
    print(multiplier.cmdline)
    multiplier.run()
    return output_file
def fsl_slicetimeCorrector(input_file, costum_timings, slice_order, TR):
    """Apply FSL slicetimer with custom timing/order files.

    Parameters are the input NIfTI path, a custom timings text file, a
    slice-order text file and the repetition time TR. Output is
    ``<basename>_st.nii.gz`` next to the input; its path is returned.
    """
    output_file = os.path.join(os.path.dirname(input_file),
                               os.path.basename(input_file).split('.')[0]) + '_st.nii.gz'
    st = fsl.SliceTimer(in_file=input_file, custom_timings=costum_timings, custom_order=slice_order, time_repetition=TR, out_file=output_file)
    # Log the command line for traceability, consistent with every other
    # FSL wrapper in this module (the original silently ran the command).
    print(st.cmdline)
    st.run()
    return output_file
def filterFSL(input_file,highpass,tempMean):
    """Temporal high-pass filter the data and re-add the temporal mean.

    ``highpass`` is the high-pass sigma (in volumes); ``tempMean`` is the
    path of the temporal-mean image that is added back after filtering.
    Returns the filtered image ``<prefix>SFRGR.nii.gz``. A binary
    ``*_thres_mask.nii.gz`` is additionally produced as a side effect; it
    is written to disk but not returned.
    """
    # output name: everything before 'SRGR' in the basename + 'SFRGR.nii.gz'
    outputSFRGR = os.path.join(os.path.dirname(input_file), os.path.basename(input_file).split('SRGR')[0])+'SFRGR.nii.gz'
    # high-pass filter, then re-add the mean image (removed by the filter)
    myHP = fsl.TemporalFilter(in_file = input_file,highpass_sigma=highpass, args='-add '+tempMean,out_file=outputSFRGR)
    print(myHP.cmdline)
    myHP.run()
    input_file = outputSFRGR
    output_file = os.path.join(os.path.dirname(input_file), os.path.basename(input_file).split('.')[0])+'_thres_mask.nii.gz'
    #input_file = getMean(input_file,'HPmean')
    # side-effect mask: temporal mean, threshold at 17% of the robust
    # range (use_robust_range), then binarize
    thres = fsl.Threshold(in_file=input_file, thresh=17, out_file=output_file, output_datatype='float',use_robust_range=True,args='-Tmean -bin')
    print(thres.cmdline)
    thres.run()
    return outputSFRGR
def applyBET(input_file,frac,radius,vertical_gradient):
    """Brain-extract ``input_file`` with FSL BET (robust mode).

    ``frac`` is the fractional intensity threshold, ``radius`` the head
    radius estimate, ``vertical_gradient`` the threshold gradient.
    Returns ``(brain_file, mask_file)``; both are rescaled back to the
    original geometry before returning.
    """
    # scale Nifti data by factor 10 so BET's size assumptions fit rodent data
    fslPath = scaleBy10(input_file,inv=False)
    # extract brain
    output_file = os.path.join(os.path.dirname(input_file),os.path.basename(input_file).split('.')[0]) + 'Bet.nii.gz'
    # BET derives the mask name from out_file by appending '_mask'
    maskFile = os.path.join(os.path.dirname(input_file), os.path.basename(input_file).split('.')[0]) + 'Bet_mask.nii.gz'
    myBet = fsl.BET(in_file=fslPath, out_file=output_file,frac=frac,radius=radius,
                    vertical_gradient=vertical_gradient,robust=True, mask = True)
    print(myBet.cmdline)
    myBet.run()
    os.remove(fslPath)
    # unscale result data by factor 10^(-1) (mask too, unlike the
    # process_fMRI.py variant of this helper)
    output_file = scaleBy10(output_file,inv=True)
    maskFile = scaleBy10(maskFile,inv=True)
    return output_file,maskFile
def startRegression(input_File, FWHM, cutOff_sec, TR, stc, slice_order = None, costum_timings = None):
    """Full slice-wise physio regression + smoothing + filtering pipeline.

    Steps: optional slice-time correction, drop first volumes, slice-wise
    physio regression, BET masking, intensity statistics, SUSAN smoothing,
    intensity normalisation and temporal high-pass filtering.

    Returns ``(regr_FileReal, srgr_file, filtered_image)`` — the raw
    regressed, the smoothed-regressed and the filtered image paths.
    ``slice_order``/``costum_timings`` are only used when ``stc`` is True.
    """
    # generate folder for regression images (recreated from scratch)
    origin_Path = os.path.dirname(os.path.dirname(input_File))
    regr_Path = os.path.join(origin_Path, 'regr')
    if os.path.exists(regr_Path):
        shutil.rmtree(regr_Path)
    os.mkdir(regr_Path)
    print("Regression started (wait!)")
    # perform slice time correction if requested
    if stc:
        input_File = fsl_slicetimeCorrector(input_File, costum_timings, slice_order, TR)
    # delete the first volumes (scanner equilibration)
    input_File5Sub = delete5Slides(input_File, regr_Path)
    # folder holding the per-slice physio regressor text files
    txtregr_Path = os.path.join(origin_Path, 'txtRegrPython')
    # slice-wise regression with physio data
    regr_FileReal = fsl_RegrSliceWise(input_File5Sub, txtregr_Path, regr_Path)
    # get mean and brain-extract it to obtain a mask
    meanRegr_File = getMean(regr_FileReal,'mean2')
    file_nameEPI_BET, mask_file = applyBET(meanRegr_File, frac=0.35, radius=45, vertical_gradient=0.1)
    os.remove(meanRegr_File)
    regr_File = applyMask(regr_FileReal,mask_file,'')
    # "robust intensity range": value similar to the 98% percentile
    myStat = fsl.ImageStats(in_file=regr_File,op_string='-p 98',terminal_output='allatonce')
    print(myStat.cmdline)
    stat_result = myStat.run()
    upperp = stat_result.outputs.out_stat
    # get binary mask thresholded at upperp/10 (see getMask)
    mask = getMask(regr_File, upperp)
    # 50% percentile within the mask
    myStat = fsl.ImageStats(in_file=regr_File, op_string=' -k ' +mask+ ' -p 50 ',mask_file=mask ,terminal_output='allatonce')
    print(myStat.cmdline)
    stat_result = myStat.run()
    meanintensity = stat_result.outputs.out_stat
    # SUSAN convention: brightness threshold = 0.75 * median intensity
    meanintensity = meanintensity*0.75
    # maximum filter (dilation) of the mask
    mask = dilF(mask)
    # apply dilated mask on the regressed file
    thresRegr_file = applyMask(regr_File,mask,'thres')
    # mean of the masked regressed dataset (USAN input for SUSAN)
    mean_func = getMean(thresRegr_file,'mean_func')
    # FWHM = 3.0
    # sigma = FWHM/(2 * np.sqrt(2 * np.log(2))) = 1.27
    srgr_file = applySusan(thresRegr_file,meanintensity,FWHM,mean_func)
    # re-apply the mask after smoothing
    smmothSRegr_file = applyMask(srgr_file,mask,'_smooth')
    inscalefactor = 10000.0/meanintensity
    # grand-mean scale image to 10000
    intnormSrgr_file = mathOperation(smmothSRegr_file,inscalefactor)
    # temporal mean of the scaled dataset (re-added after filtering)
    tempMean = getMean(intnormSrgr_file,'tempMean')
    # high-pass sigma in volumes: cutoff[s] / (2 * TR)
    highpass = (cutOff_sec / (2.0 * TR))
    #highpass = 17.6056338028
    filtered_image = filterFSL(intnormSrgr_file,highpass,tempMean)
    print('Regression completed!')
    return regr_FileReal, srgr_file ,filtered_image
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='Regression of fMRI data')
    requiredNamed = parser.add_argument_group('required named arguments')
    requiredNamed.add_argument('-i', '--input', help='Path to input file', required=True)
    # Optional pipeline parameters; defaults mirror process_fMRI.py. The
    # original called startRegression(input) without its four required
    # arguments, which raised a TypeError.
    parser.add_argument('-f', '--FWHM', type=float, default=3.0, help='Full width at half maximum')
    parser.add_argument('-c', '--cutOff_sec', type=float, default=100.0, help='High-pass filter cutoff [s]')
    parser.add_argument('-t', '--TR', type=float, default=1.42, help='Repetition time [s]')
    args = parser.parse_args()
    input_file = args.input
    if not os.path.exists(input_file):
        # the original formatted a non-existent 'args.file' attribute here
        sys.exit("Error: '%s' is not an existing directory or file." % (input_file,))
    result = startRegression(input_file, args.FWHM, args.cutOff_sec, args.TR, stc=False)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.3_fMRIActivity/process_fMRI.py | .py | 13,283 | 363 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import sys, os
import nipype.interfaces.fsl as fsl
import nibabel as nii
import numpy as np
import glob
import shutil
import regress
import getSingleRegTable
import scipy.misc as mc
import create_seed_rois
import fsl_mean_ts
from pathlib import Path
import json
def copyAtlasOfData(path, post, labels):
    """Find the single atlas file ``<path>/*<post>.nii.gz`` and build seed ROIs.

    ``labels`` is the label-IDs text file handed to
    ``create_seed_rois.startSeedPoint``. Exits with an error when zero or
    more than one atlas file matches (the original only checked for >1
    and crashed with an IndexError on zero matches).
    Returns the path of the generated seed-ROIs NIfTI.
    """
    fileALL = glob.glob(path + '/*' + post + '.nii.gz')
    if len(fileALL) != 1:
        sys.exit("Error: '%s' has no unique related Atlas File." % (path,))
    fileALL = fileALL[0]
    print("Copy Atlas Data and generate seed ROIs")
    #pathfMRI = os.path.join(os.path.dirname(path),'fMRI')
    outputRois = create_seed_rois.startSeedPoint(in_atlas=os.path.join(path, os.path.basename(fileALL)), in_labels=labels)
    return outputRois
def imgScaleResize(img):
    """Resize every slice of a 4D volume by factor 1.34 (nearest neighbour).

    NOTE(review): output dimensions (128, 128, 20, 355) and the number of
    slices/volumes are hard-coded — assumes a fixed acquisition matrix;
    confirm against the scanner protocol.
    NOTE(review): scipy.misc.imresize was removed in SciPy >= 1.3; this
    function fails on modern SciPy and needs porting (e.g. to
    scipy.ndimage.zoom) if it is still called anywhere.
    """
    newImg = np.zeros([128,128,20,355])
    # iterate volumes (axis 3) and slices (axis 2), resizing each 2D slice
    for i in range(img.shape[3]):
        for j in range(img.shape[2]):
            newImg[:,:,j,i]=mc.imresize(img[:,:,j,i],1.34,interp='nearest')
    return newImg
def scaleBy10(input_path,inv):
    """Scale a NIfTI's voxel geometry by 10 (or back by 1/10).

    FSL tooling assumes human-sized voxels; rodent data is scaled up by
    10 before FSL steps (``inv=False``, returns a temp file path) and
    scaled back afterwards (``inv=True``, overwrites ``input_path``).

    NOTE(review): ``data.affine * scale`` is an ELEMENTWISE numpy product
    with a (scaled) identity matrix, so all off-diagonal affine terms
    (translations/shears) are zeroed, not scaled — confirm this is the
    intended behaviour for this data.
    """
    data = nii.load(input_path)
    imgTemp = data.get_fdata()
    if inv is False:
        # scale up: diagonal * 10, keep homogeneous 1 at [3][3]
        scale = np.eye(4) * 10
        scale[3][3] = 1
        scaledNiiData = nii.Nifti1Image(imgTemp, data.affine * scale)
        fslPath = os.path.join(os.path.dirname(input_path), 'fslScaleTemp.nii.gz')
        nii.save(scaledNiiData, fslPath)
        return fslPath
    elif inv is True:
        # scale back down and overwrite the input file in place
        scale = np.eye(4) / 10
        scale[3][3] = 1
        unscaledNiiData = nii.Nifti1Image(imgTemp, data.affine * scale)
        hdrOut = unscaledNiiData.header
        hdrOut.set_xyzt_units('mm')
        # hdrOut['sform_code'] = 1
        nii.save(unscaledNiiData, input_path)
        return input_path
    else:
        sys.exit("Error: inv - parameter should be a boolean.")
def findSlicesData(path, pre):
    """Return the sorted list of slice files matching ``<path>/<pre>*.nii.gz``."""
    pattern = path + '/' + pre + '*.nii.gz'
    return sorted(glob.iglob(pattern, recursive=True))
def getRASorientation(file_name,proc_Path):
    """Flip the volume along axes 0 and 2 and copy it into ``proc_Path``.

    Returns the path of the saved file (same basename as the input).

    NOTE(review): the canonical (RAS) image is computed only to print its
    axis codes; the FLIPPED, non-canonical image (``epiData``) is what is
    actually saved — confirm this is intentional.
    """
    data = nii.load(file_name)
    imgData = data.get_fdata()
    # reverse slice (z) and left-right (x) axes
    imgData = np.flip(imgData, 2)
    imgData = np.flip(imgData, 0)
    epiData = nii.Nifti1Image(imgData, data.affine)
    hdrIn = epiData.header
    hdrIn.set_xyzt_units('mm')
    epiData_RAS = nii.as_closest_canonical(epiData)
    # report the would-be canonical orientation for the log
    print('Orientation:' + str(nii.aff2axcodes(epiData_RAS.affine)))
    output_file = os.path.join(proc_Path, os.path.basename(file_name))
    nii.save(epiData, output_file)
    return output_file
def getEPIMean(file_name, proc_Path):
    """Compute the temporal mean of an EPI series (FSL MeanImage).

    The result is written into ``proc_Path`` as ``<basename>mean.nii.gz``
    and its path returned.
    """
    base = os.path.basename(file_name).split('.')[0]
    output_file = os.path.join(proc_Path, base) + 'mean.nii.gz'
    mean_if = fsl.MeanImage(in_file=file_name, out_file=output_file)
    print(mean_if.cmdline)
    mean_if.run()
    return output_file
def applyBET(input_file,frac,radius,vertical_gradient):
    """Brain-extract ``input_file`` with FSL BET (robust mode).

    Returns ``(brain_file, mask_file)``. The brain image is rescaled back
    to original geometry; the mask is NOT rescaled here (unlike the
    regress.py twin of this helper) — downstream applyMask() operates in
    the 10x-scaled space, so the two stay consistent within this module.
    """
    # scale Nifti data by factor 10 so BET's size assumptions fit rodent data
    fslPath = scaleBy10(input_file,inv=False)
    # extract brain
    output_file = os.path.join(os.path.dirname(input_file),os.path.basename(input_file).split('.')[0]) + 'Bet.nii.gz'
    # BET derives the mask name from out_file by appending '_mask'
    maskFile = os.path.join(os.path.dirname(input_file), os.path.basename(input_file).split('.')[0]) + 'Bet_mask.nii.gz'
    myBet = fsl.BET(in_file=fslPath, out_file=output_file,frac=frac,radius=radius,
                    vertical_gradient=vertical_gradient,robust=True, mask = True)
    print(myBet.cmdline)
    myBet.run()
    os.remove(fslPath)
    # unscale result data by factor 10^(-1)
    output_file = scaleBy10(output_file,inv=True)
    return output_file,maskFile
def applyMask(input_file, mask_file):
    """Apply ``mask_file`` to ``input_file`` in the 10x-scaled FSL space.

    The input is temporarily scaled by 10 (matching the mask produced by
    applyBET), masked, and the result is scaled back. Output is
    ``<basename>BET.nii.gz`` next to the input; its path is returned.
    """
    scaled_input = scaleBy10(input_file, inv=False)
    base = os.path.basename(input_file).split('.')[0]
    output_file = os.path.join(os.path.dirname(input_file), base) + 'BET.nii.gz'
    masker = fsl.ApplyMask(in_file=scaled_input, out_file=output_file, mask_file=mask_file)
    print(masker.cmdline)
    masker.run()
    os.remove(scaled_input)
    # rescale result back to the original geometry
    output_file = scaleBy10(output_file, inv=True)
    return output_file
def fsl_SeparateSliceMoCo(input_file,par_folder):
    """Slice-wise motion correction (MCFLIRT) of a 4D EPI series.

    The volume is split along z, each slice time-series is motion
    corrected individually (motion parameter plots saved to
    ``par_folder``), and the corrected slices are merged back into
    ``<basename>_mcf.nii.gz``. Returns the merged file path.
    """
    # scale Nifti data by factor 10 for FSL; work inside a temp directory
    dataName = os.path.basename(input_file).split('.')[0]
    aidamri_dir = os.getcwd()
    temp_dir = os.path.join(os.path.dirname(input_file), "temp")
    if not os.path.exists(temp_dir):
        os.mkdir(temp_dir)
    fslPath = scaleBy10(input_file, inv=False)
    os.chdir(temp_dir)
    mySplit= fsl.Split(in_file=fslPath,dimension='z',out_base_name = dataName)
    print(mySplit.cmdline)
    mySplit.run()
    os.remove(fslPath)
    # collect the per-slice files produced by fslsplit
    sliceFiles = findSlicesData(os.getcwd(),dataName)
    # start to correct motion slice by slice
    for i in range(len(sliceFiles)):
        slc = sliceFiles[i]
        # ref = refFiles[i]
        # take epi as ref
        output_file = os.path.join(par_folder,os.path.basename(slc))
        myMCFLIRT = fsl.preprocess.MCFLIRT(in_file=slc,out_file=output_file,save_plots=True,terminal_output='none')
        myMCFLIRT.run()
        os.remove(slc)
        # os.remove(ref)
    # merge corrected slices back to a single volume
    mcf_sliceFiles = findSlicesData(par_folder,dataName)
    output_file = os.path.join(os.path.dirname(input_file),
                               os.path.basename(input_file).split('.')[0]) + '_mcf.nii.gz'
    myMerge = fsl.Merge(in_files=mcf_sliceFiles,dimension='z',merged_file =output_file)
    print(myMerge.cmdline)
    myMerge.run()
    for slc in mcf_sliceFiles: os.remove(slc)
    # unscale result data by factor 10^(-1)
    output_file = scaleBy10(output_file, inv=True)
    #os.remove(temp_dir)
    os.chdir(aidamri_dir)
    return output_file
def copyRawPhysioData(file_name, i32_Path):
    """Find and copy the physiological recording (.I32) matching a scan.

    The scan's ScanID is read from the NIfTI's sidecar JSON; the Physio
    folder four levels up is searched recursively for a matching .I32 file
    whose name also contains the subject and study identifiers. The match
    is copied into ``i32_Path``.

    Returns the original physio file path, or ``[]`` when no sidecar JSON
    or no matching recording exists.
    """
    img_name = Path(file_name).name
    json_name = img_name.replace(".nii.gz", ".json")
    json_file = os.path.join(os.path.dirname(file_name), json_name)
    # subject/study IDs are encoded as 'sub-<id>_...' and 'ses-<id>'-style names
    sub_name = (Path(file_name).name.split("_")[0]).split("-")[1]
    studyName = (Path(os.path.dirname(os.path.dirname(file_name))).name).split("-")[1]
    relatedPhysioData = []
    if not os.path.exists(json_file):
        # Without the sidecar JSON there is no ScanID to match against.
        # (The original fell through and hit an undefined 'scanid' below.)
        print("Error: '%s' not found - cannot determine ScanID for physio lookup." % (json_file,))
        return []
    with open(json_file, 'r') as infile:
        content = json.load(infile)
    scanid = str(content["ScanID"]) + ".I32"
    physioPath = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(file_name)))), 'Physio')
    conditions = [sub_name, studyName]
    for file in glob.iglob(os.path.join(physioPath, "**", "*" + scanid), recursive=True):
        filename = os.path.basename(file)
        if all(condition in filename for condition in conditions):
            relatedPhysioData.append(file)
    if len(relatedPhysioData) > 1:
        sys.exit("Warning: '%s' has no unique physio data for scan %s." % (physioPath, scanid,))
    # '== 0' instead of the original 'is 0' identity comparison
    # (comparing to an int literal with 'is' is undefined behaviour).
    if len(relatedPhysioData) == 0:
        print("Error: '%s' has no related physio data for scan %s." % (physioPath, scanid,))
        return []
    physioFile_name = relatedPhysioData[0]
    print('Copy related physio data %s to rawMoData' % (physioFile_name,))
    shutil.copyfile(physioFile_name, os.path.join(i32_Path, os.path.basename(physioFile_name)))
    return physioFile_name
def create_txt_file(file, data):
    """Write each element of ``data`` as its own line to ``file``.

    Used to materialise slice-order / custom-timing lists for slicetimer.
    """
    import time
    with open(file, "w") as outfile:
        for data_point in data:
            outfile.write(''.join([str(data_point), '\n']))
        # NOTE(review): 1 s pause before close — presumably to let the file
        # settle on slow/remote filesystems before FSL reads it; confirm
        # whether it is still needed.
        time.sleep(1.0)
def delete_txt_file(file):
    """Remove a temporary text file from disk."""
    os.unlink(file)
def startProcess(Rawfile_name):
    """Run the rs-fMRI preprocessing chain for one raw NIfTI file.

    Creates (recreating from scratch) the output folders, reorients the
    data, computes the EPI mean, brain-extracts and masks the series,
    performs slice-wise motion correction, and prepares the physio
    regression table. Returns the motion-corrected file path.
    """
    # generate folder for images
    origin_Path = os.path.dirname(Rawfile_name)
    proc_Path = os.path.join(origin_Path, 'rs-fMRI_niiData')
    if os.path.exists(proc_Path):
        shutil.rmtree(proc_Path)
    os.mkdir(proc_Path)
    # generate folder for motion correction files
    par_Path = os.path.join(origin_Path, 'rs-fMRI_mcf')
    if os.path.exists(par_Path):
        shutil.rmtree(par_Path)
    os.mkdir(par_Path)
    # per-scan subfolder for the MCFLIRT .mat output
    subFile= os.path.basename(Rawfile_name).split('.')[0]
    subFile = '%s_mcf.mat' % subFile
    par_Path = os.path.join(par_Path,subFile)
    if os.path.exists(par_Path):
        shutil.rmtree(par_Path)
    os.mkdir(par_Path)
    # generate folder for physio data
    i32_Path = os.path.join(origin_Path, 'rawMonData')
    if os.path.exists(i32_Path):
        shutil.rmtree(i32_Path)
    os.mkdir(i32_Path)
    # bring dataset to RAS orientation
    file_name = getRASorientation(Rawfile_name,proc_Path)
    # calculate EPIMean
    file_nameEPI = getEPIMean(file_name,proc_Path)
    # apply BET on EPImean
    file_nameEPI_BET,mask_file = applyBET(file_nameEPI,frac=0.35,radius=45,vertical_gradient=0.1)
    # apply mask on original dataset
    maskedFile_data = applyMask(file_name,mask_file)
    # apply motion correction on original dataset with EPImean as reference
    mcfFile_name=fsl_SeparateSliceMoCo(file_name,par_Path)
    # apply mean on motion corrected data
    meanMcfFile_name = getEPIMean(mcfFile_name, proc_Path)
    # copy physio data to rawMonData-Folder
    relatedPhysioFolder = copyRawPhysioData(Rawfile_name,i32_Path)
    # get regression values ('!= 0' replaces the original 'is not 0'
    # identity comparison against an int literal)
    if len(relatedPhysioFolder) != 0:
        getSingleRegTable.getRegrTable(os.path.dirname(Rawfile_name),relatedPhysioFolder,par_Path)
    else:
        print("Error: Processing not possible, because either there is no folder called Physio or the related physio data for the scan is missing there.")
    return mcfFile_name
if __name__ == "__main__":
    # default processing parameters (overridable on the command line)
    TR = 1.42
    cutOff_sec = 100.0
    FWHM = 3.0
    import argparse
    parser = argparse.ArgumentParser(description='Process fMRI data')
    requiredNamed = parser.add_argument_group('required arguments')
    requiredNamed.add_argument('-i', '--input', help='Path to the RAW data of rsfMRI NIfTI file', required=True)
    parser.add_argument('-t', '--TR', type=float, default=TR, help='Current TR value')
    parser.add_argument('-c', '--cutOff_sec', type=float, default=cutOff_sec, help='High-pass filter cutoff sec')
    parser.add_argument('-f', '--FWHM', type=float, default=FWHM, help='Full width at half maximum')
    parser.add_argument('-stc', '--slicetimecorrection', default="False", type=str, help='choose to perform slice time correction or not')
    args = parser.parse_args()
    # Honour the parsed values (the original parsed -t/-c/-f as strings
    # and never read them back, so the defaults were always used).
    TR = args.TR
    cutOff_sec = args.cutOff_sec
    FWHM = args.FWHM
    stc = args.slicetimecorrection == "True"
    # label / annotation lookup files, resolved relative to bin/3.3_fMRIActivity
    labels = os.path.abspath(
        os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/annotation_50CHANGEDanno_label_IDs.txt'
    labelNames = os.path.abspath(os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/annoVolume.nii.txt'
    labels2000 = os.path.abspath(
        os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/annotation_50CHANGEDanno_label_IDs+2000.txt'
    labelNames2000 = os.path.abspath(
        os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/annoVolume+2000_rsfMRI.nii.txt'
    input_file = args.input
    if not os.path.exists(input_file):
        # the original formatted a non-existent 'args.file' attribute here
        sys.exit("Error: '%s' is not an existing directory or file." % (input_file,))
    mcfFile_name = startProcess(input_file)
    if stc:
        print("Starting Regression with slice time correction:")
        # slice-timing parameters come from the sidecar JSON
        meta_data_file_name = Path(args.input).name.replace(".nii.gz", ".json")
        meta_data_file = os.path.join(Path(args.input).parent, meta_data_file_name)
        with open(meta_data_file, "r") as infile:
            meta_data = json.load(infile)
        TR = meta_data["RepetitionTime"] / 1000
        slice_order = meta_data["ObjOrderList"]
        n_slices = meta_data["n_slices"]
        costum_timings = meta_data["costum_timings"]
        # materialise custom timings and slice order as text files for slicetimer
        costum_timings_path = os.path.join(Path(meta_data_file).parent, "tcostum.txt")
        create_txt_file(costum_timings_path, costum_timings)
        slice_order_path = os.path.join(Path(meta_data_file).parent, "slice_order.txt")
        create_txt_file(slice_order_path, slice_order)
        rgr_file, srgr_file, sfrgr_file = regress.startRegression(mcfFile_name, FWHM, cutOff_sec, TR, stc, slice_order_path, costum_timings_path)
        # delete temp txt files
        delete_txt_file(costum_timings_path)
        delete_txt_file(slice_order_path)
    else:
        print("Starting Regression without slice time correction:")
        rgr_file, srgr_file, sfrgr_file = regress.startRegression(mcfFile_name, FWHM, cutOff_sec, TR, stc)
    print(f"sfrgr_file {sfrgr_file}")
    # extract mean time courses for the parental and split annotations
    atlasPath = os.path.dirname(input_file)
    roisPath = copyAtlasOfData(atlasPath,'Anno_parental',labels)
    fslMeantsFile = fsl_mean_ts.start_fsl_mean_ts(sfrgr_file, roisPath, labelNames, 'MasksTCs.')
    roisPath = copyAtlasOfData(atlasPath, 'AnnoSplit_parental', labels2000)
    fslMeantsFile = fsl_mean_ts.start_fsl_mean_ts(sfrgr_file, roisPath, labelNames2000, 'MasksTCsSplit.')
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.3_fMRIActivity/parReader.py | .py | 1,205 | 51 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import numpy as np
import os,sys
def getPar(filename):
    """Read an FSL MCFLIRT ``.par`` motion-parameter file.

    Each line holds six whitespace-separated values (three rotations and
    three translations). Returns an ``(n_lines, 6)`` numpy array.

    Uses ``str.split()`` (any whitespace) instead of the original
    ``split(' ')`` with even-index hopping, which only worked for exactly
    double-space-separated columns; behaviour is identical for that
    format and robust for single spaces/tabs. The file handle is closed
    via a context manager (the original leaked it on error).
    """
    with open(filename, 'r') as fileID:
        lines = fileID.readlines()
    parData = np.zeros([len(lines), 6])
    for row, line in enumerate(lines):
        tokens = line.split()
        for col in range(6):
            parData[row][col] = float(tokens[col])
    return parData
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='par Reader')
    requiredNamed = parser.add_argument_group('required named arguments')
    requiredNamed.add_argument('-i', '--input', help='Path to input file', required=True)
    args = parser.parse_args()
    input_file = args.input
    if not os.path.exists(input_file):
        # the original formatted a non-existent 'args.file' attribute here,
        # raising AttributeError instead of the intended error message
        sys.exit("Error: '%s' is not an existing directory or file." % (input_file,))
    result = getPar(input_file)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.3_fMRIActivity/correlate_seed_voxels.py | .py | 5,780 | 155 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
from __future__ import print_function
import argparse
import os
import sys
import numpy as np
import nibabel as nib
from datetime import datetime
def get_date():
    """Return the current date and time, e.g. ``'Mon 01 Jan 2024 12:00:00'``."""
    return datetime.now().strftime("%a %d %b %Y %H:%M:%S")
def save_csv(sFilename, data):
    """Write each item of ``data`` on its own line to ``sFilename``.

    Items are rendered with ``%s`` formatting, so list items keep their
    Python repr (matching the original output format). The file is opened
    via a context manager so it is closed even if a write fails (the
    original leaked the handle on error).
    """
    with open(sFilename, 'w') as thefile:
        for item in data:
            thefile.write("%s\n" % item)
def save_nifti(sFilename, data, index, ext_nii):
    """Save ``data`` as ``<sFilename>_<index+1 zero-padded>.nii`` (no affine).

    Used to dump per-ROI correlation matrices / time series.
    NOTE(review): ``get_header()`` is the deprecated nibabel accessor;
    modern nibabel exposes ``image.header`` instead.
    """
    # save matrix (NIfTI); affine=None leaves the spatial mapping undefined
    image = nib.Nifti1Image(data, None)
    header = image.get_header()
    header.set_xyzt_units(xyz=None, t=None)
    image.to_filename(sFilename + '_%03d' % (index + 1,) + ext_nii)
    print("Output:", image.get_filename())
def get_seed_stat(sPathMatrix, sPathTS, data, seed, ext_nii, r_to_z=False, save_mat=False, ignore_nan=False):
    """Per-ROI voxel-correlation statistics.

    ``data`` is a 4D EPI array (x, y, z, t); ``seed`` a 4D ROI stack
    (x, y, z, n_rois) where voxels > 0 belong to the ROI. For each ROI the
    voxel-by-voxel Pearson correlation matrix is computed (optionally
    Fisher r-to-z via arctanh); ``save_mat`` additionally writes the
    matrix and time series with save_nifti().

    Returns a (5, n_rois) array whose rows are: voxel count, min, max,
    mean and std of the upper-triangle coefficients (inf mapped to NaN;
    ``ignore_nan`` switches to the nan-aware reductions).
    """
    seed_stat = np.zeros((5, seed.shape[3]), dtype=np.float64)
    for k in range(seed.shape[3]):
        msk = seed[:, :, :, k] > 0
        maskData = data[msk, :]
        if maskData.size == 0:
            # Empty ROI: no voxels, statistics undefined. (The original
            # assigned to an uninitialized 'matrix' here, raising
            # UnboundLocalError on the first empty ROI.)
            seed_stat[1:, k] = np.nan
            continue
        matrix = np.corrcoef(maskData, rowvar=True)
        if r_to_z:
            # Fisher r-to-z transform; arctanh(+-1) yields +-inf
            matrix = np.arctanh(matrix)
        if save_mat:
            #pos = np.where(seed[:,:,:,k] > 0)
            #labels = [', '.join(str(v) for v in t) for t in zip(pos[0], pos[1], pos[2])]
            save_nifti(sPathTS, data[msk, :].T, k, ext_nii)
            save_nifti(sPathMatrix, matrix, k, ext_nii)
        # upper triangle of the matrix (diagonal excluded); map inf to nan
        triu_cc = matrix[np.triu_indices_from(matrix, k=1)]
        triu_cc[np.isinf(triu_cc)] = np.nan
        seed_stat[0, k] = matrix.shape[0]
        if ignore_nan:
            seed_stat[1, k] = np.nanmin(triu_cc)
            seed_stat[2, k] = np.nanmax(triu_cc)
            seed_stat[3, k] = np.nanmean(triu_cc)
            seed_stat[4, k] = np.nanstd(triu_cc)
        else:
            seed_stat[1, k] = np.amin(triu_cc)
            seed_stat[2, k] = np.amax(triu_cc)
            seed_stat[3, k] = np.mean(triu_cc)
            seed_stat[4, k] = np.std(triu_cc)
    return seed_stat
def make_text_stat(sPathData, sPathSeed, seed_stat):
    """Format per-ROI seed statistics as a table (list of row lists).

    The first row is the header; one row follows per ROI column of
    ``seed_stat`` (rows: voxels, min, max, mean, std).
    """
    data_name = os.path.basename(sPathData)
    seed_name = os.path.basename(sPathSeed)
    rows = [['Data', 'Seed_ROIs', 'ROI_Index', 'Voxels', 'Min', 'Max', 'Mean', 'StdDev']]
    for k in range(seed_stat.shape[1]):
        stat = seed_stat[:, k]
        rows.append([
            data_name,
            seed_name,
            str(k + 1),
            '%d' % (stat[0],),
            '%.8f' % (stat[1],),
            '%.8f' % (stat[2],),
            '%.8f' % (stat[3],),
            '%.8f' % (stat[4],),
        ])
    return rows
if __name__ == '__main__':
    # CLI: correlate the voxels of each seed ROI within a 4D EPI dataset
    # and write per-ROI statistics (optionally the full matrices).
    parser = argparse.ArgumentParser(description='Create correlation matrix of seed voxels.')
    parser.add_argument('in_data', help='input 4D EPI data file name (NIfTI)')
    parser.add_argument('in_seed', help='input 4D seed ROIs file name (NIfTI)')
    parser.add_argument('-m', '--out_matrix', help='output seed ROIs matrix file name (w/o ext.)')
    parser.add_argument('-o', '--out_stat', help='output seed ROIs statistic text file name')
    args = parser.parse_args()
    sPathData = None
    sPathSeed = None
    ext_nii = '.nii'
    ext_gz = '.gz'
    ext_text = '.txt'
    # input 4D EPI data file (NIfTI)
    if args.in_data is not None: sPathData = args.in_data
    if not os.path.isfile(sPathData):
        sys.exit("Error: '%s' is not a regular file." % (sPathData,))
    # input 4D seed ROIs file (NIfTI)
    if args.in_seed is not None: sPathSeed = args.in_seed
    if not os.path.isfile(sPathSeed):
        sys.exit("Error: '%s' is not a regular file." % (sPathSeed,))
    # basename without the .nii / .nii.gz extension
    sData = os.path.basename(sPathData)
    sData = sData[:-len(ext_nii)] if sData.endswith(ext_nii) else sData[:-len(ext_nii+ext_gz)]
    # output seed ROIs matrix file (defaults to Matrix.<data> next to the data)
    sPathMatrix = os.path.join(os.getcwd(), args.out_matrix) if args.out_matrix is not None else os.path.join(os.path.dirname(sPathData), 'Matrix.' + sData)
    # output seed ROIs statistic text file
    sPathStat = os.path.join(os.getcwd(), args.out_stat) if args.out_stat is not None else os.path.join(os.path.dirname(sPathData), 'Stat.' + sData + ext_text)
    # output seed ROIs time series file
    sPathTS = os.path.join(os.path.dirname(sPathData), 'TS.' + sData)
    # get date and time
    print(get_date())
    # read 4D data file (NIfTI)
    # NOTE(review): get_data()/get_header() are deprecated nibabel
    # accessors (use get_fdata()/.header in modern nibabel).
    print("Data:", sPathData)
    data_img = nib.load(sPathData)
    data_data = data_img.get_data()
    #print("data_data.dtype:", data_data.dtype)
    #print("data_data.shape:", data_data.shape)
    data_hdr = data_img.get_header()
    data_shape = data_hdr.get_data_shape()
    #print("data_shape:", data_shape)
    if len(data_shape) != 4:
        sys.exit("Error: EPI data %s do not have four dimensions." % (str(data_shape),))
    # read 4D seed ROIs file (NIfTI)
    print("Seed:", sPathSeed)
    seed_img = nib.load(sPathSeed)
    seed_data = seed_img.get_data()
    #print("seed_data.dtype:", seed_data.dtype)
    #print("seed_data.shape:", seed_data.shape)
    seed_hdr = seed_img.get_header()
    seed_shape = seed_hdr.get_data_shape()
    #print("seed_shape:", seed_shape)
    if len(seed_shape) != 4:
        sys.exit("Error: Seed ROIs %s do not have four dimensions." % (str(seed_shape),))
    # Fisher z-transformed correlations; matrices saved only when -m given
    seed_stat = get_seed_stat(sPathMatrix, sPathTS, data_data, seed_data, ext_nii, r_to_z=True, save_mat=False if args.out_matrix is None else True, ignore_nan=True)
    text_stat = make_text_stat(sPathData, sPathSeed, seed_stat)
    save_csv(sPathStat, text_stat)
    print('Stat:', sPathStat)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.3_fMRIActivity/create_seed_rois.py | .py | 9,633 | 272 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
from __future__ import print_function
import argparse
import os
import sys
import numpy as np
import nibabel as nib
from datetime import datetime
def startSeedPoint(in_labels,in_atlas):
    """Build a 4D seed-ROI hyperstack from an atlas and a label list.

    ``in_labels`` is a text file whose rows are 'atlas_index,<TAB>label'
    (first row skipped by read_csv); ``in_atlas`` a 3D atlas NIfTI whose
    voxel values are region labels. One binary 3D mask per label is
    stacked along the 4th axis and saved as ``Seed_ROIs.nii.gz`` next to
    the atlas. Returns the output path.
    """
    ext_text = '.txt'
    ext_nifti = '.nii.gz'
    sPathLabels = None
    PathAtlas = None
    preserve = 0
    datatype = None
    # input labels text file with atlas index and seed regions (labels) in each line
    # Atlas (1 or 2), Label 1, Label 2, ...
    sPathLabels = in_labels
    if not os.path.isfile(sPathLabels):
        sys.exit("Error: '%s' is not a regular file." % (sPathLabels,))
    # input atlas labels files (NIfTI); kept as a list to mirror the CLI path below
    PathAtlas = list([in_atlas])
    #sPathAtlas = in_atlas
    for sPathAtlas in PathAtlas:
        if not os.path.isfile(sPathAtlas):
            sys.exit("Error: '%s' is not a regular file." % (sPathAtlas,))
    # output seed ROIs file
    sPathROIs = os.path.join(os.path.dirname(PathAtlas[0]), 'Seed_ROIs.nii.gz')
    # get date and time
    # print(get_date())
    # read labels text file: one (atlas index, label) pair per row
    iatlas = []
    labels = []
    for row in read_csv(sPathLabels):
        iatlas.append(int(row.split(',\t')[0]))
        labels.append(int(row.split(',\t')[1]))
    # print("iatlas:", iatlas)
    # print("labels:", labels)
    # read 3D atlas labels files (NIfTI)
    labels_img = []
    labels_hdr = []
    labels_data = []
    labels_shape = []
    # read 3D atlas labels files (NIfTI)
    for k, sPathAtlas in enumerate(PathAtlas):
        # print("Atlas%d:" % (k + 1,), sPathAtlas)
        labels_img.append(nib.load(sPathAtlas))
        labels_data.append(labels_img[k].get_data())
        # print("labels_data[%d].dtype:" % (k,), labels_data[k].dtype)
        # print("labels_data[%d].shape:" % (k,), labels_data[k].shape)
        labels_hdr.append(labels_img[k].header)
        labels_shape.append(labels_hdr[k].get_data_shape())
        # print("labels_shape[%d]:" % (k,), labels_shape[k])
        if len(labels_shape[k]) != 3:
            sys.exit("Error: Atlas%d labels %s do not have three dimensions." % (k, str(labels_shape[k])))
    # all atlases must share one shape
    for k in range(1, len(labels_shape)):
        if labels_shape[0] != labels_shape[k]:
            sys.exit("Error: Atlas1 labels %s and Atlas%d labels %s do not have the same shape." % (
                str(labels_shape[0]), k, str(labels_shape[k])))
    # create atlas labels hyperstack (4D); preserve=0 -> binary masks
    rois = create_rois_1(iatlas, labels, labels_hdr, labels_data, datatype=datatype, preserve=preserve)
    # save atlas labels file, reusing the atlas affine
    dataOrg = nib.load(sPathAtlas)
    niiData = nib.Nifti1Image(rois, dataOrg.affine)
    hdrIn = niiData.header
    hdrIn.set_xyzt_units('mm')
    scaledNiiData = nib.as_closest_canonical(niiData)
    nib.save(niiData, sPathROIs)
    print("Output:", sPathROIs)
    return sPathROIs
def get_date():
    """Return the current date and time, e.g. ``'Mon 01 Jan 2024 12:00:00'``."""
    return datetime.now().strftime("%a %d %b %Y %H:%M:%S")
def read_csv(sFilename):
    """Read a text file and return its lines split on newlines, header row skipped.

    A trailing newline in the file yields a final empty string, exactly as
    before. The handle is closed via a context manager (the original
    leaked the open file).
    """
    with open(sFilename) as fid:
        holeDataset = fid.read()
    return holeDataset.split('\n')[1:]
def create_rois_1(iatlas, labels, labels_hdr, labels_data, datatype=None, preserve=False):
    """Stack per-ROI atlas masks into a 4D array (x, y, z, n_rois).

    ``iatlas[k]`` selects (1-based) which atlas in ``labels_data`` the
    k-th ROI comes from; ``labels[k]`` is its label value (an iterable of
    values when ``preserve`` is set). With ``preserve`` the original
    label values are kept inside each ROI, otherwise binary masks of 1
    are produced. ``datatype`` maps NIfTI type codes to numpy dtypes and
    falls back to the first atlas header's dtype.
    """
    # NIfTI datatype code -> numpy dtype
    dtype_map = {2: np.uint8, 4: np.int16, 8: np.int32, 16: np.float32}
    if datatype in dtype_map:
        labels_dtype = dtype_map[datatype]
    else:
        labels_dtype = labels_hdr[0].get_data_dtype()
    labels_shape = labels_hdr[0].get_data_shape()
    rois = np.zeros(labels_shape + (len(iatlas),), dtype=labels_dtype)
    for k, index in enumerate(iatlas):
        data = labels_data[index - 1]
        if preserve:
            # keep each atlas label value inside its ROI volume
            for label in labels[k]:
                rois[:, :, :, k][data == label] = label
        else:
            # binary mask: 1 wherever the atlas equals this ROI's label
            rois[:, :, :, k][data == labels[k]] = 1
    return rois
def create_rois_2(iatlas, labels, labels_hdr, labels_data, datatype=None, preserve=False):
    """Index-based variant of create_rois_1 (same output contract).

    ``labels[k]`` must be an iterable of label values here. For each ROI
    the voxel coordinates of every label are collected with np.where;
    np.hstack concatenates the (3, n_i) coordinate stacks into one
    (3, n_total) array, and tuple(...) re-splits its rows into the
    (ix, iy, iz) fancy-index triple used for assignment.
    """
    if datatype == 2:
        labels_dtype = np.uint8
    elif datatype == 4:
        labels_dtype = np.int16
    elif datatype == 8:
        labels_dtype = np.int32
    elif datatype == 16:
        labels_dtype = np.float32
    else:
        # fall back to the dtype recorded in the first atlas header
        labels_dtype = labels_hdr[0].get_data_dtype()
    labels_shape = labels_hdr[0].get_data_shape()
    rois = np.zeros(labels_shape + (len(iatlas),), dtype=labels_dtype)
    if preserve:
        for k, index in enumerate(iatlas):
            ires = []
            data = labels_data[index-1]
            for label in labels[k]:
                ires.append(np.where(data == label))
            indices = tuple(np.hstack(ires))
            # keep the original atlas label values inside the ROI
            rois[:,:,:,k][indices] = data[indices]
    else:
        for k, index in enumerate(iatlas):
            ires = []
            data = labels_data[index-1]
            for label in labels[k]:
                ires.append(np.where(data == label))
            indices = tuple(np.hstack(ires))
            # binary mask over all of this ROI's labels
            rois[:,:,:,k][indices] = 1
    return rois
def create_rois_3(iatlas, labels, labels_hdr, labels_data, datatype=None, preserve=False):
    """Mask-accumulation variant of create_rois_1 (same output contract).

    ``labels[k]`` must be an iterable of label values. A boolean scratch
    mask is OR-accumulated over all of a ROI's labels, then either
    multiplied with the atlas data (``preserve``: keep label values) or
    stored directly (binary mask).
    """
    if datatype == 2:
        labels_dtype = np.uint8
    elif datatype == 4:
        labels_dtype = np.int16
    elif datatype == 8:
        labels_dtype = np.int32
    elif datatype == 16:
        labels_dtype = np.float32
    else:
        # fall back to the dtype recorded in the first atlas header
        labels_dtype = labels_hdr[0].get_data_dtype()
    labels_shape = labels_hdr[0].get_data_shape()
    # builtin bool dtype: the 'np.bool' alias was removed in NumPy 1.24
    mask = np.zeros(labels_shape, dtype=bool)
    rois = np.zeros(labels_shape + (len(iatlas),), dtype=labels_dtype)
    if preserve:
        for k, index in enumerate(iatlas):
            data = labels_data[index-1]
            for label in labels[k]:
                mask = np.logical_or(mask, data == label)
            rois[:,:,:,k] = data * mask
            mask[:] = False
    else:
        for k, index in enumerate(iatlas):
            data = labels_data[index-1]
            for label in labels[k]:
                mask = np.logical_or(mask, data == label)
            rois[:,:,:,k] = mask
            mask[:] = False
    return rois
if __name__ == '__main__':
    # CLI twin of startSeedPoint(): build a 4D seed-ROI stack from one or
    # more atlas volumes and a label list.
    parser = argparse.ArgumentParser(description='Create atlas seed ROIs.')
    parser.add_argument('-i', '--in_atlas', nargs='+', help='Input 3D atlas labels file names (NIfTI)')
    parser.add_argument('-l','--in_labels', help='Input labels text file name',default='/Volumes/AG_Aswendt_Share/Scratch/Asw_fMRI2AllenBrain_Data/annotation_50CHANGEDanno_label_IDs.txt')
    parser.add_argument('-o', '--out_rois', help='Output 4D seed ROIs file name')
    parser.add_argument('-p', '--preserve', action='store_true', help='Preserve label values')
    parser.add_argument('-t', '--datatype', type=int, choices=[2, 4, 8, 16], help='Data type (2: char, 4: short, 8: int, 16: float)')
    args = parser.parse_args()
    ext_text = '.txt'
    ext_nifti = '.nii.gz'
    # input labels text file with atlas index and seed regions (labels) in each line
    # Atlas (1 or 2), Label 1, Label 2, ...
    if len(args.in_labels) > 0: sPathLabels = args.in_labels
    if not os.path.isfile(sPathLabels):
        sys.exit("Error: '%s' is not a regular file." % (sPathLabels,))
    # input atlas labels files (NIfTI)
    if len(args.in_atlas) > 0: PathAtlas = args.in_atlas
    for sPathAtlas in PathAtlas:
        if not os.path.isfile(sPathAtlas):
            sys.exit("Error: '%s' is not a regular file." % (sPathAtlas,))
    # output seed ROIs file (defaults to Seed_ROIs next to the first atlas)
    sPathROIs = os.path.join(os.getcwd(), args.out_rois) if args.out_rois != None else os.path.join(os.path.dirname(PathAtlas[0]), 'Seed_ROIs')
    # get date and time
    #print(get_date())
    # read labels text file: one (atlas index, label) pair per row
    iatlas = []
    labels = []
    for row in read_csv(sPathLabels):
        iatlas.append(int(row.split(',\t')[0]))
        labels.append(int(row.split(',\t')[1]))
    #print("iatlas:", iatlas)
    #print("labels:", labels)
    # read 3D atlas labels files (NIfTI)
    labels_img = []
    labels_hdr = []
    labels_data = []
    labels_shape = []
    # NOTE(review): get_data()/get_header() are deprecated nibabel
    # accessors (use get_fdata()/.header in modern nibabel).
    for k, sPathAtlas in enumerate(PathAtlas):
        #print("Atlas%d:" % (k + 1,), sPathAtlas)
        labels_img.append(nib.load(sPathAtlas))
        labels_data.append(labels_img[k].get_data())
        #print("labels_data[%d].dtype:" % (k,), labels_data[k].dtype)
        #print("labels_data[%d].shape:" % (k,), labels_data[k].shape)
        labels_hdr.append(labels_img[k].get_header())
        labels_shape.append(labels_hdr[k].get_data_shape())
        #print("labels_shape[%d]:" % (k,), labels_shape[k])
        if len(labels_shape[k]) != 3:
            sys.exit("Error: Atlas%d labels %s do not have three dimensions." % (k, str(labels_shape[k])))
    # all atlases must share one shape
    for k in range(1, len(labels_shape)):
        if labels_shape[0] != labels_shape[k]:
            sys.exit("Error: Atlas1 labels %s and Atlas%d labels %s do not have the same shape." % (str(labels_shape[0]), k, str(labels_shape[k])))
    # create atlas labels hyperstack (4D)
    rois = create_rois_1(iatlas, labels, labels_hdr, labels_data, datatype=args.datatype, preserve=args.preserve)
    # save atlas labels file, reusing the last atlas' affine
    dataOrg = nib.load(sPathAtlas)
    niiData = nib.Nifti1Image(rois, dataOrg.affine)
    hdrIn = niiData.header
    hdrIn.set_xyzt_units('mm')
    scaledNiiData = nib.as_closest_canonical(niiData)
    nib.save(niiData, sPathROIs+ext_nifti)
    print("Output:", sPathROIs+ext_nifti)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.3_fMRIActivity/fsl_mean_ts.py | .py | 6,495 | 173 | """
Created on 07.12.2015
@author: michaeld
"""
from __future__ import print_function
import argparse
import os
import sys
import numpy as np
import nibabel as nib
import scipy.io as io
import correlate_matrix
from datetime import datetime
def start_fsl_mean_ts(sPathData, sPathMask, labelNames, postTxt):
    """Compute the mean time course of a 4D fMRI dataset within every volume
    of a 4D mask and write a text matrix, a MATLAB .mat file and partial
    correlation matrices.

    Parameters
    ----------
    sPathData : str
        Path to the 4D data file (NIfTI, x, y, slc, rep).
    sPathMask : str
        Path to the 4D mask file (NIfTI, x, y, slc, msk).
    labelNames : str
        Path to the region-label text file; its basename selects whether the
        plain or the "Split" correlation matrix file names are used.
    postTxt : str
        Prefix prepended to the output text file name.

    Returns
    -------
    str
        Path of the written text matrix file.
    """
    # input data
    # Read 4D data file (NIfTI); get_data() is deprecated in recent nibabel
    # but kept for consistency with the rest of the pipeline.
    data_img = nib.load(sPathData)
    data = data_img.get_data()
    data_hdr = data_img.header
    data_dtype = data_hdr.get_data_dtype()
    data_shape = data_hdr.get_data_shape()
    # output data: "<postTxt><scan id>.txt" next to the input file
    sPathOut = os.path.abspath(os.path.join(sPathData, os.pardir, postTxt + os.path.basename(sPathData).split('_')[0]))
    sPathOut = sPathOut + '.txt'
    # NOTE(review): if labelNames matches neither basename below, the Pcorr*
    # variables stay unbound and a NameError follows — callers are expected
    # to pass one of these two label files; confirm.
    if os.path.basename(labelNames) == "annoVolume.nii.txt":
        PcorrR_matrix_path = os.path.abspath(os.path.join(sPathData, os.pardir, 'Matrix_PcorrR.' + os.path.basename(sPathData).split('_')[0])) + ".mat"
        PcorrP_matrix_path = os.path.abspath(os.path.join(sPathData, os.pardir, 'Matrix_PcorrP.' + os.path.basename(sPathData).split('_')[0])) + ".mat"
        PcorrZ_matirx_path = os.path.abspath(os.path.join(sPathData, os.pardir, 'Matrix_PcorrZ.' + os.path.basename(sPathData).split('_')[0])) + ".mat"
    elif os.path.basename(labelNames) == "annoVolume+2000_rsfMRI.nii.txt" :
        PcorrR_matrix_path = os.path.abspath(os.path.join(sPathData, os.pardir, 'Matrix_PcorrR_Split.' + os.path.basename(sPathData).split('_')[0])) + ".mat"
        PcorrP_matrix_path = os.path.abspath(os.path.join(sPathData, os.pardir, 'Matrix_PcorrP_Split.' + os.path.basename(sPathData).split('_')[0])) + ".mat"
        PcorrZ_matirx_path = os.path.abspath(os.path.join(sPathData, os.pardir, 'Matrix_PcorrZ_Split.' + os.path.basename(sPathData).split('_')[0])) + ".mat"
    pcorr_paths = [PcorrR_matrix_path, PcorrP_matrix_path, PcorrZ_matirx_path]
    if len(data_shape) != 4:
        sys.exit("Error: data %s has no 4D shape." % (str(data_shape),))
    # Read 4D mask file (NIfTI)
    mask_img = nib.load(sPathMask)
    mask = mask_img.get_data()
    mask_hdr = mask_img.header
    #mask_dtype = mask_hdr.get_data_dtype()
    mask_shape = mask_hdr.get_data_shape()
    if len(mask_shape) != 4:
        sys.exit("Error: mask %s has no 4D shape." % (str(mask_shape),))
    if data_shape[:3] != mask_shape[:3]:
        sys.exit("Error: data %s and mask %s are not the same shape." % (str(data_shape[:3]), str(mask_shape[:3])))
    # One row per mask volume: mean signal over all voxels where the mask > 0.
    m = np.zeros((mask_shape[3], data_shape[3]), dtype=data_dtype)
    for k in range(mask_shape[3]):
        msk = np.array(mask[:, :, :, k]) > 0
        maskedData = data[msk, :]
        if maskedData.size > 0:
            m[k] = np.mean(data[msk, :], 0)
    # Fix: use a context manager so the label file handle is always closed
    # (the original left it open).
    with open(labelNames, 'r') as fileNames:
        lines = fileNames.readlines()
    mT = np.transpose(m)
    np.savetxt(sPathOut, mT, fmt='%.4f', delimiter=' ')
    matPathOut = os.path.join(os.path.dirname(sPathOut), os.path.basename(sPathOut) + '.mat')
    io.savemat(matPathOut, dict([('matrix', mT),('label',lines)]))
    correlate_matrix.calculate_p_corr_matrix(mT, lines, pcorr_paths)
    return sPathOut
def get_date():
    """Return the current date and time as one human-readable string
    (e.g. 'Mon 01 Jan 2024 12:00:00')."""
    stamp = datetime.now()
    date_part = stamp.strftime("%a %d %b %Y")
    time_part = stamp.strftime("%H:%M:%S")
    return '{} {}'.format(date_part, time_part)
if __name__ == '__main__':
    # Stand-alone CLI: compute the mean time course of a 4D dataset inside
    # each volume of a 4D mask and save the result as a text matrix plus a
    # MATLAB .mat file (without the labels/correlation extras of
    # start_fsl_mean_ts above).
    parser = argparse.ArgumentParser()
    parser.add_argument('in_data', nargs='?', default='', help='input 4D data (x, y, slc, rep)')
    parser.add_argument('in_mask', nargs='?', default='', help='input 4D mask (x, y, slc, msk)')
    #parser.add_argument('in_data', help='input 4D data (x, y, slc, rep)')
    #parser.add_argument('in_mask', help='input 4D mask (x, y, slc, msk)')
    parser.add_argument('-o', '--out_text', help='output text matrix')
    args = parser.parse_args()
    sPathData = None
    sPathMask = None
    # input data
    # NOTE(review): if in_data is omitted, sPathData stays None and
    # os.path.isfile(None) raises TypeError — presumably both positionals
    # are always supplied; confirm with callers.
    if len(args.in_data) > 0: sPathData = args.in_data
    if not os.path.isfile(sPathData):
        sys.exit("Error: '%s' is not a regular file." % (sPathData,))
    # input mask
    if len(args.in_mask) > 0: sPathMask = args.in_mask
    if not os.path.isfile(sPathMask):
        sys.exit("Error: '%s' is not a regular file." % (sPathMask,))
    # output data: default is 'MasksTCs.<scan id>.txt' next to the input data
    sPathOut = args.out_text if args.out_text is not None else os.path.abspath(os.path.join(sPathData, os.pardir, 'MasksTCs.' + os.path.basename(sPathData).split('_')[0]))
    sPathOut = sPathOut + '.txt'
    #print(get_date())
    # Read 4D data file (NIfTI )
    #print(sPathData)
    data_img = nib.load(sPathData)
    data = data_img.get_data()
    #data = np.squeeze(data_img.get_data())
    #data = np.cast[np.float32](data_img.get_data())
    #print("data.dtype:", data.dtype)
    #print("data.shape:", data.shape)
    data_hdr = data_img.header
    data_dtype = data_hdr.get_data_dtype()
    data_shape = data_hdr.get_data_shape()
    #print("data_dtype:", data_dtype)
    #print("data_shape:", data_shape)
    if len(data_shape) != 4:
        sys.exit("Error: data %s has no 4D shape." % (str(data_shape),))
    # Read 4D mask file (NIfTI)
    #print(sPathMask)
    mask_img = nib.load(sPathMask)
    mask = mask_img.get_data()
    #mask = np.squeeze(mask_img.get_data())
    #mask = np.cast[np.float32](mask_img.get_data())
    #print("mask.dtype:", mask.dtype)
    #print("mask.shape:", mask.shape)
    mask_hdr = mask_img.header
    mask_dtype = mask_hdr.get_data_dtype()
    mask_shape = mask_hdr.get_data_shape()
    #print("mask_dtype:", mask_dtype)
    #print("mask_shape:", mask_shape)
    if len(mask_shape) != 4:
        sys.exit("Error: mask %s has no 4D shape." % (str(mask_shape),))
    if data_shape[:3] != mask_shape[:3]:
        sys.exit("Error: data %s and mask %s are not the same shape." % (str(data_shape[:3]), str(mask_shape[:3])))
    # One row per mask volume: mean signal over all voxels with mask > 0.
    m = np.zeros((mask_shape[3], data_shape[3]), dtype=data_dtype)
    for k in range(mask_shape[3]):
        msk = np.array(mask[:,:,:,k]) > 0
        maskedData = data[msk,:]
        if maskedData.size > 0:
            m[k] = np.mean(data[msk,:], 0)
    #s = [['%.4f' % (x,) for x in line] for line in m.T.tolist()]
    #s = [map(lambda x: '%.4f' % (x,), line) for line in m.T.tolist()]
    # Transpose so rows are repetitions and columns are mask volumes.
    mT = np.transpose(m)
    np.savetxt(sPathOut, mT , fmt='%.4f',delimiter=' ')
    matPathOut = os.path.join(os.path.dirname(sPathOut), os.path.basename(sPathOut) + '.mat')
    io.savemat(matPathOut, dict([('matrix', mT)]))
    #print(sPathOut)
    #save_csv(sPathOut, s)
    #save_data(sPathOut, s)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.3_fMRIActivity/getRegrTable.py | .py | 5,398 | 135 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import sys,os
import numpy as np
import glob
import parReader
import i32Reader
def findData(path, addon):
    """Collect every path below *path* matching the glob pattern *addon*."""
    pattern = path + '/' + addon
    return [match for match in glob.iglob(pattern, recursive=True)]
def getRegrTable(file_name):
    """Build per-slice regression tables from motion-correction (.par) and
    physiology (.i32) recordings.

    For every '*mcf.mat' folder in <file_name>/rs-fMRI_mcf, the matching
    '*I32' file in <file_name>/rawMonData is located, physiological and
    drift regressors are combined with the motion parameters, and one
    13-column text file per slice is written to <file_name>/txtRegrPython.

    Parameters
    ----------
    file_name : str
        Path to the scan directory containing 'rs-fMRI_mcf' and 'rawMonData'.

    Returns
    -------
    int
        0 on completion (folders without a matching .i32 file are skipped
        with a warning).
    """
    # proof par Folder
    par_Path = os.path.join(file_name,'rs-fMRI_mcf')
    if not os.path.exists(par_Path):
        sys.exit("Error: %s is not an existing directory or file." % (par_Path,))
    # proof i32 Folder
    i32_Path = os.path.join(file_name,'rawMonData')
    if not os.path.exists(i32_Path):
        sys.exit("Error: %s is not an existing directory or file." % (i32_Path,))
    # generate target Folder
    target_folder = os.path.join(file_name, 'txtRegrPython')
    if not os.path.exists(target_folder):
        os.mkdir(target_folder)
    # get all file entries and compare the length
    listofPar_names = findData(par_Path,'*mcf.mat')
    listofI32_names = findData(i32_Path,'*I32')
    #if not len(listofPar_names) == len(listofI32_names):
    #    print('\x1b[00;37;43m' + 'Some Data of I32 have no corresponding par data!' + '\x1b[0m')
    # Column headers of the regression table: respiration (1-2), cardiac
    # (3-4), rotations/translations from motion correction (5-10) and
    # polynomial drift terms (11-13).
    headlineStr = ['#Resp. BLC(1)','Resp. Deriv.(2)','Card. BLC(3)',
                   'Card. Deriv.(4)','RotX(5)','RotY(6)','RotZ(7)',
                   'dX(8)','dY(9)','dZ(10)','1st Order Drift(11)',
                   '2nd Order Drift(12)','3rd Order Drift(13)']
    for i in range(len(listofPar_names)):
        # get par folder info
        par_folder_path = os.path.join(listofPar_names[i])
        fullpar_folder_name = os.path.basename(par_folder_path).split('.')
        # assumes names of the form '<x>.<id>.<seq>...'; the matching .i32
        # is expected to contain '<id>.PHYSWAV.<seq>' — TODO confirm naming
        par_folder_name2comp = fullpar_folder_name[1]+'.PHYSWAV.'+fullpar_folder_name[2]
        # get par-Folder content
        cur_contentOfPar = findData(par_folder_path, '*.par')
        numberOfSlices = len(cur_contentOfPar)
        # find corresponding I32 dataset
        str_indexI32 = [s for s in listofI32_names if par_folder_name2comp in s]
        if len(str_indexI32) < 1:
            print('\x1b[00;37;43m' + 'No corresponding .par - .i32 file for %s' % (par_folder_name2comp,) + '\x1b[0m')
            continue
        else:
            i32_folder_path = str_indexI32[0]
        # read the first par Table to get the real number of Repition
        cur_par_file_path = cur_contentOfPar[0]
        parTestTable = parReader.getPar(cur_par_file_path)
        # delete the first five measurements
        numberOfAllRepitionsParTable = len(parTestTable) - 5
        # get i32 - Data
        trigger,i32Table = i32Reader.getI32(i32_folder_path,numberOfSlices,numberOfAllRepitionsParTable)
        numberOfAllRepitionsI32 = len(trigger)/numberOfSlices
        # generate drifts: linear, quadratic and cubic terms over [-1, 1]
        driftTable = np.zeros([numberOfAllRepitionsParTable,3])
        x = np.linspace(-1,1,numberOfAllRepitionsParTable)
        driftTable[:,0] = x
        driftTable[:,1] = x**2
        driftTable[:,2] = x**3
        # One output text file per slice.
        for j in range(len(cur_contentOfPar)):
            tempRgrName = os.path.basename(cur_contentOfPar[j]).split('_slc')
            # slice index is encoded as four digits after '_slc'
            cur_slc = int(tempRgrName[1][0:4])
            rgr_folder_name = tempRgrName[0] + '_mcf_slice_' + tempRgrName[1][0:4] + '.txt'
            rgr_folder_path = os.path.join(target_folder,rgr_folder_name)
            # get par - Data
            cur_par_file_path = cur_contentOfPar[j]
            parTable = parReader.getPar(cur_par_file_path)
            # get I32 entries for cur_slc: every numberOfSlices-th trigger
            # starting at this slice selects its acquisition time points
            cur_slc_i32entries = i32Table[trigger[cur_slc::numberOfSlices]]
            # merge i32Table, parTable and driftTable
            fid = open(rgr_folder_path,'w')
            fid.write('%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n'
                      % (headlineStr[0],headlineStr[1],headlineStr[2],headlineStr[3],headlineStr[4],
                         headlineStr[5],headlineStr[6],headlineStr[7],headlineStr[8],headlineStr[9]
                         ,headlineStr[10],headlineStr[11],headlineStr[12]))
            for repIndex in range(numberOfAllRepitionsParTable):
                fid.write('%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\n'
                          % (cur_slc_i32entries[repIndex,0],cur_slc_i32entries[repIndex,1],
                             cur_slc_i32entries[repIndex,2],cur_slc_i32entries[repIndex,3],
                             parTable[repIndex,0],parTable[repIndex,1],parTable[repIndex,2],
                             parTable[repIndex,3],parTable[repIndex,4],parTable[repIndex,5],
                             driftTable[repIndex,0],driftTable[repIndex,1],driftTable[repIndex,2]))
            fid.close()
    return 0
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='Generate Regression Table out of par and I32')
    requiredNamed = parser.add_argument_group('required named arguments')
    requiredNamed.add_argument('-i','--input', help='Path to input data',required=True)
    args = parser.parse_args()
    # Fix: the condition was duplicated ('args.input is not None' twice) and
    # the error message referenced the non-existent attribute 'args.file',
    # which raised AttributeError instead of printing the message.
    if args.input is not None:
        input = args.input
        if not os.path.exists(input):
            sys.exit("Error: '%s' is not an existing directory or file." % (input,))
        result = getRegrTable(input)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.3_fMRIActivity/peakdet.py | .py | 2,337 | 87 | """
Converted from MATLAB script at http://billauer.co.il/peakdet.html
Returns two arrays
function [maxtab, mintab]=peakdet(v, delta, x)
%PEAKDET Detect peaks in a vector
% [MAXTAB, MINTAB] = PEAKDET(V, DELTA) finds the local
% maxima and minima ("peaks") in the vector V.
% MAXTAB and MINTAB consists of two columns. Column 1
% contains indices in V, and column 2 the found values.
%
% With [MAXTAB, MINTAB] = PEAKDET(V, DELTA, X) the indices
% in MAXTAB and MINTAB are replaced with the corresponding
% X-values.
%
% A point is considered a maximum peak if it has the maximal
% value, and was preceded (to the left) by a value lower by
% DELTA.
% Eli Billauer, 3.4.05 (Explicitly not copyrighted).
% This function is released to the public domain; Any use is allowed.
"""
import sys
from numpy import NaN, Inf, arange, isscalar, asarray, array
def peakdet(v, delta, x=None):
    """Detect local maxima and minima ("peaks") in a vector.

    A point is a maximum peak if it has the maximal value and was preceded
    (to the left) by a value lower by at least *delta*; minima are detected
    symmetrically.

    Parameters
    ----------
    v : array_like
        Input vector.
    delta : scalar
        Positive threshold a value must drop/rise by to confirm a peak.
    x : array_like, optional
        Positions reported in the output instead of indices into v.

    Returns
    -------
    (ndarray, ndarray)
        maxtab and mintab, each with rows (position, value).
    """
    maxtab = []
    mintab = []
    if x is None:
        x = arange(len(v))
    v = asarray(v)
    if len(v) != len(x):
        sys.exit('Input vectors v and x must have same length')
    if not isscalar(delta):
        sys.exit('Input argument delta must be a scalar')
    if delta <= 0:
        sys.exit('Input argument delta must be positive')
    # float('inf')/float('nan') replace numpy.Inf/numpy.NaN, which were
    # removed in NumPy 2.0; they behave identically in the comparisons below.
    mn, mx = float('inf'), -float('inf')
    mnpos, mxpos = float('nan'), float('nan')
    lookformax = True
    for i in arange(len(v)):
        this = v[i]
        # Track the running extrema since the last confirmed peak.
        if this > mx:
            mx = this
            mxpos = x[i]
        if this < mn:
            mn = this
            mnpos = x[i]
        if lookformax:
            if this < mx - delta:
                maxtab.append((mxpos, mx))
                mn = this
                mnpos = x[i]
                lookformax = False
        else:
            if this > mn + delta:
                mintab.append((mnpos, mn))
                mx = this
                mxpos = x[i]
                lookformax = True
    return array(maxtab), array(mintab)
if __name__ == "__main__":
    from matplotlib.pyplot import plot, scatter, show
    # Demo: alternating spikes — detect the extrema and overlay them.
    series = [0, 0, 0, 2, 0, 0, 0, -2, 0, 0, 0, 2, 0, 0, 0, -2, 0]
    peaks_hi, peaks_lo = peakdet(series, .3)
    plot(series)
    scatter(array(peaks_hi)[:, 0], array(peaks_hi)[:, 1], color='blue')
    scatter(array(peaks_lo)[:, 0], array(peaks_lo)[:, 1], color='red')
    show()
3D | Aswendt-Lab/AIDAmri | bin/3.3_fMRIActivity/plotfMRI_mat.py | .py | 2,137 | 76 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import matplotlib.pyplot as plt
import os, sys
import numpy as np
import scipy.io as sio
np.seterr(divide='ignore', invalid='ignore')
import seaborn as sns
def matrixMaker(matData, output_path):
    """Render a correlation heatmap from a saved fMRI time-series matrix.

    Parameters
    ----------
    matData : dict
        Loaded .mat contents; expects 'matrix' (time x region) and 'label'
        (one region name per row) entries.
    output_path : str
        Directory in which 'CorrMatrixHM.png' is written.

    Returns
    -------
    ndarray
        The correlation matrix with its diagonal removed and NaNs zeroed.
    """
    unCorrmatrix = matData['matrix']
    labels = matData['label']
    # Adapt labels to pyplot: strip formatting characters from each name
    labels = [s.replace('\t', '-') for s in labels]
    labels = [s.replace('\n', ' ') for s in labels]
    labels = [s.replace(' ', '') for s in labels]
    labels = [s.replace('_', '') for s in labels]
    # Calculate the correlation between region time series (columns),
    # zero the self-correlation diagonal and replace NaNs (constant series).
    corrMatrix = np.corrcoef(unCorrmatrix, rowvar=False)
    corrMatrix = corrMatrix - np.eye(np.size(corrMatrix, 1))
    corrMatrix = np.nan_to_num(corrMatrix)
    fig, ax = plt.subplots()
    # NOTE(review): vmin=0 clips negative correlations in the rendering;
    # the returned matrix still contains them — confirm this is intended.
    sns.heatmap(corrMatrix, vmin = 0, vmax = 0.75)
    ax.axis('tight')
    # Set labels
    ax.set(xticks=np.arange(len(labels)), xticklabels=labels,
           yticks=np.arange(len(labels)), yticklabels=labels)
    # Rotate the tick labels and set their alignment.
    plt.setp(ax.get_xticklabels(), rotation=45, ha="right",
             rotation_mode="anchor")
    ax.set_title("rsfMRI Correlation between ARA regions")
    output_file = os.path.join(output_path, "CorrMatrixHM.png")
    plt.savefig(output_file)
    plt.close()
    return corrMatrix
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='Visualize mat file of fMRI ')
    requiredNamed = parser.add_argument_group('required named arguments')
    requiredNamed.add_argument('-i', '--inputMat', help='File: fMRI mat-File')
    args = parser.parse_args()
    inputPath = None
    # Fix: the condition was duplicated ('args.inputMat is not None' twice)
    # and the error message referenced the non-existent 'args.inputPath',
    # which raised AttributeError instead of printing the message; the
    # redundant re-assignment of inputPath is also removed.
    if args.inputMat is not None:
        inputPath = args.inputMat
        if not os.path.exists(inputPath):
            sys.exit("Error: %s path is not an existing directory." % (inputPath,))
    matData = sio.loadmat(inputPath)
    # generate Matrix
    matrixMaker(matData, os.path.dirname(inputPath))
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.3_fMRIActivity/i32Reader.py | .py | 5,214 | 153 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import sys,os
import numpy as np
import peakdet as pk
import scipy.signal as sc
# noinspection PyTypeChecker
def getI32(file_name, numberOfSlices, numberOfAllRepitionsParTable):
    """Read a physiology .i32 log and build the physiological regressors.

    The binary file holds interleaved float32 quadruples
    (time, respiration, trigger, cardiac) sampled at 1 kHz.

    Parameters
    ----------
    file_name : str
        Path to the .i32 file.
    numberOfSlices : int
        Number of slices acquired per repetition.
    numberOfAllRepitionsParTable : int
        Number of repetitions according to the .par table (first five
        measurements already discarded).

    Returns
    -------
    (ndarray, ndarray)
        Sample indices of the first trigger edge per slice acquisition, and
        the n-by-4 table [RespBLC, RespDeriv, CardBLC, CardDeriv].
    """
    #fileSize = fileInfo.bytes/4
    # Fix: open in binary mode — np.fromfile reads raw bytes and text mode
    # can corrupt the stream; the handle is also closed now.
    with open(file_name, 'rb') as fid:
        fileTable = np.fromfile(fid, dtype=np.float32)
    #fpRawTime = fileTable[0:len(fileTable):4]
    fpRawResp = fileTable[1:len(fileTable):4]
    fpRawTrig = fileTable[2:len(fileTable):4]
    fpRawCard = fileTable[3:len(fileTable):4]
    # Process Respiration Data
    # Merge 10 Values (moving average smoothing)
    N = 10
    RespBLC = np.convolve(fpRawResp, np.ones((N,)) / N, mode='same')
    # Evaluate Baseline Shift
    RespBLC = RespBLC - np.median(RespBLC, axis=None)
    # derivative of respiration (central difference kernel)
    kernel = [1, 0, -1]
    RespDeriv = np.convolve(RespBLC, kernel, mode='same')
    #RespDeriv = np.gradient(RespBLC)
    # peak detection
    pksRespMax,pksResMin = pk.peakdet(RespBLC*20, delta=1)
    print('avg. Respiration Rate: '+str(len(pksRespMax) / (len(RespBLC) / 60000))+' 1/min')
    # Process Cardiac Data: bandpass 2.5-10 Hz Chebyshev II filter
    fs = 1000 # Sampling Frequency(1 kHz)
    lowcut = 2.5
    highcut = 10.0
    nyq = 0.5 * fs # Nyquist Frequency (Hz)
    Wp = [lowcut/nyq, highcut/nyq] # Passband Frequencies (Normalised 2.5 - 10 Hz)
    Ws= [0.1/nyq, 35/nyq] # Stopband Frequencies (Normalised)
    Rs = 40 # Stopband Ripple (dB)
    N = 3 # Filter order
    b, a = sc.cheby2(N, Rs,Ws, btype='bandpass')
    filtCardBLC = sc.filtfilt(b, a, fpRawCard)
    N = 10
    CardBLC = np.convolve(filtCardBLC, np.ones((N,)) / N, mode='same')
    # Evaluate Baseline Shift
    CardBLC = CardBLC - np.median(CardBLC, axis=None)
    # derivative of cardiac signal
    kernel = [1, 0, -1]
    CardDeriv = np.convolve(CardBLC, kernel, mode='same')
    # peak detection
    pksCardpMax, pksCardMin = pk.peakdet(CardBLC * 20, delta=1)
    print('avg. Card Rate: ' + str(len(pksCardpMax) / (len(CardBLC) / 60000)) + ' 1/min')
    # if the trigger max is not equal 1 but higher
    if max(fpRawTrig) != 1.0:
        fpRawTrig = fpRawTrig-(max(fpRawTrig)-1)
    # find missing trigger and replace 1 by 0
    idx_missedTrigger = np.where(np.diff(fpRawTrig,2)==2)[0]+1
    if len(idx_missedTrigger)>0:
        fpRawTrig[idx_missedTrigger+1] =0
    triggerDataPoints = np.argwhere(fpRawTrig == 0)
    numberOfTiggers = len(triggerDataPoints)
    numberOfRepitions = numberOfTiggers / (numberOfSlices * 2)
    print('Number of Repetitions: ' + str(numberOfRepitions))
    # if two dataset in a single i32 file, keep only the first half
    if numberOfTiggers >= ((numberOfAllRepitionsParTable + 5) * numberOfSlices * 2)*2:
        triggerDataPoints = triggerDataPoints[:int(numberOfTiggers/2)]# if more than two dataset in a single i32 file
        old_numberOfTriggers = numberOfTiggers
        # corrected number of triggers
        numberOfTiggers = len(triggerDataPoints)
    # if some wrong triggers in i32 file, drop the leading excess
    if numberOfTiggers > (numberOfAllRepitionsParTable + 5) * numberOfSlices * 2:
        wrongAmountOfTriggers = numberOfTiggers-(numberOfAllRepitionsParTable + 5) * numberOfSlices * 2
        fpRawTrig_cut = fpRawTrig[wrongAmountOfTriggers*100-1::]
        triggerDataPoints = np.argwhere(fpRawTrig_cut == 0)
        numberOfTiggers = len(triggerDataPoints)
        numberOfRepitions = numberOfTiggers / (numberOfSlices * 2)
        print('Number of Repetitions: ' + str(numberOfRepitions))
    # Triggers come in pairs; skip the first five repetitions and separate
    # the rising (1st) and falling (2nd) edges.
    triggerDataPoints_1st = triggerDataPoints[numberOfSlices * 5 * 2 : numberOfTiggers:2,0]
    triggerDataPoints_2nd = triggerDataPoints[numberOfSlices * 5 * 2 +1: numberOfTiggers:2,0]
    usedTriggerAmount = ((numberOfAllRepitionsParTable+5)*numberOfSlices*2-5*2*numberOfSlices)/2
    if not len(triggerDataPoints_1st) == len(triggerDataPoints_2nd):
        # Fix: the original passed file_name as a second print argument
        # instead of %-formatting it into the message.
        print('Miss one Trigger in file_name in %s' % (file_name,))
        if len(triggerDataPoints_1st) < usedTriggerAmount:
            if len(triggerDataPoints_2nd) == usedTriggerAmount:
                triggerDataPoints_1st = triggerDataPoints_2nd
            else:
                sys.exit('Trigger does not relate to any slice or rep. Time')
    if len(RespBLC) == len(CardBLC):
        i32Table = np.zeros([len(RespBLC),4])
    else:
        sys.exit('Respiration and Cardiac Data do not have the same length!')
    i32Table[:,0] = RespBLC
    i32Table[:,1] = RespDeriv
    i32Table[:,2] = CardBLC
    i32Table[:,3] = CardDeriv
    return triggerDataPoints_1st,i32Table
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='I32 Reader')
    requiredNamed = parser.add_argument_group('required named arguments')
    requiredNamed.add_argument('-i','--input', help='Path to input file',required=True)
    args = parser.parse_args()
    # Fix: the condition was duplicated ('args.input is not None' twice) and
    # the error message referenced the non-existent attribute 'args.file',
    # which raised AttributeError instead of printing the message.
    if args.input is not None:
        input = args.input
        if not os.path.exists(input):
            sys.exit("Error: '%s' is not an existing directory or file." % (input,))
        result = getI32(input,16,100)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.2.1_DTIdata_extract/iterativeRun.py | .py | 1,074 | 45 | '''
Created on 08.04.2019
@author: Niklas Pallast
process all DTI data
'''
import glob
import os
import numpy as np
def findData(path):
    """Return all scaled rsfMRI-split atlas volumes below the study *path*."""
    pattern = path + '/P*/S*/DTI/DSI_studio/*_rsfMRISplit_scaled.nii.gz'
    return list(glob.iglob(pattern, recursive=True))
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='Find all related DTI data')
    parser.add_argument('-p','--pathData', help='Path to study')
    args = parser.parse_args()
    pathData = args.pathData
    listAtlas = findData(pathData)
    print(listAtlas)
    for i in range(np.size(listAtlas)):
        print(listAtlas[i])
        curPath = os.path.dirname(listAtlas[i])
        dti = glob.glob(curPath+'/*.fa0.nii.gz')
        if dti:
            # Fix: build the command once so the printed line matches what is
            # executed (the original printed acronyms_ARA.txt but ran the
            # splitted acronym file).
            cmd = 'python DTIdata_extract.py ' + dti[0] + ' ' + listAtlas[i] + ' -t ./acronyms_splitted_ARA.txt'
            print(cmd)
            # NOTE(review): os.system with unquoted paths breaks on spaces;
            # consider subprocess.run with an argument list.
            os.system(cmd)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.2.1_DTIdata_extract/iterativeRun_MA_peri-infarct_ROIs.py | .py | 2,326 | 66 | '''
Created on 08.04.2019
Updated: 26.09.2020
@author: Niklas Pallast and Markus Aswendt
process all DTI data
'''
import glob
import os
import numpy as np
def findData(path):
    """Return all scaled peri-infarct atlas volumes below the study *path*."""
    pattern = path + '/GV*/DTI/DSI_studio/*mod_peri_scaled.nii.gz'
    return list(glob.iglob(pattern, recursive=True))
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='Find all related DTI data')
    parser.add_argument('-p','--pathData', help='Path to study data')
    args = parser.parse_args()
    pathData = args.pathData
    listAtlas = findData(pathData)
    print(listAtlas)
    # One pass per DTI parameter map, in the original order: MD, FA, RD, AD.
    # The four copy-pasted loops are deduplicated; the printed command now
    # matches what is executed (the original printed acronyms_ARA.txt but
    # ran the splitted acronym file).
    for suffix in ('md', 'fa0', 'rd', 'ad'):
        for i in range(np.size(listAtlas)):
            print(listAtlas[i])
            curPath = os.path.dirname(listAtlas[i])
            dti = glob.glob(curPath + '/*.%s.nii.gz' % suffix)
            if dti:
                cmd = 'python DTIdata_extract.py ' + dti[0] + ' ' + listAtlas[i] + ' -t ./acronyms_splitted_ARA.txt'
                print(cmd)
                # NOTE(review): os.system with unquoted paths breaks on
                # spaces; consider subprocess.run with an argument list.
                os.system(cmd)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.2.1_DTIdata_extract/DTIdata_extract.py | .py | 3,384 | 102 | """"
Created on 06.04.2019
@authors: Niklas Pallast
"""
import os
import sys
import argparse
import numpy as np
import nibabel as nii
def getOutfile(roi_file, img_file):
    """Derive the output text-file path from the ROI and DTI image names."""
    img_name = os.path.basename(img_file)
    roi_base = os.path.basename(roi_file).split('.')[0]
    # third-from-last dotted component, e.g. 'fa0' in 'scan.fa0.nii.gz'
    dti_param = img_name.split('.')[-3]
    print('\nStart processing DTI parameter: %s' % dti_param.upper())
    return os.path.join(os.path.dirname(img_file), roi_base + '_' + dti_param) + '.txt'
def extractDTIData(img, rois, outfile, txt_file):
    """Write the mean image value of every labelled ROI to a text file.

    Parameters
    ----------
    img : ndarray
        DTI parameter map.
    rois : ndarray
        Label volume of the same shape; label 0 is treated as background.
    outfile : str
        Output text file; its name must end in '_<param>.txt' since the
        two characters at [-6:-4] are used as the header parameter name.
    txt_file : str or None
        Optional tab-separated translation file '<label>\\t<acronym>' used
        to annotate each ROI line.

    Returns
    -------
    str
        The path of the written file.
    """
    regions = np.uint16(np.unique(rois))
    regions = np.delete(regions, 0)  # drop background label 0
    indices = None
    if txt_file is not None:
        # Fix: context manager closes the translation file (the original
        # left the handle open).
        with open(txt_file) as refFile:
            ref_lines = refFile.readlines()
        indices = np.zeros_like(ref_lines)
        for idx in range(np.size(ref_lines)):
            curNum = int(str.split(ref_lines[idx], '\t')[0])
            indices[idx] = curNum
        indices = np.uint16(indices)
    # Fix: context manager guarantees the output file is closed even if a
    # lookup below fails.
    with open(outfile, 'w') as fileID:
        fileID.write("%s values for %i given regions:\n\n" % (str.upper(outfile[-6:-4]), np.size(regions)))
        for r in regions:
            paramValue = np.mean(img[rois == r])
            if indices is not None:
                str_idx = ref_lines[int(np.argwhere(indices == r)[0])]
                acro = str.split(str_idx, '\t')[1][:-1]
                fileID.write("%i\t%s\t%.2f\n" % (r, acro, paramValue))
            else:
                fileID.write("%i\t%.2f\n" % (r, paramValue))
    return outfile
if __name__ == '__main__':
    # default values
    parser = argparse.ArgumentParser(description='Extracts the major DTI parameters (apparent diffusion coefficients) '
                                                 'axial diffusivity (AD), fractional anisotropy (FA), mean diffusivity (MD), and radial diffusivity (RD)')
    requiredNamed = parser.add_argument_group('Required named arguments')
    requiredNamed.add_argument('image_file', help='Input file of AIDA pipeline with related folder')
    requiredNamed.add_argument('roi_file', help='Input file of related roi')
    parser.add_argument('-t', '--translatorTXT',
                        help='txt file to translate ROI Number to acronyms',type=str)
    args = parser.parse_args()
    # Fix throughout: the original tested each argument twice
    # ('x is not None and x is not None'); the redundant second check is
    # removed while the validation behavior is unchanged.
    # read image data
    image_file = None
    if args.image_file is not None:
        image_file = args.image_file
        if not os.path.exists(image_file):
            sys.exit("Error: '%s' is not an existing image nii-file." % (image_file))
    img_data = nii.load(image_file)
    img = img_data.get_data()
    # read roi data
    roi_file = None
    if args.roi_file is not None:
        roi_file = args.roi_file
        if not os.path.exists(roi_file):
            sys.exit("Error: '%s' is not an existing roi file." % (roi_file))
    # read translation TXT file
    txt_file = None
    if args.translatorTXT is not None:
        txt_file = args.translatorTXT
        if not os.path.exists(args.translatorTXT):
            sys.exit("Error: '%s' is not an existing translation txt file." % (txt_file))
    roi_data = nii.load(roi_file)
    rois = roi_data.get_data()
    outFile = getOutfile(roi_file, image_file)
    file = extractDTIData(img, rois, outFile, txt_file)
    print("\033[0;30;42m Done \33[0m' %s" % file)
    # save output image and txtFile
    #save_data(image_out, peaks)
3D | Aswendt-Lab/AIDAmri | bin/3.2.1_DTIdata_extract/iterativeRun_MA_stroke_mask.py | .py | 2,323 | 65 | '''
Created on 08.04.2019
Updated: 26.09.2020
@author: Niklas Pallast and Markus Aswendt
process all DTI data
'''
import glob
import os
import numpy as np
def findData(path):
    """Return all scaled stroke-mask volumes below the study *path*."""
    pattern = path + '/GV*/DTI/DSI_studio/*StrokeMask_scaled.nii.gz'
    return list(glob.iglob(pattern, recursive=True))
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='Find all related DTI datta')
    parser.add_argument('-p','--pathData', help='path to study')
    args = parser.parse_args()
    pathData = args.pathData
    listAtlas = findData(pathData)
    print(listAtlas)
    # One pass per DTI parameter map, in the original order: MD, FA, RD, AD.
    # The four copy-pasted loops are deduplicated; the printed command now
    # matches what is executed (the original printed acronyms_ARA.txt but
    # ran the splitted acronym file).
    for suffix in ('md', 'fa0', 'rd', 'ad'):
        for i in range(np.size(listAtlas)):
            print(listAtlas[i])
            curPath = os.path.dirname(listAtlas[i])
            dti = glob.glob(curPath + '/*.%s.nii.gz' % suffix)
            if dti:
                cmd = 'python DTIdata_extract.py ' + dti[0] + ' ' + listAtlas[i] + ' -t ./acronyms_splitted_ARA.txt'
                print(cmd)
                # NOTE(review): os.system with unquoted paths breaks on
                # spaces; consider subprocess.run with an argument list.
                os.system(cmd)
3D | Aswendt-Lab/AIDAmri | bin/3.2.1_DTIdata_extract/iterativeRun_MA.py | .py | 2,331 | 66 | '''
Created on 08.04.2019
Updated: 26.09.2020
@author: Niklas Pallast and Markus Aswendt
process all DTI data
'''
import glob
import os
import numpy as np
def findData(path):
    """Return all scaled rsfMRI-split atlas volumes below the study *path*."""
    pattern = path + '/GV*/DTI/DSI_studio/*_rsfMRISplit_scaled.nii.gz'
    return list(glob.iglob(pattern, recursive=True))
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='Find all related DTI data')
    parser.add_argument('-p','--pathData', help='Path to study')
    args = parser.parse_args()
    pathData = args.pathData
    listAtlas = findData(pathData)
    print(listAtlas)
    # One pass per DTI parameter map, in the original order: MD, FA, RD, AD.
    # The four copy-pasted loops are deduplicated; the printed command now
    # matches what is executed (the original printed acronyms_ARA.txt but
    # ran the splitted acronym file).
    for suffix in ('md', 'fa0', 'rd', 'ad'):
        for i in range(np.size(listAtlas)):
            print(listAtlas[i])
            curPath = os.path.dirname(listAtlas[i])
            dti = glob.glob(curPath + '/*.%s.nii.gz' % suffix)
            if dti:
                cmd = 'python DTIdata_extract.py ' + dti[0] + ' ' + listAtlas[i] + ' -t ./acronyms_splitted_ARA.txt'
                print(cmd)
                # NOTE(review): os.system with unquoted paths breaks on
                # spaces; consider subprocess.run with an argument list.
                os.system(cmd)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/4.1_T2mapPreProcessing/registration_T2MAP.py | .py | 10,120 | 233 | """
Created on 11/09/2023
@author: Marc Schneider
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
Documentation preface, added 23/05/09 by Victor Vera Frazao:
This document is currently in revision for improvement and fixing.
Specifically changes are made to allow compatibility of the pipeline with Ubuntu 18.04 systems
and Ubuntu 18.04 Docker base images, respectively, as well as adapting to appearent changes of
DSI-Studio that were applied since the AIDAmri v.1.1 release. As to date the DSI-Studio version
used is the 2022/08/03 Ubuntu 18.04 release.
All changes and additional documentations within this script carry a signature with the writer's
initials (e.g. VVF for Victor Vera Frazao) and the date at application, denoted after '//' at
the end of the comment line. If code segments need clearance the comment line will be prefaced
by '#?'. Changes are prefaced by '#>' and other comments are prefaced ordinalrily
by '#'.
"""
import sys,os
import nibabel as nii
import numpy as np
import shutil
import glob
import subprocess
import shlex
def _run_reg_command(command):
    """Run a single NiftyReg command line, echo its stdout, re-raise on failure."""
    command_args = shlex.split(command)
    try:
        result = subprocess.run(command_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        print(f"Output of {command}:\n{result.stdout}")
    except Exception as e:
        print(f'Error while executing the command: {command_args} Errorcode: {str(e)}')
        raise


def regABA2T2map(inputVolume, stroke_mask, refStroke_mask, T2data, brain_template, brain_anno,
                 splitAnno, splitAnno_rsfMRI, anno_rsfMRI, bsplineMatrix, outfile):
    """Transfer the Allen Brain Atlas annotations onto a T2 map volume.

    Steps (all external NiftyReg calls):
      1. reg_aladin: rigid registration of the subject T2w onto the T2 map.
      2. reg_resample: warp the split annotation with the precomputed bspline
         matrix, then bring it into T2-map space with the rigid matrix.
      3. Same two-step resampling for the split rsfMRI (parental) annotation.

    NOTE(review): stroke_mask, refStroke_mask, brain_template and anno_rsfMRI
    are accepted but not used by the visible code — presumably kept for
    signature compatibility with sibling registration scripts; confirm.

    Returns the path of the resampled split annotation (*_AnnoSplit.nii.gz).
    """
    base = os.path.basename(inputVolume).split('.')[0]

    # 1) rigid registration T2w -> T2 map
    outputT2w = os.path.join(outfile, base + '_T2w.nii.gz')
    outputAff = os.path.join(outfile, base + 'transMatrixAff.txt')
    _run_reg_command(f"reg_aladin -ref {inputVolume} -flo {T2data} -res {outputT2w} -rigOnly -aff {outputAff}")

    # 2) resample split Annotation (bspline warp, then rigid transform)
    outputAnnoSplit = os.path.join(outfile, base + '_AnnoSplit.nii.gz')
    _run_reg_command(f"reg_resample -ref {brain_anno} -flo {splitAnno} -trans {bsplineMatrix} -inter 0 -res {outputAnnoSplit}")
    _run_reg_command(f"reg_resample -ref {inputVolume} -flo {outputAnnoSplit} -trans {outputAff} -inter 0 -res {outputAnnoSplit}")

    # 3) resample split rsfMRI Annotation the same way
    outputAnnoSplit_rsfMRI = os.path.join(outfile, base + '_AnnoSplit_parental.nii.gz')
    _run_reg_command(f"reg_resample -ref {brain_anno} -flo {splitAnno_rsfMRI} -trans {bsplineMatrix} -inter 0 -res {outputAnnoSplit_rsfMRI}")
    _run_reg_command(f"reg_resample -ref {inputVolume} -flo {outputAnnoSplit_rsfMRI} -trans {outputAff} -inter 0 -res {outputAnnoSplit_rsfMRI}")

    return outputAnnoSplit
def find_RefStroke(refStrokePath, inputVolume):
    """Locate incidence (stroke) masks of the reference timepoint for this subject.

    The subject is matched by the first 9 characters of the input file name.
    Returns a (possibly empty) list of matching paths.
    """
    subject_prefix = os.path.basename(inputVolume)[0:9]
    pattern = refStrokePath + '/' + subject_prefix + '*/anat/*IncidenceData_mask.nii.gz'
    return glob.glob(pattern, recursive=False)
def find_RefAff(inputVolume):
    """Return affine matrix files (*MatrixAff.txt) from the sibling anat/ folder."""
    session_dir = os.path.dirname(os.path.dirname(inputVolume))
    return glob.glob(session_dir + '/anat/*MatrixAff.txt', recursive=False)
def find_RefTemplate(inputVolume):
    """Return template files (*TemplateAff.nii.gz) from the sibling anat/ folder."""
    session_dir = os.path.dirname(os.path.dirname(inputVolume))
    return glob.glob(session_dir + '/anat/*TemplateAff.nii.gz', recursive=False)
def find_relatedData(pathBase):
    """Collect the registration products belonging to a processed T2 dataset.

    Globs the */anat/ folders below pathBase for the brain-extracted T2
    (*Bet), stroke mask, annotation, Allen template and bspline matrix files.
    Returns five (possibly empty) lists in that order.
    """
    def _in_anat(pattern):
        # all patterns share the '<base>*/anat/<pattern>' layout
        return glob.glob(pathBase + '*/anat/' + pattern, recursive=False)

    pathT2 = _in_anat('*Bet.nii.gz')
    pathStroke_mask = _in_anat('*Stroke_mask.nii.gz')
    pathAnno = _in_anat('*Anno.nii.gz')
    pathAllen = _in_anat('*Allen.nii.gz')
    bsplineMatrix = _in_anat('*MatrixBspline.nii')
    return pathT2, pathStroke_mask, pathAnno, pathAllen, bsplineMatrix
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='Registration Allen Brain to T2map')
requiredNamed = parser.add_argument_group('required named arguments')
requiredNamed.add_argument('-i', '--inputVolume', help='Path to the BET file of T2map data after preprocessing',
required=True)
parser.add_argument('-r', '--referenceDay', help='Reference Stroke mask (for example: P5)', nargs='?', type=str,
default=None)
parser.add_argument('-s', '--splitAnno', help='Split annotations atlas', nargs='?', type=str,
default=os.path.abspath(os.path.join(os.getcwd(), os.pardir,os.pardir))+'/lib/ARA_annotationR+2000.nii.gz')
parser.add_argument('-f', '--splitAnno_rsfMRI', help='Split annotations atlas for rsfMRI/T2map', nargs='?', type=str,
default=os.path.abspath(os.path.join(os.getcwd(), os.pardir,os.pardir))+'/lib/annoVolume+2000_rsfMRI.nii.gz')
parser.add_argument('-a', '--anno_rsfMRI', help='Parental Annotations atlas for rsfMRI/T2map', nargs='?', type=str,
default=os.path.abspath(os.path.join(os.getcwd(), os.pardir,os.pardir))+'/lib/annoVolume.nii.gz')
args = parser.parse_args()
stroke_mask = None
inputVolume = None
refStrokePath = None
splitAnno = None
splitAnno_rsfMRI = None
anno_rsfMRI = None
if args.inputVolume is not None:
inputVolume = args.inputVolume
if not os.path.exists(inputVolume):
sys.exit("Error: '%s' is not an existing directory." % (inputVolume,))
outfile = os.path.join(os.path.dirname(inputVolume))
if not os.path.exists(outfile):
os.makedirs(outfile)
# find related data
pathT2, pathStroke_mask, pathAnno, pathTemplate, bsplineMatrix = find_relatedData(os.path.dirname(outfile))
if len(pathT2) is 0:
T2data = []
sys.exit("Error: %s' has no reference T2 template." % (os.path.basename(inputVolume),))
else:
T2data = pathT2[0]
if len(pathStroke_mask) is 0:
pathStroke_mask = []
print("Notice: '%s' has no defined reference (stroke) mask - will proceed without." % (os.path.basename(inputVolume),))
else:
stroke_mask = pathStroke_mask[0]
if len(pathAnno) is 0:
pathAnno = []
sys.exit("Error: %s' has no reference annotations." % (os.path.basename(inputVolume),))
else:
brain_anno = pathAnno[0]
if len(pathTemplate) is 0:
pathTemplate = []
sys.exit("Error: %s' has no reference template." % (os.path.basename(inputVolume),))
else:
brain_template = pathTemplate[0]
if len(bsplineMatrix) is 0:
bsplineMatrix = []
sys.exit("Error: %s' has no bspline Matrix." % (os.path.basename(inputVolume),))
else:
bsplineMatrix = bsplineMatrix[0]
# finde reference stroke mask
refStroke_mask = None
if args.referenceDay is not None:
referenceDay = args.referenceDay
refStrokePath = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(outfile))), referenceDay)
if not os.path.exists(refStrokePath):
sys.exit("Error: '%s' is not an existing directory." % (refStrokePath,))
refStroke_mask = find_RefStroke(refStrokePath, inputVolume)
if len(refStroke_mask) is 0:
refStroke_mask = []
print("Notice: '%s' has no defined reference (stroke) mask - will proceed without." % (os.path.basename(inputVolume),))
else:
refStroke_mask = refStroke_mask[0]
if args.splitAnno is not None:
splitAnno = args.splitAnno
if not os.path.exists(splitAnno):
sys.exit("Error: '%s' is not an existing directory." % (splitAnno,))
if args.splitAnno_rsfMRI is not None:
splitAnno_rsfMRI = args.splitAnno_rsfMRI
if not os.path.exists(splitAnno_rsfMRI):
sys.exit("Error: '%s' is not an existing directory." % (splitAnno_rsfMRI,))
if args.anno_rsfMRI is not None:
anno_rsfMRI = args.anno_rsfMRI
if not os.path.exists(anno_rsfMRI):
sys.exit("Error: '%s' is not an existing directory." % (anno_rsfMRI,))
output = regABA2T2map(inputVolume, stroke_mask, refStroke_mask, T2data, brain_template, brain_anno, splitAnno,splitAnno_rsfMRI,anno_rsfMRI,bsplineMatrix,outfile)
current_dir = os.path.dirname(inputVolume)
search_string = os.path.join(current_dir, "*t2map.nii.gz")
currentFile = glob.glob(search_string)
search_string = os.path.join(current_dir, "*.nii*")
created_imgs = glob.glob(search_string, recursive=True)
os.chdir(os.path.dirname(os.getcwd()))
for idx, img in enumerate(created_imgs):
if img == None:
continue
#os.system('python adjust_orientation.py -i '+ str(img) + ' -t ' + currentFile[0])
continue
print("Registration completed")
| Python |
3D | Aswendt-Lab/AIDAmri | bin/4.1_T2mapPreProcessing/t2map_data_extract.py | .py | 4,617 | 108 | import nibabel as nii
import numpy as np
import argparse
import os
import glob
import csv
import sys # Added import statement for sys module
def getOutfile(atlas_type, img_file, suffix):
    """Build the CSV output path for extracted T2 values.

    The file name combines the map name (third-to-last dot component of the
    image name), the acronym-file stem and the given suffix; the CSV is
    placed next to the image file.
    """
    name_parts = os.path.basename(img_file).split('.')
    map_name = name_parts[-3]
    acronym_name = os.path.basename(atlas_type).split('.')[0]
    file_name = f"{map_name}_T2values_{acronym_name}_{suffix}.csv"
    return os.path.join(os.path.dirname(img_file), file_name)
def extractT2MapdataMean(img, rois, outfile, txt_file):
    """Write per-slice, per-region mean T2 values to a CSV file.

    img      : 3D array of T2 values (same shape as rois).
    rois     : 3D atlas label volume; label 0 is treated as background
               (np.delete removes the first unique value — assumes 0 is
               present as background).
    outfile  : CSV path to create.
    txt_file : optional tab-separated "<id>\t<acronym>" lookup file; when
               None, the Names column is left empty.
    """
    slices = np.unique(np.where(rois > 0)[2])
    regions = np.delete(np.unique(rois), 0)
    # Default to an empty mapping: the original left indices=None when no
    # txt_file was given and then crashed on indices.get(...).
    indices = {}
    if txt_file is not None:
        with open(txt_file) as ref:
            indices = {int(line.split('\t')[0]): line.split('\t')[1].strip() for line in ref}
    with open(outfile, 'w', newline='') as csvfile:
        csv_writer = csv.writer(csvfile)
        csv_writer.writerow(["Slice", "ARA IDs", "Names", "T2 Values", "Region Sizes"])
        for s in slices:
            for r in regions:
                region_voxels = np.where((rois[:, :, s] == r) & (rois[:, :, s] > 0))
                if len(region_voxels[0]) == 0:
                    continue
                mean_value = np.mean(img[region_voxels])
                region_size = len(region_voxels[0])
                acro = indices.get(r, "")  # empty acronym when id is unknown
                csv_writer.writerow([s, r, acro, "%.2f" % mean_value, "%.2f" % region_size])
def extractT2MapdataPerRegion(img, rois, outfile, txt_file):
    """Write whole-volume per-region mean T2 values to a CSV file.

    Same contract as extractT2MapdataMean but aggregated over all slices;
    label 0 is treated as background. txt_file may be None (Names column
    stays empty).
    """
    regions = np.delete(np.unique(rois), 0)
    # Default to an empty mapping: the original left indices=None when no
    # txt_file was given and then crashed on indices.get(...).
    indices = {}
    if txt_file is not None:
        with open(txt_file) as ref:
            indices = {int(line.split('\t')[0]): line.split('\t')[1].strip() for line in ref}
    with open(outfile, 'w', newline='') as csvfile:
        csv_writer = csv.writer(csvfile)
        csv_writer.writerow(["ARA IDs", "Names", "T2 Values", "Region Sizes"])
        for r in regions:
            region_voxels = np.where((rois == r) & (rois > 0))
            if len(region_voxels[0]) == 0:
                continue
            mean_value = np.mean(img[region_voxels])
            region_size = len(region_voxels[0])
            acro = indices.get(r, "")  # empty acronym when id is unknown
            csv_writer.writerow([r, acro, "%.2f" % mean_value, "%.2f" % region_size])
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Extracts the T2 values from the T2 map for every atlas region')
    requiredNamed = parser.add_argument_group('Required named arguments')
    requiredNamed.add_argument('-i', '--input', help='Input T2 map, should be a nifti file')
    args = parser.parse_args()

    # every *.txt in the working directory is treated as an acronym lookup
    acronyms_files = glob.glob(os.path.join(os.getcwd(), "*.txt"))
    print(f"Extracting T2 values for: {args.input}")
    print(f"Acronym files: {acronyms_files}")

    if args.input is None:
        sys.exit("Error: No input file provided.")
    image_file = args.input
    if not os.path.exists(image_file):
        sys.exit(f"Error: '{image_file}' is not an existing image nii-file.")

    img_data = nii.load(image_file)
    img = img_data.get_fdata()

    # Fail with a clear message instead of an IndexError when the atlas files
    # from the registration step are missing next to the input file.
    parental_matches = glob.glob(os.path.join(os.path.dirname(image_file), "*AnnoSplit_parental.nii*"))
    non_parental_matches = glob.glob(os.path.join(os.path.dirname(image_file), "*AnnoSplit.nii*"))
    if not parental_matches or not non_parental_matches:
        sys.exit("Error: no *AnnoSplit.nii*/*AnnoSplit_parental.nii* atlas found next to the input file.")
    parental_atlas = parental_matches[0]
    non_parental_atlas = non_parental_matches[0]

    for acronyms in acronyms_files:
        try:
            # parentARA_LR acronym files belong to the parental atlas
            if "parentARA_LR" in acronyms:
                atlas_type = "parental"
                atlas = parental_atlas
            else:
                atlas_type = "non-parental"
                atlas = non_parental_atlas
            roi_data = nii.load(atlas)
            rois = roi_data.get_fdata()
            outFileMean = getOutfile(atlas_type, image_file, "Mean")
            print(f"Outfile (Mean): {outFileMean}")
            extractT2MapdataMean(img, rois, outFileMean, acronyms)
            outFilePerRegion = getOutfile(atlas_type, image_file, "PerRegion")
            print(f"Outfile (Per Region): {outFilePerRegion}")
            extractT2MapdataPerRegion(img, rois, outFilePerRegion, acronyms)
        except Exception as e:
            print(f'Error while processing the T2 values: {str(e)}')
            raise
    print("Finished T2 map processing")
| Python |
3D | Aswendt-Lab/AIDAmri | bin/4.1_T2mapPreProcessing/preProcessing_T2MAP.py | .py | 6,671 | 200 | """
Created on 11/09/2023
@author: Marc Schneider
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import nipype.interfaces.fsl as fsl
import os, sys
import nibabel as nii
import numpy as np
import applyMICO
import cv2
from pathlib import Path
import shutil
import subprocess
def reset_orientation(input_file):
    """Back up the raw NIfTI and force radiological orientation via fslorient.

    A copy of the untouched input is kept in a sibling 'brkraw' folder; if
    that folder already exists the file is assumed to be processed already
    and nothing is done. The file is rewritten with its unscaled raw data,
    its orientation information deleted and then forced to radiological.
    """
    brkraw_dir = os.path.join(os.path.dirname(input_file), "brkraw")
    if os.path.exists(brkraw_dir):
        # already reset earlier - keep the existing backup untouched
        return
    os.mkdir(brkraw_dir)
    dst_path = os.path.join(brkraw_dir, os.path.basename(input_file))
    shutil.copyfile(input_file, dst_path)
    data = nii.load(input_file)
    raw_img = data.dataobj.get_unscaled()
    raw_nii = nii.Nifti1Image(raw_img, data.affine)
    nii.save(raw_nii, input_file)
    # Use argument lists instead of shell=True so paths containing spaces or
    # shell metacharacters cannot break (or inject into) the command line.
    subprocess.run(["fslorient", "-deleteorient", input_file])
    # command to force the radiological orientation
    subprocess.run(["fslorient", "-forceradiological", input_file])
def applyBET(input_file: str, frac: float, radius: int, output_path: str) -> str:
    """Brain extraction via the FSL Brain Extraction Tool (BET).

    The volume is flipped along the 3rd axis and its affine scaled by a
    factor of 10 before running BET, then the result is scaled back by 1/10.
    Returns the path of the extracted brain (*Bet.nii.gz).
    """
    # scale the NIfTI affine by factor 10 (rodent data trick for FSL)
    src = nii.load(input_file)
    volume = src.get_fdata()
    upscale = np.eye(4) * 10
    upscale[3][3] = 1
    volume = np.flip(volume, 2)
    scaled = nii.Nifti1Image(volume, src.affine * upscale)
    scaled.header.set_xyzt_units('mm')
    scaled = nii.as_closest_canonical(scaled)
    fsl_path = os.path.join(os.path.dirname(input_file), 'fslScaleTemp.nii.gz')
    nii.save(scaled, fsl_path)

    # extract brain on the temporarily scaled volume
    output_file = os.path.join(output_path, os.path.basename(input_file).split('.')[0] + 'Bet.nii.gz')
    bet = fsl.BET(in_file=fsl_path, out_file=output_file, frac=frac, radius=radius, robust=True, mask=True)
    bet.run()
    os.remove(fsl_path)

    # undo the scaling on the BET result (factor 10^-1)
    bet_img = nii.load(output_file)
    downscale = np.eye(4) / 10
    downscale[3][3] = 1
    restored = nii.Nifti1Image(bet_img.get_fdata(), bet_img.affine * downscale)
    restored.header.set_xyzt_units('mm')
    nii.save(restored, output_file)
    return output_file
def smoothIMG(input_file, output_path):
    """Collapse the 4th dimension and median-filter the result with FSL.

    The 4D input is reduced to 3D by the voxel-wise minimum over axis 3,
    saved as *DN.nii.gz, then filtered with a box median kernel of size 0.1
    and written as *DNSmooth.nii.gz (path returned).
    """
    src = nii.load(input_file)
    min_projection = np.min(src.get_fdata(), 3)
    denoised = nii.Nifti1Image(min_projection, src.affine)
    denoised.header.set_xyzt_units('mm')
    denoised_path = os.path.join(os.path.dirname(input_file),
                                 os.path.basename(input_file).split('.')[0] + 'DN.nii.gz')
    nii.save(denoised, denoised_path)

    smoothed_path = os.path.join(output_path, os.path.basename(denoised_path).split('.')[0] + 'Smooth.nii.gz')
    fsl.SpatialFilter(
        in_file=denoised_path,
        out_file=smoothed_path,
        operation='median',
        kernel_shape='box',
        kernel_size=0.1
    ).run()
    return smoothed_path
def thresh(input_file, output_path):
    """Apply an FSL intensity threshold of 20 and return the new file path."""
    out_path = os.path.join(output_path, os.path.basename(input_file).split('.')[0] + 'Thres.nii.gz')
    fsl.Threshold(in_file=input_file, out_file=out_path, thresh=20).run()
    return out_path
def cropToSmall(input_file, output_path):
    """Crop the volume to a fixed ROI (x:40+130, y:50+110, z:0+12) via FSL ExtractROI."""
    out_path = os.path.join(output_path, os.path.basename(input_file).split('.')[0] + 'Crop.nii.gz')
    fsl.ExtractROI(in_file=input_file, roi_file=out_path,
                   x_min=40, x_size=130,
                   y_min=50, y_size=110,
                   z_min=0, z_size=12).run()
    return out_path
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='Preprocessing of T2map Data')
requiredNamed = parser.add_argument_group('required named arguments')
requiredNamed.add_argument('-i', '--input', help='Path to the raw NIfTI T2map file', required=True)
parser.add_argument('-f', '--frac', help='Fractional intensity threshold - default=0.3, smaller values give larger brain outline estimates', nargs='?', type=float,default=0.3)
parser.add_argument('-r', '--radius', help='Head radius (mm not voxels) - default=45', nargs='?', type=int ,default=45)
parser.add_argument('-g', '--vertical_gradient', help='Vertical gradient in fractional intensity threshold - default=0.0, positive values give larger brain outlines at bottom and smaller brain outlines at top', nargs='?',
type=float,default=0.0)
args = parser.parse_args()
# set parameters
input_file = None
if args.input is not None and args.input is not None:
input_file = args.input
if not os.path.exists(input_file):
sys.exit("Error: '%s' is not an existing directory or file %s is not in directory." % (input_file, args.file,))
frac = args.frac
radius = args.radius
vertical_gradient = args.vertical_gradient
output_path = os.path.dirname(input_file)
print(f"Frac: {frac} Radius: {radius} Gradient {vertical_gradient}")
reset_orientation(input_file)
print("Orientation resetted to RAS")
try:
output_smooth = smoothIMG(input_file = input_file, output_path = output_path)
print("Smoothing completed")
except Exception as e:
print(f'Fehler in der Biasfieldcorrecttion\nFehlermeldung: {str(e)}')
raise
# intensity correction using non parametric bias field correction algorithm
try:
output_mico = applyMICO.run_MICO(output_smooth,output_path)
print("Biasfieldcorrecttion was successful")
except Exception as e:
print(f'Fehler in der Biasfieldcorrecttion\nFehlermeldung: {str(e)}')
raise
# get rid of your skull
outputBET = applyBET(input_file = output_mico, frac = frac, radius = radius, output_path = output_path)
print("Brainextraction was successful")
| Python |
3D | Aswendt-Lab/AIDAmri | bin/4.1_T2mapPreProcessing/anisodiff.py | .py | 2,762 | 87 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import numpy as np
import scipy.ndimage
def applyFilter(im, num_iter, delta_t, kappa, option):
    """Anisotropic (Perona-Malik style) diffusion filtering of a 2D image.

    Parameters
    ----------
    im       : 2D array, input image (any numeric dtype).
    num_iter : number of diffusion iterations.
    delta_t  : integration constant (time step).
    kappa    : gradient modulus threshold controlling conduction.
    option   : 1 -> exponential conduction function,
               2 -> rational conduction function.

    Returns a 2D float array with the diffused image; the input is not
    modified.
    """
    # Work on a float copy. The original called im.astype(float) without
    # assigning the result, so integer input was convolved with integer
    # truncation on the first iteration.
    diff_im = im.astype(float)
    # Center pixel distances.
    dx = 1
    dy = 1
    dd = np.sqrt(2)
    # 2D convolution masks - finite differences towards the 8 neighbours.
    hN = np.array(([0, 1, 0], [0, -1, 0], [0, 0, 0]))
    hS = np.array(([0, 0, 0], [0, -1, 0], [0, 1, 0]))
    hE = np.array(([0, 0, 0], [0, -1, 1], [0, 0, 0]))
    hW = np.array(([0, 0, 0], [1, -1, 0], [0, 0, 0]))
    hNE = np.array(([0, 0, 1], [0, -1, 0], [0, 0, 0]))
    hSE = np.array(([0, 0, 0], [0, -1, 0], [0, 0, 1]))
    hSW = np.array(([0, 0, 0], [0, -1, 0], [1, 0, 0]))
    hNW = np.array(([1, 0, 0], [0, -1, 0], [0, 0, 0]))
    for t in range(num_iter):
        # directional gradients (finite differences)
        nablaN = scipy.ndimage.convolve(diff_im, hN, mode='nearest')
        nablaS = scipy.ndimage.convolve(diff_im, hS, mode='nearest')
        nablaE = scipy.ndimage.convolve(diff_im, hE, mode='nearest')
        nablaW = scipy.ndimage.convolve(diff_im, hW, mode='nearest')
        nablaNE = scipy.ndimage.convolve(diff_im, hNE, mode='nearest')
        nablaSE = scipy.ndimage.convolve(diff_im, hSE, mode='nearest')
        nablaSW = scipy.ndimage.convolve(diff_im, hSW, mode='nearest')
        nablaNW = scipy.ndimage.convolve(diff_im, hNW, mode='nearest')
        # Diffusion (conduction) function.
        if option == 1:
            cN = np.exp(-(nablaN / kappa)**2)
            cS = np.exp(-(nablaS / kappa)**2)
            cW = np.exp(-(nablaW / kappa)**2)
            cE = np.exp(-(nablaE / kappa)**2)
            cNE = np.exp(-(nablaNE / kappa)**2)
            cSE = np.exp(-(nablaSE / kappa)**2)
            cSW = np.exp(-(nablaSW / kappa)**2)
            cNW = np.exp(-(nablaNW / kappa)**2)
        elif option == 2:
            cN = 1 / (1 + (nablaN / kappa)**2)
            cS = 1 / (1 + (nablaS / kappa)**2)
            cW = 1 / (1 + (nablaW / kappa)**2)
            cE = 1 / (1 + (nablaE / kappa)**2)
            cNE = 1 / (1 + (nablaNE / kappa)**2)
            cSE = 1 / (1 + (nablaSE / kappa)**2)
            cSW = 1 / (1 + (nablaSW / kappa)**2)
            cNW = 1 / (1 + (nablaNW / kappa)**2)
        else:
            # the original silently fell through and crashed with a NameError
            raise ValueError("applyFilter: option must be 1 or 2")
        # Discrete PDE solution.
        diff_im = diff_im + delta_t * (
            (1 / (dy ** 2)) * cN * nablaN + (1 / (dy ** 2)) * cS * nablaS +
            (1 / (dx ** 2)) * cW * nablaW + (1 / (dx ** 2)) * cE * nablaE +
            (1 / (dd ** 2)) * cNE * nablaNE + (1 / (dd ** 2)) * cSE * nablaSE +
            (1 / (dd ** 2)) * cSW * nablaSW + (1 / (dd ** 2)) * cNW * nablaNW)
    return diff_im
3D | Aswendt-Lab/AIDAmri | bin/4.1_T2mapPreProcessing/MICO.py | .py | 2,666 | 113 | """
@Article{li2014multiplicative,
author = {Li, Chunming and Gore, John C and Davatzikos, Christos},
title = {Multiplicative intrinsic component optimization (MICO) for MRI bias field estimation and tissue segmentation},
journal = {Magnetic resonance imaging},
year = {2014},
volume = {32},
number = {7},
pages = {913--923},
publisher = {Elsevier},
}
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import numpy as np
import sys
def runMICO(Img, q, W, M, C, b, Bas, GGT, ImgG, Iter, iterCM):
    """One round of the MICO alternating minimization.

    Alternately updates the class means C, the membership functions M
    (iterCM inner passes) and the bias field b, Iter times. The unused
    allocation "e = np.zeros(M.shape)" from the original was dead code and
    has been removed; behavior is unchanged.

    Returns (M, b, C) after the final update.
    """
    D = np.zeros(M.shape)
    for n in range(Iter):
        C = updateC(Img, W, b, M)
        for k in range(iterCM):
            N_class = M.shape[2]
            for kk in range(N_class):
                # squared residual of the image against each (bias-scaled) class mean
                D[:, :, kk] = (Img - C[kk] * b) ** 2
            M = updateM(D, q)
        b_out = updateB(Img, q, C, M, Bas, GGT, ImgG)
    M_out = M
    C_out = C
    return M_out, b_out, C_out
def updateB(Img, q, C, M, Bas, GGT, ImgG):
    """Update the bias field b as an optimal linear combination of basis images.

    Solves the normal equations A w = V built from the precomputed products
    ImgG (image x basis) and GGT (basis x basis). Uses np.linalg.solve with
    a pseudoinverse fallback — consistent with (and numerically more stable
    than) the np.linalg.inv version this replaces, matching the sibling
    2.1_T2PreProcessing/MICO.py implementation.
    """
    PC2 = np.zeros(Img.shape)
    PC = np.zeros(Img.shape)
    N_class = M.shape[2]
    for kk in range(N_class):
        PC2 = PC2 + C[kk] ** 2 * M[:, :, kk] ** q
        PC = PC + C[kk] * M[:, :, kk] ** q
    N_bas = Bas.shape[2]
    V = np.zeros(N_bas)
    A = np.zeros([N_bas, N_bas])
    for ii in range(N_bas):
        ImgG_PC = ImgG[:, :, ii] * PC  # Mask in ImgG
        V[ii] = np.sum(ImgG_PC)        # inner product
        for jj in range(N_bas):
            B = GGT[:, :, ii, jj] * PC2  # Mask in GGT
            A[ii, jj] = np.sum(B)        # inner product
            A[jj, ii] = A[ii, jj]
    try:
        # numerically stable: solves A w = V instead of forming inv(A)
        w = np.linalg.solve(A, V)
    except np.linalg.LinAlgError:
        # fallback if A is rank deficient / singular
        print("Warning: A is singular, uses pseudoinverse in updateB")
        w = np.dot(np.linalg.pinv(A), V)
    b = np.zeros(Img.shape)
    for kk in range(N_bas):
        b = b + np.dot(w[kk], Bas[:, :, kk])
    return b
def updateC(Img, W, b, M):
    """Update class means: W-weighted least-squares fit of each C[k] given b and M."""
    n_classes = M.shape[2]
    C_new = np.zeros(n_classes)
    for cls in range(n_classes):
        numerator = b * Img * M[:, :, cls]
        denominator = (b ** 2) * M[:, :, cls]
        sN = np.sum(numerator * W)    # inner product
        sD = np.sum(denominator * W)  # inner product
        # (sD == 0) guards against division by zero for empty classes
        C_new[cls] = sN / (sD + (sD == 0))
    return C_new
def updateM(e, q):
    """Update membership functions from the per-class error images e.

    q > 1  : fuzzy memberships via the closed-form minimizer 1/e^(1/(q-1)),
             normalized over classes (epsilon avoids division by zero).
    q == 1 : crisp winner-takes-all assignment to the minimal-error class.
    other  : invalid fuzzifier, terminates via sys.exit.
    """
    M = np.zeros(e.shape)
    n_classes = e.shape[2]
    if q > 1:
        epsilon = 0.000000000001
        shifted = e + epsilon  # avoid division by zero
        f = 1 / (shifted ** (1 / (q - 1)))
        f_sum = np.sum(f, 2)
        for cls in range(n_classes):
            M[:, :, cls] = f[:, :, cls] / f_sum
    elif q == 1:
        winners = np.argmin(e, 2)
        for cls in range(n_classes):
            M[:, :, cls] = (winners == cls)
    else:
        sys.exit('Error: MICO: wrong fuzzifizer')
    return M
3D | Aswendt-Lab/AIDAmri | bin/4.1_T2mapPreProcessing/applyMICO.py | .py | 5,649 | 216 | """
@Article{li2014multiplicative,
author = {Li, Chunming and Gore, John C and Davatzikos, Christos},
title = {Multiplicative intrinsic component optimization (MICO) for MRI bias field estimation and tissue segmentation},
journal = {Magnetic resonance imaging},
year = {2014},
volume = {32},
number = {7},
pages = {913--923},
publisher = {Elsevier},
}
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import numpy as np
import nibabel as nii
import sys,os
import MICO
import progressbar
import cv2
from tqdm import tqdm
def run_MICO(IMGdata, outputPath):
    """Slice-wise MICO bias field correction of a 3D NIfTI volume.

    IMGdata    : path to the input NIfTI file.
    outputPath : directory for the corrected output (*Mico.nii.gz).
    Returns the path of the corrected file.

    NOTE(review): the class means are initialized with np.random.rand, so
    results are not strictly reproducible between runs — confirm whether a
    fixed seed is wanted.
    """
    data = nii.load(IMGdata)
    v = 8  # unused; kept from the original implementation
    vol = data.get_fdata()
    biasCorrectedVol = np.zeros(vol.shape[0:3])
    # Pick a normalization constant based on the global mean intensity so the
    # fixed threshold (thres=100) below works across acquisition scalings.
    ImgMe = np.mean(vol)
    if ImgMe > 10000:
        nCvalue = 1000
    elif ImgMe > 1000:
        nCvalue = 10
    else:
        nCvalue = 1
    progressbar = tqdm(total=vol.shape[2], desc='Biasfieldcorrection')
    # process the volume slice by slice along the 3rd axis
    for idx in range(vol.shape[2]):
        Img = vol[:,:,idx] / nCvalue
        kernel =np.ones((5,5),np.uint8)
        # erosion result is not used afterwards; kept from the original
        erosion = cv2.erode(Img,kernel,iterations = 1)
        # MICO parameters: iterations, number of tissue regions, fuzzifier q,
        # ROI intensity threshold and mean scaling A
        iterNum = 100
        N_region = 1
        q = 1
        thres = 100
        A = 1
        Img_original = Img
        nrow = Img.shape[0]
        ncol = Img.shape[1]
        n = nrow * ncol
        # binary ROI: voxels brighter than the threshold
        ROIt = Img > thres
        ROI = np.zeros([nrow,ncol])
        ROI[ROIt] = 1
        # 3rd-order polynomial basis for the smooth bias field
        Bas = getBasisOrder3(nrow, ncol)
        N_bas = Bas.shape[2]
        # precompute image*basis and basis*basis products masked by the ROI
        ImgG = np.zeros([nrow,ncol,10])
        GGT = np.zeros([nrow, ncol, 10,10])
        for ii in range(N_bas):
            ImgG[:,:,ii] = Img * Bas[:,:, ii]*ROI
            for jj in range(N_bas):
                GGT[:,:,ii, jj] = Bas[:,:, ii]*Bas[:,:, jj]*ROI
                GGT[:,:,jj, ii] = GGT[:,:,ii, jj]
        energy_MICO = np.zeros([3, iterNum])
        b = np.ones([nrow,ncol])
        # single random initialization (range(1)); kept as a loop from the original
        for ini_num in range(1):
            C = np.random.rand(3, 1)
            C = C * A
            M = np.random.rand(nrow, ncol, 3)
            # normalize memberships of the first N_region channels
            a = np.sum(M, 2)
            for k in range(N_region):
                M[:,:, k]=M[:,:, k]/ a
            # e_max / N_max / M_old / chg are not used later; kept as-is
            e_max = np.amax(M,2)
            N_max = np.argmax(M,2)
            M_old = M
            chg = 10000
            energy_MICO[ini_num, 0] = get_energy(Img, b, C, M, ROI, q)
            for n in range(1,iterNum):
                # one alternating update of memberships, bias field and means
                M, b, C = MICO.runMICO(Img, q, ROI, M, C, b, Bas, GGT, ImgG, 1, 1)
                energy_MICO[ini_num, n] = get_energy(Img, b, C, M, ROI, q)
                # np.mod(n, 1) == 0 is always true, so this runs every iteration
                if np.mod(n, 1) == 0:
                    PC = np.zeros([nrow,ncol])
                    for k in range(N_region):
                        PC = PC + C[k] * M[:,:, k]
                    img_bc = Img /b # bias field corrected image
                    # clip negative and implausibly large intensities to 0
                    smV = img_bc < 0
                    img_bc[smV] = 0
                    smV = img_bc > 5000
                    img_bc[smV] = 0
                    M, C = sortMemC(M, C)
                    seg = np.zeros([nrow,ncol])
                    for k in range(N_region):
                        seg = seg + k * M[:,:, k] # label the k-th region
        # keep the bias-corrected image of the last iteration for this slice
        biasCorrectedVol[:, :, idx] = img_bc
        progressbar.update(1)
    progressbar.close()
    # write the corrected volume with the original affine
    unscaledNiiData = nii.Nifti1Image(biasCorrectedVol, data.affine)
    hdrOut = unscaledNiiData.header
    hdrOut.set_xyzt_units('mm')
    outputData = os.path.join(outputPath,os.path.basename(IMGdata).split('.')[0]+'Mico.nii.gz')
    nii.save(unscaledNiiData,outputData)
    return outputData
def sortMemC(M, C):
    """Sort the class means ascending and reorder membership channels to match.

    Accepts 3D (H, W, K) or 4D (H, W, D, K) membership arrays; exits on any
    other dimensionality.
    """
    order = np.argsort(C)
    C_out = np.sort(C)
    ndim = len(M.shape)
    if ndim == 4:
        M_out = np.zeros([M.shape[0], M.shape[1], M.shape[2], len(order)])
        for pos in range(np.size(C)):
            M_out[:, :, :, pos] = M[:, :, :, order[pos]]
    elif ndim == 3:
        M_out = np.zeros([M.shape[0], M.shape[1], len(order)])
        for pos in range(np.size(C)):
            M_out[:, :, pos] = M[:, :, order[pos]]
    else:
        sys.exit('Error: sortMemC: wrong dimension of the membership function')
    return M_out, C_out
def get_energy(Img, b, C, M, ROI, q):
    """MICO objective value: membership-weighted squared residuals inside the ROI."""
    energy = 0
    for k in range(M.shape[2]):
        mean_img = C[k] * np.ones([Img.shape[0], Img.shape[1]])
        residual_sq = (Img * ROI - b * mean_img * ROI) ** 2
        energy = energy + np.sum(np.sum(residual_sq * M[:, :, k] ** q))
    return energy
def getBasisOrder3(Height, Wide):
    """Third-order polynomial basis images (10 channels) on [-1, 1] x [-1, 1].

    Each channel is normalized to unit Frobenius norm; returns an array of
    shape (Height, Wide, 10).
    """
    x = np.zeros([Height, Wide])
    y = np.zeros([Height, Wide])
    for row in range(Height):
        x[row, :] = np.linspace(-1, 1, Wide)
    for col in range(Wide):
        y[:, col] = np.linspace(-1, 1, Height)
    basis = np.zeros([Height, Wide, 10])
    basis[:, :, 0] = 1
    basis[:, :, 1] = x
    basis[:, :, 2] = (3 * x * x - 1) / 2
    basis[:, :, 3] = (5 * x * x * x - 3 * x) / 2
    basis[:, :, 4] = y
    basis[:, :, 5] = x * y
    basis[:, :, 6] = y * (3 * x * x - 1) / 2
    basis[:, :, 7] = (3 * y * y - 1) / 2
    basis[:, :, 8] = (3 * y * y - 1) * x / 2
    basis[:, :, 9] = (5 * y * y * y - 3 * y) / 2
    # normalize every channel to unit Frobenius norm
    for ch in range(10):
        norm = np.sqrt(np.sum(basis[:, :, ch] ** 2))
        basis[:, :, ch] = basis[:, :, ch] / norm
    return basis
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='Bias Correction')
requiredNamed = parser.add_argument_group('required named arguments')
requiredNamed.add_argument('-i','--input', help='Path to input file',required=True)
args = parser.parse_args()
if args.input is not None and args.input is not None:
input = args.input
if not os.path.exists(input):
sys.exit("Error: '%s' is not an existing directory of file %s is not in directory." % (input, args.file,))
result = run_MICO(input)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/2.1_T2PreProcessing/MICO.py | .py | 3,015 | 123 | """
@Article{li2014multiplicative,
author = {Li, Chunming and Gore, John C and Davatzikos, Christos},
title = {Multiplicative intrinsic component optimization (MICO) for MRI bias field estimation and tissue segmentation},
journal = {Magnetic resonance imaging},
year = {2014},
volume = {32},
number = {7},
pages = {913--923},
publisher = {Elsevier},
}
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import numpy as np
import nibabel as nii
import sys, os
def runMICO(Img, q, W, M, C, b, Bas, GGT, ImgG, Iter, iterCM):
    """One round of the MICO alternating minimization.

    Alternately updates the class means C, the membership functions M
    (iterCM inner passes) and the bias field b, Iter times. The unused
    allocation "e = np.zeros(M.shape)" from the original was dead code and
    has been removed; behavior is unchanged.

    Returns (M, b, C) after the final update.
    """
    D = np.zeros(M.shape)
    for n in range(Iter):
        C = updateC(Img, W, b, M)
        for k in range(iterCM):
            N_class = M.shape[2]
            for kk in range(N_class):
                # squared residual of the image against each (bias-scaled) class mean
                D[:, :, kk] = (Img - C[kk] * b) ** 2
            M = updateM(D, q)
        b_out = updateB(Img, q, C, M, Bas, GGT, ImgG)
    M_out = M
    C_out = C
    return M_out, b_out, C_out
def updateB(Img, q, C, M, Bas, GGT, ImgG):
    """Update the bias field b as an optimal linear combination of basis images.

    Builds the normal equations A w = V from the precomputed products ImgG
    (image x basis) and GGT (basis x basis), masked by the membership-based
    weights, and solves for the basis coefficients w.
    """
    n_classes = M.shape[2]
    PC = np.zeros(Img.shape)
    PC2 = np.zeros(Img.shape)
    for cls in range(n_classes):
        weight = M[:, :, cls] ** q
        PC = PC + C[cls] * weight
        PC2 = PC2 + C[cls] ** 2 * weight
    n_bas = Bas.shape[2]
    V = np.zeros(n_bas)
    A = np.zeros([n_bas, n_bas])
    for ii in range(n_bas):
        V[ii] = np.sum(ImgG[:, :, ii] * PC)  # inner product with masked ImgG
        for jj in range(n_bas):
            A[ii, jj] = np.sum(GGT[:, :, ii, jj] * PC2)  # inner product with masked GGT
            A[jj, ii] = A[ii, jj]
    try:
        # numerically stable: solves A w = V
        w = np.linalg.solve(A, V)
    except np.linalg.LinAlgError:
        # fallback if A is rank deficient / singular
        print("Warning: A is singular, uses pseudoinverse in updateB")
        w = np.dot(np.linalg.pinv(A), V)
    b = np.zeros(Img.shape)
    for kk in range(n_bas):
        b = b + np.dot(w[kk], Bas[:, :, kk])
    return b
def updateC(Img, W, b, M):
    """Update class means via a W-weighted least-squares fit for each class."""
    C_new = np.zeros(M.shape[2])
    for idx in range(M.shape[2]):
        membership = M[:, :, idx]
        sN = np.sum(b * Img * membership * W)   # inner product (numerator)
        sD = np.sum((b ** 2) * membership * W)  # inner product (denominator)
        # (sD == 0) guards against division by zero for empty classes
        C_new[idx] = sN / (sD + (sD == 0))
    return C_new
def updateM(e, q):
    """Recompute membership functions from the per-class error images e.

    For q > 1 the fuzzy closed-form minimizer is used (with an epsilon to
    avoid division by zero); for q == 1 each pixel is assigned crisply to
    its minimal-error class; any other fuzzifier terminates the program.
    """
    M = np.zeros(e.shape)
    num_classes = e.shape[2]
    if q > 1:
        epsilon = 0.000000000001
        safe_e = e + epsilon  # avoid division by zero
        f = 1 / (safe_e ** (1 / (q - 1)))
        total = np.sum(f, 2)
        for idx in range(num_classes):
            M[:, :, idx] = f[:, :, idx] / total
    elif q == 1:
        best = np.argmin(e, 2)
        for idx in range(num_classes):
            M[:, :, idx] = (best == idx)
    else:
        sys.exit('Error: MICO: wrong fuzzifizer')
    return M
3D | Aswendt-Lab/AIDAmri | bin/2.1_T2PreProcessing/t2_value_extraction.py | .py | 4,809 | 111 | import nibabel as nii
import numpy as np
import argparse
import os
import glob
import csv
import sys # Added import statement for sys module
def getOutfile(atlas_type, img_file, suffix):
    """Build the CSV output path (inside a 't2_values_extraction' subfolder).

    The file name combines the map name (third-to-last dot component of the
    image name), the acronym-file stem and the given suffix.
    """
    name_parts = os.path.basename(img_file).split('.')
    map_name = name_parts[-3]
    acronym_name = os.path.basename(atlas_type).split('.')[0]
    file_name = f"{map_name}_T2values_{acronym_name}_{suffix}.csv"
    return os.path.join(os.path.dirname(img_file), "t2_values_extraction", file_name)
def extractT2MapdataMean(img, rois, outfile, txt_file):
    """Write per-slice, per-region mean T2 values to a CSV file.

    img      : 3D array of T2 values (same shape as rois).
    rois     : 3D atlas label volume; label 0 is treated as background
               (np.delete removes the first unique value — assumes 0 is
               present as background).
    outfile  : CSV path to create (parent directory must exist).
    txt_file : optional tab-separated "<id>\t<acronym>" lookup file; when
               None, the Names column is left empty.
    """
    slices = np.unique(np.where(rois > 0)[2])
    regions = np.delete(np.unique(rois), 0)
    # Default to an empty mapping: the original left indices=None when no
    # txt_file was given and then crashed on indices.get(...).
    indices = {}
    if txt_file is not None:
        with open(txt_file) as ref:
            indices = {int(line.split('\t')[0]): line.split('\t')[1].strip() for line in ref}
    with open(outfile, 'w', newline='') as csvfile:
        csv_writer = csv.writer(csvfile)
        csv_writer.writerow(["Slice", "ARA IDs", "Names", "T2 Values", "Region Sizes"])
        for s in slices:
            for r in regions:
                region_voxels = np.where((rois[:, :, s] == r) & (rois[:, :, s] > 0))
                if len(region_voxels[0]) == 0:
                    continue
                mean_value = np.mean(img[region_voxels])
                region_size = len(region_voxels[0])
                acro = indices.get(r, "")  # empty acronym when id is unknown
                csv_writer.writerow([s, r, acro, "%.2f" % mean_value, "%.2f" % region_size])
def extractT2MapdataPerRegion(img, rois, outfile, txt_file):
    """Write whole-volume mean T2 values for every atlas region to a CSV file.

    Parameters
    ----------
    img : 3-D ndarray
        Quantitative T2 map.
    rois : 3-D ndarray, same shape as `img`
        Atlas label volume; 0 is background.
    outfile : str
        Destination CSV path.
    txt_file : str or None
        Optional tab-separated lookup ('id<TAB>name' per line) for region names.
    """
    regions = np.delete(np.unique(rois), 0)  # drop background label 0
    # FIX: start with an empty dict so the lookup below cannot crash on None
    indices = {}
    if txt_file is not None:
        ref_lines = open(txt_file).readlines()
        indices = {int(line.split('\t')[0]): line.split('\t')[1].strip() for line in ref_lines}
    with open(outfile, 'w', newline='') as csvfile:
        csv_writer = csv.writer(csvfile)
        csv_writer.writerow(["ARA IDs", "Names", "T2 Values", "Region Sizes"])
        for r in regions:
            region_voxels = np.where((rois == r) & (rois > 0))
            if len(region_voxels[0]) == 0:
                continue
            mean_value = np.mean(img[region_voxels])
            region_size = len(region_voxels[0])
            acro = indices.get(r, "")  # dict.get() avoids KeyError for unnamed labels
            csv_writer.writerow([r, acro, "%.2f" % mean_value, "%.2f" % region_size])
if __name__ == '__main__':
    # Command-line entry point: extracts T2 values per atlas region for a given
    # quantitative T2 map, once per acronym (*.txt) file found in the CWD.
    parser = argparse.ArgumentParser(description='Extracts the T2w values for every atlas region')
    requiredNamed = parser.add_argument_group('Required named arguments')
    requiredNamed.add_argument('-i', '--input', help='Input T2w file, should be a nifti file')
    args = parser.parse_args()
    # Acronym/label lookup tables are expected as tab-separated *.txt files
    # ('id<TAB>name' per line) in the current working directory.
    acronyms_files = glob.glob(os.path.join(os.getcwd(), "*.txt"))
    print(f"Extracting T2 values for: {args.input}")
    print(f"Acronym files: {acronyms_files}")
    # Checking if input file is provided
    if args.input is None:
        sys.exit("Error: No input file provided.")
    image_file = args.input
    if not os.path.exists(image_file):
        sys.exit(f"Error: '{image_file}' is not an existing image nii-file.")
    img_data = nii.load(image_file)
    img = img_data.get_fdata()  # Using get_fdata() for compatibility
    # Registered atlas volumes are expected next to the input image.
    # NOTE(review): glob(...)[0] raises IndexError when no atlas file is
    # present - TODO confirm the pipeline guarantees these files exist here.
    parental_atlas = glob.glob(os.path.join(os.path.dirname(image_file), "*AnnoSplit_parental.nii*"))[0]
    non_parental_atlas = glob.glob(os.path.join(os.path.dirname(image_file), "*AnnoSplit.nii*"))[0]
    if not os.path.exists(os.path.join(os.path.dirname(image_file), "t2_values_extraction")):
        os.mkdir(os.path.join(os.path.dirname(image_file), "t2_values_extraction"))
    for acronyms in acronyms_files:  # Corrected variable name to acronyms
        try:
            # Pick the matching atlas based on the acronym file's name.
            if "parentARA_LR" in acronyms:
                atlas_type = "parental"
                atlas = parental_atlas
            else:
                atlas_type = "non-parental"
                atlas = non_parental_atlas
            roi_data = nii.load(atlas)
            rois = roi_data.get_fdata()  # Using get_fdata() for compatibility
            outFileMean = getOutfile(atlas_type, image_file, "Mean")  # Fixed suffix to "Mean"
            print(f"Outfile (Mean): {outFileMean}")
            extractT2MapdataMean(img, rois, outFileMean, acronyms)
            outFilePerRegion = getOutfile(atlas_type, image_file, "PerRegion")  # Fixed suffix to "PerRegion"
            print(f"Outfile (Per Region): {outFilePerRegion}")
            extractT2MapdataPerRegion(img, rois, outFilePerRegion, acronyms)
        except Exception as e:
            print(f'Error while processing the T2 values: {str(e)}')  # Improved error message
            raise  # Raising the exception to halt execution
    print("Finished T2 map processing")
| Python |
3D | Aswendt-Lab/AIDAmri | bin/2.1_T2PreProcessing/preProcessing_T2.py | .py | 5,376 | 184 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import nipype.interfaces.fsl as fsl
import os,sys
import nibabel as nii
import numpy as np
import applyMICO
import subprocess
import shutil
def reset_orientation(input_file):
    """Strip the stored NIfTI orientation and force radiological orientation.

    A one-time backup of the original file is kept in a 'brkraw' subfolder
    next to the input; if that folder already exists the file is assumed to
    have been processed before and nothing is done.
    """
    backup_dir = os.path.join(os.path.dirname(input_file), "brkraw")
    if os.path.exists(backup_dir):
        # already reset once - never touch the file a second time
        return
    os.mkdir(backup_dir)
    shutil.copyfile(input_file, os.path.join(backup_dir, os.path.basename(input_file)))
    # re-save the raw (unscaled) voxel data with the original affine
    data = nii.load(input_file)
    raw_img = data.dataobj.get_unscaled()
    nii.save(nii.Nifti1Image(raw_img, data.affine), input_file)
    # remove any orientation information stored in the header
    subprocess.run(f"fslorient -deleteorient {input_file}", shell=True)
    # command to enforce the radiological orientation
    subprocess.run(f"fslorient -forceradiological {input_file}", shell=True)
def applyBET(input_file,frac,radius,vertical_gradient):
    """Skull-strip `input_file` with FSL BET and return the output path.

    FSL's defaults are tuned to human head dimensions, so the voxel size is
    temporarily scaled by a factor of 10 before BET and scaled back afterwards.

    Parameters: frac/radius/vertical_gradient are forwarded to FSL BET.
    Returns the path of the brain-extracted image ('<name>Bet.nii.gz');
    a binary brain mask is written alongside it (mask=True).
    """
    # scale Nifti data by factor 10
    data = nii.load(input_file)
    imgTemp = data.get_fdata()
    scale = np.eye(4)* 10
    scale[3][3] = 1
    # this has to be adapted in the case the output image is not RAS orientated - Siding from feet to nose
    # AIDAmri expects the brkraw data to be anterior - posterior. If this is not the case this axis flip has to be adjusted
    imgTemp = np.flip(imgTemp,2)# z-flip
    #imgTemp = np.flip(imgTemp, 0)
    #imgTemp = np.rot90(imgTemp, 2)
    # NOTE(review): `affine * scale` is an ELEMENT-wise ndarray product, which
    # also zeroes the off-diagonal/translation entries besides scaling the
    # diagonal. This matches the established pipeline behaviour - confirm
    # before changing it to a matrix product.
    scaledNiiData = nii.Nifti1Image(imgTemp, data.affine * scale)
    hdrIn = scaledNiiData.header
    hdrIn.set_xyzt_units('mm')
    scaledNiiData = nii.as_closest_canonical(scaledNiiData)
    fslPath = os.path.join(os.path.dirname(input_file),'fslScaleTemp.nii.gz')
    nii.save(scaledNiiData, fslPath)
    # extract brain
    output_file = os.path.join(os.path.dirname(input_file),os.path.basename(input_file).split('.')[0] + 'Bet.nii.gz')
    myBet = fsl.BET(in_file=fslPath, out_file=output_file,frac=frac,radius=radius,
                    vertical_gradient=vertical_gradient,robust=True, mask = True)
    myBet.run()
    os.remove(fslPath)
    # unscale result data by factor 10ˆ(-1)
    dataOut = nii.load(output_file)
    imgOut = dataOut.get_fdata()
    scale = np.eye(4)/ 10
    scale[3][3] = 1
    unscaledNiiData = nii.Nifti1Image(imgOut, dataOut.affine * scale)
    hdrOut = unscaledNiiData.header
    hdrOut.set_xyzt_units('mm')
    nii.save(unscaledNiiData, output_file)
    return output_file
#%% Program
if __name__ == "__main__":
    import argparse

    # Command-line entry point: bias-field correction (MICO) followed by
    # brain extraction (FSL BET) of a T2-weighted volume.
    parser = argparse.ArgumentParser(description='Preprocessing of T2 Data')
    requiredNamed = parser.add_argument_group('Required named arguments')
    requiredNamed.add_argument('-i','--input_file', help='path to input file',required=True)
    parser.add_argument(
        '-f',
        '--frac',
        help='Fractional intensity threshold - default=0.15 smaller values give larger brain outline estimates',
        nargs='?',
        type=float,
        default=0.15,
    )
    parser.add_argument(
        '-r',
        '--radius',
        help='Head radius (mm not voxels) - default=45',
        nargs='?',
        type=int,
        default=45,
    )
    parser.add_argument(
        '-g',
        '--vertical_gradient',
        help='Vertical gradient in fractional intensity threshold - default=0.0 positive values give larger brain outlines at bottom and smaller brain outlines at top',
        nargs='?',
        type=float,
        default=0.0,
    )
    parser.add_argument(
        '-b',
        '--bias_skip',
        help='Set value to 1 to skip bias field correction',
        nargs='?',
        type=float,
        default=0.0,
    )
    args = parser.parse_args()

    # set Parameters
    input_file = None
    if args.input_file is not None:  # FIX: condition was duplicated ('x and x')
        input_file = args.input_file
        if not os.path.exists(input_file):
            # FIX: the original message referenced the non-existent attribute
            # 'args.file' and raised an AttributeError instead of exiting cleanly.
            sys.exit("Error: '%s' is not an existing directory or file." % (input_file,))

    frac = args.frac
    radius = args.radius
    vertical_gradient = args.vertical_gradient
    bias_skip = args.bias_skip
    print(f"Frac: {frac} Radius: {radius} Gradient {vertical_gradient}")

    reset_orientation(input_file)
    print("Orientation resetted to RAS")

    # intensity correction using non parametric bias field correction algorithm
    print("Starting Biasfieldcorrection:")
    if bias_skip == 0:
        try:
            outputMICO = applyMICO.run_MICO(input_file,os.path.dirname(input_file))
            print("Biasfieldcorrecttion was successful")
        except Exception as e:
            print(f'Fehler in der Biasfieldcorrecttion\nFehlermeldung: {str(e)}')
            raise
    else:
        outputMICO = input_file

    # brain extraction
    print("Starting brain extraction")
    try:
        outputBET = applyBET(input_file=outputMICO,frac=frac,radius=radius,vertical_gradient=vertical_gradient)
        print("Brain extraction was successful")
    except Exception as e:
        print(f'Error in brain extraction\nFehlermeldung: {str(e)}')
        raise
    print("Preprocessing completed")
| Python |
3D | Aswendt-Lab/AIDAmri | bin/2.1_T2PreProcessing/registration_T2.py | .py | 12,410 | 282 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import sys,os
import numpy as np
import nibabel as nii
import glob
import subprocess
import shlex
def _run_reg_command(command):
    """Run one NiftyReg command line, echo its stdout, and re-raise on failure."""
    command_args = shlex.split(command)
    try:
        result = subprocess.run(command_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        print(f"Output of {command}:\n{result.stdout}")
    except Exception as e:
        # FIX: the original f-strings contained a literal '\E' instead of a newline
        print(f'Error while executing the command: {command_args}\nErrorcode: {str(e)}')
        raise


def BET_2_MPIreg(inputVolume, stroke_mask,brain_template, allenBrain_template,allenBrain_anno,split_anno,anno_rsfMRI,split_allenBrain_annorsfMRI,outfile,opt):
    """Register templates/annotations between subject space and Allen space.

    Runs NiftyReg (reg_aladin / reg_f3d / reg_resample) to:
      1. affinely register the in-house template onto `inputVolume`,
      2. affinely register `inputVolume` into Allen space ("incidence" data),
      3. optionally map a stroke mask into Allen space,
      4. non-rigidly refine the template registration (B-spline, strength `opt`),
      5. resample the Allen template and all annotation variants into
         subject space (nearest-neighbour for label volumes, '-inter 0').

    Returns the path of the resampled Allen annotation ('..._Anno.nii.gz').
    """
    base = os.path.basename(inputVolume).split('.')[0]

    # 1) affine: in-house template -> input volume
    output = os.path.join(outfile, base + '_TemplateAff.nii.gz')
    outputCPPAff = os.path.join(outfile, base + 'MatrixAff.txt')
    _run_reg_command(f"reg_aladin -ref {inputVolume} -flo {brain_template} -res {output} -aff {outputCPPAff}")

    # 2) inverse affine: input volume -> Allen space
    outputInc = os.path.join(outfile, base + '_IncidenceData.nii.gz')
    outputIncAff = os.path.join(outfile, base + 'MatrixInv.txt')
    _run_reg_command(f"reg_aladin -ref {allenBrain_template} -flo {inputVolume} -res {outputInc} -aff {outputIncAff}")

    # 3) if a region such as a stroke mask is defined, map it to Allen space too
    if len(stroke_mask) > 0:
        outputIncStrokeMask = os.path.join(outfile, os.path.basename(outputInc).split('.')[0] + '_mask.nii.gz')
        _run_reg_command(f"reg_resample -ref {allenBrain_template} -flo {stroke_mask} -trans {outputIncAff} -res {outputIncStrokeMask}")

    jac = 0.3
    # control-point spacing in mm (minimum deformation field) - smaller
    # spacing means a stronger, more local deformation
    if opt == 1:
        s = [1, 1, 2]
    elif opt == 2:
        s = [2, 2, 2]
    elif opt == 3:
        s = [3, 3, 3]
    else:
        s = [5, 5, 5]

    # 4) non-rigid refinement of the in-house template registration
    outputCPP = os.path.join(outfile, base + 'MatrixBspline.nii')
    output = os.path.join(outfile, base + '_Template.nii.gz')
    _run_reg_command(f"reg_f3d -ref {inputVolume} -flo {brain_template} -sx {s[0]} -sy {s[1]} -sz {s[2]} -jl {jac} -res {output} -cpp {outputCPP} -aff {outputCPPAff}")

    # 5) resample the Allen Brain reference template into subject space
    outputAnno = os.path.join(outfile, base + '_TemplateAllen.nii.gz')
    _run_reg_command(f"reg_resample -ref {inputVolume} -flo {allenBrain_template} -cpp {outputCPP} -res {outputAnno}")

    # resample parental annotations (nearest-neighbour keeps labels intact)
    outputAnnorsfMRI = os.path.join(outfile, base + '_Anno_parental.nii.gz')
    _run_reg_command(f"reg_resample -ref {inputVolume} -flo {anno_rsfMRI} -inter 0 -cpp {outputCPP} -res {outputAnnorsfMRI}")

    # resample parental split annotations
    outputAnnorsfMRI_split = os.path.join(outfile, base + '_AnnoSplit_parental.nii.gz')
    _run_reg_command(f"reg_resample -ref {inputVolume} -flo {split_allenBrain_annorsfMRI} -inter 0 -cpp {outputCPP} -res {outputAnnorsfMRI_split}")

    # resample annotations (this path is the function's return value)
    outputAnno = os.path.join(outfile, base + '_Anno.nii.gz')
    _run_reg_command(f"reg_resample -ref {inputVolume} -flo {allenBrain_anno} -inter 0 -cpp {outputCPP} -res {outputAnno}")

    # resample split annotations
    outputAnnoSplit = os.path.join(outfile, base + '_AnnoSplit.nii.gz')
    _run_reg_command(f"reg_resample -ref {inputVolume} -flo {split_anno} -inter 0 -cpp {outputCPP} -res {outputAnnoSplit}")

    return outputAnno
def find_nearest(array,value):
    """Return the element of `array` that is closest to `value`."""
    return array[np.argmin(np.abs(array - value))]
def clearAnno(araAnno,realBrain_anno,outfile):
    """Snap every label of a warped ARA annotation onto the nearest label value
    that actually occurs in the subject-space annotation, then save the result
    as 'reconstructedAnno.nii.gz' in `outfile`.

    Parameters
    ----------
    araAnno : str
        Path of the warped ARA annotation volume.
    realBrain_anno : str
        Path of the subject-space annotation providing the valid label set.
    outfile : str
        Output directory.

    Returns
    -------
    str
        Path of the written NIfTI file.
    """
    araData = nii.load(araAnno)
    araVol = araData.get_data()
    # clamp negative interpolation artefacts to background
    nullValues = araVol < 0.0
    araVol[nullValues] = 0.0
    # FIX: use np.round directly instead of the odd unbound np.memmap.round call
    araVol = np.round(araVol)
    realData = nii.load(realBrain_anno)
    realVal = realData.get_data()
    uniqueList = np.unique(realVal.tolist())
    # replace every voxel value by the nearest valid label (in-place iteration)
    for i in np.nditer(araVol, op_flags=['readwrite']):
        i[...] = find_nearest(uniqueList, i)
    scaledNiiData = nii.Nifti1Image(araVol, araData.affine)
    hdrIn = scaledNiiData.header
    hdrIn.set_xyzt_units('mm')
    output_file = os.path.join(outfile, 'reconstructedAnno.nii.gz')
    nii.save(scaledNiiData, output_file)
    # FIX: previously returned the directory (`outfile`) instead of the file
    return output_file
def find_mask(inputVolume):
    """Return all stroke-mask files located next to the input volume."""
    search_pattern = os.path.join(os.path.dirname(inputVolume), '*Stroke_mask.nii.gz')
    return glob.glob(search_pattern)
if __name__ == "__main__":
    import argparse

    # Command-line entry point: registers the in-house template and all Allen
    # Brain Atlas variants (template + annotations) to a given T2 volume.
    parser = argparse.ArgumentParser(description='Registration from ABA to T2 Data')
    requiredNamed = parser.add_argument_group('required named arguments')
    requiredNamed.add_argument('-i', '--inputVolume', help='Path to input file', required=True)
    parser.add_argument('-s', '--deformationStrength', help='integer: 1 - very strong deformation, 2 - strong deformation, 3 - medium deformation, 4 - weak deformation ', nargs='?', type=int,
                        default=3)
    parser.add_argument('-g', '--template', help='File: Templates for Allen Brain', nargs='?', type=str,
                        default=os.path.abspath(os.path.join(os.getcwd(), os.pardir,os.pardir))+'/lib/NP_template_sc0.nii.gz')
    parser.add_argument('-t','--allenBrain_template', help='File: Templates of Allen Brain', nargs='?', type=str,
                        default=os.path.abspath(
                            os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/average_template_50.nii.gz')
    parser.add_argument('-a','--allenBrain_anno', help='File: Annotations of Allen Brain', nargs='?', type=str,
                        default=os.path.abspath(
                            os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/annotation_50CHANGEDanno.nii.gz')
    parser.add_argument('-sa', '--splitAnno', help='Split annotations atlas', nargs='?', type=str,
                        default=os.path.abspath(os.path.join(os.getcwd(), os.pardir,os.pardir))+'/lib/ARA_annotationR+2000.nii.gz')
    parser.add_argument('-f', '--anno_rsfMRI', help='Parental Annotations atlas', nargs='?', type=str,
                        default=os.path.abspath(os.path.join(os.getcwd(), os.pardir,os.pardir))+'/lib/annoVolume.nii.gz')
    parser.add_argument('-sf', '--split_annorsfMRI', help='File: Annotations of split Allen Brain', nargs='?',
                        type=str,
                        default=os.path.abspath(
                            os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/annoVolume+2000_rsfMRI.nii.gz')
    args = parser.parse_args()

    inputVolume = None
    allenBrain_template = None
    allenBrain_anno = None
    split_anno = None
    brain_template = None
    split_allenBrain_annorsfMRI = None
    anno_rsfMRI = None
    deformationStrength = args.deformationStrength

    # Validate every supplied path before starting the (long) registration.
    if args.inputVolume is not None:
        inputVolume = args.inputVolume
    if not os.path.exists(inputVolume):
        sys.exit("Error: '%s' is not an existing directory." % (inputVolume,))

    if args.allenBrain_template is not None:
        allenBrain_template = args.allenBrain_template
    if not os.path.exists(allenBrain_template):
        sys.exit("Error: '%s' is not an existing directory." % (allenBrain_template,))

    if args.allenBrain_anno is not None:
        allenBrain_anno = args.allenBrain_anno
    if not os.path.exists(allenBrain_anno):
        sys.exit("Error: '%s' is not an existing directory." % (allenBrain_anno,))

    if args.splitAnno is not None:
        split_anno = args.splitAnno
    if not os.path.exists(split_anno):
        sys.exit("Error: '%s' is not an existing directory." % (split_anno,))

    if args.split_annorsfMRI is not None:
        split_allenBrain_annorsfMRI = args.split_annorsfMRI
    if not os.path.exists(split_allenBrain_annorsfMRI):
        sys.exit("Error: '%s' is not an existing directory." % (split_allenBrain_annorsfMRI,))

    if args.anno_rsfMRI is not None:
        anno_rsfMRI = args.anno_rsfMRI
    if not os.path.exists(anno_rsfMRI):
        sys.exit("Error: '%s' is not an existing directory." % (anno_rsfMRI,))

    if args.template is not None:
        brain_template = args.template
    if not os.path.exists(brain_template):
        sys.exit("Error: '%s' is not an existing directory." % (brain_template,))

    outfile = os.path.join(os.path.dirname(inputVolume))
    if not os.path.exists(outfile):
        os.makedirs(outfile)

    stroke_mask = find_mask(inputVolume)
    # FIX: 'len(...) is 0' compared object identity, not value - use '=='.
    if len(stroke_mask) == 0:
        stroke_mask = []
        print("Notice: '%s' has no defined reference (stroke) mask - will proceed without." % (inputVolume,))
    else:
        stroke_mask = stroke_mask[0]

    transInput = BET_2_MPIreg(inputVolume, stroke_mask,brain_template,allenBrain_template,allenBrain_anno,split_anno,anno_rsfMRI,split_allenBrain_annorsfMRI,outfile,deformationStrength)

    current_dir = os.path.dirname(inputVolume)
    search_string = os.path.join(current_dir, "*T2w.nii.gz")
    currentFile = glob.glob(search_string)
    search_string = os.path.join(current_dir, "*.nii*")
    created_imgs = glob.glob(search_string, recursive=True)
    os.chdir(os.path.dirname(os.getcwd()))
    for idx, img in enumerate(created_imgs):
        # FIX: identity check 'is None' instead of '== None'
        if img is None:
            continue
        #os.system('python adjust_orientation.py -i '+ str(img) + ' -t ' + currentFile[0])
    print("Registration completed")
| Python |
3D | Aswendt-Lab/AIDAmri | bin/2.1_T2PreProcessing/applyMICO.py | .py | 7,170 | 255 | """
@Article{li2014multiplicative,
author = {Li, Chunming and Gore, John C and Davatzikos, Christos},
title = {Multiplicative intrinsic component optimization (MICO) for MRI bias field estimation and tissue segmentation},
journal = {Magnetic resonance imaging},
year = {2014},
volume = {32},
number = {7},
pages = {913--923},
publisher = {Elsevier},
}
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import numpy as np
import nibabel as nii
import sys,os
import MICO
import progressbar
from tqdm import tqdm
def run_MICO(IMGdata,outputPath):
    """Slice-wise MICO bias-field correction of a NIfTI volume.

    Parameters
    ----------
    IMGdata : str
        Path to the input NIfTI image (3-D, or 4-D where only volume 0 is used).
    outputPath : str
        Directory in which the corrected image ('<name>Bias.nii.gz') is written.

    Returns
    -------
    str
        Path of the bias-corrected NIfTI file.
    """
    data = nii.load(IMGdata)
    # get UNSCALED img data
    vol = data.get_fdata()
    biasCorrectedVol = np.zeros(vol.shape[0:3])
    #1) Scaling factor depending on image intensity
    ImgMe = np.mean(vol)
    if ImgMe > 10000:
        nCvalue = 1000
    elif ImgMe > 1000:
        nCvalue = 10
    else:
        nCvalue = 1
    #2) Global threshold over whole volume
    #Scale the volume as it will later be used for the slices.
    if vol.ndim == 4:
        vol_norm = vol[:, :, :, 0] / nCvalue
    else:
        vol_norm = vol / nCvalue
    nz_all = vol_norm[vol_norm > 0]  # all voxels above 0 in volume
    if nz_all.size > 0:
        #e.g. global median as threshold (50th percentile)
        global_thr = np.percentile(nz_all, 50)
        print(f"Global ROI-threshold of volumen: {global_thr:.3f}")
    else:
        global_thr = 0.0
        print("Warning: No voxels above zero in volume, global_thr = 0")
    #Debug
    # --- Debug: Test how large the ROI would be for some slices ---
    print("\nROI-Check for example slices:")
    for idx in [0, vol.shape[2] // 2, vol.shape[2] - 1]:  # first, middle, last slice
        Img_test = vol_norm[:, :, idx]
        ROIt_test = Img_test > global_thr
        print(f"Slice {idx}: ROI voxels = {ROIt_test.sum()} of {Img_test.size}")
    print("------------------------------------------------------------\n")
    #--- Ende Debug ---
    # NOTE(review): this local `progressbar` (a tqdm instance) shadows the
    # imported `progressbar` module for the rest of this function.
    progressbar = tqdm(total=vol.shape[2], desc='Biasfieldcorrection')
    #Debug output
    print(f"Amount of non-zero voxels in total volumen: {nz_all.size}")
    print(
        f"Min/Median/Max of nz_all voxels: {nz_all.min():.3f} / {np.percentile(nz_all, 50):.3f} / {nz_all.max():.3f}")
    print(f"Global ROI-threshold of volume: {global_thr:.3f}")
    #--- Ende Debug output --
    # 3) loop over slices, ROI with global threshold
    for idx in range(vol.shape[2]):
        if np.size(vol.shape) == 4:
            Img = vol[:, :, idx, 0] / nCvalue
        else:
            Img = vol[:, :, idx] / nCvalue
        # MICO parameters: iterations, number of tissue regions, fuzzifier, amplitude
        iterNum = 50
        N_region = 1
        q = 1
        A = 1
        Img_original = Img
        nrow = Img.shape[0]
        ncol = Img.shape[1]
        n = nrow * ncol  # NOTE(review): shadowed by the iteration variable `n` below
        #Global ROI thresholding
        if global_thr > 0:
            ROIt = Img > global_thr
        else:
            # Fallback: simple non-zero thresholding
            ROIt = Img > 0
        ROI = np.zeros((nrow, ncol))
        ROI[ROIt] = 1
        # third-order polynomial basis for the smooth bias field model
        Bas = getBasisOrder3(nrow, ncol)
        N_bas = Bas.shape[2]
        ImgG = np.zeros([nrow,ncol,10])
        GGT = np.zeros([nrow, ncol, 10,10])
        # precompute image/basis products restricted to the ROI (GGT is symmetric)
        for ii in range(N_bas):
            ImgG[:,:,ii] = Img * Bas[:,:, ii]*ROI
            for jj in range(N_bas):
                GGT[:,:,ii, jj] = Bas[:,:, ii]*Bas[:,:, jj]*ROI
                GGT[:,:,jj, ii] = GGT[:,:,ii, jj]
        energy_MICO = np.zeros([3, iterNum])
        b = np.ones([nrow,ncol])
        # single random initialization (range(1)); energies tracked per iteration
        for ini_num in range(1):
            C = np.random.rand(3, 1)
            C = C * A
            M = np.random.rand(nrow, ncol, 3)
            a = np.sum(M, 2)
            for k in range(N_region):
                M[:,:, k]=M[:,:, k]/ a
            energy_MICO[ini_num, 0] = get_energy(Img, b, C, M, ROI, q)
            for n in range(1,iterNum):
                # one MICO update of memberships M, bias field b and centers C
                M, b, C = MICO.runMICO(Img, q, ROI, M, C, b, Bas, GGT, ImgG, 1, 1)
                energy_MICO[ini_num, n] = get_energy(Img, b, C, M, ROI, q)
                if np.mod(n, 1) == 0:
                    PC = np.zeros([nrow,ncol])
                    for k in range(N_region):
                        PC = PC + C[k] * M[:,:, k]
                    img_bc = Img /b # bias field corrected image
                    smV = img_bc < 0
                    img_bc[smV] = 0
                    # smV = img_bc > 1200
                    # img_bc[smV] = 0
        # sort centers ascending and build the (unused) segmentation labeling
        M, C = sortMemC(M, C)
        seg = np.zeros([nrow,ncol])
        for k in range(N_region):
            seg = seg + k * M[:,:, k] # label the k-th region
        biasCorrectedVol[:,:,idx]=img_bc
        progressbar.update(1)
    progressbar.close()
    unscaledNiiData = nii.Nifti1Image(biasCorrectedVol, data.affine)
    hdrOut = unscaledNiiData.header
    hdrOut.set_xyzt_units('mm')
    outputData = os.path.join(outputPath,os.path.basename(IMGdata).split('.')[0]+'Bias.nii.gz')
    nii.save(unscaledNiiData,outputData)
    return outputData
def sortMemC(M, C):
    """Sort the cluster centers ascending and reorder the membership channels
    of `M` (last axis) to match the new center order.

    Returns (M_sorted, C_sorted). Exits the program for membership arrays
    that are neither 3-D nor 4-D.
    """
    order = np.argsort(C)
    C_sorted = np.sort(C)
    if M.ndim == 4:
        M_sorted = np.zeros([M.shape[0], M.shape[1], M.shape[2], len(order)])
        for idx in range(np.size(C)):
            M_sorted[:, :, :, idx] = M[:, :, :, order[idx]]
    elif M.ndim == 3:
        M_sorted = np.zeros([M.shape[0], M.shape[1], len(order)])
        for idx in range(np.size(C)):
            M_sorted[:, :, idx] = M[:, :, order[idx]]
    else:
        sys.exit('Error: sortMemC: wrong dimension of the membership function')
    return M_sorted, C_sorted
def get_energy(Img,b,C,M,ROI,q):
    """Evaluate the MICO objective: sum over classes of the squared residual
    between the ROI-masked image and the bias-modulated class center, weighted
    by the membership raised to the fuzzifier `q`."""
    total = 0
    for k in range(M.shape[2]):
        center_img = C[k] * np.ones([Img.shape[0], Img.shape[1]])
        residual = (Img * ROI - b * center_img * ROI) ** 2
        total = total + np.sum(residual * M[:, :, k] ** q)
    return total
def getBasisOrder3(Height,Wide):
    """Build 10 third-order polynomial basis images on [-1, 1] x [-1, 1],
    each normalized to unit Frobenius norm.

    Returns an (Height, Wide, 10) array; channel 0 is the constant basis.
    """
    x = np.zeros([Height, Wide])
    y = np.zeros([Height, Wide])
    for row in range(Height):
        x[row, :] = np.linspace(-1, 1, Wide)
    for col in range(Wide):
        y[:, col] = np.linspace(-1, 1, Height)
    basis = np.zeros([Height, Wide, 10])
    # Legendre-style polynomials up to order 3 in x and y
    basis[:, :, 0] = 1
    basis[:, :, 1] = x
    basis[:, :, 2] = (3 * x * x - 1) / 2
    basis[:, :, 3] = (5 * x * x * x - 3 * x) / 2
    basis[:, :, 4] = y
    basis[:, :, 5] = x * y
    basis[:, :, 6] = y * (3 * x * x - 1) / 2
    basis[:, :, 7] = (3 * y * y - 1) / 2
    basis[:, :, 8] = (3 * y * y - 1) * x / 2
    basis[:, :, 9] = (5 * y * y * y - 3 * y) / 2
    # normalize every channel to unit Frobenius norm
    for kk in range(10):
        norm = np.sqrt(np.sum(basis[:, :, kk] ** 2))
        basis[:, :, kk] = basis[:, :, kk] / norm
    return basis
if __name__ == "__main__":
    import argparse

    # Stand-alone entry point for the MICO bias-field correction.
    parser = argparse.ArgumentParser(description='Bias Correction')
    requiredNamed = parser.add_argument_group('required named arguments')
    requiredNamed.add_argument('-i','--input', help='file name of data',required=True)
    args = parser.parse_args()
    input_file = args.input
    if not os.path.exists(input_file):
        # FIX: the original message referenced the non-existent attribute
        # 'args.file' and raised an AttributeError instead of exiting cleanly.
        sys.exit("Error: '%s' is not an existing directory or file." % (input_file,))
    # FIX: run_MICO requires an output path - the original single-argument call
    # always raised a TypeError. Write next to the input, like the pipeline does.
    result = run_MICO(input_file, os.path.dirname(input_file))
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.1_T2Processing/getIncidenceSize_par.py | .py | 8,846 | 255 |
"""
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import os,sys
import nibabel as nii
import glob
import numpy as np
import scipy.io as sc
import scipy.ndimage as ndimage
def find_nearest(array,value):
    """Return the entry of `array` with the smallest absolute distance to `value`."""
    distances = np.abs(array - value)
    nearest_idx = distances.argmin()
    return array[nearest_idx]
def thresholdingSlc(volumeMR,maskImg,thres):
    """Slice-wise adaptive thresholding of a (masked) MR volume.

    Smooths the volume, masks it with channel 0 of `maskImg`, saves the
    masked volume, then binarizes each slice with a per-slice threshold of
    mean + 1.5*std over the remaining non-zero voxels (voxels >= `thres`
    are zeroed first as outliers).

    Returns (binarized volume, boolean array where the volume equals 1).

    NOTE(review): writes 'maskedVolume.nii.gz' via the module-level global
    `outfile` (set in __main__) - this function cannot be used standalone.
    """
    volumeMR=ndimage.gaussian_filter(volumeMR, sigma=(1.2, 1.2, 1))
    # mask with the first channel of the (4-D) mask volume
    volumeMR = volumeMR * maskImg[:, :, :, 0]
    scaledNiiData = nii.Nifti1Image(volumeMR, np.eye(4))
    hdrIn = scaledNiiData.header
    hdrIn.set_xyzt_units('mm')
    output_file = os.path.join(outfile,'maskedVolume.nii.gz')
    nii.save(scaledNiiData, output_file)
    # NOTE(review): the '-1' skips the last slice - TODO confirm intentional
    for i in range(len(volumeMR[1,1,:])-1):
        voSlc = volumeMR[:,:,i]
        # zero out intensities at/above the global cut-off (treated as outliers)
        uvalues = voSlc >= thres
        voSlc[uvalues] = 0
        zvalues = voSlc != 0
        # per-slice adaptive threshold over the remaining non-zero voxels
        thresF = np.mean(voSlc[zvalues]) + 1.5*np.std(voSlc[zvalues])
        bvalues = voSlc < thresF
        voSlc[bvalues] = 0
        voSlc[bvalues] = 0  # NOTE(review): duplicated statement (no effect)
        fvalues = voSlc >= thresF
        voSlc[fvalues] = 1
        volumeMR[:, :, i] = voSlc
    fvalues = volumeMR == 1
    return volumeMR,fvalues
def thresholding(volumeMR,maskImg,thres,k):
    """Binarize an MR volume: Gaussian-smooth, optionally mask, threshold.

    Parameters
    ----------
    volumeMR : 3-D ndarray
        Input volume.
    maskImg : 3-D ndarray
        Binary mask, applied only when k == 1.
    thres : number
        Fixed threshold; 0 selects a data-driven cut-off
        (mean + 2*std of the non-zero smoothed voxels).
    k : int
        1 to apply the mask before thresholding.

    Returns the binarized (0/1) volume.
    """
    smoothed = ndimage.gaussian_filter(volumeMR, sigma=(1.3, 1.3, 1))
    # non-zero positions are recorded BEFORE masking (threshold statistics
    # are then taken at these positions on the masked data)
    nonzero = smoothed != 0
    if k == 1:
        smoothed = smoothed * maskImg[:, :, :]  # , 0]
    if thres == 0:
        thres = np.mean(smoothed[nonzero]) + 2 * np.std(smoothed[nonzero])
    smoothed[smoothed < thres] = 0
    smoothed[smoothed >= thres] = 1
    return smoothed
def incidenceMap(path_listInc,path_listMR ,path_listAnno, araDataTemplate,incidenceMask ,thres, outfile,labels):
    """Quantify the stroke extent per parental ARA region for one dataset.

    Thresholds the incidence volume with the stroke mask, intersects it with
    the warped parental annotation, writes the affected-region volumes and a
    text/.mat summary (percent affected per region, stroke volume in mm^3).

    NOTE(review): `fileIndex` is fixed to 0, so only the FIRST entry of each
    path list is processed - TODO confirm this is intended.
    """
    araDataTemplate = nii.load(araDataTemplate)
    realAraImg = araDataTemplate.get_data()
    coloredAraLabels = np.zeros([np.size(realAraImg, 0), np.size(realAraImg, 1), np.size(realAraImg, 2)])
    matFile = sc.loadmat(labels)
    labMat = matFile['ABLAbelsIDsParental']
    # binarize the stroke mask
    maskData = nii.load(incidenceMask)
    maskImg = maskData.get_data()
    oneValues = maskImg > 0.0
    maskImg[oneValues] = 1.0
    fileIndex = 0
    # get warped annos of the current mr
    dataAnno = nii.load(path_listAnno[fileIndex])
    volumeAnno = np.round(dataAnno.get_data())
    dataMR = nii.load(path_listInc[fileIndex])
    volumeMR = dataMR.get_data()
    # binarized stroke volume (masked, thresholded)
    strokeVolume = thresholding(volumeMR, maskImg, thres,1)
    # annotation restricted to the stroke area
    fValues_Anno = volumeAnno*strokeVolume
    scaledNiiData = nii.Nifti1Image(fValues_Anno, dataAnno.affine)
    hdrIn = scaledNiiData.header
    hdrIn.set_xyzt_units('mm')
    output_file = os.path.join(outfile,os.path.basename(path_listMR[fileIndex]).split('.')[0]+ 'Anno_parmask.nii.gz')
    nii.save(scaledNiiData, output_file)
    ref_Image = fValues_Anno
    # unique affected labels (background/negative labels removed)
    fValues_Anno = np.unique(fValues_Anno)
    nullValues = np.argwhere(fValues_Anno<=0.0)
    fValues_Anno = np.delete(fValues_Anno, nullValues)
    regionAffectPercent = np.zeros(np.size(fValues_Anno))
    for i in range(np.size(fValues_Anno)):
        # NOTE(review): factor 200 with a clamp at 100 - presumably accounts
        # for hemisphere-split labels; confirm against the original analysis.
        regionAffectPercent[i] = (np.sum(ref_Image == fValues_Anno[i]) / np.sum(volumeAnno == fValues_Anno[i])) * 200
    regionAffectPercent[regionAffectPercent > 100] = 100
    labCounterList = np.isin(labMat[:, 0], fValues_Anno)
    labMat = labMat[labCounterList,0]
    # color the affected labels in ARA template space; right hemisphere
    # labels are offset by +2000 (value 2000 alone is background again)
    labCounterColor = np.isin(realAraImg, fValues_Anno)
    coloredAraLabels[labCounterColor] = realAraImg[labCounterColor]
    xdim = np.size(coloredAraLabels, 0)
    coloredAraLabels[int(xdim / 2):xdim, :, :] = coloredAraLabels[int(xdim / 2):xdim, :, :] + 2000
    coloredAraLabels[coloredAraLabels == 2000] = 0
    scaledNiiData = nii.Nifti1Image(coloredAraLabels, araDataTemplate.affine)
    hdrIn = scaledNiiData.header
    hdrIn.set_xyzt_units('mm')
    output_file = os.path.join(outfile, 'affectedRegions_Parental.nii.gz')
    nii.save(scaledNiiData, output_file)
    # Stroke volume calculation
    betMask = nii.load(os.path.join(outfile,os.path.basename(path_listInc[fileIndex]).split('.')[0]+'_mask.nii.gz'))
    betMaskImg = betMask.get_data()
    oneValues = betMaskImg > 0.0
    betMaskImg[oneValues] = 1.0
    # volume in mm^3 = voxel count * voxel volume (product of affine diagonal)
    strokeVolumeInCubicMM = np.sum(maskImg * (dataMR.affine[0, 0] * dataMR.affine[1, 1] * dataMR.affine[2, 2]))
    brainVolumeInCubicMM = np.sum(betMaskImg * (dataMR.affine[0, 0] * dataMR.affine[1, 1] * dataMR.affine[2, 2]))
    # write the human-readable summary
    lines =open(os.path.abspath(os.path.join(os.getcwd(), os.pardir,os.pardir))+ '/lib/annoVolume.nii.txt').readlines()
    o=open(os.path.join(outfile, 'affectedRegions_Parental.txt'), 'w')
    o.write("Stroke: %0.2f %% - Stroke Volume: %0.2f mm^3\n" % (((strokeVolumeInCubicMM/brainVolumeInCubicMM)*100),strokeVolumeInCubicMM,))
    matIndex = 0
    labelNamesAffected = ["" for x in range(np.size(fValues_Anno))]
    labelNames = ["" for x in range(np.size(lines))]
    for i in range(len(lines)):
        labelNames[i] = lines[i].split('\t')[1]
        if np.isin(int(lines[i].split('\t')[0]),labMat):
            o.write(lines[i][:-1] + "\t %0.2f %%\n" % regionAffectPercent[matIndex])
            labelNamesAffected[matIndex] = lines[i].split('\t')[1]
            matIndex = matIndex + 1
            #o.write(str(int(lines[i].split(' ')[0]) + 2000) + ' R_' + lines[i].split(' ')[1])
    o.close()
    # store the machine-readable summary (.mat) for downstream statistics
    rows = np.shape(labMat)[0]
    labMat = np.stack((labMat, regionAffectPercent[0:rows]))
    matFile['ABLAbelsIDsParental'] = labMat
    matFile['ABANamesPar'] = labelNamesAffected
    matFile['ABAlabels'] = labelNames
    matFile['volumePer'] = (strokeVolumeInCubicMM / brainVolumeInCubicMM) * 100
    matFile['volumeMM'] = strokeVolumeInCubicMM
    sc.savemat(os.path.join(outfile, 'labelCount_par.mat'), matFile)
def findIncData(path):
    """List all '*IncidenceData.nii.gz' files one directory level below `path`."""
    pattern = path + '*/*IncidenceData.nii.gz'
    return list(glob.iglob(pattern, recursive=False))
def findBETData(path):
    """List all brain-extracted '*Bet.nii.gz' files one directory level below `path`."""
    pattern = path + '*/*Bet.nii.gz'
    return list(glob.iglob(pattern, recursive=False))
def findRegisteredData(path):
    """List all registered '*_Template.nii.gz' files one directory level below `path`."""
    pattern = path + '*/*_Template.nii.gz'
    return list(glob.iglob(pattern, recursive=True))
def findRegisteredAnno(path):
    """List all '*_AnnoSplit_parental.nii.gz' annotations one directory level below `path`."""
    pattern = path + '*/*_AnnoSplit_parental.nii.gz'
    return list(glob.iglob(pattern, recursive=True))
if __name__ == "__main__":
    import argparse

    # Command-line entry point: computes per-region incidence (stroke) sizes
    # for one T2w folder; expects BET, IncidenceData and annotation files from
    # the preceding pipeline stages inside that folder.
    parser = argparse.ArgumentParser(description='Calculate incidence sizes of parental regions. You do not need to enter single files, but the path to the .../T2w folder')
    requiredNamed = parser.add_argument_group('Required named arguments')
    requiredNamed.add_argument('-i', '--inputFolder', help='.../T2w')
    parser.add_argument('-t', '--threshold', help='Threshold for stroke values ', nargs='?', type=int,
                        default=0)
    # NOTE(review): the default for '--allenBrain_anno' points to
    # average_template_50.nii.gz (a template, not an annotation) - confirm.
    parser.add_argument('-a', '--allenBrain_anno', help='File: Annotations of Allen Brain', nargs='?', type=str,
                        default=os.path.abspath(
                            os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/average_template_50.nii.gz')
    inputFolder = None
    allenBrain_template = None
    allenBrain_anno = None
    outfile = None
    args = parser.parse_args()
    if args.inputFolder is not None:
        inputFolder = args.inputFolder
        # results are written into the input folder itself
        outfile = args.inputFolder
    if not os.path.exists(inputFolder):
        sys.exit("Error: '%s' is not an existing directory." % (inputFolder,))
    if args.allenBrain_anno is not None:
        allenBrain_anno = args.allenBrain_anno
    if not os.path.exists(allenBrain_anno):
        sys.exit("Error: '%s' is not an existing directory." % (allenBrain_anno,))
    thres = args.threshold
    # shared library files: label IDs (.mat) and the parental annotation volume
    labels = os.path.abspath(os.path.join(os.getcwd(), os.pardir,os.pardir))+ '/lib/rsfMRILablelID.mat'
    araDataTemplate = os.path.abspath(os.path.join(os.getcwd(), os.pardir,os.pardir))+ '/lib/annoVolume.nii.gz'
    # a stroke mask is mandatory for this analysis
    if len(glob.glob(inputFolder+'/*Stroke_mask.nii.gz')) > 0:
        incidenceMask = glob.glob(inputFolder+'/*Stroke_mask.nii.gz')[0]
    else:
        sys.exit("Error: '%s' has no affected or masked regions." % (inputFolder,))
    path = os.path.join(inputFolder)
    regMR_list = findBETData(path)
    regInc_list = findIncData(path)
    regANNO_list = findRegisteredAnno(path)
    print("'%i' folder will be proccessed..." % (len(regMR_list),))
    if not len(regANNO_list) == len(regMR_list):
        sys.exit("Error: For one or more annotations is no corresponding MR file defined in '%s'." % (inputFolder,))
    incidenceMap(regMR_list,regInc_list,regANNO_list,araDataTemplate,incidenceMask,thres,outfile,labels)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.1_T2Processing/getIncidenceMap.py | .py | 4,056 | 107 | import os
import sys
import nibabel as nii
import glob
import numpy as np
import progressbar
import matplotlib
import matplotlib.pyplot as plt
# --- Fonts & Text Display ---
matplotlib.rcParams['svg.fonttype'] = 'none' #text remains editable in SVG
matplotlib.rcParams['pdf.fonttype'] = 42 # Editable text in PDF (Type 42)
def heatMap(incidenceMap, araVol, outputLocation):
    """Render a 3x4 grid of axial slices of the incidence map over the ARA
    template and save the figure as PNG, PDF and SVG in ``outputLocation``.

    Parameters
    ----------
    incidenceMap : ndarray
        3D volume of per-voxel incidence counts.
    araVol : ndarray
        Atlas template volume of the same shape, drawn as grayscale backdrop.
    outputLocation : str
        Directory into which heatMap.png / heatMap.pdf / heatMap.svg are written.
    """
    maxV = int(np.max(incidenceMap))
    fig, axes = plt.subplots(nrows=3, ncols=4)
    t = 1
    for ax in axes.flat:
        # One axial slice every 16 voxels (t = 1..12); assumes the volume has
        # at least 12 * 16 = 192 slices along z -- TODO confirm for this atlas.
        im = ax.imshow(np.transpose(np.round(incidenceMap[:, :, t * 16])), cmap='gnuplot', vmin=0, vmax=maxV)
        ax.imshow(np.transpose(araVol[:, :, t * 16]), alpha=0.55, cmap='gray')
        ax.axis('off')
        t = t + 1
    fig.subplots_adjust(right=0.8)
    # Shared colorbar with one integer tick per incidence level.
    cbar_ax = fig.add_axes([0.85, 0.15, 0.05, 0.7])
    bounds = np.linspace(0, maxV, maxV + 1)
    cbar = fig.colorbar(im, cax=cbar_ax, format='%1i', ticks=bounds)
    cbar.ax.tick_params(labelsize=14)
    # Save the heatmap instead of showing
    output_file = os.path.join(outputLocation, 'heatMap.png')
    plt.savefig(output_file)
    # Save heatmap as PDF
    output_pdf = os.path.join(outputLocation, 'heatMap.pdf')
    plt.savefig(output_pdf)
    # Save heatmap as SVG (vector graphics)
    output_svg = os.path.join(outputLocation, 'heatMap.svg')
    plt.savefig(output_svg)
    plt.close()
def incidenceMap2(path_listInc, araTemplate, inputFile, outputLocation):
    """Accumulate binarized per-subject masks into one overlap (incidence) volume.

    Every mask in ``path_listInc`` is binarized (any positive voxel -> 1) and
    summed voxel-wise, so each voxel of the result counts how many subjects are
    affected there. The sum is saved as ``incMap.nii.gz`` (with the template's
    affine) and rendered via heatMap().

    Parameters
    ----------
    path_listInc : list of str
        Paths of the per-subject incidence-mask NIfTI files.
    araTemplate : str
        Path of the ARA template NIfTI used as reference grid and backdrop.
    inputFile : str
        Not used inside this function; kept for the CLI caller's signature.
    outputLocation : str
        Output directory for the NIfTI and the heat-map figures.
    """
    araDataTemplate = nii.load(araTemplate)
    realAraImg = np.asanyarray(araDataTemplate.dataobj)
    # Accumulator has the template's shape; assumes every mask was registered
    # to the template grid beforehand -- TODO confirm upstream registration.
    overlaidIncidences = np.zeros_like(realAraImg)
    bar = progressbar.ProgressBar()
    for fileIndex in bar(range(len(path_listInc))):
        dataMRI = nii.load(path_listInc[fileIndex])
        volumeMRI = np.asanyarray(dataMRI.dataobj)
        # Binarize so each subject contributes exactly 1 per affected voxel.
        volumeMRI[volumeMRI <= 0] = 0
        volumeMRI[volumeMRI > 0] = 1
        overlaidIncidences += volumeMRI
    overlayNII = nii.Nifti1Image(overlaidIncidences, araDataTemplate.affine)
    output_file = os.path.join(outputLocation, 'incMap.nii.gz')
    nii.save(overlayNII, output_file)
    heatMap(incidenceMap=overlaidIncidences, araVol=realAraImg, outputLocation=outputLocation)
    max_overlap = int(np.max(overlaidIncidences))
    print("Maximum number of subjects overlapping at any voxel in the incidence volume:", max_overlap)
def findIncData(path):
    """Collect every '*IncidenceData_mask.nii.gz' found under path/*/*/anat/."""
    pattern = os.path.join(path, "*", "*", 'anat', '*IncidenceData_mask.nii.gz')
    return glob.glob(pattern)
if __name__ == "__main__":
    # Command-line front end: collect all incidence masks below the input
    # folder and build the group-level incidence map.
    import argparse
    parser = argparse.ArgumentParser(description='Calculate an Incidence Map')
    parser.add_argument('-i', '--inputFile', help='Directory: Brain extracted input data, e.g proc_data folder', required=True)
    parser.add_argument('-o', '--outputLocation', help='Directory: Output location for the heat map', required=True)
    parser.add_argument('-a', '--allenBrainTemplate', help='File: Annotations of Allen Brain', nargs='?', type=str,
                        default=os.path.abspath(os.path.join(os.getcwd(), os.pardir, os.pardir, 'lib', 'average_template_50.nii.gz')))
    args = parser.parse_args()

    inputFile = args.inputFile
    outputLocation = args.outputLocation
    allenBrainTemplate = args.allenBrainTemplate
    # Fail fast on missing inputs before any processing starts.
    if not os.path.exists(inputFile):
        sys.exit("Error: '%s' is not an existing directory." % (inputFile,))
    if not os.path.exists(outputLocation):
        sys.exit("Error: '%s' is not an existing directory." % (outputLocation,))
    if not os.path.exists(allenBrainTemplate):
        sys.exit("Error: '%s' is not an existing file." % (allenBrainTemplate,))

    regInc_list = findIncData(inputFile)
    if len(regInc_list) < 1:
        sys.exit("Error: No masked strokes found in the provided directory.")

    print("'%i' folders are part of the incidence map." % (len(regInc_list),))
    incidenceMap2(regInc_list, allenBrainTemplate, inputFile, outputLocation)
    sys.exit(0)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.1_T2Processing/changSNR.py | .py | 1,856 | 69 | """ Changs's method
{chang2005automatic,
title={An automatic method for estimating noise-induced signal variance in magnitude-reconstructed magnetic resonance images},
author={Chang, Lin-Ching and Rohde, Gustavo K and Pierpaoli, Carlo},
booktitle={Medical Imaging},
pages={1136--1142},
year={2005},
organization={International Society for Optics and Photonics}"""
from math import *
import numpy as np
import matplotlib.pyplot as plt
def calcSNR(img, show, fac):
# Normalize input dataset and plot histogram
# img = np.fliplr(img)
img = img.astype(int)
maxi = img.max()
imgFlat = img.flatten(2)
imgNorm = imgFlat / maxi
bins = ceil(sqrt(imgNorm.size)) * fac
binCount, binLoc = np.histogram(imgNorm, int(bins))
n = len(imgNorm)
estStd = np.argmax(binCount)
estStd = (estStd) / binCount.shape
x = np.linspace(0, 1, bins)
fhat = np.zeros([1, len(x)])
h = 1.06 * n ** (-1 / 5) * estStd
# define function
gauss = lambda x: gaussianFct(x)
for i in range(n):
# get each kernel function evaluated at x
# centered at data
f = gauss((x - imgNorm[i]) / h)
# plot(x, f / (n * h))
fhat = fhat + f
fhat = fhat / (n * h)
# SNR-Map
maxPos = np.argmax(fhat)
estStdNorm = binLoc[maxPos]
estStd = (binLoc[maxPos] * maxi) / 10
snrMap = np.sqrt(abs(np.square(img) - (np.square(estStd)))) / estStd
if show > 0:
if len(img.shape) == 2:
figChang = plt.figure(3)
plt.imshow(snrMap)
plt.show()
elif len(img.shape) == 3:
figChang = plt.figure(3)
plt.imshow(snrMap[:, :, int(np.ceil(len(img.shape) / 2))])
plt.show()
return snrMap, estStd, estStdNorm
def gaussianFct(x):
    """Standard normal probability density, evaluated element-wise."""
    return np.exp(-np.square(x) / 2) / sqrt(2 * pi)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.1_T2Processing/brummerSNR.py | .py | 1,682 | 59 | """ Brummer's Method
brummer1993automatic,
title={Automatic detection of brain contours in MRI data sets},
author={Brummer, Marijn E and Mersereau, Russell M and Eisner, Robert L and Lewine, Richard RJ},
journal={IEEE Transactions on medical imaging},
volume={12},
number={2},
pages={153--166},
year={1993},
publisher={IEEE} """
from math import *
import numpy as np
import matplotlib.pyplot as plt
import scipy.optimize, scipy.signal
def calcSNR(img, show, fac):
    """Estimate the noise level and an SNR map of an MR magnitude image with
    Brummer's method: fit a two-parameter Rayleigh distribution to the
    low-intensity (background) part of the intensity histogram.

    Parameters
    ----------
    img : ndarray
        2D or 3D magnitude image.
    show : int
        If > 0, display the resulting SNR map with matplotlib.
    fac : int
        Multiplier for the number of histogram bins.

    Returns
    -------
    snrMap : ndarray
        Per-voxel SNR estimate.
    estStd : float
        Fitted noise standard deviation on the image intensity scale
        (scale parameter * max intensity / 10).
    estStdNorm : float
        Fitted Rayleigh scale parameter on the normalized [0, 1] scale.
    """
    # Normalize input dataset and plot histogram
    # img = np.fliplr(img)
    img = img.astype(int)
    maxi = img.max()
    imgFlat = img.flatten()
    imgNorm = imgFlat / maxi
    bins = ceil(sqrt(imgNorm.size)) * fac
    binCount, binLoc = np.histogram(imgNorm, int(bins))
    maxRayl = max(binCount)
    # The histogram mode serves as the initial noise guess; the Rayleigh fit is
    # restricted to bins below twice the mode (presumed pure background).
    estStd = np.argmax(binCount)
    cutOff = 2 * estStd
    estStd = (estStd) / len(binCount)
    # define function: least-squares misfit of a scaled Rayleigh pdf
    raylfunc = lambda x: rayl_2p(x, binLoc[0:cutOff - 1], binCount[0:cutOff - 1])
    yout = scipy.optimize.fmin(func=raylfunc, x0=[maxRayl, estStd], disp=False)
    estStdNorm = yout[1]
    estStd = (yout[1] * maxi) / 10
    snrMap = np.sqrt(abs(np.square(img) - (np.square(estStd)))) / estStd
    if show > 0:
        if len(img.shape) == 2:
            plt.figure(3)
            plt.imshow(snrMap)
            plt.show()
        elif len(img.shape) == 3:
            plt.figure(3)
            plt.imshow(snrMap[:, :, int(np.ceil(len(img.shape) / 2))])
            plt.show()
    return snrMap, estStd, estStdNorm
def rayl_2p(fitPar, x, data):
    """Sum of squared residuals between a scaled Rayleigh pdf and observed counts.

    fitPar[0] is the amplitude and fitPar[1] the Rayleigh scale parameter;
    used as the objective function for scipy.optimize.fmin in calcSNR.
    """
    scale_sq = fitPar[1] ** 2
    rayleigh = (x / scale_sq) * np.exp(-np.square(x) / (2 * scale_sq))
    residuals = fitPar[0] * rayleigh - data
    return sum(residuals ** 2)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.1_T2Processing/getIncidenceSize.py | .py | 7,766 | 215 |
"""
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import os,sys
import nibabel as nii
import glob
import numpy as np
import scipy.io as sc
import scipy.ndimage as ndimage
def thresholding(volumeMR, maskImg, thres, k):
    """Smooth a volume and binarize it at a threshold.

    The input is Gaussian-filtered (sigma 1.3/1.3/1), optionally multiplied
    with ``maskImg`` when ``k == 1``, then thresholded: voxels below ``thres``
    become 0, voxels at or above it become 1. ``thres == 0`` selects an
    automatic threshold of mean + 2*std over the smoothed volume's non-zero
    voxels (evaluated after masking, selected before masking).
    """
    smoothed = ndimage.gaussian_filter(volumeMR, sigma=(1.3, 1.3, 1))
    nonzero = smoothed != 0
    if k == 1:
        smoothed = smoothed * maskImg[:, :, :]
    if thres == 0:
        # Automatic threshold from the voxels that were non-zero pre-mask.
        thres = np.mean(smoothed[nonzero]) + 2 * np.std(smoothed[nonzero])
    smoothed[smoothed < thres] = 0
    smoothed[smoothed >= thres] = 1
    return smoothed
def incidenceMap(path_listInc, path_listMR, path_listAnno, araDataTemplate, incidenceMask, thres, outfile, labels):
    """Determine which atlas regions are affected by the stroke mask and how
    strongly, and write per-region statistics plus label volumes to ``outfile``.

    Outputs: ``*Anno_mask.nii.gz`` (annotation restricted to the stroke),
    ``affectedRegions.nii.gz`` (affected labels, right hemisphere offset by
    2000), ``affectedRegions.txt`` (per-region affected percentage) and
    ``labelCount.mat``.

    Parameters
    ----------
    path_listInc, path_listMR, path_listAnno : list of str
        Incidence, brain-extracted MR, and registered annotation files; only
        index 0 of each list is processed.
    araDataTemplate : str
        Path of the atlas annotation volume.
    incidenceMask : str
        Path of the subject's stroke mask.
    thres : int
        Threshold forwarded to thresholding() (0 = automatic).
    outfile : str
        Output directory.
    labels : str
        Path of the .mat file containing the 'ABALabelIDs' lookup table.
    """
    araDataTemplate = nii.load(araDataTemplate)
    # np.asanyarray(img.dataobj) replaces the deprecated get_data(), which was
    # removed in nibabel >= 5 (same convention as incidenceMap2 elsewhere).
    realAraImg = np.asanyarray(araDataTemplate.dataobj)
    coloredAraLabels = np.zeros([np.size(realAraImg, 0), np.size(realAraImg, 1), np.size(realAraImg, 2)])
    matFile = sc.loadmat(labels)
    labMat = matFile['ABALabelIDs']

    # Binarize the stroke mask.
    maskData = nii.load(incidenceMask)
    maskImg = np.asanyarray(maskData.dataobj)
    oneValues = maskImg > 0.0
    maskImg[oneValues] = 1.0

    fileIndex = 0
    # get warped annos of the current mr
    dataAnno = nii.load(path_listAnno[fileIndex])
    volumeAnno = np.round(np.asanyarray(dataAnno.dataobj))
    dataMR = nii.load(path_listInc[fileIndex])
    volumeMR = np.asanyarray(dataMR.dataobj)

    # Binarized stroke volume (smoothed, masked) restricted to the annotation.
    strokeVolume = thresholding(volumeMR, maskImg, thres, 1)
    fValues_Anno = volumeAnno * strokeVolume
    scaledNiiData = nii.Nifti1Image(fValues_Anno, dataAnno.affine)
    hdrIn = scaledNiiData.header
    hdrIn.set_xyzt_units('mm')
    output_file = os.path.join(outfile, os.path.basename(path_listMR[fileIndex]).split('.')[0] + 'Anno_mask.nii.gz')
    nii.save(scaledNiiData, output_file)

    # Unique affected label IDs (drop background / negative values).
    ref_Image = fValues_Anno
    fValues_Anno = np.unique(fValues_Anno)
    nullValues = np.argwhere(fValues_Anno <= 0.0)
    fValues_Anno = np.delete(fValues_Anno, nullValues)

    # Percentage of each region's volume that lies inside the stroke.
    regionAffectPercent = np.zeros(np.size(fValues_Anno))
    for i in range(np.size(fValues_Anno)):
        regionAffectPercent[i] = (np.sum(ref_Image == fValues_Anno[i]) / np.sum(volumeAnno == fValues_Anno[i])) * 100

    labCounterList = np.isin(labMat[:, 0], fValues_Anno)
    labMat = labMat[labCounterList, 0]
    labCounterColor = np.isin(realAraImg, fValues_Anno)
    coloredAraLabels[labCounterColor] = realAraImg[labCounterColor]
    # Offset the right hemisphere (upper half along x) by 2000 so both
    # hemispheres stay distinguishable; pure offset voxels (== 2000) were
    # background and are cleared again.
    xdim = np.size(coloredAraLabels, 0)
    coloredAraLabels[int(xdim / 2):xdim, :, :] = coloredAraLabels[int(xdim / 2):xdim, :, :] + 2000
    coloredAraLabels[coloredAraLabels == 2000] = 0

    scaledNiiData = nii.Nifti1Image(coloredAraLabels, araDataTemplate.affine)
    hdrIn = scaledNiiData.header
    hdrIn.set_xyzt_units('mm')
    output_file = os.path.join(outfile, 'affectedRegions.nii.gz')
    nii.save(scaledNiiData, output_file)

    # Stroke volume calculation (voxel count * voxel volume from the affine diagonal).
    betMask = nii.load(os.path.join(outfile, os.path.basename(path_listInc[fileIndex]).split('.')[0] + '_mask.nii.gz'))
    betMaskImg = np.asanyarray(betMask.dataobj)
    oneValues = betMaskImg > 0.0
    betMaskImg[oneValues] = 1.0
    strokeVolumeInCubicMM = np.sum(maskImg * (dataMR.affine[0, 0] * dataMR.affine[1, 1] * dataMR.affine[2, 2]))
    brainVolumeInCubicMM = np.sum(betMaskImg * (dataMR.affine[0, 0] * dataMR.affine[1, 1] * dataMR.affine[2, 2]))

    # BUGFIX: both file handles were previously left open (the reader forever,
    # the writer until close, leaking on any exception in between).
    with open(os.path.abspath(os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/ARA_changedAnnotatiosn2DTI.txt') as labelFile:
        lines = labelFile.readlines()
    with open(os.path.join(outfile, 'affectedRegions.txt'), 'w') as o:
        o.write("Stroke: %0.2f %% - Stroke Volume: %0.2f mm^3\n" % (
            ((strokeVolumeInCubicMM / brainVolumeInCubicMM) * 100), strokeVolumeInCubicMM,))
        matIndex = 0
        labelNamesAffected = ["" for x in range(np.size(fValues_Anno))]
        labelNames = ["" for x in range(np.size(lines))]
        for i in range(len(lines)):
            labelNames[i] = lines[i].split('\t')[1]
            if np.isin(int(lines[i].split('\t')[0]), labMat):
                # NOTE(review): regionAffectPercent is ordered by sorted label
                # ID; this pairing assumes the label file is also sorted by ID
                # -- TODO confirm.
                o.write(lines[i][:-1] + "\t %0.2f %%\n" % regionAffectPercent[matIndex])
                labelNamesAffected[matIndex] = lines[i].split('\t')[1]
                matIndex = matIndex + 1

    labMat = np.stack((labMat, regionAffectPercent))
    matFile['ABALabelIDs'] = labMat
    matFile['ABANames'] = labelNamesAffected
    matFile['ABAlabels'] = labelNames
    matFile['volumePer'] = (strokeVolumeInCubicMM / brainVolumeInCubicMM) * 100
    matFile['volumeMM'] = strokeVolumeInCubicMM
    sc.savemat(os.path.join(outfile, 'labelCount.mat'), matFile)
def findIncData(path):
    """Return all '*IncidenceData.nii.gz' files one directory level below path."""
    return glob.glob(path + '*/*IncidenceData.nii.gz')
def findBETData(path):
    """Return all brain-extracted '*Bet.nii.gz' files one level below path."""
    return glob.glob(path + '*/*Bet.nii.gz')
def findRegisteredData(path):
    """Return all registered '*_Template.nii.gz' files below path."""
    return glob.glob(path + '*/*_Template.nii.gz', recursive=True)
def findRegisteredAnno(path):
    """Return all registered annotation files ('*_Anno.nii.gz') below path."""
    return glob.glob(path + '*/*_Anno.nii.gz', recursive=True)
if __name__ == "__main__":
    # Command-line front end: derive the affected atlas regions and the stroke
    # volume for one subject's T2w folder.
    import argparse
    parser = argparse.ArgumentParser(description='Calculate incidence sizes of regions. You do not need to enter single files, but the path to the .../T2w folder')
    requiredNamed = parser.add_argument_group('Required named arguments')
    # NOTE(review): -i is only grouped as "required", not declared
    # required=True; when it is omitted, inputFolder stays None and the
    # os.path.exists() call below raises -- TODO confirm intended behavior.
    requiredNamed.add_argument('-i', '--inputFolder', help='.../T2w')
    parser.add_argument('-t', '--threshold', help='Threshold for stroke values', nargs='?', type=int,
                        default=0)
    parser.add_argument('-a', '--allenBrain_anno', help='File: Annotations of Allen Brain', nargs='?', type=str,
                        default=os.path.abspath(
                            os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/average_template_50.nii.gz')
    inputFolder = None
    allenBrain_template = None
    allenBrain_anno = None
    outfile = None
    args = parser.parse_args()

    if args.inputFolder is not None:
        inputFolder = args.inputFolder
        # Results are written next to the input data.
        outfile = args.inputFolder
    if not os.path.exists(inputFolder):
        sys.exit("Error: '%s' is not an existing directory." % (inputFolder,))
    if args.allenBrain_anno is not None:
        allenBrain_anno = args.allenBrain_anno
    if not os.path.exists(allenBrain_anno):
        sys.exit("Error: '%s' is not an existing directory." % (allenBrain_anno,))

    thres = args.threshold
    # Fixed library resources resolved two levels above the working directory.
    labels = os.path.abspath(os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/ABALabelsIDchanged.mat'
    araDataTemplate = os.path.abspath(
        os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/annotation_50CHANGEDanno.nii.gz'
    if len(glob.glob(inputFolder + '/*Stroke_mask.nii.gz')) > 0:
        incidenceMask = glob.glob(inputFolder + '/*Stroke_mask.nii.gz')[0]
    else:
        sys.exit("Error: '%s' has no affected or masked regions." % (inputFolder,))

    path = os.path.join(inputFolder)
    regMR_list = findBETData(path)
    regInc_list = findIncData(path)
    regANNO_list = findRegisteredAnno(path)

    print("'%i' folder will be proccessed..." % (len(regMR_list),))
    # One annotation per MR file is mandatory for the region statistics.
    if not len(regANNO_list) == len(regMR_list):
        sys.exit("Error: For one or more annotations no corresponding MR file is defined in '%s'." % (inputFolder,))
    incidenceMap(regMR_list, regInc_list, regANNO_list, araDataTemplate, incidenceMask, thres, outfile, labels)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.1_T2Processing/sijbersSNR.py | .py | 1,797 | 62 | """"" Sijbers's method
sijbers2007automatic,
title={Automatic estimation of the noise variance from the histogram of a magnetic resonance image},
author={Sijbers, Jan and Poot, Dirk and den Dekker, Arnold J and Pintjens, Wouter},
journal={Physics in medicine and biology},
volume={52},
number={5},
pages={1335},
year={2007},
publisher={IOP Publishing} """
from math import *
import numpy as np
import matplotlib.pyplot as plt
import scipy.optimize
def calcSNR(img, show, fac):
    """Estimate the noise level and an SNR map of an MR magnitude image with
    Sijbers's method: maximum-likelihood fit of the noise variance to the
    background part of the intensity histogram.

    Parameters
    ----------
    img : ndarray
        2D or 3D magnitude image.
    show : int
        If > 0, display the resulting SNR map with matplotlib.
    fac : int
        Multiplier for the number of histogram bins.

    Returns
    -------
    snrMap : ndarray
        Per-voxel SNR estimate.
    estStd : ndarray
        Fitted noise standard deviation on the image scale (1-element array,
        scaled by max intensity / 10).
    estStdNorm : ndarray
        Fitted value on the normalized [0, 1] scale (1-element array from fmin).
    """
    # Normalize input dataset and plot histogram
    # img = np.fliplr(img)
    img = img.astype(int)
    maxi = img.max()
    # BUGFIX: np.ndarray.flatten() takes an order string, not an integer --
    # flatten(2) raises a TypeError on current NumPy. Traversal order does not
    # affect the histogram (brummerSNR already uses the default form).
    imgFlat = img.flatten()
    imgNorm = imgFlat / maxi
    bins = ceil(sqrt(imgNorm.size)) * fac
    binCount, binLoc = np.histogram(imgNorm, int(bins))
    # Histogram mode = initial noise guess; intensities below twice the mode
    # are treated as pure background for the likelihood fit.
    estStd = np.argmax(binCount)
    fc = binLoc[2 * estStd]
    [n, l] = np.histogram(imgNorm[imgNorm <= fc], int(bins))
    Nk = np.sum(n)
    K = bins
    mlfunc = lambda x: maxLikelihood(x, Nk, K, l, n)
    sigma0 = binLoc[estStd]
    out = scipy.optimize.fmin(func=mlfunc, x0=sigma0, disp=False)
    estStdNorm = out
    estStd = (out * maxi) / 10
    snrMap = np.sqrt(abs(np.square(img) - (np.square(estStd)))) / estStd
    if show > 0:
        if len(img.shape) == 2:
            figSijbers = plt.figure(3)
            plt.imshow(snrMap)
            plt.show()
        elif len(img.shape) == 3:
            figSijbers = plt.figure(3)
            plt.imshow(snrMap[:, :, int(np.ceil(len(img.shape) / 2))])
            plt.show()
    return snrMap, estStd, estStdNorm
def maxLikelihood(x, Nk, K, l, n):
    """Negative log-likelihood of histogram counts under a Rayleigh noise model.

    x is the candidate noise sigma, Nk the total background count, K the number
    of bins, l the bin edges (length K+1) and n the per-bin counts. Minimized
    by scipy.optimize.fmin in calcSNR.
    """
    two_sigma_sq = 2 * x ** 2
    total_mass = np.exp(-l[0] ** 2 / two_sigma_sq) - np.exp(-l[K] ** 2 / two_sigma_sq)
    bin_mass = np.exp(-l[0:K - 1] ** 2 / two_sigma_sq) - np.exp(-l[1:K] ** 2. / two_sigma_sq)
    return Nk * np.log(total_mass) - np.sum(n[1:K] * np.log(bin_mass))
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.1_T2Processing/getSNR.py | .py | 2,537 | 88 | '''
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
'''
import os
import changSNR as ch
import brummerSNR as bm
import sijbersSNR as sj
import numpy as np
import glob
import nibabel as nii
def snrCalclualtor(input_file):
    """Estimate the SNR of one NIfTI volume slice by slice with three noise
    estimators (Chang, Brummer, Sijbers) and write the means to 'snr.txt'
    next to the input file.

    Parameters
    ----------
    input_file : str
        Path of the NIfTI volume (x, y, slices).
    """
    data = nii.load(input_file)
    # np.asanyarray(dataobj) replaces the deprecated get_data(), which was
    # removed in nibabel >= 5.
    imgData = np.asanyarray(data.dataobj)
    # nx = imgData.shape[0] # Images size in x - direction
    # ny = imgData.shape[1] # Images size in y - direction
    ns = imgData.shape[2]  # Number of slices
    noiseChSNR = np.zeros(ns)
    noiseBmSNR = np.zeros(ns)
    noiseSjSNR = np.zeros(ns)
    imgData = np.ndarray.astype(imgData, 'float64')
    for slc in range(ns):
        # Print % of progress
        print('Slice: ' + str(slc + 1))
        # Current 2D slice handed to each estimator ('slice' would shadow the builtin).
        cur_slice = imgData[:, :, slc]
        curSnrCHMap, estStdChang, estStdChangNorm = ch.calcSNR(cur_slice, 0, 1)
        curSnrBMMap, estStdBrummer, estStdBrummerNorm = bm.calcSNR(cur_slice, 0, 1)
        curSnrSJMap, estStdSijbers, estStdSijbersNorm = sj.calcSNR(cur_slice, 0, 1)
        noiseChSNR[slc] = estStdChang
        noiseBmSNR[slc] = estStdBrummer
        noiseSjSNR[slc] = estStdSijbers
    # SNR in dB: mean signal over mean slice-wise noise estimate.
    snrCh = 20 * np.log10(np.mean(imgData) / np.mean(noiseChSNR))
    snrBrum = 20 * np.log10(np.mean(imgData) / np.mean(noiseBmSNR))
    snrSij = 20 * np.log10(np.mean(imgData) / np.mean(noiseSjSNR))
    # BUGFIX: the result file used to be opened before any computation and was
    # never closed on error; the context manager releases the handle reliably
    # and avoids leaving an empty snr.txt behind when loading fails.
    with open(os.path.join(os.path.dirname(input_file), 'snr.txt'), 'w') as fileSNR:
        fileSNR.write("Mean of Chang: %0.3f dB \n" % snrCh)
        fileSNR.write("Mean of Brummer: %0.3f dB\n" % snrBrum)
        fileSNR.write("Mean of Sijbers: %0.3f dB\n" % snrSij)
def findRegisteredData(path, studyPrefix):
    """Return all '*1.nii.gz' files inside <path>/<studyPrefix>*/T2w/."""
    pattern = path + '/' + studyPrefix + '*/T2w/*1.nii.gz'
    return glob.glob(pattern, recursive=True)
if __name__ == "__main__":
    import argparse

    # Command-line front end: locate every registered T2w volume of a study
    # and write an snr.txt next to each one.
    parser = argparse.ArgumentParser(description='Calculate SNR')
    requiredNamed = parser.add_argument_group('Required named arguments')
    # NOTE(review): the arguments are grouped as "required" but not declared
    # required=True; missing -i/-s yields None and findRegisteredData would
    # fail on it -- TODO confirm intended behavior.
    requiredNamed.add_argument('-i', '--inputData', help='Folder of all files of one study')
    requiredNamed.add_argument('-s', '--studyPrefix', help='First letter of files in input data')
    args = parser.parse_args()

    pathData = args.inputData
    studyPrefix = args.studyPrefix
    listMr = findRegisteredData(pathData, studyPrefix)
    for i in listMr:
        print(i)
        snrCalclualtor(i)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/helper_tools/DistributeStrokeMasks.py | .py | 2,937 | 64 | import os
import glob
import argparse
def main(inputPath):
    """Propagate each manually drawn stroke mask to the other timepoints of
    the same subject via NiftyReg resampling.

    For every '*Stroke_mask.nii.gz' found under <inputPath>/**/anat, the mask
    is first resampled into incidence space (inverse matrix), then warped into
    every other timepoint that does not yet have a stroke mask. Missing
    prerequisite files are appended to missing_files_log.txt in inputPath.

    Parameters
    ----------
    inputPath : str
        Root of the processed-data tree (subject/timepoint/anat layout).
    """
    log_file_path = os.path.join(inputPath, "missing_files_log.txt")
    SearchPath = os.path.join(inputPath, "**", "anat", "*Stroke_mask.nii.gz")
    List_of_Stroke_rois = glob.glob(SearchPath, recursive=True)
    print(List_of_Stroke_rois)
    for ss in List_of_Stroke_rois:
        # Path layout: .../<Subject>/<timepoint>/<modality>/<mask file>
        tempSplit = ss.split(os.sep)
        print(tempSplit)
        modality = tempSplit[-2]
        timepoint = tempSplit[-3]
        Subject = tempSplit[-4]
        try:
            IndicdencPath = glob.glob(os.path.join(os.path.dirname(ss), "*IncidenceData.nii.gz"))[0]
            TransMatInv = glob.glob(os.path.join(os.path.dirname(ss), "*MatrixInv.txt"))[0]
        except IndexError:
            with open(log_file_path, "a") as log_file:
                log_file.write(f"TransMatInv or IncidenceData not found for: {ss}\n")
            continue
        OutputStrokeIncidence = os.path.join(os.path.dirname(ss), Subject + "_" + timepoint + "_StrokeM_IncidenceSpace.nii.gz")
        SubjectPath = os.path.join(inputPath, Subject)
        List_of_all_tp = glob.glob(os.path.join(SubjectPath, "*"))
        # NOTE: paths are not shell-quoted; this fails on paths with spaces.
        command1 = f"reg_resample -ref {IndicdencPath} -flo {ss} -inter 0 -trans {TransMatInv} -res {OutputStrokeIncidence}"
        os.system(command1)
        for tp in List_of_all_tp:
            # BUGFIX: `tp` is a full path while `timepoint` is a bare directory
            # name, so the original comparison `tp != timepoint` was always
            # true and re-processed the source timepoint as well. Compare the
            # directory name instead.
            if os.path.basename(tp) != timepoint:
                anat_path_for_tp_Affine = os.path.join(tp, "anat", "*MatrixAff.txt")
                anat_path_for_tp_Bspline = os.path.join(tp, "anat", "*MatrixBspline.nii")
                anat_path_for_tp_BetFile = os.path.join(tp, "anat", "*BiasBet.nii.gz")
                if glob.glob(anat_path_for_tp_Affine):
                    MatrixAff = glob.glob(anat_path_for_tp_Affine)[0]
                else:
                    with open(log_file_path, "a") as log_file:
                        log_file.write(f"Affine matrix file not found for: {tp}\n")
                    continue
                # BUGFIX: missing Bspline/Bet files used to raise an unhandled
                # IndexError; log and skip, consistent with the affine case.
                try:
                    MatrixBspline = glob.glob(anat_path_for_tp_Bspline)[0]
                    BetFile = glob.glob(anat_path_for_tp_BetFile)[0]
                except IndexError:
                    with open(log_file_path, "a") as log_file:
                        log_file.write(f"Bspline matrix or BiasBet file not found for: {tp}\n")
                    continue
                OutputStroke = os.path.join(tp, "anat", Subject + "_" + timepoint + "_" + "Stroke_mask.nii.gz")
                # Only warp when the target timepoint has no stroke mask yet.
                CheckIfStroke = glob.glob(os.path.join(tp, "anat", "*Stroke_mask.nii.gz"))
                if not CheckIfStroke:
                    command2 = f"reg_resample -ref {BetFile} -flo {OutputStrokeIncidence} -inter 0 -trans {MatrixBspline} -res {OutputStroke}"
                    os.system(command2)
                else:
                    continue
    print("done")
if __name__ == "__main__":
    # Command-line entry point: a single required input path forwarded to main().
    arg_parser = argparse.ArgumentParser(description="Process stroke mask files.")
    arg_parser.add_argument("-i", "--input", type=str, help="Input path", required=True)
    parsed_args = arg_parser.parse_args()
    main(parsed_args.input)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/helper_tools/reset_naming.py | .py | 2,944 | 79 | import os
import argparse
import glob
import re
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description=(
            "This script prepares Bruker raw data before running "
            "1_PV2NIfTiConverter/pv_conv2Nifti.py. "
            "The raw data must follow the structure: projectfolder/subjects/ses/data. "
            "It automatically scans for all 'subject' files within the input folder and "
            "performs two modifications: "
            "1) Removes the first underscore '_' in the SUBJECT_id and SUBJECT_study_name lines; "
            "2) Replaces the word 'baseline' (case-insensitive) with 'PT0' in the study name. "
            "A corrected version of each 'subject' file is written back to disk. "
            "Example usage: "
            "python conv2Nifti_auto.py -i /Volumes/Desktop/MRI/raw_data"
        )
    )
    parser.add_argument(
        '-i', '--input',
        required=True,
        help='Path to the parent project folder containing the dataset, e.g. raw_data',
        type=str
    )
    args = parser.parse_args()

    # Get list of raw data folders or files in input directory
    # (directories plus zip/PvDataset archives; currently only printed below).
    list_of_raw = sorted([
        d for d in os.listdir(args.input)
        if os.path.isdir(os.path.join(args.input, d))
        or (os.path.isfile(os.path.join(args.input, d)) and (('zip' in d) or ('PvDataset' in d)))
    ])

    # Recursively find all files named "subject"
    subject_files = glob.glob(os.path.join(args.input, "**", "subject"), recursive=True)
    print(subject_files)
    print(list_of_raw)

    # ParaVision parameter keys whose *following* line carries the value to rewrite.
    subject_id = "##$SUBJECT_id="
    session_id = "##$SUBJECT_study_name="

    for subject_file in subject_files:
        if not os.path.exists(subject_file):
            continue

        with open(subject_file, 'r') as infile:
            lines = infile.readlines()

        modified = False
        for idx, line in enumerate(lines):
            # Modify both subject_id and study_name entries
            if subject_id in line or session_id in line:
                if idx + 1 < len(lines):
                    original_next = lines[idx + 1]

                    # 1) Remove the first underscore "_" in the following line
                    new_next = original_next.replace("_", "", 1)

                    # 2) For study_name only: replace "baseline" with "PT0"
                    #    (the value is stored in angle brackets, e.g. <baseline1>).
                    if session_id in line:
                        new_next = re.sub(r'<\s*baseline[^\s>]*\s*>', 'PT0', new_next, flags=re.IGNORECASE)

                    # Apply only if something changed
                    if new_next != original_next:
                        lines[idx + 1] = new_next
                        modified = True

        # Write back the modified content if changes were made
        if modified:
            with open(subject_file, 'w') as outfile:
                outfile.writelines(lines)
            print(f"Modified: {subject_file}")

    print("Success")
| Python |
3D | Aswendt-Lab/AIDAmri | bin/helper_tools/MRI_files_summarizer.py | .py | 1,999 | 54 | import os
import argparse
import glob
import pandas as pd
if __name__ == "__main__":
    # Build a CSV overview of all brkraw-produced NIfTI files below the input
    # folder, with subject / session / run / modality parsed from the filename.
    parser = argparse.ArgumentParser(description='This script processes NIfTI files in a directory. It extracts relevant parts of the file name and creates a DataFrame.')
    parser.add_argument('-i', '--input', required=True, help='Path to the parent project folder of the dataset, e.g., raw_data', type=str)
    parser.add_argument('-o', '--output', required=True, help='Path where the output CSV file should be saved', type=str)
    args = parser.parse_args()

    # Every NIfTI inside any "brkraw" directory under the input path.
    nifti_files = glob.glob(os.path.join(args.input, "**", "brkraw", "*.nii.gz"), recursive=True)

    records = []
    for file_path in nifti_files:
        tokens = os.path.basename(file_path).split("_")
        record = {
            "FileAddress": file_path,
            "Modality": None,
            "TimePoint": None,
            "SubjectID": None,
            "RunNumber": None
        }
        # BIDS-like tokens: sub-<id>_ses-<tp>_run-<nr>_<modality>.nii.gz
        for token in tokens:
            if "sub-" in token:
                record['SubjectID'] = token.replace("sub-", "")
            elif "ses-" in token:
                record['TimePoint'] = token.replace("ses-", "")
            elif "run-" in token:
                record['RunNumber'] = token.replace("run-", "")
            elif ".nii.gz" in token:
                record['Modality'] = token.replace(".nii.gz", "")
        records.append(record)

    # One row per unique file description.
    df = pd.DataFrame(records).drop_duplicates()
    output_file_path = os.path.join(args.output, "MRI_files_overview.csv")
    df.to_csv(output_file_path, index=False)
    print("Data processing complete. The DataFrame has been saved to", output_file_path)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/helper_tools/ReorientBatch.py | .py | 13,057 | 430 | #!/usr/bin/env python3
import os
import sys
import subprocess
import shutil
import traceback
import argparse
import numpy as np
import nibabel as nib
from nibabel import orientations as nio
from typing import Optional
"""
Batch reorientation of NIfTI files within a BIDS-like directory structure.
Usage:
python ReorientBatch.py <INPUT_ROOT> <OUTPUT_ROOT>
- All .nii and .nii.gz files under INPUT_ROOT are processed.
- Non-NIfTI files are copied unchanged into the mirrored structure.
- The relative directory structure is preserved under OUTPUT_ROOT.
- Images are reoriented to a user-specified target orientation
(default: LIP for AIDAmri).
- Only the orientation (affine and axis order) is changed; all other
header information is preserved where possible.
"""
def strip_nii_ext(p: str) -> str:
    """Drop a trailing '.nii.gz' or '.nii' extension, if present."""
    for ext in (".nii.gz", ".nii"):
        if p.endswith(ext):
            return p[:-len(ext)]
    return p
def reorient_bvecs_fsl(src_bvec: str, dst_bvec: str, ornt_trans: np.ndarray):
    """
    Reorient FSL-style bvecs (3xN) using nibabel orientation transform.
    ornt_trans is expected to map from NEW (target) axes to OLD (current) axes,
    i.e. the inverse transform relative to the data reorientation.
    """
    vectors = np.loadtxt(src_bvec)
    if vectors.ndim != 2:
        raise ValueError(f"Unexpected bvec ndim: {vectors.ndim}")
    # Accept both 3xN (FSL convention) and Nx3 layouts.
    if vectors.shape[0] != 3:
        if vectors.shape[1] != 3:
            raise ValueError(f"Unexpected bvec shape: {vectors.shape}")
        vectors = vectors.T
    reoriented = np.zeros_like(vectors)
    for new_ax in range(3):
        source_axis = int(ornt_trans[new_ax, 0])
        sign = float(ornt_trans[new_ax, 1])
        reoriented[new_ax, :] = sign * vectors[source_axis, :]
    np.savetxt(dst_bvec, reoriented, fmt="%.16f")
def copy_sidecars_if_present(src_base: str, dst_base: str, *, reorient: bool, ornt_trans=None, log=None):
    """
    Copy bval and (optionally reorient) bvec sidecars from src_base to dst_base.
    """
    src_bval, src_bvec = src_base + ".bval", src_base + ".bvec"
    dst_bval, dst_bvec = dst_base + ".bval", dst_base + ".bvec"

    bval_exists = os.path.exists(src_bval)
    bvec_exists = os.path.exists(src_bvec)
    if not bval_exists and not bvec_exists:
        # Nothing to do -- and no destination directory is created either.
        return

    os.makedirs(os.path.dirname(dst_base), exist_ok=True)

    if bval_exists:
        shutil.copy2(src_bval, dst_bval)
        if log:
            log(" Sidecar: copied .bval")

    if bvec_exists:
        if not reorient:
            shutil.copy2(src_bvec, dst_bvec)
            if log:
                log(" Sidecar: copied .bvec (no reorientation)")
        else:
            if ornt_trans is None:
                raise ValueError("ornt_trans is required to reorient bvecs")
            reorient_bvecs_fsl(src_bvec, dst_bvec, ornt_trans)
            if log:
                log(" Sidecar: reoriented .bvec")
def ask_target_orientation_with_default(non_interactive: bool, target_cli: Optional[str]) -> str:
    """Resolve the target orientation.

    A CLI-provided value always wins (uppercased); otherwise the user is asked
    interactively, defaulting to the AIDAmri orientation LIP. Raises ValueError
    when non_interactive is set without a -t value.
    """
    if non_interactive:
        if not target_cli:
            raise ValueError("-n was set but -t is missing.")
        return target_cli.upper()
    # An explicit -t skips the interactive questions entirely.
    if target_cli:
        return target_cli.upper()
    # First question: accept the AIDAmri default (LIP)?
    while True:
        reply = input(
            "\nDo you want to reorient all images to the AIDAmri default orientation LIP "
            "(Left-Inferior-Posterior)? [Y/n]: "
        ).strip().lower()
        if reply in ("", "y", "yes"):
            return "LIP"
        if reply in ("n", "no"):
            break
        print("Please answer 'y' (yes) or 'n' (no).")
    # Otherwise keep asking for an explicit three-letter code.
    while True:
        candidate = input(
            "Please enter the target orientation "
            "(three letters from {L, R, A, P, S, I}): "
        ).strip().upper()
        if len(candidate) == 3 and set(candidate).issubset(set("LRAPSI")):
            return candidate
        print("Invalid orientation.")
def get_current_orientation(img: nib.Nifti1Image):
    """
    Determine current orientation from the 'active' transform:
    prefer sform if sform_code > 0, else qform if qform_code > 0,
    else fall back to img.affine.
    Returns (ori_str, src_label, affine_used)
    """
    header = img.header
    sform_code = int(header["sform_code"])
    qform_code = int(header["qform_code"])
    if sform_code > 0:
        affine = img.get_sform()
        label = f"sform (code={sform_code})"
    elif qform_code > 0:
        affine = img.get_qform()
        label = f"qform (code={qform_code})"
    else:
        affine = img.affine
        label = "img.affine (fallback)"
    orientation = "".join(nio.aff2axcodes(affine))
    return orientation, label, affine
def reorient_image(img: nib.Nifti1Image, target_ori: str, current_ori: str, base_affine: np.ndarray, log=None):
    """Reorient an image's voxel data and affine to ``target_ori``.

    Parameters
    ----------
    img : nib.Nifti1Image
        The loaded source image.
    target_ori, current_ori : str
        Three-letter axis codes (e.g. 'LIP', 'RAS').
    base_affine : np.ndarray
        The 'active' affine selected by get_current_orientation().
    log : callable or None
        Optional line logger.

    Returns: (img_new, did_reorient: bool, ornt_trans_for_bvecs_or_None)
    """
    # Work on a float32, C-contiguous copy of the voxel data.
    data = img.get_fdata(dtype=np.float32)
    data = np.ascontiguousarray(data, dtype=np.float32)
    if log:
        log(f" Target orientation: {target_ori}")
    if current_ori == target_ori:
        if log:
            log(" Current orientation already matches target. No reorientation is applied.")
        # Already in target orientation: return a copy with the active affine
        # written into both sform and qform (codes promoted to 1 if unset).
        hdr_copy = img.header.copy()
        img_copy = nib.Nifti1Image(data, base_affine, header=hdr_copy)
        s_code = int(img.header["sform_code"])
        q_code = int(img.header["qform_code"])
        img_copy.set_sform(base_affine, code=(s_code if s_code > 0 else 1))
        img_copy.set_qform(base_affine, code=(q_code if q_code > 0 else 1))
        return img_copy, False, None
    curr_ornt = nio.axcodes2ornt(tuple(current_ori))
    target_ornt = nio.axcodes2ornt(tuple(target_ori))
    # data transform: current -> target
    ornt_trans = nio.ornt_transform(curr_ornt, target_ornt)
    # bvec transform: target -> current (inverse), as in your single-file script
    ornt_trans_bvec = nio.ornt_transform(target_ornt, curr_ornt)
    if log:
        log(" Applying orientation transform to image data...")
    data_reoriented = nio.apply_orientation(data, ornt_trans)
    if log:
        log(" Updating affine for new orientation...")
    # Compose the affine so world coordinates stay unchanged after the
    # voxel-axis permutation/flip.
    inv_aff = nio.inv_ornt_aff(ornt_trans, img.shape[:3])
    new_affine = base_affine @ inv_aff
    hdr = img.header.copy()
    hdr.set_data_dtype(np.float32)
    # robustness after permutation/flip:
    hdr.set_data_shape(data_reoriented.shape)
    # optional but recommended: reset freq/phase/slice info (can become wrong after permutation)
    try:
        hdr["dim_info"] = 0
    except Exception:
        pass
    img_new = nib.Nifti1Image(data_reoriented, new_affine, header=hdr)
    img_new.set_sform(new_affine, code=1)
    img_new.set_qform(new_affine, code=1)
    if log:
        new_str = "".join(nio.aff2axcodes(new_affine))
        log(f" New orientation (from affine): {new_str}")
    return img_new, True, ornt_trans_bvec
def reorient_single_image(src_path: str, dst_path: str, target_ori: str, log):
    """Reorient one NIfTI file to target_ori and carry its .bval/.bvec along."""
    log("")
    log("Processing file:")
    log(f" Source: {src_path}")
    log(f" Destination: {dst_path}")

    source_img = nib.load(src_path)
    current_ori, ori_src, active_affine = get_current_orientation(source_img)
    log(f" Current orientation (from {ori_src}): {current_ori}")
    log(f" Target orientation: {target_ori}")

    result_img, did_reorient, bvec_ornt = reorient_image(
        source_img, target_ori, current_ori, active_affine, log=log
    )
    nib.save(result_img, dst_path)
    log(" Saved NIfTI.")

    # Sidecars (.bval/.bvec) handled here
    copy_sidecars_if_present(
        strip_nii_ext(src_path),
        strip_nii_ext(dst_path),
        reorient=did_reorient,
        ornt_trans=bvec_ornt,
        log=log,
    )
def copy_non_nifti(src_path: str, dst_path: str, log):
    """Copy a non-NIfTI file unchanged (metadata preserved via copy2)."""
    for message in ("", "Copying non-NIfTI file:", f" Source: {src_path}", f" Destination: {dst_path}"):
        log(message)
    shutil.copy2(src_path, dst_path)
def validate_target_ori(ori: str) -> str:
    """Normalize a three-letter orientation code and verify it names each
    anatomical axis (L/R, A/P, S/I) exactly once. Raises ValueError otherwise."""
    ori = ori.strip().upper()
    if len(ori) != 3 or not set(ori).issubset(set("LRAPSI")):
        raise ValueError(f"Invalid orientation: {ori}")
    axis_of = {"L": "x", "R": "x", "A": "y", "P": "y", "S": "z", "I": "z"}
    covered = {axis_of[letter] for letter in ori}
    if covered != {"x", "y", "z"}:
        raise ValueError(f"Invalid axis combination in orientation: {ori}")
    return ori
def parse_args():
    """Build and evaluate the command-line interface of the batch reorienter.

    Flags: -i input root (required), -o output root (required), -t target
    orientation (optional), -l log filename, -n non-interactive mode.
    """
    parser = argparse.ArgumentParser(
        description="Batch reorientation of NIfTI files (AIDAmri compatible)."
    )
    parser.add_argument("-i", required=True, metavar="INPUT_ROOT",
                        help="Input root directory (BIDS-like proc_data)")
    parser.add_argument("-o", required=True, metavar="OUTPUT_ROOT",
                        help="Output root directory for reoriented data")
    parser.add_argument("-t", metavar="ORI", default=None,
                        help="Target orientation (e.g. LIP). If omitted, ask interactively.")
    parser.add_argument("-l", default="reorient_log.txt", metavar="LOGFILE",
                        help="Log filename (written into output root)")
    parser.add_argument("-n", action="store_true",
                        help="Non-interactive mode (requires -t)")
    return parser.parse_args()
def main():
    """CLI entry point: walk the input tree, reorient every NIfTI file to the
    requested orientation, copy every other file verbatim, and write a
    detailed log file plus a console progress bar.

    Sidecar .bval/.bvec files are skipped here because they are handled
    together with their image by reorient_single_image().
    """
    args = parse_args()
    src_root = args.i
    dst_root = args.o
    LOG_FILENAME = args.l
    target_cli = args.t
    non_interactive = args.n
    if not os.path.isdir(src_root):
        print(f"Source root folder not found:\n{src_root}")
        sys.exit(1)
    os.makedirs(dst_root, exist_ok=True)
    log_path = os.path.join(dst_root, LOG_FILENAME)
    # Resolve the target orientation (CLI value or interactive prompt),
    # then validate it before touching any data.
    target_ori = ask_target_orientation_with_default(
        non_interactive=non_interactive,
        target_cli=target_cli
    )
    target_ori = validate_target_ori(target_ori)
    # --- Count total files for progress bar ---
    total_files = 0
    for root, dirs, files in os.walk(src_root):
        total_files += len(files)
    if total_files == 0:
        print("No files found in source root. Nothing to do.")
        return
    any_errors = False
    n_total_nifti = 0
    n_processed_nifti = 0
    n_copied_non_nifti = 0
    processed_files = 0
    bar_width = 40
    with open(log_path, "w") as log_fh:
        def log(msg: str):
            # Per-file details go to the log file only; stdout is reserved
            # for the progress bar and error summaries.
            log_fh.write(msg + "\n")
        log(f"Target orientation for all images: {target_ori}")
        log(f"Source root: {src_root}")
        log(f"Destination root: {dst_root}")
        log(f"Total files (NIfTI + non-NIfTI): {total_files}")
        for root, dirs, files in os.walk(src_root):
            for fname in files:
                src_path = os.path.join(root, fname)
                # Mirror the source tree layout under dst_root.
                rel_path = os.path.relpath(src_path, src_root)
                dst_path = os.path.join(dst_root, rel_path)
                os.makedirs(os.path.dirname(dst_path), exist_ok=True)
                is_nifti = fname.endswith(".nii") or fname.endswith(".nii.gz")
                is_sidecar = fname.endswith(".bvec") or fname.endswith(".bval")
                try:
                    if is_nifti:
                        n_total_nifti += 1
                        reorient_single_image(src_path, dst_path, target_ori, log)
                        n_processed_nifti += 1
                    elif is_sidecar:
                        # skip: handled with the image
                        pass
                    else:
                        copy_non_nifti(src_path, dst_path, log)
                        n_copied_non_nifti += 1
                except Exception as e:
                    # A single broken file must not abort the batch: record it
                    # and continue with the remaining files.
                    any_errors = True
                    print(f"\nError processing file: {rel_path}: {e.__class__.__name__}: {e}", file=sys.stderr)
                    log("")
                    log("ERROR during processing file:")
                    log(f"  Source: {src_path}")
                    log(f"  Destination: {dst_path}")
                    log(f"  Exception: {e.__class__.__name__}: {e}")
                    log("  Traceback:")
                    log(traceback.format_exc())
                finally:
                    # Progress bar is updated for every file, error or not.
                    processed_files += 1
                    progress = processed_files / total_files
                    filled = int(bar_width * progress)
                    bar = "#" * filled + "-" * (bar_width - filled)
                    print(
                        f"\rProgress: [{bar}] {progress * 100:6.2f}% ({processed_files}/{total_files})",
                        end="",
                        flush=True,
                    )
        print()
    print("\nBatch processing completed.")
    print(f"Total NIfTI files found: {n_total_nifti}")
    print(f"Total NIfTI files processed: {n_processed_nifti}")
    print(f"Non-NIfTI files copied: {n_copied_non_nifti}")
    print(f"Reoriented data written to: {dst_root}")
    print(f"Log file written to: {log_path}")
    if any_errors:
        print("One or more errors occurred. See the log file for details.")
# Standard script entry point: run the batch reorientation CLI.
if __name__ == "__main__":
    main()
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.2_DTIConnectivity/dsi_tools.py | .py | 17,129 | 426 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
Documentation preface, added 23/05/09 by Victor Vera Frazao:
This document is currently in revision for improvement and fixing.
Specifically changes are made to allow compatibility of the pipeline with Ubuntu 18.04 systems
and Ubuntu 18.04 Docker base images, respectively, as well as adapting to apparent changes of
DSI-Studio that were applied since the AIDAmri v.1.1 release. As to date the DSI-Studio version
used is the 2022/08/03 Ubuntu 18.04 release.
All changes and additional documentations within this script carry a signature with the writer's
initials (e.g. VVF for Victor Vera Frazao) and the date at application, denoted after '//' at
the end of the comment line. If code segments need clearance the comment line will be prefaced
by '#?'. Changes are prefaced by '#>' and other comments are prefaced ordinarily
by '#'.
"""
from __future__ import print_function
import os
import re
import sys
import time
import glob
import nibabel as nii
import numpy as np
import nipype.interfaces.fsl as fsl
import shutil
import subprocess
import pandas as pd
def scaleBy10(input_path, inv):
    """Scale a NIfTI voxel grid by a factor of 10 (inv=False) or undo that
    scaling (inv=True) — a common trick to make rodent data usable with
    human-calibrated FSL tools.

    inv=False writes a temporary 'fslScaleTemp.nii.gz' next to the input and
    returns its path; inv=True overwrites *input_path* in place and returns
    it. Any other value for *inv* aborts the program.
    """
    data = nii.load(input_path)
    imgTemp = data.get_fdata()
    if inv is False:
        scale = np.eye(4) * 10
        scale[3][3] = 1
        # NOTE(review): '*' is an ELEMENT-WISE product on numpy arrays, so any
        # nonzero off-diagonal affine terms (rotation/translation) are zeroed
        # rather than scaled — confirm this is intended vs. 'affine @ scale'.
        scaledNiiData = nii.Nifti1Image(imgTemp, data.affine * scale)
        # overwrite old nifti
        fslPath = os.path.join(os.path.dirname(input_path), 'fslScaleTemp.nii.gz')
        nii.save(scaledNiiData, fslPath)
        return fslPath
    elif inv is True:
        scale = np.eye(4) / 10
        scale[3][3] = 1
        unscaledNiiData = nii.Nifti1Image(imgTemp, data.affine * scale)
        hdrOut = unscaledNiiData.header
        hdrOut.set_xyzt_units('mm')
        nii.save(unscaledNiiData, input_path)
        return input_path
    else:
        # Reached for any non-bool value (identity checks above are strict).
        sys.exit("Error: inv - parameter should be a boolean.")
def findSlicesData(path, pre):
    """Return the NIfTI (.nii.gz) files in *path* whose names start with
    *pre*, sorted alphabetically so the slice order is deterministic."""
    pattern = path + '/' + pre + '*.nii.gz'
    return sorted(glob.iglob(pattern, recursive=True))
def fsl_SeparateSliceMoCo(input_file, par_folder):
    """Slice-wise motion correction: scale the volume by 10, split it into
    z-slices, run FSL MCFLIRT on each slice, re-merge, and unscale.

    Parameters
    ----------
    input_file : str
        Path to the 4D NIfTI volume to correct.
    par_folder : str
        Folder receiving the per-slice MCFLIRT outputs and motion parameters.

    Returns
    -------
    str
        Path of the merged, motion-corrected '<name>_mcf.nii.gz'.
    """
    # scale Nifti data by factor 10
    dataName = os.path.basename(input_file).split('.')[0]
    fslPath = scaleBy10(input_file, inv=False)
    # Remember the working dir: fsl.Split writes into the CWD, so we chdir
    # into a scratch folder and restore afterwards.
    aidamri_dir = os.getcwd()
    temp_dir = os.path.join(os.path.dirname(input_file), "temp")
    if not os.path.exists(temp_dir):
        os.mkdir(temp_dir)
    os.chdir(temp_dir)
    mySplit = fsl.Split(in_file=fslPath, dimension='z', out_base_name=dataName)
    mySplit.run()
    os.remove(fslPath)
    # separate ref and src volume in slices
    sliceFiles = findSlicesData(os.getcwd(), dataName)
    # start to correct motions slice by slice
    for i in range(len(sliceFiles)):
        slc = sliceFiles[i]
        output_file = os.path.join(par_folder, os.path.basename(slc))
        myMCFLIRT = fsl.preprocess.MCFLIRT(in_file=slc, out_file=output_file, save_plots=True, terminal_output='none')
        myMCFLIRT.run()
        os.remove(slc)
    # merge slices to a single volume
    mcf_sliceFiles = findSlicesData(par_folder, dataName)
    output_file = os.path.join(os.path.dirname(input_file),
                               os.path.basename(input_file).split('.')[0]) + '_mcf.nii.gz'
    myMerge = fsl.Merge(in_files=mcf_sliceFiles, dimension='z', merged_file=output_file)
    myMerge.run()
    for slc in mcf_sliceFiles:
        os.remove(slc)
    # unscale result data by factor 10**(-1)
    output_file = scaleBy10(output_file, inv=True)
    os.chdir(aidamri_dir)
    return output_file
def make_dir(dir_out, dir_sub):
    """
    Creates directory *dir_out*/*dir_sub* (if absent) and returns its
    normalized path; aborts the program if the directory still does not
    exist afterwards.

    The 1-second sleep presumably gives slow/network file systems time to
    expose the new directory before it is re-checked — TODO confirm.
    """
    dir_out = os.path.normpath(os.path.join(dir_out, dir_sub))
    if not os.path.exists(dir_out):
        os.mkdir(dir_out)
        time.sleep(1.0)
    if not os.path.exists(dir_out):
        sys.exit("Could not create directory \"%s\"" % (dir_out,))
    return dir_out
def move_files(dir_in, dir_out, pattern):
    """Move all files matching ``dir_in + pattern`` into *dir_out*
    (copy first, then delete the source file).

    BUGFIX: the original re-joined the already-complete glob paths with
    ``os.path.join(dir_in, ...)`` / ``os.path.join(dir_out, ...)``. That only
    worked by accident for absolute inputs (join returns the absolute second
    argument unchanged) and produced wrong paths for relative ones. The glob
    results are used directly now; net behavior for the pipeline's absolute
    paths is unchanged.

    Parameters
    ----------
    dir_in : str
        Source directory (pattern is appended verbatim, e.g. '/*.txt').
    dir_out : str
        Existing destination directory.
    pattern : str
        Glob suffix including the leading path separator.
    """
    # Sleeps retained from the original: presumably to let slow/network
    # file systems settle before and after globbing — TODO confirm.
    time.sleep(1.0)
    file_list = glob.glob(dir_in + pattern)
    file_list.sort()
    time.sleep(1.0)
    for src in file_list:  # copy files from input to output directory
        shutil.copy(src, dir_out)
    for src in file_list:  # remove the originals to complete the move
        if os.path.isfile(src):
            os.remove(src)
def connectivity(dsi_studio, dir_in, dir_seeds, dir_out, dir_con):
    """
    Calculates connectivity data (types: pass and end).

    Runs DSI Studio's 'ana' action on the first fib/trk pair found in
    *dir_in* against the seed/atlas volume *dir_seeds*, once per
    connectivity value ('qa' and 'count'), then moves the resulting
    .txt/.mat matrices into *dir_out*/*dir_con*.
    """
    if not os.path.exists(dir_in):
        sys.exit("Input directory \"%s\" does not exist." % (dir_in,))
    dir_seeds = os.path.normpath(os.path.join(dir_in, dir_seeds))
    if not os.path.exists(dir_seeds):
        sys.exit("Seeds directory \"%s\" does not exist." % (dir_seeds,))
    if not os.path.exists(dir_out):
        sys.exit("Output directory \"%s\" does not exist." % (dir_out,))
    dir_con = make_dir(dir_out, dir_con)
    # change to input directory
    os.chdir(os.path.dirname(dir_in))
    cmd_ana = r'%s --action=%s --source=%s --tract=%s --connectivity=%s --connectivity_value=%s --connectivity_type=%s'
    # The first *.fib.gz / *.trk.gz found in dir_in are used.
    filename = glob.glob(dir_in+'/*fib.gz')[0]
    file_trk = glob.glob(dir_in+'/*trk.gz')[0]
    file_seeds = dir_seeds
    # Performs analysis on every connectivity value within the list ('qa' may not be necessary; might be removed in the future.)
    connect_vals = ['qa', 'count']
    for i in connect_vals:
        parameters = (dsi_studio, 'ana', filename, file_trk, file_seeds, i, 'pass,end')
        os.system(cmd_ana % parameters)
    #move_files(dir_in, dir_con, re.escape(filename) + '\.' + re.escape(pre_seeds) + '.*(?:\.pass\.|\.end\.)')
    move_files(os.path.dirname(file_trk), dir_con, '/*.txt')
    move_files(os.path.dirname(file_trk), dir_con, '/*.mat')
def mapsgen(dsi_studio, dir_in, dir_msk, b_table, pattern_in, pattern_fib):
    """
    FUNCTION DEPRECATED. REMOVAL PENDING.

    Legacy batch variant of srcgen(): for every file in *dir_in* matching
    *pattern_in* it creates a DSI-Studio .src.gz, reconstructs a fib file
    using the matching brain mask from *dir_msk*, and finally exports an FA
    map for every file matching *pattern_fib*.
    """
    pre_msk = 'bet.bin.'
    ext_src = '.src.gz'
    ext_nii = '.nii.gz'
    if not os.path.exists(dir_in):
        sys.exit("Input directory \"%s\" does not exist." % (dir_in,))
    dir_msk = os.path.normpath(os.path.join(dir_in, dir_msk))
    if not os.path.exists(dir_msk):
        sys.exit("Masks directory \"%s\" does not exist." % (dir_msk,))
    b_table = os.path.join(dir_in, b_table)
    if not os.path.isfile(b_table):
        sys.exit("File \"%s\" does not exist." % (b_table,))
    # change to input directory
    os.chdir(dir_in)
    cmd_src = r'%s --action=%s --source=%s --output=%s --b_table=%s'
    # method: 0:DSI, 1:DTI, 4:GQI 7:QSDR, param0: 1.25 (in vivo) diffusion sampling lenth ratio for GQI and QSDR reconstruction, --thread_count: number of multi-threads used to conduct reconstruction
    cmd_rec = r'%s --action=%s --source=%s --mask=%s --method=%d --param0=%s --thread_count=%d --check_btable=%d'
    file_list = [x for x in os.listdir(dir_in) if os.path.isfile(os.path.join(dir_in, x)) and re.match(pattern_in, x)]
    file_list.sort()
    for index, filename in enumerate(file_list):
        # create source files
        pos = filename.rfind('_')
        file_src = filename[:pos] + ext_src
        parameters = (dsi_studio, 'src', filename, file_src, b_table)
        # NOTE(review): subprocess.call with a single command string and no
        # shell=True treats the whole string as the executable name on POSIX
        # — presumably this only ever ran on Windows; verify before reuse.
        subprocess.call(cmd_src % parameters)
        # create fib files
        file_msk = os.path.join(dir_msk, pre_msk + filename[:pos] + ext_nii)
        parameters = (dsi_studio, 'rec', file_src, file_msk, 3, '1.25', 2, 0)
        subprocess.call(cmd_rec % parameters)
    # extracts maps: 2 ways:
    cmd_exp = r'%s --action=%s --source=%s --export=%s'
    file_list = [x for x in os.listdir(dir_in) if os.path.isfile(os.path.join(dir_in, x)) and re.match(pattern_fib, x)]
    file_list.sort()
    for index, filename in enumerate(file_list):
        #file_fib = os.path.join(dir_in, filename)
        #parameters = (dsi_studio, 'exp', file_fib, 'fa')
        parameters = (dsi_studio, 'exp', filename, 'fa')
        print("%d of %d:" % (index + 1, len(file_list)), cmd_exp % parameters)
        subprocess.call(cmd_exp % parameters)
def srcgen(dsi_studio, dir_in, dir_msk, dir_out, b_table):
    """
    Sources and creates fib files; diffusivity and anisotropy metrics
    (fa/md/ad/rd) are exported from the data.

    Workflow: validate inputs -> build .src.gz -> DTI reconstruction with the
    given brain mask -> export the four scalar maps -> collect them (plus qa)
    in the 'DSI_studio' folder and additionally store an x/y-flipped copy of
    each metric map.
    """
    dir_src = r'src'
    dir_fib = r'fib_map'
    dir_qa = r'DSI_studio'
    dir_con = r'connectivity'
    ext_src = '.src.gz'
    # Validate all required inputs up front.
    if not os.path.exists(dir_in):
        sys.exit("Input directory \"%s\" does not exist." % (dir_in,))
    dir_msk = os.path.normpath(os.path.join(dir_in, dir_msk))
    if not os.path.exists(dir_msk):
        sys.exit("Masks directory \"%s\" does not exist." % (dir_msk,))
    if not os.path.exists(dir_out):
        sys.exit("Output directory \"%s\" does not exist." % (dir_out,))
    b_table = os.path.join(dir_in, b_table)
    if not os.path.isfile(b_table):
        sys.exit("File \"%s\" does not exist." % (b_table,))
    dir_src = make_dir(os.path.dirname(dir_out), dir_src)
    dir_fib = make_dir(os.path.dirname(dir_out), dir_fib)
    dir_qa = make_dir(os.path.dirname(dir_out), dir_qa)
    # DSI Studio is driven from the parent of the input directory.
    os.chdir(os.path.dirname(dir_in))
    cmd_src = r'%s --action=%s --source=%s --output=%s --b_table=%s'
    # method 1 = DTI; param0 = 1.25 (in vivo) diffusion sampling length ratio.
    # The --cmd step flips the b-table in y and z: adjust to your data and
    # check that the fiber tracking result is anatomically meaningful.
    cmd_rec = r'%s --action=%s --source=%s --mask=%s --method=%d --param0=%s --check_btable=%d --half_sphere=%d --cmd=%s'
    # create the source file
    filename = os.path.basename(dir_in)
    pos = filename.rfind('.')
    file_src = os.path.join(dir_src, filename[:pos] + ext_src)
    os.system(cmd_src % (dsi_studio, 'src', filename, file_src, b_table))
    # reconstruct the fib file
    file_msk = dir_msk
    os.system(cmd_rec % (dsi_studio, 'rec', file_src, file_msk, 1, '1.25', 0, 1,
                         '"[Step T2][B-table][flip by]+[Step T2][B-table][flip bz]"'))
    # move fib to its own folder
    move_files(dir_src, dir_fib, '/*fib.gz')
    # export the scalar maps from the fib file
    cmd_exp = r'%s --action=%s --source=%s --export=%s'
    for metric in ('fa', 'md', 'ad', 'rd'):
        file_fib = glob.glob(dir_fib + '/*fib.gz')[0]
        os.system(cmd_exp % (dsi_studio, 'exp', file_fib, metric))
    # collect the exported maps in the DSI_studio folder
    for suffix in ('qa', 'fa', 'md', 'ad', 'rd'):
        move_files(dir_fib, dir_qa, '/*' + suffix + '.nii.gz')
    # store an additional copy of each metric map flipped along x and y
    for metric in ('fa', 'md', 'ad', 'rd'):
        metric_img = nii.load(glob.glob(os.path.join(dir_qa, "*" + metric + ".nii*"))[0])
        flipped_data = np.flip(np.flip(metric_img.get_fdata(), 0), 1)
        nii.save(nii.Nifti1Image(flipped_data, metric_img.affine),
                 os.path.join(dir_qa, metric + "_flipped.nii.gz"))
def tracking(dsi_studio, dir_in):
    """
    Performs seed-based fiber-tracking with DSI Studio on the first *.fib.gz
    file found in *dir_in*, writing the tracts next to it as <fib>.trk.gz.
    The tracking configuration is passed as a single DSI-Studio parameter_id
    (obtainable from the DSI-Studio GUI console).
    """
    if not os.path.exists(dir_in):
        sys.exit("Input directory \"%s\" does not exist." % (dir_in,))
    # DSI Studio is invoked from the parent of the input directory.
    os.chdir(os.path.dirname(dir_in))
    cmd_trk = r'%s --action=%s --source=%s --output=%s --parameter_id=%s'
    fib_file = glob.glob(dir_in + '/*fib.gz')[0]
    trk_file = os.path.join(dir_in, fib_file + '.trk.gz')
    os.system(cmd_trk % (dsi_studio, 'trk', fib_file, trk_file,
                         '0AD7A33C9A99193FE8D5123F0AD7233CCDCCCC3D9A99993EbF04240420FdcaCDCC4C3Ec'))
def merge_bval_bvec_to_btable(folder_path):
    """Merge the .bval and .bvec files found in *folder_path* into a single
    DSI-Studio b-table text file (one row per gradient: bval bx by bz).

    Parameters
    ----------
    folder_path : str
        Directory expected to contain one .bval and one .bvec file.

    Returns
    -------
    str or bool
        Path of the written '<name>_btable.txt' on success, False otherwise.
    """
    files = os.listdir(folder_path)
    # Find bval and bvec files in the folder
    bval_file = None
    bvec_file = None
    for file in files:
        if file.endswith(".bval"):
            bval_file = os.path.join(folder_path, file)
        elif file.endswith(".bvec"):
            bvec_file = os.path.join(folder_path, file)
    # BUGFIX: the original condition was inverted ("is not None or is not
    # None"), printing the error when both files WERE present and falling
    # through on missing files, which then crashed on basename(None).
    if bval_file is None or bvec_file is None:
        print("Both bval and bvec files must be present in the folder.")
        return False
    fileName = os.path.basename(bvec_file).replace(".bvec", "")
    try:
        # bvals: single whitespace-separated row -> one float column.
        with open(bval_file, 'r') as bval_fh:
            bval_values = bval_fh.read().strip().split()
        bval_table = pd.DataFrame({'bval': bval_values}).astype(float)
        # bvecs: three rows (x/y/z) of n gradients -> transpose to n x 3.
        with open(bvec_file, 'r') as bvec_fh:
            bvec_values = [line.strip().split() for line in bvec_fh.readlines()]
        bvec_table = pd.DataFrame(
            bvec_values,
            columns=[f'bvec_{i+1}' for i in range(len(bvec_values[0]))]
        ).T
        # Merge to n x 4 (bval, bx, by, bz) and write as tab-separated text.
        merged_table = np.hstack((bval_table, bvec_table)).astype(float)
        final_path = os.path.join(folder_path, fileName + "_btable.txt")
        np.savetxt(final_path, merged_table, fmt='%f', delimiter='\t')
        print(f"Merged table saved to {final_path}")
        return final_path
    except FileNotFoundError:
        print("One or both of the bval and bvec files were not found.")
        return False
    except Exception as e:
        print(f"An error occurred: {str(e)}")
        return False
# No CLI here: this module is a library driven by dsi_main.py.
if __name__ == '__main__':
    pass
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.2_DTIConnectivity/plotDTI_mat.py | .py | 3,742 | 127 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import matplotlib.pyplot as plt
import os, sys
import numpy as np
import scipy.io as sio
np.seterr(divide='ignore', invalid='ignore')
import seaborn as sns
def intersect_mtlb(a, b):
    """MATLAB-style intersect: return the indices into *a* (first occurrence
    of each unique value) whose values also occur in *b*."""
    uniq_a, idx_a = np.unique(a, return_index=True)
    uniq_b, _ = np.unique(b, return_index=True)
    merged = np.sort(np.concatenate((uniq_a, uniq_b)))
    # A value common to both inputs appears twice in a row after sorting.
    common = merged[:-1][merged[1:] == merged[:-1]]
    return idx_a[np.isin(uniq_a, common)]
def getRefLabels(prefix):
    """Return the reference region-label column for the data type encoded in
    *prefix*: split rsfMRI, plain rsfMRI, or (default) DTI annotation tables
    shipped in the repository's lib/ folder two levels above the CWD."""
    if "rsfMRISplit" in prefix:
        dataTemplate = np.loadtxt(
            os.path.abspath(os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/annoVolume+2000_rsfMRI.nii.txt',
            dtype=str)
        refLabels = dataTemplate[:, 1]
    elif "rsfMRI" in prefix:
        dataTemplate = np.loadtxt(
            os.path.abspath(os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/annoVolume.nii.txt', dtype=str)
        refLabels = dataTemplate[:, 1]
    else:
        # Any other prefix is treated as DTI data.
        dataTemplate = np.loadtxt(
            os.path.abspath(os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/ARA_changedAnnotatiosn2DTI.txt',
            dtype=str)
        refLabels = dataTemplate[:, 1]
    return refLabels
def matrixMaker(inputPath):
    """Load a DSI-Studio connectivity .mat file (either the '.pass.' or the
    '.end.' variant), add its counterpart matrix, embed the result into the
    full reference-label grid, and display it as a heatmap.

    Returns the summed (pass + end) connectivity matrix.
    """
    # Read pass and end
    if "pass" in inputPath:
        matData = sio.loadmat(inputPath)
        connectivityPass = matData['connectivity']
        matData = sio.loadmat(inputPath.replace('.pass.', '.end.'))
        connectivityEnd = matData['connectivity']
        connectivity = connectivityEnd + connectivityPass
    elif "end" in inputPath:
        matData = sio.loadmat(inputPath)
        connectivityEnd = matData['connectivity']
        matData = sio.loadmat(inputPath.replace('.end.', '.pass.'))
        connectivityPass = matData['connectivity']
        connectivity = connectivityEnd + connectivityPass
    else:
        sys.exit("Error: %s path do not conatain path or end data." % (inputPath,))
    # Region names are stored as an int-coded char array, newline-separated.
    labels = matData['name']
    tempLabels = ""
    labels = tempLabels.join([chr(a) for a in labels[0]]).split('\n')
    # Get reference Labels
    refLabels = getRefLabels(os.path.basename(inputPath))
    # Intersection between ref and cur labels
    ia = intersect_mtlb(refLabels, labels)
    missingLabels = np.setdiff1d(np.arange(1, len(refLabels)), ia)
    # Adapt labels to pyplot
    labels = [s.replace('_', ' ') for s in labels]
    # Embed the measured matrix at the positions of the known labels; rows
    # and columns of regions absent from this dataset stay zero.
    zeroVec = np.zeros([len(refLabels), len(refLabels)])
    zeroVec[np.ix_(np.sort(ia), np.sort(ia))] = connectivity
    connectivityFilled = zeroVec
    fig, ax = plt.subplots()
    sns.heatmap(connectivityFilled)
    ax.axis('tight')
    # Set labels
    ax.set(xticks=np.arange(len(labels)), xticklabels=labels,
           yticks=np.arange(len(labels)), yticklabels=labels)
    # Rotate the tick labels and set their alignment.
    plt.setp(ax.get_xticklabels(), rotation=45, ha="right",
             rotation_mode="anchor")
    ax.set_title("DTI conncectivity between ARA regions")
    plt.show()
    return connectivity
if __name__ == "__main__":
    import argparse
    # CLI: visualize a DSI-Studio connectivity .mat file as a heatmap.
    parser = argparse.ArgumentParser(description='Visualize mat file of DTI ')
    requiredNamed = parser.add_argument_group('required named arguments')
    requiredNamed.add_argument('-i', '--inputMat', help='file name: DTI mat-File')
    args = parser.parse_args()
    inputPath = None
    # BUGFIX: the condition was a duplicated self-conjunction
    # ("args.inputMat is not None and args.inputMat is not None").
    if args.inputMat is not None:
        inputPath = args.inputMat
        if not os.path.exists(inputPath):
            # BUGFIX: the original formatted the message with the
            # non-existent attribute args.inputPath, which raised
            # AttributeError instead of printing the intended error.
            sys.exit("Error: %s path is not an existing directory." % (inputPath,))
    # generate Matrix
    matrixMaker(inputPath)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/3.2_DTIConnectivity/dsi_main.py | .py | 5,797 | 128 | #!/opt/env/bin/python
"""
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
from __future__ import print_function
import argparse
import os
import glob
import dsi_tools
import shutil
# Pipeline driver: motion-correct a raw DTI volume, reconstruct it with DSI
# Studio, run fiber tracking, compute connectivity matrices for the available
# seed masks, and apply optional legacy-naming conversions.
if __name__ == '__main__':
    # default dsi studio directory
    f = open(os.path.join(os.getcwd(), "dsi_studioPath.txt"), "r")
    dsi_studio = f.read().split("\n")[0]
    f.close()
    # default b-table in input directory
    b_table = os.path.abspath(os.path.join(os.getcwd(), os.pardir,os.pardir)) + '/lib/DTI_Jones30.txt'
    # default connectivity directory relative to input directory
    dir_con = r'connectivity'
    # Defining CLI flags
    parser = argparse.ArgumentParser(description='Get connectivity of DTI dataset')
    requiredNamed = parser.add_argument_group('Required named arguments')
    requiredNamed.add_argument('-i',
                               '--file_in',
                               help = 'path to the raw NIfTI DTI file (ends with *dwi.nii.gz)',
                               required=True
                               )
    parser.add_argument('-b',
                        '--b_table',
                        default='auto', # Default to 'auto' for automatic selection
                        help='Specify the b-table source: "auto" (will look for bvec and bval, create the btable. If val or vec can not be found, it uses the Jones30 file)'
                        )
    parser.add_argument('-o',
                        '--optional',
                        nargs = '*',
                        help = 'Optional arguments.\n\t"fa0": Renames the FA metric data to former DSI naming convention.\n\t"nii_gz": Converts ROI labeling relating files from .nii to .nii.gz format to match former data structures.'
                        )
    args = parser.parse_args()
    # Determine the btable source based on the -b option
    if args.b_table.lower() == 'auto':
        # Use the merge_bval_bvec_to_btable function with folder_path as file_in
        b_table = dsi_tools.merge_bval_bvec_to_btable(os.path.dirname(args.file_in))
        if b_table is False:
            # Use the default "Jones30" btable
            b_table = os.path.abspath(os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/DTI_Jones30.txt'
    # Preparing directories
    file_cur = os.path.dirname(args.file_in)
    dsi_path = os.path.join(file_cur, 'DSI_studio')
    mcf_path = os.path.join(file_cur, 'mcf_Folder')
    dir_mask = glob.glob(os.path.join(dsi_path, '*BetMask_scaled.nii'))
    if not dir_mask:
        dir_mask = glob.glob(os.path.join(dsi_path, '*BetMask_scaled.nii.gz')) # check for ending (either .nii or .nii.gz)
    dir_mask = dir_mask[0]
    dir_out = args.file_in
    # Start from a clean motion-correction scratch folder.
    if os.path.exists(mcf_path):
        shutil.rmtree(mcf_path)
    os.mkdir(mcf_path)
    # Slice-wise motion correction, then src/fib generation with DSI Studio.
    file_in = dsi_tools.fsl_SeparateSliceMoCo(args.file_in, mcf_path)
    dsi_tools.srcgen(dsi_studio, file_in, dir_mask, dir_out, b_table)
    file_in = os.path.join(file_cur,'fib_map')
    # Fiber tracking
    dir_out = os.path.dirname(args.file_in)
    dsi_tools.tracking(dsi_studio, file_in)
    # Calculating connectivity for every seed mask that exists in DSI_studio/
    suffixes = ['*StrokeMask_scaled.nii', '*parental_Mask_scaled.nii', '*Anno_scaled.nii', '*AnnoSplit_parental_scaled.nii']
    for f in suffixes:
        dir_seeds = glob.glob(os.path.join(file_cur, 'DSI_studio', f))
        if not dir_seeds:
            dir_seeds = glob.glob(os.path.join(file_cur, 'DSI_studio', f + '.gz')) # check for ending (either .nii or .nii.gz)
        if not dir_seeds:
            continue
        dir_seeds = dir_seeds[0]
        dsi_tools.connectivity(dsi_studio, file_in, dir_seeds, dir_out, dir_con)
    # rename files to reduce path length
    confiles = os.path.join(file_cur,dir_con)
    data_list = os.listdir(confiles)
    for filename in data_list:
        # Drop the long '<...>.src.gz.dti.fib.gz.trk.gz.' prefix DSI Studio
        # prepends to each connectivity output.
        splittedName = filename.split('.src.gz.dti.fib.gz.trk.gz.')
        if len(splittedName)>1:
            newName = splittedName[1]
            newName = os.path.join(confiles,newName)
            if os.path.isfile(newName):
                os.remove(newName)
            oldName = os.path.join(confiles,filename)
            os.rename(oldName,newName)
    # Including optional arguments regarding deprecated terminology
    if args.optional is not None:
        file_list = os.listdir(dsi_path)
        for f in file_list:
            # fa0 was a former term used in earlier DSI-studio versions; the '0' in fa0 referred to the first fiber track. However, DTI can only result in one track, therefore only one fractional anisotropy value per voxel is given, thus the collective values are referred to as fa. With the 'fa0' flag toggled on, the 'fa' data file is renamed to the former naming convention (fa0).
            if 'fa0' in [s.lower() for s in args.optional] and f.endswith('fa.nii.gz'):
                newName = f.split('fa.nii.gz')[0] + 'fa0.nii.gz'
                newName = os.path.join(dsi_path, newName)
                oldName = os.path.join(dsi_path, f)
                if os.path.isfile(newName):
                    os.remove(newName)
                os.rename(oldName, newName)
            # Due to changes in ROI annotations the corresponding files are saved as '.nii' files as opposed to '.nii.gz' files in earlier versions of DSI studio. With the 'nii_gz' flag toggled on, the '.nii' files are renamed to '.nii.gz'.
            if 'nii_gz' in args.optional and f.endswith('.nii'):
                newName = f + '.gz'
                newName = os.path.join(dsi_path, newName)
                oldName = os.path.join(dsi_path, f)
                if os.path.isfile(newName):
                    os.remove(newName)
                os.rename(oldName, newName)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/PV2NIfTiConverter/__init__.py | .py | 0 | 0 | null | Python |
3D | Aswendt-Lab/AIDAmri | bin/PV2NIfTiConverter/pv_parseBruker_md_np.py | .py | 11,946 | 345 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
from __future__ import print_function
import os,sys
import numpy as np
from dict2xml import createXML
# from string import split
def parsePV(filename):
    """
    Parser for Bruker ParaVision parameter files in JCAMP-DX format.

    Parameters
    ==========
    filename: 'acqp', 'method', 'd3proc', 'roi', 'visu_pars', etc.

    Returns a dict mapping parameter names to their raw string values; for
    'visu_pars' files studyname/expno/procno are added (derived from a path
    inside the file), for 'subject' files only the coil name is extracted.
    Returns an empty list if the file does not exist.
    """
    if not os.path.exists(filename):
        return []
    # Read file 'filename' -> list 'lines'
    f = open(filename, 'r')
    lines = f.readlines()
    f.close()
    # Dictionary for parameters
    params = {}
    # Get STUDYNAME, EXPNO, and PROCNO
    #if filename[-9:] == 'visu_pars':
    if 'visu_pars' in filename:
        # Line 7 holds a path whose components encode study/exp/proc numbers.
        tmp = lines[6].split('/')
        params['studyname'] = [[], tmp[-5]]
        params['expno'] = [[], tmp[-4]]
        params['procno'] = [[], tmp[-2]]
    # Remove comment lines
    remove = [] # Index list
    for index, line in enumerate(lines): # Find lines
        if line[0:2] == '$$':
            remove.append(index)
    for offset, index in enumerate(remove): # Remove lines
        del lines[index-offset]
    # Create list of LDR (Labelled Data Record) elements
    lines = ''.join(lines).split('\n##') # Join lines and split into LDRs
    #lines = map(rstrip, lines) # Remove trailing whitespace from each LDR
    lines[0] = lines[0].lstrip('##') # Remove leading '##' from first LDR
    # Combine LDR lines
    for index, line in enumerate(lines):
        lines[index] = ''.join(line.split('\n'))
    # Fill parameter dictionary
    if np.size(lines) == 1:
        sys.exit("Error: visu_pars is not readable")
    if 'subject' in filename:
        # Only the coil name is needed from the subject file.
        tmp = lines[32].split('#$Name,')
        params['coilname'] = tmp[1].split('#$Id')[0]
        return params
    for line in lines:
        # Each LDR is '<name>=<value>'; only '$'-prefixed names are kept.
        line = line.split('=', 1)
        if line[0][0] == '$':
            key = line[0].lstrip('$')
            dataset = line[1]
            params[key] = []
            pos = 0
            # '( d1,d2,... )' prefix = array dimensions of the value.
            if (len(dataset) > 4) and (dataset[0:2] == '( '):
                pos = dataset.find(' )', 2)
                if pos > 2:
                    pardim = [int(dim) for dim in dataset[2:pos].split(',')]
                    params[key].append(pardim)
                    params[key].append(dataset[pos+2:])
            if pos <= 2:
                # Scalar value: empty dimension list.
                params[key].append([])
                params[key].append(dataset)
    # Remove specific elements from parameter dictionary
    if '$VisuCoreDataMin' in params: del params['$VisuCoreDataMin']
    if '$VisuCoreDataMax' in params: del params['$VisuCoreDataMax']
    if '$VisuCoreDataOffs' in params: del params['$VisuCoreDataOffs']
    if '$VisuCoreDataSlope' in params: del params['$VisuCoreDataSlope']
    if '$VisuAcqImagePhaseEncDir' in params: del params['$VisuAcqImagePhaseEncDir']
    # Flatten [dims, value] pairs to the value string; '<...>' markers become
    # double quotes so the values are XML-friendly.
    for key in params.keys():
        pardim = params[key][0]
        parval = params[key][1]
        if (len(pardim) > 0) and (len(parval) > 0) and (parval[0] == '<'):
            params[key][1] = parval.replace('<', '"').replace('>', '"')
        elif (len(parval) > 0) and (parval[0] == '('):
            params[key][1] = parval.replace('<', '"').replace('>', '"')
        params[key] = params[key][1]
    return params
def getXML(filename, writeFile=False):
    """
    Writes header dictionary to xml format.

    Parameters:
    ==========
    filename: Bruker ParaVision '2dseq' file
    writeFile: Boolean, if 'False' return string containing xml-header, else save to file.
        Note: when writeFile is True the file 'scaninfo.xml' is written into
        the current working directory (not next to *filename*) and the
        function returns None.
    """
    path = os.path.abspath(os.path.dirname(filename))
    # Parse all parameter files (acqp/method live two levels above 2dseq).
    header_acqp = parsePV(os.path.join(path, '..', '..', 'acqp'))
    header_method = parsePV(os.path.join(path, '..', '..', 'method'))
    #header_d3proc = parsePV(os.path.join(path, 'd3proc')) # removed for PV6
    header_visu = parsePV(os.path.join(path, 'visu_pars'))
    header = {'Scaninfo': {}}
    header['Scaninfo']['acqp'] = header_acqp
    header['Scaninfo']['method'] = header_method
    #header['Scaninfo']['d3proc'] = header_d3proc # removed for PV6
    header['Scaninfo']['visu_pars'] = header_visu
    xml = createXML(header, '<?xml version="1.0"?>\n')
    if writeFile:
        f = open('scaninfo.xml', 'w')
        f.write(xml)
        f.close()
    else:
        return xml
def getNiftiHeader(params, sc=10):
    """
    Returns necessary header parameters for NIfTI generation.

    Parameters:
    ===========
    params: header dict returned from parsePV() for a 'visu_pars' file
    sc: scales pixel dimension (defaults to 10 for animal imaging)

    Returns a tuple (name, nX, nY, nZ, nT, dX, dY, dZ, dT, 0, 0, 0, dtype),
    or None if *params* is empty.
    """
    # List of 'VisuCoreSize' parameter strings
    if params == []:
        return
    CoreSize = str.split(params['VisuCoreSize'])
    if params['VisuCoreDimDesc'] == 'spectroscopic':
        # Spectroscopy data: only the first dimension is meaningful.
        print("spectroscopic")
        #quit(42)
        return params['VisuStudyDate'], int(CoreSize[0]), 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 8
    # Dimensions
    nX = int(CoreSize[0])
    nY = int(CoreSize[1])
    nZ = 1
    nT = 1
    # FrameGroup dimensions
    if 'VisuFGOrderDescDim' in params:
        if int(params['VisuFGOrderDescDim']) > 0:
            # 'VisuFGOrderDesc' is a list of '(count, ...)' tuples; the first
            # entry counts slices, further entries count repetitions.
            FGOrderDesc = params['VisuFGOrderDesc'][1:-1].split(') (')
            #FGOrderDesc = map(lambda item: item.split(', '), FGOrderDesc)
            FGOrderDesc = [item.split(', ') for item in FGOrderDesc]
            #frameDims = map(lambda item: int(item[0]), FGOrderDesc)
            frameDims = [int(item[0]) for item in FGOrderDesc]
            # Number of slices
            nZ = frameDims[0]
            if int(params['VisuFGOrderDescDim']) > 1:
                nT = frameDims[1]
            if int(params['VisuFGOrderDescDim']) > 2:
                nT *= frameDims[2]
    # Voxel dimensions
    extent = params['VisuCoreExtent'].split()
    dX = sc * float(extent[0]) / nX
    dY = sc * float(extent[1]) / nY
    VisuCoreSlicePacksSliceDist = params.get('VisuCoreSlicePacksSliceDist')
    print("VisuCoreSlicePacksSliceDist",VisuCoreSlicePacksSliceDist)
    print("VisuCoreFrameThickness", params['VisuCoreFrameThickness'])
    if VisuCoreSlicePacksSliceDist is None:
        dZ = sc * float(params['VisuCoreFrameThickness'])
    else:
        # Slice thickness inclusive gap (PV6)
        VisuCoreSlicePacksSliceDist=VisuCoreSlicePacksSliceDist.split()[0]
        print("VisuCoreSlicePacksSliceDist",VisuCoreSlicePacksSliceDist)
        dZ = sc * float(VisuCoreSlicePacksSliceDist)
    if 'VisuAcqRepetitionTime' in params:
        if (nT > 1) and (float(params['VisuAcqRepetitionTime']) > 0 ):
            dT = float(params['VisuAcqRepetitionTime']) / 1000
        else:
            dT=0
    else:
        dT = 0
    if int(params['VisuCoreDim']) == 3:
        # True 3D acquisition: the third core dimension is the slice count.
        nZ = int(CoreSize[2])
        nT = 1
        frameDims = None
        if 'VisuFGOrderDescDim' in params:
            if int(params['VisuFGOrderDescDim']) > 0:
                # NOTE(review): frameDims was just set to None above, so this
                # line raises TypeError for 3D data with frame groups — the
                # frameDims list computed earlier is probably what was meant.
                nT = frameDims[0]
        dZ = sc * float(extent[2]) / nZ
        if (nT > 1) and (float(params['VisuAcqRepetitionTime']) > 1 ):
            dT = float(params['VisuAcqRepetitionTime']) / 1000
        else:
            dT = 0
    # Map the Bruker word type to a numpy dtype name (default 4 is kept from
    # the original code for unrecognized word types).
    DT = 4
    if params['VisuCoreWordType'] == '_8BIT_UNSGN_INT': DT = 'int8'
    if params['VisuCoreWordType'] == '_16BIT_SGN_INT' : DT = 'int16'
    if params['VisuCoreWordType'] == '_32BIT_SGN_INT' : DT = 'int32'
    if params['VisuCoreWordType'] == '_32BIT_FLOAT' : DT = 'float32'
    tmp = params['studyname'] + '.' + params['expno'] + '.' + params['procno']
    return tmp, nX, nY, nZ, nT, dX, dY, dZ, dT, 0, 0, 0, DT
''' def getNiftiHeader_md(params, sc=10):
"""
Returns necessary header parameters for NIfTI generation ()
Parameters:
===========
filename: header returned from parser
sc: scales pixel dimension (defaults to 10 for animal imaging)
"""
# List of 'VisuCoreSize' parameter strings
global frameDims
CoreSize = str.split(params['VisuCoreSize'])
if params['VisuCoreDimDesc'] == 'spectroscopic':
return params['VisuStudyName'], int(CoreSize[0]), 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 8
# Dimensions
nX = int(CoreSize[0])
nY = int(CoreSize[1])
nZ = 1
nT = 1
# FrameGroup dimensions
if int(params['VisuFGOrderDescDim']) > 0:
FGOrderDesc = params['VisuFGOrderDesc'][1:-1].split(') (')
#FGOrderDesc = map(lambda item: item.split(', '), FGOrderDesc)
FGOrderDesc = [item.split(', ') for item in FGOrderDesc]
#frameDims = map(lambda item: int(item[0]), FGOrderDesc)
frameDims = [int(item[0]) for item in FGOrderDesc]
# Number of slices
nZ = frameDims[0]
if int(params['VisuFGOrderDescDim']) > 1:
nT = frameDims[1]
# Voxel dimensions
extent = params['VisuCoreExtent'].split()
dX = sc * float(extent[0]) / nX
dY = sc * float(extent[1]) / nY
VisuCoreSlicePacksSliceDist = params.get('VisuCoreSlicePacksSliceDist')
print("VisuCoreSlicePacksSliceDist",VisuCoreSlicePacksSliceDist)
if VisuCoreSlicePacksSliceDist is None:
dZ = sc * float(params['VisuCoreFrameThickness'])
else:
# Slice thickness inclusive gap (PV6)
dz = sc * float(VisuCoreSlicePacksSliceDist)
print("dz",dz)
if (nT > 1) and (float(params['VisuAcqRepetitionTime']) > 0 ):
dT = float(params['VisuAcqRepetitionTime']) / 1000
else:
dT = 0
if int(params['VisuCoreDim']) == 3:
nZ = int(CoreSize[2])
nT = 1
if int(params['VisuFGOrderDescDim']) > 0:
nT = frameDims[0]
dZ = sc * float(extent[2]) / nZ
if (nT > 1) and (float(params['VisuAcqRepetitionTime']) > 1 ):
dT = float(params['VisuAcqRepetitionTime']) / 1000
else:
dT = 0
CoreWordType = params['VisuCoreWordType']
if CoreWordType == '_8BIT_UNSGN_INT': DT = 'DT_UINT8' # 2: 'int8'
elif CoreWordType == '_16BIT_SGN_INT': DT = 'DT_INT16' # 4: 'int16'
elif CoreWordType == '_32BIT_SGN_INT': DT = 'DT_INT32' # 8: 'int32'
elif CoreWordType == '_32BIT_FLOAT': DT = 'DT_FLOAT32' # 16: 'float32'
else: DT = 4
tmp = params['studyname'] + '.' + params['expno'] + '.' + params['procno']
return (tmp, nX, nY, nZ, nT, dX, dY, dZ, dT, 0, 0, 0, DT)
'''
def getRotMatrix(filename):
    """
    Returns a 4x4 rotation/translation matrix for image registration.

    Parameters:
    ===========
    filename: visu_pars file to parse

    Returns:
    ========
    4x4 numpy array [R | t; 0 0 0 1], or a length-4 zero vector when
    'VisuCorePosition' is missing (behavior kept from the original).
    """
    params = parsePV(filename)
    if not 'VisuCorePosition' in params:
        return np.array([0.0, 0.0, 0.0, 0.0])
    # list comprehensions instead of map(): Python 3 'map' objects cannot be
    # sliced, so the original orientation[0:9] raised TypeError
    orientation = [float(v) for v in params['VisuCoreOrientation'].split()]
    position = [float(v) for v in params['VisuCorePosition'].split()]
    orientation = np.array(orientation[0:9]).reshape((3, 3))
    position = np.array(position[0:3]).reshape((3, 1))
    rotMatrix = np.append(orientation, position, axis=1)
    rotMatrix = np.append(rotMatrix, np.array([0.0, 0.0, 0.0, 1.0]).reshape(1, 4), axis=0)
    return rotMatrix
def writeRotMatrix(rotMatrix, filename):
    """Write a matrix to a text file, left-aligned with two decimals."""
    # 'with' closes the file even if savetxt raises
    with open(filename, 'w') as fid:
        np.savetxt(fid, rotMatrix, fmt='%-7.2f')
"""
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description='Parse Bruker parameter files')
parser.add_argument('file', type=str, help='name of parameter file (2dseq for all)')
parser.add_argument('-t', '--type', type=str, default = "xml", help='nifti, xml')
args = parser.parse_args()
if args.type == "nifti" and os.path.basename(args.file)=="visu_pars":
print(getNiftiHeader(args.file))
elif args.type == "mat" and os.path.basename(args.file)=="visu_pars":
print(getRotMatrix(args.file))
elif args.type == "xml":
print(getXML(args.file))
else:
print("Hmmm, works not that way ;)")
"""
| Python |
3D | Aswendt-Lab/AIDAmri | bin/PV2NIfTiConverter/P2_IDLt2_mapping.py | .py | 12,514 | 344 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import os
from math import *
from lmfit import Minimizer, Parameters
import matplotlib.pyplot as plt
import nibabel as nii
import numpy as np
import progressbar
from .ReferenceMethods import brummerSNR
plt.interactive(False)
def t2_monoexp3 (params, t, data):
    """
    Residual of a three-parameter mono-exponential decay with offset:

        S(t) = S0 * exp(-t / T2) + Y0

    Parameters:
    ===========
    params: mapping with entries 'S0', 'T2', 'Y0' (lmfit Parameters or dict)
    t: echo-time axis
    data: measured signal values

    Returns model(t) - data, the residual vector lmfit minimizes.
    """
    amplitude = params['S0']
    relaxation = params['T2']
    offset = params['Y0']
    predicted = offset + amplitude * np.exp(-t / relaxation)
    return predicted - data
def t2_monoexp2(params, t, data):
    """
    Residual of a two-parameter mono-exponential decay (no offset):

        S(t) = S0 * exp(-t / T2)

    Parameters:
    ===========
    params: mapping with entries 'S0' and 'T2' (lmfit Parameters or dict)
    t: echo-time axis
    data: measured signal values

    Returns model(t) - data, the residual vector lmfit minimizes.
    """
    amplitude = params['S0']
    relaxation = params['T2']
    predicted = amplitude * np.exp(-t / relaxation)
    return predicted - data
###############################################################################
# t2_fitmonoexp1
#
# Perform the data fitting using a mono-exponential model:
# S = Y0 + (S0 * EXP(-TE / T2))
###############################################################################
#def t2_fitmonoexp1 (T2, S0, Y0, T2bn, T2pe, img, snr, snrlim, nx, ny, slc, te, start, pinfo, FIXOFFSE):
#(slice, echoTime, model, curSnrMap, snrLim, slc)
def t2_fitmonoexp1(slice,te,snrMap,snrLim, model,uplim):
    """
    Fit every voxel of one slice with the two-parameter mono-exponential
    model S = S0 * exp(-TE / T2) via mpfitfun, skipping voxels whose mean
    SNR is below snrLim.

    Parameters:
    ===========
    slice: 3D array (y, x, echoes) of one slice over all echo times
    te: echo-time axis passed through to mpfitfun
    snrMap: per-voxel SNR map; voxels below snrLim keep T2 = S0 = 0
    snrLim: SNR threshold
    model: model selector string forwarded to mpfitfun ('T2_2p' here)
    uplim: upper bound for the fitted T2 value

    Returns dict with 'T2', 'S0' maps and the untouched 'SNR' map.
    """
    dims = slice.shape
    # NOTE(review): nx is taken from dims[1] but used as the FIRST index
    # below - correct only for square slices; verify for non-square data.
    nx = dims[1]
    ny = dims[0]
    # NOTE(review): dtype='int8' truncates fractional fit results and wraps
    # values above 127 (uplim defaults to 100, so barely fits) - TODO confirm
    # this is intended rather than a leftover from the IDL port.
    T2 = np.zeros([nx,ny],dtype='int8') #Temporary store T2 map
    S0 = np.zeros([nx,ny],dtype='int8') #Temporary store S0 map
    # // FITTING PROCEDURE //
    #bar = progressbar.ProgressBar()
    for i in range(nx):
        for j in range(ny):
            # all echo values of this voxel
            y = slice[i][j][:]
            if np.mean(snrMap[i, j]) >= snrLim:
                result=mpfitfun(y, te, model,uplim)
                T2[i, j] = result['T2'].value
                S0[i, j] = result['S0'].value
            #bar.update(i)
    allResult = {'T2': T2, 'S0': S0, 'SNR': snrMap}
    #plt.imshow(T2, cmap='gray')
    return allResult
###############################################################################
# t2_fitmonoexp2
#
# Perform the data fitting using a mono-exponential model:
# S = S0 * EXP(-TE / T2)
#------------------------------------------------------------------------------
###############################################################################
#def t2_fitmonoexp2 (T2, S0, T2bn, T2pe, slice, snr, snrlim, nx, ny, slc, te, start, pinfo):
def t2_fitmonoexp2(slice,te,snrMap,snrLim, model,uplim):
    """
    Fit every voxel of one slice with the three-parameter model
    S = S0 * exp(-TE / T2) + Y0 via mpfitfun, skipping voxels whose mean
    SNR is below snrLim. Shows a progress bar over the outer loop.

    Parameters:
    ===========
    slice: 3D array (y, x, echoes) of one slice over all echo times
    te: echo-time axis passed through to mpfitfun
    snrMap: per-voxel SNR map; voxels below snrLim keep zero maps
    snrLim: SNR threshold
    model: model selector string forwarded to mpfitfun ('T2_3p' here)
    uplim: forwarded to mpfitfun (the 3p branch uses a fixed max of 70)

    Returns dict with 'T2', 'S0', 'Y0' maps and the untouched 'SNR' map.
    """
    dims = slice.shape
    # NOTE(review): nx from dims[1] used as first index - only safe for
    # square slices; verify for non-square data.
    nx = dims[1]
    ny = dims[0]
    # NOTE(review): dtype='int8' truncates/wraps fitted values (see
    # t2_fitmonoexp1) - TODO confirm intended.
    T2 = np.zeros([nx, ny],dtype='int8') # Temporary store T2 map
    S0 = np.zeros([nx, ny],dtype='int8') # Temporary store S0 map
    Y0 = np.zeros([nx, ny],dtype='int8') # Temporary storeY0 map
    # // FITTING PROCEDURE //
    bar = progressbar.ProgressBar()
    for i in bar(range(nx)):
        for j in range(ny):
            # all echo values of this voxel
            y = slice[i][j][:]
            if np.mean(snrMap[i, j]) >= snrLim:
                result = mpfitfun(y, te, model, uplim)
                T2[i, j] = result['T2'].value
                S0[i, j] = result['S0'].value
                Y0[i, j] = result['Y0'].value
        bar.update(i)
    allResult = {'T2': T2, 'S0': S0, 'Y0': Y0, 'SNR': snrMap}
    #plt.imshow(T2, cmap='gray')
    return allResult
###############################################################################
# t2_mapping
#
# Main function for the T2 mapping project. Iterate over the slices
# T2 maps are generated from Bruker ParaVision data.
# Generate arrays to store data and call the fitting routines.
###############################################################################
def t2_mapping(data,echoTime, model, uplim, snrLim, SNRMethod):
    """
    Main function for the T2 mapping: iterate over all slices of a 4D NIfTI
    image, estimate a per-slice SNR map, and fit each voxel with the chosen
    mono-exponential model.

    Parameters:
    ===========
    data: nibabel image (4D: x, y, echoes, slices)
    echoTime: echo-time axis forwarded to the per-slice fit
    model: 'T2_2p' (two-parameter) or 'T2_3p' (three-parameter with offset)
    uplim: upper bound for fitted T2
    snrLim: SNR threshold below which voxels are skipped
    SNRMethod: 'Brummer', 'Chang' or 'Sijbers'

    Returns a 4D array (x, y, slice, maps): [T2, S0, SNR] for T2_2p,
    [T2, S0, Y0, SNR] for T2_3p.

    NOTE(review): only brummerSNR is imported at the top of this module;
    the 'Chang'/'Sijbers' branches reference changSNR/sijbersSNR and the
    error paths reference sys, none of which are imported here - those
    branches raise NameError as written. TODO fix imports upstream.
    NOTE(review): data.get_data() is deprecated (removed in nibabel >= 5);
    verify the pinned nibabel version or migrate to numpy(asarray(data.dataobj)).
    """
    imgData = data.get_data()
    nx = imgData.shape[0] # Images size in x - direction
    ny = imgData.shape[1] # Images size in y - direction
    ns = imgData.shape[3] # Number of slices
    if 'T2_2p' in model:
        # Array to store the T2, S0 and Y0 maps
        pvMaps = np.zeros([nx, ny, ns, 3],dtype=data.get_data_dtype())
        #Loop to go through all slices
        for slc in range(ns):
            # Print % of progress
            #print('Slice: ' + str(slc + 1))
            # Temporal image containing all TE values for the selected slice
            slice = imgData[:, :, :, slc]
            # Temporal map containing the snr values for the selected slice
            if 'Chang' in SNRMethod:
                curSnrMap, estStdSijbers, estStdSijbersNorm = changSNR.calcSNR(slice, 0, 1)
            elif 'Brummer' in SNRMethod:
                curSnrMap, estStdSijbers, estStdSijbersNorm = brummerSNR.calcSNR(slice, 0, 1)
            elif 'Sijbers' in SNRMethod:
                curSnrMap, estStdSijbers, estStdSijbersNorm = sijbersSNR.calcSNR(slice, 0, 1)
            else:
                sys.exit("Error: No valid SNR model.")
            # Fit the data of the single slice (model 1)
            results = t2_fitmonoexp1(slice, echoTime, curSnrMap, snrLim, model, uplim)
            # Store data of slice in final image
            pvMaps[:, :, slc, 0] = results['T2']
            pvMaps[:, :, slc, 1] = results['S0']
            pvMaps[:, :, slc, 2] = results['SNR'][:, :, 0]
    elif 'T2_3p' in model:
        # Array to store the T2, S0 maps
        pvMaps = np.zeros([nx, ny, ns, 4],dtype=data.get_data_dtype())
        # Loop to go through all slices
        for slc in range(ns):
            # Print % of progress
            print('Slice: ' + str(slc + 1) +'\n')
            # Temporal image containing all TE values for the selected slice
            slice = imgData[:, :, :, slc]
            # Temporal map containing the snr values for the selected slice
            if 'Chang' in SNRMethod:
                curSnrMap, estStdSijbers, estStdSijbersNorm = changSNR.calcSNR(slice, 0, 1)
            elif 'Brummer' in SNRMethod:
                curSnrMap, estStdSijbers, estStdSijbersNorm = brummerSNR.calcSNR(slice, 0, 1)
            elif 'Sijbers' in SNRMethod:
                curSnrMap, estStdSijbers, estStdSijbersNorm = sijbersSNR.calcSNR(slice, 0, 1)
            else:
                sys.exit("Error: No valid SNR model.")
            # Fit the data of the single slice (model 1)
            results = t2_fitmonoexp2(slice, echoTime, curSnrMap, snrLim, model, uplim)
            # Store data of slice in final image
            pvMaps[:, :, slc, 0] = results['T2']
            pvMaps[:, :, slc, 1] = results['S0']
            pvMaps[:, :, slc, 2] = results['Y0']
            pvMaps[:, :, slc, 3] = results['SNR'][:,:,0]
    else:
        sys.exit("Error: No valid model.")
    return pvMaps
def mpfitfun(data,te,model,uplim):
    """
    Build start estimates and run the lmfit minimization for one voxel.

    Parameters:
    ===========
    data: signal values over the echo train for a single voxel
    te: echo-time axis
    model: 'T2_2p' or 'T2_3p' selects the decay model
    uplim: upper bound for T2 in the two-parameter fit
           (the three-parameter fit uses a fixed bound of 70)

    Returns the fitted lmfit Parameters object.
    """
    signal = data
    times = te
    params = Parameters()
    # NOTE(review): the start estimate uses log(y0)/log(y1); a textbook
    # two-point estimate would use log(y0/y1). Kept verbatim - it only
    # seeds the optimizer. TODO confirm against the original IDL code.
    if 'T2_2p' in model:
        t2_guess = (times[1] - times[0]) / ((np.log(signal[0]) / np.log(signal[1])))
        params.add('T2', value=t2_guess, min=0, max=uplim)
        params.add('S0', value=signal[0])
        result = Minimizer(t2_monoexp2, params, fcn_args=(times, signal)).minimize()
    elif 'T2_3p' in model:
        t2_guess = (times[1] - times[0]) / (np.log(signal[0]) / np.log(signal[1]))
        params.add('T2', value=t2_guess, min=0, max=70)
        params.add('S0', value=signal[0])
        # last echo value seeds the constant offset
        params.add('Y0', value=signal[len(signal) - 1])
        result = Minimizer(t2_monoexp3, params, fcn_args=(times, signal)).minimize()
    return result.params
def parsePV(filename):
    """
    Parser for Bruker ParaVision parameter files in JCAMP-DX format.

    Returns a dict mapping parameter names (without the leading '$') to raw
    string values. Array-valued parameters keep their whitespace-separated
    payload; '<...>' string values are rewritten as '"..."'. For visu_pars
    files, 'studyname'/'expno'/'procno' are extracted from the path comment.
    """
    # Read file 'filename' -> list 'lines'; 'with' closes the handle on error
    with open(filename, 'r') as f:
        lines = f.readlines()
    # Dictionary for parameters
    params = {}
    # Get STUDYNAME, EXPNO, and PROCNO from the '$$ <path>' comment line
    if 'visu_pars' in filename:
        tmp = lines[6].split('/')
        params['studyname'] = [[], tmp[-5]]
        params['expno'] = [[], tmp[-4]]
        params['procno'] = [[], tmp[-2]]
    # Remove comment lines ('$$' timestamps and paths)
    remove = [] # Index list
    for index, line in enumerate(lines): # Find lines
        if line[0:2] == '$$':
            remove.append(index)
    for offset, index in enumerate(remove): # Remove lines; earlier deletions shift indices by 'offset'
        del lines[index-offset]
    # Create list of LDR (Labelled Data Record) elements
    lines = ''.join(lines).split('\n##') # Join lines and split into LDRs
    lines[0] = lines[0].lstrip('##') # Remove leading '##' from first LDR
    # Combine LDR lines: multi-line values become one string
    for index, line in enumerate(lines):
        lines[index] = ''.join(line.split('\n'))
    # Fill parameter dictionary; each entry is temporarily [dimensions, value]
    for line in lines:
        line = line.split('=', 1)
        if line[0][0] == '$':
            key = line[0].lstrip('$')
            dataset = line[1]
            params[key] = []
            pos = 0
            if (len(dataset) > 4) and (dataset[0:2] == '( '):
                # '( d1,d2 )' prefix encodes the array dimensions
                pos = dataset.find(' )', 2)
                if pos > 2:
                    pardim = [int(dim) for dim in dataset[2:pos].split(',')]
                    params[key].append(pardim)
                    params[key].append(dataset[pos+2:])
            if pos <= 2:
                # scalar value (or malformed dimension prefix): no dimensions
                params[key].append([])
                params[key].append(dataset)
    # Remove specific elements from parameter dictionary.
    # NOTE(review): keys are stored WITHOUT the leading '$' (see lstrip
    # above), so these checks never match and nothing is ever deleted.
    # Left untouched on purpose: read_2dseq(map_raw=True) reads
    # VisuCoreDataSlope/Offs from the result and would break if these
    # deletions started working. TODO confirm the original intent.
    if '$VisuCoreDataMin' in params: del params['$VisuCoreDataMin']
    if '$VisuCoreDataMax' in params: del params['$VisuCoreDataMax']
    if '$VisuCoreDataOffs' in params: del params['$VisuCoreDataOffs']
    if '$VisuCoreDataSlope' in params: del params['$VisuCoreDataSlope']
    if '$VisuAcqImagePhaseEncDir' in params: del params['$VisuAcqImagePhaseEncDir']
    # Collapse [dimensions, value] -> value; rewrite '<...>' strings as '"..."'
    for key in params.keys():
        pardim = params[key][0]
        parval = params[key][1]
        if (len(pardim) > 0) and (len(parval) > 0) and (parval[0] == '<'):
            params[key][1] = parval.replace('<', '"').replace('>', '"')
        elif (len(parval) > 0) and (parval[0] == '('):
            params[key][1] = parval.replace('<', '"').replace('>', '"')
        params[key] = params[key][1]
    return params
def getT2mapping(path, model, upLim, snrLim, SNRMethod, echoTime, output_path=None):
    """
    Compute a T2 map from a multi-echo NIfTI file and save it.

    Parameters:
    ===========
    path: input NIfTI file (4D, >= 2 echoes)
    model, upLim, snrLim, SNRMethod, echoTime: forwarded to t2_mapping()
    output_path: destination NIfTI file. Defaults to None for backward
        compatibility - callers in this code base pass only six arguments,
        which previously raised TypeError; in that case the map is written
        as 'T2map.nii.gz' next to the input file.
    """
    data = nii.load(path)
    hdr = data.header
    raw = hdr.structarr
    if raw['dim'][3] < 2:
        # raise SystemExit directly: 'sys' is not imported in this module,
        # so the original sys.exit(...) call was a NameError
        raise SystemExit("Error: '%s' has wrong dimensions." % (path,))
    t2map = t2_mapping(data, echoTime, model=model, uplim=upLim, snrLim=snrLim, SNRMethod=SNRMethod)
    pathT2Map = os.path.split(path)[0]
    if output_path is None:
        # Backward-compatible default for legacy 6-argument callers
        output_path = os.path.join(pathT2Map, 'T2map.nii.gz')
    t2map = t2map[:, :, :, 0] #delete this line if you want more outputdata
    t2map = np.flip(t2map, 2)
    mapNii = nii.as_closest_canonical(nii.Nifti1Image(t2map, data.affine))
    hdr = mapNii.header
    hdr.set_xyzt_units('mm')
    nii.save(mapNii, output_path)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/PV2NIfTiConverter/pv_conv2Nifti_bval_bvec.py | .py | 15,938 | 386 | """
Created on 10/08/2017
@author: Niklas Pallast, Markus Aswendt
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
from __future__ import print_function
import os
import time
import re
import sys
import numpy as np
import nibabel as nib
import nibabel.nifti1 as nii
import pv_parseBruker_md_np as pB
import P2_IDLt2_mapping as mapT2
import json
class Bruker2Nifti:
    """
    Convert one Bruker ParaVision scan (study/expno/pdata/procno) to NIfTI,
    optionally exporting slice timings (fMRI) and diffusion tables (DTI).

    NOTE(review): the original class defined save_table twice; the first
    definition (a try/except mkdir stub) was dead code because the second
    definition silently replaced it at class-creation time. Only the real
    b-table writer is kept here.
    """

    def __init__(self, study, expno, procno, rawfolder, procfolder, ftype='NIFTI_GZ'):
        # expno/procno may arrive as integers; normalize to str for path joins
        self.study = study
        self.expno = str(expno)
        self.procno = str(procno)
        self.rawfolder = rawfolder
        self.procfolder = procfolder
        self.ftype = ftype

    def read_2dseq(self, map_raw=False, pv6=False, sc=1.0):
        """
        Read the reconstructed '2dseq' image plus its parameter files and
        build self.nim (NIfTI image), self.hdr and self.xml. Leaves self.nim
        unset for unsupported data (e.g. spectroscopic scans).

        Parameters:
        ===========
        map_raw: map stored integers back to the raw data range using
                 VisuCoreDataSlope/Offs (PV6 divides by the slope,
                 older ParaVision multiplies)
        pv6: ParaVision 6 scaling convention
        sc: voxel-size scale factor forwarded to getNiftiHeader
        """
        study = self.study
        expno = self.expno
        procno = self.procno
        rawfolder = self.rawfolder
        self.acqp = pB.parsePV(os.path.join(rawfolder, study, expno, 'acqp'))
        self.method = pB.parsePV(os.path.join(rawfolder, study, expno, 'method'))
        self.method_path = os.path.join(rawfolder, study, expno, 'method')
        self.subject = pB.parsePV(os.path.join(rawfolder, study, 'subject'))
        # get header information
        datadir = os.path.join(rawfolder, study, expno, 'pdata', procno)
        #self.d3proc = pB.parsePV(os.path.join(datadir, 'd3proc')) # removed for PV6
        self.visu_pars = pB.parsePV(os.path.join(datadir, 'visu_pars'))
        hdr = pB.getNiftiHeader(self.visu_pars, sc=sc)
        if hdr is None or not isinstance(hdr[12], str):
            # unsupported data type - caller checks hasattr(self, 'nim')
            return
        # read '2dseq' file (Fortran order: x varies fastest)
        with open(os.path.join(datadir, '2dseq'), 'rb') as f_id:
            data = np.fromfile(f_id, dtype=np.dtype(hdr[12])).reshape(hdr[1], hdr[2], hdr[3], hdr[4], order='F')
        # map to raw data range (PV6)
        if map_raw:
            # list(...) is required in Python 3: np.array over a bare 'map'
            # object yields a 0-d object array instead of a float vector
            visu_core_data_slope = np.array(list(map(float, self.visu_pars['VisuCoreDataSlope'].split())), dtype=np.float32)
            visu_core_data_offs = np.array(list(map(float, self.visu_pars['VisuCoreDataOffs'].split())), dtype=np.float32)
            visu_core_data_shape = list(data.shape)
            visu_core_data_shape[:2] = (1, 1)
            if pv6:
                data = data / visu_core_data_slope.reshape(visu_core_data_shape)
            else:
                data = data * visu_core_data_slope.reshape(visu_core_data_shape)
            data = data + visu_core_data_offs.reshape(visu_core_data_shape)
        # NIfTI image (affine left as None; orientation handled downstream)
        nim = nii.Nifti1Image(data, None)
        # nibabel removed get_header() in 5.0; the .header property is the
        # long-standing equivalent
        header = nim.header
        header['pixdim'] = [0.0, hdr[5], hdr[6], hdr[7], hdr[8], 0.0, 0.0, 0.0]
        header.set_xyzt_units(xyz='mm', t=None)
        # write header in xml structure
        xml = pB.getXML(os.path.join(datadir, 'visu_pars'))
        self.hdr = hdr
        self.nim = nim
        self.xml = xml

    def create_slice_timings(self):
        """
        Parse the 'method' file and, for fMRI protocols, write slice-timing
        metadata (<study>.<expno>.<procno>.json) into the fMRI subfolder.
        """
        with open(self.method_path, "r") as method_file:
            lines = method_file.readlines()
        interleaved = False   # informational; not used downstream
        repetition_time = None
        slicepack_delay = None
        slice_order = None
        n_slices = None
        reverse = False
        for idx, line in enumerate(lines):
            if "RepetitionTime=" in line:
                repetition_time = int(float(line.split("=")[1]))
            if "PackDel=" in line:
                slicepack_delay = int(float(line.split("=")[1]))
            if "ObjOrderScheme=" in line:
                # strip(): the raw value carries a trailing newline, which
                # previously made the 'Sequential' comparison always fail
                scheme = line.split("=")[1].strip()
                interleaved = scheme != 'Sequential'
            if "ObjOrderList=" in line:
                n_slices = re.findall(r'\d+', line)
                if len(n_slices) == 1:
                    n_slices = int(n_slices[0])
                    # the actual order list sits on the following line;
                    # bounds check guards against a truncated method file
                    if idx + 1 < len(lines) and lines[idx + 1]:
                        slice_order = [int(float(s)) for s in re.findall(r'\d+', lines[idx + 1])]
                        if slice_order[0] > slice_order[-1]:
                            reverse = True
        if "fMRI" in self.acqp['ACQ_protocol_name']:
            slice_timings = self._calculate_slice_timings(n_slices, repetition_time, slicepack_delay, slice_order, reverse)
            # adjust slice order to start at 1 (Bruker lists are 0-based)
            slice_order = [x + 1 for x in slice_order]
            # save metadata; the 'costum_timings' key (sic) is kept as-is
            # for downstream compatibility
            mri_meta_data = {
                "RepetitionTime": repetition_time,
                "ObjOrderList": slice_order,
                "n_slices": n_slices,
                "costum_timings": slice_timings,
            }
            procfolder = os.path.join(self.procfolder, self.study, "fMRI")
            fname = '.'.join([self.study, self.expno, self.procno, "json"])
            mri_meta_json = os.path.join(procfolder, fname)
            with open(mri_meta_json, "w") as outfile:
                json.dump(mri_meta_data, outfile)

    def _calculate_slice_timings(self, n_slices, repetition_time, slicepack_delay, slice_order, reverse=False):
        """
        Compute per-slice acquisition offsets (in TR fractions times the
        slice spacing) centered around the middle of the TR.

        NOTE: mutates 'slice_order' in place when reverse=True; the caller
        relies on this (the reversed order is what gets saved to JSON).
        """
        n_slices_2 = int(n_slices / 2)
        slice_spacing = float(repetition_time - slicepack_delay) / float(n_slices * repetition_time)
        if n_slices % 2 == 1:  # odd slice count: symmetric around 0
            slice_timings = list(range(n_slices_2, -n_slices_2 - 1, -1))
            slice_timings = list(map(float, slice_timings))
        else:  # even slice count: shift by half a step
            slice_timings = list(range(n_slices_2, -n_slices_2, -1))
            slice_timings = [float(x) - 0.5 for x in slice_timings]
        if reverse:
            slice_order.reverse()
        print("slice_order:", slice_order)
        slice_timings = [slice_timings[x] for x in slice_order]
        return [slice_spacing * x for x in slice_timings]

    def save_nifti(self, subfolder=''):
        """
        Save self.nim into a protocol-specific subfolder of the study
        output directory. Returns the written path, or None when
        read_2dseq produced no image.
        """
        procfolder = os.path.join(self.procfolder, self.study)
        if not os.path.isdir(procfolder):
            os.mkdir(procfolder)
        # pick the output subfolder from the acquisition protocol name
        protocol = self.acqp['ACQ_protocol_name']
        if "Localizer" in protocol:
            subdir = "Localizer"
        elif "DTI" in protocol or "Diffusion" in protocol:
            subdir = "DTI"
        elif "fMRI" in protocol:
            subdir = "fMRI"
        elif "Turbo" in protocol:
            subdir = "T2w"
        elif "MSME" in protocol:
            subdir = "T2map"
        else:
            subdir = "Others"
        procfolder = os.path.join(self.procfolder, self.study, subdir)
        if not os.path.isdir(procfolder):
            os.mkdir(procfolder)
        ext = {'NIFTI_GZ': 'nii.gz', 'NIFTI': 'nii', 'ANALYZE': 'img'}.get(self.ftype, 'nii.gz')
        fname = '.'.join([self.study, self.expno, self.procno, ext])
        # write Nifti file
        print(os.path.join(procfolder, fname))
        if not hasattr(self, 'nim'):
            # read_2dseq bailed out (unsupported data) - nothing to save
            return
        nib.save(self.nim, os.path.join(procfolder, fname))
        return os.path.join(procfolder, fname)

    def save_table(self, subfolder=''):
        """
        Export diffusion b-values and directions as text files
        (<study>.<expno>.<procno>.{btable,bvals,bvecs}.txt). Scans without
        diffusion parameters are skipped silently.
        """
        procfolder = os.path.join(self.procfolder, self.study)
        if not os.path.isdir(procfolder):
            os.mkdir(procfolder)
        procfolder = os.path.join(self.procfolder, self.study, subfolder)
        if not os.path.isdir(procfolder):
            os.mkdir(procfolder)
        # Guard: this method is called for every scan (see __main__), but
        # only diffusion acquisitions carry these keys; previously a
        # non-DTI scan raised NameError on the unbound variables below.
        if 'PVM_DwEffBval' not in self.method or 'PVM_DwDir' not in self.method:
            return
        dw_eff_bval = np.array(list(map(float, self.method['PVM_DwEffBval'].split())), dtype=np.float32)
        dw_ao_images = int(self.method.get('PVM_DwAoImages', 0))
        dw_n_diff_dir = int(self.method.get('PVM_DwNDiffDir', 0))
        dw_dir = np.array(list(map(float, self.method['PVM_DwDir'].split())), dtype=np.float32)
        dw_dir = dw_dir.reshape((dw_n_diff_dir, 3))
        # A0 (b=0) images come first; their b-value and direction stay zero
        nd = dw_ao_images + dw_n_diff_dir
        bvals = np.zeros(nd, dtype=np.float32)
        dwdir = np.zeros((nd, 3), dtype=np.float32)
        bvals[dw_ao_images:] = dw_eff_bval[dw_ao_images:]
        dwdir[dw_ao_images:] = dw_dir
        fname = '.'.join([self.study, self.expno, self.procno, 'btable', 'txt'])
        print(os.path.join(procfolder, fname))
        with open(os.path.join(procfolder, fname), mode='w') as fid:
            for i in range(nd):
                fid.write("%.4f" % (bvals[i],) + " %.8f %.8f %.8f" % tuple(dwdir[i]))
        fname = '.'.join([self.study, self.expno, self.procno, 'bvals', 'txt'])
        print(os.path.join(procfolder, fname))
        with open(os.path.join(procfolder, fname), mode='w') as fid:
            fid.write(" ".join("%.4f" % (bvals[i],) for i in range(nd)))
        fname = '.'.join([self.study, self.expno, self.procno, 'bvecs', 'txt'])
        print(os.path.join(procfolder, fname))
        with open(os.path.join(procfolder, fname), mode='w') as fid:
            for k in range(3):
                fid.write(" ".join("%.8f" % (dwdir[i, k],) for i in range(nd)))
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='Convert ParaVision to NIfTI')
    requiredNamed = parser.add_argument_group('Required named arguments')
    requiredNamed.add_argument('-i', '--input_folder', help='raw data folder')
    parser.add_argument('-f', '--model',
                        help='T2_2p (default) : Two parameter T2 decay S(t) = S0 * exp(-t/T2)\n'
                             'T2_3p : Three parameter T2 decay S(t) = S0 * exp(-t/T2) + C'
                        , nargs='?', const='T2_2p', type=str, default='T2_2p')
    parser.add_argument('-u', '--upLim', help='upper limit of TE - default: 100', nargs='?', const=100, type=int,
                        default=100)
    parser.add_argument('-s', '--snrLim', help='upper limit of SNR - default: 1.5', nargs='?', const=1.5, type=float,
                        default=1.5)
    parser.add_argument('-k', '--snrMethod', help='Brummer ,Chang, Sijbers', nargs='?', const='Brummer', type=str,
                        default='Brummer')
    parser.add_argument('-m', '--map_raw', action='store_true', help='get the real values')
    parser.add_argument('-p', '--pv6', action='store_true', help='ParaVision 6')
    parser.add_argument('-t', '--table', action='store_true', help='save b-values and diffusion directions')
    args = parser.parse_args()

    # raw data folder; guard None so os.path.isdir is never called on it
    input_folder = args.input_folder
    if input_folder is None or not os.path.isdir(input_folder):
        sys.exit("Error: '%s' is not an existing directory." % (input_folder,))

    listOfDirs = os.listdir(input_folder)
    listOfScans = [s for s in listOfDirs if s.isdigit()]
    # '== 0' replaces the original 'is 0': identity comparison with a
    # literal is a CPython accident and a SyntaxWarning since Python 3.8
    if len(listOfScans) == 0:
        sys.exit("Error: '%s' contains no numbered scans." % (input_folder,))
    print('Start to process ' + str(len(listOfScans)) + ' scans...')
    procno = '1'
    # basename(normpath(...)) also works with a trailing slash and on
    # Windows, where the original split('/')[-1] yielded an empty name
    study = os.path.basename(os.path.normpath(input_folder))
    print(study)
    img = []
    for expno in np.sort(listOfScans):
        path = os.path.join(input_folder, expno, 'pdata', procno)
        if not os.path.isdir(path):
            print("Error: '%s' is not an existing directory." % (path,))
            continue
        if os.path.exists(os.path.join(path, '2dseq')):
            img = Bruker2Nifti(study, expno, procno, os.path.split(input_folder)[0], input_folder, ftype='NIFTI_GZ')
            img.read_2dseq(map_raw=args.map_raw, pv6=args.pv6)
            resPath = img.save_nifti()
            img.create_slice_timings()
            if resPath is not None:
                # log the coil name next to the converted data
                pathlog = os.path.join(os.path.dirname(os.path.dirname(resPath)), 'data.log')
                with open(pathlog, 'w') as logfile:
                    logfile.write(img.subject['coilname'])
            # export diffusion tables (no-op for non-DTI scans)
            img.save_table()
            if resPath is not None and 'VisuAcqEchoTime' in img.visu_pars:
                echoTime = np.fromstring(img.visu_pars['VisuAcqEchoTime'], dtype=float, sep=' ')
                if len(echoTime) > 3:
                    # pass an explicit output path: getT2mapping takes seven
                    # arguments, so the original six-argument call raised
                    # TypeError before any mapping happened
                    t2_out = os.path.join(os.path.dirname(resPath), 'T2map.nii.gz')
                    mapT2.getT2mapping(resPath, args.model, args.upLim, args.snrLim, args.snrMethod, echoTime, t2_out)
        else:
            print("The following file does not exist, it will be skipped:")
            print(os.path.join(path, '2dseq'))
            continue
    # NOTE(review): the original re-wrote data.log after the loop using the
    # last scan's leftovers - an exact duplicate of the in-loop write, so it
    # has been removed as redundant.
| Python |
3D | Aswendt-Lab/AIDAmri | bin/PV2NIfTiConverter/dict2xml.py | .py | 2,466 | 92 | """ Dictionary to XML - Library to convert a python dictionary to XML output
Copyleft (C) 2007 Pianfetti Maurizio <boymix81@gmail.com>
Package site : http://boymix81.altervista.org/files/dict2xml.tar.gz
Revision 1.0 2007/12/15 11:57:20 Maurizio
- First stable version
"""
__author__ = "Pianfetti Maurizio <boymix81@gmail.com>"
__contributors__ = []
__date__ = "$Date: 2007/12/15 11:57:20 $"
__credits__ = """..."""
__version__ = "$Revision: 1.0.0 $"
class Dict2XML:
    """
    Convert a (possibly nested) dictionary into an indented XML string.

    The XML accumulates in self.xml across calls; seed it with setXml().
    """

    # class-level defaults; shadowed by per-instance values in __init__
    xml = ""
    level = 0

    def __init__(self):
        self.xml = ""
        self.level = 0
    #end def

    def __del__(self):
        pass
    #end def

    def setXml(self, Xml):
        # seed the output buffer (e.g. with an '<?xml ...?>' declaration)
        self.xml = Xml
    #end if

    def setLevel(self, Level):
        # starting indentation depth (tabs)
        self.level = Level
    #end if

    def dict2xml(self, map):  # 'map' shadows the builtin; name kept for interface compatibility
        """Append the XML rendering of 'map' to self.xml and return it."""
        # isinstance() replaces the original str(type(...)) string comparison,
        # which tested for the Python 2 spelling "<type 'dict'>" and therefore
        # never matched under Python 3; every call then fell into a fallback
        # branch that referenced unbound 'key'/'value' names and raised
        # NameError. isinstance also covers dict subclasses such as the
        # 'object_dict' type the original tested for by name.
        if isinstance(map, dict):
            for key, value in map.items():
                if isinstance(value, dict):
                    if len(value) > 0:
                        # open tag, recurse one level deeper, close tag
                        self.xml += "\t" * self.level
                        self.xml += "<%s>\n" % (key)
                        self.level += 1
                        self.dict2xml(value)
                        self.level -= 1
                        self.xml += "\t" * self.level
                        self.xml += "</%s>\n" % (key)
                    else:
                        # empty dict -> empty element
                        self.xml += "\t" * (self.level)
                        self.xml += "<%s></%s>\n" % (key, key)
                    #end if
                else:
                    # leaf value: render inline
                    self.xml += "\t" * (self.level)
                    self.xml += "<%s>%s</%s>\n" % (key, value, key)
                #end if
        #end if
        return self.xml
    #end def
#end class
def createXML(dict, xml):  # parameter names shadow builtins; kept for keyword-call compatibility
    """Render 'dict' as XML, prefixed with the seed string 'xml'."""
    builder = Dict2XML()
    builder.setXml(xml)
    return builder.dict2xml(dict)
#end def

# legacy alias kept for existing callers
dict2Xml = createXML
if __name__ == "__main__":
    # Small self-demonstration: build a nested dict and print its XML form.
    demo = {}
    demo['root'] = {}
    demo['root']['v1'] = ""
    demo['root']['v2'] = "hi"
    demo['root']['v3'] = {}
    demo['root']['v3']['v31'] = "hi"
    #xml='<?xml version="1.0"?>\n'
    seed = ""
    print(dict2Xml(demo, seed))
#end if
| Python |
3D | Aswendt-Lab/AIDAmri | bin/PV2NIfTiConverter/pv_conv2Nifti.py | .py | 15,954 | 386 | """
Created on 10/08/2017
@author: Niklas Pallast, Marc Schneider, Markus Aswendt
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
from __future__ import print_function
import os
import time
import re
import sys
import numpy as np
import nibabel as nib
import nibabel.nifti1 as nii
import pv_parseBruker_md_np as pB
import P2_IDLt2_mapping as mapT2
import json
class Bruker2Nifti:
    """Convert one Bruker ParaVision scan (a study/expno/procno triple) to NIfTI.

    Typical usage: call read_2dseq() first, then save_nifti(); save_table()
    additionally writes b-values/b-vectors for diffusion scans and is a no-op
    for all other scans.

    Review notes on fixes applied:
    * the class previously defined save_table() twice; the first (stub)
      definition was silently shadowed by the second and has been removed
    * np.array(map(float, ...)) does not work on Python 3 and is replaced by
      np.array(list(map(float, ...)))
    * save_table() no longer raises NameError for non-diffusion scans
    * the b-table and bvecs rows are now separated by the line endings that
      the original py2.6 print statements (kept as comments before) produced
    """

    def __init__(self, study, expno, procno, rawfolder, procfolder, ftype='NIFTI_GZ'):
        self.study = study
        self.expno = str(expno)
        self.procno = str(procno)
        self.rawfolder = rawfolder
        self.procfolder = procfolder
        self.ftype = ftype  # one of 'NIFTI_GZ', 'NIFTI', 'ANALYZE'

    def read_2dseq(self, map_raw=False, pv6=False, sc=1.0):
        """Read the reconstructed '2dseq' file and build a Nifti1Image in self.nim.

        map_raw -- undo the VisuCoreDataSlope/Offs scaling (division for PV6,
                   multiplication otherwise).
        pv6     -- data were produced by ParaVision 6.
        sc      -- scale factor applied to the voxel dimensions.

        Returns early (self.nim is not set) if no valid header can be built.
        """
        study, expno, procno = self.study, self.expno, self.procno
        rawfolder = self.rawfolder

        # parameter files of the experiment
        self.acqp = pB.parsePV(os.path.join(rawfolder, study, expno, 'acqp'))
        self.method = pB.parsePV(os.path.join(rawfolder, study, expno, 'method'))
        self.method_path = os.path.join(rawfolder, study, expno, 'method')
        self.subject = pB.parsePV(os.path.join(rawfolder, study, 'subject'))

        # header information of the reconstruction
        datadir = os.path.join(rawfolder, study, expno, 'pdata', procno)
        self.visu_pars = pB.parsePV(os.path.join(datadir, 'visu_pars'))
        hdr = pB.getNiftiHeader(self.visu_pars, sc=sc)
        if hdr is None or not isinstance(hdr[12], str):
            return

        # read '2dseq' (Fortran element order, dims and dtype from the header)
        with open(os.path.join(datadir, '2dseq'), 'rb') as f_id:
            data = np.fromfile(f_id, dtype=np.dtype(hdr[12])).reshape(
                hdr[1], hdr[2], hdr[3], hdr[4], order='F')

        # map to raw data range (PV6); list(map(...)) is required on Python 3 --
        # np.array over a bare map object does not produce a numeric array
        if map_raw:
            slope = np.array(list(map(float, self.visu_pars['VisuCoreDataSlope'].split())), dtype=np.float32)
            offs = np.array(list(map(float, self.visu_pars['VisuCoreDataOffs'].split())), dtype=np.float32)
            bcast_shape = list(data.shape)
            bcast_shape[:2] = (1, 1)  # broadcast slope/offset over the in-plane dims
            if pv6:
                data = data / slope.reshape(bcast_shape)
            else:
                data = data * slope.reshape(bcast_shape)
            data = data + offs.reshape(bcast_shape)

        # NIfTI image + header (nim.header replaces the deprecated get_header())
        nim = nii.Nifti1Image(data, None)
        header = nim.header
        header['pixdim'] = [0.0, hdr[5], hdr[6], hdr[7], hdr[8], 0.0, 0.0, 0.0]
        header.set_xyzt_units(xyz='mm', t=None)

        # keep the full parameter set as XML for later reference
        xml = pB.getXML(os.path.join(datadir, 'visu_pars'))

        self.hdr = hdr
        self.nim = nim
        self.xml = xml

    def create_slice_timings(self):
        """Parse slice-order/timing information from the 'method' file and, for
        fMRI scans, write a JSON sidecar with the computed slice timings."""
        with open(self.method_path, "r") as method_file:
            lines = method_file.readlines()

        repetition_time = None
        slicepack_delay = None
        slice_order = None
        n_slices = None
        reverse = False
        for idx, line in enumerate(lines):
            if "RepetitionTime=" in line:
                repetition_time = int(float(line.split("=")[1]))
            if "PackDel=" in line:
                slicepack_delay = int(float(line.split("=")[1]))
            if "ObjOrderList=" in line:
                n_slices = re.findall(r'\d+', line)
                if len(n_slices) == 1:
                    n_slices = int(n_slices[0])
                # the actual order list sits on the following line; guard the
                # index so a trailing ObjOrderList cannot raise IndexError
                if idx + 1 < len(lines) and lines[idx + 1]:
                    slice_order = [int(float(s)) for s in re.findall(r'\d+', lines[idx + 1])]
                    if slice_order[0] > slice_order[-1]:
                        reverse = True

        if "fMRI" in self.acqp['ACQ_protocol_name']:
            slice_timings = self._calculate_slice_timings(
                n_slices, repetition_time, slicepack_delay, slice_order, reverse)
            # adjust slice order to start at 1 (Bruker lists are 0-based)
            slice_order = [x + 1 for x in slice_order]

            # save metadata next to the converted fMRI data
            mri_meta_data = {
                "RepetitionTime": repetition_time,
                "ObjOrderList": slice_order,
                "n_slices": n_slices,
                "costum_timings": slice_timings,  # key kept (typo included) for downstream readers
            }
            procfolder = os.path.join(self.procfolder, self.study, "fMRI")
            os.makedirs(procfolder, exist_ok=True)
            fname = '.'.join([self.study, self.expno, self.procno, "json"])
            with open(os.path.join(procfolder, fname), "w") as outfile:
                json.dump(mri_meta_data, outfile)

    def _calculate_slice_timings(self, n_slices, repetition_time, slicepack_delay, slice_order, reverse=False):
        """Compute per-slice acquisition timings (fractions of TR), reordered by
        the acquisition slice order; mirrors the order for reversed scans."""
        n_slices_2 = int(n_slices / 2)
        # spacing between consecutive slice excitations, as a fraction of TR
        slice_spacing = float(repetition_time - slicepack_delay) / float(n_slices * repetition_time)
        if n_slices % 2 == 1:  # odd number of slices
            slice_timings = list(range(n_slices_2, -n_slices_2 - 1, -1))
            slice_timings = list(map(float, slice_timings))
        else:  # even number of slices
            slice_timings = list(range(n_slices_2, -n_slices_2, -1))
            slice_timings = list(map(lambda x: float(x) - 0.5, slice_timings))
        if reverse:
            slice_order.reverse()
        print("slice_order:", slice_order)
        slice_timings = list(slice_timings[x] for x in slice_order)
        return list((slice_spacing * x) for x in slice_timings)

    def save_nifti(self, subfolder=''):
        """Save self.nim in a protocol-specific subfolder of the study folder.

        Returns the written file path, or None when read_2dseq() did not
        produce an image.
        """
        if not hasattr(self, 'nim'):
            return None
        procfolder = os.path.join(self.procfolder, self.study)
        if not os.path.isdir(procfolder):
            os.mkdir(procfolder)
        # map the protocol name onto the AIDAmri folder layout
        protocol = self.acqp['ACQ_protocol_name']
        if "Localizer" in protocol:
            sub = "Localizer"
        elif "DTI" in protocol or "Diffusion" in protocol:
            sub = "DTI"
        elif "fMRI" in protocol:
            sub = "fMRI"
        elif "Turbo" in protocol:
            sub = "T2w"
        elif "MSME" in protocol:
            sub = "T2map"
        else:
            sub = "Others"
        procfolder = os.path.join(self.procfolder, self.study, sub)
        if not os.path.isdir(procfolder):
            os.mkdir(procfolder)
        ext = {'NIFTI_GZ': 'nii.gz', 'NIFTI': 'nii', 'ANALYZE': 'img'}.get(self.ftype, 'nii.gz')
        fname = '.'.join([self.study, self.expno, self.procno, ext])
        fpath = os.path.join(procfolder, fname)
        print(fpath)
        nib.save(self.nim, fpath)
        return fpath

    def save_table(self, subfolder=''):
        """Write b-table, bvals and bvecs text files for diffusion scans.

        This is called for every scan; it silently returns when the method
        file carries no diffusion parameters (previously a NameError).
        """
        required = ('PVM_DwEffBval', 'PVM_DwAoImages', 'PVM_DwNDiffDir', 'PVM_DwDir')
        if any(label not in self.method for label in required):
            return
        procfolder = os.path.join(self.procfolder, self.study)
        if not os.path.isdir(procfolder):
            os.mkdir(procfolder)
        procfolder = os.path.join(self.procfolder, self.study, subfolder)
        if not os.path.isdir(procfolder):
            os.mkdir(procfolder)

        dw_eff_bval = np.array(list(map(float, self.method['PVM_DwEffBval'].split())), dtype=np.float32)
        dw_ao_images = int(self.method['PVM_DwAoImages'])   # number of A0 (b=0) images
        dw_n_diff_dir = int(self.method['PVM_DwNDiffDir'])  # number of diffusion directions
        dw_dir = np.array(list(map(float, self.method['PVM_DwDir'].split())), dtype=np.float32)
        dw_dir = dw_dir.reshape((dw_n_diff_dir, 3))

        # A0 volumes first (b=0, zero direction), then the weighted volumes
        nd = dw_ao_images + dw_n_diff_dir
        bvals = np.zeros(nd, dtype=np.float32)
        dwdir = np.zeros((nd, 3), dtype=np.float32)
        bvals[dw_ao_images:] = dw_eff_bval[dw_ao_images:]
        dwdir[dw_ao_images:] = dw_dir

        # b-table: one "bval bx by bz" row per volume (Windows line endings,
        # matching the original py2.6 print(..., end="\r\n") behavior)
        fname = '.'.join([self.study, self.expno, self.procno, 'btable', 'txt'])
        print(os.path.join(procfolder, fname))
        with open(os.path.join(procfolder, fname), mode='w') as fid:
            for i in range(nd):
                fid.write("%.4f" % (bvals[i],) + " %.8f %.8f %.8f" % tuple(dwdir[i]) + "\r\n")

        # bvals: all b-values on a single line (Unix line ending)
        fname = '.'.join([self.study, self.expno, self.procno, 'bvals', 'txt'])
        print(os.path.join(procfolder, fname))
        with open(os.path.join(procfolder, fname), mode='w') as fid:
            fid.write(" ".join("%.4f" % (bvals[i],) for i in range(nd)) + "\n")

        # bvecs: one row per coordinate axis (previously the three rows were
        # written without separators and fused into one line)
        fname = '.'.join([self.study, self.expno, self.procno, 'bvecs', 'txt'])
        print(os.path.join(procfolder, fname))
        with open(os.path.join(procfolder, fname), mode='w') as fid:
            for k in range(3):
                fid.write(" ".join("%.8f" % (dwdir[i, k],) for i in range(nd)) + "\n")
if __name__ == "__main__":
    # Command-line driver: convert every numbered scan in the given study folder.
    import argparse

    parser = argparse.ArgumentParser(description='Convert ParaVision to NIfTI')
    requiredNamed = parser.add_argument_group('Required named arguments')
    requiredNamed.add_argument('-i', '--input_folder', help='raw data folder')
    parser.add_argument('-f', '--model',
                        help='T2_2p (default) : Two parameter T2 decay S(t) = S0 * exp(-t/T2)\n'
                             'T2_3p : Three parameter T2 decay S(t) = S0 * exp(-t/T2) + C'
                        , nargs='?', const='T2_2p', type=str, default='T2_2p')
    parser.add_argument('-u', '--upLim', help='upper limit of TE - default: 100', nargs='?', const=100, type=int,
                        default=100)
    parser.add_argument('-s', '--snrLim', help='upper limit of SNR - default: 1.5', nargs='?', const=1.5, type=float,
                        default=1.5)
    parser.add_argument('-k', '--snrMethod', help='Brummer ,Chang, Sijbers', nargs='?', const='Brummer', type=str,
                        default='Brummer')
    parser.add_argument('-m', '--map_raw', action='store_true', help='get the real values')
    parser.add_argument('-p', '--pv6', action='store_true', help='ParaVision 6')
    parser.add_argument('-t', '--table', action='store_true', help='save b-values and diffusion directions')
    args = parser.parse_args()

    # raw data folder
    input_folder = None
    if args.input_folder is not None:
        input_folder = args.input_folder
    if not os.path.isdir(input_folder):
        sys.exit("Error: '%s' is not an existing directory." % (input_folder,))

    listOfDirs = os.listdir(input_folder)
    listOfScans = [s for s in listOfDirs if s.isdigit()]
    # '== 0' instead of the former 'is 0': identity comparison against int
    # literals relies on CPython interning and is a SyntaxWarning on >= 3.8
    if len(listOfScans) == 0:
        sys.exit("Error: '%s' contains no numbered scans." % (input_folder,))
    print('Start to process ' + str(len(listOfScans)) + ' scans...')

    procno = '1'
    study = input_folder.split('/')[len(input_folder.split('/')) - 1]
    print(study)

    for expno in np.sort(listOfScans):
        path = os.path.join(input_folder, expno, 'pdata', procno)
        if not os.path.isdir(path):
            print("Error: '%s' is not an existing directory." % (path,))
            continue
        if not os.path.exists(os.path.join(path, '2dseq')):
            print("The following file does not exist, it will be skipped:")
            print(os.path.join(path, '2dseq'))
            continue

        img = Bruker2Nifti(study, expno, procno, os.path.split(input_folder)[0], input_folder, ftype='NIFTI_GZ')
        img.read_2dseq(map_raw=args.map_raw, pv6=args.pv6)
        resPath = img.save_nifti()
        img.create_slice_timings()
        if resPath is not None:
            # record the coil name next to the converted data; the duplicate
            # rewrite of this log after the loop was removed (it only re-wrote
            # the last scan's identical content)
            pathlog = os.path.join(os.path.dirname(os.path.dirname(resPath)), 'data.log')
            with open(pathlog, 'w') as logfile:
                logfile.write(img.subject['coilname'])

        # b-table / bvals / bvecs (no-op for non-diffusion scans)
        img.save_table()

        if 'VisuAcqEchoTime' in img.visu_pars:
            echoTime = np.fromstring(img.visu_pars['VisuAcqEchoTime'], dtype=float, sep=' ')
            # multi-echo scans (more than 3 TEs) additionally get a T2 map
            if len(echoTime) > 3:
                mapT2.getT2mapping(resPath, args.model, args.upLim, args.snrLim, args.snrMethod, echoTime)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/PV2NIfTiConverter/ReferenceMethods/__init__.py | .py | 0 | 0 | null | Python |
3D | Aswendt-Lab/AIDAmri | bin/PV2NIfTiConverter/ReferenceMethods/changSNR.py | .py | 1,961 | 83 | """ Changs's method
{chang2005automatic,
title={An automatic method for estimating noise-induced signal variance in magnitude-reconstructed magnetic resonance images},
author={Chang, Lin-Ching and Rohde, Gustavo K and Pierpaoli, Carlo},
booktitle={Medical Imaging},
pages={1136--1142},
year={2005},
organization={International Society for Optics and Photonics}
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
from math import *
import numpy as np
import matplotlib.pyplot as plt
def calcSNR(img,show,fac):
# Normalize input dataset and plot histogram
#img = np.fliplr(img)
img = img.astype(int)
maxi = img.max()
imgFlat = img.flatten(2)
imgNorm= imgFlat/maxi
bins = ceil(sqrt(imgNorm.size))*fac
binCount, binLoc = np.histogram(imgNorm, int(bins))
n = len(imgNorm)
estStd = np.argmax(binCount)
estStd = (estStd)/binCount.shape
x = np.linspace(0, 1, bins)
fhat = np.zeros([1,len(x)])
h = 1.06 * n**(-1/5) * estStd
# define function
gauss = lambda x: gaussianFct(x)
for i in range(n):
# get each kernel function evaluated at x
# centered at data
f = gauss((x-imgNorm[i])/h)
# plot(x, f / (n * h))
fhat = fhat+f
fhat = fhat/(n*h)
# SNR-Map
maxPos = np.argmax(fhat)
estStdNorm = binLoc[maxPos]
estStd = (binLoc[maxPos]*maxi)/10
snrMap = np.sqrt(abs(np.square(img) - (np.square(estStd)))) / estStd
if show > 0:
if len(img.shape) == 2:
figChang = plt.figure(3)
plt.imshow(snrMap)
plt.show()
elif len(img.shape) == 3:
figChang = plt.figure(3)
plt.imshow(snrMap[:, :, int(np.ceil(len(img.shape) / 2))])
plt.show()
return snrMap, estStd, estStdNorm
def gaussianFct(x):
    """Standard normal probability density, evaluated element-wise at x."""
    return np.exp(-np.square(x) / 2) / sqrt(2 * pi)
3D | Aswendt-Lab/AIDAmri | bin/PV2NIfTiConverter/ReferenceMethods/brummerSNR.py | .py | 1,775 | 71 | """ Brummer's Method
brummer1993automatic,
title={Automatic detection of brain contours in MRI data sets},
author={Brummer, Marijn E and Mersereau, Russell M and Eisner, Robert L and Lewine, Richard RJ},
journal={IEEE Transactions on medical imaging},
volume={12},
number={2},
pages={153--166},
year={1993},
publisher={IEEE}
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
from math import *
import numpy as np
import matplotlib.pyplot as plt
import scipy.optimize, scipy.signal
def calcSNR(img, show, fac):
    """Estimate the noise std via a least-squares fit of a scaled Rayleigh pdf
    to the low-intensity part of the histogram (Brummer's method).

    img  -- 2D or 3D image array (cast to float)
    show -- > 0 displays the resulting SNR map with matplotlib
    fac  -- multiplier for the number of histogram bins

    Returns (snrMap, estStd, estStdNorm).
    """
    img = img.astype(float)
    peak = img.max()
    normalized = img.flatten() / peak

    # histogram of the normalized intensities
    nBins = ceil(sqrt(normalized.size)) * fac
    counts, edges = np.histogram(normalized, int(nBins))

    # histogram mode: amplitude and location seed the Rayleigh fit;
    # the fit is restricted to bins below twice the mode
    modeCount = max(counts)
    modeIdx = np.argmax(counts)
    cutOff = 2 * modeIdx
    sigma0 = modeIdx / len(counts)

    objective = lambda p: rayl_2p(p, edges[0:cutOff - 1], counts[0:cutOff - 1])
    fitted = scipy.optimize.fmin(func=objective, x0=[modeCount, sigma0], disp=False)

    estStdNorm = fitted[1]
    estStd = (fitted[1] * peak) / 10
    snrMap = np.sqrt(abs(np.square(img) - (np.square(estStd)))) / estStd

    if show > 0:
        if len(img.shape) == 2:
            plt.figure(3)
            plt.imshow(snrMap)
            plt.show()
        elif len(img.shape) == 3:
            plt.figure(3)
            plt.imshow(snrMap[:, :, int(np.ceil(len(img.shape) / 2))])
            plt.show()

    return snrMap, estStd, estStdNorm
def rayl_2p(fitPar, x, data):
    """Squared error between a scaled Rayleigh pdf and observed counts.

    fitPar -- (amplitude, sigma) of the Rayleigh model
    x      -- evaluation points (histogram bin locations)
    data   -- observed histogram counts
    """
    sigma2 = fitPar[1] ** 2
    model = x / sigma2 * np.exp(-np.square(x) / (2 * sigma2))
    residual = fitPar[0] * model - data
    return np.sum(residual ** 2)
3D | Aswendt-Lab/AIDAmri | bin/PV2NIfTiConverter/ReferenceMethods/sijbersSNR.py | .py | 1,872 | 75 | """
Sijbers's method
sijbers2007automatic,
title={Automatic estimation of the noise variance from the histogram of a magnetic resonance image},
author={Sijbers, Jan and Poot, Dirk and den Dekker, Arnold J and Pintjens, Wouter},
journal={Physics in medicine and biology},
volume={52},
number={5},
pages={1335},
year={2007},
publisher={IOP Publishing}
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
from math import *
import numpy as np
import matplotlib.pyplot as plt
import scipy.optimize
def calcSNR(img, show, fac):
    """Estimate the noise std via maximum-likelihood fitting of the Rayleigh
    noise background (Sijbers' method).

    img  -- 2D or 3D image array (cast to int)
    show -- > 0 displays the resulting SNR map with matplotlib
    fac  -- multiplier for the number of histogram bins

    Returns (snrMap, estStd, estStdNorm). Note estStdNorm/estStd carry the raw
    fmin output (a length-1 array), matching the original behavior.

    Fixes vs. the original: 'flatten(2)' is rejected by modern NumPy (replaced
    by an explicit Fortran-order flatten -- the order is irrelevant for the
    histogram); the bin count passed on to maxLikelihood is int-cast so the
    slices there cannot fail for a float 'fac'.
    """
    img = img.astype(int)
    maxi = img.max()
    imgFlat = img.flatten(order='F')
    imgNorm = imgFlat / maxi

    bins = ceil(sqrt(imgNorm.size)) * fac
    binCount, binLoc = np.histogram(imgNorm, int(bins))

    # histogram mode seeds the fit; the likelihood uses only intensities below
    # twice the mode location (NOTE(review): assumes 2*mode stays in range)
    estStd = np.argmax(binCount)
    fc = binLoc[2 * estStd]
    [n, l] = np.histogram(imgNorm[imgNorm <= fc], int(bins))
    Nk = np.sum(n)  # number of sub-threshold samples
    K = int(bins)   # number of bins (int so slicing in maxLikelihood works)

    mlfunc = lambda x: maxLikelihood(x, Nk, K, l, n)
    sigma0 = binLoc[estStd]
    out = scipy.optimize.fmin(func=mlfunc, x0=sigma0, disp=False)

    estStdNorm = out
    estStd = (out * maxi) / 10
    snrMap = np.sqrt(abs(np.square(img) - (np.square(estStd)))) / estStd

    if show > 0:
        if len(img.shape) == 2:
            plt.figure(3)
            plt.imshow(snrMap)
            plt.show()
        elif len(img.shape) == 3:
            plt.figure(3)
            plt.imshow(snrMap[:, :, int(np.ceil(len(img.shape) / 2))])
            plt.show()

    return snrMap, estStd, estStdNorm
def maxLikelihood(x, Nk, K, l, n):
    """Negative log-likelihood of Rayleigh noise std 'x' for histogram counts
    'n' with bin edges 'l' over K bins; minimized by scipy.optimize.fmin."""
    twoVar = 2 * x ** 2
    edgeTerm = np.exp(-l[0] ** 2 / twoVar) - np.exp(-l[K] ** 2 / twoVar)
    binTerms = np.exp(-l[0:K - 1] ** 2 / twoVar) - np.exp(-l[1:K] ** 2 / twoVar)
    return Nk * np.log(edgeTerm) - np.sum(n[1:K] * np.log(binTerms))
3D | Aswendt-Lab/AIDAmri | bin/PV2NIfTiConverter/ReferenceMethods/getSNR.py | .py | 2,348 | 89 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import os, sys
import changSNR as ch
import brummerSNR as bm
import sijbersSNR as sj
import numpy as np
import glob
import nibabel as nii
def snrCalclualtor(input_file, method):
    """Compute a slice-wise noise estimate for a NIfTI volume and write the
    overall SNR (in dB) to 'snr.txt' next to the input file.

    input_file -- path to a NIfTI file
    method     -- 1: Chang, 2: Brummer, 3: Sijbers (anything else aborts)

    Fixes vs. the original: the output file is no longer opened before the
    computation (it leaked on every error path, including the sys.exit for an
    unknown method) and the builtin name 'slice' is no longer shadowed.
    (Function name typo kept for backward compatibility with callers.)
    """
    data = nii.load(input_file)
    # NOTE(review): get_data() is deprecated/removed in recent nibabel --
    # behavior kept here; consider get_fdata() when upgrading.
    imgData = data.get_data()
    ns = imgData.shape[2]  # number of slices; assumes a >= 3D volume
    noiseSNR = np.zeros(ns)
    imgData = np.ndarray.astype(imgData, 'float64')

    estimators = {1: ch.calcSNR, 2: bm.calcSNR, 3: sj.calcSNR}
    if method not in estimators:
        sys.exit(
            "Error: '%i' is not an existing choice for a SNR method!" % (method,))

    for slc in range(ns):
        # Print % of progress
        print('Slice: ' + str(slc + 1))
        # noise estimate for the selected slice
        curSlice = imgData[:, :, slc]
        curSnr, estStd, estStdNorm = estimators[method](curSlice, 0, 1)
        noiseSNR[slc] = estStd

    snr = 20 * np.log10(np.mean(imgData) / np.mean(noiseSNR))
    # write the result only after the computation succeeded; the context
    # manager guarantees the handle is closed
    with open(os.path.join(os.path.dirname(input_file), 'snr.txt'), 'w') as fileSNR:
        fileSNR.write("SNR [dB]: %0.3f \n" % snr)
def findRegisteredData(path):
    """Collect all '*1.nii.gz' files inside T2w subfolders below 'path'."""
    pattern = path + '*/T2w/*1.nii.gz'
    return [match for match in glob.iglob(pattern, recursive=True)]
if __name__ == "__main__":
    # Command-line driver: compute SNR for all registered T2w scans.
    import argparse

    parser = argparse.ArgumentParser(description='Calculates SNR and generates snr.txt in T2w files')
    parser.add_argument('-p', '--pathData', help='src path to all processed files')
    parser.add_argument('-f', '--filePrefix', help='file prefix in src path')
    parser.add_argument('-m', '--SNRmethod', help='1: Brummer(default) 2:Chang 3:Sijbers', nargs='?', type=int,
                        default=1)
    args = parser.parse_args()

    # fail early with a clear message instead of a TypeError on 'None + str'
    if args.pathData is None or args.filePrefix is None:
        sys.exit("Error: both --pathData and --filePrefix are required.")

    pathData = args.pathData + '/' + args.filePrefix
    listMr = findRegisteredData(pathData)
    method = args.SNRmethod
    for scanFile in listMr:
        print(scanFile)
        snrCalclualtor(scanFile, method)
| Python |
3D | Aswendt-Lab/AIDAmri | bin/PV2NIfTiConverter/Alternative_pv_reader/pv_parser.py | .py | 13,157 | 417 | '''
Created on 20.08.2020
Author:
Michael Diedenhofen
Max Planck Institute for Metabolism Research, Cologne
Read Bruker ParaVision JCAMP parameter files (e.g. acqp, method, visu_pars).
'''
from __future__ import print_function
VERSION = 'pv_parser.py v 1.0.2 20200820'
import re
import sys
import collections
import numpy as np
def strfind(string, sub):
    """Return the start indices of all non-overlapping occurrences of 'sub' in
    'string' (MATLAB strfind analogue). An empty or too-long 'sub' yields []."""
    positions = []
    width = len(sub)
    if 0 < width <= len(string):
        idx = string.find(sub)
        while idx >= 0:
            positions.append(idx)
            idx = string.find(sub, idx + width)
    return positions
def strtok(string, delimiters=None):
    """MATLAB-like strtok: return (first token, remainder).

    Leading delimiters are skipped; the remainder starts with the delimiter
    that terminated the token. A string of only delimiters (or an empty
    string) yields ('', ''). Default delimiters are whitespace characters.
    """
    if delimiters is None:  # whitespace characters
        delimiters = [chr(c) for c in list(range(9, 14)) + [32]]
    length = len(string)
    pos = 0
    while pos < length and string[pos] in delimiters:
        pos += 1
    if pos >= length:
        return ('', '')
    begin = pos
    while pos < length and string[pos] not in delimiters:
        pos += 1
    return (string[begin:pos], string[pos:length])
def extract_jcamp_strings(string, get_all=True):
    """Extract the contents of <...> JCAMP strings.

    get_all=True  -> list of all matches (possibly empty)
    get_all=False -> first match, or None when there is none
    A None input passes through as None.
    """
    if string is None:
        return None
    if get_all:
        return re.findall(r'<(.*?)>', string)
    match = re.search(r'<(.*?)>', string)
    return None if match is None else match.group(1)
def extract_unit_string(string):
    """Return the text inside the first [...] pair, the whole string when no
    bracket pair exists, or None for a None input."""
    if string is None:
        return None
    match = re.search(r'\[(.*?)\]', string)
    return string if match is None else match.group(1)
def replace_jcamp_strings(string):
    """Mask every <...> JCAMP string with a placeholder <#i>.

    Returns (masked string, list of the replaced <...> substrings). An
    unterminated '<' is copied through verbatim.
    """
    parts = []
    replaced = []
    cursor = 0
    counter = 0
    while True:
        lt = string.find('<', cursor)
        if lt < 0:
            parts.append(string[cursor:])
            break
        parts.append(string[cursor:lt])
        gt = string.find('>', lt + 1)
        if gt < 0:
            parts.append(string[lt:])
            break
        cursor = gt + 1
        parts.append('<#%d>' % counter)
        replaced.append(string[lt:cursor])
        counter += 1
    return (''.join(parts), replaced)
def check_struct_list(values, str_list):
    """Convert a list of strings to all-int or all-float values if possible;
    otherwise restore <#i> placeholders (from replace_jcamp_strings) in place.

    Returns (converted-or-restored list, number of placeholders restored);
    numeric conversion always reports 0 restored. Aborts if one value carries
    more than one placeholder.
    """
    all_int = True
    all_float = True
    for entry in values:
        if all_int:
            try:
                int(entry)
                continue
            except ValueError:
                all_int = False
        try:
            float(entry)
        except ValueError:
            all_float = False
            break
    if all_int:
        return ([int(v) for v in values], 0)
    if all_float:
        return ([float(v) for v in values], 0)
    # Restore JCAMP strings masked earlier; stop once all are placed back.
    remaining = len(str_list)
    if remaining > 0:
        for idx, entry in enumerate(values):
            ids = re.findall(r'<#(.*?)>', entry)
            if len(ids) == 1:
                values[idx] = str_list[int(ids[0])]
                remaining -= 1
                if remaining == 0:
                    break
            elif len(ids) > 1:
                sys.exit("Found more than one ID string in a value: %s" % (entry,))
    return (values, len(str_list) - remaining)
def create_struct_list(string, str_list, restored):
    """Split one struct body at commas, convert each item with
    check_struct_list(), and accumulate the restored-placeholder count.

    Returns (list of converted items, updated 'restored' count); single-value
    items are unwrapped from their list.
    """
    if len(string) < 1:
        return ([], restored)
    # Split one struct into its comma-separated parts (tolerating blanks).
    items = re.split(r'(?:^ +| *),(?: *| +$)', string)
    for position, item in enumerate(items):
        parts, hits = check_struct_list(item.split(' '), str_list)
        items[position] = parts[0] if len(parts) == 1 else parts
        restored += hits
    return (items, restored)
def push_list(level, obj_list, obj):
    """Append 'obj' to the list nested 'level' levels deep inside 'obj_list',
    always descending along the last element of each level."""
    target = obj_list
    for _ in range(level):
        target = target[-1]
    target.append(obj)
def parse_struct(string, str_list):
    """Parse a JCAMP struct / nested-struct string into nested Python lists.

    string   -- struct text whose <...> strings were masked beforehand by
                replace_jcamp_strings()
    str_list -- the masked strings, restored via check_struct_list()

    Returns (nested list structure, number of restored JCAMP strings).
    The scan walks the text between parentheses; 'level' tracks the current
    nesting depth, and push_list() places items at that depth.
    """
    level = 0
    restored = 0
    obj_list = []
    pos_start = string.find('(')
    if pos_start < 0:
        return (obj_list, restored)
    # pos_left points just past the last consumed parenthesis; start_left
    # remembers whether that parenthesis opened ('(') a new nesting level.
    pos_left, start_left = (pos_start + 1, True)
    pos_start = string.find('(', pos_left)
    pos_stop = string.find(')', pos_left)
    while True:
        # Choose the next parenthesis to the right; start_right says whether
        # it is an opening one. When none is left, consume up to the end.
        if (pos_start >= pos_left) and (pos_stop >= pos_left):
            pos_right, start_right = (pos_start, True) if pos_start < pos_stop else (pos_stop, False)
        elif pos_start >= pos_left:
            pos_right, start_right = (pos_start, True)
        elif pos_stop >= pos_left:
            pos_right, start_right = (pos_stop, False)
        else:
            pos_right, start_right = (len(string), False)
        # Text between the two parentheses, with stray commas/blanks trimmed.
        sub = string[pos_left:pos_right].strip(' ')
        if sub.startswith(','):
            sub = sub[1:].lstrip(' ')
        if sub.endswith(','):
            sub = sub[:-1].rstrip(' ')
        #print("sub:%d:%s:" % (len(sub), sub))
        items, restored = create_struct_list(sub, str_list, restored)
        if start_left:
            # Items that follow an '(' form a new sub-list at this depth.
            push_list(level, obj_list, items)
            if start_right:
                level += 1
        else:
            # Items that follow a ')' are appended one by one at this depth.
            for item in items:
                push_list(level, obj_list, item)
            if not start_right:
                level -= 1
        if pos_right >= len(string):
            break
        pos_left, start_left = (pos_right + 1, start_right)
        # Only re-scan for the parenthesis kind that was just consumed.
        if start_left:
            pos_start = string.find('(', pos_left)
        else:
            pos_stop = string.find(')', pos_left)
    return (obj_list, restored)
def check_array_list(values):
    """Convert a list of strings to the most specific NumPy array: int32 when
    all entries parse as ints, float64 when all parse as floats, otherwise an
    object array of the original strings."""
    is_int = True
    is_float = True
    for text in values:
        if is_int:
            try:
                int(text)
                continue
            except ValueError:
                is_int = False
        try:
            float(text)
        except ValueError:
            is_float = False
            break
    if is_int:
        return np.array(values, dtype=np.int32)
    if is_float:
        return np.array(values, dtype=np.float64)
    return np.array(values, dtype=object)
def get_array_values(label, sizes, data):
    """Convert the raw data string of one array-valued LDR into Python values.

    label -- parameter name (used only in diagnostics)
    sizes -- declared dimensions from the '( n, m, ... )' prefix
    data  -- concatenated value text following the LDR header

    Three shapes are recognized: strings in <...>, structs in (...), and plain
    (usually numeric) arrays. Returns a NumPy array, nested lists (structs),
    or a single string.
    """
    # Removing whitespaces at the edge of strings
    #data = data.replace('< ', '<')
    #data = data.replace(' >', '>')
    if data.startswith('<'): # Checking if array is a single string or an array of strings ...
        #data = data.replace('> <', '><')
        #values = re.findall(r'<(.*?)>', data)
        values = re.findall(r'<.*?>', data)
        if len(sizes) > 1:
            # multi-dimensional string arrays keep the leading dimensions;
            # the last declared dimension is the string length itself
            values = np.array(values, dtype=object)
            if np.prod(sizes[:-1]) == values.size:
                values = values.reshape(sizes[:-1])
        elif len(values) == 1:
            # a single <...> entry collapses to a plain string
            values = values[0]
    elif data.startswith('('): # ... or a struct or an array of structs ...
        if len(sizes) > 1:
            print("Warning: The sizes dimension is greater than 1 for the %s array of structs." % (label,), file=sys.stderr)
        # mask the <...> strings so parentheses inside them cannot confuse the
        # struct parser, then restore them afterwards
        data, str_list = replace_jcamp_strings(data)
        values, restored = parse_struct(data, str_list)
        if len(str_list) != restored:
            print("%s:" % (label,), values)
            sys.exit("Not all replaced JCAMP strings are restored (%d of %d)." % (restored, len(str_list)))
    else: # ... or a simple array (most frequently numeric)
        values = re.findall(r'[^\s]+', data)
        #values = data.split()
        values = np.reshape(check_array_list(values), sizes)
    return values
def read_param_file(filename):
    """Read a Bruker ParaVision JCAMP parameter file (acqp, method, visu_pars).

    Returns (header, params) as two OrderedDicts: 'header' holds the ##TITLE
    style preamble plus recognized $$ comments (Path/Process/Date), 'params'
    maps each ##$LABEL to its parsed value (scalar, string, NumPy array, or
    nested struct lists via get_array_values()).

    Exits via sys.exit() on a missing file, an unparseable header, an inline
    '$$' comment inside an LDR, or a missing ##END tag.
    """
    # Open parameter file
    try:
        fid = open(filename, 'r')
    except IOError as V:
        if V.errno == 2:
            sys.exit("Cannot open parameter file %s" % (filename,))
        else:
            raise
    # Generate header information
    header = collections.OrderedDict()
    weekdays = ('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun')
    line = ''
    # The preamble ends at the first '##$' line; that line is carried over
    # into the parameter loop below via 'line'.
    for index, line in enumerate(fid):
        line = line.lstrip(' \t').rstrip('\r\n')
        if line.startswith('##$'):
            break
        #print("line:%d:%s:" % (len(line), line))
        if line.startswith('##'): # It's a variable with ##
            # Retrieve the Labeled Data Record
            label, value = strtok(line, delimiters='=')
            label = strtok(label, delimiters='#')[0].strip()
            value = strtok(value, delimiters='=')[0].strip()
            # Save value without $
            #value = strtok(value, delimiters='$')[0].strip()
            header[label] = value
        elif line.startswith('$$'): # It's a comment
            comment = strtok(line, delimiters='$')[0].strip()
            if comment.startswith('/'):
                header['Path'] = comment
            elif comment.startswith('process'):
                header['Process'] = comment[8:]
            else:
                # Heuristic date detection: weekday prefix or 'yyyy-mm-...'
                pos = strfind(comment[:10], '-')
                if (comment[:3] in weekdays) or ((comment[:2] in ('19', '20')) and (len(pos) == 2)):
                    header['Date'] = comment
                else:
                    header['Header' + str(index + 1)] = comment
    # Check if using a supported version of JCAMP file format
    if 'JCAMPDX' in header:
        version = float(header['JCAMPDX'])
    elif 'JCAMP-DX' in header:
        version = float(header['JCAMP-DX'])
    else:
        sys.exit("The file header is not correct.")
    if (version != 4.24) and (version != 5):
        print("Warning: JCAMP version %s is not supported (%s)." % (version, filename), file=sys.stderr)
    params = collections.OrderedDict()
    # Loop for reading parameters; each iteration consumes one LDR plus its
    # continuation lines (read ahead until the next '##' line).
    while line.lstrip(' \t').startswith('##'):
        result = re.search(r'##(.*)=(.*)', line)
        result = [] if result is None else list(result.groups())
        # Checking if label present and removing proprietary tag
        try:
            label = result[0]
        except:
            label = None
        else:
            if label.startswith('$'):
                label = label[1:]
        #print("label:%d:%s:" % (len(label), label))
        # Checking if value present otherwise value is set to empty string
        try:
            value = result[1]
        except:
            value = ''
        #print("value:%d:%s:" % (len(value), value))
        flag_comment = True if '$$' in line else False
        line = ''
        data = []
        # Collect the continuation lines of this LDR.
        for line in fid:
            if line.lstrip(' \t').startswith('##'):
                break
            if not line.lstrip(' \t').startswith('$$'): # Skip comment line
                if (not flag_comment) and ('$$' in line):
                    flag_comment = True
                #data.append(line.rstrip('\\\r\n'))
                data.append(line.rstrip('\r\n'))
                #print("line:%d:%s:" % (len(data[-1]), data[-1]))
        # Create data string
        data = ''.join(data)
        #print("data:%d:%s:" % (len(data), data))
        if flag_comment:
            sys.exit("Found JCAMP comment ('$$') in LDR %s." % (label,))
        # Checking for END tag
        if (label is None) or (label == 'END'):
            break
        # Checking if value is a string or an array, a struct or a single value
        if value.startswith('( <'):
            print("Warning: The parsing of the LDR %s failed." % (label,), file=sys.stderr)
        elif value.startswith('( '): # A single string, an array of strings or structs or a simple array
            sizes = [int(x) for x in value.strip('( )').split(',')]
            params[label] = get_array_values(label, sizes, data)
        elif value.startswith('('): # A struct
            data = ''.join([value, data])
            params[label] = get_array_values(label, [1], data)[0]
        else: # A single value
            try:
                params[label] = int(value)
            except ValueError:
                try:
                    params[label] = float(value)
                except ValueError:
                    params[label] = value
    fid.close()
    # NOTE(review): 'label' would be unbound here if the file contained no
    # '##$' line at all -- TODO confirm input files always carry one.
    if label != 'END':
        sys.exit("Unexpected end of file: Missing END Statement")
    return (header, params)
def main():
    """CLI entry point: parse one ParaVision parameter file and print it."""
    import argparse

    parser = argparse.ArgumentParser(description='Read ParaVision parameter file')
    parser.add_argument('filename', help='ParaVision parameter file (acqp, method, visu_pars)')
    args = parser.parse_args()

    # read parameter file and dump header first, then all parameters
    header, params = read_param_file(args.filename)
    for key, val in header.items():
        print("%s: %s" % (key, val))
    for key, val in params.items():
        if isinstance(val, np.ndarray):
            print("%s:" % (key,))
            print(val)
        else:
            print("%s: %s" % (key, val))
# Allow running this parser as a standalone command-line tool.
if __name__ == '__main__':
    main()
| Python |
3D | Aswendt-Lab/AIDAmri | bin/PV2NIfTiConverter/Alternative_pv_reader/pv_reader.py | .py | 19,580 | 491 | '''
Created on 19.10.2020
Author:
Michael Diedenhofen
Max Planck Institute for Metabolism Research, Cologne
Read Bruker ParaVision data (2dseq) and save as NIfTI file.
Create a b-table text file with b-values and directions for diffusion data.
'''
from __future__ import print_function
# Python 2/3 compatibility: use the lazy 'xrange' when it exists (Python 2),
# otherwise fall back to the built-in 'range' (Python 3).
try:
    zrange = xrange
except NameError:
    zrange = range
VERSION = 'pv_reader.py v 1.1.2 20201019'
import os
import sys
import numpy as np
import nibabel as nib
import nibabel.nifti1 as nii
import pv_parser as par
class ParaVision:
"""
Read ParaVision data and save as NIfTI file
"""
def __init__(self, procfolder, rawfolder, study, expno, procno):
self.procfolder = procfolder
self.rawfolder = rawfolder
self.study = study
self.expno = int(expno)
self.procno = int(procno)
self.name = '.'.join([study, str(expno), str(procno)])
def __check_params(self, params_name, labels):
misses = [label for label in labels if label not in getattr(self, params_name)]
if len(misses) > 0:
sys.exit("Missing labels in %s: %s" % (params_name, str(misses),))
def __check_path(self, header_path):
header_path = header_path.split('/')
study, expno, procno = (header_path[-5], int(header_path[-4]), int(header_path[-2]))
if self.study != study:
print("Warning: Study '%s' differs from '%s' in the visu_pars header." % (self.study, study), file=sys.stderr)
if self.expno != expno:
print("Warning: Experiment number %s differs from %s in the visu_pars header." % (self.expno, expno), file=sys.stderr)
if self.procno != procno:
print("Warning: Processed images number %s differs from %s in the visu_pars header." % (self.procno, procno), file=sys.stderr)
    def __get_data_dims(self):
        """Derive the data layout of the 2dseq file from visu_pars.

        Returns (data_dims, data_type, dim_desc, fg_index, fg_slice):
        data_dims -- core image sizes plus FrameGroup dimensions
        data_type -- NumPy dtype name mapped from VisuCoreWordType
        dim_desc  -- first VisuCoreDimDesc entry, or None
        fg_index  -- index of the slice FrameGroup inside data_dims (2D only)
        fg_slice  -- the matched FrameGroup name, or None
        """
        labels_visu_pars = ['VisuCoreDim', 'VisuCoreSize', 'VisuCoreWordType', 'VisuCoreByteOrder']
        self.__check_params('visu_pars', labels_visu_pars)
        #VisuCoreFrameCount = self.visu_pars.get('VisuCoreFrameCount') # Number of frames
        VisuCoreDim = self.visu_pars.get('VisuCoreDim')
        VisuCoreSize = self.visu_pars.get('VisuCoreSize')
        VisuCoreDimDesc = self.visu_pars.get('VisuCoreDimDesc')
        VisuCoreWordType = self.visu_pars.get('VisuCoreWordType')
        #VisuCoreByteOrder = self.visu_pars.get('VisuCoreByteOrder')
        #VisuFGOrderDescDim = self.visu_pars.get('VisuFGOrderDescDim')
        VisuFGOrderDesc = self.visu_pars.get('VisuFGOrderDesc')
        dim_desc = None if VisuCoreDimDesc is None else VisuCoreDimDesc[0]
        # FrameGroup dimensions and names
        if (VisuFGOrderDesc is not None) and len(VisuFGOrderDesc) > 0:
            #fg_dims = list(map(lambda item: int(item[0]), VisuFGOrderDesc))
            #fg_names = list(map(lambda item: str(item[1]), VisuFGOrderDesc))
            fg_dims = [int(item[0]) for item in VisuFGOrderDesc]
            fg_names = [str(item[1]) for item in VisuFGOrderDesc]
            # names arrive as '<FG_...>' JCAMP strings; strip the brackets
            fg_names = [par.extract_jcamp_strings(item, get_all=False) for item in fg_names]
        else:
            fg_dims = []
            fg_names = []
        # Data dimensions
        data_dims = list(map(int, VisuCoreSize)) + fg_dims
        # FrameGroup FG_SLICE index
        fg_index, fg_slice = (None, None)
        if VisuCoreDim == 2:
            fg_slices = ('FG_SLICE', 'FG_IRMODE')
            fg_indices = [fg_names.index(x) for x in fg_slices if x in fg_names]
            if len(fg_indices) > 0:
                fg_index = fg_indices[0]
                # NOTE(review): fg_index is a position in fg_names but is used
                # to index fg_slices here -- looks suspicious when the slice
                # FrameGroup is not the first entry; TODO confirm intent.
                fg_slice = fg_slices[fg_index]
                fg_index += VisuCoreDim
        # ParaVision to NumPy data-type conversion
        if VisuCoreWordType == '_8BIT_UNSGN_INT':
            data_type = 'uint8'
        elif VisuCoreWordType == '_16BIT_SGN_INT':
            data_type = 'int16'
        elif VisuCoreWordType == '_32BIT_SGN_INT':
            data_type = 'int32'
        elif VisuCoreWordType == '_32BIT_FLOAT':
            data_type = 'float32'
        else:
            sys.exit("The data format is not correct specified.")
        return (data_dims, data_type, dim_desc, fg_index, fg_slice)
def __get_voxel_dims(self, data_dims, scale=1.0):
labels_visu_pars = ['VisuCoreExtent']
self.__check_params('visu_pars', labels_visu_pars)
ACQ_slice_sepn = self.acqp.get('ACQ_slice_sepn')
#PVM_SPackArrSliceGap = self.method.get('PVM_SPackArrSliceGap')
PVM_SPackArrSliceDistance = self.method.get('PVM_SPackArrSliceDistance')
VisuCoreExtent = self.visu_pars.get('VisuCoreExtent')
VisuCoreFrameThickness = self.visu_pars.get('VisuCoreFrameThickness')
VisuCoreUnits = self.visu_pars.get('VisuCoreUnits')
VisuCoreSlicePacksSliceDist = self.visu_pars.get('VisuCoreSlicePacksSliceDist')
VisuAcqRepetitionTime = self.visu_pars.get('VisuAcqRepetitionTime')
nd = min(len(data_dims), 4)
dims = [1] * 4
dims[:nd] = data_dims
nx, ny, nz, nt = dims
# Voxel dimensions
if len(VisuCoreExtent) > 1:
dx = scale * float(VisuCoreExtent[0]) / nx
dy = scale * float(VisuCoreExtent[1]) / ny
else:
dx = 1.0
dy = 0.0
if len(VisuCoreExtent) > 2:
dz = scale * float(VisuCoreExtent[2]) / nz
elif ACQ_slice_sepn is not None: # Slice thickness inclusive gap
dz = scale * float(ACQ_slice_sepn[0])
elif PVM_SPackArrSliceDistance is not None: # Slice thickness inclusive gap
dz = scale * float(PVM_SPackArrSliceDistance[0])
elif VisuCoreSlicePacksSliceDist is not None: # Slice thickness inclusive gap (PV6)
dz = scale * float(VisuCoreSlicePacksSliceDist[0])
elif VisuCoreFrameThickness is not None: # Slice thickness
dz = scale * float(VisuCoreFrameThickness[0])
else:
dz = 0.0
if (VisuAcqRepetitionTime is not None) and (nt > 1):
dt = float(VisuAcqRepetitionTime[0]) / 1000.0
else:
dt = 0.0
#voxel_dims = [dx, dy, dz, dt][:nd]
voxel_dims = [dx, dy, dz, dt]
# Units of the voxel dimensions
voxel_unit = par.extract_unit_string(par.extract_jcamp_strings(VisuCoreUnits[0], get_all=False))
return (voxel_dims, voxel_unit)
def __map_data(self, data, map_pv6):
VisuCoreExtent = self.visu_pars.get('VisuCoreExtent')
VisuCoreDataOffs = self.visu_pars.get('VisuCoreDataOffs')
VisuCoreDataSlope = self.visu_pars.get('VisuCoreDataSlope')
n = min(len(VisuCoreExtent), 3)
dims = data.shape[n:]
if VisuCoreDataOffs.size > 1:
VisuCoreDataOffs = VisuCoreDataOffs.reshape(dims, order='F').astype(np.float32)
else:
VisuCoreDataOffs = np.float32(VisuCoreDataOffs[0])
if VisuCoreDataSlope.size > 1:
VisuCoreDataSlope = VisuCoreDataSlope.reshape(dims, order='F').astype(np.float32)
else:
VisuCoreDataSlope = np.float32(VisuCoreDataSlope[0])
if map_pv6:
data = data.astype(np.float32) / VisuCoreDataSlope
data = data + VisuCoreDataOffs
else:
data = data.astype(np.float32) * VisuCoreDataSlope
data = data + VisuCoreDataOffs
return (data, 'float32')
def __make_subfolder(self, subfolder=''):
procfolder = os.path.join(self.procfolder, self.study)
if not os.path.isdir(procfolder):
os.mkdir(procfolder)
if len(subfolder) > 0:
procfolder = os.path.join(self.procfolder, self.study, subfolder)
if not os.path.isdir(procfolder):
os.mkdir(procfolder)
return procfolder
def __get_matrix(self):
VisuCoreOrientation = self.visu_pars.get('VisuCoreOrientation')
VisuCoreOrientation = VisuCoreOrientation.flatten()[:9].astype(np.float32)
VisuCoreOrientation = VisuCoreOrientation.reshape((3, 3), order='F')
VisuCorePosition = self.visu_pars.get('VisuCorePosition')
VisuCorePosition = VisuCorePosition.flatten()[:3].astype(np.float32)
matrix = np.zeros((4, 4), dtype=np.float32)
matrix[:3, :3] = VisuCoreOrientation
matrix[:3, 3] = self.scale * VisuCorePosition
matrix[3, 3] = 1
return matrix
def __save_matrix(self, matrix, procfolder, ext='mat'):
lines = '\n'.join((' '.join('%.12g' % (x,) for x in matrix[y]) + ' ') for y in range(matrix.shape[0]))
fname = '.'.join([self.name, ext])
fpath = os.path.join(procfolder, fname)
print(fpath)
# Open text file to write binary (Unix format)
fid = open(fpath, 'wb')
# Write text file
for line in lines.splitlines():
print(line, end=chr(10), file=fid)
# Close text file
fid.close()
def read_2dseq(self, map_raw=False, map_pv6=False, roll_fg=False, squeeze=False, compact=False, swap_vd=False, scale=1.0):
self.scale = float(scale)
# Get acqp and method parameters
datadir = os.path.join(self.rawfolder, self.study, str(self.expno))
_header, self.acqp = par.read_param_file(os.path.join(datadir, 'acqp'))
_header, self.method = par.read_param_file(os.path.join(datadir, 'method'))
# Get visu_pars parameters
datadir = os.path.join(self.rawfolder, self.study, str(self.expno), 'pdata', str(self.procno))
#_header, self.d3proc = par.read_param_file(os.path.join(datadir, 'd3proc')) # Removed for PV6
header, self.visu_pars = par.read_param_file(os.path.join(datadir, 'visu_pars'))
self.__check_path(header['Path'])
# Remove selected parameters from the visu_pars dictionary
#if 'VisuCoreDataMin' in self.visu_pars: del self.visu_pars['VisuCoreDataMin']
#if 'VisuCoreDataMax' in self.visu_pars: del self.visu_pars['VisuCoreDataMax']
#if 'VisuCoreDataOffs' in self.visu_pars: del self.visu_pars['VisuCoreDataOffs']
#if 'VisuCoreDataSlope' in self.visu_pars: del self.visu_pars['VisuCoreDataSlope']
#if 'VisuAcqImagePhaseEncDir' in self.visu_pars: del self.visu_pars['VisuAcqImagePhaseEncDir']
#VisuCoreFrameType = self.visu_pars.get('VisuCoreFrameType')
#VisuCoreDiskSliceOrder = self.visu_pars.get('VisuCoreDiskSliceOrder')
# Get data dimensions
data_dims, data_type, dim_desc, fg_index, fg_slice = self.__get_data_dims()
# Open 2dseq file
path_2dseq = os.path.join(datadir, '2dseq')
try:
fid = open(path_2dseq, 'rb')
except IOError as V:
if V.errno == 2:
sys.exit("Cannot open 2dseq file %s" % (path_2dseq,))
else:
raise
# Read 2dseq file
data = np.fromfile(fid, dtype=np.dtype(data_type)).reshape(data_dims, order='F')
# Close 2dseq file
fid.close()
# Map to raw data range
if map_raw:
data, data_type = self.__map_data(data, map_pv6)
# Move FrameGroup FG_SLICE axis to position 2
self.roll_fg = False
if roll_fg:
if fg_index is None:
print("Warning: Could not find FrameGroup.", file=sys.stderr)
elif fg_index > 2:
print("Warning: Move axis %d (FrameGroup %s) to position %d." % (fg_index, fg_slice, 2), file=sys.stderr)
data = np.rollaxis(data, fg_index, 2)
data_dims = list(data.shape)
self.roll_fg = True
else:
print("Warning: Could not move FrameGroup %s." % (fg_slice,), file=sys.stderr)
# Remove data dimensions of size 1
if squeeze and (1 in data_dims):
data = np.squeeze(data)
data_dims = list(data.shape)
# Reduce data dimensions to 4
if compact and (len(data_dims) > 4):
nt = int(np.prod(data_dims[3:]))
data_dims[3:] = [nt]
data = data.reshape(data_dims, order='F')
# Get voxel dimensions
voxel_dims, voxel_unit = self.__get_voxel_dims(data_dims, scale=self.scale)
self.swap_vd = False
if swap_vd:
if (not self.roll_fg) and (fg_index != 2) and (len(data_dims) > 3):
print("Warning: Swap third and fourth voxel dimension.", file=sys.stderr)
voxel_dims[2:4] = voxel_dims[3:1:-1]
self.swap_vd = True
else:
print("Warning: Could not swap third and fourth voxel dimension.", file=sys.stderr)
pixdim = [0.0] * 8
pixdim[0] = 1.0 # NIfTI qfac which is either -1 or 1
pixdim[1:len(voxel_dims)+1] = voxel_dims
# Info parameters
self.data_dims = data_dims
self.data_type = data_type
self.voxel_dims = voxel_dims
self.voxel_unit = voxel_unit
# NIfTI image
self.nifti_image = nii.Nifti1Image(data.reshape(data_dims, order='F'), None)
# NIfTI header
header = self.nifti_image.get_header()
header.set_data_dtype(data.dtype)
header.set_data_shape(data_dims)
#header.set_zooms(voxel_dims)
header['pixdim'] = pixdim
if dim_desc != 'spectroscopic':
header.set_xyzt_units(xyz=voxel_unit, t=None)
#print("header:"); print(header)
def save_nifti(self, ftype='NIFTI_GZ', subfolder=''):
if ftype == 'NIFTI_GZ':
ext = 'nii.gz'
elif ftype == 'NIFTI':
ext = 'nii'
elif ftype == 'ANALYZE':
ext = 'img'
else:
ext = 'nii.gz'
fproc = self.__make_subfolder(subfolder=subfolder)
fname = '.'.join([self.name, ext])
fpath = os.path.join(fproc, fname)
# Write NIfTI file
nib.save(self.nifti_image, fpath)
#self.nifti_image.to_filename(fpath)
print(self.nifti_image.get_filename())
def get_matrix(self):
matrix = self.__get_matrix()
return (matrix, np.linalg.inv(matrix))
def save_matrix(self, subfolder=''):
matrix = self.__get_matrix()
fproc = self.__make_subfolder(subfolder=subfolder)
self.__save_matrix(matrix, fproc, ext='omat')
self.__save_matrix(np.linalg.inv(matrix), fproc, ext='imat')
def save_table(self, eff_bval=False, subfolder=''):
DwAoImages = int(self.method.get('PVM_DwAoImages'))
DwNDiffDir = int(self.method.get('PVM_DwNDiffDir'))
DwNDiffExpEach = int(self.method.get('PVM_DwNDiffExpEach'))
#DwNDiffExp = int(self.method.get('PVM_DwNDiffExp'))
#print("DwAoImages:", DwAoImages)
#print("DwNDiffDir:", DwNDiffDir)
#print("DwNDiffExpEach:", DwNDiffExpEach)
#print("DwNDiffExp:", DwNDiffExp)
nd = DwAoImages + DwNDiffDir * DwNDiffExpEach
bvals = np.zeros(nd, dtype=np.float64)
dwdir = np.zeros((nd, 3), dtype=np.float64)
if eff_bval:
DwEffBval = self.method.get('PVM_DwEffBval').astype(np.float64)
#print("DwEffBval:"); print(DwEffBval)
bvals[DwAoImages:] = DwEffBval[DwAoImages:]
else:
DwBvalEach = self.method.get('PVM_DwBvalEach').astype(np.float64)
#print("DwBvalEach:", DwBvalEach)
bvals[DwAoImages:] = np.tile(DwBvalEach, DwNDiffDir)
DwDir = self.method.get('PVM_DwDir').astype(np.float64)
#DwDir = DwDir.reshape((DwNDiffDir * DwNDiffExpEach, 3))
#print("DwDir:"); print(DwDir)
dwdir[DwAoImages:] = np.repeat(DwDir, DwNDiffExpEach, axis=0)
fproc = self.__make_subfolder(subfolder=subfolder)
fname = '.'.join([self.name, 'btable', 'txt'])
fpath = os.path.join(fproc, fname)
print(fpath)
# Open btable file to write binary (Windows format)
fid = open(fpath, 'wb')
for i in zrange(nd):
print("%.4f" % (bvals[i],) + " %.8f %.8f %.8f" % tuple(dwdir[i]), end="\r\n", file=fid)
# Close file
fid.close()
fname = '.'.join([self.name, 'bvals', 'txt'])
fpath = os.path.join(fproc, fname)
print(fpath)
# Open bvals file to write binary (Unix format)
fid = open(fpath, 'wb')
print(" ".join("%.4f" % (bvals[i],) for i in zrange(nd)), end=chr(10), file=fid)
# Close bvals file
fid.close()
fname = '.'.join([self.name, 'bvecs', 'txt'])
fpath = os.path.join(fproc, fname)
print(fpath)
# Open bvecs file to write binary (Unix format)
fid = open(fpath, 'wb')
for k in range(3):
print(" ".join("%.8f" % (dwdir[i, k],) for i in zrange(nd)), end=chr(10), file=fid)
# Close bvecs file
fid.close()
def check_args(proc_folder, raw_folder, study, expno, procno):
    """Validate that every folder implied by the arguments exists.

    Checks, in order: the processed data folder, the raw data folder, the
    study folder, the experiment folder and the processed-images folder.
    Exits the program with an error message on the first missing one.
    """
    required_dirs = (
        proc_folder,                                   # processed data folder
        raw_folder,                                    # raw data folder
        os.path.join(raw_folder, study),               # study name
        os.path.join(raw_folder, study, str(expno)),   # experiment number
        os.path.join(raw_folder, study, str(expno), 'pdata', str(procno)),  # processed images number
    )
    for candidate in required_dirs:
        if not os.path.isdir(candidate):
            sys.exit("Error: '%s' is not an existing directory." % (candidate,))
def main():
    """Command-line entry point: parse arguments, read the ParaVision data
    and write the NIfTI file, orientation matrices and (optionally) the
    diffusion b-table."""
    import argparse
    cli = argparse.ArgumentParser(description='Read ParaVision data and save as NIfTI file')
    # Positional arguments: (name, help text).
    for arg_name, arg_help in (
            ('proc_folder', 'processed data folder'),
            ('raw_folder', 'raw data folder'),
            ('study', 'study name'),
            ('expno', 'experiment number'),
            ('procno', 'processed (reconstructed) images number')):
        cli.add_argument(arg_name, help=arg_help)
    cli.add_argument('-s', '--scale', default=1.0, help='voxel dimensions scale factor')
    # Boolean flags: (short option, long name, help text).
    for short_opt, long_name, flag_help in (
            ('-m', 'map_raw', 'map the data to get the real values'),
            ('-p', 'map_pv6', 'map the data by dividing (ParaVision 6)'),
            ('-r', 'roll_fg', 'move slice framegroup to third dimension'),
            ('-q', 'squeeze', 'remove data dimensions of size 1'),
            ('-c', 'compact', 'reduce data dimensions to 4'),
            ('-v', 'swap_vd', 'swap third and fourth voxel dimension'),
            ('-t', 'table', 'save b-values and diffusion directions')):
        cli.add_argument(short_opt, '--' + long_name, action='store_true', help=flag_help)
    opts = cli.parse_args()
    check_args(opts.proc_folder, opts.raw_folder, opts.study, opts.expno, opts.procno)
    pv = ParaVision(opts.proc_folder, opts.raw_folder, opts.study, opts.expno, opts.procno)
    pv.read_2dseq(map_raw=opts.map_raw, map_pv6=opts.map_pv6, roll_fg=opts.roll_fg,
                  squeeze=opts.squeeze, compact=opts.compact, swap_vd=opts.swap_vd,
                  scale=opts.scale)
    pv.save_nifti(ftype='NIFTI_GZ')
    pv.save_matrix()
    if opts.table:
        pv.save_table()
if __name__ == '__main__':
    main()
| Python |
3D | Aswendt-Lab/AIDAmri | bin/2.2_DTIPreProcessing/registration_DTI.py | .py | 21,386 | 441 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
Documentation preface, added 23/05/09 by Victor Vera Frazao:
This document is currently in revision for improvement and fixing.
Specifically changes are made to allow compatibility of the pipeline with Ubuntu 18.04 systems
and Ubuntu 18.04 Docker base images, respectively, as well as adapting to apparent changes of
DSI-Studio that were applied since the AIDAmri v.1.1 release. As to date the DSI-Studio version
used is the 2022/08/03 Ubuntu 18.04 release.
All changes and additional documentations within this script carry a signature with the writer's
initials (e.g. VVF for Victor Vera Frazao) and the date at application, denoted after '//' at
the end of the comment line. If code segments need clearance the comment line will be prefaced
by '#?'. Changes are prefaced by '#>' and other comments are prefaced ordinarily
by '#'.
"""
import sys,os
import nibabel as nii
import numpy as np
import shutil
import glob
import subprocess
import shlex
def _run_reg_command(command):
    """Run one NiftyReg command line: split it shell-style, execute it,
    echo its stdout, and re-raise (after logging) on failure."""
    command_args = shlex.split(command)
    try:
        result = subprocess.run(command_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        print(f"Output of {command}:\n{result.stdout}")
    except Exception as e:
        print(f'Error while executing the command: {command_args} Errorcode: {str(e)}')
        raise


def regABA2DTI(inputVolume,stroke_mask,refStroke_mask,T2data, brain_template,brain_anno, splitAnno,splitAnno_rsfMRI,anno_rsfMRI,bsplineMatrix,outfile):
    """Register the Allen Brain Atlas annotations/template to a DTI volume.

    inputVolume       -- brain-extracted DTI volume (registration reference)
    stroke_mask       -- optional stroke (incidence) mask in T2 space
    refStroke_mask    -- optional stroke mask from a reference time point
    T2data            -- brain-extracted T2 volume of the same subject
    brain_template    -- Allen template already registered to the T2
    brain_anno        -- Allen annotation already registered to the T2
    splitAnno         -- split annotation atlas (atlas space)
    splitAnno_rsfMRI  -- split parental annotation atlas (atlas space)
    anno_rsfMRI       -- parental annotation atlas (atlas space)
    bsplineMatrix     -- B-spline transform from atlas to T2 space
    outfile           -- output directory

    Writes the resampled annotations/template next to the input plus 10x
    scaled copies (and label tables) for DSI Studio, and returns the path of
    the resampled split annotation.

    Requires the NiftyReg tools (reg_aladin, reg_resample) on PATH.
    """
    base = os.path.basename(inputVolume).split('.')[0]

    # Rigid registration of the T2 volume onto the DTI volume.
    outputT2w = os.path.join(outfile, base + '_T2w.nii.gz')
    outputAff = os.path.join(outfile, base + 'transMatrixAff.txt')
    _run_reg_command(f"reg_aladin -ref {inputVolume} -flo {T2data} -res {outputT2w} -rigOnly -aff {outputAff}")

    # Resample the split annotation: atlas -> T2 (B-spline), then T2 -> DTI (affine).
    outputAnnoSplit = os.path.join(outfile, base + '_AnnoSplit.nii.gz')
    _run_reg_command(f"reg_resample -ref {brain_anno} -flo {splitAnno} -trans {bsplineMatrix} -inter 0 -res {outputAnnoSplit}")
    _run_reg_command(f"reg_resample -ref {inputVolume} -flo {outputAnnoSplit} -trans {outputAff} -inter 0 -res {outputAnnoSplit}")

    # Resample the split parental annotation (same two-step chain).
    outputAnnoSplit_par = os.path.join(outfile, base + '_AnnoSplit_parental.nii.gz')
    _run_reg_command(f"reg_resample -ref {brain_anno} -flo {splitAnno_rsfMRI} -trans {bsplineMatrix} -inter 0 -res {outputAnnoSplit_par}")
    _run_reg_command(f"reg_resample -ref {inputVolume} -flo {outputAnnoSplit_par} -trans {outputAff} -inter 0 -res {outputAnnoSplit_par}")

    # Resample the parental annotation (same two-step chain).
    outputAnno_par = os.path.join(outfile, base + '_Anno_parental.nii.gz')
    _run_reg_command(f"reg_resample -ref {brain_anno} -flo {anno_rsfMRI} -trans {bsplineMatrix} -inter 0 -res {outputAnno_par}")
    _run_reg_command(f"reg_resample -ref {inputVolume} -flo {outputAnno_par} -trans {outputAff} -inter 0 -res {outputAnno_par}")

    # Resample the Allen template onto the DTI volume (trilinear).
    outputTemplate = os.path.join(outfile, base + '_Template.nii.gz')
    _run_reg_command(f"reg_resample -ref {inputVolume} -flo {brain_template} -cpp {outputAff} -res {outputTemplate}")

    # Fresh DSI Studio output folder with 10x scaled data.
    outfileDSI = os.path.join(os.path.dirname(inputVolume), 'DSI_studio')
    if os.path.exists(outfileDSI):
        shutil.rmtree(outfileDSI)  #? script-based removal of directories not recommended. Maybe change? // VVF 23/10/05
    os.makedirs(outfileDSI)

    # If a reference-day stroke mask exists, bring it into this session's
    # space first and use it instead of the session mask.
    outputRefStrokeMaskAff = None
    if refStroke_mask is not None and len(refStroke_mask) > 0 and os.path.exists(refStroke_mask):
        refMatrix = find_RefAff(inputVolume)[0]
        refMTemplate = find_RefTemplate(inputVolume)[0]
        outputRefStrokeMaskAff = os.path.join(outfile, base + '_refStrokeMaskAff.nii.gz')
        _run_reg_command(f"reg_resample -ref {refMTemplate} -flo {refStroke_mask} -cpp {refMatrix} -res {outputRefStrokeMaskAff}")
        stroke_mask = outputRefStrokeMaskAff

    if stroke_mask is not None and len(stroke_mask) > 0 and os.path.exists(stroke_mask):
        # Resample the stroke mask onto the DTI volume (nearest neighbour).
        outputStrokeMask = os.path.join(outfile, base + 'Stroke_mask.nii.gz')
        _run_reg_command(f"reg_resample -ref {inputVolume} -flo {stroke_mask} -inter 0 -cpp {outputAff} -res {outputStrokeMask}")

        # Superposition of annotations and mask
        # NOTE(review): this first superposition also uses the *parental*
        # split annotation (outputAnnoSplit_par) although its output is named
        # 'Anno_mask'; possibly outputAnnoSplit was intended — confirm.
        dataAnno = nii.load(outputAnnoSplit_par)
        dataStroke = nii.load(outputStrokeMask)
        imgAnno = dataAnno.get_fdata()
        imgStroke = dataStroke.get_fdata()
        # Binarize the mask (second assignment is a no-op kept for fidelity).
        imgStroke[imgStroke > 0] = 1
        imgStroke[imgStroke == 0] = 0
        superPosAnnoStroke = imgStroke * imgAnno
        unscaledNiiData = nii.Nifti1Image(superPosAnnoStroke, dataAnno.affine)
        hdrOut = unscaledNiiData.header
        hdrOut.set_xyzt_units('mm')
        nii.save(unscaledNiiData, os.path.join(outfile, base + 'Anno_mask.nii.gz'))

        # Stroke Mask, flipped and 10x scaled for DSI Studio.
        outputMaskScaled = os.path.join(outfileDSI, base + 'StrokeMask_scaled.nii')  #> removed '.gz' ending to correct atlas implementation // VVF 23/05/10
        superPosAnnoStroke = np.flip(superPosAnnoStroke, 2)
        # NOTE(review): `affine * scale` is an *elementwise* product with a
        # diagonal matrix, which also zeroes any off-diagonal affine terms;
        # confirm this (rather than a matrix product) is intended.
        scale = np.eye(4) * 10
        scale[3][3] = 1
        unscaledNiiDataMask = nii.Nifti1Image(superPosAnnoStroke, dataStroke.affine * scale)
        hdrOut = unscaledNiiDataMask.header
        hdrOut.set_xyzt_units('mm')
        nii.save(unscaledNiiDataMask, outputMaskScaled)
        src_file = os.path.join(os.path.abspath(os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/', 'ARA_annotationR+2000.nii.txt')
        dst_file = os.path.join(outfileDSI, base + 'StrokeMask_scaled.txt')  #> removed '.nii.' ending to correct atlas implementation // VVF 23/05/10
        superPosAnnoStroke = np.flip(superPosAnnoStroke, 2)  # dead store (recomputed below); kept for fidelity
        shutil.copyfile(src_file, dst_file)

        # Superposition of rsfMRI (parental) annotations and mask.
        dataAnno = nii.load(outputAnnoSplit_par)
        dataStroke = nii.load(outputStrokeMask)
        imgAnno = dataAnno.get_fdata()
        imgStroke = dataStroke.get_fdata()
        imgStroke[imgStroke > 0] = 1
        imgStroke[imgStroke == 0] = 0
        superPosAnnoStroke = imgStroke * imgAnno
        unscaledNiiData = nii.Nifti1Image(superPosAnnoStroke, dataAnno.affine)
        hdrOut = unscaledNiiData.header
        hdrOut.set_xyzt_units('mm')
        nii.save(unscaledNiiData, os.path.join(outfile, base + 'Anno_parental_mask.nii.gz'))
        superPosAnnoStroke = np.flip(superPosAnnoStroke, 2)

        # Parental mask, 10x scaled for DSI Studio.
        outputMaskScaled = os.path.join(outfileDSI, base + 'parental_Mask_scaled.nii')  #> removed '.gz' ending to correct atlas implementation // VVF 23/05/10
        # NOTE(review): this second flip cancels the one above, so the
        # parental mask is saved *unflipped*, unlike StrokeMask_scaled —
        # confirm this asymmetry is intended.
        superPosAnnoStroke = np.flip(superPosAnnoStroke, 2)
        scale = np.eye(4) * 10
        scale[3][3] = 1
        unscaledNiiDataMask = nii.Nifti1Image(superPosAnnoStroke, dataStroke.affine * scale)
        hdrOut = unscaledNiiDataMask.header
        hdrOut.set_xyzt_units('mm')
        nii.save(unscaledNiiDataMask, outputMaskScaled)
        src_file = os.path.join(os.path.abspath(os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/annoVolume+2000_rsfMRI.nii.txt')
        dst_file = os.path.join(outfileDSI, base + 'parental_Mask_scaled.txt')  #> removed '.nii.' ending to correct atlas implementation // VVF 23/05/10
        superPosAnnoStroke = np.flip(superPosAnnoStroke, 2)  # dead store; kept for fidelity
        shutil.copyfile(src_file, dst_file)

    # Brain mask, flipped and 10x scaled for DSI Studio.
    outputMaskScaled = os.path.join(outfileDSI, base + 'Mask_scaled.nii')  #> removed '.gz' ending to correct atlas implementation // VVF 23/05/10
    dataMask = nii.load(os.path.join(outfile, base + '_mask.nii.gz'))
    imgMask = dataMask.get_fdata()
    imgMask = np.flip(imgMask, 2)
    scale = np.eye(4) * 10
    scale[3][3] = 1
    unscaledNiiDataMask = nii.Nifti1Image(imgMask, dataMask.affine * scale)
    hdrOut = unscaledNiiDataMask.header
    hdrOut.set_xyzt_units('mm')
    nii.save(unscaledNiiDataMask, outputMaskScaled)

    # Allen Brain: scaled annotation, parental annotation and template plus
    # the matching DSI Studio label tables.
    outputAnnoScaled = os.path.join(outfileDSI, base + 'Anno_scaled.nii')  #> removed '.gz' ending to correct atlas implementation // VVF 23/05/10
    outputAnnorparScaled = os.path.join(outfileDSI, base + 'AnnoSplit_parental_scaled.nii')  #> removed '.gz' ending to correct atlas implementation // VVF 23/05/10
    outputAllenBScaled = os.path.join(outfileDSI, base + 'Allen_scaled.nii')  #> removed '.gz' ending to correct atlas implementation // VVF 23/05/10
    src_file = os.path.join(os.path.abspath(os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/', 'ARA_annotationR+2000.nii.txt')
    dst_file = os.path.join(outfileDSI, base + 'Anno_scaled.txt')  #> removed '.nii.' ending to correct atlas implementation // VVF 23/05/10
    shutil.copyfile(src_file, dst_file)
    src_file = os.path.join(os.path.abspath(os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/', 'annoVolume+2000_rsfMRI.nii.txt')
    dst_file = os.path.join(outfileDSI, base + 'AnnoSplit_parental_scaled.txt')  #> removed '.nii.' ending to correct atlas implementation // VVF 23/05/10
    shutil.copyfile(src_file, dst_file)
    dataAnno = nii.load(os.path.join(outfile, base + '_AnnoSplit.nii.gz'))
    dataAnnorspar = nii.load(os.path.join(outfile, base + '_AnnoSplit_parental.nii.gz'))
    dataAllen = nii.load(os.path.join(outfile, base + '_Template.nii.gz'))
    imgTempAnno = np.flip(dataAnno.get_fdata(), 2)
    imgTempAnnorspar = np.flip(dataAnnorspar.get_fdata(), 2)
    imgTempAllen = np.flip(dataAllen.get_fdata(), 2)
    scale = np.eye(4) * 10
    scale[3][3] = 1
    unscaledNiiDataAnno = nii.Nifti1Image(imgTempAnno, dataAnno.affine * scale)
    unscaledNiiDataAnnorspar = nii.Nifti1Image(imgTempAnnorspar, dataAnnorspar.affine * scale)
    unscaledNiiDataAllen = nii.Nifti1Image(imgTempAllen, dataAllen.affine * scale)
    for scaled_img in (unscaledNiiDataAnno, unscaledNiiDataAnnorspar, unscaledNiiDataAllen):
        scaled_img.header.set_xyzt_units('mm')
    nii.save(unscaledNiiDataAnno, outputAnnoScaled)
    nii.save(unscaledNiiDataAnnorspar, outputAnnorparScaled)
    nii.save(unscaledNiiDataAllen, outputAllenBScaled)

    # Remove the intermediate reference-day mask, if one was produced.
    if outputRefStrokeMaskAff is not None:
        os.remove(outputRefStrokeMaskAff)
    return outputAnnoSplit
def find_RefStroke(refStrokePath,inputVolume):
    """Locate the reference incidence (stroke) mask for this subject.

    The subject is identified by the first 9 characters of the input
    volume's file name; returns the list of matching mask paths.
    """
    subject_id = os.path.basename(inputVolume)[0:9]
    pattern = os.path.join(refStrokePath, subject_id, '*', "anat", "*", "IncidenceData_mask.nii.gz")
    return glob.glob(pattern, recursive=False)
def find_RefAff(inputVolume):
    """Find the affine matrix file produced by the anatomical registration,
    located in the 'anat' sibling folder of the input volume's directory."""
    anat_dir = os.path.join(os.path.dirname(os.path.dirname(inputVolume)), 'anat')
    return glob.glob(os.path.join(anat_dir, '*MatrixAff.txt'))
def find_RefTemplate(inputVolume):
    """Find the affinely-registered template file from the anatomical
    pipeline, located in the 'anat' sibling folder of the input volume."""
    anat_dir = os.path.join(os.path.dirname(os.path.dirname(inputVolume)), 'anat')
    return glob.glob(os.path.join(anat_dir, '*TemplateAff.nii.gz'))
def find_relatedData(pathBase):
    """Collect the anatomical-pipeline outputs related to a subject.

    Globs pathBase + '*/anat/...' for the brain-extracted T2, stroke mask,
    annotation, Allen template and B-spline matrix; returns five lists
    (each possibly empty) in that order.
    """
    def in_anat(suffix):
        # All patterns share the '<pathBase>*/anat/' prefix.
        return glob.glob(pathBase + '*/anat/' + suffix, recursive=False)
    pathT2 = in_anat('*Bet.nii.gz')
    pathStroke_mask = in_anat('*Stroke_mask.nii.gz')
    pathAnno = in_anat('*Anno.nii.gz')
    pathAllen = in_anat('*Allen.nii.gz')
    bsplineMatrix = in_anat('*MatrixBspline.nii')
    return pathT2, pathStroke_mask, pathAnno, pathAllen, bsplineMatrix
if __name__ == "__main__":
    # Command-line entry point: parse arguments, locate the anatomical
    # reference data, run the atlas-to-DTI registration.
    import argparse
    parser = argparse.ArgumentParser(description='Registration Allen Brain to DTI')
    requiredNamed = parser.add_argument_group('required named arguments')
    requiredNamed.add_argument('-i', '--inputVolume', help='Path to the BET file of DTI data after preprocessing',
                               required=True)
    parser.add_argument('-r', '--referenceDay', help='Reference Stroke mask (for example: P5)', nargs='?', type=str,
                        default=None)
    parser.add_argument('-s', '--splitAnno', help='Split annotations atlas', nargs='?', type=str,
                        default=os.path.abspath(os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/ARA_annotationR+2000.nii.gz')
    parser.add_argument('-f', '--splitAnno_rsfMRI', help='Split annotations atlas for rsfMRI/DTI', nargs='?', type=str,
                        default=os.path.abspath(os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/annoVolume+2000_rsfMRI.nii.gz')
    parser.add_argument('-a', '--anno_rsfMRI', help='Parental Annotations atlas for rsfMRI/DTI', nargs='?', type=str,
                        default=os.path.abspath(os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/annoVolume.nii.gz')
    args = parser.parse_args()

    stroke_mask = None
    inputVolume = None
    refStrokePath = None
    splitAnno = None
    splitAnno_rsfMRI = None
    anno_rsfMRI = None
    if args.inputVolume is not None:
        inputVolume = args.inputVolume
    if not os.path.exists(inputVolume):
        sys.exit("Error: '%s' is not an existing directory." % (inputVolume,))

    outfile = os.path.join(os.path.dirname(inputVolume))  # e.g. .../sub-X/ses-Baseline
    if not os.path.exists(outfile):
        os.makedirs(outfile)

    # Find related anatomical data (T2, stroke mask, annotation, template,
    # B-spline matrix) produced by the T2 processing step.
    # NOTE: `len(x) is 0` was replaced by `== 0`; identity comparison with an
    # int literal is implementation-defined (SyntaxWarning on Python >= 3.8).
    pathT2, pathStroke_mask, pathAnno, pathTemplate, bsplineMatrix = find_relatedData(os.path.dirname(outfile))
    if len(pathT2) == 0:
        T2data = []
        sys.exit("Error: %s' has no reference T2 template." % (os.path.basename(inputVolume),))
    else:
        T2data = pathT2[0]
    if len(pathStroke_mask) == 0:
        pathStroke_mask = []
        print("Notice: '%s' has no defined reference (stroke) mask - will proceed without." % (os.path.basename(inputVolume),))
    else:
        stroke_mask = pathStroke_mask[0]
    if len(pathAnno) == 0:
        pathAnno = []
        sys.exit("Error: %s' has no reference annotations." % (os.path.basename(inputVolume),))
    else:
        brain_anno = pathAnno[0]
    if len(pathTemplate) == 0:
        pathTemplate = []
        sys.exit("Error: %s' has no reference template." % (os.path.basename(inputVolume),))
    else:
        brain_template = pathTemplate[0]
    if len(bsplineMatrix) == 0:
        bsplineMatrix = []
        sys.exit("Error: %s' has no bspline Matrix." % (os.path.basename(inputVolume),))
    else:
        bsplineMatrix = bsplineMatrix[0]

    # Find the reference (stroke) mask of the reference time point, if given.
    refStroke_mask = None
    if args.referenceDay is not None:
        referenceDay = args.referenceDay
        refStrokePath = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(outfile))), referenceDay)
        if not os.path.exists(refStrokePath):
            sys.exit("Error: '%s' is not an existing directory." % (refStrokePath,))
        refStroke_mask = find_RefStroke(refStrokePath, inputVolume)
        if len(refStroke_mask) == 0:
            refStroke_mask = []
            print("Notice: '%s' has no defined reference (stroke) mask - will proceed without." % (os.path.basename(inputVolume),))
        else:
            refStroke_mask = refStroke_mask[0]

    # Validate the atlas files (defaults resolve relative to the CWD).
    if args.splitAnno is not None:
        splitAnno = args.splitAnno
    if not os.path.exists(splitAnno):
        sys.exit("Error: '%s' is not an existing directory." % (splitAnno,))
    if args.splitAnno_rsfMRI is not None:
        splitAnno_rsfMRI = args.splitAnno_rsfMRI
    if not os.path.exists(splitAnno_rsfMRI):
        sys.exit("Error: '%s' is not an existing directory." % (splitAnno_rsfMRI,))
    if args.anno_rsfMRI is not None:
        anno_rsfMRI = args.anno_rsfMRI
    if not os.path.exists(anno_rsfMRI):
        sys.exit("Error: '%s' is not an existing directory." % (anno_rsfMRI,))

    output = regABA2DTI(inputVolume, stroke_mask, refStroke_mask, T2data, brain_template, brain_anno, splitAnno, splitAnno_rsfMRI, anno_rsfMRI, bsplineMatrix, outfile)

    # Collect the generated images (the orientation-adjustment step below is
    # currently disabled, so this loop is a no-op kept for reference).
    current_dir = os.path.dirname(inputVolume)
    search_string = os.path.join(current_dir, "*dwi.nii.gz")
    currentFile = glob.glob(search_string)
    search_string = os.path.join(current_dir, "*.nii*")
    created_imgs = glob.glob(search_string, recursive=True)
    os.chdir(os.path.dirname(os.getcwd()))
    for idx, img in enumerate(created_imgs):
        if img is None:
            continue
        #os.system('python adjust_orientation.py -i '+ str(img) + ' -t ' + currentFile[0])
        continue
    print("Registration completed")
| Python |
3D | Aswendt-Lab/AIDAmri | bin/2.2_DTIPreProcessing/preProcessing_DTI.py | .py | 6,660 | 196 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import nipype.interfaces.fsl as fsl
import os, sys
import nibabel as nii
import numpy as np
import applyMICO
import cv2
from pathlib import Path
import subprocess
import shutil
def reset_orientation(input_file):
    """Back up the NIfTI file and force it into radiological orientation.

    A pristine copy of the input is stored in a sibling 'brkraw' directory,
    then the file is rewritten with its unscaled raw data, its orientation
    information deleted and finally forced to radiological via FSL's
    fslorient. If the backup directory already exists the file is assumed
    to have been reset on a previous run and nothing is done.
    """
    brkraw_dir = os.path.join(os.path.dirname(input_file), "brkraw")
    if os.path.exists(brkraw_dir):
        # Already backed up and reset on an earlier run - do not reset twice.
        return

    os.mkdir(brkraw_dir)
    dst_path = os.path.join(brkraw_dir, os.path.basename(input_file))
    shutil.copyfile(input_file, dst_path)

    data = nii.load(input_file)
    raw_img = data.dataobj.get_unscaled()  # bypass scl_slope/scl_inter scaling
    raw_nii = nii.Nifti1Image(raw_img, data.affine)
    nii.save(raw_nii, input_file)

    # BUG FIX: pass argument vectors instead of shell=True strings so paths
    # containing spaces or shell metacharacters cannot break (or inject into)
    # the command line.
    subprocess.run(["fslorient", "-deleteorient", input_file])

    # Command to force the radiological orientation
    subprocess.run(["fslorient", "-forceradiological", input_file])
def applyBET(input_file: str, frac: float, radius: int, output_path: str) -> str:
    """
    Performs brain extraction via the FSL Brain Extraction Tool (BET). Requires an appropriate input file (input_file), the fractional intensity threshold (frac), the head radius (radius) and the output path (output_path).

    Returns the path of the brain-extracted NIfTI file ('<stem>Bet.nii.gz'
    inside output_path). A temporary upscaled file 'fslScaleTemp.nii.gz' is
    created next to the input and removed afterwards.
    """
    # scale Nifti data by factor 10
    # (presumably so rodent-brain dimensions match BET's human-tuned
    # defaults such as the mm head radius - TODO confirm)
    data = nii.load(input_file)
    imgTemp = data.get_fdata()
    scale = np.eye(4)* 10
    scale[3][3] = 1
    imgTemp = np.flip(imgTemp, 2)
    # NOTE(review): '*' on ndarrays is ELEMENT-wise, so 'data.affine * scale'
    # scales the diagonal by 10 and zeroes every off-diagonal/translation
    # entry rather than composing a scaling transform - confirm intended.
    scaledNiiData = nii.Nifti1Image(imgTemp, data.affine * scale)
    hdrIn = scaledNiiData.header
    hdrIn.set_xyzt_units('mm')
    scaledNiiData = nii.as_closest_canonical(scaledNiiData)
    fsl_path = os.path.join(os.path.dirname(input_file),'fslScaleTemp.nii.gz')
    nii.save(scaledNiiData, fsl_path)

    # extract brain (robust iterative BET; also writes a binary brain mask)
    output_file = os.path.join(output_path, os.path.basename(input_file).split('.')[0] + 'Bet.nii.gz')
    myBet = fsl.BET(in_file=fsl_path, out_file=output_file,frac=frac,radius=radius,robust=True, mask = True)
    myBet.run()
    os.remove(fsl_path)

    # unscale result data by factor 10ˆ(-1) (same element-wise caveat as above)
    dataOut = nii.load(output_file)
    imgOut = dataOut.get_fdata()
    scale = np.eye(4)/ 10
    scale[3][3] = 1
    unscaledNiiData = nii.Nifti1Image(imgOut, dataOut.affine * scale)
    hdrOut = unscaledNiiData.header
    hdrOut.set_xyzt_units('mm')
    nii.save(unscaledNiiData, output_file)

    return output_file
def smoothIMG(input_file, output_path):
    """
    Denoises and smoothes an image with FSL. Apart from the input file and the
    output directory no parameters are exposed: the filter is fixed to a median
    over a box-shaped kernel of 0.1 voxel.
    """
    src = nii.load(input_file)
    # Collapse the 4th (diffusion) dimension by taking the voxel-wise minimum.
    denoised = nii.Nifti1Image(np.min(src.get_fdata(), 3), src.affine)
    denoised.header.set_xyzt_units('mm')

    stem = os.path.basename(input_file).split('.')[0]
    denoised_path = os.path.join(os.path.dirname(input_file), stem + 'DN.nii.gz')
    nii.save(denoised, denoised_path)

    smoothed_path = os.path.join(
        output_path,
        os.path.basename(denoised_path).split('.')[0] + 'Smooth.nii.gz')
    fsl.SpatialFilter(
        in_file=denoised_path,
        out_file=smoothed_path,
        operation='median',
        kernel_shape='box',
        kernel_size=0.1
    ).run()
    return smoothed_path
def thresh(input_file, output_path):
    # Apply a fixed lower intensity threshold of 20 with FSL.
    out_name = os.path.basename(input_file).split('.')[0] + 'Thres.nii.gz'
    output_file = os.path.join(output_path, out_name)
    fsl.Threshold(in_file=input_file, out_file=output_file, thresh=20).run()
    return output_file
def cropToSmall(input_file, output_path):
    # Extract a fixed region of interest (x 40..169, y 50..159, z 0..11) via FSL.
    out_name = os.path.basename(input_file).split('.')[0] + 'Crop.nii.gz'
    output_file = os.path.join(output_path, out_name)
    fsl.ExtractROI(in_file=input_file, roi_file=output_file,
                   x_min=40, x_size=130,
                   y_min=50, y_size=110,
                   z_min=0, z_size=12).run()
    return output_file
if __name__ == "__main__":
    import argparse

    # Command-line front end for the DTI preprocessing chain:
    # orientation reset -> smoothing -> bias field correction -> BET.
    parser = argparse.ArgumentParser(description='Preprocessing of DTI Data')
    requiredNamed = parser.add_argument_group('required named arguments')
    requiredNamed.add_argument('-i', '--input', help='Path to the raw NIfTI DTI file', required=True)
    parser.add_argument('-f', '--frac', help='Fractional intensity threshold - default=0.3, smaller values give larger brain outline estimates', nargs='?', type=float,default=0.3)
    parser.add_argument('-r', '--radius', help='Head radius (mm not voxels) - default=45', nargs='?', type=int ,default=45)
    parser.add_argument('-g', '--vertical_gradient', help='Vertical gradient in fractional intensity threshold - default=0.0, positive values give larger brain outlines at bottom and smaller brain outlines at top', nargs='?',
                        type=float,default=0.0)
    args = parser.parse_args()

    # set parameters
    input_file = None
    if args.input is not None:  # fixed: condition previously tested args.input twice
        input_file = args.input
    if not os.path.exists(input_file):
        # BUG FIX: the old message formatted 'args.file', an attribute that does
        # not exist on the namespace, so the error path itself crashed with an
        # AttributeError before the message could be printed.
        sys.exit("Error: '%s' is not an existing directory or file." % (input_file,))

    frac = args.frac
    radius = args.radius
    vertical_gradient = args.vertical_gradient
    output_path = os.path.dirname(input_file)
    print(f"Frac: {frac} Radius: {radius} Gradient {vertical_gradient}")

    reset_orientation(input_file)
    print("Orientation resetted to RAS")

    # denoise and smooth the raw volume
    try:
        output_smooth = smoothIMG(input_file = input_file, output_path = output_path)
        print("Smoothing completed")
    except Exception as e:
        # BUG FIX: this handler previously reported a bias field correction
        # failure even though it guards the smoothing step.
        print(f'Fehler beim Smoothing\nFehlermeldung: {str(e)}')
        raise

    # intensity correction using non parametric bias field correction algorithm
    try:
        output_mico = applyMICO.run_MICO(output_smooth,output_path)
        print("Biasfieldcorrecttion was successful")
    except Exception as e:
        print(f'Fehler in der Biasfieldcorrecttion\nFehlermeldung: {str(e)}')
        raise

    # get rid of your skull
    outputBET = applyBET(input_file = output_mico, frac = frac, radius = radius, output_path = output_path)
    print("Brainextraction was successful")
| Python |
3D | Aswendt-Lab/AIDAmri | bin/2.2_DTIPreProcessing/anisodiff.py | .py | 2,762 | 87 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import numpy as np
import scipy.ndimage
def applyFilter(im, num_iter, delta_t, kappa, option):
    """Perona-Malik anisotropic diffusion of a 2D image.

    Parameters
    ----------
    im : ndarray
        2D input image (any numeric dtype; converted to float internally).
    num_iter : int
        Number of diffusion iterations.
    delta_t : float
        Integration step size of the discrete PDE.
    kappa : float
        Gradient-magnitude conduction threshold.
    option : int
        1 -> exponential conduction function, 2 -> rational conduction
        function (the two diffusivities of Perona & Malik).

    Returns
    -------
    ndarray
        The diffused image as a float array; the input array is not modified.

    Raises
    ------
    ValueError
        If ``option`` is neither 1 nor 2.
    """
    # BUG FIX: ndarray.astype() returns a NEW array and does not convert in
    # place; the original call discarded the result, so integer-typed images
    # entered the PDE with integer arithmetic.
    diff_im = im.astype(float)

    # Center pixel distances.
    dx = 1
    dy = 1
    dd = np.sqrt(2)

    # 2D convolution masks - finite differences toward the 8 neighbours.
    hN = np.array(([0, 1, 0], [0, -1, 0], [0, 0, 0]))
    hS = np.array(([0, 0, 0], [0, -1, 0], [0, 1, 0]))
    hE = np.array(([0, 0, 0], [0, -1, 1], [0, 0, 0]))
    hW = np.array(([0, 0, 0], [1, -1, 0], [0, 0, 0]))
    hNE = np.array(([0, 0, 1], [0, -1, 0], [0, 0, 0]))
    hSE = np.array(([0, 0, 0], [0, -1, 0], [0, 0, 1]))
    hSW = np.array(([0, 0, 0], [0, -1, 0], [1, 0, 0]))
    hNW = np.array(([1, 0, 0], [0, -1, 0], [0, 0, 0]))

    for t in range(num_iter):
        # Directional gradients (nearest-neighbour boundary handling).
        nablaN = scipy.ndimage.convolve(diff_im, hN, mode='nearest')
        nablaS = scipy.ndimage.convolve(diff_im, hS, mode='nearest')
        nablaE = scipy.ndimage.convolve(diff_im, hE, mode='nearest')
        nablaW = scipy.ndimage.convolve(diff_im, hW, mode='nearest')
        nablaNE = scipy.ndimage.convolve(diff_im, hNE, mode='nearest')
        nablaSE = scipy.ndimage.convolve(diff_im, hSE, mode='nearest')
        nablaSW = scipy.ndimage.convolve(diff_im, hSW, mode='nearest')
        nablaNW = scipy.ndimage.convolve(diff_im, hNW, mode='nearest')

        # Diffusion (conduction) function.
        if option == 1:
            cN = np.exp(-(nablaN / kappa)**2)
            cS = np.exp(-(nablaS / kappa)**2)
            cW = np.exp(-(nablaW / kappa)**2)
            cE = np.exp(-(nablaE / kappa)**2)
            cNE = np.exp(-(nablaNE / kappa)**2)
            cSE = np.exp(-(nablaSE / kappa)**2)
            cSW = np.exp(-(nablaSW / kappa)**2)
            cNW = np.exp(-(nablaNW / kappa)**2)
        elif option == 2:
            cN = 1 / (1 + (nablaN / kappa)**2)
            cS = 1 / (1 + (nablaS / kappa)**2)
            cW = 1 / (1 + (nablaW / kappa)**2)
            cE = 1 / (1 + (nablaE / kappa)**2)
            cNE = 1 / (1 + (nablaNE / kappa)**2)
            cSE = 1 / (1 + (nablaSE / kappa)**2)
            cSW = 1 / (1 + (nablaSW / kappa)**2)
            cNW = 1 / (1 + (nablaNW / kappa)**2)
        else:
            # ROBUSTNESS: previously an unknown option fell through and
            # crashed with a NameError on the undefined conduction terms.
            raise ValueError("option must be 1 or 2")

        # Discrete PDE solution.
        diff_im = diff_im + delta_t * (
            (1 / (dy ** 2)) * cN * nablaN + (1 / (dy ** 2)) * cS * nablaS +
            (1 / (dx ** 2)) * cW * nablaW + (1 / (dx ** 2)) * cE * nablaE +
            (1 / (dd ** 2)) * cNE * nablaNE + (1 / (dd ** 2)) * cSE * nablaSE +
            (1 / (dd ** 2)) * cSW * nablaSW + (1 / (dd ** 2)) * cNW * nablaNW)

    return diff_im
3D | Aswendt-Lab/AIDAmri | bin/2.2_DTIPreProcessing/MICO.py | .py | 2,915 | 120 | """
@Article{li2014multiplicative,
author = {Li, Chunming and Gore, John C and Davatzikos, Christos},
title = {Multiplicative intrinsic component optimization (MICO) for MRI bias field estimation and tissue segmentation},
journal = {Magnetic resonance imaging},
year = {2014},
volume = {32},
number = {7},
pages = {913--923},
publisher = {Elsevier},
}
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import numpy as np
import sys
def runMICO(Img, q, W, M, C, b, Bas, GGT, ImgG, Iter, iterCM):
    """One MICO optimisation pass over a 2D slice.

    Alternately updates the class means C, the membership functions M
    (iterCM inner iterations) and the bias field b, repeated Iter times.

    Parameters mirror Li et al. (2014): Img is the slice, q the fuzzifier,
    W the ROI weight mask, M the (nrow, ncol, N_class) memberships, C the
    class means, b the current bias field, Bas/GGT/ImgG the precomputed
    basis images and their (masked) products.

    Returns the updated memberships, bias field and class means.
    """
    D = np.zeros(M.shape)
    for n in range(Iter):
        C = updateC(Img, W, b, M)
        for k in range(iterCM):
            N_class = M.shape[2]
            # squared residual of each class mean under the current bias field
            # (CLEANUP: removed the unused 'e' array the original allocated here)
            for kk in range(N_class):
                D[:, :, kk] = (Img - C[kk] * b) ** 2
            M = updateM(D, q)
        b_out = updateB(Img, q, C, M, Bas, GGT, ImgG)
    return M, b_out, C
def updateB(Img, q, C, M, Bas, GGT, ImgG):
    """Estimate the bias field as a linear combination of basis images.

    Solves the normal equations A w = V for the basis weights w and
    returns b = sum_k w[k] * Bas[:, :, k]. Falls back to the pseudoinverse
    when A is singular.
    """
    n_class = M.shape[2]
    n_bas = Bas.shape[2]

    # membership-weighted sums of the class means (first and second order)
    PC = np.zeros(Img.shape)
    PC2 = np.zeros(Img.shape)
    for k in range(n_class):
        Mq = M[:, :, k] ** q
        PC = PC + C[k] * Mq
        PC2 = PC2 + C[k] ** 2 * Mq

    V = np.zeros(n_bas)
    A = np.zeros([n_bas, n_bas])
    for i in range(n_bas):
        V[i] = np.sum(ImgG[:, :, i] * PC)  # inner product (mask is in ImgG)
        for j in range(n_bas):
            A[i, j] = np.sum(GGT[:, :, i, j] * PC2)  # inner product (mask in GGT)
            A[j, i] = A[i, j]

    try:
        # numerically stable: solves A w = V directly
        w = np.linalg.solve(A, V)
    except np.linalg.LinAlgError:
        # Fallback if A is rank deficient / singular:
        print("Warning: A is singular, uses pseudoinverse in updateB")
        w = np.dot(np.linalg.pinv(A), V)

    b = np.zeros(Img.shape)
    for k in range(n_bas):
        b = b + np.dot(w[k], Bas[:, :, k])
    return b
def updateC(Img, W, b, M):
    """Update the class means given bias field b and memberships M.

    Each mean is a weighted least-squares ratio; a zero denominator is
    guarded by adding 1 so the mean collapses to 0 instead of dividing
    by zero.
    """
    n_class = M.shape[2]
    C_new = np.zeros(n_class)
    for k in range(n_class):
        numerator = np.sum(b * Img * M[:, :, k] * W)      # inner product
        denominator = np.sum((b ** 2) * M[:, :, k] * W)   # inner product
        C_new[k] = numerator / (denominator + (denominator == 0))
    return C_new
def updateM(e, q):
    """Update the membership functions from the per-class residuals e.

    q > 1 yields fuzzy memberships proportional to e**(-1/(q-1));
    q == 1 yields a hard (binary) assignment to the minimal-residual class;
    any other fuzzifier aborts the program.
    """
    n_class = e.shape[2]
    M = np.zeros(e.shape)
    if q > 1:
        # small offset avoids division by zero residuals
        shifted = e + 0.000000000001
        f = 1 / (shifted ** (1 / (q - 1)))
        f_sum = np.sum(f, 2)
        for k in range(n_class):
            M[:, :, k] = f[:, :, k] / f_sum
    elif q == 1:
        winner = np.argmin(e, 2)
        for k in range(n_class):
            M[:, :, k] = (winner == k)
    else:
        sys.exit('Error: MICO: wrong fuzzifizer')
    return M
3D | Aswendt-Lab/AIDAmri | bin/2.2_DTIPreProcessing/applyMICO.py | .py | 7,221 | 256 | """
@Article{li2014multiplicative,
author = {Li, Chunming and Gore, John C and Davatzikos, Christos},
title = {Multiplicative intrinsic component optimization (MICO) for MRI bias field estimation and tissue segmentation},
journal = {Magnetic resonance imaging},
year = {2014},
volume = {32},
number = {7},
pages = {913--923},
publisher = {Elsevier},
}
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import numpy as np
import nibabel as nii
import sys,os
import MICO
import progressbar
import cv2
from tqdm import tqdm
def run_MICO(IMGdata,outputPath):
    """Slice-wise MICO bias field correction of a NIfTI volume.

    Parameters
    ----------
    IMGdata : str
        Path to the input NIfTI file.
    outputPath : str
        Directory the corrected volume ('<stem>Mico.nii.gz') is written to.

    Returns
    -------
    str
        Path of the bias-corrected output file.

    NOTE(review): the per-slice initialisation uses np.random without a seed,
    so repeated runs are not bit-identical - confirm that is acceptable.
    """
    data = nii.load(IMGdata)
    # get UNSCALED img data
    # NOTE(review): get_fdata() actually returns SCALED data; the unscaled
    # variant would be data.dataobj.get_unscaled() - confirm intent.
    vol = data.get_fdata()
    biasCorrectedVol = np.zeros(vol.shape[0:3])
    #1) Scaling factor depending on image intensity
    ImgMe = np.mean(vol)
    if ImgMe > 10000:
        nCvalue = 1000
    elif ImgMe > 1000:
        nCvalue = 10
    else:
        nCvalue = 1
    #2) Global threshold over whole volume
    #Scale the volume as it will later be used for the slices.
    vol_norm = vol / nCvalue
    nz_all = vol_norm[vol_norm > 0] # all voxels above 0 in volume
    if nz_all.size > 0:
        #e.g. global median as threshold (50th percentile)
        global_thr = np.percentile(nz_all, 50)
        print(f"Global ROI-threshold of volumen: {global_thr:.3f}")
    else:
        global_thr = 0.0
        print("Warning: No voxels above zero in volume, global_thr = 0")
    #Debug
    # --- Debug: Test how large the ROI would be for some slices ---
    print("\nROI-Check for example slices:")
    for idx in [0, vol.shape[2] // 2, vol.shape[2] - 1]: # first, middle, last slice
        Img_test = vol_norm[:, :, idx]
        ROIt_test = Img_test > global_thr
        print(f"Slice {idx}: ROI voxels = {ROIt_test.sum()} of {Img_test.size}")
    print("------------------------------------------------------------\n")
    #--- End debug ---
    # NOTE(review): this local shadows the imported 'progressbar' module.
    progressbar = tqdm(total=vol.shape[2], desc='Biasfieldcorrection')
    #Debug output
    print(f"Amount of non-zero voxels in total volumen: {nz_all.size}")
    print(
        f"Min/Median/Max of nz_all Voxels: {nz_all.min():.3f} / {np.percentile(nz_all, 50):.3f} / {nz_all.max():.3f}")
    print(f"Global ROI-threshold of volume: {global_thr:.3f}")
    #--- End debug output ---
    # 3) loop over slices, ROI with global threshold
    for idx in range(vol.shape[2]):
        Img = vol[:,:,idx] / nCvalue
        kernel =np.ones((5,5),np.uint8)
        # NOTE(review): 'erosion' is computed but never used afterwards.
        erosion = cv2.erode(Img,kernel,iterations = 1)
        iterNum = 100          # MICO outer iterations per slice
        N_region = 1           # number of tissue regions kept in the output
        q = 1                  # fuzzifier (1 => hard memberships, see updateM)
        A = 1                  # scale applied to the random class means
        Img_original = Img
        nrow = Img.shape[0]
        ncol = Img.shape[1]
        n = nrow * ncol        # NOTE(review): 'n' is re-bound as loop index below
        #Global ROI thresholding
        if global_thr > 0:
            ROIt = Img > global_thr
        else:
            # Fallback: simple non-zero thresholding
            ROIt = Img > 0
        ROI = np.zeros((nrow, ncol))
        ROI[ROIt] = 1
        # Polynomial basis images for the bias field and their masked products.
        Bas = getBasisOrder3(nrow, ncol)
        N_bas = Bas.shape[2]
        ImgG = np.zeros([nrow,ncol,10])
        GGT = np.zeros([nrow, ncol, 10,10])
        for ii in range(N_bas):
            ImgG[:,:,ii] = Img * Bas[:,:, ii]*ROI
            for jj in range(N_bas):
                GGT[:,:,ii, jj] = Bas[:,:, ii]*Bas[:,:, jj]*ROI
                GGT[:,:,jj, ii] = GGT[:,:,ii, jj]
        energy_MICO = np.zeros([3, iterNum])
        b = np.ones([nrow,ncol])
        # Random initialisation of class means and memberships (one restart).
        for ini_num in range(1):
            C = np.random.rand(3, 1)
            C = C * A
            M = np.random.rand(nrow, ncol, 3)
            a = np.sum(M, 2)
            # NOTE(review): only the first N_region (=1) of the 3 membership
            # slices is normalised here - confirm this is intended.
            for k in range(N_region):
                M[:,:, k]=M[:,:, k]/ a
            # NOTE(review): e_max/N_max/M_old/chg appear to be leftovers of a
            # convergence check that is no longer performed.
            e_max = np.amax(M,2)
            N_max = np.argmax(M,2)
            M_old = M
            chg = 10000
            energy_MICO[ini_num, 0] = get_energy(Img, b, C, M, ROI, q)
            for n in range(1,iterNum):
                M, b, C = MICO.runMICO(Img, q, ROI, M, C, b, Bas, GGT, ImgG, 1, 1)
                energy_MICO[ini_num, n] = get_energy(Img, b, C, M, ROI, q)
                if np.mod(n, 1) == 0:
                    # NOTE(review): PC is computed but never used afterwards.
                    PC = np.zeros([nrow,ncol])
                    for k in range(N_region):
                        PC = PC + C[k] * M[:,:, k]
                    img_bc = Img /b  # bias field corrected image
                    # clamp negative and implausibly large intensities to 0
                    smV = img_bc < 0
                    img_bc[smV] = 0
                    smV = img_bc > 5000
                    img_bc[smV] = 0
        M, C = sortMemC(M, C)
        # NOTE(review): 'seg' (the label image) is computed but never stored.
        seg = np.zeros([nrow,ncol])
        for k in range(N_region):
            seg = seg + k * M[:,:, k] # label the k-th region
        biasCorrectedVol[:, :, idx] = img_bc
        progressbar.update(1)
    progressbar.close()
    unscaledNiiData = nii.Nifti1Image(biasCorrectedVol, data.affine)
    hdrOut = unscaledNiiData.header
    hdrOut.set_xyzt_units('mm')
    outputData = os.path.join(outputPath,os.path.basename(IMGdata).split('.')[0]+'Mico.nii.gz')
    nii.save(unscaledNiiData,outputData)
    return outputData
def sortMemC(M, C):
    """Sort the class means C ascending and permute the membership slices
    of M to match. Supports 3D and 4D membership arrays; any other rank
    aborts the program."""
    order = np.argsort(C)
    C_out = np.sort(C)
    ndim = len(M.shape)
    if ndim == 4:
        M_out = np.zeros([M.shape[0], M.shape[1], M.shape[2], len(order)])
        for i in range(np.size(C)):
            M_out[:, :, :, i] = M[:, :, :, order[i]]
    elif ndim == 3:
        M_out = np.zeros([M.shape[0], M.shape[1], len(order)])
        for i in range(np.size(C)):
            M_out[:, :, i] = M[:, :, order[i]]
    else:
        sys.exit('Error: sortMemC: wrong dimension of the membership function')
    return M_out, C_out
def get_energy(Img, b, C, M, ROI, q):
    """MICO clustering energy: sum over classes of the masked, squared
    residual (Img - b*C_k) weighted by the membership raised to q."""
    total = 0
    for k in range(M.shape[2]):
        residual = (Img * ROI - b * C[k] * ROI) ** 2
        total = total + np.sum(residual * M[:, :, k] ** q)
    return total
def getBasisOrder3(Height, Wide):
    """Return 10 normalised 2D polynomial basis images of shape
    (Height, Wide, 10), spanning Legendre-style terms up to order 3 in
    x (columns) and y (rows), each scaled to unit Frobenius norm."""
    # x varies along columns, y along rows, both on [-1, 1]
    x, y = np.meshgrid(np.linspace(-1, 1, Wide), np.linspace(-1, 1, Height))

    basis = np.zeros([Height, Wide, 10])
    basis[:, :, 0] = 1
    basis[:, :, 1] = x
    basis[:, :, 2] = (3 * x * x - 1) / 2
    basis[:, :, 3] = (5 * x * x * x - 3 * x) / 2
    basis[:, :, 4] = y
    basis[:, :, 5] = x * y
    basis[:, :, 6] = y * (3 * x * x - 1) / 2
    basis[:, :, 7] = (3 * y * y - 1) / 2
    basis[:, :, 8] = (3 * y * y - 1) * x / 2
    basis[:, :, 9] = (5 * y * y * y - 3 * y) / 2

    # normalise every basis image to unit Frobenius norm
    for k in range(10):
        basis[:, :, k] = basis[:, :, k] / np.sqrt(np.sum(basis[:, :, k] ** 2))
    return basis
if __name__ == "__main__":
    import argparse

    # Command-line front end: bias-correct a single NIfTI file next to itself.
    parser = argparse.ArgumentParser(description='Bias Correction')
    requiredNamed = parser.add_argument_group('required named arguments')
    requiredNamed.add_argument('-i','--input', help='Path to input file',required=True)
    args = parser.parse_args()

    if args.input is not None:  # fixed: condition previously tested args.input twice
        input_file = args.input  # renamed: 'input' shadowed the builtin
    if not os.path.exists(input_file):
        # fixed: the old message formatted 'args.file', an attribute that does
        # not exist and raised AttributeError instead of printing the error
        sys.exit("Error: '%s' is not an existing directory or file." % (input_file,))

    # BUG FIX: run_MICO requires an output path as its second argument;
    # calling it with a single argument raised a TypeError. Results are
    # written next to the input file.
    result = run_MICO(input_file, os.path.dirname(input_file))
| Python |
3D | Aswendt-Lab/AIDAmri | bin/2.3_fMRIPreProcessing/preProcessing_fMRI.py | .py | 6,607 | 185 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import nipype.interfaces.fsl as fsl
import os,sys
import nibabel as nii
import numpy as np
import nipype.interfaces.ants as ants
from pathlib import Path
import subprocess
import shutil
def reset_orientation(input_file):
    """Back up the NIfTI file and force it into radiological orientation.

    A pristine copy of the input is stored in a sibling 'brkraw' directory,
    then the file is rewritten with its unscaled raw data, its orientation
    information deleted and finally forced to radiological via FSL's
    fslorient. If the backup directory already exists the file is assumed
    to have been reset on a previous run and nothing is done.
    """
    brkraw_dir = os.path.join(os.path.dirname(input_file), "brkraw")
    if os.path.exists(brkraw_dir):
        # Already backed up and reset on an earlier run - do not reset twice.
        return

    os.mkdir(brkraw_dir)
    dst_path = os.path.join(brkraw_dir, os.path.basename(input_file))
    shutil.copyfile(input_file, dst_path)

    data = nii.load(input_file)
    raw_img = data.dataobj.get_unscaled()  # bypass scl_slope/scl_inter scaling
    raw_nii = nii.Nifti1Image(raw_img, data.affine)
    nii.save(raw_nii, input_file)

    # BUG FIX: pass argument vectors instead of shell=True strings so paths
    # containing spaces or shell metacharacters cannot break (or inject into)
    # the command line.
    subprocess.run(["fslorient", "-deleteorient", input_file])

    # Command to force the radiological orientation
    subprocess.run(["fslorient", "-forceradiological", input_file])
def applyBET(input_file,frac,radius,outputPath):
    """Brain extraction via the FSL Brain Extraction Tool (BET).

    Parameters: input_file (NIfTI path), frac (fractional intensity
    threshold), radius (head radius in mm), outputPath (output directory).
    Returns the path of the extracted brain ('<stem>Bet.nii.gz').
    A temporary 'fslScaleTemp.nii.gz' is created next to the input and
    removed afterwards.
    """
    # scale Nifti data by factor 10
    # (presumably so rodent-brain dimensions match BET's human-tuned
    # defaults such as the mm head radius - TODO confirm)
    data = nii.load(input_file)
    imgTemp = data.get_fdata()
    scale = np.eye(4)* 10
    scale[3][3] = 1
    #imgTemp = np.rot90(imgTemp,2)
    imgTemp = np.flip(imgTemp, 2)
    #imgTemp = np.flip(imgTemp, 0)
    # NOTE(review): '*' on ndarrays is ELEMENT-wise, so 'data.affine * scale'
    # scales the diagonal by 10 and zeroes every off-diagonal/translation
    # entry rather than composing a scaling transform - confirm intended.
    scaledNiiData = nii.Nifti1Image(imgTemp, data.affine * scale)
    hdrIn = scaledNiiData.header
    hdrIn.set_xyzt_units('mm')
    scaledNiiData = nii.as_closest_canonical(scaledNiiData)
    print('Orientation:' + str(nii.aff2axcodes(scaledNiiData.affine)))
    fslPath = os.path.join(os.path.dirname(input_file),'fslScaleTemp.nii.gz')
    nii.save(scaledNiiData, fslPath)

    # extract brain (robust iterative BET; also writes a binary brain mask)
    output_file = os.path.join(outputPath, os.path.basename(input_file).split('.')[0] + 'Bet.nii.gz')
    myBet = fsl.BET(in_file=fslPath, out_file=output_file,frac=frac,radius=radius,robust=True, mask = True)
    myBet.run()
    os.remove(fslPath)

    # unscale result data by factor 10ˆ(-1) (same element-wise caveat as above)
    dataOut = nii.load(output_file)
    imgOut = dataOut.get_fdata()
    scale = np.eye(4)/ 10
    scale[3][3] = 1
    unscaledNiiData = nii.Nifti1Image(imgOut, dataOut.affine * scale)
    hdrOut = unscaledNiiData.header
    hdrOut.set_xyzt_units('mm')
    nii.save(unscaledNiiData, output_file)
    print("Brain extraction completed")
    return output_file
def biasfieldcorr(input_file, outputPath):
    # N4 bias field correction via ANTs (3D volume, shrink factor 4).
    out_name = os.path.basename(input_file).split('.')[0] + 'Bias.nii.gz'
    output_file = os.path.join(outputPath, out_name)
    ants.N4BiasFieldCorrection(input_image=input_file,
                               output_image=output_file,
                               shrink_factor=4,
                               dimension=3).run()
    print("Biasfield correction completed")
    return output_file
def smoothIMG(input_file,outputPath):
    """
    Smoothes image via FSL. Only input and output has do be specified. Parameters are fixed to box shape and to the kernel size of 0.1 voxel.

    Returns the path of the smoothed file ('<stem>DNSmooth.nii.gz' inside
    outputPath); an intermediate denoised file '<stem>DN.nii.gz' is written
    next to the input.
    """
    data = nii.load(input_file)
    vol = data.get_fdata()
    # collapse the 4th (time) dimension with a voxel-wise minimum
    ImgSmooth = np.min(vol, 3)

    unscaledNiiData = nii.Nifti1Image(ImgSmooth, data.affine)
    hdrOut = unscaledNiiData.header
    hdrOut.set_xyzt_units('mm')
    output_file = os.path.join(os.path.dirname(input_file),
                               os.path.basename(input_file).split('.')[0] + 'DN.nii.gz')
    nii.save(unscaledNiiData, output_file)
    input_file = output_file
    # BUG FIX: this previously read the module-level variable 'inputFile'
    # (defined only when the file is run as a script), so calling this
    # function from other code failed or named the output after the wrong file.
    output_file = os.path.join(outputPath, os.path.basename(input_file).split('.')[0] + 'Smooth.nii.gz')
    myGauss = fsl.SpatialFilter(in_file=input_file,out_file=output_file,operation='median',kernel_shape='box',kernel_size=0.1)
    myGauss.run()
    print("Smoothing completed")
    return output_file
def thresh(input_file, outputPath):
    # Apply a fixed lower intensity threshold of 20 with FSL.
    out_name = os.path.basename(input_file).split('.')[0] + 'Thres.nii.gz'
    output_file = os.path.join(outputPath, out_name)
    fsl.Threshold(in_file=input_file, out_file=output_file, thresh=20).run()
    print("Thresholding completed")
    return output_file
def cropToSmall(input_file, outputPath):
    # Extract a fixed region of interest (x 40..169, y 50..159, z 0..11) via FSL.
    out_name = os.path.basename(input_file).split('.')[0] + 'Crop.nii.gz'
    output_file = os.path.join(outputPath, out_name)
    fsl.ExtractROI(in_file=input_file, roi_file=output_file,
                   x_min=40, x_size=130,
                   y_min=50, y_size=110,
                   z_min=0, z_size=12).run()
    print("Cropping done")
    return output_file
if __name__ == "__main__":
    import argparse

    # Command-line front end for the rsfMRI preprocessing chain:
    # orientation reset -> smoothing -> brain extraction.
    parser = argparse.ArgumentParser(description='Preprocessing of rsfMRI Data')
    requiredNamed = parser.add_argument_group('required named arguments')
    requiredNamed.add_argument('-i', '--input', help='Path to the RAW data of rsfMRI NIfTI file', required=True)
    parser.add_argument('-f', '--frac',
                        help='Fractional intensity threshold - default=0.3, smaller values give larger brain outline estimates',
                        nargs='?', type=float, default=0.15)
    parser.add_argument('-r', '--radius', help='Head radius (mm not voxels) - default=45', nargs='?', type=int ,default=45)
    parser.add_argument('-g', '--vertical_gradient', help='Vertical gradient in fractional intensity threshold - default=0.0, positive values give larger brain outlines at bottom and smaller brain outlines at top', nargs='?',
                        type=float,default=0.0)
    args = parser.parse_args()

    # set parameters
    inputFile = None
    if args.input is not None:  # fixed: condition previously tested args.input twice
        inputFile = args.input
    if not os.path.exists(inputFile):
        # BUG FIX: the old message formatted 'args.file', an attribute that does
        # not exist on the namespace, so the error path itself crashed with an
        # AttributeError before the message could be printed.
        sys.exit("Error: '%s' is not an existing directory or file." % (inputFile,))

    frac = args.frac
    radius = args.radius
    vertical_gradient = args.vertical_gradient
    outputPath = os.path.dirname(inputFile)
    print(f"Frac: {frac} Radius: {radius} Gradient {vertical_gradient}")

    reset_orientation(inputFile)
    print("Orientation resetted to RAS")

    outputSmooth = smoothIMG(input_file=inputFile,outputPath=outputPath)

    # get rid of your skull
    outputBET = applyBET(input_file=outputSmooth,frac=frac,radius=radius,outputPath=outputPath)
    print("Preprocessing completed")
| Python |
3D | Aswendt-Lab/AIDAmri | bin/2.3_fMRIPreProcessing/registration_rsfMRI.py | .py | 12,590 | 279 | """
Created on 10/08/2017
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne
"""
import sys,os
import glob
import shutil as sh
import subprocess
import shlex
def _run_reg_command(command):
    """Run one NiftyReg command line, echo its stdout, re-raise on failure."""
    command_args = shlex.split(command)
    try:
        result = subprocess.run(command_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        print(f"Output of {command}:\n{result.stdout}")
    except Exception as e:
        # BUG FIX: the original messages contained the invalid escape
        # '\Errorcode' (a literal backslash); '\n' was clearly intended.
        print(f'Error while executing the command: {command_args}\nErrorcode: {str(e)}')
        raise


def regABA2rsfMRI(inputVolume, T2data, brain_template, brain_anno, splitAnno, splitAnno_rsfMRI, anno_rsfMRI,
                  bsplineMatrix, dref, outfile):
    """Register the Allen Brain Atlas annotations and template onto an rsfMRI volume.

    Uses NiftyReg (reg_aladin/reg_resample) to rigidly align the subject's T2
    data to the rsfMRI volume and to propagate the (split/parental) annotation
    volumes and the template through the resulting affine plus the previously
    computed B-spline deformation. When dref is True, the products already
    computed for the DTI data (in the sibling 'dwi' directory) are copied
    instead of recomputing the rigid registration.

    Refactoring note: the identical subprocess/try/except stanza that was
    copy-pasted seven times is now the single helper _run_reg_command; the
    command lines themselves are unchanged.

    Returns the path of the resampled split annotation volume.
    """
    stem = os.path.basename(inputVolume).split('.')[0]
    outputT2w = os.path.join(outfile, stem + '_T2w.nii.gz')
    outputAff = os.path.join(outfile, stem + 'transMatrixAff.txt')
    if dref:
        # reuse the T2 registration already computed for the DTI data
        pathT2 = glob.glob(os.path.dirname(outfile) + '*/dwi/*T2w.nii.gz', recursive=False)
        sh.copy(pathT2[0], outputT2w)
    else:
        _run_reg_command(f"reg_aladin -ref {inputVolume} -flo {T2data} -res {outputT2w} -rigOnly -aff {outputAff}")

    # resample Annotation
    outputAnno = os.path.join(outfile, stem + '_Anno.nii.gz')
    _run_reg_command(f"reg_resample -ref {inputVolume} -flo {brain_anno} -cpp {outputAff} -inter 0 -res {outputAnno}")

    # resample split annotation
    outputAnnoSplit = os.path.join(outfile, stem + '_AnnoSplit.nii.gz')
    if dref:
        pathT2 = glob.glob(os.path.dirname(outfile) + '*/dwi/*AnnoSplit.nii.gz', recursive=False)
        sh.copy(pathT2[0], outputAnnoSplit)
    else:
        _run_reg_command(f"reg_resample -ref {brain_anno} -flo {splitAnno} -trans {bsplineMatrix} -inter 0 -res {outputAnnoSplit}")
    _run_reg_command(f"reg_resample -ref {inputVolume} -flo {outputAnnoSplit} -trans {outputAff} -inter 0 -res {outputAnnoSplit}")

    # resample split parental annotation
    outputAnnoSplit_rsfMRI = os.path.join(outfile, stem + '_AnnoSplit_parental.nii.gz')
    if dref:
        pathT2 = glob.glob(os.path.dirname(outfile) + '*/dwi/*AnnoSplit_parental.nii.gz', recursive=False)
        sh.copy(pathT2[0], outputAnnoSplit_rsfMRI)
    else:
        _run_reg_command(f"reg_resample -ref {brain_anno} -flo {splitAnno_rsfMRI} -trans {bsplineMatrix} -inter 0 -res {outputAnnoSplit_rsfMRI}")
    _run_reg_command(f"reg_resample -ref {inputVolume} -flo {outputAnnoSplit_rsfMRI} -trans {outputAff} -inter 0 -res {outputAnnoSplit_rsfMRI}")

    # resample parental annotation
    outputAnno_rsfMRI = os.path.join(outfile, stem + '_Anno_parental.nii.gz')
    if dref:
        pathT2 = glob.glob(os.path.dirname(outfile) + '*/dwi/*Anno_parental.nii.gz', recursive=False)
        sh.copy(pathT2[0], outputAnno_rsfMRI)
    else:
        _run_reg_command(f"reg_resample -ref {brain_anno} -flo {anno_rsfMRI} -trans {bsplineMatrix} -inter 0 -res {outputAnno_rsfMRI}")
    _run_reg_command(f"reg_resample -ref {inputVolume} -flo {outputAnno_rsfMRI} -trans {outputAff} -inter 0 -res {outputAnno_rsfMRI}")

    # resample in-house developed template
    outputTemplate = os.path.join(outfile, stem + '_Template.nii.gz')
    _run_reg_command(f"reg_resample -ref {inputVolume} -flo {brain_template} -trans {outputAff} -res {outputTemplate}")

    return outputAnnoSplit
def find_RefStroke(refStrokePath, inputVolume):
    """Locate the reference (stroke) incidence mask for this subject under
    refStrokePath. The subject is matched by the first 9 characters of the
    input volume's file name; a (possibly empty) list of matches is returned."""
    subject_prefix = os.path.basename(inputVolume)[0:9]
    pattern = refStrokePath + '/' + subject_prefix + '*/anat/*IncidenceData_mask.nii.gz'
    return glob.glob(pattern, recursive=False)
def find_RefAff(inputVolume):
    """Return the affine transformation matrix file(s) stored in the subject's
    'anat' directory two levels above the input volume (possibly empty list)."""
    subject_dir = os.path.dirname(os.path.dirname(inputVolume))
    return glob.glob(subject_dir + '/anat/*MatrixAff.txt', recursive=False)
def find_RefTemplate(inputVolume):
    """Return the affinely registered template file(s) stored in the subject's
    'anat' directory two levels above the input volume (possibly empty list)."""
    subject_dir = os.path.dirname(os.path.dirname(inputVolume))
    return glob.glob(subject_dir + '/anat/*TemplateAff.nii.gz', recursive=False)
def find_relatedData(pathBase):
    """Collect the registration products of the anatomical pipeline under
    pathBase: brain-extracted T2, stroke mask, annotation, Allen template and
    the B-spline deformation matrix. Each entry is a (possibly empty) glob
    result list."""
    def anat_glob(suffix):
        # all 'anat' subdirectories below pathBase, matched by file suffix
        return glob.glob(pathBase + '*/anat/*' + suffix, recursive=False)

    pathT2 = anat_glob('Bet.nii.gz')
    pathStroke_mask = anat_glob('Stroke_mask.nii.gz')
    pathAnno = anat_glob('Anno.nii.gz')
    pathAllen = anat_glob('Allen.nii.gz')
    bsplineMatrix = anat_glob('MatrixBspline.nii')
    return pathT2, pathStroke_mask, pathAnno, pathAllen, bsplineMatrix
if __name__ == "__main__":
    import argparse

    # Command-line driver: registers the Allen Brain Atlas annotations and
    # templates to a preprocessed rsfMRI volume, using reference data found
    # in the sibling anat/ directory of the same session.
    parser = argparse.ArgumentParser(description='Registration of Allen Brain Atlas to rsfMRI')
    requiredNamed = parser.add_argument_group('required named arguments')
    requiredNamed.add_argument('-i', '--inputVolume', help='Path to rsfMRI data after preprocessing', required=True)
    parser.add_argument('-d', '--dtiasRef', action='store_true', help='use DTI as reference if data quality is low')
    parser.add_argument('-r', '--referenceDay', help='Reference Stroke mask', nargs='?', type=str,
                        default=None)
    parser.add_argument('-s', '--splitAnno', help='Split annotations atlas', nargs='?', type=str,
                        default=os.path.abspath(os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/ARA_annotationR+2000.nii.gz')
    parser.add_argument('-f', '--splitAnno_rsfMRI', help='Split annotations atlas for rsfMRI', nargs='?', type=str,
                        default=os.path.abspath(os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/annoVolume+2000_rsfMRI.nii.gz')
    parser.add_argument('-a', '--anno_rsfMRI', help='Annotations atlas for rsfMRI', nargs='?', type=str,
                        default=os.path.abspath(os.path.join(os.getcwd(), os.pardir, os.pardir)) + '/lib/annoVolume.nii.gz')
    args = parser.parse_args()

    stroke_mask = None
    inputVolume = None
    refStrokePath = None
    splitAnno = None
    splitAnno_rsfMRI = None
    anno_rsfMRI = None

    if args.inputVolume is not None:
        inputVolume = args.inputVolume
        if not os.path.exists(inputVolume):
            sys.exit("Error: '%s' is not an existing directory." % (inputVolume,))

    # all outputs are written next to the input volume
    outfile = os.path.join(os.path.dirname(inputVolume))
    if not os.path.exists(outfile):
        os.makedirs(outfile)

    # find related data
    pathT2, pathStroke_mask, pathAnno, pathTemplate, bsplineMatrix = find_relatedData(os.path.dirname(outfile))
    # NOTE: '== 0' replaces the former 'is 0' identity checks, which relied on
    # CPython small-int caching and emit a SyntaxWarning on Python >= 3.8.
    if len(pathT2) == 0:
        T2data = []
        sys.exit("Error: %s' has no reference T2 template." % (os.path.basename(inputVolume),))
    else:
        T2data = pathT2[0]

    if len(pathStroke_mask) == 0:
        pathStroke_mask = []
        print("Notice: '%s' has no defined reference (stroke) mask - will proceed without." % (os.path.basename(inputVolume),))
    else:
        stroke_mask = pathStroke_mask[0]

    if len(pathAnno) == 0:
        pathAnno = []
        sys.exit("Error: %s' has no reference annotations." % (os.path.basename(inputVolume),))
    else:
        brain_anno = pathAnno[0]

    if len(pathTemplate) == 0:
        pathTemplate = []
        sys.exit("Error: %s' has no reference template." % (os.path.basename(inputVolume),))
    else:
        brain_template = pathTemplate[0]

    if len(bsplineMatrix) == 0:
        bsplineMatrix = []
        sys.exit("Error: %s' has no bspline Matrix." % (os.path.basename(inputVolume),))
    else:
        bsplineMatrix = bsplineMatrix[0]

    # find reference stroke mask (optional, only when a reference day is given)
    refStroke_mask = None
    if args.referenceDay is not None:
        referenceDay = args.referenceDay
        refStrokePath = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(outfile))), referenceDay)
        if not os.path.exists(refStrokePath):
            sys.exit("Error: '%s' is not an existing directory." % (refStrokePath,))
        refStroke_mask = find_RefStroke(refStrokePath, inputVolume)
        if len(refStroke_mask) == 0:
            refStroke_mask = []
            print("Notice: '%s' has no defined reference (stroke) mask - will proceed without." % (os.path.basename(inputVolume),))
        else:
            refStroke_mask = refStroke_mask[0]

    # validate the three atlas paths (defaults point into the repo's lib/)
    if args.splitAnno is not None:
        splitAnno = args.splitAnno
        if not os.path.exists(splitAnno):
            sys.exit("Error: '%s' is not an existing directory." % (splitAnno,))

    if args.splitAnno_rsfMRI is not None:
        splitAnno_rsfMRI = args.splitAnno_rsfMRI
        if not os.path.exists(splitAnno_rsfMRI):
            sys.exit("Error: '%s' is not an existing directory." % (splitAnno_rsfMRI,))

    if args.anno_rsfMRI is not None:
        anno_rsfMRI = args.anno_rsfMRI
        if not os.path.exists(anno_rsfMRI):
            sys.exit("Error: '%s' is not an existing directory." % (anno_rsfMRI,))

    output = regABA2rsfMRI(inputVolume, T2data, brain_template, brain_anno, splitAnno, splitAnno_rsfMRI,
                           anno_rsfMRI, bsplineMatrix, args.dtiasRef, outfile)
    # restore stdout in case the registration redirected it
    sys.stdout = sys.__stdout__

    current_dir = os.path.dirname(inputVolume)
    search_string = os.path.join(current_dir, "*EPI.nii.gz")
    currentFile = glob.glob(search_string)

    search_string = os.path.join(current_dir, "*.nii*")
    created_imgs = glob.glob(search_string, recursive=True)

    os.chdir(os.path.dirname(os.getcwd()))
    for idx, img in enumerate(created_imgs):
        # 'is None' replaces the former '== None' comparison (PEP 8).
        # Loop body is currently a no-op; the orientation fix below is disabled.
        if img is None:
            continue
        #os.system('python adjust_orientation.py -i '+ str(img) + ' -t ' + currentFile[0])

    print("Registration done")
| Python |
3D | Aswendt-Lab/AIDAmri | ARA/readXML_Lables.m | .m | 392 | 8 | function annotation50CHANGEDannolabelIDs = readXML_Lables(xml_file)
%% Import the data
% Read the annotation label spreadsheet and return its fifth column
% (the region labels) as a string array, with missing cells set to ''.
% NOTE(review): despite the function name, the input is read with xlsread,
% i.e. an .xlsx spreadsheet rather than an XML file -- confirm with callers.
[~, ~, annotation50CHANGEDannolabelIDs] = xlsread(xml_file);
% keep column 5 only and drop the header row
annotation50CHANGEDannolabelIDs = annotation50CHANGEDannolabelIDs(2:end,5);
annotation50CHANGEDannolabelIDs = string(annotation50CHANGEDannolabelIDs);
% blank out <missing> entries so downstream string operations do not fail
annotation50CHANGEDannolabelIDs(ismissing(annotation50CHANGEDannolabelIDs)) = '';
end | MATLAB |
3D | Aswendt-Lab/AIDAmri | ARA/getParentalARA.m | .m | 1,206 | 32 | %% Method generate a atlas with all parental regions of the given xlsx file
% getParentalARA('./annotation_label_IDs_valid.xlsx','./annotation/annotation.nii.gz')
function getParentalARA(xml_file,atlasNii_file)
addpath('./AllenBrainAPI-master/');
labelsStrArray = char(readXML_Lables(xml_file));
atlasData = load_nii(atlasNii_file);
parentalAtlasVolume = zeros(size(atlasData.img));
for label_idx = 1:length(labelsStrArray)
disp(labelsStrArray(label_idx,:));
childTable = getAllenStructureList('childrenOf',labelsStrArray(label_idx,:));
if isempty(childTable)
continue
end
childIDs = childTable.id;
parentalID = name2structureID(labelsStrArray(label_idx,:));
for child_idx = 1:length(childIDs)
parentalAtlasVolume(atlasData.img==childIDs(child_idx)) = parentalID;
end
end
% change large annotation value of Primary somatosensory area, unassigned
parentalAtlasVolume(parentalAtlasVolume==182305689)= 1098;
file=dir(atlasNii_file);
fileName = strsplit(string(file.name),'.');
output_file = [fileName{1} '_parent.' fileName{2} '.' fileName{3}];
atlasData.img = parentalAtlasVolume;
save_nii(atlasData,output_file);
end
| MATLAB |
3D | Aswendt-Lab/AIDAmri | ARA/download_ARA.py | .py | 2738 | 85 | """
Created on 17/03/2020
@author: Niklas Pallast
Neuroimaging & Neuroengineering
Department of Neurology
University Hospital Cologne

Download the Allen CCF annotation and template volumes (50 um resolution)
via the AllenSDK, reorient them to RAS, and save them as NIfTI files.

More information can be found here:
http://alleninstitute.github.io/AllenSDK/_modules/allensdk/api/queries/reference_space_api.html
"""
import os
import nrrd # pip install pynrrd, if pynrrd is not already installed
import nibabel as nib # pip install nibabel, if nibabel is not already installed
import numpy as np
from allensdk.api.queries.reference_space_api import ReferenceSpaceApi
from allensdk.config.manifest import Manifest
# the annotation download writes a file, so we will need somewhere to put it
annotation_dir = 'annotation'
Manifest.safe_mkdir(annotation_dir)
annotation_path = os.path.join(annotation_dir, 'annotation.nrrd')
# this is a string which contains the name of the latest ccf version
annotation_version = ReferenceSpaceApi.CCF_VERSION_DEFAULT
# download annotations; 50 is the voxel resolution in micrometers
mcapi = ReferenceSpaceApi()
mcapi.download_annotation_volume(annotation_version, 50, annotation_path)
annotation = nrrd.read(annotation_path)
# read nrrd data and header
# NOTE(review): this second nrrd.read duplicates the read above -- the
# first result ('annotation') is never used.
_nrrd = nrrd.read(annotation_path)
data = _nrrd[0]
header = _nrrd[1]
# build the affine from the nrrd spacing; 0.001 presumably converts the
# micrometer voxel size to millimeters -- TODO confirm
space_value = header['space directions']
affine = np.eye(4) * 0.001 * space_value[0, 0]
affine[3][3] = 1
# ensure RAS orientation
data = np.swapaxes(data, 2, 0)
data = np.flip(data, 2)
img = nib.Nifti1Image(data, affine) #
img = nib.as_closest_canonical(img)
hdrIn = img.header
hdrIn.set_xyzt_units('mm')
# img = nib.Nifti1Image(data, img.affine,hdrIn)
scaledNiiData = nib.as_closest_canonical(img)
# os.path.dirname(annotation_path) == annotation_dir, so this writes
# annotation/annotation.nii.gz
nib.save(scaledNiiData, os.path.join(annotation_dir, os.path.dirname(annotation_path) + '.nii.gz'))
# the template download writes a file, so we will need somewhere to put it
template_dir = 'template'
Manifest.safe_mkdir(template_dir)
template_path = os.path.join(template_dir, 'template.nrrd')
# download templates (same 50 um resolution as the annotations)
mcapi.download_template_volume(50, template_path)
template = nrrd.read(template_path)
# read nrrd data and header (same conversion steps as for the annotations)
_nrrd = nrrd.read(template_path)
data = _nrrd[0]
header = _nrrd[1]
# build the affine from the nrrd spacing (micrometers -> millimeters)
space_value = header['space directions']
affine = np.eye(4) * 0.001 * space_value[0, 0]
affine[3][3] = 1
# ensure RAS orientation
data = np.swapaxes(data, 2, 0)
data = np.flip(data, 2)
img = nib.Nifti1Image(data, affine) #
img = nib.as_closest_canonical(img)
hdrIn = img.header
hdrIn.set_xyzt_units('mm')
# img = nib.Nifti1Image(data, img.affine,hdrIn)
scaledNiiData = nib.as_closest_canonical(img)
nib.save(scaledNiiData, os.path.join(template_dir, os.path.dirname(template_path) + '.nii.gz'))
| Python |
3D | Aswendt-Lab/AIDAmri | ARA/AllenBrainAPI-master/findAllenExperiments.m | .m | 2762 | 103 | function varargout = findAllenExperiments(varargin)
% find all Allen experiments defined parameters
%
% function [IDs,json]=findAllenExperiments('param1','val1','param2','val2',...)
%
%
% Inputs
% 'injection' - search for experiments with injections in this location. not searched for by default.
% this is case senstive. So 'VISp' searches for V1 but 'visp' produces an error.
% 'line' - search for experiments on this transgenic line. not searched for by default. use '0' for wild-type,
% 'primary' - true/false. true by default, search for injections where 'injection' was the primary
% injection structure. if false it it will search for cases where 'injection' was not the primary
% structure.
%
%
% Outputs
% IDs - a vector of numbers corresponding to experiment IDs
% json - cell array of structures containing all data pulled out of the JSON returned by the Allen API.
%
%
% Examples
% a) Return all experiments on wild type animals:
% findAllenExperiments('line','0');
%
% b) Return all experiments with injections in V1
% findAllenExperiments('injection','VISp');
% or:
% findAllenExperiments('injection','385');
%
% c) Return all experiments with injections in cortex
% findAllenExperiments('injection','Isocortex');
%
% Rob Campbell - Basel 2015
%
% requires JSONlab from the FEX
%
% See Also:
% getInjectionIDfromExperiment
% JSONlab provides loadjson; bail out early if it is not on the path
if ~exist('loadjson')
disp('Please install JSONlab from the FEX')
return
end
%Handle input arguments
% NOTE(review): addParamValue and urlread are deprecated in newer MATLAB
% releases (use addParameter / webread) -- left as-is for compatibility.
params = inputParser;
params.CaseSensitive = false;
params.addParamValue('injection','',@ischar)
params.addParamValue('line','',@ischar)
params.addParamValue('primary',true,@islogical)
params.parse(varargin{:});
%Build the URL
%this is the base URL
url = 'http://api.brain-map.org/api/v2/data/query.json?criteria=service::mouse_connectivity_injection_structure';
%now we extend it according to what is being searched
if ~isempty(params.Results.injection)
url = [url,'[injection_structures$eq',params.Results.injection,']'];
end
if ~isempty(params.Results.line)
url = [url,'[transgenic_lines$eq',params.Results.line,']'];
end
if params.Results.primary
primary='true';
else
primary='false';
end
url = [url,'[primary_structure_only$eq',primary,']'];
%Get data from Allen
page=urlread(url);
result=loadjson(page);
if ~result.success
fprintf('Query failed!\n%s\nAt URL: %s\n\n',result.msg,url);
end
%Return data
% preallocate, then copy one experiment ID out of each JSON record
IDs = ones(1,length(result.msg));
for ii=1:length(IDs)
IDs(ii) = result.msg{ii}.id;
end
fprintf('Found %d experiments\n',length(IDs))
% outputs are optional: nothing is returned unless the caller asks
if nargout>0
varargout{1}=IDs;
end
if nargout>1
varargout{2}=result.msg;
end
| MATLAB |
3D | Aswendt-Lab/AIDAmri | ARA/AllenBrainAPI-master/name2structureID.m | .m | 1653 | 72 | function [IDs,ARA_LIST]=name2structureID(names,ARA_LIST,quiet)
% Convert a list of ARA (Allen Reference Atlas) area names to a vector of structure IDs
%
% function [IDs,ARA_LIST]=name2structureID(names,ARA_LIST,quiet)
%
% Purpose
% Each Allen Reference Atlas (ARA) brain area is associated with a unique
% number (structure ID), a long name and an acronym. This function converts
% the acronym to an area structure ID number.
%
%
% Inputs
% names - a cell array (if a list) of brain area names or a single string.
% ARA_LIST - [optional] the first output of getAllenStructureList
% quiet - [optional, false by default] if true, do not print any warning messages
%
%
% Outputs
% IDs - a vector of brain area structure IDs (if more than one acronym was provided)
% ARA_LIST - the CSV data from getAllenStructureList in the form of a cell array
%
%
% Examples
%
% >> name2structureID('Paraflocculus')
%
%ans =
%
% int32
%
% 1041
%
%
%
% Rob Campbell
%
% See also:
% getAllenStructureList, structureID2name
% accept a single string by wrapping it in a cell array
if isstr(names)
names={names};
end
% fetch the full ARA structure list once if the caller did not supply it
if nargin<2 || isempty(ARA_LIST)
ARA_LIST = getAllenStructureList;
end
if nargin<3
quiet = false;
end
%loop through and find all the names
for ii=1:length(names)
% case-insensitive exact match against the full region names
f=strmatch(lower(names{ii}),lower(ARA_LIST.name),'exact');
if isempty(f)
if ~quiet
fprintf('%s finds no name %s in the atlas\n',mfilename, names{ii})
end
% NOTE(review): on a miss IDs(ii) is left unassigned (stays 0 if a later
% name matches, or the output is shorter if it was the last one).
continue
end
if length(f)>1
% NOTE(review): with quiet==true the error is suppressed, but the
% assignment below would still fail for a vector f -- confirm intent.
if ~quiet
error('found more than one ID index')
end
end
IDs(ii) = ARA_LIST.id(f);
end
| MATLAB |
3D | Aswendt-Lab/AIDAmri | ARA/AllenBrainAPI-master/structureID2name.m | .m | 2195 | 83 | function [names,acronyms,ARA_LIST]=structureID2name(structIDs,ARA_LIST,quiet)
% convert a list of ARA (Allen Reference Atlas) structure IDs to a cell array of names
%
% function [names,acronyms,ARA_LIST]=structureID2name(structIDs,ARA_LIST,quiet)
%
% Purpose
% Each Allen Reference Atlas (ARA) brain area is associated with a unique
% number (structure ID). This function converts the ID to an area name.
%
%
% Inputs
% structIDs - a vector (list) of integers corresponding to brain structure ids
% ARA_LIST - [optional] the first output of getAllenStructureList
% quiet - [optional, false by default] if true, do not print any warning messages
%
%
% Outputs
% names - a list of brain area names in a cell array (if there is more than one name)
% acronyms - a list of brain area abbreviations in a cell array (if there is more than one)
% ARA_LIST - the CSV data from getAllenStructureList in the form of a cell array
%
%
% Examples
%
% >> structureID2name(644)
% ans =
% 'Somatomotor areas, Layer 6a'
%
% >> structureID2name([60,33])
% ans =
% 'Entorhinal area, lateral part, layer 6b' 'Primary visual area, layer 6a'
%
% >> structureID2name([60,33],[],true) %for quiet operation
% ans =
% 'Entorhinal area, lateral part, layer 6b' 'Primary visual area, layer 6a'
%
%
% Rob Campbell
%
% See also:
% getAllenStructureList, acronym2structureID
% fetch the full ARA structure list once if the caller did not supply it
if nargin<2 || isempty(ARA_LIST)
ARA_LIST = getAllenStructureList;
end
if nargin<3
quiet = false;
end
%loop through and find all the IDs
names={};
for ii=1:length(structIDs)
% ID 0 means the voxel is outside the brain
if structIDs(ii)==0
% NOTE(review): the 'Out of brain' placeholder is only written when NOT
% quiet; in quiet mode this entry is left empty -- confirm intent.
if ~quiet
names{ii}='Out of brain';
end
continue
end
f=find(ARA_LIST.id == structIDs(ii));
if isempty(f)
if ~quiet
fprintf('%s finds no name for ARA ID %d\n',mfilename,structIDs(ii))
end
continue
end
if length(f)>1
% NOTE(review): with quiet==true the error is suppressed, but the brace
% indexing below would still fail for a vector f -- confirm intent.
if ~quiet
error('found more than one ID index')
end
end
names{ii} = ARA_LIST.name{f};
acronyms{ii} = ARA_LIST.acronym{f};
end
% unwrap a single result so scalar queries return a char, not a cell
if ~isempty(names) & length(names)==1
names=names{1};
end
| MATLAB |
3D | Aswendt-Lab/AIDAmri | ARA/AllenBrainAPI-master/DownloadImageSeries.m | .m | 2076 | 62 | function DownloadImageSeries(outdir, expid, varargin)
% download Allen sample brain using the Allen API
%
% function DownloadImageSeries(outdir, expid, varargin)
%
%
% Inputs [required]
% outdir - where to put the JPEGs
% expid - [numerical scalar] experiment ID assigned by Allen
%
% Inputs [optional]
% 'downsample' sets how many times the image will be downsampled and scaled down,
% e.g. downsample=3 means the image will be 1/2^3 = 1/8 of original size.
% 'range' specifies the range of 16 bit RGB values that will be mapped onto 8 bit
%
%
% Example
% - Pull in a nicely downsampled version of experiment ID 479701339 into the current directory:
% >> DownloadImageSeries(pwd,479701339,'downsample',4)
%
%
% PZ
% NOTE(review): addParamValue/urlwrite are deprecated in newer MATLAB
% (use addParameter / websave) -- left as-is for compatibility.
params = inputParser;
params.addParamValue('range', '0,2500,0,2500,0,4095', @ischar);
params.addParamValue('downsample', 2, @isnumeric);
params.parse(varargin{:});
% download XML data for experiment
% query the SectionDataSet record, including per-section image metadata
exp_url = ['http://api.brain-map.org/api/v2/data/SectionDataSet/' ...
num2str(expid) '.xml?include=section_images'];
disp(['Accessing ',exp_url]);
doc = xmlread(exp_url);
% select the image IDs for all images
list = doc.getElementsByTagName('section-image');
nImages = list.getLength;
% cycle through images, zero-indexed
% (the Java DOM list returned by xmlread is 0-based)
for ind = 0:nImages-1
thisImage = list.item(ind);
imageid = thisImage.getElementsByTagName('id');
% number of the brain section (they aren't in order for some reason)
sectionnum = thisImage.getElementsByTagName('section-number');
% build the per-image download URL with the requested range/downsample
query = ['http://api.brain-map.org/api/v2/section_image_download/' ...
char(imageid.item(0).getFirstChild.getData) ...
'?range=' params.Results.range...
'&downsample=' num2str(params.Results.downsample)];
disp([ num2str(ind+1) ' of ' num2str(nImages) ': ' query]);
% a failed download is reported but does not abort the series
try
urlwrite(query,...
[ outdir filesep ...
char(sectionnum.item(0).getFirstChild.getData) '.jpg']);
catch
disp(['Error getting image from ',query]);
end
end
| MATLAB |
3D | Aswendt-Lab/AIDAmri | ARA/AllenBrainAPI-master/getProjectionDataFromExperiment.m | .m | 2762 | 95 | function result = getProjectionDataFromExperiment(expID)
% get projection data from Allen experiment ID
%
% function data = getProjectionDataFromExperiment(expID)
%
% Purpose
% Get projection information from ARA sample brain(s) given one or more experiment IDs.
% These can be searched for using findAllenExperiments
%
%
% Inputs
% 'expID' - a scalar or vector defining one or more experiment IDs to extract using the ARA API.
%
%
% Outputs
% data - the connectivity data in a cell array [of length equal to length(expID)] of structures.
% The output structure as the following fields
% hemisphere_id
% id . . . . . . . . . experiment ID
% is_injection . . . . . . the labeling here part of the injection volume?
% max_voxel_density
% max_voxel_x
% max_voxel_y
% max_voxel_z
% normalized_projection_volume
% projection_density . . . . detected signal volume / structure volume sum detected pixels / sum all pixels in a grid
% projection_intensity. . . . sum detected pixel intensity / sum all detected pixels
% projection_energy. . . . . sum detected pixel intensity / structure volume
% projection_volume
% section_data_set_id
% structure_id . . . . . . the ID of the brain area with which these data are associated.
% sum_pixel_intensity
% sum_pixels
% sum_projection_pixel_intensity
% sum_projection_pixels
% volume . . . . . . . . sum of detected signal volume in mm^3 (I think)
%
%
% Rob Campbell - Basel 2015
%
%
% Also see:
% findAllenExperiments
%
% requires JSONlab from the
% JSONlab provides loadjson; bail out early if it is not on the path
if ~exist('loadjson')
disp('Please install JSONlab from the FEX')
return
end
if ~isnumeric(expID)
error('expID should be numeric')
end
%Build the URL
% %d is filled in with each experiment ID via sprintf below
url = 'http://connectivity.brain-map.org/api/v2/data/ProjectionStructureUnionize/query.json?criteria=[section_data_set_id$eq%d]&num_rows=all';
%Get projection data (this can be slow)
% NOTE(review): urlread is deprecated in newer MATLAB (use webread).
for ii=1:length(expID)
if length(expID)>1
fprintf('%d/%d. Getting data for experiment ID %d\n',ii,length(expID),expID(ii))
end
% a failed request is recorded as [] and skipped during parsing below
try
page{ii} = urlread(sprintf(url,expID(ii)));
catch
fprintf('Failed to get data for ID %d\n', expID(ii))
page{ii} = [];
end
end
%parse the JSON data into a cell array of structures that the function returns
n=1;
for ii=1:length(page)
if isempty(page{ii}), continue, end
tmp = loadjson(page{ii});
if tmp.success %skip any failures
%Convert to an array of structures because cell arrays are annoying
for jj=1:length(tmp.msg)
result{n}(jj)=tmp.msg{jj};
end
end
% NOTE(review): n is incremented even when tmp.success is false, which
% leaves an empty slot in 'result' -- confirm whether that is intended.
n=n+1;
end
| MATLAB |
3D | Aswendt-Lab/AIDAmri | ARA/AllenBrainAPI-master/acronym2structureID.m | .m | 1722 | 72 | function [IDs,ARA_LIST]=acronym2structureID(acronyms,ARA_LIST,quiet)
% Convert a list of ARA (Allen Reference Atlas) acronyms to a vector of structure IDs
%
% function [IDs,ARA_LIST]=acronym2structureID(acronyms,ARA_LIST,quiet)
%
% Purpose
% Each Allen Reference Atlas (ARA) brain area is associated with a unique
% number (structure ID), a long name and an acronym. This function converts
% the acronym to an area structure ID number.
%
%
% Inputs
% acronyms - a cell array (if a list) of brain area acronyms or a single string.
% ARA_LIST - [optional] the first output of getAllenStructureList
% quiet - [optional, false by default] if true, do not print any warning messages
%
%
% Outputs
% IDs - a vector of brain area structure IDs (if more than one acronym was provided)
% ARA_LIST - the CSV data from getAllenStructureList in the form of a cell array
%
%
% Examples
%
% >> acronym2structureID({'VISp','VISal'})
%
% ans =
%
% 1x2 int32 row vector
%
% 385 402
%
%
%
% Rob Campbell
%
% See also:
% getAllenStructureList, structureID2name
% accept a single string by wrapping it in a cell array
if isstr(acronyms)
acronyms={acronyms};
end
% fetch the full ARA structure list once if the caller did not supply it
if nargin<2 || isempty(ARA_LIST)
ARA_LIST = getAllenStructureList;
end
if nargin<3
quiet = false;
end
%loop through and find all the acronyms
for ii=1:length(acronyms)
% case-insensitive exact match against the acronym column
f=strmatch(lower(acronyms{ii}),lower(ARA_LIST.acronym),'exact');
if isempty(f)
if ~quiet
fprintf('%s finds no acronym %s in the atlas\n',mfilename, acronyms{ii})
end
% NOTE(review): on a miss IDs(ii) is left unassigned (stays 0 if a later
% acronym matches, or the output is shorter if it was the last one).
continue
end
if length(f)>1
% NOTE(review): with quiet==true the error is suppressed, but the
% assignment below would still fail for a vector f -- confirm intent.
if ~quiet
error('found more than one ID index')
end
end
IDs(ii) = ARA_LIST.id(f);
end
| MATLAB |
3D | Aswendt-Lab/AIDAmri | ARA/AllenBrainAPI-master/getInjectionIDfromExperiment.m | .m | 2188 | 71 | function [IDs,names] = getInjectionIDfromExperiment(expIDs)
% Download structure ID of the primary injection structure from an Allen experiment
%
% function IDs = getInjectionIDfromExperiment(expIDs)
%
% Purpose
% Make an API query that downloads the structure id of the primary injection
% structure of each experiment in the list expIDs.
%
%
% Inputs
% expIDs - a vector (list) of integers corresponding to Allen experiment IDs
%
%
% Outputs
% IDs - a list of brain area index values associated with each experiment
% names - the names associated with the IDs
%
%
% Notes
% Based on R example at http://api.brain-map.org/examples/doc/thalamus/thalamus.R.html
%
%
% Rob Campbell - Basel 2015
%
% See Also:
% findAllenExperiments
csvQueryURl = 'http://api.brain-map.org/api/v2/data/query.csv';
% This uses a tabular query to select a few columns of interest for the results.
% The first criteria chooses seven experiments by their ids. The specimen and
% injection models are included in the criteria so that the subsequent tabular
% query can access their columns. Each experiment can have multiple injections
% but only one primary injection structure, so the 'distinct' clause limits the
% results to one data set and primary injection structure per row.
% two percent sizes instead of one because we need to escape them for sprintf to work.
% TODO: perhaps make this more flexible or create a function that allows more elaborate API queries.
queryURL = '?criteria=model::SectionDataSet,rma::criteria,[id$in%s],specimen%%28injections%%29,rma::options[tabular$eq%%27distinct%%20data_sets.id%%20as%%20section_data_set_id,injections.primary_injection_structure_id%%27]';
%Build a comma-separated text string of experiment IDs
csvID = sprintf('%d,',expIDs);
% drop the trailing comma left by sprintf
csvID(end)=[];
finalURL = sprintf([csvQueryURl,queryURL],csvID);
% NOTE(review): urlread is deprecated in newer MATLAB (use webread).
result = strsplit(urlread(finalURL),'\n');
IDs = [];
n=1;
% start at row 2: the first CSV row is the header; skip blank lines
for ii = 2:length(result)
if length(result{ii})==0
continue
end
% column 2 of each row holds the primary injection structure ID
tmp=strsplit(result{ii},',');
IDs(n) = str2num(tmp{2});
n=n+1;
end
% resolve names only when the caller asks (each lookup may hit the API)
if nargout>1
names = structureID2name(IDs);
end | MATLAB |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.