Dataset columns:
  code: string (length 1 – 1.05M)
  repo_name: string (length 6 – 83)
  path: string (length 3 – 242)
  language: string (222 classes)
  license: string (20 classes)
  size: int64 (1 – 1.05M)
#include "selfdrive/ui/qt/offroad/settings.h" #include <cassert> #include <cmath> #include <string> #include <QDebug> #include <QLabel> #include "common/params.h" #include "common/util.h" #include "selfdrive/ui/ui.h" #include "selfdrive/ui/qt/util.h" #include "selfdrive/ui/qt/widgets/controls.h" #include "selfdrive/ui/qt/widgets/input.h" #include "system/hardware/hw.h" void SoftwarePanel::checkForUpdates() { std::system("pkill -SIGUSR1 -f system.updated.updated"); } SoftwarePanel::SoftwarePanel(QWidget* parent) : ListWidget(parent) { onroadLbl = new QLabel(tr("Updates are only downloaded while the car is off.")); onroadLbl->setStyleSheet("font-size: 50px; font-weight: 400; text-align: left; padding-top: 30px; padding-bottom: 30px;"); addItem(onroadLbl); // current version versionLbl = new LabelControl(tr("Current Version"), ""); addItem(versionLbl); // download update btn downloadBtn = new ButtonControl(tr("Download"), tr("CHECK")); connect(downloadBtn, &ButtonControl::clicked, [=]() { downloadBtn->setEnabled(false); if (downloadBtn->text() == tr("CHECK")) { checkForUpdates(); } else { std::system("pkill -SIGHUP -f system.updated.updated"); } }); addItem(downloadBtn); // install update btn installBtn = new ButtonControl(tr("Install Update"), tr("INSTALL")); connect(installBtn, &ButtonControl::clicked, [=]() { installBtn->setEnabled(false); params.putBool("DoReboot", true); }); addItem(installBtn); // branch selecting targetBranchBtn = new ButtonControl(tr("Target Branch"), tr("SELECT")); connect(targetBranchBtn, &ButtonControl::clicked, [=]() { auto current = params.get("GitBranch"); QStringList branches = QString::fromStdString(params.get("UpdaterAvailableBranches")).split(","); for (QString b : {current.c_str(), "devel-staging", "devel", "nightly", "master-ci", "master"}) { auto i = branches.indexOf(b); if (i >= 0) { branches.removeAt(i); branches.insert(0, b); } } QString cur = QString::fromStdString(params.get("UpdaterTargetBranch")); QString selection = MultiOptionDialog::getSelection(tr("Select a branch"), branches, cur, this); if (!selection.isEmpty()) { params.put("UpdaterTargetBranch", selection.toStdString()); targetBranchBtn->setValue(QString::fromStdString(params.get("UpdaterTargetBranch"))); checkForUpdates(); } }); if (!params.getBool("IsTestedBranch")) { addItem(targetBranchBtn); } // uninstall button auto uninstallBtn = new ButtonControl(tr("Uninstall %1").arg(getBrand()), tr("UNINSTALL")); connect(uninstallBtn, &ButtonControl::clicked, [&]() { if (ConfirmationDialog::confirm(tr("Are you sure you want to uninstall?"), tr("Uninstall"), this)) { params.putBool("DoUninstall", true); } }); addItem(uninstallBtn); fs_watch = new ParamWatcher(this); QObject::connect(fs_watch, &ParamWatcher::paramChanged, [=](const QString &param_name, const QString &param_value) { updateLabels(); }); connect(uiState(), &UIState::offroadTransition, [=](bool offroad) { is_onroad = !offroad; updateLabels(); }); updateLabels(); } void SoftwarePanel::showEvent(QShowEvent *event) { // nice for testing on PC installBtn->setEnabled(true); updateLabels(); } void SoftwarePanel::updateLabels() { // add these back in case the files got removed fs_watch->addParam("LastUpdateTime"); fs_watch->addParam("UpdateFailedCount"); fs_watch->addParam("UpdaterState"); fs_watch->addParam("UpdateAvailable"); if (!isVisible()) { return; } // updater only runs offroad onroadLbl->setVisible(is_onroad); downloadBtn->setVisible(!is_onroad); // download update QString updater_state = 
QString::fromStdString(params.get("UpdaterState")); bool failed = std::atoi(params.get("UpdateFailedCount").c_str()) > 0; if (updater_state != "idle") { downloadBtn->setEnabled(false); downloadBtn->setValue(updater_state); } else { if (failed) { downloadBtn->setText(tr("CHECK")); downloadBtn->setValue(tr("failed to check for update")); } else if (params.getBool("UpdaterFetchAvailable")) { downloadBtn->setText(tr("DOWNLOAD")); downloadBtn->setValue(tr("update available")); } else { QString lastUpdate = tr("never"); auto tm = params.get("LastUpdateTime"); if (!tm.empty()) { lastUpdate = timeAgo(QDateTime::fromString(QString::fromStdString(tm + "Z"), Qt::ISODate)); } downloadBtn->setText(tr("CHECK")); downloadBtn->setValue(tr("up to date, last checked %1").arg(lastUpdate)); } downloadBtn->setEnabled(true); } targetBranchBtn->setValue(QString::fromStdString(params.get("UpdaterTargetBranch"))); // current + new versions versionLbl->setText(QString::fromStdString(params.get("UpdaterCurrentDescription"))); versionLbl->setDescription(QString::fromStdString(params.get("UpdaterCurrentReleaseNotes"))); installBtn->setVisible(!is_onroad && params.getBool("UpdateAvailable")); installBtn->setValue(QString::fromStdString(params.get("UpdaterNewDescription"))); installBtn->setDescription(QString::fromStdString(params.get("UpdaterNewReleaseNotes"))); update(); }
2301_81045437/openpilot
selfdrive/ui/qt/offroad/software_settings.cc
C++
mit
5,272
import QtQuick 2.0

Item {
  id: root
  signal scroll()

  Flickable {
    id: flickArea
    objectName: "flickArea"
    anchors.fill: parent
    contentHeight: helpText.height
    contentWidth: width - (leftMargin + rightMargin)
    bottomMargin: 50
    topMargin: 50
    rightMargin: 50
    leftMargin: 50
    flickableDirection: Flickable.VerticalFlick
    flickDeceleration: 7500.0
    maximumFlickVelocity: 10000.0
    pixelAligned: true
    onAtYEndChanged: root.scroll()

    Text {
      id: helpText
      width: flickArea.contentWidth
      font.family: "Inter"
      font.weight: "Light"
      font.pixelSize: 50
      textFormat: Text.RichText
      color: "#C9C9C9"
      wrapMode: Text.Wrap
      text: text_view
    }
  }

  Rectangle {
    id: scrollbar
    anchors.right: flickArea.right
    anchors.rightMargin: 20
    y: flickArea.topMargin + flickArea.visibleArea.yPosition * (flickArea.height - flickArea.bottomMargin - flickArea.topMargin)
    width: 12
    radius: 6
    height: flickArea.visibleArea.heightRatio * (flickArea.height - flickArea.bottomMargin - flickArea.topMargin)
    color: "#808080"
  }
}
2301_81045437/openpilot
selfdrive/ui/qt/offroad/text_view.qml
QML
mit
1,132
#include "selfdrive/ui/qt/onroad/alerts.h" #include <QPainter> #include <map> #include "selfdrive/ui/qt/util.h" void OnroadAlerts::updateState(const UIState &s) { Alert a = getAlert(*(s.sm), s.scene.started_frame); if (!alert.equal(a)) { alert = a; update(); } } void OnroadAlerts::clear() { alert = {}; update(); } OnroadAlerts::Alert OnroadAlerts::getAlert(const SubMaster &sm, uint64_t started_frame) { const cereal::ControlsState::Reader &cs = sm["controlsState"].getControlsState(); const uint64_t controls_frame = sm.rcv_frame("controlsState"); Alert a = {}; if (controls_frame >= started_frame) { // Don't get old alert. a = {cs.getAlertText1().cStr(), cs.getAlertText2().cStr(), cs.getAlertType().cStr(), cs.getAlertSize(), cs.getAlertStatus()}; } if (!sm.updated("controlsState") && (sm.frame - started_frame) > 5 * UI_FREQ) { const int CONTROLS_TIMEOUT = 5; const int controls_missing = (nanos_since_boot() - sm.rcv_time("controlsState")) / 1e9; // Handle controls timeout if (controls_frame < started_frame) { // car is started, but controlsState hasn't been seen at all a = {tr("openpilot Unavailable"), tr("Waiting for controls to start"), "controlsWaiting", cereal::ControlsState::AlertSize::MID, cereal::ControlsState::AlertStatus::NORMAL}; } else if (controls_missing > CONTROLS_TIMEOUT && !Hardware::PC()) { // car is started, but controls is lagging or died if (cs.getEnabled() && (controls_missing - CONTROLS_TIMEOUT) < 10) { a = {tr("TAKE CONTROL IMMEDIATELY"), tr("Controls Unresponsive"), "controlsUnresponsive", cereal::ControlsState::AlertSize::FULL, cereal::ControlsState::AlertStatus::CRITICAL}; } else { a = {tr("Controls Unresponsive"), tr("Reboot Device"), "controlsUnresponsivePermanent", cereal::ControlsState::AlertSize::MID, cereal::ControlsState::AlertStatus::NORMAL}; } } } return a; } void OnroadAlerts::paintEvent(QPaintEvent *event) { if (alert.size == cereal::ControlsState::AlertSize::NONE) { return; } static std::map<cereal::ControlsState::AlertSize, const int> alert_heights = { {cereal::ControlsState::AlertSize::SMALL, 271}, {cereal::ControlsState::AlertSize::MID, 420}, {cereal::ControlsState::AlertSize::FULL, height()}, }; int h = alert_heights[alert.size]; int margin = 40; int radius = 30; if (alert.size == cereal::ControlsState::AlertSize::FULL) { margin = 0; radius = 0; } QRect r = QRect(0 + margin, height() - h + margin, width() - margin*2, h - margin*2); QPainter p(this); // draw background + gradient p.setPen(Qt::NoPen); p.setCompositionMode(QPainter::CompositionMode_SourceOver); p.setBrush(QBrush(alert_colors[alert.status])); p.drawRoundedRect(r, radius, radius); QLinearGradient g(0, r.y(), 0, r.bottom()); g.setColorAt(0, QColor::fromRgbF(0, 0, 0, 0.05)); g.setColorAt(1, QColor::fromRgbF(0, 0, 0, 0.35)); p.setCompositionMode(QPainter::CompositionMode_DestinationOver); p.setBrush(QBrush(g)); p.drawRoundedRect(r, radius, radius); p.setCompositionMode(QPainter::CompositionMode_SourceOver); // text const QPoint c = r.center(); p.setPen(QColor(0xff, 0xff, 0xff)); p.setRenderHint(QPainter::TextAntialiasing); if (alert.size == cereal::ControlsState::AlertSize::SMALL) { p.setFont(InterFont(74, QFont::DemiBold)); p.drawText(r, Qt::AlignCenter, alert.text1); } else if (alert.size == cereal::ControlsState::AlertSize::MID) { p.setFont(InterFont(88, QFont::Bold)); p.drawText(QRect(0, c.y() - 125, width(), 150), Qt::AlignHCenter | Qt::AlignTop, alert.text1); p.setFont(InterFont(66)); p.drawText(QRect(0, c.y() + 21, width(), 90), Qt::AlignHCenter, alert.text2); } else if (alert.size == 
cereal::ControlsState::AlertSize::FULL) { bool l = alert.text1.length() > 15; p.setFont(InterFont(l ? 132 : 177, QFont::Bold)); p.drawText(QRect(0, r.y() + (l ? 240 : 270), width(), 600), Qt::AlignHCenter | Qt::TextWordWrap, alert.text1); p.setFont(InterFont(88)); p.drawText(QRect(0, r.height() - (l ? 361 : 420), width(), 300), Qt::AlignHCenter | Qt::TextWordWrap, alert.text2); } }
2301_81045437/openpilot
selfdrive/ui/qt/onroad/alerts.cc
C++
mit
4,281
#pragma once

#include <QWidget>

#include "selfdrive/ui/ui.h"

class OnroadAlerts : public QWidget {
  Q_OBJECT

public:
  OnroadAlerts(QWidget *parent = 0) : QWidget(parent) {}
  void updateState(const UIState &s);
  void clear();

protected:
  struct Alert {
    QString text1;
    QString text2;
    QString type;
    cereal::ControlsState::AlertSize size;
    cereal::ControlsState::AlertStatus status;

    bool equal(const Alert &other) const {
      return text1 == other.text1 && text2 == other.text2 && type == other.type;
    }
  };

  const QMap<cereal::ControlsState::AlertStatus, QColor> alert_colors = {
    {cereal::ControlsState::AlertStatus::NORMAL, QColor(0x15, 0x15, 0x15, 0xf1)},
    {cereal::ControlsState::AlertStatus::USER_PROMPT, QColor(0xDA, 0x6F, 0x25, 0xf1)},
    {cereal::ControlsState::AlertStatus::CRITICAL, QColor(0xC9, 0x22, 0x31, 0xf1)},
  };

  void paintEvent(QPaintEvent*) override;
  OnroadAlerts::Alert getAlert(const SubMaster &sm, uint64_t started_frame);

  QColor bg;
  Alert alert = {};
};
2301_81045437/openpilot
selfdrive/ui/qt/onroad/alerts.h
C++
mit
1,034
#include "selfdrive/ui/qt/onroad/annotated_camera.h" #include <QPainter> #include <algorithm> #include <cmath> #include "common/swaglog.h" #include "selfdrive/ui/qt/onroad/buttons.h" #include "selfdrive/ui/qt/util.h" // Window that shows camera view and variety of info drawn on top AnnotatedCameraWidget::AnnotatedCameraWidget(VisionStreamType type, QWidget* parent) : fps_filter(UI_FREQ, 3, 1. / UI_FREQ), CameraWidget("camerad", type, true, parent) { pm = std::make_unique<PubMaster, const std::initializer_list<const char *>>({"uiDebug"}); main_layout = new QVBoxLayout(this); main_layout->setMargin(UI_BORDER_SIZE); main_layout->setSpacing(0); experimental_btn = new ExperimentalButton(this); main_layout->addWidget(experimental_btn, 0, Qt::AlignTop | Qt::AlignRight); map_settings_btn = new MapSettingsButton(this); main_layout->addWidget(map_settings_btn, 0, Qt::AlignBottom | Qt::AlignRight); dm_img = loadPixmap("../assets/img_driver_face.png", {img_size + 5, img_size + 5}); } void AnnotatedCameraWidget::updateState(const UIState &s) { const int SET_SPEED_NA = 255; const SubMaster &sm = *(s.sm); const bool cs_alive = sm.alive("controlsState"); const bool nav_alive = sm.alive("navInstruction") && sm["navInstruction"].getValid(); const auto cs = sm["controlsState"].getControlsState(); const auto car_state = sm["carState"].getCarState(); const auto nav_instruction = sm["navInstruction"].getNavInstruction(); // Handle older routes where vCruiseCluster is not set float v_cruise = cs.getVCruiseCluster() == 0.0 ? cs.getVCruise() : cs.getVCruiseCluster(); setSpeed = cs_alive ? v_cruise : SET_SPEED_NA; is_cruise_set = setSpeed > 0 && (int)setSpeed != SET_SPEED_NA; if (is_cruise_set && !s.scene.is_metric) { setSpeed *= KM_TO_MILE; } // Handle older routes where vEgoCluster is not set v_ego_cluster_seen = v_ego_cluster_seen || car_state.getVEgoCluster() != 0.0; float v_ego = v_ego_cluster_seen ? car_state.getVEgoCluster() : car_state.getVEgo(); speed = cs_alive ? std::max<float>(0.0, v_ego) : 0.0; speed *= s.scene.is_metric ? MS_TO_KPH : MS_TO_MPH; auto speed_limit_sign = nav_instruction.getSpeedLimitSign(); speedLimit = nav_alive ? nav_instruction.getSpeedLimit() : 0.0; speedLimit *= (s.scene.is_metric ? MS_TO_KPH : MS_TO_MPH); has_us_speed_limit = (nav_alive && speed_limit_sign == cereal::NavInstruction::SpeedLimitSign::MUTCD); has_eu_speed_limit = (nav_alive && speed_limit_sign == cereal::NavInstruction::SpeedLimitSign::VIENNA); is_metric = s.scene.is_metric; speedUnit = s.scene.is_metric ? tr("km/h") : tr("mph"); hideBottomIcons = (cs.getAlertSize() != cereal::ControlsState::AlertSize::NONE); status = s.status; // update engageability/experimental mode button experimental_btn->updateState(s); // update DM icon auto dm_state = sm["driverMonitoringState"].getDriverMonitoringState(); dmActive = dm_state.getIsActiveMode(); rightHandDM = dm_state.getIsRHD(); // DM icon transition dm_fade_state = std::clamp(dm_fade_state+0.2*(0.5-dmActive), 0.0, 1.0); // hide map settings button for alerts and flip for right hand DM if (map_settings_btn->isEnabled()) { map_settings_btn->setVisible(!hideBottomIcons); main_layout->setAlignment(map_settings_btn, (rightHandDM ? 
Qt::AlignLeft : Qt::AlignRight) | Qt::AlignBottom); } } void AnnotatedCameraWidget::drawHud(QPainter &p) { p.save(); // Header gradient QLinearGradient bg(0, UI_HEADER_HEIGHT - (UI_HEADER_HEIGHT / 2.5), 0, UI_HEADER_HEIGHT); bg.setColorAt(0, QColor::fromRgbF(0, 0, 0, 0.45)); bg.setColorAt(1, QColor::fromRgbF(0, 0, 0, 0)); p.fillRect(0, 0, width(), UI_HEADER_HEIGHT, bg); QString speedLimitStr = (speedLimit > 1) ? QString::number(std::nearbyint(speedLimit)) : "–"; QString speedStr = QString::number(std::nearbyint(speed)); QString setSpeedStr = is_cruise_set ? QString::number(std::nearbyint(setSpeed)) : "–"; // Draw outer box + border to contain set speed and speed limit const int sign_margin = 12; const int us_sign_height = 186; const int eu_sign_size = 176; const QSize default_size = {172, 204}; QSize set_speed_size = default_size; if (is_metric || has_eu_speed_limit) set_speed_size.rwidth() = 200; if (has_us_speed_limit && speedLimitStr.size() >= 3) set_speed_size.rwidth() = 223; if (has_us_speed_limit) set_speed_size.rheight() += us_sign_height + sign_margin; else if (has_eu_speed_limit) set_speed_size.rheight() += eu_sign_size + sign_margin; int top_radius = 32; int bottom_radius = has_eu_speed_limit ? 100 : 32; QRect set_speed_rect(QPoint(60 + (default_size.width() - set_speed_size.width()) / 2, 45), set_speed_size); p.setPen(QPen(whiteColor(75), 6)); p.setBrush(blackColor(166)); drawRoundedRect(p, set_speed_rect, top_radius, top_radius, bottom_radius, bottom_radius); // Draw MAX QColor max_color = QColor(0x80, 0xd8, 0xa6, 0xff); QColor set_speed_color = whiteColor(); if (is_cruise_set) { if (status == STATUS_DISENGAGED) { max_color = whiteColor(); } else if (status == STATUS_OVERRIDE) { max_color = QColor(0x91, 0x9b, 0x95, 0xff); } else if (speedLimit > 0) { auto interp_color = [=](QColor c1, QColor c2, QColor c3) { return speedLimit > 0 ? 
interpColor(setSpeed, {speedLimit + 5, speedLimit + 15, speedLimit + 25}, {c1, c2, c3}) : c1; }; max_color = interp_color(max_color, QColor(0xff, 0xe4, 0xbf), QColor(0xff, 0xbf, 0xbf)); set_speed_color = interp_color(set_speed_color, QColor(0xff, 0x95, 0x00), QColor(0xff, 0x00, 0x00)); } } else { max_color = QColor(0xa6, 0xa6, 0xa6, 0xff); set_speed_color = QColor(0x72, 0x72, 0x72, 0xff); } p.setFont(InterFont(40, QFont::DemiBold)); p.setPen(max_color); p.drawText(set_speed_rect.adjusted(0, 27, 0, 0), Qt::AlignTop | Qt::AlignHCenter, tr("MAX")); p.setFont(InterFont(90, QFont::Bold)); p.setPen(set_speed_color); p.drawText(set_speed_rect.adjusted(0, 77, 0, 0), Qt::AlignTop | Qt::AlignHCenter, setSpeedStr); const QRect sign_rect = set_speed_rect.adjusted(sign_margin, default_size.height(), -sign_margin, -sign_margin); // US/Canada (MUTCD style) sign if (has_us_speed_limit) { p.setPen(Qt::NoPen); p.setBrush(whiteColor()); p.drawRoundedRect(sign_rect, 24, 24); p.setPen(QPen(blackColor(), 6)); p.drawRoundedRect(sign_rect.adjusted(9, 9, -9, -9), 16, 16); p.setFont(InterFont(28, QFont::DemiBold)); p.drawText(sign_rect.adjusted(0, 22, 0, 0), Qt::AlignTop | Qt::AlignHCenter, tr("SPEED")); p.drawText(sign_rect.adjusted(0, 51, 0, 0), Qt::AlignTop | Qt::AlignHCenter, tr("LIMIT")); p.setFont(InterFont(70, QFont::Bold)); p.drawText(sign_rect.adjusted(0, 85, 0, 0), Qt::AlignTop | Qt::AlignHCenter, speedLimitStr); } // EU (Vienna style) sign if (has_eu_speed_limit) { p.setPen(Qt::NoPen); p.setBrush(whiteColor()); p.drawEllipse(sign_rect); p.setPen(QPen(Qt::red, 20)); p.drawEllipse(sign_rect.adjusted(16, 16, -16, -16)); p.setFont(InterFont((speedLimitStr.size() >= 3) ? 60 : 70, QFont::Bold)); p.setPen(blackColor()); p.drawText(sign_rect, Qt::AlignCenter, speedLimitStr); } // current speed p.setFont(InterFont(176, QFont::Bold)); drawText(p, rect().center().x(), 210, speedStr); p.setFont(InterFont(66)); drawText(p, rect().center().x(), 290, speedUnit, 200); p.restore(); } void AnnotatedCameraWidget::drawText(QPainter &p, int x, int y, const QString &text, int alpha) { QRect real_rect = p.fontMetrics().boundingRect(text); real_rect.moveCenter({x, y - real_rect.height() / 2}); p.setPen(QColor(0xff, 0xff, 0xff, alpha)); p.drawText(real_rect.x(), real_rect.bottom(), text); } void AnnotatedCameraWidget::initializeGL() { CameraWidget::initializeGL(); qInfo() << "OpenGL version:" << QString((const char*)glGetString(GL_VERSION)); qInfo() << "OpenGL vendor:" << QString((const char*)glGetString(GL_VENDOR)); qInfo() << "OpenGL renderer:" << QString((const char*)glGetString(GL_RENDERER)); qInfo() << "OpenGL language version:" << QString((const char*)glGetString(GL_SHADING_LANGUAGE_VERSION)); prev_draw_t = millis_since_boot(); setBackgroundColor(bg_colors[STATUS_DISENGAGED]); } void AnnotatedCameraWidget::updateFrameMat() { CameraWidget::updateFrameMat(); UIState *s = uiState(); int w = width(), h = height(); s->fb_w = w; s->fb_h = h; // Apply transformation such that video pixel coordinates match video // 1) Put (0, 0) in the middle of the video // 2) Apply same scaling as video // 3) Put (0, 0) in top left corner of video s->car_space_transform.reset(); s->car_space_transform.translate(w / 2 - x_offset, h / 2 - y_offset) .scale(zoom, zoom) .translate(-intrinsic_matrix.v[2], -intrinsic_matrix.v[5]); } void AnnotatedCameraWidget::drawLaneLines(QPainter &painter, const UIState *s) { painter.save(); const UIScene &scene = s->scene; SubMaster &sm = *(s->sm); // lanelines for (int i = 0; i < 
std::size(scene.lane_line_vertices); ++i) { painter.setBrush(QColor::fromRgbF(1.0, 1.0, 1.0, std::clamp<float>(scene.lane_line_probs[i], 0.0, 0.7))); painter.drawPolygon(scene.lane_line_vertices[i]); } // road edges for (int i = 0; i < std::size(scene.road_edge_vertices); ++i) { painter.setBrush(QColor::fromRgbF(1.0, 0, 0, std::clamp<float>(1.0 - scene.road_edge_stds[i], 0.0, 1.0))); painter.drawPolygon(scene.road_edge_vertices[i]); } // paint path QLinearGradient bg(0, height(), 0, 0); if (sm["controlsState"].getControlsState().getExperimentalMode()) { // The first half of track_vertices are the points for the right side of the path // and the indices match the positions of accel from uiPlan const auto &acceleration = sm["uiPlan"].getUiPlan().getAccel(); const int max_len = std::min<int>(scene.track_vertices.length() / 2, acceleration.size()); for (int i = 0; i < max_len; ++i) { // Some points are out of frame if (scene.track_vertices[i].y() < 0 || scene.track_vertices[i].y() > height()) continue; // Flip so 0 is bottom of frame float lin_grad_point = (height() - scene.track_vertices[i].y()) / height(); // speed up: 120, slow down: 0 float path_hue = fmax(fmin(60 + acceleration[i] * 35, 120), 0); // FIXME: painter.drawPolygon can be slow if hue is not rounded path_hue = int(path_hue * 100 + 0.5) / 100; float saturation = fmin(fabs(acceleration[i] * 1.5), 1); float lightness = util::map_val(saturation, 0.0f, 1.0f, 0.95f, 0.62f); // lighter when grey float alpha = util::map_val(lin_grad_point, 0.75f / 2.f, 0.75f, 0.4f, 0.0f); // matches previous alpha fade bg.setColorAt(lin_grad_point, QColor::fromHslF(path_hue / 360., saturation, lightness, alpha)); // Skip a point, unless next is last i += (i + 2) < max_len ? 1 : 0; } } else { bg.setColorAt(0.0, QColor::fromHslF(148 / 360., 0.94, 0.51, 0.4)); bg.setColorAt(0.5, QColor::fromHslF(112 / 360., 1.0, 0.68, 0.35)); bg.setColorAt(1.0, QColor::fromHslF(112 / 360., 1.0, 0.68, 0.0)); } painter.setBrush(bg); painter.drawPolygon(scene.track_vertices); painter.restore(); } void AnnotatedCameraWidget::drawDriverState(QPainter &painter, const UIState *s) { const UIScene &scene = s->scene; painter.save(); // base icon int offset = UI_BORDER_SIZE + btn_size / 2; int x = rightHandDM ? width() - offset : offset; int y = height() - offset; float opacity = dmActive ? 0.65 : 0.2; drawIcon(painter, QPoint(x, y), dm_img, blackColor(70), opacity); // face QPointF face_kpts_draw[std::size(default_face_kpts_3d)]; float kp; for (int i = 0; i < std::size(default_face_kpts_3d); ++i) { kp = (scene.face_kpts_draw[i].v[2] - 8) / 120 + 1.0; face_kpts_draw[i] = QPointF(scene.face_kpts_draw[i].v[0] * kp + x, scene.face_kpts_draw[i].v[1] * kp + y); } painter.setPen(QPen(QColor::fromRgbF(1.0, 1.0, 1.0, opacity), 5.2, Qt::SolidLine, Qt::RoundCap)); painter.drawPolyline(face_kpts_draw, std::size(default_face_kpts_3d)); // tracking arcs const int arc_l = 133; const float arc_t_default = 6.7; const float arc_t_extend = 12.0; QColor arc_color = QColor::fromRgbF(0.545 - 0.445 * s->engaged(), 0.545 + 0.4 * s->engaged(), 0.545 - 0.285 * s->engaged(), 0.4 * (1.0 - dm_fade_state)); float delta_x = -scene.driver_pose_sins[1] * arc_l / 2; float delta_y = -scene.driver_pose_sins[0] * arc_l / 2; painter.setPen(QPen(arc_color, arc_t_default+arc_t_extend*fmin(1.0, scene.driver_pose_diff[1] * 5.0), Qt::SolidLine, Qt::RoundCap)); painter.drawArc(QRectF(std::fmin(x + delta_x, x), y - arc_l / 2, fabs(delta_x), arc_l), (scene.driver_pose_sins[1]>0 ? 
90 : -90) * 16, 180 * 16); painter.setPen(QPen(arc_color, arc_t_default+arc_t_extend*fmin(1.0, scene.driver_pose_diff[0] * 5.0), Qt::SolidLine, Qt::RoundCap)); painter.drawArc(QRectF(x - arc_l / 2, std::fmin(y + delta_y, y), arc_l, fabs(delta_y)), (scene.driver_pose_sins[0]>0 ? 0 : 180) * 16, 180 * 16); painter.restore(); } void AnnotatedCameraWidget::drawLead(QPainter &painter, const cereal::RadarState::LeadData::Reader &lead_data, const QPointF &vd) { painter.save(); const float speedBuff = 10.; const float leadBuff = 40.; const float d_rel = lead_data.getDRel(); const float v_rel = lead_data.getVRel(); float fillAlpha = 0; if (d_rel < leadBuff) { fillAlpha = 255 * (1.0 - (d_rel / leadBuff)); if (v_rel < 0) { fillAlpha += 255 * (-1 * (v_rel / speedBuff)); } fillAlpha = (int)(fmin(fillAlpha, 255)); } float sz = std::clamp((25 * 30) / (d_rel / 3 + 30), 15.0f, 30.0f) * 2.35; float x = std::clamp((float)vd.x(), 0.f, width() - sz / 2); float y = std::fmin(height() - sz * .6, (float)vd.y()); float g_xo = sz / 5; float g_yo = sz / 10; QPointF glow[] = {{x + (sz * 1.35) + g_xo, y + sz + g_yo}, {x, y - g_yo}, {x - (sz * 1.35) - g_xo, y + sz + g_yo}}; painter.setBrush(QColor(218, 202, 37, 255)); painter.drawPolygon(glow, std::size(glow)); // chevron QPointF chevron[] = {{x + (sz * 1.25), y + sz}, {x, y}, {x - (sz * 1.25), y + sz}}; painter.setBrush(redColor(fillAlpha)); painter.drawPolygon(chevron, std::size(chevron)); painter.restore(); } void AnnotatedCameraWidget::paintGL() { UIState *s = uiState(); SubMaster &sm = *(s->sm); const double start_draw_t = millis_since_boot(); const cereal::ModelDataV2::Reader &model = sm["modelV2"].getModelV2(); // draw camera frame { std::lock_guard lk(frame_lock); if (frames.empty()) { if (skip_frame_count > 0) { skip_frame_count--; qDebug() << "skipping frame, not ready"; return; } } else { // skip drawing up to this many frames if we're // missing camera frames. this smooths out the // transitions from the narrow and wide cameras skip_frame_count = 5; } // Wide or narrow cam dependent on speed bool has_wide_cam = available_streams.count(VISION_STREAM_WIDE_ROAD); if (has_wide_cam) { float v_ego = sm["carState"].getCarState().getVEgo(); if ((v_ego < 10) || available_streams.size() == 1) { wide_cam_requested = true; } else if (v_ego > 15) { wide_cam_requested = false; } wide_cam_requested = wide_cam_requested && sm["controlsState"].getControlsState().getExperimentalMode(); // for replay of old routes, never go to widecam wide_cam_requested = wide_cam_requested && s->scene.calibration_wide_valid; } CameraWidget::setStreamType(wide_cam_requested ? VISION_STREAM_WIDE_ROAD : VISION_STREAM_ROAD); s->scene.wide_cam = CameraWidget::getStreamType() == VISION_STREAM_WIDE_ROAD; if (s->scene.calibration_valid) { auto calib = s->scene.wide_cam ? 
s->scene.view_from_wide_calib : s->scene.view_from_calib; CameraWidget::updateCalibration(calib); } else { CameraWidget::updateCalibration(DEFAULT_CALIBRATION); } CameraWidget::setFrameId(model.getFrameId()); CameraWidget::paintGL(); } QPainter painter(this); painter.setRenderHint(QPainter::Antialiasing); painter.setPen(Qt::NoPen); if (s->scene.world_objects_visible) { update_model(s, model, sm["uiPlan"].getUiPlan()); drawLaneLines(painter, s); if (s->scene.longitudinal_control && sm.rcv_frame("radarState") > s->scene.started_frame) { auto radar_state = sm["radarState"].getRadarState(); update_leads(s, radar_state, model.getPosition()); auto lead_one = radar_state.getLeadOne(); auto lead_two = radar_state.getLeadTwo(); if (lead_one.getStatus()) { drawLead(painter, lead_one, s->scene.lead_vertices[0]); } if (lead_two.getStatus() && (std::abs(lead_one.getDRel() - lead_two.getDRel()) > 3.0)) { drawLead(painter, lead_two, s->scene.lead_vertices[1]); } } } // DMoji if (!hideBottomIcons && (sm.rcv_frame("driverStateV2") > s->scene.started_frame)) { update_dmonitoring(s, sm["driverStateV2"].getDriverStateV2(), dm_fade_state, rightHandDM); drawDriverState(painter, s); } drawHud(painter); double cur_draw_t = millis_since_boot(); double dt = cur_draw_t - prev_draw_t; double fps = fps_filter.update(1. / dt * 1000); if (fps < 15) { LOGW("slow frame rate: %.2f fps", fps); } prev_draw_t = cur_draw_t; // publish debug msg MessageBuilder msg; auto m = msg.initEvent().initUiDebug(); m.setDrawTimeMillis(cur_draw_t - start_draw_t); pm->send("uiDebug", msg); } void AnnotatedCameraWidget::showEvent(QShowEvent *event) { CameraWidget::showEvent(event); ui_update_params(uiState()); prev_draw_t = millis_since_boot(); }
2301_81045437/openpilot
selfdrive/ui/qt/onroad/annotated_camera.cc
C++
mit
17,883
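The set-speed readout in drawHud() above blends the MAX label and the number between fixed offsets over the posted limit (limit+5, +15, +25) via the repo's interpColor helper, which lives in selfdrive/ui/qt/util and is not included in this dump. A minimal standalone sketch of that idea, assuming simple piecewise-linear RGB interpolation and using a plain struct in place of QColor (the helper name and sample values here are illustrative):

#include <array>
#include <cstdio>

// Stand-in for QColor; only RGB is needed for this illustration.
struct Color { int r, g, b; };

// Piecewise-linear blend of three colors over three x-thresholds
// (an assumed approximation of the repo's interpColor helper).
Color interp_color(float x, std::array<float, 3> xp, std::array<Color, 3> fp) {
  if (x <= xp[0]) return fp[0];
  if (x >= xp[2]) return fp[2];
  int i = (x < xp[1]) ? 0 : 1;
  float t = (x - xp[i]) / (xp[i + 1] - xp[i]);
  auto lerp = [t](int a, int b) { return (int)(a + t * (b - a)); };
  return {lerp(fp[i].r, fp[i + 1].r), lerp(fp[i].g, fp[i + 1].g), lerp(fp[i].b, fp[i + 1].b)};
}

int main() {
  const float speed_limit = 65.0f;  // hypothetical posted limit, in display units
  const Color green{0x80, 0xd8, 0xa6}, amber{0xff, 0xe4, 0xbf}, red{0xff, 0xbf, 0xbf};
  const float set_speeds[] = {68.0f, 75.0f, 92.0f};
  // Same thresholds as drawHud(): fully green up to limit+5, fully red past limit+25.
  for (float set_speed : set_speeds) {
    Color c = interp_color(set_speed, {speed_limit + 5, speed_limit + 15, speed_limit + 25},
                           {green, amber, red});
    printf("set %.0f -> rgb(%d, %d, %d)\n", set_speed, c.r, c.g, c.b);
  }
  return 0;
}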
#pragma once

#include <QVBoxLayout>
#include <memory>

#include "selfdrive/ui/qt/onroad/buttons.h"
#include "selfdrive/ui/qt/widgets/cameraview.h"

class AnnotatedCameraWidget : public CameraWidget {
  Q_OBJECT

public:
  explicit AnnotatedCameraWidget(VisionStreamType type, QWidget* parent = 0);
  void updateState(const UIState &s);

  MapSettingsButton *map_settings_btn;

private:
  void drawText(QPainter &p, int x, int y, const QString &text, int alpha = 255);

  QVBoxLayout *main_layout;
  ExperimentalButton *experimental_btn;
  QPixmap dm_img;
  float speed;
  QString speedUnit;
  float setSpeed;
  float speedLimit;
  bool is_cruise_set = false;
  bool is_metric = false;
  bool dmActive = false;
  bool hideBottomIcons = false;
  bool rightHandDM = false;
  float dm_fade_state = 1.0;
  bool has_us_speed_limit = false;
  bool has_eu_speed_limit = false;
  bool v_ego_cluster_seen = false;
  int status = STATUS_DISENGAGED;
  std::unique_ptr<PubMaster> pm;

  int skip_frame_count = 0;
  bool wide_cam_requested = false;

protected:
  void paintGL() override;
  void initializeGL() override;
  void showEvent(QShowEvent *event) override;
  void updateFrameMat() override;
  void drawLaneLines(QPainter &painter, const UIState *s);
  void drawLead(QPainter &painter, const cereal::RadarState::LeadData::Reader &lead_data, const QPointF &vd);
  void drawHud(QPainter &p);
  void drawDriverState(QPainter &painter, const UIState *s);
  inline QColor redColor(int alpha = 255) { return QColor(201, 34, 49, alpha); }
  inline QColor whiteColor(int alpha = 255) { return QColor(255, 255, 255, alpha); }
  inline QColor blackColor(int alpha = 255) { return QColor(0, 0, 0, alpha); }

  double prev_draw_t = 0;
  FirstOrderFilter fps_filter;
};
2301_81045437/openpilot
selfdrive/ui/qt/onroad/annotated_camera.h
C++
mit
1,752
#include "selfdrive/ui/qt/onroad/buttons.h" #include <QPainter> #include "selfdrive/ui/qt/util.h" void drawIcon(QPainter &p, const QPoint &center, const QPixmap &img, const QBrush &bg, float opacity) { p.setRenderHint(QPainter::Antialiasing); p.setOpacity(1.0); // bg dictates opacity of ellipse p.setPen(Qt::NoPen); p.setBrush(bg); p.drawEllipse(center, btn_size / 2, btn_size / 2); p.setOpacity(opacity); p.drawPixmap(center - QPoint(img.width() / 2, img.height() / 2), img); p.setOpacity(1.0); } // ExperimentalButton ExperimentalButton::ExperimentalButton(QWidget *parent) : experimental_mode(false), engageable(false), QPushButton(parent) { setFixedSize(btn_size, btn_size); engage_img = loadPixmap("../assets/img_chffr_wheel.png", {img_size, img_size}); experimental_img = loadPixmap("../assets/img_experimental.svg", {img_size, img_size}); QObject::connect(this, &QPushButton::clicked, this, &ExperimentalButton::changeMode); } void ExperimentalButton::changeMode() { const auto cp = (*uiState()->sm)["carParams"].getCarParams(); bool can_change = hasLongitudinalControl(cp) && params.getBool("ExperimentalModeConfirmed"); if (can_change) { params.putBool("ExperimentalMode", !experimental_mode); } } void ExperimentalButton::updateState(const UIState &s) { const auto cs = (*s.sm)["controlsState"].getControlsState(); bool eng = cs.getEngageable() || cs.getEnabled(); if ((cs.getExperimentalMode() != experimental_mode) || (eng != engageable)) { engageable = eng; experimental_mode = cs.getExperimentalMode(); update(); } } void ExperimentalButton::paintEvent(QPaintEvent *event) { QPainter p(this); QPixmap img = experimental_mode ? experimental_img : engage_img; drawIcon(p, QPoint(btn_size / 2, btn_size / 2), img, QColor(0, 0, 0, 166), (isDown() || !engageable) ? 0.6 : 1.0); } // MapSettingsButton MapSettingsButton::MapSettingsButton(QWidget *parent) : QPushButton(parent) { setFixedSize(btn_size, btn_size); settings_img = loadPixmap("../assets/navigation/icon_directions_outlined.svg", {img_size, img_size}); // hidden by default, made visible if map is created (has prime or mapbox token) setVisible(false); setEnabled(false); } void MapSettingsButton::paintEvent(QPaintEvent *event) { QPainter p(this); drawIcon(p, QPoint(btn_size / 2, btn_size / 2), settings_img, QColor(0, 0, 0, 166), isDown() ? 0.6 : 1.0); }
2301_81045437/openpilot
selfdrive/ui/qt/onroad/buttons.cc
C++
mit
2,418
#pragma once

#include <QPushButton>

#include "selfdrive/ui/ui.h"

const int btn_size = 192;
const int img_size = (btn_size / 4) * 3;

class ExperimentalButton : public QPushButton {
  Q_OBJECT

public:
  explicit ExperimentalButton(QWidget *parent = 0);
  void updateState(const UIState &s);

private:
  void paintEvent(QPaintEvent *event) override;
  void changeMode();

  Params params;
  QPixmap engage_img;
  QPixmap experimental_img;
  bool experimental_mode;
  bool engageable;
};

class MapSettingsButton : public QPushButton {
  Q_OBJECT

public:
  explicit MapSettingsButton(QWidget *parent = 0);

private:
  void paintEvent(QPaintEvent *event) override;

  QPixmap settings_img;
};

void drawIcon(QPainter &p, const QPoint &center, const QPixmap &img, const QBrush &bg, float opacity);
2301_81045437/openpilot
selfdrive/ui/qt/onroad/buttons.h
C++
mit
799
#include "selfdrive/ui/qt/onroad/onroad_home.h" #include <QPainter> #include <QStackedLayout> #ifdef ENABLE_MAPS #include "selfdrive/ui/qt/maps/map_helpers.h" #include "selfdrive/ui/qt/maps/map_panel.h" #endif #include "selfdrive/ui/qt/util.h" OnroadWindow::OnroadWindow(QWidget *parent) : QWidget(parent) { QVBoxLayout *main_layout = new QVBoxLayout(this); main_layout->setMargin(UI_BORDER_SIZE); QStackedLayout *stacked_layout = new QStackedLayout; stacked_layout->setStackingMode(QStackedLayout::StackAll); main_layout->addLayout(stacked_layout); nvg = new AnnotatedCameraWidget(VISION_STREAM_ROAD, this); QWidget * split_wrapper = new QWidget; split = new QHBoxLayout(split_wrapper); split->setContentsMargins(0, 0, 0, 0); split->setSpacing(0); split->addWidget(nvg); if (getenv("DUAL_CAMERA_VIEW")) { CameraWidget *arCam = new CameraWidget("camerad", VISION_STREAM_ROAD, true, this); split->insertWidget(0, arCam); } if (getenv("MAP_RENDER_VIEW")) { CameraWidget *map_render = new CameraWidget("navd", VISION_STREAM_MAP, false, this); split->insertWidget(0, map_render); } stacked_layout->addWidget(split_wrapper); alerts = new OnroadAlerts(this); alerts->setAttribute(Qt::WA_TransparentForMouseEvents, true); stacked_layout->addWidget(alerts); // setup stacking order alerts->raise(); setAttribute(Qt::WA_OpaquePaintEvent); QObject::connect(uiState(), &UIState::uiUpdate, this, &OnroadWindow::updateState); QObject::connect(uiState(), &UIState::offroadTransition, this, &OnroadWindow::offroadTransition); QObject::connect(uiState(), &UIState::primeChanged, this, &OnroadWindow::primeChanged); } void OnroadWindow::updateState(const UIState &s) { if (!s.scene.started) { return; } if (s.scene.map_on_left) { split->setDirection(QBoxLayout::LeftToRight); } else { split->setDirection(QBoxLayout::RightToLeft); } alerts->updateState(s); nvg->updateState(s); QColor bgColor = bg_colors[s.status]; if (bg != bgColor) { // repaint border bg = bgColor; update(); } } void OnroadWindow::mousePressEvent(QMouseEvent* e) { #ifdef ENABLE_MAPS if (map != nullptr) { bool sidebarVisible = geometry().x() > 0; bool show_map = !sidebarVisible; map->setVisible(show_map && !map->isVisible()); } #endif // propagation event to parent(HomeWindow) QWidget::mousePressEvent(e); } void OnroadWindow::createMapWidget() { #ifdef ENABLE_MAPS auto m = new MapPanel(get_mapbox_settings()); map = m; QObject::connect(m, &MapPanel::mapPanelRequested, this, &OnroadWindow::mapPanelRequested); QObject::connect(nvg->map_settings_btn, &MapSettingsButton::clicked, m, &MapPanel::toggleMapSettings); nvg->map_settings_btn->setEnabled(true); m->setFixedWidth(topWidget(this)->width() / 2 - UI_BORDER_SIZE); split->insertWidget(0, m); // hidden by default, made visible when navRoute is published m->setVisible(false); #endif } void OnroadWindow::offroadTransition(bool offroad) { #ifdef ENABLE_MAPS if (!offroad) { if (map == nullptr && (uiState()->hasPrime() || !MAPBOX_TOKEN.isEmpty())) { createMapWidget(); } } #endif alerts->clear(); } void OnroadWindow::primeChanged(bool prime) { #ifdef ENABLE_MAPS if (map && (!prime && MAPBOX_TOKEN.isEmpty())) { nvg->map_settings_btn->setEnabled(false); nvg->map_settings_btn->setVisible(false); map->deleteLater(); map = nullptr; } else if (!map && (prime || !MAPBOX_TOKEN.isEmpty())) { createMapWidget(); } #endif } void OnroadWindow::paintEvent(QPaintEvent *event) { QPainter p(this); p.fillRect(rect(), QColor(bg.red(), bg.green(), bg.blue(), 255)); }
2301_81045437/openpilot
selfdrive/ui/qt/onroad/onroad_home.cc
C++
mit
3,691
#pragma once

#include "selfdrive/ui/qt/onroad/alerts.h"
#include "selfdrive/ui/qt/onroad/annotated_camera.h"

class OnroadWindow : public QWidget {
  Q_OBJECT

public:
  OnroadWindow(QWidget* parent = 0);
  bool isMapVisible() const { return map && map->isVisible(); }
  void showMapPanel(bool show) { if (map) map->setVisible(show); }

signals:
  void mapPanelRequested();

private:
  void createMapWidget();
  void paintEvent(QPaintEvent *event);
  void mousePressEvent(QMouseEvent* e) override;
  OnroadAlerts *alerts;
  AnnotatedCameraWidget *nvg;
  QColor bg = bg_colors[STATUS_DISENGAGED];
  QWidget *map = nullptr;
  QHBoxLayout* split;

private slots:
  void offroadTransition(bool offroad);
  void primeChanged(bool prime);
  void updateState(const UIState &s);
};
2301_81045437/openpilot
selfdrive/ui/qt/onroad/onroad_home.h
C++
mit
775
import os

from cffi import FFI
import sip

from openpilot.common.ffi_wrapper import suffix
from openpilot.common.basedir import BASEDIR


def get_ffi():
  lib = os.path.join(BASEDIR, "selfdrive", "ui", "qt", "libpython_helpers" + suffix())

  ffi = FFI()
  ffi.cdef("void set_main_window(void *w);")
  return ffi, ffi.dlopen(lib)


def set_main_window(widget):
  ffi, lib = get_ffi()
  lib.set_main_window(ffi.cast('void*', sip.unwrapinstance(widget)))
2301_81045437/openpilot
selfdrive/ui/qt/python_helpers.py
Python
mit
454
#include "selfdrive/ui/qt/qt_window.h" void setMainWindow(QWidget *w) { const float scale = util::getenv("SCALE", 1.0f); const QSize sz = QGuiApplication::primaryScreen()->size(); if (Hardware::PC() && scale == 1.0 && !(sz - DEVICE_SCREEN_SIZE).isValid()) { w->setMinimumSize(QSize(640, 480)); // allow resize smaller than fullscreen w->setMaximumSize(DEVICE_SCREEN_SIZE); w->resize(sz); } else { w->setFixedSize(DEVICE_SCREEN_SIZE * scale); } w->show(); #ifdef QCOM2 QPlatformNativeInterface *native = QGuiApplication::platformNativeInterface(); wl_surface *s = reinterpret_cast<wl_surface*>(native->nativeResourceForWindow("surface", w->windowHandle())); wl_surface_set_buffer_transform(s, WL_OUTPUT_TRANSFORM_270); wl_surface_commit(s); w->showFullScreen(); // ensure we have a valid eglDisplay, otherwise the ui will silently fail void *egl = native->nativeResourceForWindow("egldisplay", w->windowHandle()); assert(egl != nullptr); #endif } extern "C" { void set_main_window(void *w) { setMainWindow((QWidget*)w); } }
2301_81045437/openpilot
selfdrive/ui/qt/qt_window.cc
C++
mit
1,081
#pragma once

#include <string>

#include <QApplication>
#include <QScreen>
#include <QWidget>

#ifdef QCOM2
#include <qpa/qplatformnativeinterface.h>
#include <wayland-client-protocol.h>
#include <QPlatformSurfaceEvent>
#endif

#include "system/hardware/hw.h"

const QString ASSET_PATH = ":/";

const QSize DEVICE_SCREEN_SIZE = {2160, 1080};

void setMainWindow(QWidget *w);
2301_81045437/openpilot
selfdrive/ui/qt/qt_window.h
C++
mit
375
#include "selfdrive/ui/qt/request_repeater.h" RequestRepeater::RequestRepeater(QObject *parent, const QString &requestURL, const QString &cacheKey, int period, bool while_onroad) : HttpRequest(parent) { timer = new QTimer(this); timer->setTimerType(Qt::VeryCoarseTimer); QObject::connect(timer, &QTimer::timeout, [=]() { if ((!uiState()->scene.started || while_onroad) && device()->isAwake() && !active()) { sendRequest(requestURL); } }); timer->start(period * 1000); if (!cacheKey.isEmpty()) { prevResp = QString::fromStdString(params.get(cacheKey.toStdString())); if (!prevResp.isEmpty()) { QTimer::singleShot(500, [=]() { emit requestDone(prevResp, true, QNetworkReply::NoError); }); } QObject::connect(this, &HttpRequest::requestDone, [=](const QString &resp, bool success) { if (success && resp != prevResp) { params.put(cacheKey.toStdString(), resp.toStdString()); prevResp = resp; } }); } }
2301_81045437/openpilot
selfdrive/ui/qt/request_repeater.cc
C++
mit
1,016
#pragma once

#include "common/util.h"
#include "selfdrive/ui/qt/api.h"
#include "selfdrive/ui/ui.h"

class RequestRepeater : public HttpRequest {
public:
  RequestRepeater(QObject *parent, const QString &requestURL, const QString &cacheKey = "",
                  int period = 0, bool while_onroad = false);

private:
  Params params;
  QTimer *timer;
  QString prevResp;
};
2301_81045437/openpilot
selfdrive/ui/qt/request_repeater.h
C++
mit
356
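For context on how this class is typically wired up: the constructor above takes a URL, an optional Params cache key (whose stored value is replayed through requestDone shortly after construction), and a polling period in seconds, while requestDone itself is inherited from HttpRequest. A hedged usage sketch; the endpoint URL, cache key, and helper function below are illustrative and not taken from this repo:

#include <QDebug>
#include "selfdrive/ui/qt/request_repeater.h"

// Hypothetical helper: poll a status endpoint once a minute and cache the last response.
void startStatusPolling(QObject *parent) {
  // Endpoint and cache key are made up for this sketch.
  auto *repeater = new RequestRepeater(parent, "https://example.com/v1/status",
                                       "ExampleCachedResponse", 60);
  QObject::connect(repeater, &RequestRepeater::requestDone,
                   [](const QString &response, bool success) {
    if (success) {
      qDebug() << "got response:" << response;
    }
  });
}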
#include <QApplication>
#include <QHBoxLayout>
#include <QLabel>
#include <QPushButton>
#include <QTimer>
#include <QVBoxLayout>

#include "selfdrive/ui/qt/qt_window.h"
#include "selfdrive/ui/qt/setup/reset.h"

#define NVME "/dev/nvme0n1"
#define USERDATA "/dev/disk/by-partlabel/userdata"

void Reset::doErase() {
  // best effort to wipe nvme
  std::system("sudo umount " NVME);
  std::system("yes | sudo mkfs.ext4 " NVME);

  int rm = std::system("sudo rm -rf /data/*");
  std::system("sudo umount " USERDATA);
  int fmt = std::system("yes | sudo mkfs.ext4 " USERDATA);

  if (rm == 0 || fmt == 0) {
    std::system("sudo reboot");
  }
  body->setText(tr("Reset failed. Reboot to try again."));
  rebootBtn->show();
}

void Reset::startReset() {
  body->setText(tr("Resetting device...\nThis may take up to a minute."));
  rejectBtn->hide();
  rebootBtn->hide();
  confirmBtn->hide();
#ifdef __aarch64__
  QTimer::singleShot(100, this, &Reset::doErase);
#endif
}

void Reset::confirm() {
  const QString confirm_txt = tr("Are you sure you want to reset your device?");
  if (body->text() != confirm_txt) {
    body->setText(confirm_txt);
  } else {
    startReset();
  }
}

Reset::Reset(ResetMode mode, QWidget *parent) : QWidget(parent) {
  QVBoxLayout *main_layout = new QVBoxLayout(this);
  main_layout->setContentsMargins(45, 220, 45, 45);
  main_layout->setSpacing(0);

  QLabel *title = new QLabel(tr("System Reset"));
  title->setStyleSheet("font-size: 90px; font-weight: 600;");
  main_layout->addWidget(title, 0, Qt::AlignTop | Qt::AlignLeft);

  main_layout->addSpacing(60);

  body = new QLabel(tr("System reset triggered. Press confirm to erase all content and settings. Press cancel to resume boot."));
  body->setWordWrap(true);
  body->setStyleSheet("font-size: 80px; font-weight: light;");
  main_layout->addWidget(body, 1, Qt::AlignTop | Qt::AlignLeft);

  QHBoxLayout *blayout = new QHBoxLayout();
  main_layout->addLayout(blayout);
  blayout->setSpacing(50);

  rejectBtn = new QPushButton(tr("Cancel"));
  blayout->addWidget(rejectBtn);
  QObject::connect(rejectBtn, &QPushButton::clicked, QCoreApplication::instance(), &QCoreApplication::quit);

  rebootBtn = new QPushButton(tr("Reboot"));
  blayout->addWidget(rebootBtn);
#ifdef __aarch64__
  QObject::connect(rebootBtn, &QPushButton::clicked, [=]{
    std::system("sudo reboot");
  });
#endif

  confirmBtn = new QPushButton(tr("Confirm"));
  confirmBtn->setStyleSheet(R"(
    QPushButton { background-color: #465BEA; }
    QPushButton:pressed { background-color: #3049F4; }
  )");
  blayout->addWidget(confirmBtn);
  QObject::connect(confirmBtn, &QPushButton::clicked, this, &Reset::confirm);

  bool recover = mode == ResetMode::RECOVER;
  rejectBtn->setVisible(!recover);
  rebootBtn->setVisible(recover);
  if (recover) {
    body->setText(tr("Unable to mount data partition. Partition may be corrupted. Press confirm to erase and reset your device."));
  }

  setStyleSheet(R"(
    * {
      font-family: Inter;
      color: white;
      background-color: black;
    }
    QLabel {
      margin-left: 140;
    }
    QPushButton {
      height: 160;
      font-size: 55px;
      font-weight: 400;
      border-radius: 10px;
      background-color: #333333;
    }
    QPushButton:pressed {
      background-color: #444444;
    }
  )");
}

int main(int argc, char *argv[]) {
  ResetMode mode = ResetMode::USER_RESET;
  if (argc > 1) {
    if (strcmp(argv[1], "--recover") == 0) {
      mode = ResetMode::RECOVER;
    }
  }

  QApplication a(argc, argv);
  Reset reset(mode);
  setMainWindow(&reset);
  return a.exec();
}
2301_81045437/openpilot
selfdrive/ui/qt/setup/reset.cc
C++
mit
3,620
#include <QLabel>
#include <QPushButton>
#include <QWidget>

enum ResetMode {
  USER_RESET,  // user initiated a factory reset from openpilot
  RECOVER,  // userdata is corrupt for some reason, give a chance to recover
};

class Reset : public QWidget {
  Q_OBJECT

public:
  explicit Reset(ResetMode mode, QWidget *parent = 0);

private:
  QLabel *body;
  QPushButton *rejectBtn;
  QPushButton *rebootBtn;
  QPushButton *confirmBtn;

  void doErase();
  void startReset();

private slots:
  void confirm();
};
2301_81045437/openpilot
selfdrive/ui/qt/setup/reset.h
C++
mit
511
#include "selfdrive/ui/qt/setup/setup.h" #include <cstdio> #include <cstdlib> #include <sstream> #include <string> #include <QApplication> #include <QLabel> #include <QVBoxLayout> #include <curl/curl.h> #include "common/util.h" #include "system/hardware/hw.h" #include "selfdrive/ui/qt/api.h" #include "selfdrive/ui/qt/qt_window.h" #include "selfdrive/ui/qt/network/networking.h" #include "selfdrive/ui/qt/util.h" #include "selfdrive/ui/qt/widgets/input.h" const std::string USER_AGENT = "AGNOSSetup-"; const QString OPENPILOT_URL = "https://openpilot.comma.ai"; bool is_elf(char *fname) { FILE *fp = fopen(fname, "rb"); if (fp == NULL) { return false; } char buf[4]; size_t n = fread(buf, 1, 4, fp); fclose(fp); return n == 4 && buf[0] == 0x7f && buf[1] == 'E' && buf[2] == 'L' && buf[3] == 'F'; } void Setup::download(QString url) { // autocomplete incomplete urls if (QRegularExpression("^([^/.]+)/([^/]+)$").match(url).hasMatch()) { url.prepend("https://installer.comma.ai/"); } CURL *curl = curl_easy_init(); if (!curl) { emit finished(url, tr("Something went wrong. Reboot the device.")); return; } auto version = util::read_file("/VERSION"); struct curl_slist *list = NULL; list = curl_slist_append(list, ("X-openpilot-serial: " + Hardware::get_serial()).c_str()); char tmpfile[] = "/tmp/installer_XXXXXX"; FILE *fp = fdopen(mkstemp(tmpfile), "wb"); curl_easy_setopt(curl, CURLOPT_URL, url.toStdString().c_str()); curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, NULL); curl_easy_setopt(curl, CURLOPT_WRITEDATA, fp); curl_easy_setopt(curl, CURLOPT_VERBOSE, 1L); curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L); curl_easy_setopt(curl, CURLOPT_USERAGENT, (USER_AGENT + version).c_str()); curl_easy_setopt(curl, CURLOPT_HTTPHEADER, list); curl_easy_setopt(curl, CURLOPT_TIMEOUT, 30L); int ret = curl_easy_perform(curl); long res_status = 0; curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &res_status); if (ret != CURLE_OK || res_status != 200) { emit finished(url, tr("Ensure the entered URL is valid, and the device’s internet connection is good.")); } else if (!is_elf(tmpfile)) { emit finished(url, tr("No custom software found at this URL.")); } else { rename(tmpfile, "/tmp/installer"); FILE *fp_url = fopen("/tmp/installer_url", "w"); fprintf(fp_url, "%s", url.toStdString().c_str()); fclose(fp_url); emit finished(url); } curl_slist_free_all(list); curl_easy_cleanup(curl); fclose(fp); } QWidget * Setup::low_voltage() { QWidget *widget = new QWidget(); QVBoxLayout *main_layout = new QVBoxLayout(widget); main_layout->setContentsMargins(55, 0, 55, 55); main_layout->setSpacing(0); // inner text layout: warning icon, title, and body QVBoxLayout *inner_layout = new QVBoxLayout(); inner_layout->setContentsMargins(110, 144, 365, 0); main_layout->addLayout(inner_layout); QLabel *triangle = new QLabel(); triangle->setPixmap(QPixmap(ASSET_PATH + "offroad/icon_warning.png")); inner_layout->addWidget(triangle, 0, Qt::AlignTop | Qt::AlignLeft); inner_layout->addSpacing(80); QLabel *title = new QLabel(tr("WARNING: Low Voltage")); title->setStyleSheet("font-size: 90px; font-weight: 500; color: #FF594F;"); inner_layout->addWidget(title, 0, Qt::AlignTop | Qt::AlignLeft); inner_layout->addSpacing(25); QLabel *body = new QLabel(tr("Power your device in a car with a harness or proceed at your own risk.")); body->setWordWrap(true); body->setAlignment(Qt::AlignTop | Qt::AlignLeft); body->setStyleSheet("font-size: 80px; font-weight: 300;"); inner_layout->addWidget(body); inner_layout->addStretch(); // power off + continue buttons QHBoxLayout 
*blayout = new QHBoxLayout(); blayout->setSpacing(50); main_layout->addLayout(blayout, 0); QPushButton *poweroff = new QPushButton(tr("Power off")); poweroff->setObjectName("navBtn"); blayout->addWidget(poweroff); QObject::connect(poweroff, &QPushButton::clicked, this, [=]() { Hardware::poweroff(); }); QPushButton *cont = new QPushButton(tr("Continue")); cont->setObjectName("navBtn"); blayout->addWidget(cont); QObject::connect(cont, &QPushButton::clicked, this, &Setup::nextPage); return widget; } QWidget * Setup::getting_started() { QWidget *widget = new QWidget(); QHBoxLayout *main_layout = new QHBoxLayout(widget); main_layout->setMargin(0); QVBoxLayout *vlayout = new QVBoxLayout(); vlayout->setContentsMargins(165, 280, 100, 0); main_layout->addLayout(vlayout); QLabel *title = new QLabel(tr("Getting Started")); title->setStyleSheet("font-size: 90px; font-weight: 500;"); vlayout->addWidget(title, 0, Qt::AlignTop | Qt::AlignLeft); vlayout->addSpacing(90); QLabel *desc = new QLabel(tr("Before we get on the road, let’s finish installation and cover some details.")); desc->setWordWrap(true); desc->setStyleSheet("font-size: 80px; font-weight: 300;"); vlayout->addWidget(desc, 0, Qt::AlignTop | Qt::AlignLeft); vlayout->addStretch(); QPushButton *btn = new QPushButton(); btn->setIcon(QIcon(":/img_continue_triangle.svg")); btn->setIconSize(QSize(54, 106)); btn->setFixedSize(310, 1080); btn->setProperty("primary", true); btn->setStyleSheet("border: none;"); main_layout->addWidget(btn, 0, Qt::AlignRight); QObject::connect(btn, &QPushButton::clicked, this, &Setup::nextPage); return widget; } QWidget * Setup::network_setup() { QWidget *widget = new QWidget(); QVBoxLayout *main_layout = new QVBoxLayout(widget); main_layout->setContentsMargins(55, 50, 55, 50); // title QLabel *title = new QLabel(tr("Connect to Wi-Fi")); title->setStyleSheet("font-size: 90px; font-weight: 500;"); main_layout->addWidget(title, 0, Qt::AlignLeft | Qt::AlignTop); main_layout->addSpacing(25); // wifi widget Networking *networking = new Networking(this, false); networking->setStyleSheet("Networking {background-color: #292929; border-radius: 13px;}"); main_layout->addWidget(networking, 1); main_layout->addSpacing(35); // back + continue buttons QHBoxLayout *blayout = new QHBoxLayout; main_layout->addLayout(blayout); blayout->setSpacing(50); QPushButton *back = new QPushButton(tr("Back")); back->setObjectName("navBtn"); QObject::connect(back, &QPushButton::clicked, this, &Setup::prevPage); blayout->addWidget(back); QPushButton *cont = new QPushButton(); cont->setObjectName("navBtn"); cont->setProperty("primary", true); cont->setEnabled(false); QObject::connect(cont, &QPushButton::clicked, this, &Setup::nextPage); blayout->addWidget(cont); // setup timer for testing internet connection HttpRequest *request = new HttpRequest(this, false, 2500); QObject::connect(request, &HttpRequest::requestDone, [=](const QString &, bool success) { cont->setEnabled(success); if (success) { const bool wifi = networking->wifi->currentNetworkType() == NetworkType::WIFI; cont->setText(wifi ? 
tr("Continue") : tr("Continue without Wi-Fi")); } else { cont->setText(tr("Waiting for internet")); } repaint(); }); request->sendRequest(OPENPILOT_URL); QTimer *timer = new QTimer(this); QObject::connect(timer, &QTimer::timeout, [=]() { if (!request->active() && cont->isVisible()) { request->sendRequest(OPENPILOT_URL); } }); timer->start(1000); return widget; } QWidget * radio_button(QString title, QButtonGroup *group) { QPushButton *btn = new QPushButton(title); btn->setCheckable(true); group->addButton(btn); btn->setStyleSheet(R"( QPushButton { height: 230; padding-left: 100px; padding-right: 100px; text-align: left; font-size: 80px; font-weight: 400; border-radius: 10px; background-color: #4F4F4F; } QPushButton:checked { background-color: #465BEA; } )"); // checkmark icon QPixmap pix(":/img_circled_check.svg"); btn->setIcon(pix); btn->setIconSize(QSize(0, 0)); btn->setLayoutDirection(Qt::RightToLeft); QObject::connect(btn, &QPushButton::toggled, [=](bool checked) { btn->setIconSize(checked ? QSize(104, 104) : QSize(0, 0)); }); return btn; } QWidget * Setup::software_selection() { QWidget *widget = new QWidget(); QVBoxLayout *main_layout = new QVBoxLayout(widget); main_layout->setContentsMargins(55, 50, 55, 50); main_layout->setSpacing(0); // title QLabel *title = new QLabel(tr("Choose Software to Install")); title->setStyleSheet("font-size: 90px; font-weight: 500;"); main_layout->addWidget(title, 0, Qt::AlignLeft | Qt::AlignTop); main_layout->addSpacing(50); // openpilot + custom radio buttons QButtonGroup *group = new QButtonGroup(widget); group->setExclusive(true); QWidget *openpilot = radio_button(tr("openpilot"), group); main_layout->addWidget(openpilot); main_layout->addSpacing(30); QWidget *custom = radio_button(tr("Custom Software"), group); main_layout->addWidget(custom); main_layout->addStretch(); // back + continue buttons QHBoxLayout *blayout = new QHBoxLayout; main_layout->addLayout(blayout); blayout->setSpacing(50); QPushButton *back = new QPushButton(tr("Back")); back->setObjectName("navBtn"); QObject::connect(back, &QPushButton::clicked, this, &Setup::prevPage); blayout->addWidget(back); QPushButton *cont = new QPushButton(tr("Continue")); cont->setObjectName("navBtn"); cont->setEnabled(false); cont->setProperty("primary", true); blayout->addWidget(cont); QObject::connect(cont, &QPushButton::clicked, [=]() { auto w = currentWidget(); QTimer::singleShot(0, [=]() { setCurrentWidget(downloading_widget); }); QString url = OPENPILOT_URL; if (group->checkedButton() != openpilot) { url = InputDialog::getText(tr("Enter URL"), this, tr("for Custom Software")); } if (!url.isEmpty()) { QTimer::singleShot(1000, this, [=]() { download(url); }); } else { setCurrentWidget(w); } }); connect(group, QOverload<QAbstractButton *>::of(&QButtonGroup::buttonClicked), [=](QAbstractButton *btn) { btn->setChecked(true); cont->setEnabled(true); }); return widget; } QWidget * Setup::downloading() { QWidget *widget = new QWidget(); QVBoxLayout *main_layout = new QVBoxLayout(widget); QLabel *txt = new QLabel(tr("Downloading...")); txt->setStyleSheet("font-size: 90px; font-weight: 500;"); main_layout->addWidget(txt, 0, Qt::AlignCenter); return widget; } QWidget * Setup::download_failed(QLabel *url, QLabel *body) { QWidget *widget = new QWidget(); QVBoxLayout *main_layout = new QVBoxLayout(widget); main_layout->setContentsMargins(55, 185, 55, 55); main_layout->setSpacing(0); QLabel *title = new QLabel(tr("Download Failed")); title->setStyleSheet("font-size: 90px; font-weight: 500;"); 
main_layout->addWidget(title, 0, Qt::AlignTop | Qt::AlignLeft); main_layout->addSpacing(67); url->setWordWrap(true); url->setAlignment(Qt::AlignTop | Qt::AlignLeft); url->setStyleSheet("font-family: \"JetBrains Mono\"; font-size: 64px; font-weight: 400; margin-right: 100px;"); main_layout->addWidget(url); main_layout->addSpacing(48); body->setWordWrap(true); body->setAlignment(Qt::AlignTop | Qt::AlignLeft); body->setStyleSheet("font-size: 80px; font-weight: 300; margin-right: 100px;"); main_layout->addWidget(body); main_layout->addStretch(); // reboot + start over buttons QHBoxLayout *blayout = new QHBoxLayout(); blayout->setSpacing(50); main_layout->addLayout(blayout, 0); QPushButton *reboot = new QPushButton(tr("Reboot device")); reboot->setObjectName("navBtn"); blayout->addWidget(reboot); QObject::connect(reboot, &QPushButton::clicked, this, [=]() { Hardware::reboot(); }); QPushButton *restart = new QPushButton(tr("Start over")); restart->setObjectName("navBtn"); restart->setProperty("primary", true); blayout->addWidget(restart); QObject::connect(restart, &QPushButton::clicked, this, [=]() { setCurrentIndex(1); }); widget->setStyleSheet(R"( QLabel { margin-left: 117; } )"); return widget; } void Setup::prevPage() { setCurrentIndex(currentIndex() - 1); } void Setup::nextPage() { setCurrentIndex(currentIndex() + 1); } Setup::Setup(QWidget *parent) : QStackedWidget(parent) { if (std::getenv("MULTILANG")) { selectLanguage(); } std::stringstream buffer; buffer << std::ifstream("/sys/class/hwmon/hwmon1/in1_input").rdbuf(); float voltage = (float)std::atoi(buffer.str().c_str()) / 1000.; if (voltage < 7) { addWidget(low_voltage()); } addWidget(getting_started()); addWidget(network_setup()); addWidget(software_selection()); downloading_widget = downloading(); addWidget(downloading_widget); QLabel *url_label = new QLabel(); QLabel *body_label = new QLabel(); failed_widget = download_failed(url_label, body_label); addWidget(failed_widget); QObject::connect(this, &Setup::finished, [=](const QString &url, const QString &error) { qDebug() << "finished" << url << error; if (error.isEmpty()) { // hide setup on success QTimer::singleShot(3000, this, &QWidget::hide); } else { url_label->setText(url); body_label->setText(error); setCurrentWidget(failed_widget); } }); // TODO: revisit pressed bg color setStyleSheet(R"( * { color: white; font-family: Inter; } Setup { background-color: black; } QPushButton#navBtn { height: 160; font-size: 55px; font-weight: 400; border-radius: 10px; background-color: #333333; } QPushButton#navBtn:disabled, QPushButton[primary='true']:disabled { color: #808080; background-color: #333333; } QPushButton#navBtn:pressed { background-color: #444444; } QPushButton[primary='true'], #navBtn[primary='true'] { background-color: #465BEA; } QPushButton[primary='true']:pressed, #navBtn:pressed[primary='true'] { background-color: #3049F4; } )"); } void Setup::selectLanguage() { QMap<QString, QString> langs = getSupportedLanguages(); QString selection = MultiOptionDialog::getSelection(tr("Select a language"), langs.keys(), "", this); if (!selection.isEmpty()) { QString selectedLang = langs[selection]; Params().put("LanguageSetting", selectedLang.toStdString()); if (translator.load(":/" + selectedLang)) { qApp->installTranslator(&translator); } } } int main(int argc, char *argv[]) { QApplication a(argc, argv); Setup setup; setMainWindow(&setup); return a.exec(); }
2301_81045437/openpilot
selfdrive/ui/qt/setup/setup.cc
C++
mit
14,688
#pragma once

#include <QLabel>
#include <QStackedWidget>
#include <QString>
#include <QTranslator>
#include <QWidget>

class Setup : public QStackedWidget {
  Q_OBJECT

public:
  explicit Setup(QWidget *parent = 0);

private:
  void selectLanguage();
  QWidget *low_voltage();
  QWidget *getting_started();
  QWidget *network_setup();
  QWidget *software_selection();
  QWidget *downloading();
  QWidget *download_failed(QLabel *url, QLabel *body);

  QWidget *failed_widget;
  QWidget *downloading_widget;
  QTranslator translator;

signals:
  void finished(const QString &url, const QString &error = "");

public slots:
  void nextPage();
  void prevPage();
  void download(QString url);
};
2301_81045437/openpilot
selfdrive/ui/qt/setup/setup.h
C++
mit
694
#include <QDebug> #include <QTimer> #include <QVBoxLayout> #include "system/hardware/hw.h" #include "selfdrive/ui/qt/util.h" #include "selfdrive/ui/qt/qt_window.h" #include "selfdrive/ui/qt/setup/updater.h" #include "selfdrive/ui/qt/network/networking.h" Updater::Updater(const QString &updater_path, const QString &manifest_path, QWidget *parent) : updater(updater_path), manifest(manifest_path), QStackedWidget(parent) { assert(updater.size()); assert(manifest.size()); // initial prompt screen prompt = new QWidget; { QVBoxLayout *layout = new QVBoxLayout(prompt); layout->setContentsMargins(100, 250, 100, 100); QLabel *title = new QLabel(tr("Update Required")); title->setStyleSheet("font-size: 80px; font-weight: bold;"); layout->addWidget(title); layout->addSpacing(75); QLabel *desc = new QLabel(tr("An operating system update is required. Connect your device to Wi-Fi for the fastest update experience. The download size is approximately 1GB.")); desc->setWordWrap(true); desc->setStyleSheet("font-size: 65px;"); layout->addWidget(desc); layout->addStretch(); QHBoxLayout *hlayout = new QHBoxLayout; hlayout->setSpacing(30); layout->addLayout(hlayout); QPushButton *connect = new QPushButton(tr("Connect to Wi-Fi")); connect->setObjectName("navBtn"); QObject::connect(connect, &QPushButton::clicked, [=]() { setCurrentWidget(wifi); }); hlayout->addWidget(connect); QPushButton *install = new QPushButton(tr("Install")); install->setObjectName("navBtn"); install->setStyleSheet(R"( QPushButton { background-color: #465BEA; } QPushButton:pressed { background-color: #3049F4; } )"); QObject::connect(install, &QPushButton::clicked, this, &Updater::installUpdate); hlayout->addWidget(install); } // wifi connection screen wifi = new QWidget; { QVBoxLayout *layout = new QVBoxLayout(wifi); layout->setContentsMargins(100, 100, 100, 100); Networking *networking = new Networking(this, false); networking->setStyleSheet("Networking { background-color: #292929; border-radius: 13px; }"); layout->addWidget(networking, 1); QPushButton *back = new QPushButton(tr("Back")); back->setObjectName("navBtn"); back->setStyleSheet("padding-left: 60px; padding-right: 60px;"); QObject::connect(back, &QPushButton::clicked, [=]() { setCurrentWidget(prompt); }); layout->addWidget(back, 0, Qt::AlignLeft); } // progress screen progress = new QWidget; { QVBoxLayout *layout = new QVBoxLayout(progress); layout->setContentsMargins(150, 330, 150, 150); layout->setSpacing(0); text = new QLabel(tr("Loading...")); text->setStyleSheet("font-size: 90px; font-weight: 600;"); layout->addWidget(text, 0, Qt::AlignTop); layout->addSpacing(100); bar = new QProgressBar(); bar->setRange(0, 100); bar->setTextVisible(false); bar->setFixedHeight(72); layout->addWidget(bar, 0, Qt::AlignTop); layout->addStretch(); reboot = new QPushButton(tr("Reboot")); reboot->setObjectName("navBtn"); reboot->setStyleSheet("padding-left: 60px; padding-right: 60px;"); QObject::connect(reboot, &QPushButton::clicked, [=]() { Hardware::reboot(); }); layout->addWidget(reboot, 0, Qt::AlignLeft); reboot->hide(); layout->addStretch(); } addWidget(prompt); addWidget(wifi); addWidget(progress); setStyleSheet(R"( * { color: white; outline: none; font-family: Inter; } Updater { color: white; background-color: black; } QPushButton#navBtn { height: 160; font-size: 55px; font-weight: 400; border-radius: 10px; background-color: #333333; } QPushButton#navBtn:pressed { background-color: #444444; } QProgressBar { border: none; background-color: #292929; } QProgressBar::chunk { background-color: 
#364DEF; } )"); } void Updater::installUpdate() { setCurrentWidget(progress); QObject::connect(&proc, &QProcess::readyReadStandardOutput, this, &Updater::readProgress); QObject::connect(&proc, QOverload<int, QProcess::ExitStatus>::of(&QProcess::finished), this, &Updater::updateFinished); proc.setProcessChannelMode(QProcess::ForwardedErrorChannel); proc.start(updater, {"--swap", manifest}); } void Updater::readProgress() { auto lines = QString(proc.readAllStandardOutput()); for (const QString &line : lines.trimmed().split("\n")) { auto parts = line.split(":"); if (parts.size() == 2) { text->setText(parts[0]); bar->setValue((int)parts[1].toDouble()); } else { qDebug() << line; } } update(); } void Updater::updateFinished(int exitCode, QProcess::ExitStatus exitStatus) { qDebug() << "finished with " << exitCode; if (exitCode == 0) { Hardware::reboot(); } else { text->setText(tr("Update failed")); reboot->show(); } } int main(int argc, char *argv[]) { initApp(argc, argv); QApplication a(argc, argv); Updater updater(argv[1], argv[2]); setMainWindow(&updater); a.installEventFilter(&updater); return a.exec(); }
2301_81045437/openpilot
selfdrive/ui/qt/setup/updater.cc
C++
mit
5,241
#pragma once

#include <QLabel>
#include <QProcess>
#include <QPushButton>
#include <QProgressBar>
#include <QStackedWidget>
#include <QWidget>

class Updater : public QStackedWidget {
  Q_OBJECT

public:
  explicit Updater(const QString &updater_path, const QString &manifest_path, QWidget *parent = 0);

private slots:
  void installUpdate();
  void readProgress();
  void updateFinished(int exitCode, QProcess::ExitStatus exitStatus);

private:
  QProcess proc;
  QString updater, manifest;
  QLabel *text;
  QProgressBar *bar;
  QPushButton *reboot;
  QWidget *prompt, *wifi, *progress;
};
2301_81045437/openpilot
selfdrive/ui/qt/setup/updater.h
C++
mit
595
#include "selfdrive/ui/qt/sidebar.h" #include <QMouseEvent> #include "selfdrive/ui/qt/util.h" void Sidebar::drawMetric(QPainter &p, const QPair<QString, QString> &label, QColor c, int y) { const QRect rect = {30, y, 240, 126}; p.setPen(Qt::NoPen); p.setBrush(QBrush(c)); p.setClipRect(rect.x() + 4, rect.y(), 18, rect.height(), Qt::ClipOperation::ReplaceClip); p.drawRoundedRect(QRect(rect.x() + 4, rect.y() + 4, 100, 118), 18, 18); p.setClipping(false); QPen pen = QPen(QColor(0xff, 0xff, 0xff, 0x55)); pen.setWidth(2); p.setPen(pen); p.setBrush(Qt::NoBrush); p.drawRoundedRect(rect, 20, 20); p.setPen(QColor(0xff, 0xff, 0xff)); p.setFont(InterFont(35, QFont::DemiBold)); p.drawText(rect.adjusted(22, 0, 0, 0), Qt::AlignCenter, label.first + "\n" + label.second); } Sidebar::Sidebar(QWidget *parent) : QFrame(parent), onroad(false), flag_pressed(false), settings_pressed(false) { home_img = loadPixmap("../assets/images/button_home.png", home_btn.size()); flag_img = loadPixmap("../assets/images/button_flag.png", home_btn.size()); settings_img = loadPixmap("../assets/images/button_settings.png", settings_btn.size(), Qt::IgnoreAspectRatio); connect(this, &Sidebar::valueChanged, [=] { update(); }); setAttribute(Qt::WA_OpaquePaintEvent); setSizePolicy(QSizePolicy::Fixed, QSizePolicy::Expanding); setFixedWidth(300); QObject::connect(uiState(), &UIState::uiUpdate, this, &Sidebar::updateState); pm = std::make_unique<PubMaster, const std::initializer_list<const char *>>({"userFlag"}); } void Sidebar::mousePressEvent(QMouseEvent *event) { if (onroad && home_btn.contains(event->pos())) { flag_pressed = true; update(); } else if (settings_btn.contains(event->pos())) { settings_pressed = true; update(); } } void Sidebar::mouseReleaseEvent(QMouseEvent *event) { if (flag_pressed || settings_pressed) { flag_pressed = settings_pressed = false; update(); } if (onroad && home_btn.contains(event->pos())) { MessageBuilder msg; msg.initEvent().initUserFlag(); pm->send("userFlag", msg); } else if (settings_btn.contains(event->pos())) { emit openSettings(); } } void Sidebar::offroadTransition(bool offroad) { onroad = !offroad; update(); } void Sidebar::updateState(const UIState &s) { if (!isVisible()) return; auto &sm = *(s.sm); auto deviceState = sm["deviceState"].getDeviceState(); setProperty("netType", network_type[deviceState.getNetworkType()]); int strength = (int)deviceState.getNetworkStrength(); setProperty("netStrength", strength > 0 ? strength + 1 : 0); ItemStatus connectStatus; auto last_ping = deviceState.getLastAthenaPingTime(); if (last_ping == 0) { connectStatus = ItemStatus{{tr("CONNECT"), tr("OFFLINE")}, warning_color}; } else { connectStatus = nanos_since_boot() - last_ping < 80e9 ? 
ItemStatus{{tr("CONNECT"), tr("ONLINE")}, good_color} : ItemStatus{{tr("CONNECT"), tr("ERROR")}, danger_color}; } setProperty("connectStatus", QVariant::fromValue(connectStatus)); ItemStatus tempStatus = {{tr("TEMP"), tr("HIGH")}, danger_color}; auto ts = deviceState.getThermalStatus(); if (ts == cereal::DeviceState::ThermalStatus::GREEN) { tempStatus = {{tr("TEMP"), tr("GOOD")}, good_color}; } else if (ts == cereal::DeviceState::ThermalStatus::YELLOW) { tempStatus = {{tr("TEMP"), tr("OK")}, warning_color}; } setProperty("tempStatus", QVariant::fromValue(tempStatus)); ItemStatus pandaStatus = {{tr("VEHICLE"), tr("ONLINE")}, good_color}; if (s.scene.pandaType == cereal::PandaState::PandaType::UNKNOWN) { pandaStatus = {{tr("NO"), tr("PANDA")}, danger_color}; } else if (s.scene.started && !sm["liveLocationKalman"].getLiveLocationKalman().getGpsOK()) { pandaStatus = {{tr("GPS"), tr("SEARCH")}, warning_color}; } setProperty("pandaStatus", QVariant::fromValue(pandaStatus)); } void Sidebar::paintEvent(QPaintEvent *event) { QPainter p(this); p.setPen(Qt::NoPen); p.setRenderHint(QPainter::Antialiasing); p.fillRect(rect(), QColor(57, 57, 57)); // buttons p.setOpacity(settings_pressed ? 0.65 : 1.0); p.drawPixmap(settings_btn.x(), settings_btn.y(), settings_img); p.setOpacity(onroad && flag_pressed ? 0.65 : 1.0); p.drawPixmap(home_btn.x(), home_btn.y(), onroad ? flag_img : home_img); p.setOpacity(1.0); // network int x = 58; const QColor gray(0x54, 0x54, 0x54); for (int i = 0; i < 5; ++i) { p.setBrush(i < net_strength ? Qt::white : gray); p.drawEllipse(x, 196, 27, 27); x += 37; } p.setFont(InterFont(35)); p.setPen(QColor(0xff, 0xff, 0xff)); const QRect r = QRect(50, 247, 100, 50); p.drawText(r, Qt::AlignCenter, net_type); // metrics drawMetric(p, temp_status.first, temp_status.second, 338); drawMetric(p, panda_status.first, panda_status.second, 496); drawMetric(p, connect_status.first, connect_status.second, 654); }
2301_81045437/openpilot
selfdrive/ui/qt/sidebar.cc
C++
mit
4,959
#pragma once

#include <memory>

#include <QFrame>
#include <QMap>

#include "selfdrive/ui/ui.h"

typedef QPair<QPair<QString, QString>, QColor> ItemStatus;
Q_DECLARE_METATYPE(ItemStatus);

class Sidebar : public QFrame {
  Q_OBJECT
  Q_PROPERTY(ItemStatus connectStatus MEMBER connect_status NOTIFY valueChanged);
  Q_PROPERTY(ItemStatus pandaStatus MEMBER panda_status NOTIFY valueChanged);
  Q_PROPERTY(ItemStatus tempStatus MEMBER temp_status NOTIFY valueChanged);
  Q_PROPERTY(QString netType MEMBER net_type NOTIFY valueChanged);
  Q_PROPERTY(int netStrength MEMBER net_strength NOTIFY valueChanged);

public:
  explicit Sidebar(QWidget* parent = 0);

signals:
  void openSettings(int index = 0, const QString &param = "");
  void valueChanged();

public slots:
  void offroadTransition(bool offroad);
  void updateState(const UIState &s);

protected:
  void paintEvent(QPaintEvent *event) override;
  void mousePressEvent(QMouseEvent *event) override;
  void mouseReleaseEvent(QMouseEvent *event) override;
  void drawMetric(QPainter &p, const QPair<QString, QString> &label, QColor c, int y);

  QPixmap home_img, flag_img, settings_img;
  bool onroad, flag_pressed, settings_pressed;

  const QMap<cereal::DeviceState::NetworkType, QString> network_type = {
    {cereal::DeviceState::NetworkType::NONE, tr("--")},
    {cereal::DeviceState::NetworkType::WIFI, tr("Wi-Fi")},
    {cereal::DeviceState::NetworkType::ETHERNET, tr("ETH")},
    {cereal::DeviceState::NetworkType::CELL2_G, tr("2G")},
    {cereal::DeviceState::NetworkType::CELL3_G, tr("3G")},
    {cereal::DeviceState::NetworkType::CELL4_G, tr("LTE")},
    {cereal::DeviceState::NetworkType::CELL5_G, tr("5G")}
  };

  const QRect home_btn = QRect(60, 860, 180, 180);
  const QRect settings_btn = QRect(50, 35, 200, 117);

  const QColor good_color = QColor(255, 255, 255);
  const QColor warning_color = QColor(218, 202, 37);
  const QColor danger_color = QColor(201, 34, 49);

  ItemStatus connect_status, panda_status, temp_status;
  QString net_type;
  int net_strength = 0;

private:
  std::unique_ptr<PubMaster> pm;
};
2301_81045437/openpilot
selfdrive/ui/qt/sidebar.h
C++
mit
2,091
#include "selfdrive/ui/qt/spinner.h" #include <algorithm> #include <cstdio> #include <iostream> #include <string> #include <QApplication> #include <QGridLayout> #include <QPainter> #include <QString> #include <QTransform> #include "system/hardware/hw.h" #include "selfdrive/ui/qt/qt_window.h" #include "selfdrive/ui/qt/util.h" TrackWidget::TrackWidget(QWidget *parent) : QWidget(parent) { setAttribute(Qt::WA_OpaquePaintEvent); setFixedSize(spinner_size); // pre-compute all the track imgs. make this a gif instead? QPixmap comma_img = loadPixmap("../assets/img_spinner_comma.png", spinner_size); QPixmap track_img = loadPixmap("../assets/img_spinner_track.png", spinner_size); QTransform transform(1, 0, 0, 1, width() / 2, height() / 2); QPixmap pm(spinner_size); QPainter p(&pm); p.setRenderHint(QPainter::SmoothPixmapTransform); for (int i = 0; i < track_imgs.size(); ++i) { p.resetTransform(); p.fillRect(0, 0, spinner_size.width(), spinner_size.height(), Qt::black); p.drawPixmap(0, 0, comma_img); p.setTransform(transform.rotate(360 / spinner_fps)); p.drawPixmap(-width() / 2, -height() / 2, track_img); track_imgs[i] = pm.copy(); } m_anim.setDuration(1000); m_anim.setStartValue(0); m_anim.setEndValue(int(track_imgs.size() -1)); m_anim.setLoopCount(-1); m_anim.start(); connect(&m_anim, SIGNAL(valueChanged(QVariant)), SLOT(update())); } void TrackWidget::paintEvent(QPaintEvent *event) { QPainter painter(this); painter.drawPixmap(0, 0, track_imgs[m_anim.currentValue().toInt()]); } // Spinner Spinner::Spinner(QWidget *parent) : QWidget(parent) { QGridLayout *main_layout = new QGridLayout(this); main_layout->setSpacing(0); main_layout->setMargin(200); main_layout->addWidget(new TrackWidget(this), 0, 0, Qt::AlignHCenter | Qt::AlignVCenter); text = new QLabel(); text->setWordWrap(true); text->setVisible(false); text->setAlignment(Qt::AlignCenter); main_layout->addWidget(text, 1, 0, Qt::AlignHCenter); progress_bar = new QProgressBar(); progress_bar->setRange(5, 100); progress_bar->setTextVisible(false); progress_bar->setVisible(false); progress_bar->setFixedHeight(20); main_layout->addWidget(progress_bar, 1, 0, Qt::AlignHCenter); setStyleSheet(R"( Spinner { background-color: black; } QLabel { color: white; font-size: 80px; background-color: transparent; } QProgressBar { background-color: #373737; width: 1000px; border solid white; border-radius: 10px; } QProgressBar::chunk { border-radius: 10px; background-color: white; } )"); notifier = new QSocketNotifier(fileno(stdin), QSocketNotifier::Read); QObject::connect(notifier, &QSocketNotifier::activated, this, &Spinner::update); } void Spinner::update(int n) { std::string line; std::getline(std::cin, line); if (line.length()) { bool number = std::all_of(line.begin(), line.end(), ::isdigit); text->setVisible(!number); progress_bar->setVisible(number); text->setText(QString::fromStdString(line)); if (number) { progress_bar->setValue(std::stoi(line)); } } } int main(int argc, char *argv[]) { initApp(argc, argv); QApplication a(argc, argv); Spinner spinner; setMainWindow(&spinner); return a.exec(); }
2301_81045437/openpilot
selfdrive/ui/qt/spinner.cc
C++
mit
3,338
#pragma once

#include <array>

#include <QLabel>
#include <QPixmap>
#include <QProgressBar>
#include <QSocketNotifier>
#include <QVariantAnimation>
#include <QWidget>

constexpr int spinner_fps = 30;
constexpr QSize spinner_size = QSize(360, 360);

class TrackWidget : public QWidget {
  Q_OBJECT

public:
  TrackWidget(QWidget *parent = nullptr);

private:
  void paintEvent(QPaintEvent *event) override;
  std::array<QPixmap, spinner_fps> track_imgs;
  QVariantAnimation m_anim;
};

class Spinner : public QWidget {
  Q_OBJECT

public:
  explicit Spinner(QWidget *parent = 0);

private:
  QLabel *text;
  QProgressBar *progress_bar;
  QSocketNotifier *notifier;

public slots:
  void update(int n);
};
2301_81045437/openpilot
selfdrive/ui/qt/spinner.h
C++
mit
691
#include <QApplication>
#include <QGridLayout>
#include <QLabel>
#include <QPushButton>
#include <QScrollBar>
#include <QVBoxLayout>
#include <QWidget>

#include "system/hardware/hw.h"
#include "selfdrive/ui/qt/util.h"
#include "selfdrive/ui/qt/qt_window.h"
#include "selfdrive/ui/qt/widgets/scrollview.h"

int main(int argc, char *argv[]) {
  initApp(argc, argv);
  QApplication a(argc, argv);
  QWidget window;
  setMainWindow(&window);

  QGridLayout *main_layout = new QGridLayout(&window);
  main_layout->setMargin(50);

  QLabel *label = new QLabel(argv[1]);
  label->setWordWrap(true);
  label->setSizePolicy(QSizePolicy::Preferred, QSizePolicy::MinimumExpanding);

  ScrollView *scroll = new ScrollView(label);
  scroll->setVerticalScrollBarPolicy(Qt::ScrollBarAsNeeded);
  main_layout->addWidget(scroll, 0, 0, Qt::AlignTop);

  // Scroll to the bottom
  QObject::connect(scroll->verticalScrollBar(), &QAbstractSlider::rangeChanged, [=]() {
    scroll->verticalScrollBar()->setValue(scroll->verticalScrollBar()->maximum());
  });

  QPushButton *btn = new QPushButton();
#ifdef __aarch64__
  btn->setText(QObject::tr("Reboot"));
  QObject::connect(btn, &QPushButton::clicked, [=]() {
    Hardware::reboot();
  });
#else
  btn->setText(QObject::tr("Exit"));
  QObject::connect(btn, &QPushButton::clicked, &a, &QApplication::quit);
#endif
  main_layout->addWidget(btn, 0, 0, Qt::AlignRight | Qt::AlignBottom);

  window.setStyleSheet(R"(
    * {
      outline: none;
      color: white;
      background-color: black;
      font-size: 60px;
    }
    QPushButton {
      padding: 50px;
      padding-right: 100px;
      padding-left: 100px;
      border: 2px solid white;
      border-radius: 20px;
      margin-right: 40px;
    }
  )");

  return a.exec();
}
2301_81045437/openpilot
selfdrive/ui/qt/text.cc
C++
mit
1,742
#include "selfdrive/ui/qt/util.h" #include <map> #include <string> #include <vector> #include <QApplication> #include <QDir> #include <QFile> #include <QFileInfo> #include <QHash> #include <QJsonDocument> #include <QJsonObject> #include <QLayoutItem> #include <QStyleOption> #include <QPainterPath> #include <QTextStream> #include <QtXml/QDomDocument> #include "common/swaglog.h" #include "system/hardware/hw.h" QString getVersion() { static QString version = QString::fromStdString(Params().get("Version")); return version; } QString getBrand() { return QObject::tr("openpilot"); } QString getUserAgent() { return "openpilot-" + getVersion(); } std::optional<QString> getDongleId() { std::string id = Params().get("DongleId"); if (!id.empty() && (id != "UnregisteredDevice")) { return QString::fromStdString(id); } else { return {}; } } QMap<QString, QString> getSupportedLanguages() { QFile f(":/languages.json"); f.open(QIODevice::ReadOnly | QIODevice::Text); QString val = f.readAll(); QJsonObject obj = QJsonDocument::fromJson(val.toUtf8()).object(); QMap<QString, QString> map; for (auto key : obj.keys()) { map[key] = obj[key].toString(); } return map; } QString timeAgo(const QDateTime &date) { int diff = date.secsTo(QDateTime::currentDateTimeUtc()); QString s; if (diff < 60) { s = QObject::tr("now"); } else if (diff < 60 * 60) { int minutes = diff / 60; s = QObject::tr("%n minute(s) ago", "", minutes); } else if (diff < 60 * 60 * 24) { int hours = diff / (60 * 60); s = QObject::tr("%n hour(s) ago", "", hours); } else if (diff < 3600 * 24 * 7) { int days = diff / (60 * 60 * 24); s = QObject::tr("%n day(s) ago", "", days); } else { s = date.date().toString(); } return s; } void setQtSurfaceFormat() { QSurfaceFormat fmt; #ifdef __APPLE__ fmt.setVersion(3, 2); fmt.setProfile(QSurfaceFormat::OpenGLContextProfile::CoreProfile); fmt.setRenderableType(QSurfaceFormat::OpenGL); #else fmt.setRenderableType(QSurfaceFormat::OpenGLES); #endif fmt.setSamples(16); fmt.setStencilBufferSize(1); QSurfaceFormat::setDefaultFormat(fmt); } void sigTermHandler(int s) { std::signal(s, SIG_DFL); qApp->quit(); } void initApp(int argc, char *argv[], bool disable_hidpi) { Hardware::set_display_power(true); Hardware::set_brightness(65); // setup signal handlers to exit gracefully std::signal(SIGINT, sigTermHandler); std::signal(SIGTERM, sigTermHandler); QString app_dir; #ifdef __APPLE__ // Get the devicePixelRatio, and scale accordingly to maintain 1:1 rendering QApplication tmp(argc, argv); app_dir = QCoreApplication::applicationDirPath(); if (disable_hidpi) { qputenv("QT_SCALE_FACTOR", QString::number(1.0 / tmp.devicePixelRatio()).toLocal8Bit()); } #else app_dir = QFileInfo(util::readlink("/proc/self/exe").c_str()).path(); #endif qputenv("QT_DBL_CLICK_DIST", QByteArray::number(150)); // ensure the current dir matches the exectuable's directory QDir::setCurrent(app_dir); setQtSurfaceFormat(); } void swagLogMessageHandler(QtMsgType type, const QMessageLogContext &context, const QString &msg) { static std::map<QtMsgType, int> levels = { {QtMsgType::QtDebugMsg, CLOUDLOG_DEBUG}, {QtMsgType::QtInfoMsg, CLOUDLOG_INFO}, {QtMsgType::QtWarningMsg, CLOUDLOG_WARNING}, {QtMsgType::QtCriticalMsg, CLOUDLOG_ERROR}, {QtMsgType::QtSystemMsg, CLOUDLOG_ERROR}, {QtMsgType::QtFatalMsg, CLOUDLOG_CRITICAL}, }; std::string file, function; if (context.file != nullptr) file = context.file; if (context.function != nullptr) function = context.function; auto bts = msg.toUtf8(); cloudlog_e(levels[type], file.c_str(), context.line, function.c_str(), "%s", 
bts.constData()); } QWidget* topWidget(QWidget* widget) { while (widget->parentWidget() != nullptr) widget=widget->parentWidget(); return widget; } QPixmap loadPixmap(const QString &fileName, const QSize &size, Qt::AspectRatioMode aspectRatioMode) { if (size.isEmpty()) { return QPixmap(fileName); } else { return QPixmap(fileName).scaled(size, aspectRatioMode, Qt::SmoothTransformation); } } void drawRoundedRect(QPainter &painter, const QRectF &rect, qreal xRadiusTop, qreal yRadiusTop, qreal xRadiusBottom, qreal yRadiusBottom){ qreal w_2 = rect.width() / 2; qreal h_2 = rect.height() / 2; xRadiusTop = 100 * qMin(xRadiusTop, w_2) / w_2; yRadiusTop = 100 * qMin(yRadiusTop, h_2) / h_2; xRadiusBottom = 100 * qMin(xRadiusBottom, w_2) / w_2; yRadiusBottom = 100 * qMin(yRadiusBottom, h_2) / h_2; qreal x = rect.x(); qreal y = rect.y(); qreal w = rect.width(); qreal h = rect.height(); qreal rxx2Top = w*xRadiusTop/100; qreal ryy2Top = h*yRadiusTop/100; qreal rxx2Bottom = w*xRadiusBottom/100; qreal ryy2Bottom = h*yRadiusBottom/100; QPainterPath path; path.arcMoveTo(x, y, rxx2Top, ryy2Top, 180); path.arcTo(x, y, rxx2Top, ryy2Top, 180, -90); path.arcTo(x+w-rxx2Top, y, rxx2Top, ryy2Top, 90, -90); path.arcTo(x+w-rxx2Bottom, y+h-ryy2Bottom, rxx2Bottom, ryy2Bottom, 0, -90); path.arcTo(x, y+h-ryy2Bottom, rxx2Bottom, ryy2Bottom, 270, -90); path.closeSubpath(); painter.drawPath(path); } QColor interpColor(float xv, std::vector<float> xp, std::vector<QColor> fp) { assert(xp.size() == fp.size()); int N = xp.size(); int hi = 0; while (hi < N and xv > xp[hi]) hi++; int low = hi - 1; if (hi == N && xv > xp[low]) { return fp[fp.size() - 1]; } else if (hi == 0){ return fp[0]; } else { return QColor( (xv - xp[low]) * (fp[hi].red() - fp[low].red()) / (xp[hi] - xp[low]) + fp[low].red(), (xv - xp[low]) * (fp[hi].green() - fp[low].green()) / (xp[hi] - xp[low]) + fp[low].green(), (xv - xp[low]) * (fp[hi].blue() - fp[low].blue()) / (xp[hi] - xp[low]) + fp[low].blue(), (xv - xp[low]) * (fp[hi].alpha() - fp[low].alpha()) / (xp[hi] - xp[low]) + fp[low].alpha()); } } static QHash<QString, QByteArray> load_bootstrap_icons() { QHash<QString, QByteArray> icons; QFile f(":/bootstrap-icons.svg"); if (f.open(QIODevice::ReadOnly | QIODevice::Text)) { QDomDocument xml; xml.setContent(&f); QDomNode n = xml.documentElement().firstChild(); while (!n.isNull()) { QDomElement e = n.toElement(); if (!e.isNull() && e.hasAttribute("id")) { QString svg_str; QTextStream stream(&svg_str); n.save(stream, 0); svg_str.replace("<symbol", "<svg"); svg_str.replace("</symbol>", "</svg>"); icons[e.attribute("id")] = svg_str.toUtf8(); } n = n.nextSibling(); } } return icons; } QPixmap bootstrapPixmap(const QString &id) { static QHash<QString, QByteArray> icons = load_bootstrap_icons(); QPixmap pixmap; if (auto it = icons.find(id); it != icons.end()) { pixmap.loadFromData(it.value(), "svg"); } return pixmap; } bool hasLongitudinalControl(const cereal::CarParams::Reader &car_params) { // Using the experimental longitudinal toggle, returns whether longitudinal control // will be active without needing a restart of openpilot return car_params.getExperimentalLongitudinalAvailable() ? 
Params().getBool("ExperimentalLongitudinalEnabled") : car_params.getOpenpilotLongitudinalControl(); } // ParamWatcher ParamWatcher::ParamWatcher(QObject *parent) : QObject(parent) { watcher = new QFileSystemWatcher(this); QObject::connect(watcher, &QFileSystemWatcher::fileChanged, this, &ParamWatcher::fileChanged); } void ParamWatcher::fileChanged(const QString &path) { auto param_name = QFileInfo(path).fileName(); auto param_value = QString::fromStdString(params.get(param_name.toStdString())); auto it = params_hash.find(param_name); bool content_changed = (it == params_hash.end()) || (it.value() != param_value); params_hash[param_name] = param_value; // emit signal when the content changes. if (content_changed) { emit paramChanged(param_name, param_value); } } void ParamWatcher::addParam(const QString &param_name) { watcher->addPath(QString::fromStdString(params.getParamPath(param_name.toStdString()))); }
2301_81045437/openpilot
selfdrive/ui/qt/util.cc
C++
mit
8,167
#pragma once

#include <optional>
#include <vector>

#include <QDateTime>
#include <QFileSystemWatcher>
#include <QPainter>
#include <QPixmap>
#include <QSurfaceFormat>
#include <QWidget>

#include "cereal/gen/cpp/car.capnp.h"
#include "common/params.h"

QString getVersion();
QString getBrand();
QString getUserAgent();
std::optional<QString> getDongleId();
QMap<QString, QString> getSupportedLanguages();
void setQtSurfaceFormat();
void sigTermHandler(int s);
QString timeAgo(const QDateTime &date);
void swagLogMessageHandler(QtMsgType type, const QMessageLogContext &context, const QString &msg);
void initApp(int argc, char *argv[], bool disable_hidpi = true);
QWidget* topWidget(QWidget* widget);
QPixmap loadPixmap(const QString &fileName, const QSize &size = {}, Qt::AspectRatioMode aspectRatioMode = Qt::KeepAspectRatio);
QPixmap bootstrapPixmap(const QString &id);
void drawRoundedRect(QPainter &painter, const QRectF &rect, qreal xRadiusTop, qreal yRadiusTop, qreal xRadiusBottom, qreal yRadiusBottom);
QColor interpColor(float xv, std::vector<float> xp, std::vector<QColor> fp);
bool hasLongitudinalControl(const cereal::CarParams::Reader &car_params);

struct InterFont : public QFont {
  InterFont(int pixel_size, QFont::Weight weight = QFont::Normal) : QFont("Inter") {
    setPixelSize(pixel_size);
    setWeight(weight);
  }
};

class ParamWatcher : public QObject {
  Q_OBJECT

public:
  ParamWatcher(QObject *parent);
  void addParam(const QString &param_name);

signals:
  void paramChanged(const QString &param_name, const QString &param_value);

private:
  void fileChanged(const QString &path);

  QFileSystemWatcher *watcher;
  QHash<QString, QString> params_hash;
  Params params;
};
2301_81045437/openpilot
selfdrive/ui/qt/util.h
C++
mit
1,710
#include "selfdrive/ui/qt/widgets/cameraview.h" #ifdef __APPLE__ #include <OpenGL/gl3.h> #else #include <GLES3/gl3.h> #endif #include <cmath> #include <set> #include <string> #include <utility> #include <QOpenGLBuffer> #include <QOffscreenSurface> namespace { const char frame_vertex_shader[] = #ifdef __APPLE__ "#version 330 core\n" #else "#version 300 es\n" #endif "layout(location = 0) in vec4 aPosition;\n" "layout(location = 1) in vec2 aTexCoord;\n" "uniform mat4 uTransform;\n" "out vec2 vTexCoord;\n" "void main() {\n" " gl_Position = uTransform * aPosition;\n" " vTexCoord = aTexCoord;\n" "}\n"; const char frame_fragment_shader[] = #ifdef QCOM2 "#version 300 es\n" "#extension GL_OES_EGL_image_external_essl3 : enable\n" "precision mediump float;\n" "uniform samplerExternalOES uTexture;\n" "in vec2 vTexCoord;\n" "out vec4 colorOut;\n" "void main() {\n" " colorOut = texture(uTexture, vTexCoord);\n" // gamma to improve worst case visibility when dark " colorOut.rgb = pow(colorOut.rgb, vec3(1.0/1.28));\n" "}\n"; #else #ifdef __APPLE__ "#version 330 core\n" #else "#version 300 es\n" "precision mediump float;\n" #endif "uniform sampler2D uTextureY;\n" "uniform sampler2D uTextureUV;\n" "in vec2 vTexCoord;\n" "out vec4 colorOut;\n" "void main() {\n" " float y = texture(uTextureY, vTexCoord).r;\n" " vec2 uv = texture(uTextureUV, vTexCoord).rg - 0.5;\n" " float r = y + 1.402 * uv.y;\n" " float g = y - 0.344 * uv.x - 0.714 * uv.y;\n" " float b = y + 1.772 * uv.x;\n" " colorOut = vec4(r, g, b, 1.0);\n" "}\n"; #endif mat4 get_driver_view_transform(int screen_width, int screen_height, int stream_width, int stream_height) { const float driver_view_ratio = 2.0; const float yscale = stream_height * driver_view_ratio / stream_width; const float xscale = yscale*screen_height/screen_width*stream_width/stream_height; mat4 transform = (mat4){{ xscale, 0.0, 0.0, 0.0, 0.0, yscale, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, }}; return transform; } mat4 get_fit_view_transform(float widget_aspect_ratio, float frame_aspect_ratio) { float zx = 1, zy = 1; if (frame_aspect_ratio > widget_aspect_ratio) { zy = widget_aspect_ratio / frame_aspect_ratio; } else { zx = frame_aspect_ratio / widget_aspect_ratio; } const mat4 frame_transform = {{ zx, 0.0, 0.0, 0.0, 0.0, zy, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, }}; return frame_transform; } } // namespace CameraWidget::CameraWidget(std::string stream_name, VisionStreamType type, bool zoom, QWidget* parent) : stream_name(stream_name), active_stream_type(type), requested_stream_type(type), zoomed_view(zoom), QOpenGLWidget(parent) { setAttribute(Qt::WA_OpaquePaintEvent); qRegisterMetaType<std::set<VisionStreamType>>("availableStreams"); QObject::connect(this, &CameraWidget::vipcThreadConnected, this, &CameraWidget::vipcConnected, Qt::BlockingQueuedConnection); QObject::connect(this, &CameraWidget::vipcThreadFrameReceived, this, &CameraWidget::vipcFrameReceived, Qt::QueuedConnection); QObject::connect(this, &CameraWidget::vipcAvailableStreamsUpdated, this, &CameraWidget::availableStreamsUpdated, Qt::QueuedConnection); } CameraWidget::~CameraWidget() { makeCurrent(); stopVipcThread(); if (isValid()) { glDeleteVertexArrays(1, &frame_vao); glDeleteBuffers(1, &frame_vbo); glDeleteBuffers(1, &frame_ibo); glDeleteBuffers(2, textures); } doneCurrent(); } // Qt uses device-independent pixels, depending on platform this may be // different to what OpenGL uses int CameraWidget::glWidth() { return width() * devicePixelRatio(); } int CameraWidget::glHeight() { return height() 
* devicePixelRatio(); } void CameraWidget::initializeGL() { initializeOpenGLFunctions(); program = std::make_unique<QOpenGLShaderProgram>(context()); bool ret = program->addShaderFromSourceCode(QOpenGLShader::Vertex, frame_vertex_shader); assert(ret); ret = program->addShaderFromSourceCode(QOpenGLShader::Fragment, frame_fragment_shader); assert(ret); program->link(); GLint frame_pos_loc = program->attributeLocation("aPosition"); GLint frame_texcoord_loc = program->attributeLocation("aTexCoord"); auto [x1, x2, y1, y2] = requested_stream_type == VISION_STREAM_DRIVER ? std::tuple(0.f, 1.f, 1.f, 0.f) : std::tuple(1.f, 0.f, 1.f, 0.f); const uint8_t frame_indicies[] = {0, 1, 2, 0, 2, 3}; const float frame_coords[4][4] = { {-1.0, -1.0, x2, y1}, // bl {-1.0, 1.0, x2, y2}, // tl { 1.0, 1.0, x1, y2}, // tr { 1.0, -1.0, x1, y1}, // br }; glGenVertexArrays(1, &frame_vao); glBindVertexArray(frame_vao); glGenBuffers(1, &frame_vbo); glBindBuffer(GL_ARRAY_BUFFER, frame_vbo); glBufferData(GL_ARRAY_BUFFER, sizeof(frame_coords), frame_coords, GL_STATIC_DRAW); glEnableVertexAttribArray(frame_pos_loc); glVertexAttribPointer(frame_pos_loc, 2, GL_FLOAT, GL_FALSE, sizeof(frame_coords[0]), (const void *)0); glEnableVertexAttribArray(frame_texcoord_loc); glVertexAttribPointer(frame_texcoord_loc, 2, GL_FLOAT, GL_FALSE, sizeof(frame_coords[0]), (const void *)(sizeof(float) * 2)); glGenBuffers(1, &frame_ibo); glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, frame_ibo); glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(frame_indicies), frame_indicies, GL_STATIC_DRAW); glBindBuffer(GL_ARRAY_BUFFER, 0); glBindVertexArray(0); glUseProgram(program->programId()); #ifdef QCOM2 glUniform1i(program->uniformLocation("uTexture"), 0); #else glGenTextures(2, textures); glUniform1i(program->uniformLocation("uTextureY"), 0); glUniform1i(program->uniformLocation("uTextureUV"), 1); #endif } void CameraWidget::showEvent(QShowEvent *event) { if (!vipc_thread) { clearFrames(); vipc_thread = new QThread(); connect(vipc_thread, &QThread::started, [=]() { vipcThread(); }); connect(vipc_thread, &QThread::finished, vipc_thread, &QObject::deleteLater); vipc_thread->start(); } } void CameraWidget::stopVipcThread() { makeCurrent(); if (vipc_thread) { vipc_thread->requestInterruption(); vipc_thread->quit(); vipc_thread->wait(); vipc_thread = nullptr; } #ifdef QCOM2 EGLDisplay egl_display = eglGetCurrentDisplay(); assert(egl_display != EGL_NO_DISPLAY); for (auto &pair : egl_images) { eglDestroyImageKHR(egl_display, pair.second); assert(eglGetError() == EGL_SUCCESS); } egl_images.clear(); #endif } void CameraWidget::availableStreamsUpdated(std::set<VisionStreamType> streams) { available_streams = streams; } void CameraWidget::updateFrameMat() { int w = glWidth(), h = glHeight(); if (zoomed_view) { if (active_stream_type == VISION_STREAM_DRIVER) { if (stream_width > 0 && stream_height > 0) { frame_mat = get_driver_view_transform(w, h, stream_width, stream_height); } } else { // Project point at "infinity" to compute x and y offsets // to ensure this ends up in the middle of the screen // for narrow come and a little lower for wide cam. // TODO: use proper perspective transform? 
if (active_stream_type == VISION_STREAM_WIDE_ROAD) { intrinsic_matrix = ECAM_INTRINSIC_MATRIX; zoom = 2.0; } else { intrinsic_matrix = FCAM_INTRINSIC_MATRIX; zoom = 1.1; } const vec3 inf = {{1000., 0., 0.}}; const vec3 Ep = matvecmul3(calibration, inf); const vec3 Kep = matvecmul3(intrinsic_matrix, Ep); float x_offset_ = (Kep.v[0] / Kep.v[2] - intrinsic_matrix.v[2]) * zoom; float y_offset_ = (Kep.v[1] / Kep.v[2] - intrinsic_matrix.v[5]) * zoom; float max_x_offset = intrinsic_matrix.v[2] * zoom - w / 2 - 5; float max_y_offset = intrinsic_matrix.v[5] * zoom - h / 2 - 5; x_offset = std::clamp(x_offset_, -max_x_offset, max_x_offset); y_offset = std::clamp(y_offset_, -max_y_offset, max_y_offset); float zx = zoom * 2 * intrinsic_matrix.v[2] / w; float zy = zoom * 2 * intrinsic_matrix.v[5] / h; const mat4 frame_transform = {{ zx, 0.0, 0.0, -x_offset / w * 2, 0.0, zy, 0.0, y_offset / h * 2, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, }}; frame_mat = frame_transform; } } else if (stream_width > 0 && stream_height > 0) { // fit frame to widget size float widget_aspect_ratio = (float)w / h; float frame_aspect_ratio = (float)stream_width / stream_height; frame_mat = get_fit_view_transform(widget_aspect_ratio, frame_aspect_ratio); } } void CameraWidget::updateCalibration(const mat3 &calib) { calibration = calib; } void CameraWidget::paintGL() { glClearColor(bg.redF(), bg.greenF(), bg.blueF(), bg.alphaF()); glClear(GL_STENCIL_BUFFER_BIT | GL_COLOR_BUFFER_BIT); std::lock_guard lk(frame_lock); if (frames.empty()) return; int frame_idx = frames.size() - 1; // Always draw latest frame until sync logic is more stable // for (frame_idx = 0; frame_idx < frames.size() - 1; frame_idx++) { // if (frames[frame_idx].first == draw_frame_id) break; // } // Log duplicate/dropped frames if (frames[frame_idx].first == prev_frame_id) { qDebug() << "Drawing same frame twice" << frames[frame_idx].first; } else if (frames[frame_idx].first != prev_frame_id + 1) { qDebug() << "Skipped frame" << frames[frame_idx].first; } prev_frame_id = frames[frame_idx].first; VisionBuf *frame = frames[frame_idx].second; assert(frame != nullptr); updateFrameMat(); glViewport(0, 0, glWidth(), glHeight()); glBindVertexArray(frame_vao); glUseProgram(program->programId()); glPixelStorei(GL_UNPACK_ALIGNMENT, 1); #ifdef QCOM2 // no frame copy glActiveTexture(GL_TEXTURE0); glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, egl_images[frame->idx]); assert(glGetError() == GL_NO_ERROR); #else // fallback to copy glPixelStorei(GL_UNPACK_ROW_LENGTH, stream_stride); glActiveTexture(GL_TEXTURE0); glBindTexture(GL_TEXTURE_2D, textures[0]); glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, stream_width, stream_height, GL_RED, GL_UNSIGNED_BYTE, frame->y); assert(glGetError() == GL_NO_ERROR); glPixelStorei(GL_UNPACK_ROW_LENGTH, stream_stride/2); glActiveTexture(GL_TEXTURE0 + 1); glBindTexture(GL_TEXTURE_2D, textures[1]); glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, stream_width/2, stream_height/2, GL_RG, GL_UNSIGNED_BYTE, frame->uv); assert(glGetError() == GL_NO_ERROR); #endif glUniformMatrix4fv(program->uniformLocation("uTransform"), 1, GL_TRUE, frame_mat.v); glEnableVertexAttribArray(0); glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, (const void *)0); glDisableVertexAttribArray(0); glBindVertexArray(0); glBindTexture(GL_TEXTURE_2D, 0); glActiveTexture(GL_TEXTURE0); glPixelStorei(GL_UNPACK_ALIGNMENT, 4); glPixelStorei(GL_UNPACK_ROW_LENGTH, 0); } void CameraWidget::vipcConnected(VisionIpcClient *vipc_client) { makeCurrent(); stream_width = vipc_client->buffers[0].width; 
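// the rest of this handler caches the stream geometry and wires the buffers into OpenGL: on QCOM2
// each VisionIPC buffer is imported as an EGL image via its dma-buf fd (zero copy), otherwise R8/RG8
// textures are allocated for the NV12 planes and paintGL re-uploads them with glTexSubImage2D per frame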
stream_height = vipc_client->buffers[0].height; stream_stride = vipc_client->buffers[0].stride; #ifdef QCOM2 EGLDisplay egl_display = eglGetCurrentDisplay(); assert(egl_display != EGL_NO_DISPLAY); for (auto &pair : egl_images) { eglDestroyImageKHR(egl_display, pair.second); } egl_images.clear(); for (int i = 0; i < vipc_client->num_buffers; i++) { // import buffers into OpenGL int fd = dup(vipc_client->buffers[i].fd); // eglDestroyImageKHR will close, so duplicate EGLint img_attrs[] = { EGL_WIDTH, (int)vipc_client->buffers[i].width, EGL_HEIGHT, (int)vipc_client->buffers[i].height, EGL_LINUX_DRM_FOURCC_EXT, DRM_FORMAT_NV12, EGL_DMA_BUF_PLANE0_FD_EXT, fd, EGL_DMA_BUF_PLANE0_OFFSET_EXT, 0, EGL_DMA_BUF_PLANE0_PITCH_EXT, (int)vipc_client->buffers[i].stride, EGL_DMA_BUF_PLANE1_FD_EXT, fd, EGL_DMA_BUF_PLANE1_OFFSET_EXT, (int)vipc_client->buffers[i].uv_offset, EGL_DMA_BUF_PLANE1_PITCH_EXT, (int)vipc_client->buffers[i].stride, EGL_NONE }; egl_images[i] = eglCreateImageKHR(egl_display, EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, 0, img_attrs); assert(eglGetError() == EGL_SUCCESS); } #else glBindTexture(GL_TEXTURE_2D, textures[0]); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, stream_width, stream_height, 0, GL_RED, GL_UNSIGNED_BYTE, nullptr); assert(glGetError() == GL_NO_ERROR); glBindTexture(GL_TEXTURE_2D, textures[1]); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); glTexImage2D(GL_TEXTURE_2D, 0, GL_RG8, stream_width/2, stream_height/2, 0, GL_RG, GL_UNSIGNED_BYTE, nullptr); assert(glGetError() == GL_NO_ERROR); #endif } void CameraWidget::vipcFrameReceived() { update(); } void CameraWidget::vipcThread() { VisionStreamType cur_stream = requested_stream_type; std::unique_ptr<VisionIpcClient> vipc_client; VisionIpcBufExtra meta_main = {0}; while (!QThread::currentThread()->isInterruptionRequested()) { if (!vipc_client || cur_stream != requested_stream_type) { clearFrames(); qDebug().nospace() << "connecting to stream " << requested_stream_type << ", was connected to " << cur_stream; cur_stream = requested_stream_type; vipc_client.reset(new VisionIpcClient(stream_name, cur_stream, false)); } active_stream_type = cur_stream; if (!vipc_client->connected) { clearFrames(); auto streams = VisionIpcClient::getAvailableStreams(stream_name, false); if (streams.empty()) { QThread::msleep(100); continue; } emit vipcAvailableStreamsUpdated(streams); if (!vipc_client->connect(false)) { QThread::msleep(100); continue; } emit vipcThreadConnected(vipc_client.get()); } if (VisionBuf *buf = vipc_client->recv(&meta_main, 1000)) { { std::lock_guard lk(frame_lock); frames.push_back(std::make_pair(meta_main.frame_id, buf)); while (frames.size() > FRAME_BUFFER_SIZE) { frames.pop_front(); } } emit vipcThreadFrameReceived(); } else { if (!isVisible()) { vipc_client->connected = false; } } } } void CameraWidget::clearFrames() { std::lock_guard lk(frame_lock); frames.clear(); available_streams.clear(); }
2301_81045437/openpilot
selfdrive/ui/qt/widgets/cameraview.cc
C++
mit
14,854
#pragma once #include <deque> #include <map> #include <memory> #include <mutex> #include <set> #include <string> #include <utility> #include <QOpenGLFunctions> #include <QOpenGLShaderProgram> #include <QOpenGLWidget> #include <QThread> #ifdef QCOM2 #define EGL_EGLEXT_PROTOTYPES #define EGL_NO_X11 #define GL_TEXTURE_EXTERNAL_OES 0x8D65 #include <EGL/egl.h> #include <EGL/eglext.h> #include <drm/drm_fourcc.h> #endif #include "cereal/visionipc/visionipc_client.h" #include "system/camerad/cameras/camera_common.h" #include "selfdrive/ui/ui.h" const int FRAME_BUFFER_SIZE = 5; static_assert(FRAME_BUFFER_SIZE <= YUV_BUFFER_COUNT); class CameraWidget : public QOpenGLWidget, protected QOpenGLFunctions { Q_OBJECT public: using QOpenGLWidget::QOpenGLWidget; explicit CameraWidget(std::string stream_name, VisionStreamType stream_type, bool zoom, QWidget* parent = nullptr); ~CameraWidget(); void setBackgroundColor(const QColor &color) { bg = color; } void setFrameId(int frame_id) { draw_frame_id = frame_id; } void setStreamType(VisionStreamType type) { requested_stream_type = type; } VisionStreamType getStreamType() { return active_stream_type; } void stopVipcThread(); signals: void clicked(); void vipcThreadConnected(VisionIpcClient *); void vipcThreadFrameReceived(); void vipcAvailableStreamsUpdated(std::set<VisionStreamType>); protected: void paintGL() override; void initializeGL() override; void resizeGL(int w, int h) override { updateFrameMat(); } void showEvent(QShowEvent *event) override; void mouseReleaseEvent(QMouseEvent *event) override { emit clicked(); } virtual void updateFrameMat(); void updateCalibration(const mat3 &calib); void vipcThread(); void clearFrames(); int glWidth(); int glHeight(); bool zoomed_view; GLuint frame_vao, frame_vbo, frame_ibo; GLuint textures[2]; mat4 frame_mat = {}; std::unique_ptr<QOpenGLShaderProgram> program; QColor bg = QColor("#000000"); #ifdef QCOM2 std::map<int, EGLImageKHR> egl_images; #endif std::string stream_name; int stream_width = 0; int stream_height = 0; int stream_stride = 0; std::atomic<VisionStreamType> active_stream_type; std::atomic<VisionStreamType> requested_stream_type; std::set<VisionStreamType> available_streams; QThread *vipc_thread = nullptr; // Calibration float x_offset = 0; float y_offset = 0; float zoom = 1.0; mat3 calibration = DEFAULT_CALIBRATION; mat3 intrinsic_matrix = FCAM_INTRINSIC_MATRIX; std::recursive_mutex frame_lock; std::deque<std::pair<uint32_t, VisionBuf*>> frames; uint32_t draw_frame_id = 0; uint32_t prev_frame_id = 0; protected slots: void vipcConnected(VisionIpcClient *vipc_client); void vipcFrameReceived(); void availableStreamsUpdated(std::set<VisionStreamType> streams); }; Q_DECLARE_METATYPE(std::set<VisionStreamType>);
2301_81045437/openpilot
selfdrive/ui/qt/widgets/cameraview.h
C++
mit
2,863
#include "selfdrive/ui/qt/widgets/controls.h" #include <QPainter> #include <QStyleOption> AbstractControl::AbstractControl(const QString &title, const QString &desc, const QString &icon, QWidget *parent) : QFrame(parent) { QVBoxLayout *main_layout = new QVBoxLayout(this); main_layout->setMargin(0); hlayout = new QHBoxLayout; hlayout->setMargin(0); hlayout->setSpacing(20); // left icon icon_label = new QLabel(this); hlayout->addWidget(icon_label); if (!icon.isEmpty()) { icon_pixmap = QPixmap(icon).scaledToWidth(80, Qt::SmoothTransformation); icon_label->setPixmap(icon_pixmap); icon_label->setSizePolicy(QSizePolicy(QSizePolicy::Fixed, QSizePolicy::Fixed)); } icon_label->setVisible(!icon.isEmpty()); // title title_label = new QPushButton(title); title_label->setFixedHeight(120); title_label->setStyleSheet("font-size: 50px; font-weight: 400; text-align: left; border: none;"); hlayout->addWidget(title_label, 1); // value next to control button value = new ElidedLabel(); value->setAlignment(Qt::AlignRight | Qt::AlignVCenter); value->setStyleSheet("color: #aaaaaa"); hlayout->addWidget(value); main_layout->addLayout(hlayout); // description description = new QLabel(desc); description->setContentsMargins(40, 20, 40, 20); description->setStyleSheet("font-size: 40px; color: grey"); description->setWordWrap(true); description->setVisible(false); main_layout->addWidget(description); connect(title_label, &QPushButton::clicked, [=]() { if (!description->isVisible()) { emit showDescriptionEvent(); } if (!description->text().isEmpty()) { description->setVisible(!description->isVisible()); } }); main_layout->addStretch(); } void AbstractControl::hideEvent(QHideEvent *e) { if (description != nullptr) { description->hide(); } } // controls ButtonControl::ButtonControl(const QString &title, const QString &text, const QString &desc, QWidget *parent) : AbstractControl(title, desc, "", parent) { btn.setText(text); btn.setStyleSheet(R"( QPushButton { padding: 0; border-radius: 50px; font-size: 35px; font-weight: 500; color: #E4E4E4; background-color: #393939; } QPushButton:pressed { background-color: #4a4a4a; } QPushButton:disabled { color: #33E4E4E4; } )"); btn.setFixedSize(250, 100); QObject::connect(&btn, &QPushButton::clicked, this, &ButtonControl::clicked); hlayout->addWidget(&btn); } // ElidedLabel ElidedLabel::ElidedLabel(QWidget *parent) : ElidedLabel({}, parent) {} ElidedLabel::ElidedLabel(const QString &text, QWidget *parent) : QLabel(text.trimmed(), parent) { setSizePolicy(QSizePolicy::Preferred, QSizePolicy::Preferred); setMinimumWidth(1); } void ElidedLabel::resizeEvent(QResizeEvent* event) { QLabel::resizeEvent(event); lastText_ = elidedText_ = ""; } void ElidedLabel::paintEvent(QPaintEvent *event) { const QString curText = text(); if (curText != lastText_) { elidedText_ = fontMetrics().elidedText(curText, Qt::ElideRight, contentsRect().width()); lastText_ = curText; } QPainter painter(this); drawFrame(&painter); QStyleOption opt; opt.initFrom(this); style()->drawItemText(&painter, contentsRect(), alignment(), opt.palette, isEnabled(), elidedText_, foregroundRole()); } // ParamControl ParamControl::ParamControl(const QString &param, const QString &title, const QString &desc, const QString &icon, QWidget *parent) : ToggleControl(title, desc, icon, false, parent) { key = param.toStdString(); QObject::connect(this, &ParamControl::toggleFlipped, this, &ParamControl::toggleClicked); } void ParamControl::toggleClicked(bool state) { auto do_confirm = [this]() { QString content("<body><h2 style=\"text-align: 
center;\">" + title_label->text() + "</h2><br>" "<p style=\"text-align: center; margin: 0 128px; font-size: 50px;\">" + getDescription() + "</p></body>"); return ConfirmationDialog(content, tr("Enable"), tr("Cancel"), true, this).exec(); }; bool confirmed = store_confirm && params.getBool(key + "Confirmed"); if (!confirm || confirmed || !state || do_confirm()) { if (store_confirm && state) params.putBool(key + "Confirmed", true); params.putBool(key, state); setIcon(state); } else { toggle.togglePosition(); } }
2301_81045437/openpilot
selfdrive/ui/qt/widgets/controls.cc
C++
mit
4,387
#pragma once #include <string> #include <vector> #include <QButtonGroup> #include <QFrame> #include <QHBoxLayout> #include <QLabel> #include <QPainter> #include <QPushButton> #include "common/params.h" #include "selfdrive/ui/qt/widgets/input.h" #include "selfdrive/ui/qt/widgets/toggle.h" class ElidedLabel : public QLabel { Q_OBJECT public: explicit ElidedLabel(QWidget *parent = 0); explicit ElidedLabel(const QString &text, QWidget *parent = 0); signals: void clicked(); protected: void paintEvent(QPaintEvent *event) override; void resizeEvent(QResizeEvent* event) override; void mouseReleaseEvent(QMouseEvent *event) override { if (rect().contains(event->pos())) { emit clicked(); } } QString lastText_, elidedText_; }; class AbstractControl : public QFrame { Q_OBJECT public: void setDescription(const QString &desc) { if (description) description->setText(desc); } void setTitle(const QString &title) { title_label->setText(title); } void setValue(const QString &val) { value->setText(val); } const QString getDescription() { return description->text(); } QLabel *icon_label; QPixmap icon_pixmap; public slots: void showDescription() { description->setVisible(true); } signals: void showDescriptionEvent(); protected: AbstractControl(const QString &title, const QString &desc = "", const QString &icon = "", QWidget *parent = nullptr); void hideEvent(QHideEvent *e) override; QHBoxLayout *hlayout; QPushButton *title_label; private: ElidedLabel *value; QLabel *description = nullptr; }; // widget to display a value class LabelControl : public AbstractControl { Q_OBJECT public: LabelControl(const QString &title, const QString &text = "", const QString &desc = "", QWidget *parent = nullptr) : AbstractControl(title, desc, "", parent) { label.setText(text); label.setAlignment(Qt::AlignRight | Qt::AlignVCenter); hlayout->addWidget(&label); } void setText(const QString &text) { label.setText(text); } private: ElidedLabel label; }; // widget for a button with a label class ButtonControl : public AbstractControl { Q_OBJECT public: ButtonControl(const QString &title, const QString &text, const QString &desc = "", QWidget *parent = nullptr); inline void setText(const QString &text) { btn.setText(text); } inline QString text() const { return btn.text(); } signals: void clicked(); public slots: void setEnabled(bool enabled) { btn.setEnabled(enabled); } private: QPushButton btn; }; class ToggleControl : public AbstractControl { Q_OBJECT public: ToggleControl(const QString &title, const QString &desc = "", const QString &icon = "", const bool state = false, QWidget *parent = nullptr) : AbstractControl(title, desc, icon, parent) { toggle.setFixedSize(150, 100); if (state) { toggle.togglePosition(); } hlayout->addWidget(&toggle); QObject::connect(&toggle, &Toggle::stateChanged, this, &ToggleControl::toggleFlipped); } void setEnabled(bool enabled) { toggle.setEnabled(enabled); toggle.update(); } signals: void toggleFlipped(bool state); protected: Toggle toggle; }; // widget to toggle params class ParamControl : public ToggleControl { Q_OBJECT public: ParamControl(const QString &param, const QString &title, const QString &desc, const QString &icon, QWidget *parent = nullptr); void setConfirmation(bool _confirm, bool _store_confirm) { confirm = _confirm; store_confirm = _store_confirm; } void setActiveIcon(const QString &icon) { active_icon_pixmap = QPixmap(icon).scaledToWidth(80, Qt::SmoothTransformation); } void refresh() { bool state = params.getBool(key); if (state != toggle.on) { toggle.togglePosition(); setIcon(state); 
} } void showEvent(QShowEvent *event) override { refresh(); } private: void toggleClicked(bool state); void setIcon(bool state) { if (state && !active_icon_pixmap.isNull()) { icon_label->setPixmap(active_icon_pixmap); } else if (!icon_pixmap.isNull()) { icon_label->setPixmap(icon_pixmap); } } std::string key; Params params; QPixmap active_icon_pixmap; bool confirm = false; bool store_confirm = false; }; class ButtonParamControl : public AbstractControl { Q_OBJECT public: ButtonParamControl(const QString &param, const QString &title, const QString &desc, const QString &icon, const std::vector<QString> &button_texts, const int minimum_button_width = 225) : AbstractControl(title, desc, icon) { const QString style = R"( QPushButton { border-radius: 50px; font-size: 40px; font-weight: 500; height:100px; padding: 0 25 0 25; color: #E4E4E4; background-color: #393939; } QPushButton:pressed { background-color: #4a4a4a; } QPushButton:checked:enabled { background-color: #33Ab4C; } QPushButton:disabled { color: #33E4E4E4; } )"; key = param.toStdString(); int value = atoi(params.get(key).c_str()); button_group = new QButtonGroup(this); button_group->setExclusive(true); for (int i = 0; i < button_texts.size(); i++) { QPushButton *button = new QPushButton(button_texts[i], this); button->setCheckable(true); button->setChecked(i == value); button->setStyleSheet(style); button->setMinimumWidth(minimum_button_width); hlayout->addWidget(button); button_group->addButton(button, i); } QObject::connect(button_group, QOverload<int>::of(&QButtonGroup::buttonClicked), [=](int id) { params.put(key, std::to_string(id)); }); } void setEnabled(bool enable) { for (auto btn : button_group->buttons()) { btn->setEnabled(enable); } } void setCheckedButton(int id) { button_group->button(id)->setChecked(true); } void refresh() { int value = atoi(params.get(key).c_str()); button_group->button(value)->setChecked(true); } void showEvent(QShowEvent *event) override { refresh(); } private: std::string key; Params params; QButtonGroup *button_group; }; class ListWidget : public QWidget { Q_OBJECT public: explicit ListWidget(QWidget *parent = 0) : QWidget(parent), outer_layout(this) { outer_layout.setMargin(0); outer_layout.setSpacing(0); outer_layout.addLayout(&inner_layout); inner_layout.setMargin(0); inner_layout.setSpacing(25); // default spacing is 25 outer_layout.addStretch(); } inline void addItem(QWidget *w) { inner_layout.addWidget(w); } inline void addItem(QLayout *layout) { inner_layout.addLayout(layout); } inline void setSpacing(int spacing) { inner_layout.setSpacing(spacing); } private: void paintEvent(QPaintEvent *) override { QPainter p(this); p.setPen(Qt::gray); for (int i = 0; i < inner_layout.count() - 1; ++i) { QWidget *widget = inner_layout.itemAt(i)->widget(); if (widget == nullptr || widget->isVisible()) { QRect r = inner_layout.itemAt(i)->geometry(); int bottom = r.bottom() + inner_layout.spacing() / 2; p.drawLine(r.left() + 40, bottom, r.right() - 40, bottom); } } } QVBoxLayout outer_layout; QVBoxLayout inner_layout; }; // convenience class for wrapping layouts class LayoutWidget : public QWidget { Q_OBJECT public: LayoutWidget(QLayout *l, QWidget *parent = nullptr) : QWidget(parent) { setLayout(l); } };
2301_81045437/openpilot
selfdrive/ui/qt/widgets/controls.h
C++
mit
7,533
#include "selfdrive/ui/qt/widgets/input.h" #include <QPushButton> #include <QButtonGroup> #include "system/hardware/hw.h" #include "selfdrive/ui/qt/util.h" #include "selfdrive/ui/qt/qt_window.h" #include "selfdrive/ui/qt/widgets/scrollview.h" DialogBase::DialogBase(QWidget *parent) : QDialog(parent) { Q_ASSERT(parent != nullptr); parent->installEventFilter(this); setStyleSheet(R"( * { outline: none; color: white; font-family: Inter; } DialogBase { background-color: black; } QPushButton { height: 160; font-size: 55px; font-weight: 400; border-radius: 10px; color: white; background-color: #333333; } QPushButton:pressed { background-color: #444444; } )"); } bool DialogBase::eventFilter(QObject *o, QEvent *e) { if (o == parent() && e->type() == QEvent::Hide) { reject(); } return QDialog::eventFilter(o, e); } int DialogBase::exec() { setMainWindow(this); return QDialog::exec(); } InputDialog::InputDialog(const QString &title, QWidget *parent, const QString &subtitle, bool secret) : DialogBase(parent) { main_layout = new QVBoxLayout(this); main_layout->setContentsMargins(50, 55, 50, 50); main_layout->setSpacing(0); // build header QHBoxLayout *header_layout = new QHBoxLayout(); QVBoxLayout *vlayout = new QVBoxLayout; header_layout->addLayout(vlayout); label = new QLabel(title, this); label->setStyleSheet("font-size: 90px; font-weight: bold;"); vlayout->addWidget(label, 1, Qt::AlignTop | Qt::AlignLeft); if (!subtitle.isEmpty()) { sublabel = new QLabel(subtitle, this); sublabel->setStyleSheet("font-size: 55px; font-weight: light; color: #BDBDBD;"); vlayout->addWidget(sublabel, 1, Qt::AlignTop | Qt::AlignLeft); } QPushButton* cancel_btn = new QPushButton(tr("Cancel")); cancel_btn->setFixedSize(386, 125); cancel_btn->setStyleSheet(R"( QPushButton { font-size: 48px; border-radius: 10px; color: #E4E4E4; background-color: #333333; } QPushButton:pressed { background-color: #444444; } )"); header_layout->addWidget(cancel_btn, 0, Qt::AlignRight); QObject::connect(cancel_btn, &QPushButton::clicked, this, &InputDialog::reject); QObject::connect(cancel_btn, &QPushButton::clicked, this, &InputDialog::cancel); main_layout->addLayout(header_layout); // text box main_layout->addStretch(2); QWidget *textbox_widget = new QWidget; textbox_widget->setObjectName("textbox"); QHBoxLayout *textbox_layout = new QHBoxLayout(textbox_widget); textbox_layout->setContentsMargins(50, 0, 50, 0); textbox_widget->setStyleSheet(R"( #textbox { margin-left: 50px; margin-right: 50px; border-radius: 0; border-bottom: 3px solid #BDBDBD; } * { border: none; font-size: 80px; font-weight: light; background-color: transparent; } )"); line = new QLineEdit(); line->setStyleSheet("lineedit-password-character: 8226; lineedit-password-mask-delay: 1500;"); textbox_layout->addWidget(line, 1); if (secret) { eye_btn = new QPushButton(); eye_btn->setCheckable(true); eye_btn->setFixedSize(150, 120); QObject::connect(eye_btn, &QPushButton::toggled, [=](bool checked) { if (checked) { eye_btn->setIcon(QIcon(ASSET_PATH + "img_eye_closed.svg")); eye_btn->setIconSize(QSize(81, 54)); line->setEchoMode(QLineEdit::Password); } else { eye_btn->setIcon(QIcon(ASSET_PATH + "img_eye_open.svg")); eye_btn->setIconSize(QSize(81, 44)); line->setEchoMode(QLineEdit::Normal); } }); eye_btn->toggle(); eye_btn->setChecked(false); textbox_layout->addWidget(eye_btn); } main_layout->addWidget(textbox_widget, 0, Qt::AlignBottom); main_layout->addSpacing(25); k = new Keyboard(this); QObject::connect(k, &Keyboard::emitEnter, this, &InputDialog::handleEnter); QObject::connect(k, 
&Keyboard::emitBackspace, this, [=]() { line->backspace(); }); QObject::connect(k, &Keyboard::emitKey, this, [=](const QString &key) { line->insert(key.left(1)); }); main_layout->addWidget(k, 2, Qt::AlignBottom); } QString InputDialog::getText(const QString &prompt, QWidget *parent, const QString &subtitle, bool secret, int minLength, const QString &defaultText) { InputDialog d = InputDialog(prompt, parent, subtitle, secret); d.line->setText(defaultText); d.setMinLength(minLength); const int ret = d.exec(); return ret ? d.text() : QString(); } QString InputDialog::text() { return line->text(); } void InputDialog::show() { setMainWindow(this); } void InputDialog::handleEnter() { if (line->text().length() >= minLength) { done(QDialog::Accepted); emitText(line->text()); } else { setMessage(tr("Need at least %n character(s)!", "", minLength), false); } } void InputDialog::setMessage(const QString &message, bool clearInputField) { label->setText(message); if (clearInputField) { line->setText(""); } } void InputDialog::setMinLength(int length) { minLength = length; } // ConfirmationDialog ConfirmationDialog::ConfirmationDialog(const QString &prompt_text, const QString &confirm_text, const QString &cancel_text, const bool rich, QWidget *parent) : DialogBase(parent) { QFrame *container = new QFrame(this); container->setStyleSheet(R"( QFrame { background-color: #1B1B1B; color: #C9C9C9; } #confirm_btn { background-color: #465BEA; } #confirm_btn:pressed { background-color: #3049F4; } )"); QVBoxLayout *main_layout = new QVBoxLayout(container); main_layout->setContentsMargins(32, rich ? 32 : 120, 32, 32); QLabel *prompt = new QLabel(prompt_text, this); prompt->setWordWrap(true); prompt->setAlignment(rich ? Qt::AlignLeft : Qt::AlignHCenter); prompt->setStyleSheet((rich ? "font-size: 42px; font-weight: light;" : "font-size: 70px; font-weight: bold;") + QString(" margin: 45px;")); main_layout->addWidget(rich ? (QWidget*)new ScrollView(prompt, this) : (QWidget*)prompt, 1, Qt::AlignTop); // cancel + confirm buttons QHBoxLayout *btn_layout = new QHBoxLayout(); btn_layout->setSpacing(30); main_layout->addLayout(btn_layout); if (cancel_text.length()) { QPushButton* cancel_btn = new QPushButton(cancel_text); btn_layout->addWidget(cancel_btn); QObject::connect(cancel_btn, &QPushButton::clicked, this, &ConfirmationDialog::reject); } if (confirm_text.length()) { QPushButton* confirm_btn = new QPushButton(confirm_text); confirm_btn->setObjectName("confirm_btn"); btn_layout->addWidget(confirm_btn); QObject::connect(confirm_btn, &QPushButton::clicked, this, &ConfirmationDialog::accept); } QVBoxLayout *outer_layout = new QVBoxLayout(this); int margin = rich ? 
100 : 200; outer_layout->setContentsMargins(margin, margin, margin, margin); outer_layout->addWidget(container); } bool ConfirmationDialog::alert(const QString &prompt_text, QWidget *parent) { ConfirmationDialog d = ConfirmationDialog(prompt_text, tr("Ok"), "", false, parent); return d.exec(); } bool ConfirmationDialog::confirm(const QString &prompt_text, const QString &confirm_text, QWidget *parent) { ConfirmationDialog d = ConfirmationDialog(prompt_text, confirm_text, tr("Cancel"), false, parent); return d.exec(); } bool ConfirmationDialog::rich(const QString &prompt_text, QWidget *parent) { ConfirmationDialog d = ConfirmationDialog(prompt_text, tr("Ok"), "", true, parent); return d.exec(); } // MultiOptionDialog MultiOptionDialog::MultiOptionDialog(const QString &prompt_text, const QStringList &l, const QString &current, QWidget *parent) : DialogBase(parent) { QFrame *container = new QFrame(this); container->setStyleSheet(R"( QFrame { background-color: #1B1B1B; } #confirm_btn[enabled="false"] { background-color: #2B2B2B; } #confirm_btn:enabled { background-color: #465BEA; } #confirm_btn:enabled:pressed { background-color: #3049F4; } )"); QVBoxLayout *main_layout = new QVBoxLayout(container); main_layout->setContentsMargins(55, 50, 55, 50); QLabel *title = new QLabel(prompt_text, this); title->setStyleSheet("font-size: 70px; font-weight: 500;"); main_layout->addWidget(title, 0, Qt::AlignLeft | Qt::AlignTop); main_layout->addSpacing(25); QWidget *listWidget = new QWidget(this); QVBoxLayout *listLayout = new QVBoxLayout(listWidget); listLayout->setSpacing(20); listWidget->setStyleSheet(R"( QPushButton { height: 135; padding: 0px 50px; text-align: left; font-size: 55px; font-weight: 300; border-radius: 10px; background-color: #4F4F4F; } QPushButton:checked { background-color: #465BEA; } )"); QButtonGroup *group = new QButtonGroup(listWidget); group->setExclusive(true); QPushButton *confirm_btn = new QPushButton(tr("Select")); confirm_btn->setObjectName("confirm_btn"); confirm_btn->setEnabled(false); for (const QString &s : l) { QPushButton *selectionLabel = new QPushButton(s); selectionLabel->setCheckable(true); selectionLabel->setChecked(s == current); QObject::connect(selectionLabel, &QPushButton::toggled, [=](bool checked) { if (checked) selection = s; if (selection != current) { confirm_btn->setEnabled(true); } else { confirm_btn->setEnabled(false); } }); group->addButton(selectionLabel); listLayout->addWidget(selectionLabel); } // add stretch to keep buttons spaced correctly listLayout->addStretch(1); ScrollView *scroll_view = new ScrollView(listWidget, this); scroll_view->setVerticalScrollBarPolicy(Qt::ScrollBarAsNeeded); main_layout->addWidget(scroll_view); main_layout->addSpacing(35); // cancel + confirm buttons QHBoxLayout *blayout = new QHBoxLayout; main_layout->addLayout(blayout); blayout->setSpacing(50); QPushButton *cancel_btn = new QPushButton(tr("Cancel")); QObject::connect(cancel_btn, &QPushButton::clicked, this, &ConfirmationDialog::reject); QObject::connect(confirm_btn, &QPushButton::clicked, this, &ConfirmationDialog::accept); blayout->addWidget(cancel_btn); blayout->addWidget(confirm_btn); QVBoxLayout *outer_layout = new QVBoxLayout(this); outer_layout->setContentsMargins(50, 50, 50, 50); outer_layout->addWidget(container); } QString MultiOptionDialog::getSelection(const QString &prompt_text, const QStringList &l, const QString &current, QWidget *parent) { MultiOptionDialog d = MultiOptionDialog(prompt_text, l, current, parent); if (d.exec()) { return d.selection; } 
return ""; }
2301_81045437/openpilot
selfdrive/ui/qt/widgets/input.cc
C++
mit
10,716
#pragma once

#include <QDialog>
#include <QLabel>
#include <QLineEdit>
#include <QString>
#include <QVBoxLayout>
#include <QWidget>

#include "selfdrive/ui/qt/widgets/keyboard.h"

class DialogBase : public QDialog {
  Q_OBJECT

protected:
  DialogBase(QWidget *parent);
  bool eventFilter(QObject *o, QEvent *e) override;

public slots:
  int exec() override;
};

class InputDialog : public DialogBase {
  Q_OBJECT

public:
  explicit InputDialog(const QString &title, QWidget *parent, const QString &subtitle = "", bool secret = false);
  static QString getText(const QString &title, QWidget *parent, const QString &subtitle = "",
                         bool secret = false, int minLength = -1, const QString &defaultText = "");
  QString text();
  void setMessage(const QString &message, bool clearInputField = true);
  void setMinLength(int length);
  void show();

private:
  int minLength;
  QLineEdit *line;
  Keyboard *k;
  QLabel *label;
  QLabel *sublabel;
  QVBoxLayout *main_layout;
  QPushButton *eye_btn;

private slots:
  void handleEnter();

signals:
  void cancel();
  void emitText(const QString &text);
};

class ConfirmationDialog : public DialogBase {
  Q_OBJECT

public:
  explicit ConfirmationDialog(const QString &prompt_text, const QString &confirm_text,
                              const QString &cancel_text, const bool rich, QWidget* parent);
  static bool alert(const QString &prompt_text, QWidget *parent);
  static bool confirm(const QString &prompt_text, const QString &confirm_text, QWidget *parent);
  static bool rich(const QString &prompt_text, QWidget *parent);
};

class MultiOptionDialog : public DialogBase {
  Q_OBJECT

public:
  explicit MultiOptionDialog(const QString &prompt_text, const QStringList &l, const QString &current, QWidget *parent);
  static QString getSelection(const QString &prompt_text, const QStringList &l, const QString &current, QWidget *parent);
  QString selection;
};
2301_81045437/openpilot
selfdrive/ui/qt/widgets/input.h
C++
mit
1,941
#include "selfdrive/ui/qt/widgets/keyboard.h" #include <vector> #include <QButtonGroup> #include <QHBoxLayout> #include <QMap> #include <QTouchEvent> #include <QVBoxLayout> const QString BACKSPACE_KEY = "⌫"; const QString ENTER_KEY = "→"; const QMap<QString, int> KEY_STRETCH = {{" ", 5}, {ENTER_KEY, 2}}; const QStringList CONTROL_BUTTONS = {"↑", "↓", "ABC", "#+=", "123", BACKSPACE_KEY, ENTER_KEY}; const float key_spacing_vertical = 20; const float key_spacing_horizontal = 15; KeyButton::KeyButton(const QString &text, QWidget *parent) : QPushButton(text, parent) { setAttribute(Qt::WA_AcceptTouchEvents); setFocusPolicy(Qt::NoFocus); } bool KeyButton::event(QEvent *event) { if (event->type() == QEvent::TouchBegin || event->type() == QEvent::TouchEnd) { QTouchEvent *touchEvent = static_cast<QTouchEvent *>(event); if (!touchEvent->touchPoints().empty()) { const QEvent::Type mouseType = event->type() == QEvent::TouchBegin ? QEvent::MouseButtonPress : QEvent::MouseButtonRelease; QMouseEvent mouseEvent(mouseType, touchEvent->touchPoints().front().pos(), Qt::LeftButton, Qt::LeftButton, Qt::NoModifier); QPushButton::event(&mouseEvent); event->accept(); parentWidget()->update(); return true; } } return QPushButton::event(event); } KeyboardLayout::KeyboardLayout(QWidget* parent, const std::vector<QVector<QString>>& layout) : QWidget(parent) { QVBoxLayout* main_layout = new QVBoxLayout(this); main_layout->setMargin(0); main_layout->setSpacing(0); QButtonGroup* btn_group = new QButtonGroup(this); QObject::connect(btn_group, SIGNAL(buttonClicked(QAbstractButton*)), parent, SLOT(handleButton(QAbstractButton*))); for (const auto &s : layout) { QHBoxLayout *hlayout = new QHBoxLayout; hlayout->setSpacing(0); if (main_layout->count() == 1) { hlayout->addSpacing(90); } for (const QString &p : s) { KeyButton* btn = new KeyButton(p); if (p == BACKSPACE_KEY) { btn->setAutoRepeat(true); } else if (p == ENTER_KEY) { btn->setStyleSheet(R"( QPushButton { background-color: #465BEA; } QPushButton:pressed { background-color: #444444; } )"); } btn->setFixedHeight(135 + key_spacing_vertical); btn_group->addButton(btn); hlayout->addWidget(btn, KEY_STRETCH.value(p, 1)); } if (main_layout->count() == 1) { hlayout->addSpacing(90); } main_layout->addLayout(hlayout); } setStyleSheet(QString(R"( QPushButton { font-size: 75px; margin-left: %1px; margin-right: %1px; margin-top: %2px; margin-bottom: %2px; padding: 0px; border-radius: 10px; color: #dddddd; background-color: #444444; } QPushButton:pressed { background-color: #333333; } )").arg(key_spacing_vertical / 2).arg(key_spacing_horizontal / 2)); } Keyboard::Keyboard(QWidget *parent) : QFrame(parent) { main_layout = new QStackedLayout(this); main_layout->setMargin(0); // lowercase std::vector<QVector<QString>> lowercase = { {"q", "w", "e", "r", "t", "y", "u", "i", "o", "p"}, {"a", "s", "d", "f", "g", "h", "j", "k", "l"}, {"↑", "z", "x", "c", "v", "b", "n", "m", BACKSPACE_KEY}, {"123", " ", ".", ENTER_KEY}, }; main_layout->addWidget(new KeyboardLayout(this, lowercase)); // uppercase std::vector<QVector<QString>> uppercase = { {"Q", "W", "E", "R", "T", "Y", "U", "I", "O", "P"}, {"A", "S", "D", "F", "G", "H", "J", "K", "L"}, {"↓", "Z", "X", "C", "V", "B", "N", "M", BACKSPACE_KEY}, {"123", " ", ".", ENTER_KEY}, }; main_layout->addWidget(new KeyboardLayout(this, uppercase)); // numbers + specials std::vector<QVector<QString>> numbers = { {"1", "2", "3", "4", "5", "6", "7", "8", "9", "0"}, {"-", "/", ":", ";", "(", ")", "$", "&&", "@", "\""}, {"#+=", ".", ",", "?", "!", "`", 
BACKSPACE_KEY}, {"ABC", " ", ".", ENTER_KEY}, }; main_layout->addWidget(new KeyboardLayout(this, numbers)); // extra specials std::vector<QVector<QString>> specials = { {"[", "]", "{", "}", "#", "%", "^", "*", "+", "="}, {"_", "\\", "|", "~", "<", ">", "€", "£", "¥", "•"}, {"123", ".", ",", "?", "!", "'", BACKSPACE_KEY}, {"ABC", " ", ".", ENTER_KEY}, }; main_layout->addWidget(new KeyboardLayout(this, specials)); main_layout->setCurrentIndex(0); } void Keyboard::handleButton(QAbstractButton* btn) { const QString &key = btn->text(); if (CONTROL_BUTTONS.contains(key)) { if (key == "↓" || key == "ABC") { main_layout->setCurrentIndex(0); } else if (key == "↑") { main_layout->setCurrentIndex(1); } else if (key == "123") { main_layout->setCurrentIndex(2); } else if (key == "#+=") { main_layout->setCurrentIndex(3); } else if (key == ENTER_KEY) { main_layout->setCurrentIndex(0); emit emitEnter(); } else if (key == BACKSPACE_KEY) { emit emitBackspace(); } } else { if ("A" <= key && key <= "Z") { main_layout->setCurrentIndex(0); } emit emitKey(key); } }
2301_81045437/openpilot
selfdrive/ui/qt/widgets/keyboard.cc
C++
mit
5,183
#pragma once

#include <vector>

#include <QFrame>
#include <QPushButton>
#include <QStackedLayout>

class KeyButton : public QPushButton {
  Q_OBJECT

public:
  KeyButton(const QString &text, QWidget *parent = 0);
  bool event(QEvent *event) override;
};

class KeyboardLayout : public QWidget {
  Q_OBJECT

public:
  explicit KeyboardLayout(QWidget* parent, const std::vector<QVector<QString>>& layout);
};

class Keyboard : public QFrame {
  Q_OBJECT

public:
  explicit Keyboard(QWidget *parent = 0);

private:
  QStackedLayout* main_layout;

private slots:
  void handleButton(QAbstractButton* m_button);

signals:
  void emitKey(const QString &s);
  void emitBackspace();
  void emitEnter();
};
2301_81045437/openpilot
selfdrive/ui/qt/widgets/keyboard.h
C++
mit
701
#include "selfdrive/ui/qt/widgets/offroad_alerts.h" #include <algorithm> #include <string> #include <vector> #include <utility> #include <QHBoxLayout> #include <QJsonDocument> #include <QJsonObject> #include "common/util.h" #include "system/hardware/hw.h" #include "selfdrive/ui/qt/widgets/scrollview.h" AbstractAlert::AbstractAlert(bool hasRebootBtn, QWidget *parent) : QFrame(parent) { QVBoxLayout *main_layout = new QVBoxLayout(this); main_layout->setMargin(50); main_layout->setSpacing(30); QWidget *widget = new QWidget; scrollable_layout = new QVBoxLayout(widget); widget->setStyleSheet("background-color: transparent;"); main_layout->addWidget(new ScrollView(widget)); // bottom footer, dismiss + reboot buttons QHBoxLayout *footer_layout = new QHBoxLayout(); main_layout->addLayout(footer_layout); QPushButton *dismiss_btn = new QPushButton(tr("Close")); dismiss_btn->setFixedSize(400, 125); footer_layout->addWidget(dismiss_btn, 0, Qt::AlignBottom | Qt::AlignLeft); QObject::connect(dismiss_btn, &QPushButton::clicked, this, &AbstractAlert::dismiss); snooze_btn = new QPushButton(tr("Snooze Update")); snooze_btn->setVisible(false); snooze_btn->setFixedSize(550, 125); footer_layout->addWidget(snooze_btn, 0, Qt::AlignBottom | Qt::AlignRight); QObject::connect(snooze_btn, &QPushButton::clicked, [=]() { params.putBool("SnoozeUpdate", true); }); QObject::connect(snooze_btn, &QPushButton::clicked, this, &AbstractAlert::dismiss); snooze_btn->setStyleSheet(R"(color: white; background-color: #4F4F4F;)"); if (hasRebootBtn) { QPushButton *rebootBtn = new QPushButton(tr("Reboot and Update")); rebootBtn->setFixedSize(600, 125); footer_layout->addWidget(rebootBtn, 0, Qt::AlignBottom | Qt::AlignRight); QObject::connect(rebootBtn, &QPushButton::clicked, [=]() { Hardware::reboot(); }); } setStyleSheet(R"( * { font-size: 48px; color: white; } QFrame { border-radius: 30px; background-color: #393939; } QPushButton { color: black; font-weight: 500; border-radius: 30px; background-color: white; } )"); } int OffroadAlert::refresh() { // build widgets for each offroad alert on first refresh if (alerts.empty()) { QString json = util::read_file("../controls/lib/alerts_offroad.json").c_str(); QJsonObject obj = QJsonDocument::fromJson(json.toUtf8()).object(); // descending sort labels by severity std::vector<std::pair<std::string, int>> sorted; for (auto it = obj.constBegin(); it != obj.constEnd(); ++it) { sorted.push_back({it.key().toStdString(), it.value()["severity"].toInt()}); } std::sort(sorted.begin(), sorted.end(), [=](auto &l, auto &r) { return l.second > r.second; }); for (auto &[key, severity] : sorted) { QLabel *l = new QLabel(this); alerts[key] = l; l->setMargin(60); l->setWordWrap(true); l->setStyleSheet(QString("background-color: %1").arg(severity ? 
"#E22C2C" : "#292929")); scrollable_layout->addWidget(l); } scrollable_layout->addStretch(1); } int alertCount = 0; for (const auto &[key, label] : alerts) { QString text; std::string bytes = params.get(key); if (bytes.size()) { auto doc_par = QJsonDocument::fromJson(bytes.c_str()); text = tr(doc_par["text"].toString().toUtf8().data()); auto extra = doc_par["extra"].toString(); if (!extra.isEmpty()) { text = text.arg(extra); } } label->setText(text); label->setVisible(!text.isEmpty()); alertCount += !text.isEmpty(); } snooze_btn->setVisible(!alerts["Offroad_ConnectivityNeeded"]->text().isEmpty()); return alertCount; } UpdateAlert::UpdateAlert(QWidget *parent) : AbstractAlert(true, parent) { releaseNotes = new QLabel(this); releaseNotes->setWordWrap(true); releaseNotes->setAlignment(Qt::AlignTop); scrollable_layout->addWidget(releaseNotes); } bool UpdateAlert::refresh() { bool updateAvailable = params.getBool("UpdateAvailable"); if (updateAvailable) { releaseNotes->setText(params.get("UpdaterNewReleaseNotes").c_str()); } return updateAvailable; }
2301_81045437/openpilot
selfdrive/ui/qt/widgets/offroad_alerts.cc
C++
mit
4,184
#pragma once

#include <map>
#include <string>

#include <QLabel>
#include <QPushButton>
#include <QVBoxLayout>

#include "common/params.h"

class AbstractAlert : public QFrame {
  Q_OBJECT

protected:
  AbstractAlert(bool hasRebootBtn, QWidget *parent = nullptr);

  QPushButton *snooze_btn;
  QVBoxLayout *scrollable_layout;
  Params params;

signals:
  void dismiss();
};

class UpdateAlert : public AbstractAlert {
  Q_OBJECT

public:
  UpdateAlert(QWidget *parent = 0);
  bool refresh();

private:
  QLabel *releaseNotes = nullptr;
};

class OffroadAlert : public AbstractAlert {
  Q_OBJECT

public:
  explicit OffroadAlert(QWidget *parent = 0) : AbstractAlert(false, parent) {}
  int refresh();

private:
  std::map<std::string, QLabel*> alerts;
};
2301_81045437/openpilot
selfdrive/ui/qt/widgets/offroad_alerts.h
C++
mit
755
#include "selfdrive/ui/qt/widgets/prime.h" #include <QDebug> #include <QJsonDocument> #include <QJsonObject> #include <QLabel> #include <QPushButton> #include <QStackedWidget> #include <QTimer> #include <QVBoxLayout> #include <QrCode.hpp> #include "selfdrive/ui/qt/request_repeater.h" #include "selfdrive/ui/qt/util.h" #include "selfdrive/ui/qt/qt_window.h" #include "selfdrive/ui/qt/widgets/wifi.h" using qrcodegen::QrCode; PairingQRWidget::PairingQRWidget(QWidget* parent) : QWidget(parent) { timer = new QTimer(this); connect(timer, &QTimer::timeout, this, &PairingQRWidget::refresh); } void PairingQRWidget::showEvent(QShowEvent *event) { refresh(); timer->start(5 * 60 * 1000); device()->setOffroadBrightness(100); } void PairingQRWidget::hideEvent(QHideEvent *event) { timer->stop(); device()->setOffroadBrightness(BACKLIGHT_OFFROAD); } void PairingQRWidget::refresh() { QString pairToken = CommaApi::create_jwt({{"pair", true}}); QString qrString = "https://connect.comma.ai/?pair=" + pairToken; this->updateQrCode(qrString); update(); } void PairingQRWidget::updateQrCode(const QString &text) { QrCode qr = QrCode::encodeText(text.toUtf8().data(), QrCode::Ecc::LOW); qint32 sz = qr.getSize(); QImage im(sz, sz, QImage::Format_RGB32); QRgb black = qRgb(0, 0, 0); QRgb white = qRgb(255, 255, 255); for (int y = 0; y < sz; y++) { for (int x = 0; x < sz; x++) { im.setPixel(x, y, qr.getModule(x, y) ? black : white); } } // Integer division to prevent anti-aliasing int final_sz = ((width() / sz) - 1) * sz; img = QPixmap::fromImage(im.scaled(final_sz, final_sz, Qt::KeepAspectRatio), Qt::MonoOnly); } void PairingQRWidget::paintEvent(QPaintEvent *e) { QPainter p(this); p.fillRect(rect(), Qt::white); QSize s = (size() - img.size()) / 2; p.drawPixmap(s.width(), s.height(), img); } PairingPopup::PairingPopup(QWidget *parent) : DialogBase(parent) { QHBoxLayout *hlayout = new QHBoxLayout(this); hlayout->setContentsMargins(0, 0, 0, 0); hlayout->setSpacing(0); setStyleSheet("PairingPopup { background-color: #E0E0E0; }"); // text QVBoxLayout *vlayout = new QVBoxLayout(); vlayout->setContentsMargins(85, 70, 50, 70); vlayout->setSpacing(50); hlayout->addLayout(vlayout, 1); { QPushButton *close = new QPushButton(QIcon(":/icons/close.svg"), "", this); close->setIconSize(QSize(80, 80)); close->setStyleSheet("border: none;"); vlayout->addWidget(close, 0, Qt::AlignLeft); QObject::connect(close, &QPushButton::clicked, this, &QDialog::reject); vlayout->addSpacing(30); QLabel *title = new QLabel(tr("Pair your device to your comma account"), this); title->setStyleSheet("font-size: 75px; color: black;"); title->setWordWrap(true); vlayout->addWidget(title); QLabel *instructions = new QLabel(QString(R"( <ol type='1' style='margin-left: 15px;'> <li style='margin-bottom: 50px;'>%1</li> <li style='margin-bottom: 50px;'>%2</li> <li style='margin-bottom: 50px;'>%3</li> </ol> )").arg(tr("Go to https://connect.comma.ai on your phone")) .arg(tr("Click \"add new device\" and scan the QR code on the right")) .arg(tr("Bookmark connect.comma.ai to your home screen to use it like an app")), this); instructions->setStyleSheet("font-size: 47px; font-weight: bold; color: black;"); instructions->setWordWrap(true); vlayout->addWidget(instructions); vlayout->addStretch(); } // QR code PairingQRWidget *qr = new PairingQRWidget(this); hlayout->addWidget(qr, 1); } PrimeUserWidget::PrimeUserWidget(QWidget *parent) : QFrame(parent) { setObjectName("primeWidget"); QVBoxLayout *mainLayout = new QVBoxLayout(this); mainLayout->setContentsMargins(56, 40, 56, 
40); mainLayout->setSpacing(20); QLabel *subscribed = new QLabel(tr("✓ SUBSCRIBED")); subscribed->setStyleSheet("font-size: 41px; font-weight: bold; color: #86FF4E;"); mainLayout->addWidget(subscribed); QLabel *commaPrime = new QLabel(tr("comma prime")); commaPrime->setStyleSheet("font-size: 75px; font-weight: bold;"); mainLayout->addWidget(commaPrime); } PrimeAdWidget::PrimeAdWidget(QWidget* parent) : QFrame(parent) { QVBoxLayout *main_layout = new QVBoxLayout(this); main_layout->setContentsMargins(80, 90, 80, 60); main_layout->setSpacing(0); QLabel *upgrade = new QLabel(tr("Upgrade Now")); upgrade->setStyleSheet("font-size: 75px; font-weight: bold;"); main_layout->addWidget(upgrade, 0, Qt::AlignTop); main_layout->addSpacing(50); QLabel *description = new QLabel(tr("Become a comma prime member at connect.comma.ai")); description->setStyleSheet("font-size: 56px; font-weight: light; color: white;"); description->setWordWrap(true); main_layout->addWidget(description, 0, Qt::AlignTop); main_layout->addStretch(); QLabel *features = new QLabel(tr("PRIME FEATURES:")); features->setStyleSheet("font-size: 41px; font-weight: bold; color: #E5E5E5;"); main_layout->addWidget(features, 0, Qt::AlignBottom); main_layout->addSpacing(30); QVector<QString> bullets = {tr("Remote access"), tr("24/7 LTE connectivity"), tr("1 year of drive storage"), tr("Turn-by-turn navigation")}; for (auto &b : bullets) { const QString check = "<b><font color='#465BEA'>✓</font></b> "; QLabel *l = new QLabel(check + b); l->setAlignment(Qt::AlignLeft); l->setStyleSheet("font-size: 50px; margin-bottom: 15px;"); main_layout->addWidget(l, 0, Qt::AlignBottom); } setStyleSheet(R"( PrimeAdWidget { border-radius: 10px; background-color: #333333; } )"); } SetupWidget::SetupWidget(QWidget* parent) : QFrame(parent) { mainLayout = new QStackedWidget; // Unpaired, registration prompt layout QFrame* finishRegistration = new QFrame; finishRegistration->setObjectName("primeWidget"); QVBoxLayout* finishRegistationLayout = new QVBoxLayout(finishRegistration); finishRegistationLayout->setSpacing(38); finishRegistationLayout->setContentsMargins(64, 48, 64, 48); QLabel* registrationTitle = new QLabel(tr("Finish Setup")); registrationTitle->setStyleSheet("font-size: 75px; font-weight: bold;"); finishRegistationLayout->addWidget(registrationTitle); QLabel* registrationDescription = new QLabel(tr("Pair your device with comma connect (connect.comma.ai) and claim your comma prime offer.")); registrationDescription->setWordWrap(true); registrationDescription->setStyleSheet("font-size: 50px; font-weight: light;"); finishRegistationLayout->addWidget(registrationDescription); finishRegistationLayout->addStretch(); QPushButton* pair = new QPushButton(tr("Pair device")); pair->setStyleSheet(R"( QPushButton { font-size: 55px; font-weight: 500; border-radius: 10px; background-color: #465BEA; padding: 64px; } QPushButton:pressed { background-color: #3049F4; } )"); finishRegistationLayout->addWidget(pair); popup = new PairingPopup(this); QObject::connect(pair, &QPushButton::clicked, popup, &PairingPopup::exec); mainLayout->addWidget(finishRegistration); // build stacked layout QVBoxLayout *outer_layout = new QVBoxLayout(this); outer_layout->setContentsMargins(0, 0, 0, 0); outer_layout->addWidget(mainLayout); QWidget *content = new QWidget; QVBoxLayout *content_layout = new QVBoxLayout(content); content_layout->setContentsMargins(0, 0, 0, 0); content_layout->setSpacing(30); primeUser = new PrimeUserWidget; content_layout->addWidget(primeUser); WiFiPromptWidget 
*wifi_prompt = new WiFiPromptWidget; QObject::connect(wifi_prompt, &WiFiPromptWidget::openSettings, this, &SetupWidget::openSettings); content_layout->addWidget(wifi_prompt); content_layout->addStretch(); mainLayout->addWidget(content); primeUser->setVisible(uiState()->hasPrime()); mainLayout->setCurrentIndex(1); setStyleSheet(R"( #primeWidget { border-radius: 10px; background-color: #333333; } )"); // Retain size while hidden QSizePolicy sp_retain = sizePolicy(); sp_retain.setRetainSizeWhenHidden(true); setSizePolicy(sp_retain); // set up API requests if (auto dongleId = getDongleId()) { QString url = CommaApi::BASE_URL + "/v1.1/devices/" + *dongleId + "/"; RequestRepeater* repeater = new RequestRepeater(this, url, "ApiCache_Device", 5); QObject::connect(repeater, &RequestRepeater::requestDone, this, &SetupWidget::replyFinished); } } void SetupWidget::replyFinished(const QString &response, bool success) { if (!success) return; QJsonDocument doc = QJsonDocument::fromJson(response.toUtf8()); if (doc.isNull()) { qDebug() << "JSON Parse failed on getting pairing and prime status"; return; } QJsonObject json = doc.object(); bool is_paired = json["is_paired"].toBool(); PrimeType prime_type = static_cast<PrimeType>(json["prime_type"].toInt()); uiState()->setPrimeType(is_paired ? prime_type : PrimeType::UNPAIRED); if (!is_paired) { mainLayout->setCurrentIndex(0); } else { popup->reject(); primeUser->setVisible(uiState()->hasPrime()); mainLayout->setCurrentIndex(1); } }
2301_81045437/openpilot
selfdrive/ui/qt/widgets/prime.cc
C++
mit
9,182
#pragma once

#include <QLabel>
#include <QStackedWidget>
#include <QVBoxLayout>
#include <QWidget>

#include "selfdrive/ui/qt/widgets/input.h"

// pairing QR code
class PairingQRWidget : public QWidget {
  Q_OBJECT

public:
  explicit PairingQRWidget(QWidget* parent = 0);
  void paintEvent(QPaintEvent*) override;

private:
  QPixmap img;
  QTimer *timer;
  void updateQrCode(const QString &text);
  void showEvent(QShowEvent *event) override;
  void hideEvent(QHideEvent *event) override;

private slots:
  void refresh();
};

// pairing popup widget
class PairingPopup : public DialogBase {
  Q_OBJECT

public:
  explicit PairingPopup(QWidget* parent);
};

// widget for paired users with prime
class PrimeUserWidget : public QFrame {
  Q_OBJECT

public:
  explicit PrimeUserWidget(QWidget* parent = 0);
};

// widget for paired users without prime
class PrimeAdWidget : public QFrame {
  Q_OBJECT

public:
  explicit PrimeAdWidget(QWidget* parent = 0);
};

// container widget
class SetupWidget : public QFrame {
  Q_OBJECT

public:
  explicit SetupWidget(QWidget* parent = 0);

signals:
  void openSettings(int index = 0, const QString &param = "");

private:
  PairingPopup *popup;
  QStackedWidget *mainLayout;
  PrimeUserWidget *primeUser;

private slots:
  void replyFinished(const QString &response, bool success);
};
2301_81045437/openpilot
selfdrive/ui/qt/widgets/prime.h
C++
mit
1,332
#include "selfdrive/ui/qt/widgets/scrollview.h" #include <QScrollBar> #include <QScroller> // TODO: disable horizontal scrolling and resize ScrollView::ScrollView(QWidget *w, QWidget *parent) : QScrollArea(parent) { setWidget(w); setWidgetResizable(true); setVerticalScrollBarPolicy(Qt::ScrollBarAlwaysOff); setHorizontalScrollBarPolicy(Qt::ScrollBarAlwaysOff); setStyleSheet("background-color: transparent; border:none"); QString style = R"( QScrollBar:vertical { border: none; background: transparent; width: 10px; margin: 0; } QScrollBar::handle:vertical { min-height: 0px; border-radius: 5px; background-color: white; } QScrollBar::add-line:vertical, QScrollBar::sub-line:vertical { height: 0px; } QScrollBar::add-page:vertical, QScrollBar::sub-page:vertical { background: none; } )"; verticalScrollBar()->setStyleSheet(style); horizontalScrollBar()->setStyleSheet(style); QScroller *scroller = QScroller::scroller(this->viewport()); QScrollerProperties sp = scroller->scrollerProperties(); sp.setScrollMetric(QScrollerProperties::VerticalOvershootPolicy, QVariant::fromValue<QScrollerProperties::OvershootPolicy>(QScrollerProperties::OvershootAlwaysOff)); sp.setScrollMetric(QScrollerProperties::HorizontalOvershootPolicy, QVariant::fromValue<QScrollerProperties::OvershootPolicy>(QScrollerProperties::OvershootAlwaysOff)); sp.setScrollMetric(QScrollerProperties::MousePressEventDelay, 0.01); scroller->grabGesture(this->viewport(), QScroller::LeftMouseButtonGesture); scroller->setScrollerProperties(sp); } void ScrollView::hideEvent(QHideEvent *e) { verticalScrollBar()->setValue(0); }
2301_81045437/openpilot
selfdrive/ui/qt/widgets/scrollview.cc
C++
mit
1,720
#pragma once

#include <QScrollArea>

class ScrollView : public QScrollArea {
  Q_OBJECT

public:
  explicit ScrollView(QWidget *w = nullptr, QWidget *parent = nullptr);

protected:
  void hideEvent(QHideEvent *e) override;
};
2301_81045437/openpilot
selfdrive/ui/qt/widgets/scrollview.h
C++
mit
226
#include "selfdrive/ui/qt/widgets/ssh_keys.h" #include "common/params.h" #include "selfdrive/ui/qt/api.h" #include "selfdrive/ui/qt/widgets/input.h" SshControl::SshControl() : ButtonControl(tr("SSH Keys"), "", tr("Warning: This grants SSH access to all public keys in your GitHub settings. Never enter a GitHub username " "other than your own. A comma employee will NEVER ask you to add their GitHub username.")) { QObject::connect(this, &ButtonControl::clicked, [=]() { if (text() == tr("ADD")) { QString username = InputDialog::getText(tr("Enter your GitHub username"), this); if (username.length() > 0) { setText(tr("LOADING")); setEnabled(false); getUserKeys(username); } } else { params.remove("GithubUsername"); params.remove("GithubSshKeys"); refresh(); } }); refresh(); } void SshControl::refresh() { QString param = QString::fromStdString(params.get("GithubSshKeys")); if (param.length()) { setValue(QString::fromStdString(params.get("GithubUsername"))); setText(tr("REMOVE")); } else { setValue(""); setText(tr("ADD")); } setEnabled(true); } void SshControl::getUserKeys(const QString &username) { HttpRequest *request = new HttpRequest(this, false); QObject::connect(request, &HttpRequest::requestDone, [=](const QString &resp, bool success) { if (success) { if (!resp.isEmpty()) { params.put("GithubUsername", username.toStdString()); params.put("GithubSshKeys", resp.toStdString()); } else { ConfirmationDialog::alert(tr("Username '%1' has no keys on GitHub").arg(username), this); } } else { if (request->timeout()) { ConfirmationDialog::alert(tr("Request timed out"), this); } else { ConfirmationDialog::alert(tr("Username '%1' doesn't exist on GitHub").arg(username), this); } } refresh(); request->deleteLater(); }); request->sendRequest("https://github.com/" + username + ".keys"); }
2301_81045437/openpilot
selfdrive/ui/qt/widgets/ssh_keys.cc
C++
mit
2,059
#pragma once

#include <QPushButton>

#include "system/hardware/hw.h"
#include "selfdrive/ui/qt/widgets/controls.h"

// SSH enable toggle
class SshToggle : public ToggleControl {
  Q_OBJECT

public:
  SshToggle() : ToggleControl(tr("Enable SSH"), "", "", Hardware::get_ssh_enabled()) {
    QObject::connect(this, &SshToggle::toggleFlipped, [=](bool state) {
      Hardware::set_ssh_enabled(state);
    });
  }
};

// SSH key management widget
class SshControl : public ButtonControl {
  Q_OBJECT

public:
  SshControl();

private:
  Params params;

  void refresh();
  void getUserKeys(const QString &username);
};
2301_81045437/openpilot
selfdrive/ui/qt/widgets/ssh_keys.h
C++
mit
615
#include "selfdrive/ui/qt/widgets/toggle.h" #include <QPainter> Toggle::Toggle(QWidget *parent) : QAbstractButton(parent), _height(80), _height_rect(60), on(false), _anim(new QPropertyAnimation(this, "offset_circle", this)) { _radius = _height / 2; _x_circle = _radius; _y_circle = _radius; _y_rect = (_height - _height_rect)/2; circleColor = QColor(0xffffff); // placeholder green = QColor(0xffffff); // placeholder setEnabled(true); } void Toggle::paintEvent(QPaintEvent *e) { this->setFixedHeight(_height); QPainter p(this); p.setPen(Qt::NoPen); p.setRenderHint(QPainter::Antialiasing, true); // Draw toggle background left p.setBrush(green); p.drawRoundedRect(QRect(0, _y_rect, _x_circle + _radius, _height_rect), _height_rect/2, _height_rect/2); // Draw toggle background right p.setBrush(QColor(0x393939)); p.drawRoundedRect(QRect(_x_circle - _radius, _y_rect, width() - (_x_circle - _radius), _height_rect), _height_rect/2, _height_rect/2); // Draw toggle circle p.setBrush(circleColor); p.drawEllipse(QRectF(_x_circle - _radius, _y_circle - _radius, 2 * _radius, 2 * _radius)); } void Toggle::mouseReleaseEvent(QMouseEvent *e) { if (!enabled) { return; } const int left = _radius; const int right = width() - _radius; if ((_x_circle != left && _x_circle != right) || !this->rect().contains(e->localPos().toPoint())) { // If mouse release isn't in rect or animation is running, don't parse touch events return; } if (e->button() & Qt::LeftButton) { togglePosition(); emit stateChanged(on); } } void Toggle::togglePosition() { on = !on; const int left = _radius; const int right = width() - _radius; _anim->setStartValue(on ? left + immediateOffset : right - immediateOffset); _anim->setEndValue(on ? right : left); _anim->setDuration(animation_duration); _anim->start(); repaint(); } void Toggle::enterEvent(QEvent *e) { QAbstractButton::enterEvent(e); } bool Toggle::getEnabled() { return enabled; } void Toggle::setEnabled(bool value) { enabled = value; if (value) { circleColor.setRgb(0xfafafa); green.setRgb(0x33ab4c); } else { circleColor.setRgb(0x888888); green.setRgb(0x227722); } }
2301_81045437/openpilot
selfdrive/ui/qt/widgets/toggle.cc
C++
mit
2,228
#pragma once

#include <QAbstractButton>
#include <QMouseEvent>
#include <QPropertyAnimation>

class Toggle : public QAbstractButton {
  Q_OBJECT
  Q_PROPERTY(int offset_circle READ offset_circle WRITE set_offset_circle CONSTANT)

public:
  Toggle(QWidget* parent = nullptr);
  void togglePosition();
  bool on;
  int animation_duration = 150;
  int immediateOffset = 0;
  int offset_circle() const { return _x_circle; }
  void set_offset_circle(int o) {
    _x_circle = o;
    update();
  }
  bool getEnabled();
  void setEnabled(bool value);

protected:
  void paintEvent(QPaintEvent*) override;
  void mouseReleaseEvent(QMouseEvent*) override;
  void enterEvent(QEvent*) override;

private:
  QColor circleColor;
  QColor green;
  bool enabled = true;
  int _x_circle, _y_circle;
  int _height, _radius;
  int _height_rect, _y_rect;
  QPropertyAnimation *_anim = nullptr;

signals:
  void stateChanged(bool new_state);
};
2301_81045437/openpilot
selfdrive/ui/qt/widgets/toggle.h
C++
mit
932
#include "selfdrive/ui/qt/widgets/wifi.h" #include <QHBoxLayout> #include <QLabel> #include <QPixmap> #include <QPushButton> WiFiPromptWidget::WiFiPromptWidget(QWidget *parent) : QFrame(parent) { stack = new QStackedLayout(this); // Setup Wi-Fi QFrame *setup = new QFrame; QVBoxLayout *setup_layout = new QVBoxLayout(setup); setup_layout->setContentsMargins(56, 40, 56, 40); setup_layout->setSpacing(20); { QHBoxLayout *title_layout = new QHBoxLayout; title_layout->setSpacing(32); { QLabel *icon = new QLabel; QPixmap pixmap("../assets/offroad/icon_wifi_strength_full.svg"); icon->setPixmap(pixmap.scaledToWidth(80, Qt::SmoothTransformation)); title_layout->addWidget(icon); QLabel *title = new QLabel(tr("Setup Wi-Fi")); title->setStyleSheet("font-size: 64px; font-weight: 600;"); title_layout->addWidget(title); title_layout->addStretch(); } setup_layout->addLayout(title_layout); QLabel *desc = new QLabel(tr("Connect to Wi-Fi to upload driving data and help improve openpilot")); desc->setStyleSheet("font-size: 40px; font-weight: 400;"); desc->setWordWrap(true); setup_layout->addWidget(desc); QPushButton *settings_btn = new QPushButton(tr("Open Settings")); connect(settings_btn, &QPushButton::clicked, [=]() { emit openSettings(1); }); settings_btn->setStyleSheet(R"( QPushButton { font-size: 48px; font-weight: 500; border-radius: 10px; background-color: #465BEA; padding: 32px; } QPushButton:pressed { background-color: #3049F4; } )"); setup_layout->addWidget(settings_btn); } stack->addWidget(setup); // Uploading data QWidget *uploading = new QWidget; QVBoxLayout *uploading_layout = new QVBoxLayout(uploading); uploading_layout->setContentsMargins(64, 56, 64, 56); uploading_layout->setSpacing(36); { QHBoxLayout *title_layout = new QHBoxLayout; { QLabel *title = new QLabel(tr("Ready to upload")); title->setStyleSheet("font-size: 64px; font-weight: 600;"); title->setWordWrap(true); title->setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Minimum); title_layout->addWidget(title); title_layout->addStretch(); QLabel *icon = new QLabel; QPixmap pixmap("../assets/offroad/icon_wifi_uploading.svg"); icon->setPixmap(pixmap.scaledToWidth(120, Qt::SmoothTransformation)); title_layout->addWidget(icon); } uploading_layout->addLayout(title_layout); QLabel *desc = new QLabel(tr("Training data will be pulled periodically while your device is on Wi-Fi")); desc->setStyleSheet("font-size: 48px; font-weight: 400;"); desc->setWordWrap(true); uploading_layout->addWidget(desc); } stack->addWidget(uploading); setStyleSheet(R"( WiFiPromptWidget { background-color: #333333; border-radius: 10px; } )"); QObject::connect(uiState(), &UIState::uiUpdate, this, &WiFiPromptWidget::updateState); } void WiFiPromptWidget::updateState(const UIState &s) { if (!isVisible()) return; auto &sm = *(s.sm); auto network_type = sm["deviceState"].getDeviceState().getNetworkType(); auto uploading = network_type == cereal::DeviceState::NetworkType::WIFI || network_type == cereal::DeviceState::NetworkType::ETHERNET; stack->setCurrentIndex(uploading ? 1 : 0); }
2301_81045437/openpilot
selfdrive/ui/qt/widgets/wifi.cc
C++
mit
3,410
#pragma once

#include <QFrame>
#include <QStackedLayout>
#include <QWidget>

#include "selfdrive/ui/ui.h"

class WiFiPromptWidget : public QFrame {
  Q_OBJECT

public:
  explicit WiFiPromptWidget(QWidget* parent = 0);

signals:
  void openSettings(int index = 0, const QString &param = "");

public slots:
  void updateState(const UIState &s);

protected:
  QStackedLayout *stack;
};
2301_81045437/openpilot
selfdrive/ui/qt/widgets/wifi.h
C++
mit
385
#include "selfdrive/ui/qt/window.h" #include <QFontDatabase> #include "system/hardware/hw.h" MainWindow::MainWindow(QWidget *parent) : QWidget(parent) { main_layout = new QStackedLayout(this); main_layout->setMargin(0); homeWindow = new HomeWindow(this); main_layout->addWidget(homeWindow); QObject::connect(homeWindow, &HomeWindow::openSettings, this, &MainWindow::openSettings); QObject::connect(homeWindow, &HomeWindow::closeSettings, this, &MainWindow::closeSettings); settingsWindow = new SettingsWindow(this); main_layout->addWidget(settingsWindow); QObject::connect(settingsWindow, &SettingsWindow::closeSettings, this, &MainWindow::closeSettings); QObject::connect(settingsWindow, &SettingsWindow::reviewTrainingGuide, [=]() { onboardingWindow->showTrainingGuide(); main_layout->setCurrentWidget(onboardingWindow); }); QObject::connect(settingsWindow, &SettingsWindow::showDriverView, [=] { homeWindow->showDriverView(true); }); onboardingWindow = new OnboardingWindow(this); main_layout->addWidget(onboardingWindow); QObject::connect(onboardingWindow, &OnboardingWindow::onboardingDone, [=]() { main_layout->setCurrentWidget(homeWindow); }); if (!onboardingWindow->completed()) { main_layout->setCurrentWidget(onboardingWindow); } QObject::connect(uiState(), &UIState::offroadTransition, [=](bool offroad) { if (!offroad) { closeSettings(); } }); QObject::connect(device(), &Device::interactiveTimeout, [=]() { if (main_layout->currentWidget() == settingsWindow) { closeSettings(); } }); // load fonts QFontDatabase::addApplicationFont("../assets/fonts/Inter-Black.ttf"); QFontDatabase::addApplicationFont("../assets/fonts/Inter-Bold.ttf"); QFontDatabase::addApplicationFont("../assets/fonts/Inter-ExtraBold.ttf"); QFontDatabase::addApplicationFont("../assets/fonts/Inter-ExtraLight.ttf"); QFontDatabase::addApplicationFont("../assets/fonts/Inter-Medium.ttf"); QFontDatabase::addApplicationFont("../assets/fonts/Inter-Regular.ttf"); QFontDatabase::addApplicationFont("../assets/fonts/Inter-SemiBold.ttf"); QFontDatabase::addApplicationFont("../assets/fonts/Inter-Thin.ttf"); QFontDatabase::addApplicationFont("../assets/fonts/JetBrainsMono-Medium.ttf"); // no outline to prevent the focus rectangle setStyleSheet(R"( * { font-family: Inter; outline: none; } )"); setAttribute(Qt::WA_NoSystemBackground); } void MainWindow::openSettings(int index, const QString &param) { main_layout->setCurrentWidget(settingsWindow); settingsWindow->setCurrentPanel(index, param); } void MainWindow::closeSettings() { main_layout->setCurrentWidget(homeWindow); if (uiState()->scene.started) { homeWindow->showSidebar(false); } } bool MainWindow::eventFilter(QObject *obj, QEvent *event) { bool ignore = false; switch (event->type()) { case QEvent::TouchBegin: case QEvent::TouchUpdate: case QEvent::TouchEnd: case QEvent::MouseButtonPress: case QEvent::MouseMove: { // ignore events when device is awakened by resetInteractiveTimeout ignore = !device()->isAwake(); device()->resetInteractiveTimeout(); break; } default: break; } return ignore; }
2301_81045437/openpilot
selfdrive/ui/qt/window.cc
C++
mit
3,274
#pragma once

#include <QStackedLayout>
#include <QWidget>

#include "selfdrive/ui/qt/home.h"
#include "selfdrive/ui/qt/offroad/onboarding.h"
#include "selfdrive/ui/qt/offroad/settings.h"

class MainWindow : public QWidget {
  Q_OBJECT

public:
  explicit MainWindow(QWidget *parent = 0);

private:
  bool eventFilter(QObject *obj, QEvent *event) override;
  void openSettings(int index = 0, const QString &param = "");
  void closeSettings();

  QStackedLayout *main_layout;
  HomeWindow *homeWindow;
  SettingsWindow *settingsWindow;
  OnboardingWindow *onboardingWindow;
};
2301_81045437/openpilot
selfdrive/ui/qt/window.h
C++
mit
577
import math import numpy as np import time import wave from cereal import car, messaging from openpilot.common.basedir import BASEDIR from openpilot.common.filter_simple import FirstOrderFilter from openpilot.common.realtime import Ratekeeper from openpilot.common.retry import retry from openpilot.common.swaglog import cloudlog from openpilot.system import micd SAMPLE_RATE = 48000 SAMPLE_BUFFER = 4096 # (approx 100ms) MAX_VOLUME = 1.0 MIN_VOLUME = 0.1 CONTROLS_TIMEOUT = 5 # 5 seconds FILTER_DT = 1. / (micd.SAMPLE_RATE / micd.FFT_SAMPLES) AMBIENT_DB = 30 # DB where MIN_VOLUME is applied DB_SCALE = 30 # AMBIENT_DB + DB_SCALE is where MAX_VOLUME is applied AudibleAlert = car.CarControl.HUDControl.AudibleAlert sound_list: dict[int, tuple[str, int | None, float]] = { # AudibleAlert, file name, play count (none for infinite) AudibleAlert.engage: ("engage.wav", 1, MAX_VOLUME), AudibleAlert.disengage: ("disengage.wav", 1, MAX_VOLUME), AudibleAlert.refuse: ("refuse.wav", 1, MAX_VOLUME), AudibleAlert.prompt: ("prompt.wav", 1, MAX_VOLUME), AudibleAlert.promptRepeat: ("prompt.wav", None, MAX_VOLUME), AudibleAlert.promptDistracted: ("prompt_distracted.wav", None, MAX_VOLUME), AudibleAlert.warningSoft: ("warning_soft.wav", None, MAX_VOLUME), AudibleAlert.warningImmediate: ("warning_immediate.wav", None, MAX_VOLUME), } def check_controls_timeout_alert(sm): controls_missing = time.monotonic() - sm.recv_time['controlsState'] if controls_missing > CONTROLS_TIMEOUT: if sm['controlsState'].enabled and (controls_missing - CONTROLS_TIMEOUT) < 10: return True return False class Soundd: def __init__(self): self.load_sounds() self.current_alert = AudibleAlert.none self.current_volume = MIN_VOLUME self.current_sound_frame = 0 self.controls_timeout_alert = False self.spl_filter_weighted = FirstOrderFilter(0, 2.5, FILTER_DT, initialized=False) def load_sounds(self): self.loaded_sounds: dict[int, np.ndarray] = {} # Load all sounds for sound in sound_list: filename, play_count, volume = sound_list[sound] wavefile = wave.open(BASEDIR + "/selfdrive/assets/sounds/" + filename, 'r') assert wavefile.getnchannels() == 1 assert wavefile.getsampwidth() == 2 assert wavefile.getframerate() == SAMPLE_RATE length = wavefile.getnframes() self.loaded_sounds[sound] = np.frombuffer(wavefile.readframes(length), dtype=np.int16).astype(np.float32) / (2**16/2) def get_sound_data(self, frames): # get "frames" worth of data from the current alert sound, looping when required ret = np.zeros(frames, dtype=np.float32) if self.current_alert != AudibleAlert.none: num_loops = sound_list[self.current_alert][1] sound_data = self.loaded_sounds[self.current_alert] written_frames = 0 current_sound_frame = self.current_sound_frame % len(sound_data) loops = self.current_sound_frame // len(sound_data) while written_frames < frames and (num_loops is None or loops < num_loops): available_frames = sound_data.shape[0] - current_sound_frame frames_to_write = min(available_frames, frames - written_frames) ret[written_frames:written_frames+frames_to_write] = sound_data[current_sound_frame:current_sound_frame+frames_to_write] written_frames += frames_to_write self.current_sound_frame += frames_to_write return ret * self.current_volume def callback(self, data_out: np.ndarray, frames: int, time, status) -> None: if status: cloudlog.warning(f"soundd stream over/underflow: {status}") data_out[:frames, 0] = self.get_sound_data(frames) def update_alert(self, new_alert): current_alert_played_once = self.current_alert == AudibleAlert.none or self.current_sound_frame > 
len(self.loaded_sounds[self.current_alert]) if self.current_alert != new_alert and (new_alert != AudibleAlert.none or current_alert_played_once): self.current_alert = new_alert self.current_sound_frame = 0 def get_audible_alert(self, sm): if sm.updated['controlsState']: new_alert = sm['controlsState'].alertSound.raw self.update_alert(new_alert) elif check_controls_timeout_alert(sm): self.update_alert(AudibleAlert.warningImmediate) self.controls_timeout_alert = True elif self.controls_timeout_alert: self.update_alert(AudibleAlert.none) self.controls_timeout_alert = False def calculate_volume(self, weighted_db): volume = ((weighted_db - AMBIENT_DB) / DB_SCALE) * (MAX_VOLUME - MIN_VOLUME) + MIN_VOLUME return math.pow(10, (np.clip(volume, MIN_VOLUME, MAX_VOLUME) - 1)) @retry(attempts=7, delay=3) def get_stream(self, sd): # reload sounddevice to reinitialize portaudio sd._terminate() sd._initialize() return sd.OutputStream(channels=1, samplerate=SAMPLE_RATE, callback=self.callback, blocksize=SAMPLE_BUFFER) def soundd_thread(self): # sounddevice must be imported after forking processes import sounddevice as sd sm = messaging.SubMaster(['controlsState', 'microphone']) with self.get_stream(sd) as stream: rk = Ratekeeper(20) cloudlog.info(f"soundd stream started: {stream.samplerate=} {stream.channels=} {stream.dtype=} {stream.device=}, {stream.blocksize=}") while True: sm.update(0) if sm.updated['microphone'] and self.current_alert == AudibleAlert.none: # only update volume filter when not playing alert self.spl_filter_weighted.update(sm["microphone"].soundPressureWeightedDb) self.current_volume = self.calculate_volume(float(self.spl_filter_weighted.x)) self.get_audible_alert(sm) rk.keep_time() assert stream.active def main(): s = Soundd() s.soundd_thread() if __name__ == "__main__": main()
2301_81045437/openpilot
selfdrive/ui/soundd.py
Python
mit
5,838
#!/bin/sh
if [ -f /TICI ] && [ ! -f _spinner ]; then
  cp qt/spinner_larch64 _spinner
fi
exec ./_spinner "$1"
2301_81045437/openpilot
selfdrive/ui/spinner
Shell
mit
112
#!/usr/bin/env python3
import time

import cereal.messaging as messaging

if __name__ == "__main__":
  while True:
    pm = messaging.PubMaster(['carParams', 'carState'])

    batt = 1.
    while True:
      msg = messaging.new_message('carParams')
      msg.carParams.carName = "BODY"
      msg.carParams.notCar = True
      pm.send('carParams', msg)

      for b in range(100, 0, -1):
        msg = messaging.new_message('carState')
        msg.carState.charging = True
        msg.carState.fuelGauge = b / 100.
        pm.send('carState', msg)
        time.sleep(0.1)

      time.sleep(1)
2301_81045437/openpilot
selfdrive/ui/tests/body.py
Python
mit
590
#!/bin/bash
set -e

UI_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"/..
TEST_TEXT="(WRAPPED_SOURCE_TEXT)"
TEST_TS_FILE=$UI_DIR/translations/main_test_en.ts
TEST_QM_FILE=$UI_DIR/translations/main_test_en.qm

# translation strings
UNFINISHED="<translation type=\"unfinished\"><\/translation>"
TRANSLATED="<translation>$TEST_TEXT<\/translation>"

mkdir -p $UI_DIR/translations
rm -f $TEST_TS_FILE $TEST_QM_FILE

lupdate -recursive "$UI_DIR" -ts $TEST_TS_FILE
sed -i "s/$UNFINISHED/$TRANSLATED/" $TEST_TS_FILE
lrelease $TEST_TS_FILE
2301_81045437/openpilot
selfdrive/ui/tests/create_test_translations.sh
Shell
mit
550
#!/usr/bin/env python3
import os
import sys
import time
import json

from openpilot.common.basedir import BASEDIR
from openpilot.common.params import Params
from openpilot.selfdrive.controls.lib.alertmanager import set_offroad_alert

if __name__ == "__main__":
  params = Params()

  with open(os.path.join(BASEDIR, "selfdrive/controls/lib/alerts_offroad.json")) as f:
    offroad_alerts = json.load(f)

  t = 10 if len(sys.argv) < 2 else int(sys.argv[1])
  while True:
    print("setting alert update")
    params.put_bool("UpdateAvailable", True)
    r = open(os.path.join(BASEDIR, "RELEASES.md")).read()
    r = r[:r.find('\n\n')]  # Slice latest release notes
    params.put("UpdaterNewReleaseNotes", r + "\n")
    time.sleep(t)
    params.put_bool("UpdateAvailable", False)

    # cycle through normal alerts
    for a in offroad_alerts:
      print("setting alert:", a)
      set_offroad_alert(a, True)
      time.sleep(t)
      set_offroad_alert(a, False)

    print("no alert")
    time.sleep(t)
2301_81045437/openpilot
selfdrive/ui/tests/cycle_offroad_alerts.py
Python
mit
1,005
#include <QApplication>
#include <QSoundEffect>
#include <QTimer>
#include <QDebug>

int main(int argc, char **argv) {
  QApplication a(argc, argv);

  QTimer::singleShot(0, [=]{
    QSoundEffect s;
    const char *vol = getenv("VOLUME");
    s.setVolume(vol ? atof(vol) : 1.0);
    for (int i = 1; i < argc; i++) {
      QString fn = argv[i];
      qDebug() << "playing" << fn;

      QEventLoop loop;
      s.setSource(QUrl::fromLocalFile(fn));
      QEventLoop::connect(&s, &QSoundEffect::loadedChanged, &loop, &QEventLoop::quit);
      loop.exec();
      s.play();
      QEventLoop::connect(&s, &QSoundEffect::playingChanged, &loop, &QEventLoop::quit);
      loop.exec();
    }
    QCoreApplication::exit();
  });

  return a.exec();
}
2301_81045437/openpilot
selfdrive/ui/tests/playsound.cc
C++
mit
741
#define CATCH_CONFIG_RUNNER
#include "catch2/catch.hpp"

#include <QApplication>
#include <QDebug>
#include <QDir>
#include <QTranslator>

int main(int argc, char **argv) {
  // unit tests for Qt
  QApplication app(argc, argv);

  QString language_file = "main_test_en";
  qDebug() << "Loading language:" << language_file;

  QTranslator translator;
  QString translationsPath = QDir::cleanPath(qApp->applicationDirPath() + "/../translations");
  if (!translator.load(language_file, translationsPath)) {
    qDebug() << "Failed to load translation file!";
  }
  app.installTranslator(&translator);

  const int res = Catch::Session().run(argc, argv);
  return (res < 0xff ? res : 0xff);
}
2301_81045437/openpilot
selfdrive/ui/tests/test_runner.cc
C++
mit
689
#include "catch2/catch.hpp" #include "common/params.h" #include "selfdrive/ui/qt/window.h" const QString TEST_TEXT = "(WRAPPED_SOURCE_TEXT)"; // what each string should be translated to QRegExp RE_NUM("\\d*"); QStringList getParentWidgets(QWidget* widget){ QStringList parentWidgets; while (widget->parentWidget() != Q_NULLPTR) { widget = widget->parentWidget(); parentWidgets.append(widget->metaObject()->className()); } return parentWidgets; } template <typename T> void checkWidgetTrWrap(MainWindow &w) { for (auto widget : w.findChildren<T>()) { const QString text = widget->text(); bool isNumber = RE_NUM.exactMatch(text); bool wrapped = text.contains(TEST_TEXT); QString parentWidgets = getParentWidgets(widget).join("->"); if (!text.isEmpty() && !isNumber && !wrapped) { FAIL(("\"" + text + "\" must be wrapped. Parent widgets: " + parentWidgets).toStdString()); } // warn if source string wrapped, but UI adds text // TODO: add way to ignore this if (wrapped && text != TEST_TEXT) { WARN(("\"" + text + "\" is dynamic and needs a custom retranslate function. Parent widgets: " + parentWidgets).toStdString()); } } } // Tests all strings in the UI are wrapped with tr() TEST_CASE("UI: test all strings wrapped") { Params().remove("LanguageSetting"); Params().remove("HardwareSerial"); Params().remove("DongleId"); qputenv("TICI", "1"); MainWindow w; checkWidgetTrWrap<QPushButton*>(w); checkWidgetTrWrap<QLabel*>(w); }
2301_81045437/openpilot
selfdrive/ui/tests/test_translations.cc
C++
mit
1,519
<html>
<style>
.column {
  float: left;
  width: 50%;
  padding: 5px;
}

.row::after {
  content: "";
  clear: both;
  display: table;
}

.image {
  width: 100%;
}
</style>

{% for name, (image, ref_image) in cases.items() %}
  <h1>{{name}}</h1>
  <div class="row">
    <div class="column">
      <img class="image" src="{{ image }}" />
    </div>
  </div>
  <br>
{% endfor %}
</html>
2301_81045437/openpilot
selfdrive/ui/tests/test_ui/template.html
HTML
mit
378
#include "selfdrive/ui/tests/ui_snapshot.h" #include <QApplication> #include <QCommandLineParser> #include <QDir> #include <QImage> #include <QPainter> #include "selfdrive/ui/qt/home.h" #include "selfdrive/ui/qt/util.h" #include "selfdrive/ui/qt/window.h" #include "selfdrive/ui/ui.h" void saveWidgetAsImage(QWidget *widget, const QString &fileName) { QImage image(widget->size(), QImage::Format_ARGB32); QPainter painter(&image); widget->render(&painter); image.save(fileName); } int main(int argc, char *argv[]) { initApp(argc, argv); QApplication app(argc, argv); QCommandLineParser parser; parser.setApplicationDescription("Take a snapshot of the UI."); parser.addHelpOption(); parser.addOption(QCommandLineOption(QStringList() << "o" << "output", "Output image file path. The file's suffix is used to " "determine the format. Supports PNG and JPEG formats. " "Defaults to \"snapshot.png\".", "file", "snapshot.png")); parser.process(app); const QString output = parser.value("output"); if (output.isEmpty()) { qCritical() << "No output file specified"; return 1; } auto current = QDir::current(); // change working directory to find assets if (!QDir::setCurrent(QCoreApplication::applicationDirPath() + QDir::separator() + "..")) { qCritical() << "Failed to set current directory"; return 1; } MainWindow w; w.setFixedSize(2160, 1080); w.show(); app.installEventFilter(&w); // restore working directory QDir::setCurrent(current.absolutePath()); // wait for the UI to update QObject::connect(uiState(), &UIState::uiUpdate, [&](const UIState &s) { saveWidgetAsImage(&w, output); app.quit(); }); return app.exec(); }
2301_81045437/openpilot
selfdrive/ui/tests/ui_snapshot.cc
C++
mit
1,916
#pragma once

#include <QWidget>

void saveWidgetAsImage(QWidget *widget, const QString &fileName);
2301_81045437/openpilot
selfdrive/ui/tests/ui_snapshot.h
C
mit
100
#!/bin/sh
if [ -f /TICI ] && [ ! -f _text ]; then
  cp qt/text_larch64 _text
fi
exec ./_text "$1"
2301_81045437/openpilot
selfdrive/ui/text
Shell
mit
100
#!/usr/bin/env python3 import argparse import json import os import pathlib import xml.etree.ElementTree as ET from typing import cast import requests TRANSLATIONS_DIR = pathlib.Path(__file__).resolve().parent TRANSLATIONS_LANGUAGES = TRANSLATIONS_DIR / "languages.json" OPENAI_MODEL = "gpt-4" OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY") OPENAI_PROMPT = "You are a professional translator from English to {language} (ISO 639 language code). " + \ "The following sentence or word is in the GUI of a software called openpilot, translate it accordingly." def get_language_files(languages: list[str] = None) -> dict[str, pathlib.Path]: files = {} with open(TRANSLATIONS_LANGUAGES) as fp: language_dict = json.load(fp) for filename in language_dict.values(): path = TRANSLATIONS_DIR / f"{filename}.ts" language = path.stem.split("main_")[1] if languages is None or language in languages: files[language] = path return files def translate_phrase(text: str, language: str) -> str: response = requests.post( "https://api.openai.com/v1/chat/completions", json={ "model": OPENAI_MODEL, "messages": [ { "role": "system", "content": OPENAI_PROMPT.format(language=language), }, { "role": "user", "content": text, }, ], "temperature": 0.8, "max_tokens": 1024, "top_p": 1, }, headers={ "Authorization": f"Bearer {OPENAI_API_KEY}", "Content-Type": "application/json", }, ) if 400 <= response.status_code < 600: raise requests.HTTPError(f'Error {response.status_code}: {response.json()}', response=response) data = response.json() return cast(str, data["choices"][0]["message"]["content"]) def translate_file(path: pathlib.Path, language: str, all_: bool) -> None: tree = ET.parse(path) root = tree.getroot() for context in root.findall("./context"): name = context.find("name") if name is None: raise ValueError("name not found") print(f"Context: {name.text}") for message in context.findall("./message"): source = message.find("source") translation = message.find("translation") if source is None or translation is None: raise ValueError("source or translation not found") if not all_ and translation.attrib.get("type") != "unfinished": continue llm_translation = translate_phrase(cast(str, source.text), language) print(f"Source: {source.text}\n" + f"Current translation: {translation.text}\n" + f"LLM translation: {llm_translation}") translation.text = llm_translation with path.open("w", encoding="utf-8") as fp: fp.write('<?xml version="1.0" encoding="utf-8"?>\n' + '<!DOCTYPE TS>\n' + ET.tostring(root, encoding="utf-8").decode()) def main(): arg_parser = argparse.ArgumentParser("Auto translate") group = arg_parser.add_mutually_exclusive_group(required=True) group.add_argument("-a", "--all-files", action="store_true", help="Translate all files") group.add_argument("-f", "--file", nargs="+", help="Translate the selected files. (Example: -f fr de)") arg_parser.add_argument("-t", "--all-translations", action="store_true", default=False, help="Translate all sections. (Default: only unfinished)") args = arg_parser.parse_args() if OPENAI_API_KEY is None: print("OpenAI API key is missing. (Hint: use `export OPENAI_API_KEY=YOUR-KEY` before you run the script).\n" + "If you don't have one go to: https://beta.openai.com/account/api-keys.") exit(1) files = get_language_files(None if args.all_files else args.file) if args.file: missing_files = set(args.file) - set(files) if len(missing_files): print(f"No language files found: {missing_files}") exit(1) print(f"Translation mode: {'all' if args.all_translations else 'only unfinished'}. 
Files: {list(files)}") for lang, path in files.items(): print(f"Translate {lang} ({path})") translate_file(path, lang, args.all_translations) if __name__ == "__main__": main()
2301_81045437/openpilot
selfdrive/ui/translations/auto_translate.py
Python
mit
4,174
#!/usr/bin/env python3 import json import os import requests import xml.etree.ElementTree as ET from openpilot.common.basedir import BASEDIR from openpilot.selfdrive.ui.tests.test_translations import UNFINISHED_TRANSLATION_TAG from openpilot.selfdrive.ui.update_translations import LANGUAGES_FILE, TRANSLATIONS_DIR TRANSLATION_TAG = "<translation" BADGE_HEIGHT = 20 + 8 SHIELDS_URL = "https://img.shields.io/badge" if __name__ == "__main__": with open(LANGUAGES_FILE) as f: translation_files = json.load(f) badge_svg = [] max_badge_width = 0 # keep track of max width to set parent element for idx, (name, file) in enumerate(translation_files.items()): with open(os.path.join(TRANSLATIONS_DIR, f"{file}.ts")) as tr_f: tr_file = tr_f.read() total_translations = 0 unfinished_translations = 0 for line in tr_file.splitlines(): if TRANSLATION_TAG in line: total_translations += 1 if UNFINISHED_TRANSLATION_TAG in line: unfinished_translations += 1 percent_finished = int(100 - (unfinished_translations / total_translations * 100.)) color = "green" if percent_finished == 100 else "orange" if percent_finished > 90 else "red" # Download badge badge_label = f"LANGUAGE {name}" badge_message = f"{percent_finished}% complete" if unfinished_translations != 0: badge_message += f" ({unfinished_translations} unfinished)" r = requests.get(f"{SHIELDS_URL}/{badge_label}-{badge_message}-{color}", timeout=10) assert r.status_code == 200, "Error downloading badge" content_svg = r.content.decode("utf-8") xml = ET.fromstring(content_svg) assert "width" in xml.attrib max_badge_width = max(max_badge_width, int(xml.attrib["width"])) # Make tag ids in each badge unique to combine them into one svg for tag in ("r", "s"): content_svg = content_svg.replace(f'id="{tag}"', f'id="{tag}{idx}"') content_svg = content_svg.replace(f'"url(#{tag})"', f'"url(#{tag}{idx})"') badge_svg.extend([f'<g transform="translate(0, {idx * BADGE_HEIGHT})">', content_svg, "</g>"]) badge_svg.insert(0, '<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" ' + f'height="{len(translation_files) * BADGE_HEIGHT}" width="{max_badge_width}">') badge_svg.append("</svg>") with open(os.path.join(BASEDIR, "translation_badge.svg"), "w") as badge_f: badge_f.write("\n".join(badge_svg))
2301_81045437/openpilot
selfdrive/ui/translations/create_badges.py
Python
mit
2,456
#include "selfdrive/ui/ui.h" #include <algorithm> #include <cassert> #include <cmath> #include <QtConcurrent> #include "common/transformations/orientation.hpp" #include "common/params.h" #include "common/swaglog.h" #include "common/util.h" #include "common/watchdog.h" #include "system/hardware/hw.h" #define BACKLIGHT_DT 0.05 #define BACKLIGHT_TS 10.00 // Projects a point in car to space to the corresponding point in full frame // image space. static bool calib_frame_to_full_frame(const UIState *s, float in_x, float in_y, float in_z, QPointF *out) { const float margin = 500.0f; const QRectF clip_region{-margin, -margin, s->fb_w + 2 * margin, s->fb_h + 2 * margin}; const vec3 pt = (vec3){{in_x, in_y, in_z}}; const vec3 Ep = matvecmul3(s->scene.wide_cam ? s->scene.view_from_wide_calib : s->scene.view_from_calib, pt); const vec3 KEp = matvecmul3(s->scene.wide_cam ? ECAM_INTRINSIC_MATRIX : FCAM_INTRINSIC_MATRIX, Ep); // Project. QPointF point = s->car_space_transform.map(QPointF{KEp.v[0] / KEp.v[2], KEp.v[1] / KEp.v[2]}); if (clip_region.contains(point)) { *out = point; return true; } return false; } int get_path_length_idx(const cereal::XYZTData::Reader &line, const float path_height) { const auto line_x = line.getX(); int max_idx = 0; for (int i = 1; i < line_x.size() && line_x[i] <= path_height; ++i) { max_idx = i; } return max_idx; } void update_leads(UIState *s, const cereal::RadarState::Reader &radar_state, const cereal::XYZTData::Reader &line) { for (int i = 0; i < 2; ++i) { auto lead_data = (i == 0) ? radar_state.getLeadOne() : radar_state.getLeadTwo(); if (lead_data.getStatus()) { float z = line.getZ()[get_path_length_idx(line, lead_data.getDRel())]; calib_frame_to_full_frame(s, lead_data.getDRel(), -lead_data.getYRel(), z + 1.22, &s->scene.lead_vertices[i]); } } } void update_line_data(const UIState *s, const cereal::XYZTData::Reader &line, float y_off, float z_off, QPolygonF *pvd, int max_idx, bool allow_invert=true) { const auto line_x = line.getX(), line_y = line.getY(), line_z = line.getZ(); QPointF left, right; pvd->clear(); for (int i = 0; i <= max_idx; i++) { // highly negative x positions are drawn above the frame and cause flickering, clip to zy plane of camera if (line_x[i] < 0) continue; bool l = calib_frame_to_full_frame(s, line_x[i], line_y[i] - y_off, line_z[i] + z_off, &left); bool r = calib_frame_to_full_frame(s, line_x[i], line_y[i] + y_off, line_z[i] + z_off, &right); if (l && r) { // For wider lines the drawn polygon will "invert" when going over a hill and cause artifacts if (!allow_invert && pvd->size() && left.y() > pvd->back().y()) { continue; } pvd->push_back(left); pvd->push_front(right); } } } void update_model(UIState *s, const cereal::ModelDataV2::Reader &model, const cereal::UiPlan::Reader &plan) { UIScene &scene = s->scene; auto plan_position = plan.getPosition(); if (plan_position.getX().size() < model.getPosition().getX().size()) { plan_position = model.getPosition(); } float max_distance = std::clamp(*(plan_position.getX().end() - 1), MIN_DRAW_DISTANCE, MAX_DRAW_DISTANCE); // update lane lines const auto lane_lines = model.getLaneLines(); const auto lane_line_probs = model.getLaneLineProbs(); int max_idx = get_path_length_idx(lane_lines[0], max_distance); for (int i = 0; i < std::size(scene.lane_line_vertices); i++) { scene.lane_line_probs[i] = lane_line_probs[i]; update_line_data(s, lane_lines[i], 0.025 * scene.lane_line_probs[i], 0, &scene.lane_line_vertices[i], max_idx); } // update road edges const auto road_edges = model.getRoadEdges(); const auto 
road_edge_stds = model.getRoadEdgeStds(); for (int i = 0; i < std::size(scene.road_edge_vertices); i++) { scene.road_edge_stds[i] = road_edge_stds[i]; update_line_data(s, road_edges[i], 0.025, 0, &scene.road_edge_vertices[i], max_idx); } // update path auto lead_one = (*s->sm)["radarState"].getRadarState().getLeadOne(); if (lead_one.getStatus()) { const float lead_d = lead_one.getDRel() * 2.; max_distance = std::clamp((float)(lead_d - fmin(lead_d * 0.35, 10.)), 0.0f, max_distance); } max_idx = get_path_length_idx(plan_position, max_distance); update_line_data(s, plan_position, 0.9, 1.22, &scene.track_vertices, max_idx, false); } void update_dmonitoring(UIState *s, const cereal::DriverStateV2::Reader &driverstate, float dm_fade_state, bool is_rhd) { UIScene &scene = s->scene; const auto driver_orient = is_rhd ? driverstate.getRightDriverData().getFaceOrientation() : driverstate.getLeftDriverData().getFaceOrientation(); for (int i = 0; i < std::size(scene.driver_pose_vals); i++) { float v_this = (i == 0 ? (driver_orient[i] < 0 ? 0.7 : 0.9) : 0.4) * driver_orient[i]; scene.driver_pose_diff[i] = fabs(scene.driver_pose_vals[i] - v_this); scene.driver_pose_vals[i] = 0.8 * v_this + (1 - 0.8) * scene.driver_pose_vals[i]; scene.driver_pose_sins[i] = sinf(scene.driver_pose_vals[i]*(1.0-dm_fade_state)); scene.driver_pose_coss[i] = cosf(scene.driver_pose_vals[i]*(1.0-dm_fade_state)); } auto [sin_y, sin_x, sin_z] = scene.driver_pose_sins; auto [cos_y, cos_x, cos_z] = scene.driver_pose_coss; const mat3 r_xyz = (mat3){{ cos_x * cos_z, cos_x * sin_z, -sin_x, -sin_y * sin_x * cos_z - cos_y * sin_z, -sin_y * sin_x * sin_z + cos_y * cos_z, -sin_y * cos_x, cos_y * sin_x * cos_z - sin_y * sin_z, cos_y * sin_x * sin_z + sin_y * cos_z, cos_y * cos_x, }}; // transform vertices for (int kpi = 0; kpi < std::size(default_face_kpts_3d); kpi++) { vec3 kpt_this = matvecmul3(r_xyz, default_face_kpts_3d[kpi]); scene.face_kpts_draw[kpi] = (vec3){{kpt_this.v[0], kpt_this.v[1], (float)(kpt_this.v[2] * (1.0-dm_fade_state) + 8 * dm_fade_state)}}; } } static void update_sockets(UIState *s) { s->sm->update(0); } static void update_state(UIState *s) { SubMaster &sm = *(s->sm); UIScene &scene = s->scene; if (sm.updated("liveCalibration")) { auto live_calib = sm["liveCalibration"].getLiveCalibration(); auto rpy_list = live_calib.getRpyCalib(); auto wfde_list = live_calib.getWideFromDeviceEuler(); Eigen::Vector3d rpy; Eigen::Vector3d wfde; if (rpy_list.size() == 3) rpy << rpy_list[0], rpy_list[1], rpy_list[2]; if (wfde_list.size() == 3) wfde << wfde_list[0], wfde_list[1], wfde_list[2]; Eigen::Matrix3d device_from_calib = euler2rot(rpy); Eigen::Matrix3d wide_from_device = euler2rot(wfde); Eigen::Matrix3d view_from_device; view_from_device << 0, 1, 0, 0, 0, 1, 1, 0, 0; Eigen::Matrix3d view_from_calib = view_from_device * device_from_calib; Eigen::Matrix3d view_from_wide_calib = view_from_device * wide_from_device * device_from_calib; for (int i = 0; i < 3; i++) { for (int j = 0; j < 3; j++) { scene.view_from_calib.v[i*3 + j] = view_from_calib(i, j); scene.view_from_wide_calib.v[i*3 + j] = view_from_wide_calib(i, j); } } scene.calibration_valid = live_calib.getCalStatus() == cereal::LiveCalibrationData::Status::CALIBRATED; scene.calibration_wide_valid = wfde_list.size() == 3; } if (sm.updated("pandaStates")) { auto pandaStates = sm["pandaStates"].getPandaStates(); if (pandaStates.size() > 0) { scene.pandaType = pandaStates[0].getPandaType(); if (scene.pandaType != cereal::PandaState::PandaType::UNKNOWN) { scene.ignition = false; for 
(const auto& pandaState : pandaStates) { scene.ignition |= pandaState.getIgnitionLine() || pandaState.getIgnitionCan(); } } } } else if ((s->sm->frame - s->sm->rcv_frame("pandaStates")) > 5*UI_FREQ) { scene.pandaType = cereal::PandaState::PandaType::UNKNOWN; } if (sm.updated("carParams")) { scene.longitudinal_control = sm["carParams"].getCarParams().getOpenpilotLongitudinalControl(); } if (sm.updated("wideRoadCameraState")) { auto cam_state = sm["wideRoadCameraState"].getWideRoadCameraState(); float scale = (cam_state.getSensor() == cereal::FrameData::ImageSensor::AR0231) ? 6.0f : 1.0f; scene.light_sensor = std::max(100.0f - scale * cam_state.getExposureValPercent(), 0.0f); } else if (!sm.allAliveAndValid({"wideRoadCameraState"})) { scene.light_sensor = -1; } scene.started = sm["deviceState"].getDeviceState().getStarted() && scene.ignition; scene.world_objects_visible = scene.world_objects_visible || (scene.started && sm.rcv_frame("liveCalibration") > scene.started_frame && sm.rcv_frame("modelV2") > scene.started_frame && sm.rcv_frame("uiPlan") > scene.started_frame); } void ui_update_params(UIState *s) { auto params = Params(); s->scene.is_metric = params.getBool("IsMetric"); s->scene.map_on_left = params.getBool("NavSettingLeftSide"); } void UIState::updateStatus() { if (scene.started && sm->updated("controlsState")) { auto controls_state = (*sm)["controlsState"].getControlsState(); auto state = controls_state.getState(); if (state == cereal::ControlsState::OpenpilotState::PRE_ENABLED || state == cereal::ControlsState::OpenpilotState::OVERRIDING) { status = STATUS_OVERRIDE; } else { status = controls_state.getEnabled() ? STATUS_ENGAGED : STATUS_DISENGAGED; } } // Handle onroad/offroad transition if (scene.started != started_prev || sm->frame == 1) { if (scene.started) { status = STATUS_DISENGAGED; scene.started_frame = sm->frame; } started_prev = scene.started; scene.world_objects_visible = false; emit offroadTransition(!scene.started); } } UIState::UIState(QObject *parent) : QObject(parent) { sm = std::make_unique<SubMaster, const std::initializer_list<const char *>>({ "modelV2", "controlsState", "liveCalibration", "radarState", "deviceState", "pandaStates", "carParams", "driverMonitoringState", "carState", "liveLocationKalman", "driverStateV2", "wideRoadCameraState", "managerState", "navInstruction", "navRoute", "uiPlan", "clocks", }); Params params; language = QString::fromStdString(params.get("LanguageSetting")); auto prime_value = params.get("PrimeType"); if (!prime_value.empty()) { prime_type = static_cast<PrimeType>(std::atoi(prime_value.c_str())); } // update timer timer = new QTimer(this); QObject::connect(timer, &QTimer::timeout, this, &UIState::update); timer->start(1000 / UI_FREQ); } void UIState::update() { update_sockets(this); update_state(this); updateStatus(); if (sm->frame % UI_FREQ == 0) { watchdog_kick(nanos_since_boot()); } emit uiUpdate(*this); } void UIState::setPrimeType(PrimeType type) { if (type != prime_type) { bool prev_prime = hasPrime(); prime_type = type; Params().put("PrimeType", std::to_string(prime_type)); emit primeTypeChanged(prime_type); bool prime = hasPrime(); if (prev_prime != prime) { emit primeChanged(prime); } } } Device::Device(QObject *parent) : brightness_filter(BACKLIGHT_OFFROAD, BACKLIGHT_TS, BACKLIGHT_DT), QObject(parent) { setAwake(true); resetInteractiveTimeout(); QObject::connect(uiState(), &UIState::uiUpdate, this, &Device::update); } void Device::update(const UIState &s) { updateBrightness(s); updateWakefulness(s); } void 
Device::setAwake(bool on) { if (on != awake) { awake = on; Hardware::set_display_power(awake); LOGD("setting display power %d", awake); emit displayPowerChanged(awake); } } void Device::resetInteractiveTimeout(int timeout) { if (timeout == -1) { timeout = (ignition_on ? 10 : 30); } interactive_timeout = timeout * UI_FREQ; } void Device::updateBrightness(const UIState &s) { float clipped_brightness = offroad_brightness; if (s.scene.started && s.scene.light_sensor > 0) { clipped_brightness = s.scene.light_sensor; // CIE 1931 - https://www.photonstophotos.net/GeneralTopics/Exposure/Psychometric_Lightness_and_Gamma.htm if (clipped_brightness <= 8) { clipped_brightness = (clipped_brightness / 903.3); } else { clipped_brightness = std::pow((clipped_brightness + 16.0) / 116.0, 3.0); } // Scale back to 10% to 100% clipped_brightness = std::clamp(100.0f * clipped_brightness, 10.0f, 100.0f); } int brightness = brightness_filter.update(clipped_brightness); if (!awake) { brightness = 0; } if (brightness != last_brightness) { if (!brightness_future.isRunning()) { brightness_future = QtConcurrent::run(Hardware::set_brightness, brightness); last_brightness = brightness; } } } void Device::updateWakefulness(const UIState &s) { bool ignition_just_turned_off = !s.scene.ignition && ignition_on; ignition_on = s.scene.ignition; if (ignition_just_turned_off) { resetInteractiveTimeout(); } else if (interactive_timeout > 0 && --interactive_timeout == 0) { emit interactiveTimeout(); } setAwake(s.scene.ignition || interactive_timeout > 0); } UIState *uiState() { static UIState ui_state; return &ui_state; } Device *device() { static Device _device; return &_device; }
2301_81045437/openpilot
selfdrive/ui/ui.cc
C++
mit
13,477
#pragma once #include <memory> #include <string> #include <QObject> #include <QTimer> #include <QColor> #include <QFuture> #include <QPolygonF> #include <QTransform> #include "cereal/messaging/messaging.h" #include "common/mat.h" #include "common/params.h" #include "common/timing.h" #include "system/hardware/hw.h" const int UI_BORDER_SIZE = 30; const int UI_HEADER_HEIGHT = 420; const int UI_FREQ = 20; // Hz const int BACKLIGHT_OFFROAD = 50; const float MIN_DRAW_DISTANCE = 10.0; const float MAX_DRAW_DISTANCE = 100.0; constexpr mat3 DEFAULT_CALIBRATION = {{ 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0 }}; constexpr mat3 FCAM_INTRINSIC_MATRIX = (mat3){{2648.0, 0.0, 1928.0 / 2, 0.0, 2648.0, 1208.0 / 2, 0.0, 0.0, 1.0}}; // tici ecam focal probably wrong? magnification is not consistent across frame // Need to retrain model before this can be changed constexpr mat3 ECAM_INTRINSIC_MATRIX = (mat3){{567.0, 0.0, 1928.0 / 2, 0.0, 567.0, 1208.0 / 2, 0.0, 0.0, 1.0}}; constexpr vec3 default_face_kpts_3d[] = { {-5.98, -51.20, 8.00}, {-17.64, -49.14, 8.00}, {-23.81, -46.40, 8.00}, {-29.98, -40.91, 8.00}, {-32.04, -37.49, 8.00}, {-34.10, -32.00, 8.00}, {-36.16, -21.03, 8.00}, {-36.16, 6.40, 8.00}, {-35.47, 10.51, 8.00}, {-32.73, 19.43, 8.00}, {-29.30, 26.29, 8.00}, {-24.50, 33.83, 8.00}, {-19.01, 41.37, 8.00}, {-14.21, 46.17, 8.00}, {-12.16, 47.54, 8.00}, {-4.61, 49.60, 8.00}, {4.99, 49.60, 8.00}, {12.53, 47.54, 8.00}, {14.59, 46.17, 8.00}, {19.39, 41.37, 8.00}, {24.87, 33.83, 8.00}, {29.67, 26.29, 8.00}, {33.10, 19.43, 8.00}, {35.84, 10.51, 8.00}, {36.53, 6.40, 8.00}, {36.53, -21.03, 8.00}, {34.47, -32.00, 8.00}, {32.42, -37.49, 8.00}, {30.36, -40.91, 8.00}, {24.19, -46.40, 8.00}, {18.02, -49.14, 8.00}, {6.36, -51.20, 8.00}, {-5.98, -51.20, 8.00}, }; typedef enum UIStatus { STATUS_DISENGAGED, STATUS_OVERRIDE, STATUS_ENGAGED, } UIStatus; enum PrimeType { UNKNOWN = -2, UNPAIRED = -1, NONE = 0, MAGENTA = 1, LITE = 2, BLUE = 3, MAGENTA_NEW = 4, PURPLE = 5, }; const QColor bg_colors [] = { [STATUS_DISENGAGED] = QColor(0x17, 0x33, 0x49, 0xc8), [STATUS_OVERRIDE] = QColor(0x91, 0x9b, 0x95, 0xf1), [STATUS_ENGAGED] = QColor(0x17, 0x86, 0x44, 0xf1), }; typedef struct UIScene { bool calibration_valid = false; bool calibration_wide_valid = false; bool wide_cam = true; mat3 view_from_calib = DEFAULT_CALIBRATION; mat3 view_from_wide_calib = DEFAULT_CALIBRATION; cereal::PandaState::PandaType pandaType; // modelV2 float lane_line_probs[4]; float road_edge_stds[2]; QPolygonF track_vertices; QPolygonF lane_line_vertices[4]; QPolygonF road_edge_vertices[2]; // lead QPointF lead_vertices[2]; // DMoji state float driver_pose_vals[3]; float driver_pose_diff[3]; float driver_pose_sins[3]; float driver_pose_coss[3]; vec3 face_kpts_draw[std::size(default_face_kpts_3d)]; cereal::LongitudinalPersonality personality; float light_sensor = -1; bool started, ignition, is_metric, map_on_left, longitudinal_control; bool world_objects_visible = false; uint64_t started_frame; } UIScene; class UIState : public QObject { Q_OBJECT public: UIState(QObject* parent = 0); void updateStatus(); inline bool engaged() const { return scene.started && (*sm)["controlsState"].getControlsState().getEnabled(); } void setPrimeType(PrimeType type); inline PrimeType primeType() const { return prime_type; } inline bool hasPrime() const { return prime_type > PrimeType::NONE; } int fb_w = 0, fb_h = 0; std::unique_ptr<SubMaster> sm; UIStatus status; UIScene scene = {}; QString language; QTransform car_space_transform; signals: void uiUpdate(const UIState &s); void 
offroadTransition(bool offroad); void primeChanged(bool prime); void primeTypeChanged(PrimeType prime_type); private slots: void update(); private: QTimer *timer; bool started_prev = false; PrimeType prime_type = PrimeType::UNKNOWN; }; UIState *uiState(); // device management class class Device : public QObject { Q_OBJECT public: Device(QObject *parent = 0); bool isAwake() { return awake; } void setOffroadBrightness(int brightness) { offroad_brightness = std::clamp(brightness, 0, 100); } private: bool awake = false; int interactive_timeout = 0; bool ignition_on = false; int offroad_brightness = BACKLIGHT_OFFROAD; int last_brightness = 0; FirstOrderFilter brightness_filter; QFuture<void> brightness_future; void updateBrightness(const UIState &s); void updateWakefulness(const UIState &s); void setAwake(bool on); signals: void displayPowerChanged(bool on); void interactiveTimeout(); public slots: void resetInteractiveTimeout(int timeout = -1); void update(const UIState &s); }; Device *device(); void ui_update_params(UIState *s); int get_path_length_idx(const cereal::XYZTData::Reader &line, const float path_height); void update_model(UIState *s, const cereal::ModelDataV2::Reader &model, const cereal::UiPlan::Reader &plan); void update_dmonitoring(UIState *s, const cereal::DriverStateV2::Reader &driverstate, float dm_fade_state, bool is_rhd); void update_leads(UIState *s, const cereal::RadarState::Reader &radar_state, const cereal::XYZTData::Reader &line); void update_line_data(const UIState *s, const cereal::XYZTData::Reader &line, float y_off, float z_off, QPolygonF *pvd, int max_idx, bool allow_invert);
2301_81045437/openpilot
selfdrive/ui/ui.h
C++
mit
5,636
#!/usr/bin/env python3 import os import signal signal.signal(signal.SIGINT, signal.SIG_DFL) import cereal.messaging as messaging from openpilot.system.hardware import HARDWARE from PyQt5.QtCore import Qt, QTimer from PyQt5.QtWidgets import QLabel, QWidget, QVBoxLayout, QStackedLayout, QApplication from openpilot.selfdrive.ui.qt.python_helpers import set_main_window if __name__ == "__main__": app = QApplication([]) win = QWidget() set_main_window(win) bg = QLabel("", alignment=Qt.AlignCenter) alert1 = QLabel() alert2 = QLabel() vlayout = QVBoxLayout() vlayout.addWidget(alert1, alignment=Qt.AlignCenter) vlayout.addWidget(alert2, alignment=Qt.AlignCenter) tmp = QWidget() tmp.setLayout(vlayout) stack = QStackedLayout(win) stack.addWidget(tmp) stack.addWidget(bg) stack.setStackingMode(QStackedLayout.StackAll) win.setObjectName("win") win.setStyleSheet(""" #win { background-color: black; } QLabel { color: white; font-size: 40px; } """) sm = messaging.SubMaster(['deviceState', 'controlsState']) def update(): sm.update(0) onroad = sm.all_checks(['deviceState']) and sm['deviceState'].started if onroad: cs = sm['controlsState'] color = ("grey" if str(cs.state) in ("overriding", "preEnabled") else "green") if cs.enabled else "blue" bg.setText("\U0001F44D" if cs.engageable else "\U0001F6D1") bg.setStyleSheet(f"font-size: 100px; background-color: {color};") bg.show() alert1.setText(cs.alertText1) alert2.setText(cs.alertText2) if not sm.alive['controlsState']: alert1.setText("waiting for controls...") else: bg.hide() alert1.setText("") alert2.setText("offroad") HARDWARE.set_screen_brightness(100 if onroad else 40) os.system("echo 0 > /sys/class/backlight/panel0-backlight/bl_power") timer = QTimer() timer.timeout.connect(update) timer.start(50) app.exec_()
2301_81045437/openpilot
selfdrive/ui/ui.py
Python
mit
1,970
#!/usr/bin/env python3 import argparse import json import os from openpilot.common.basedir import BASEDIR UI_DIR = os.path.join(BASEDIR, "selfdrive", "ui") TRANSLATIONS_DIR = os.path.join(UI_DIR, "translations") LANGUAGES_FILE = os.path.join(TRANSLATIONS_DIR, "languages.json") TRANSLATIONS_INCLUDE_FILE = os.path.join(TRANSLATIONS_DIR, "alerts_generated.h") PLURAL_ONLY = ["main_en"] # base language, only create entries for strings with plural forms def generate_translations_include(): # offroad alerts # TODO translate events from openpilot.selfdrive/controls/lib/events.py content = "// THIS IS AN AUTOGENERATED FILE, PLEASE EDIT alerts_offroad.json\n" with open(os.path.join(BASEDIR, "selfdrive/controls/lib/alerts_offroad.json")) as f: for alert in json.load(f).values(): content += f'QT_TRANSLATE_NOOP("OffroadAlert", R"({alert["text"]})");\n' with open(TRANSLATIONS_INCLUDE_FILE, "w") as f: f.write(content) def update_translations(vanish: bool = False, translation_files: None | list[str] = None, translations_dir: str = TRANSLATIONS_DIR): generate_translations_include() if translation_files is None: with open(LANGUAGES_FILE) as f: translation_files = json.load(f).values() for file in translation_files: tr_file = os.path.join(translations_dir, f"{file}.ts") args = f"lupdate -locations none -recursive {UI_DIR} -ts {tr_file} -I {BASEDIR}" if vanish: args += " -no-obsolete" if file in PLURAL_ONLY: args += " -pluralonly" ret = os.system(args) assert ret == 0 if __name__ == "__main__": parser = argparse.ArgumentParser(description="Update translation files for UI", formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument("--vanish", action="store_true", help="Remove translations with source text no longer found") args = parser.parse_args() update_translations(args.vanish)
2301_81045437/openpilot
selfdrive/ui/update_translations.py
Python
mit
1,942
#include <QApplication> #include <QtWidgets> #include "selfdrive/ui/qt/qt_window.h" #include "selfdrive/ui/qt/util.h" #include "selfdrive/ui/qt/widgets/cameraview.h" int main(int argc, char *argv[]) { initApp(argc, argv); QApplication a(argc, argv); QWidget w; setMainWindow(&w); QVBoxLayout *layout = new QVBoxLayout(&w); layout->setMargin(0); layout->setSpacing(0); { QHBoxLayout *hlayout = new QHBoxLayout(); layout->addLayout(hlayout); hlayout->addWidget(new CameraWidget("navd", VISION_STREAM_MAP, false)); hlayout->addWidget(new CameraWidget("camerad", VISION_STREAM_ROAD, false)); } { QHBoxLayout *hlayout = new QHBoxLayout(); layout->addLayout(hlayout); hlayout->addWidget(new CameraWidget("camerad", VISION_STREAM_DRIVER, false)); hlayout->addWidget(new CameraWidget("camerad", VISION_STREAM_WIDE_ROAD, false)); } return a.exec(); }
2301_81045437/openpilot
selfdrive/ui/watch3.cc
C++
mit
905
import re import SCons from SCons.Action import Action from SCons.Scanner import Scanner pyx_from_import_re = re.compile(r'^from\s+(\S+)\s+cimport', re.M) pyx_import_re = re.compile(r'^cimport\s+(\S+)', re.M) cdef_import_re = re.compile(r'^cdef extern from\s+.(\S+).:', re.M) def pyx_scan(node, env, path, arg=None): contents = node.get_text_contents() # from <module> cimport ... matches = pyx_from_import_re.findall(contents) # cimport <module> matches += pyx_import_re.findall(contents) # Modules can be either .pxd or .pyx files files = [m.replace('.', '/') + '.pxd' for m in matches] files += [m.replace('.', '/') + '.pyx' for m in matches] # cdef extern from <file> files += cdef_import_re.findall(contents) # Handle relative imports cur_dir = str(node.get_dir()) files = [cur_dir + f if f.startswith('/') else f for f in files] # Filter out non-existing files (probably system imports) files = [f for f in files if env.File(f).exists()] return env.File(files) pyxscanner = Scanner(function=pyx_scan, skeys=['.pyx', '.pxd'], recursive=True) cythonAction = Action("$CYTHONCOM") def create_builder(env): try: cython = env['BUILDERS']['Cython'] except KeyError: cython = SCons.Builder.Builder( action=cythonAction, emitter={}, suffix=cython_suffix_emitter, single_source=1 ) env.Append(SCANNERS=pyxscanner) env['BUILDERS']['Cython'] = cython return cython def cython_suffix_emitter(env, source): return "$CYTHONCFILESUFFIX" def generate(env): env["CYTHON"] = "cythonize" env["CYTHONCOM"] = "$CYTHON $CYTHONFLAGS $SOURCE" env["CYTHONCFILESUFFIX"] = ".cpp" c_file, _ = SCons.Tool.createCFileBuilders(env) c_file.suffix['.pyx'] = cython_suffix_emitter c_file.add_action('.pyx', cythonAction) c_file.suffix['.py'] = cython_suffix_emitter c_file.add_action('.py', cythonAction) create_builder(env) def exists(env): return True
2301_81045437/openpilot
site_scons/site_tools/cython.py
Python
mit
1,948
#!/usr/bin/env python3 from __future__ import annotations import base64 import bz2 import hashlib import io import json import os import queue import random import select import socket import sys import tempfile import threading import time from dataclasses import asdict, dataclass, replace from datetime import datetime from functools import partial from queue import Queue from typing import cast from collections.abc import Callable import requests from jsonrpc import JSONRPCResponseManager, dispatcher from websocket import (ABNF, WebSocket, WebSocketException, WebSocketTimeoutException, create_connection) import cereal.messaging as messaging from cereal import log from cereal.services import SERVICE_LIST from openpilot.common.api import Api from openpilot.common.file_helpers import CallbackReader from openpilot.common.params import Params from openpilot.common.realtime import set_core_affinity from openpilot.system.hardware import HARDWARE, PC from openpilot.system.loggerd.xattr_cache import getxattr, setxattr from openpilot.common.swaglog import cloudlog from openpilot.system.version import get_build_metadata from openpilot.system.hardware.hw import Paths ATHENA_HOST = os.getenv('ATHENA_HOST', 'wss://athena.comma.ai') HANDLER_THREADS = int(os.getenv('HANDLER_THREADS', "4")) LOCAL_PORT_WHITELIST = {8022} LOG_ATTR_NAME = 'user.upload' LOG_ATTR_VALUE_MAX_UNIX_TIME = int.to_bytes(2147483647, 4, sys.byteorder) RECONNECT_TIMEOUT_S = 70 RETRY_DELAY = 10 # seconds MAX_RETRY_COUNT = 30 # Try for at most 5 minutes if upload fails immediately MAX_AGE = 31 * 24 * 3600 # seconds WS_FRAME_SIZE = 4096 NetworkType = log.DeviceState.NetworkType UploadFileDict = dict[str, str | int | float | bool] UploadItemDict = dict[str, str | bool | int | float | dict[str, str]] UploadFilesToUrlResponse = dict[str, int | list[UploadItemDict] | list[str]] @dataclass class UploadFile: fn: str url: str headers: dict[str, str] allow_cellular: bool @classmethod def from_dict(cls, d: dict) -> UploadFile: return cls(d.get("fn", ""), d.get("url", ""), d.get("headers", {}), d.get("allow_cellular", False)) @dataclass class UploadItem: path: str url: str headers: dict[str, str] created_at: int id: str | None retry_count: int = 0 current: bool = False progress: float = 0 allow_cellular: bool = False @classmethod def from_dict(cls, d: dict) -> UploadItem: return cls(d["path"], d["url"], d["headers"], d["created_at"], d["id"], d["retry_count"], d["current"], d["progress"], d["allow_cellular"]) dispatcher["echo"] = lambda s: s recv_queue: Queue[str] = queue.Queue() send_queue: Queue[str] = queue.Queue() upload_queue: Queue[UploadItem] = queue.Queue() low_priority_send_queue: Queue[str] = queue.Queue() log_recv_queue: Queue[str] = queue.Queue() cancelled_uploads: set[str] = set() cur_upload_items: dict[int, UploadItem | None] = {} def strip_bz2_extension(fn: str) -> str: if fn.endswith('.bz2'): return fn[:-4] return fn class AbortTransferException(Exception): pass class UploadQueueCache: @staticmethod def initialize(upload_queue: Queue[UploadItem]) -> None: try: upload_queue_json = Params().get("AthenadUploadQueue") if upload_queue_json is not None: for item in json.loads(upload_queue_json): upload_queue.put(UploadItem.from_dict(item)) except Exception: cloudlog.exception("athena.UploadQueueCache.initialize.exception") @staticmethod def cache(upload_queue: Queue[UploadItem]) -> None: try: queue: list[UploadItem | None] = list(upload_queue.queue) items = [asdict(i) for i in queue if i is not None and (i.id not in cancelled_uploads)] 
Params().put("AthenadUploadQueue", json.dumps(items)) except Exception: cloudlog.exception("athena.UploadQueueCache.cache.exception") def handle_long_poll(ws: WebSocket, exit_event: threading.Event | None) -> None: end_event = threading.Event() threads = [ threading.Thread(target=ws_manage, args=(ws, end_event), name='ws_manage'), threading.Thread(target=ws_recv, args=(ws, end_event), name='ws_recv'), threading.Thread(target=ws_send, args=(ws, end_event), name='ws_send'), threading.Thread(target=upload_handler, args=(end_event,), name='upload_handler'), threading.Thread(target=log_handler, args=(end_event,), name='log_handler'), threading.Thread(target=stat_handler, args=(end_event,), name='stat_handler'), ] + [ threading.Thread(target=jsonrpc_handler, args=(end_event,), name=f'worker_{x}') for x in range(HANDLER_THREADS) ] for thread in threads: thread.start() try: while not end_event.wait(0.1): if exit_event is not None and exit_event.is_set(): end_event.set() except (KeyboardInterrupt, SystemExit): end_event.set() raise finally: for thread in threads: cloudlog.debug(f"athena.joining {thread.name}") thread.join() def jsonrpc_handler(end_event: threading.Event) -> None: dispatcher["startLocalProxy"] = partial(startLocalProxy, end_event) while not end_event.is_set(): try: data = recv_queue.get(timeout=1) if "method" in data: cloudlog.event("athena.jsonrpc_handler.call_method", data=data) response = JSONRPCResponseManager.handle(data, dispatcher) send_queue.put_nowait(response.json) elif "id" in data and ("result" in data or "error" in data): log_recv_queue.put_nowait(data) else: raise Exception("not a valid request or response") except queue.Empty: pass except Exception as e: cloudlog.exception("athena jsonrpc handler failed") send_queue.put_nowait(json.dumps({"error": str(e)})) def retry_upload(tid: int, end_event: threading.Event, increase_count: bool = True) -> None: item = cur_upload_items[tid] if item is not None and item.retry_count < MAX_RETRY_COUNT: new_retry_count = item.retry_count + 1 if increase_count else item.retry_count item = replace( item, retry_count=new_retry_count, progress=0, current=False ) upload_queue.put_nowait(item) UploadQueueCache.cache(upload_queue) cur_upload_items[tid] = None for _ in range(RETRY_DELAY): time.sleep(1) if end_event.is_set(): break def cb(sm, item, tid, end_event: threading.Event, sz: int, cur: int) -> None: # Abort transfer if connection changed to metered after starting upload # or if athenad is shutting down to re-connect the websocket sm.update(0) metered = sm['deviceState'].networkMetered if metered and (not item.allow_cellular): raise AbortTransferException if end_event.is_set(): raise AbortTransferException cur_upload_items[tid] = replace(item, progress=cur / sz if sz else 1) def upload_handler(end_event: threading.Event) -> None: sm = messaging.SubMaster(['deviceState']) tid = threading.get_ident() while not end_event.is_set(): cur_upload_items[tid] = None try: cur_upload_items[tid] = item = replace(upload_queue.get(timeout=1), current=True) if item.id in cancelled_uploads: cancelled_uploads.remove(item.id) continue # Remove item if too old age = datetime.now() - datetime.fromtimestamp(item.created_at / 1000) if age.total_seconds() > MAX_AGE: cloudlog.event("athena.upload_handler.expired", item=item, error=True) continue # Check if uploading over metered connection is allowed sm.update(0) metered = sm['deviceState'].networkMetered network_type = sm['deviceState'].networkType.raw if metered and (not item.allow_cellular): 
retry_upload(tid, end_event, False) continue try: fn = item.path try: sz = os.path.getsize(fn) except OSError: sz = -1 cloudlog.event("athena.upload_handler.upload_start", fn=fn, sz=sz, network_type=network_type, metered=metered, retry_count=item.retry_count) response = _do_upload(item, partial(cb, sm, item, tid, end_event)) if response.status_code not in (200, 201, 401, 403, 412): cloudlog.event("athena.upload_handler.retry", status_code=response.status_code, fn=fn, sz=sz, network_type=network_type, metered=metered) retry_upload(tid, end_event) else: cloudlog.event("athena.upload_handler.success", fn=fn, sz=sz, network_type=network_type, metered=metered) UploadQueueCache.cache(upload_queue) except (requests.exceptions.Timeout, requests.exceptions.ConnectionError, requests.exceptions.SSLError): cloudlog.event("athena.upload_handler.timeout", fn=fn, sz=sz, network_type=network_type, metered=metered) retry_upload(tid, end_event) except AbortTransferException: cloudlog.event("athena.upload_handler.abort", fn=fn, sz=sz, network_type=network_type, metered=metered) retry_upload(tid, end_event, False) except queue.Empty: pass except Exception: cloudlog.exception("athena.upload_handler.exception") def _do_upload(upload_item: UploadItem, callback: Callable = None) -> requests.Response: path = upload_item.path compress = False # If file does not exist, but does exist without the .bz2 extension we will compress on the fly if not os.path.exists(path) and os.path.exists(strip_bz2_extension(path)): path = strip_bz2_extension(path) compress = True with open(path, "rb") as f: content = f.read() if compress: cloudlog.event("athena.upload_handler.compress", fn=path, fn_orig=upload_item.path) content = bz2.compress(content) with io.BytesIO(content) as data: return requests.put(upload_item.url, data=CallbackReader(data, callback, len(content)) if callback else data, headers={**upload_item.headers, 'Content-Length': str(len(content))}, timeout=30) # security: user should be able to request any message from their car @dispatcher.add_method def getMessage(service: str, timeout: int = 1000) -> dict: if service is None or service not in SERVICE_LIST: raise Exception("invalid service") socket = messaging.sub_sock(service, timeout=timeout) ret = messaging.recv_one(socket) if ret is None: raise TimeoutError # this is because capnp._DynamicStructReader doesn't have typing information return cast(dict, ret.to_dict()) @dispatcher.add_method def getVersion() -> dict[str, str]: build_metadata = get_build_metadata() return { "version": build_metadata.openpilot.version, "remote": build_metadata.openpilot.git_normalized_origin, "branch": build_metadata.channel, "commit": build_metadata.openpilot.git_commit, } @dispatcher.add_method def setNavDestination(latitude: int = 0, longitude: int = 0, place_name: str = None, place_details: str = None) -> dict[str, int]: destination = { "latitude": latitude, "longitude": longitude, "place_name": place_name, "place_details": place_details, } Params().put("NavDestination", json.dumps(destination)) return {"success": 1} def scan_dir(path: str, prefix: str) -> list[str]: files = [] # only walk directories that match the prefix # (glob and friends traverse entire dir tree) with os.scandir(path) as i: for e in i: rel_path = os.path.relpath(e.path, Paths.log_root()) if e.is_dir(follow_symlinks=False): # add trailing slash rel_path = os.path.join(rel_path, '') # if prefix is a partial dir name, current dir will start with prefix # if prefix is a partial file name, prefix with start with dir 
name if rel_path.startswith(prefix) or prefix.startswith(rel_path): files.extend(scan_dir(e.path, prefix)) else: if rel_path.startswith(prefix): files.append(rel_path) return files @dispatcher.add_method def listDataDirectory(prefix='') -> list[str]: return scan_dir(Paths.log_root(), prefix) @dispatcher.add_method def uploadFileToUrl(fn: str, url: str, headers: dict[str, str]) -> UploadFilesToUrlResponse: # this is because mypy doesn't understand that the decorator doesn't change the return type response: UploadFilesToUrlResponse = uploadFilesToUrls([{ "fn": fn, "url": url, "headers": headers, }]) return response @dispatcher.add_method def uploadFilesToUrls(files_data: list[UploadFileDict]) -> UploadFilesToUrlResponse: files = map(UploadFile.from_dict, files_data) items: list[UploadItemDict] = [] failed: list[str] = [] for file in files: if len(file.fn) == 0 or file.fn[0] == '/' or '..' in file.fn or len(file.url) == 0: failed.append(file.fn) continue path = os.path.join(Paths.log_root(), file.fn) if not os.path.exists(path) and not os.path.exists(strip_bz2_extension(path)): failed.append(file.fn) continue # Skip item if already in queue url = file.url.split('?')[0] if any(url == item['url'].split('?')[0] for item in listUploadQueue()): continue item = UploadItem( path=path, url=file.url, headers=file.headers, created_at=int(time.time() * 1000), id=None, allow_cellular=file.allow_cellular, ) upload_id = hashlib.sha1(str(item).encode()).hexdigest() item = replace(item, id=upload_id) upload_queue.put_nowait(item) items.append(asdict(item)) UploadQueueCache.cache(upload_queue) resp: UploadFilesToUrlResponse = {"enqueued": len(items), "items": items} if failed: resp["failed"] = failed return resp @dispatcher.add_method def listUploadQueue() -> list[UploadItemDict]: items = list(upload_queue.queue) + list(cur_upload_items.values()) return [asdict(i) for i in items if (i is not None) and (i.id not in cancelled_uploads)] @dispatcher.add_method def cancelUpload(upload_id: str | list[str]) -> dict[str, int | str]: if not isinstance(upload_id, list): upload_id = [upload_id] uploading_ids = {item.id for item in list(upload_queue.queue)} cancelled_ids = uploading_ids.intersection(upload_id) if len(cancelled_ids) == 0: return {"success": 0, "error": "not found"} cancelled_uploads.update(cancelled_ids) return {"success": 1} @dispatcher.add_method def setRouteViewed(route: str) -> dict[str, int | str]: # maintain a list of the last 10 routes viewed in connect params = Params() r = params.get("AthenadRecentlyViewedRoutes", encoding="utf8") routes = [] if r is None else r.split(",") routes.append(route) # remove duplicates routes = list(dict.fromkeys(routes)) params.put("AthenadRecentlyViewedRoutes", ",".join(routes[-10:])) return {"success": 1} def startLocalProxy(global_end_event: threading.Event, remote_ws_uri: str, local_port: int) -> dict[str, int]: try: if local_port not in LOCAL_PORT_WHITELIST: raise Exception("Requested local port not whitelisted") cloudlog.debug("athena.startLocalProxy.starting") dongle_id = Params().get("DongleId").decode('utf8') identity_token = Api(dongle_id).get_token() ws = create_connection(remote_ws_uri, cookie="jwt=" + identity_token, enable_multithread=True) # Set TOS to keep connection responsive while under load. 
# DSCP of 36/HDD_LINUX_AC_VI with the minimum delay flag ws.sock.setsockopt(socket.IPPROTO_IP, socket.IP_TOS, 0x90) ssock, csock = socket.socketpair() local_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) local_sock.connect(('127.0.0.1', local_port)) local_sock.setblocking(False) proxy_end_event = threading.Event() threads = [ threading.Thread(target=ws_proxy_recv, args=(ws, local_sock, ssock, proxy_end_event, global_end_event)), threading.Thread(target=ws_proxy_send, args=(ws, local_sock, csock, proxy_end_event)) ] for thread in threads: thread.start() cloudlog.debug("athena.startLocalProxy.started") return {"success": 1} except Exception as e: cloudlog.exception("athenad.startLocalProxy.exception") raise e @dispatcher.add_method def getPublicKey() -> str | None: if not os.path.isfile(Paths.persist_root() + '/comma/id_rsa.pub'): return None with open(Paths.persist_root() + '/comma/id_rsa.pub') as f: return f.read() @dispatcher.add_method def getSshAuthorizedKeys() -> str: return Params().get("GithubSshKeys", encoding='utf8') or '' @dispatcher.add_method def getGithubUsername() -> str: return Params().get("GithubUsername", encoding='utf8') or '' @dispatcher.add_method def getSimInfo(): return HARDWARE.get_sim_info() @dispatcher.add_method def getNetworkType(): return HARDWARE.get_network_type() @dispatcher.add_method def getNetworkMetered() -> bool: network_type = HARDWARE.get_network_type() return HARDWARE.get_network_metered(network_type) @dispatcher.add_method def getNetworks(): return HARDWARE.get_networks() @dispatcher.add_method def takeSnapshot() -> str | dict[str, str] | None: from openpilot.system.camerad.snapshot.snapshot import jpeg_write, snapshot ret = snapshot() if ret is not None: def b64jpeg(x): if x is not None: f = io.BytesIO() jpeg_write(f, x) return base64.b64encode(f.getvalue()).decode("utf-8") else: return None return {'jpegBack': b64jpeg(ret[0]), 'jpegFront': b64jpeg(ret[1])} else: raise Exception("not available while camerad is started") def get_logs_to_send_sorted() -> list[str]: # TODO: scan once then use inotify to detect file creation/deletion curr_time = int(time.time()) logs = [] for log_entry in os.listdir(Paths.swaglog_root()): log_path = os.path.join(Paths.swaglog_root(), log_entry) time_sent = 0 try: value = getxattr(log_path, LOG_ATTR_NAME) if value is not None: time_sent = int.from_bytes(value, sys.byteorder) except (ValueError, TypeError): pass # assume send failed and we lost the response if sent more than one hour ago if not time_sent or curr_time - time_sent > 3600: logs.append(log_entry) # excluding most recent (active) log file return sorted(logs)[:-1] def log_handler(end_event: threading.Event) -> None: if PC: return log_files = [] last_scan = 0. 
while not end_event.is_set(): try: curr_scan = time.monotonic() if curr_scan - last_scan > 10: log_files = get_logs_to_send_sorted() last_scan = curr_scan # send one log curr_log = None if len(log_files) > 0: log_entry = log_files.pop() # newest log file cloudlog.debug(f"athena.log_handler.forward_request {log_entry}") try: curr_time = int(time.time()) log_path = os.path.join(Paths.swaglog_root(), log_entry) setxattr(log_path, LOG_ATTR_NAME, int.to_bytes(curr_time, 4, sys.byteorder)) with open(log_path) as f: jsonrpc = { "method": "forwardLogs", "params": { "logs": f.read() }, "jsonrpc": "2.0", "id": log_entry } low_priority_send_queue.put_nowait(json.dumps(jsonrpc)) curr_log = log_entry except OSError: pass # file could be deleted by log rotation # wait for response up to ~100 seconds # always read queue at least once to process any old responses that arrive for _ in range(100): if end_event.is_set(): break try: log_resp = json.loads(log_recv_queue.get(timeout=1)) log_entry = log_resp.get("id") log_success = "result" in log_resp and log_resp["result"].get("success") cloudlog.debug(f"athena.log_handler.forward_response {log_entry} {log_success}") if log_entry and log_success: log_path = os.path.join(Paths.swaglog_root(), log_entry) try: setxattr(log_path, LOG_ATTR_NAME, LOG_ATTR_VALUE_MAX_UNIX_TIME) except OSError: pass # file could be deleted by log rotation if curr_log == log_entry: break except queue.Empty: if curr_log is None: break except Exception: cloudlog.exception("athena.log_handler.exception") def stat_handler(end_event: threading.Event) -> None: STATS_DIR = Paths.stats_root() while not end_event.is_set(): last_scan = 0. curr_scan = time.monotonic() try: if curr_scan - last_scan > 10: stat_filenames = list(filter(lambda name: not name.startswith(tempfile.gettempprefix()), os.listdir(STATS_DIR))) if len(stat_filenames) > 0: stat_path = os.path.join(STATS_DIR, stat_filenames[0]) with open(stat_path) as f: jsonrpc = { "method": "storeStats", "params": { "stats": f.read() }, "jsonrpc": "2.0", "id": stat_filenames[0] } low_priority_send_queue.put_nowait(json.dumps(jsonrpc)) os.remove(stat_path) last_scan = curr_scan except Exception: cloudlog.exception("athena.stat_handler.exception") time.sleep(0.1) def ws_proxy_recv(ws: WebSocket, local_sock: socket.socket, ssock: socket.socket, end_event: threading.Event, global_end_event: threading.Event) -> None: while not (end_event.is_set() or global_end_event.is_set()): try: r = select.select((ws.sock,), (), (), 30) if r[0]: data = ws.recv() if isinstance(data, str): data = data.encode("utf-8") local_sock.sendall(data) except WebSocketTimeoutException: pass except Exception: cloudlog.exception("athenad.ws_proxy_recv.exception") break cloudlog.debug("athena.ws_proxy_recv closing sockets") ssock.close() local_sock.close() ws.close() cloudlog.debug("athena.ws_proxy_recv done closing sockets") end_event.set() def ws_proxy_send(ws: WebSocket, local_sock: socket.socket, signal_sock: socket.socket, end_event: threading.Event) -> None: while not end_event.is_set(): try: r, _, _ = select.select((local_sock, signal_sock), (), ()) if r: if r[0].fileno() == signal_sock.fileno(): # got end signal from ws_proxy_recv end_event.set() break data = local_sock.recv(4096) if not data: # local_sock is dead end_event.set() break ws.send(data, ABNF.OPCODE_BINARY) except Exception: cloudlog.exception("athenad.ws_proxy_send.exception") end_event.set() cloudlog.debug("athena.ws_proxy_send closing sockets") signal_sock.close() cloudlog.debug("athena.ws_proxy_send done 
closing sockets") def ws_recv(ws: WebSocket, end_event: threading.Event) -> None: last_ping = int(time.monotonic() * 1e9) while not end_event.is_set(): try: opcode, data = ws.recv_data(control_frame=True) if opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY): if opcode == ABNF.OPCODE_TEXT: data = data.decode("utf-8") recv_queue.put_nowait(data) elif opcode == ABNF.OPCODE_PING: last_ping = int(time.monotonic() * 1e9) Params().put("LastAthenaPingTime", str(last_ping)) except WebSocketTimeoutException: ns_since_last_ping = int(time.monotonic() * 1e9) - last_ping if ns_since_last_ping > RECONNECT_TIMEOUT_S * 1e9: cloudlog.exception("athenad.ws_recv.timeout") end_event.set() except Exception: cloudlog.exception("athenad.ws_recv.exception") end_event.set() def ws_send(ws: WebSocket, end_event: threading.Event) -> None: while not end_event.is_set(): try: try: data = send_queue.get_nowait() except queue.Empty: data = low_priority_send_queue.get(timeout=1) for i in range(0, len(data), WS_FRAME_SIZE): frame = data[i:i+WS_FRAME_SIZE] last = i + WS_FRAME_SIZE >= len(data) opcode = ABNF.OPCODE_TEXT if i == 0 else ABNF.OPCODE_CONT ws.send_frame(ABNF.create_frame(frame, opcode, last)) except queue.Empty: pass except Exception: cloudlog.exception("athenad.ws_send.exception") end_event.set() def ws_manage(ws: WebSocket, end_event: threading.Event) -> None: params = Params() onroad_prev = None sock = ws.sock while True: onroad = params.get_bool("IsOnroad") if onroad != onroad_prev: onroad_prev = onroad if sock is not None: # While not sending data, onroad, we can expect to time out in 7 + (7 * 2) = 21s # offroad, we can expect to time out in 30 + (10 * 3) = 60s # FIXME: TCP_USER_TIMEOUT is effectively 2x for some reason (32s), so it's mostly unused sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_USER_TIMEOUT, 16000 if onroad else 0) sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 7 if onroad else 30) sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 7 if onroad else 10) sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 2 if onroad else 3) if end_event.wait(5): break def backoff(retries: int) -> int: return random.randrange(0, min(128, int(2 ** retries))) def main(exit_event: threading.Event = None): try: set_core_affinity([0, 1, 2, 3]) except Exception: cloudlog.exception("failed to set core affinity") params = Params() dongle_id = params.get("DongleId", encoding='utf-8') UploadQueueCache.initialize(upload_queue) ws_uri = ATHENA_HOST + "/ws/v2/" + dongle_id api = Api(dongle_id) conn_start = None conn_retries = 0 while exit_event is None or not exit_event.is_set(): try: if conn_start is None: conn_start = time.monotonic() cloudlog.event("athenad.main.connecting_ws", ws_uri=ws_uri, retries=conn_retries) ws = create_connection(ws_uri, cookie="jwt=" + api.get_token(), enable_multithread=True, timeout=30.0) cloudlog.event("athenad.main.connected_ws", ws_uri=ws_uri, retries=conn_retries, duration=time.monotonic() - conn_start) conn_start = None conn_retries = 0 cur_upload_items.clear() handle_long_poll(ws, exit_event) except (KeyboardInterrupt, SystemExit): break except (ConnectionError, TimeoutError, WebSocketException): conn_retries += 1 params.remove("LastAthenaPingTime") except Exception: cloudlog.exception("athenad.main.exception") conn_retries += 1 params.remove("LastAthenaPingTime") time.sleep(backoff(conn_retries)) if __name__ == "__main__": main()
2301_81045437/openpilot
system/athena/athenad.py
Python
mit
26,914
#!/usr/bin/env python3 import time from multiprocessing import Process from openpilot.common.params import Params from openpilot.system.manager.process import launcher from openpilot.common.swaglog import cloudlog from openpilot.system.hardware import HARDWARE from openpilot.system.version import get_build_metadata ATHENA_MGR_PID_PARAM = "AthenadPid" def main(): params = Params() dongle_id = params.get("DongleId").decode('utf-8') build_metadata = get_build_metadata() cloudlog.bind_global(dongle_id=dongle_id, version=build_metadata.openpilot.version, origin=build_metadata.openpilot.git_normalized_origin, branch=build_metadata.channel, commit=build_metadata.openpilot.git_commit, dirty=build_metadata.openpilot.is_dirty, device=HARDWARE.get_device_type()) try: while 1: cloudlog.info("starting athena daemon") proc = Process(name='athenad', target=launcher, args=('system.athena.athenad', 'athenad')) proc.start() proc.join() cloudlog.event("athenad exited", exitcode=proc.exitcode) time.sleep(5) except Exception: cloudlog.exception("manage_athenad.exception") finally: params.remove(ATHENA_MGR_PID_PARAM) if __name__ == '__main__': main()
2301_81045437/openpilot
system/athena/manage_athenad.py
Python
mit
1,358
#!/usr/bin/env python3 import time import json import jwt from pathlib import Path from datetime import datetime, timedelta from openpilot.common.api import api_get from openpilot.common.params import Params from openpilot.common.spinner import Spinner from openpilot.selfdrive.controls.lib.alertmanager import set_offroad_alert from openpilot.system.hardware import HARDWARE, PC from openpilot.system.hardware.hw import Paths from openpilot.common.swaglog import cloudlog UNREGISTERED_DONGLE_ID = "UnregisteredDevice" def is_registered_device() -> bool: dongle = Params().get("DongleId", encoding='utf-8') return dongle not in (None, UNREGISTERED_DONGLE_ID) def register(show_spinner=False) -> str | None: params = Params() IMEI = params.get("IMEI", encoding='utf8') HardwareSerial = params.get("HardwareSerial", encoding='utf8') dongle_id: str | None = params.get("DongleId", encoding='utf8') needs_registration = None in (IMEI, HardwareSerial, dongle_id) pubkey = Path(Paths.persist_root()+"/comma/id_rsa.pub") if not pubkey.is_file(): dongle_id = UNREGISTERED_DONGLE_ID cloudlog.warning(f"missing public key: {pubkey}") elif needs_registration: if show_spinner: spinner = Spinner() spinner.update("registering device") # Create registration token, in the future, this key will make JWTs directly with open(Paths.persist_root()+"/comma/id_rsa.pub") as f1, open(Paths.persist_root()+"/comma/id_rsa") as f2: public_key = f1.read() private_key = f2.read() # Block until we get the imei serial = HARDWARE.get_serial() start_time = time.monotonic() imei1: str | None = None imei2: str | None = None while imei1 is None and imei2 is None: try: imei1, imei2 = HARDWARE.get_imei(0), HARDWARE.get_imei(1) except Exception: cloudlog.exception("Error getting imei, trying again...") time.sleep(1) if time.monotonic() - start_time > 60 and show_spinner: spinner.update(f"registering device - serial: {serial}, IMEI: ({imei1}, {imei2})") params.put("IMEI", imei1) params.put("HardwareSerial", serial) backoff = 0 start_time = time.monotonic() while True: try: register_token = jwt.encode({'register': True, 'exp': datetime.utcnow() + timedelta(hours=1)}, private_key, algorithm='RS256') cloudlog.info("getting pilotauth") resp = api_get("v2/pilotauth/", method='POST', timeout=15, imei=imei1, imei2=imei2, serial=serial, public_key=public_key, register_token=register_token) if resp.status_code in (402, 403): cloudlog.info(f"Unable to register device, got {resp.status_code}") dongle_id = UNREGISTERED_DONGLE_ID else: dongleauth = json.loads(resp.text) dongle_id = dongleauth["dongle_id"] break except Exception: cloudlog.exception("failed to authenticate") backoff = min(backoff + 1, 15) time.sleep(backoff) if time.monotonic() - start_time > 60 and show_spinner: spinner.update(f"registering device - serial: {serial}, IMEI: ({imei1}, {imei2})") if show_spinner: spinner.close() if dongle_id: params.put("DongleId", dongle_id) set_offroad_alert("Offroad_UnofficialHardware", (dongle_id == UNREGISTERED_DONGLE_ID) and not PC) return dongle_id if __name__ == "__main__": print(register())
2301_81045437/openpilot
system/athena/registration.py
Python
mit
3,443
import http.server import socket class MockResponse: def __init__(self, json, status_code): self.json = json self.text = json self.status_code = status_code class EchoSocket: def __init__(self, port): self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.socket.bind(('127.0.0.1', port)) self.socket.listen(1) def run(self): conn, _ = self.socket.accept() conn.settimeout(5.0) try: while True: data = conn.recv(4096) if data: print(f'EchoSocket got {data}') conn.sendall(data) else: break finally: conn.shutdown(0) conn.close() self.socket.shutdown(0) self.socket.close() class MockApi: def __init__(self, dongle_id): pass def get_token(self): return "fake-token" class MockWebsocket: sock = socket.socket() def __init__(self, recv_queue, send_queue): self.recv_queue = recv_queue self.send_queue = send_queue def recv(self): data = self.recv_queue.get() if isinstance(data, Exception): raise data return data def send(self, data, opcode): self.send_queue.put_nowait((data, opcode)) def close(self): pass class HTTPRequestHandler(http.server.SimpleHTTPRequestHandler): def do_PUT(self): length = int(self.headers['Content-Length']) self.rfile.read(length) self.send_response(201, "Created") self.end_headers()
2301_81045437/openpilot
system/athena/tests/helpers.py
Python
mit
1,446
Import('env', 'arch', 'cereal', 'messaging', 'common', 'gpucommon', 'visionipc') libs = ['m', 'pthread', common, 'jpeg', 'OpenCL', 'yuv', cereal, messaging, 'zmq', 'capnp', 'kj', visionipc, gpucommon, 'atomic'] camera_obj = env.Object(['cameras/camera_qcom2.cc', 'cameras/camera_common.cc', 'cameras/camera_util.cc', 'sensors/ar0231.cc', 'sensors/ox03c10.cc', 'sensors/os04c10.cc']) env.Program('camerad', ['main.cc', camera_obj], LIBS=libs) if GetOption("extras") and arch == "x86_64": env.Program('test/test_ae_gray', ['test/test_ae_gray.cc', camera_obj], LIBS=libs)
2301_81045437/openpilot
system/camerad/SConscript
Python
mit
599
#include "system/camerad/cameras/camera_common.h" #include <cassert> #include <string> #include "third_party/libyuv/include/libyuv.h" #include <jpeglib.h> #include "common/clutil.h" #include "common/swaglog.h" #include "third_party/linux/include/msm_media_info.h" #include "system/camerad/cameras/camera_qcom2.h" #ifdef QCOM2 #include "CL/cl_ext_qcom.h" #endif ExitHandler do_exit; class ImgProc { public: ImgProc(cl_device_id device_id, cl_context context, const CameraBuf *b, const CameraState *s, int buf_width, int uv_offset) { char args[4096]; const SensorInfo *ci = s->ci.get(); snprintf(args, sizeof(args), "-cl-fast-relaxed-math -cl-denorms-are-zero -Isensors " "-DFRAME_WIDTH=%d -DFRAME_HEIGHT=%d -DFRAME_STRIDE=%d -DFRAME_OFFSET=%d " "-DRGB_WIDTH=%d -DRGB_HEIGHT=%d -DYUV_STRIDE=%d -DUV_OFFSET=%d " "-DSENSOR_ID=%hu -DHDR_OFFSET=%d -DVIGNETTING=%d ", ci->frame_width, ci->frame_height, ci->hdr_offset > 0 ? ci->frame_stride * 2 : ci->frame_stride, ci->frame_offset, b->rgb_width, b->rgb_height, buf_width, uv_offset, static_cast<unsigned short>(ci->image_sensor), ci->hdr_offset, s->camera_num == 1); const char *cl_file = "cameras/process_raw.cl"; cl_program prg_imgproc = cl_program_from_file(context, device_id, cl_file, args); krnl_ = CL_CHECK_ERR(clCreateKernel(prg_imgproc, "process_raw", &err)); CL_CHECK(clReleaseProgram(prg_imgproc)); } void queue(cl_command_queue q, cl_mem cam_buf_cl, cl_mem buf_cl, int width, int height, cl_event *imgproc_event, int expo_time) { CL_CHECK(clSetKernelArg(krnl_, 0, sizeof(cl_mem), &cam_buf_cl)); CL_CHECK(clSetKernelArg(krnl_, 1, sizeof(cl_mem), &buf_cl)); CL_CHECK(clSetKernelArg(krnl_, 2, sizeof(cl_int), &expo_time)); const size_t globalWorkSize[] = {size_t(width / 2), size_t(height / 2)}; const int imgproc_local_worksize = 16; const size_t localWorkSize[] = {imgproc_local_worksize, imgproc_local_worksize}; CL_CHECK(clEnqueueNDRangeKernel(q, krnl_, 2, NULL, globalWorkSize, localWorkSize, 0, 0, imgproc_event)); } ~ImgProc() { CL_CHECK(clReleaseKernel(krnl_)); } private: cl_kernel krnl_; }; void CameraBuf::init(cl_device_id device_id, cl_context context, CameraState *s, VisionIpcServer * v, int frame_cnt, VisionStreamType type) { vipc_server = v; stream_type = type; frame_buf_count = frame_cnt; const SensorInfo *ci = s->ci.get(); // RAW frame const int frame_size = (ci->frame_height + ci->extra_height) * ci->frame_stride; camera_bufs = std::make_unique<VisionBuf[]>(frame_buf_count); camera_bufs_metadata = std::make_unique<FrameMetadata[]>(frame_buf_count); for (int i = 0; i < frame_buf_count; i++) { camera_bufs[i].allocate(frame_size); camera_bufs[i].init_cl(device_id, context); } LOGD("allocated %d CL buffers", frame_buf_count); rgb_width = ci->frame_width; rgb_height = ci->hdr_offset > 0 ? (ci->frame_height - ci->hdr_offset) / 2 : ci->frame_height; int nv12_width = VENUS_Y_STRIDE(COLOR_FMT_NV12, rgb_width); int nv12_height = VENUS_Y_SCANLINES(COLOR_FMT_NV12, rgb_height); assert(nv12_width == VENUS_UV_STRIDE(COLOR_FMT_NV12, rgb_width)); assert(nv12_height/2 == VENUS_UV_SCANLINES(COLOR_FMT_NV12, rgb_height)); size_t nv12_uv_offset = nv12_width * nv12_height; // the encoder HW tells us the size it wants after setting it up. // TODO: VENUS_BUFFER_SIZE should give the size, but it's too small. dependent on encoder settings? size_t nv12_size = (rgb_width >= 2688 ? 
2900 : 2346)*nv12_width; vipc_server->create_buffers_with_sizes(stream_type, YUV_BUFFER_COUNT, false, rgb_width, rgb_height, nv12_size, nv12_width, nv12_uv_offset); LOGD("created %d YUV vipc buffers with size %dx%d", YUV_BUFFER_COUNT, nv12_width, nv12_height); imgproc = new ImgProc(device_id, context, this, s, nv12_width, nv12_uv_offset); const cl_queue_properties props[] = {0}; //CL_QUEUE_PRIORITY_KHR, CL_QUEUE_PRIORITY_HIGH_KHR, 0}; q = CL_CHECK_ERR(clCreateCommandQueueWithProperties(context, device_id, props, &err)); } CameraBuf::~CameraBuf() { for (int i = 0; i < frame_buf_count; i++) { camera_bufs[i].free(); } if (imgproc) delete imgproc; if (q) CL_CHECK(clReleaseCommandQueue(q)); } bool CameraBuf::acquire() { if (!safe_queue.try_pop(cur_buf_idx, 50)) return false; if (camera_bufs_metadata[cur_buf_idx].frame_id == -1) { LOGE("no frame data? wtf"); return false; } cur_frame_data = camera_bufs_metadata[cur_buf_idx]; cur_yuv_buf = vipc_server->get_buffer(stream_type); cur_camera_buf = &camera_bufs[cur_buf_idx]; double start_time = millis_since_boot(); cl_event event; imgproc->queue(q, camera_bufs[cur_buf_idx].buf_cl, cur_yuv_buf->buf_cl, rgb_width, rgb_height, &event, cur_frame_data.integ_lines); clWaitForEvents(1, &event); CL_CHECK(clReleaseEvent(event)); cur_frame_data.processing_time = (millis_since_boot() - start_time) / 1000.0; VisionIpcBufExtra extra = { cur_frame_data.frame_id, cur_frame_data.timestamp_sof, cur_frame_data.timestamp_eof, }; cur_yuv_buf->set_frame_id(cur_frame_data.frame_id); vipc_server->send(cur_yuv_buf, &extra); return true; } void CameraBuf::queue(size_t buf_idx) { safe_queue.push(buf_idx); } // common functions void fill_frame_data(cereal::FrameData::Builder &framed, const FrameMetadata &frame_data, CameraState *c) { framed.setFrameId(frame_data.frame_id); framed.setRequestId(frame_data.request_id); framed.setTimestampEof(frame_data.timestamp_eof); framed.setTimestampSof(frame_data.timestamp_sof); framed.setIntegLines(frame_data.integ_lines); framed.setGain(frame_data.gain); framed.setHighConversionGain(frame_data.high_conversion_gain); framed.setMeasuredGreyFraction(frame_data.measured_grey_fraction); framed.setTargetGreyFraction(frame_data.target_grey_fraction); framed.setProcessingTime(frame_data.processing_time); const float ev = c->cur_ev[frame_data.frame_id % 3]; const float perc = util::map_val(ev, c->ci->min_ev, c->ci->max_ev, 0.0f, 100.0f); framed.setExposureValPercent(perc); framed.setSensor(c->ci->image_sensor); } kj::Array<uint8_t> get_raw_frame_image(const CameraBuf *b) { const uint8_t *dat = (const uint8_t *)b->cur_camera_buf->addr; kj::Array<uint8_t> frame_image = kj::heapArray<uint8_t>(b->cur_camera_buf->len); uint8_t *resized_dat = frame_image.begin(); memcpy(resized_dat, dat, b->cur_camera_buf->len); return kj::mv(frame_image); } static kj::Array<capnp::byte> yuv420_to_jpeg(const CameraBuf *b, int thumbnail_width, int thumbnail_height) { int downscale = b->cur_yuv_buf->width / thumbnail_width; assert(downscale * thumbnail_height == b->cur_yuv_buf->height); int in_stride = b->cur_yuv_buf->stride; // make the buffer big enough. jpeg_write_raw_data requires 16-pixels aligned height to be used. 
std::unique_ptr<uint8[]> buf(new uint8_t[(thumbnail_width * ((thumbnail_height + 15) & ~15) * 3) / 2]); uint8_t *y_plane = buf.get(); uint8_t *u_plane = y_plane + thumbnail_width * thumbnail_height; uint8_t *v_plane = u_plane + (thumbnail_width * thumbnail_height) / 4; { // subsampled conversion from nv12 to yuv for (int hy = 0; hy < thumbnail_height/2; hy++) { for (int hx = 0; hx < thumbnail_width/2; hx++) { int ix = hx * downscale + (downscale-1)/2; int iy = hy * downscale + (downscale-1)/2; y_plane[(hy*2 + 0)*thumbnail_width + (hx*2 + 0)] = b->cur_yuv_buf->y[(iy*2 + 0) * in_stride + ix*2 + 0]; y_plane[(hy*2 + 0)*thumbnail_width + (hx*2 + 1)] = b->cur_yuv_buf->y[(iy*2 + 0) * in_stride + ix*2 + 1]; y_plane[(hy*2 + 1)*thumbnail_width + (hx*2 + 0)] = b->cur_yuv_buf->y[(iy*2 + 1) * in_stride + ix*2 + 0]; y_plane[(hy*2 + 1)*thumbnail_width + (hx*2 + 1)] = b->cur_yuv_buf->y[(iy*2 + 1) * in_stride + ix*2 + 1]; u_plane[hy*thumbnail_width/2 + hx] = b->cur_yuv_buf->uv[iy*in_stride + ix*2 + 0]; v_plane[hy*thumbnail_width/2 + hx] = b->cur_yuv_buf->uv[iy*in_stride + ix*2 + 1]; } } } struct jpeg_compress_struct cinfo; struct jpeg_error_mgr jerr; cinfo.err = jpeg_std_error(&jerr); jpeg_create_compress(&cinfo); uint8_t *thumbnail_buffer = nullptr; size_t thumbnail_len = 0; jpeg_mem_dest(&cinfo, &thumbnail_buffer, &thumbnail_len); cinfo.image_width = thumbnail_width; cinfo.image_height = thumbnail_height; cinfo.input_components = 3; jpeg_set_defaults(&cinfo); jpeg_set_colorspace(&cinfo, JCS_YCbCr); // configure sampling factors for yuv420. cinfo.comp_info[0].h_samp_factor = 2; // Y cinfo.comp_info[0].v_samp_factor = 2; cinfo.comp_info[1].h_samp_factor = 1; // U cinfo.comp_info[1].v_samp_factor = 1; cinfo.comp_info[2].h_samp_factor = 1; // V cinfo.comp_info[2].v_samp_factor = 1; cinfo.raw_data_in = TRUE; jpeg_set_quality(&cinfo, 50, TRUE); jpeg_start_compress(&cinfo, TRUE); JSAMPROW y[16], u[8], v[8]; JSAMPARRAY planes[3]{y, u, v}; for (int line = 0; line < cinfo.image_height; line += 16) { for (int i = 0; i < 16; ++i) { y[i] = y_plane + (line + i) * cinfo.image_width; if (i % 2 == 0) { int offset = (cinfo.image_width / 2) * ((i + line) / 2); u[i / 2] = u_plane + offset; v[i / 2] = v_plane + offset; } } jpeg_write_raw_data(&cinfo, planes, 16); } jpeg_finish_compress(&cinfo); jpeg_destroy_compress(&cinfo); kj::Array<capnp::byte> dat = kj::heapArray<capnp::byte>(thumbnail_buffer, thumbnail_len); free(thumbnail_buffer); return dat; } static void publish_thumbnail(PubMaster *pm, const CameraBuf *b) { auto thumbnail = yuv420_to_jpeg(b, b->rgb_width / 4, b->rgb_height / 4); if (thumbnail.size() == 0) return; MessageBuilder msg; auto thumbnaild = msg.initEvent().initThumbnail(); thumbnaild.setFrameId(b->cur_frame_data.frame_id); thumbnaild.setTimestampEof(b->cur_frame_data.timestamp_eof); thumbnaild.setThumbnail(thumbnail); pm->send("thumbnail", msg); } float set_exposure_target(const CameraBuf *b, Rect ae_xywh, int x_skip, int y_skip) { int lum_med; uint32_t lum_binning[256] = {0}; const uint8_t *pix_ptr = b->cur_yuv_buf->y; unsigned int lum_total = 0; for (int y = ae_xywh.y; y < ae_xywh.y + ae_xywh.h; y += y_skip) { for (int x = ae_xywh.x; x < ae_xywh.x + ae_xywh.w; x += x_skip) { uint8_t lum = pix_ptr[(y * b->rgb_width) + x]; lum_binning[lum]++; lum_total += 1; } } // Find mean lumimance value unsigned int lum_cur = 0; for (lum_med = 255; lum_med >= 0; lum_med--) { lum_cur += lum_binning[lum_med]; if (lum_cur >= lum_total / 2) { break; } } return lum_med / 256.0; } void *processing_thread(MultiCameraState 
*cameras, CameraState *cs, process_thread_cb callback) { const char *thread_name = nullptr; if (cs == &cameras->road_cam) { thread_name = "RoadCamera"; } else if (cs == &cameras->driver_cam) { thread_name = "DriverCamera"; } else { thread_name = "WideRoadCamera"; } util::set_thread_name(thread_name); uint32_t cnt = 0; while (!do_exit) { if (!cs->buf.acquire()) continue; callback(cameras, cs, cnt); if (cs == &(cameras->road_cam) && cameras->pm && cnt % 100 == 3) { // this takes 10ms??? publish_thumbnail(cameras->pm, &(cs->buf)); } ++cnt; } return NULL; } std::thread start_process_thread(MultiCameraState *cameras, CameraState *cs, process_thread_cb callback) { return std::thread(processing_thread, cameras, cs, callback); } void camerad_thread() { cl_device_id device_id = cl_get_device_id(CL_DEVICE_TYPE_DEFAULT); #ifdef QCOM2 const cl_context_properties props[] = {CL_CONTEXT_PRIORITY_HINT_QCOM, CL_PRIORITY_HINT_HIGH_QCOM, 0}; cl_context context = CL_CHECK_ERR(clCreateContext(props, 1, &device_id, NULL, NULL, &err)); #else cl_context context = CL_CHECK_ERR(clCreateContext(NULL, 1, &device_id, NULL, NULL, &err)); #endif { MultiCameraState cameras = {}; VisionIpcServer vipc_server("camerad", device_id, context); cameras_open(&cameras); cameras_init(&vipc_server, &cameras, device_id, context); vipc_server.start_listener(); cameras_run(&cameras); } CL_CHECK(clReleaseContext(context)); } int open_v4l_by_name_and_index(const char name[], int index, int flags) { for (int v4l_index = 0; /**/; ++v4l_index) { std::string v4l_name = util::read_file(util::string_format("/sys/class/video4linux/v4l-subdev%d/name", v4l_index)); if (v4l_name.empty()) return -1; if (v4l_name.find(name) == 0) { if (index == 0) { return HANDLE_EINTR(open(util::string_format("/dev/v4l-subdev%d", v4l_index).c_str(), flags)); } index--; } } }
2301_81045437/openpilot
system/camerad/cameras/camera_common.cc
C++
mit
12,784
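The set_exposure_target() routine in camera_common.cc above builds a 256-bin histogram over a subsampled window of the Y plane and walks it down from 255 until half of the samples are covered, i.e. it returns an approximate median luminance normalized to [0, 1). A minimal standalone sketch of that idea follows; the buffer contents and dimensions are made up for illustration and are not taken from camerad.

#include <cstdint>
#include <cstdio>
#include <vector>

// Median luminance of a sub-rectangle of an 8-bit Y plane, sampled on a
// coarse grid. Mirrors the histogram walk in set_exposure_target(): count
// samples per luminance bin, then scan from 255 downwards until half of the
// samples have been covered.
static float median_luminance(const uint8_t *y_plane, int stride,
                              int x0, int y0, int w, int h,
                              int x_skip, int y_skip) {
  uint32_t bins[256] = {0};
  uint32_t total = 0;
  for (int y = y0; y < y0 + h; y += y_skip) {
    for (int x = x0; x < x0 + w; x += x_skip) {
      bins[y_plane[y * stride + x]]++;
      total++;
    }
  }
  uint32_t seen = 0;
  int med = 255;
  for (; med >= 0; med--) {
    seen += bins[med];
    if (seen >= total / 2) break;
  }
  return med / 256.0f;  // normalized the same way the AE target is
}

int main() {
  // Synthetic 64x64 gray frame at luminance 96 (illustrative values only).
  const int W = 64, H = 64;
  std::vector<uint8_t> y(W * H, 96);
  printf("median grey fraction: %.3f\n",
         median_luminance(y.data(), W, 0, 0, W, H, 2, 2));
  return 0;
}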
#pragma once #include <fcntl.h> #include <memory> #include <thread> #include "cereal/messaging/messaging.h" #include "cereal/visionipc/visionipc_server.h" #include "common/queue.h" #include "common/util.h" const int YUV_BUFFER_COUNT = 20; enum CameraType { RoadCam = 0, DriverCam, WideRoadCam }; // for debugging const bool env_disable_road = getenv("DISABLE_ROAD") != NULL; const bool env_disable_wide_road = getenv("DISABLE_WIDE_ROAD") != NULL; const bool env_disable_driver = getenv("DISABLE_DRIVER") != NULL; const bool env_debug_frames = getenv("DEBUG_FRAMES") != NULL; const bool env_log_raw_frames = getenv("LOG_RAW_FRAMES") != NULL; const bool env_ctrl_exp_from_params = getenv("CTRL_EXP_FROM_PARAMS") != NULL; typedef struct FrameMetadata { uint32_t frame_id; uint32_t request_id; // Timestamps uint64_t timestamp_sof; uint64_t timestamp_eof; // Exposure unsigned int integ_lines; bool high_conversion_gain; float gain; float measured_grey_fraction; float target_grey_fraction; float processing_time; } FrameMetadata; struct MultiCameraState; class CameraState; class ImgProc; class CameraBuf { private: VisionIpcServer *vipc_server; ImgProc *imgproc = nullptr; VisionStreamType stream_type; int cur_buf_idx; SafeQueue<int> safe_queue; int frame_buf_count; public: cl_command_queue q; FrameMetadata cur_frame_data; VisionBuf *cur_yuv_buf; VisionBuf *cur_camera_buf; std::unique_ptr<VisionBuf[]> camera_bufs; std::unique_ptr<FrameMetadata[]> camera_bufs_metadata; int rgb_width, rgb_height; CameraBuf() = default; ~CameraBuf(); void init(cl_device_id device_id, cl_context context, CameraState *s, VisionIpcServer * v, int frame_cnt, VisionStreamType type); bool acquire(); void queue(size_t buf_idx); }; typedef void (*process_thread_cb)(MultiCameraState *s, CameraState *c, int cnt); void fill_frame_data(cereal::FrameData::Builder &framed, const FrameMetadata &frame_data, CameraState *c); kj::Array<uint8_t> get_raw_frame_image(const CameraBuf *b); float set_exposure_target(const CameraBuf *b, Rect ae_xywh, int x_skip, int y_skip); std::thread start_process_thread(MultiCameraState *cameras, CameraState *cs, process_thread_cb callback); void cameras_init(VisionIpcServer *v, MultiCameraState *s, cl_device_id device_id, cl_context ctx); void cameras_open(MultiCameraState *s); void cameras_run(MultiCameraState *s); void cameras_close(MultiCameraState *s); void camerad_thread(); int open_v4l_by_name_and_index(const char name[], int index = 0, int flags = O_RDWR | O_NONBLOCK);
2301_81045437/openpilot
system/camerad/cameras/camera_common.h
C++
mit
2,577
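CameraBuf (declared above, implemented in camera_common.cc) lays its YUV output out as NV12: a padded Y plane followed by an interleaved UV plane starting at nv12_uv_offset, with stride and scanline padding coming from the VENUS_* macros. The sketch below only illustrates that addressing convention; align_up() with 128/32 is a simplified stand-in for those macros, and the 1928x1208 resolution is just an example, so the exact numbers will differ from the real buffers.

#include <cstddef>
#include <cstdio>

// Simplified stand-in for the VENUS_Y_STRIDE / VENUS_Y_SCANLINES macros:
// round up to an assumed alignment. The real macros encode encoder hardware
// requirements; 128 and 32 here are illustrative assumptions only.
static size_t align_up(size_t v, size_t a) { return (v + a - 1) & ~(a - 1); }

struct Nv12Layout {
  size_t stride;     // bytes per Y row (also bytes per UV row)
  size_t uv_offset;  // byte offset of the interleaved UV plane
  size_t size;       // minimum usable buffer size
};

static Nv12Layout nv12_layout(int width, int height) {
  Nv12Layout l;
  l.stride = align_up(width, 128);
  size_t scanlines = align_up(height, 32);
  l.uv_offset = l.stride * scanlines;                 // Y plane ends here
  l.size = l.uv_offset + l.stride * (scanlines / 2);  // UV plane is half height
  return l;
}

// Byte offsets of pixel (x, y): its Y sample plus the shared U/V pair.
static void nv12_addr(const Nv12Layout &l, int x, int y,
                      size_t *y_off, size_t *u_off, size_t *v_off) {
  *y_off = (size_t)y * l.stride + x;
  size_t uv_row = (size_t)(y / 2) * l.stride;
  *u_off = l.uv_offset + uv_row + (x & ~1);      // U at the even byte
  *v_off = l.uv_offset + uv_row + (x & ~1) + 1;  // V right after it
}

int main() {
  Nv12Layout l = nv12_layout(1928, 1208);  // example resolution only
  size_t yo, uo, vo;
  nv12_addr(l, 100, 50, &yo, &uo, &vo);
  printf("stride=%zu uv_offset=%zu size=%zu  y@%zu u@%zu v@%zu\n",
         l.stride, l.uv_offset, l.size, yo, uo, vo);
  return 0;
}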
#include "system/camerad/cameras/camera_qcom2.h" #include <poll.h> #include <sys/ioctl.h> #include <algorithm> #include <cassert> #include <cerrno> #include <cmath> #include <cstring> #include <string> #include <vector> #include "media/cam_defs.h" #include "media/cam_isp.h" #include "media/cam_isp_ife.h" #include "media/cam_req_mgr.h" #include "media/cam_sensor_cmn_header.h" #include "media/cam_sync.h" #include "common/swaglog.h" const int MIPI_SETTLE_CNT = 33; // Calculated by camera_freqs.py // For debugging: // echo "4294967295" > /sys/module/cam_debug_util/parameters/debug_mdl extern ExitHandler do_exit; int CameraState::clear_req_queue() { struct cam_req_mgr_flush_info req_mgr_flush_request = {0}; req_mgr_flush_request.session_hdl = session_handle; req_mgr_flush_request.link_hdl = link_handle; req_mgr_flush_request.flush_type = CAM_REQ_MGR_FLUSH_TYPE_ALL; int ret = do_cam_control(multi_cam_state->video0_fd, CAM_REQ_MGR_FLUSH_REQ, &req_mgr_flush_request, sizeof(req_mgr_flush_request)); // LOGD("flushed all req: %d", ret); return ret; } // ************** high level camera helpers **************** void CameraState::sensors_start() { if (!enabled) return; LOGD("starting sensor %d", camera_num); sensors_i2c(ci->start_reg_array.data(), ci->start_reg_array.size(), CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG, ci->data_word); } void CameraState::sensors_poke(int request_id) { uint32_t cam_packet_handle = 0; int size = sizeof(struct cam_packet); auto pkt = mm.alloc<struct cam_packet>(size, &cam_packet_handle); pkt->num_cmd_buf = 0; pkt->kmd_cmd_buf_index = -1; pkt->header.size = size; pkt->header.op_code = CAM_SENSOR_PACKET_OPCODE_SENSOR_NOP; pkt->header.request_id = request_id; int ret = device_config(sensor_fd, session_handle, sensor_dev_handle, cam_packet_handle); if (ret != 0) { LOGE("** sensor %d FAILED poke, disabling", camera_num); enabled = false; return; } } void CameraState::sensors_i2c(const struct i2c_random_wr_payload* dat, int len, int op_code, bool data_word) { // LOGD("sensors_i2c: %d", len); uint32_t cam_packet_handle = 0; int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*1; auto pkt = mm.alloc<struct cam_packet>(size, &cam_packet_handle); pkt->num_cmd_buf = 1; pkt->kmd_cmd_buf_index = -1; pkt->header.size = size; pkt->header.op_code = op_code; struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload; buf_desc[0].size = buf_desc[0].length = sizeof(struct i2c_rdwr_header) + len*sizeof(struct i2c_random_wr_payload); buf_desc[0].type = CAM_CMD_BUF_I2C; auto i2c_random_wr = mm.alloc<struct cam_cmd_i2c_random_wr>(buf_desc[0].size, (uint32_t*)&buf_desc[0].mem_handle); i2c_random_wr->header.count = len; i2c_random_wr->header.op_code = 1; i2c_random_wr->header.cmd_type = CAMERA_SENSOR_CMD_TYPE_I2C_RNDM_WR; i2c_random_wr->header.data_type = data_word ? 
CAMERA_SENSOR_I2C_TYPE_WORD : CAMERA_SENSOR_I2C_TYPE_BYTE; i2c_random_wr->header.addr_type = CAMERA_SENSOR_I2C_TYPE_WORD; memcpy(i2c_random_wr->random_wr_payload, dat, len*sizeof(struct i2c_random_wr_payload)); int ret = device_config(sensor_fd, session_handle, sensor_dev_handle, cam_packet_handle); if (ret != 0) { LOGE("** sensor %d FAILED i2c, disabling", camera_num); enabled = false; return; } } static cam_cmd_power *power_set_wait(cam_cmd_power *power, int16_t delay_ms) { cam_cmd_unconditional_wait *unconditional_wait = (cam_cmd_unconditional_wait *)((char *)power + (sizeof(struct cam_cmd_power) + (power->count - 1) * sizeof(struct cam_power_settings))); unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT; unconditional_wait->delay = delay_ms; unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND; return (struct cam_cmd_power *)(unconditional_wait + 1); } int CameraState::sensors_init() { uint32_t cam_packet_handle = 0; int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*2; auto pkt = mm.alloc<struct cam_packet>(size, &cam_packet_handle); pkt->num_cmd_buf = 2; pkt->kmd_cmd_buf_index = -1; pkt->header.op_code = 0x1000000 | CAM_SENSOR_PACKET_OPCODE_SENSOR_PROBE; pkt->header.size = size; struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload; buf_desc[0].size = buf_desc[0].length = sizeof(struct cam_cmd_i2c_info) + sizeof(struct cam_cmd_probe); buf_desc[0].type = CAM_CMD_BUF_LEGACY; auto i2c_info = mm.alloc<struct cam_cmd_i2c_info>(buf_desc[0].size, (uint32_t*)&buf_desc[0].mem_handle); auto probe = (struct cam_cmd_probe *)(i2c_info.get() + 1); probe->camera_id = camera_num; i2c_info->slave_addr = ci->getSlaveAddress(camera_num); // 0(I2C_STANDARD_MODE) = 100khz, 1(I2C_FAST_MODE) = 400khz //i2c_info->i2c_freq_mode = I2C_STANDARD_MODE; i2c_info->i2c_freq_mode = I2C_FAST_MODE; i2c_info->cmd_type = CAMERA_SENSOR_CMD_TYPE_I2C_INFO; probe->data_type = CAMERA_SENSOR_I2C_TYPE_WORD; probe->addr_type = CAMERA_SENSOR_I2C_TYPE_WORD; probe->op_code = 3; // don't care? probe->cmd_type = CAMERA_SENSOR_CMD_TYPE_PROBE; probe->reg_addr = ci->probe_reg_addr; probe->expected_data = ci->probe_expected_data; probe->data_mask = 0; //buf_desc[1].size = buf_desc[1].length = 148; buf_desc[1].size = buf_desc[1].length = 196; buf_desc[1].type = CAM_CMD_BUF_I2C; auto power_settings = mm.alloc<struct cam_cmd_power>(buf_desc[1].size, (uint32_t*)&buf_desc[1].mem_handle); // power on struct cam_cmd_power *power = power_settings.get(); power->count = 4; power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP; power->power_settings[0].power_seq_type = 3; // clock?? 
power->power_settings[1].power_seq_type = 1; // analog power->power_settings[2].power_seq_type = 2; // digital power->power_settings[3].power_seq_type = 8; // reset low power = power_set_wait(power, 1); // set clock power->count = 1; power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP; power->power_settings[0].power_seq_type = 0; power->power_settings[0].config_val_low = ci->mclk_frequency; power = power_set_wait(power, 1); // reset high power->count = 1; power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP; power->power_settings[0].power_seq_type = 8; power->power_settings[0].config_val_low = 1; // wait 650000 cycles @ 19.2 mhz = 33.8 ms power = power_set_wait(power, 34); // probe happens here // disable clock power->count = 1; power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN; power->power_settings[0].power_seq_type = 0; power->power_settings[0].config_val_low = 0; power = power_set_wait(power, 1); // reset high power->count = 1; power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN; power->power_settings[0].power_seq_type = 8; power->power_settings[0].config_val_low = 1; power = power_set_wait(power, 1); // reset low power->count = 1; power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN; power->power_settings[0].power_seq_type = 8; power->power_settings[0].config_val_low = 0; power = power_set_wait(power, 1); // power off power->count = 3; power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN; power->power_settings[0].power_seq_type = 2; power->power_settings[1].power_seq_type = 1; power->power_settings[2].power_seq_type = 3; int ret = do_cam_control(sensor_fd, CAM_SENSOR_PROBE_CMD, (void *)(uintptr_t)cam_packet_handle, 0); LOGD("probing the sensor: %d", ret); return ret; } void CameraState::config_isp(int io_mem_handle, int fence, int request_id, int buf0_mem_handle, int buf0_offset) { uint32_t cam_packet_handle = 0; int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*2; if (io_mem_handle != 0) { size += sizeof(struct cam_buf_io_cfg); } auto pkt = mm.alloc<struct cam_packet>(size, &cam_packet_handle); pkt->num_cmd_buf = 2; pkt->kmd_cmd_buf_index = 0; // YUV has kmd_cmd_buf_offset = 1780 // I guess this is the ISP command // YUV also has patch_offset = 0x1030 and num_patches = 10 if (io_mem_handle != 0) { pkt->io_configs_offset = sizeof(struct cam_cmd_buf_desc)*pkt->num_cmd_buf; pkt->num_io_configs = 1; } if (io_mem_handle != 0) { pkt->header.op_code = 0xf000001; pkt->header.request_id = request_id; } else { pkt->header.op_code = 0xf000000; } pkt->header.size = size; struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload; struct cam_buf_io_cfg *io_cfg = (struct cam_buf_io_cfg *)((char*)&pkt->payload + pkt->io_configs_offset); // TODO: support MMU buf_desc[0].size = 65624; buf_desc[0].length = 0; buf_desc[0].type = CAM_CMD_BUF_DIRECT; buf_desc[0].meta_data = 3; buf_desc[0].mem_handle = buf0_mem_handle; buf_desc[0].offset = buf0_offset; // parsed by cam_isp_packet_generic_blob_handler struct isp_packet { uint32_t type_0; cam_isp_resource_hfr_config resource_hfr; uint32_t type_1; cam_isp_clock_config clock; uint64_t extra_rdi_hz[3]; uint32_t type_2; cam_isp_bw_config bw; struct cam_isp_bw_vote extra_rdi_vote[6]; } __attribute__((packed)) tmp; memset(&tmp, 0, sizeof(tmp)); tmp.type_0 = CAM_ISP_GENERIC_BLOB_TYPE_HFR_CONFIG; tmp.type_0 |= sizeof(cam_isp_resource_hfr_config) << 8; static_assert(sizeof(cam_isp_resource_hfr_config) == 0x20); tmp.resource_hfr = { .num_ports = 1, // 10 for YUV (but I don't think we need them) .port_hfr_config[0] = { .resource_type = 
CAM_ISP_IFE_OUT_RES_RDI_0, // CAM_ISP_IFE_OUT_RES_FULL for YUV .subsample_pattern = 1, .subsample_period = 0, .framedrop_pattern = 1, .framedrop_period = 0, }}; tmp.type_1 = CAM_ISP_GENERIC_BLOB_TYPE_CLOCK_CONFIG; tmp.type_1 |= (sizeof(cam_isp_clock_config) + sizeof(tmp.extra_rdi_hz)) << 8; static_assert((sizeof(cam_isp_clock_config) + sizeof(tmp.extra_rdi_hz)) == 0x38); tmp.clock = { .usage_type = 1, // dual mode .num_rdi = 4, .left_pix_hz = 404000000, .right_pix_hz = 404000000, .rdi_hz[0] = 404000000, }; tmp.type_2 = CAM_ISP_GENERIC_BLOB_TYPE_BW_CONFIG; tmp.type_2 |= (sizeof(cam_isp_bw_config) + sizeof(tmp.extra_rdi_vote)) << 8; static_assert((sizeof(cam_isp_bw_config) + sizeof(tmp.extra_rdi_vote)) == 0xe0); tmp.bw = { .usage_type = 1, // dual mode .num_rdi = 4, .left_pix_vote = { .resource_id = 0, .cam_bw_bps = 450000000, .ext_bw_bps = 450000000, }, .rdi_vote[0] = { .resource_id = 0, .cam_bw_bps = 8706200000, .ext_bw_bps = 8706200000, }, }; static_assert(offsetof(struct isp_packet, type_2) == 0x60); buf_desc[1].size = sizeof(tmp); buf_desc[1].offset = io_mem_handle != 0 ? 0x60 : 0; buf_desc[1].length = buf_desc[1].size - buf_desc[1].offset; buf_desc[1].type = CAM_CMD_BUF_GENERIC; buf_desc[1].meta_data = CAM_ISP_PACKET_META_GENERIC_BLOB_COMMON; auto buf2 = mm.alloc<uint32_t>(buf_desc[1].size, (uint32_t*)&buf_desc[1].mem_handle); memcpy(buf2.get(), &tmp, sizeof(tmp)); if (io_mem_handle != 0) { io_cfg[0].mem_handle[0] = io_mem_handle; io_cfg[0].planes[0] = (struct cam_plane_cfg){ .width = ci->frame_width, .height = ci->frame_height + ci->extra_height, .plane_stride = ci->frame_stride, .slice_height = ci->frame_height + ci->extra_height, .meta_stride = 0x0, // YUV has meta(stride=0x400, size=0x5000) .meta_size = 0x0, .meta_offset = 0x0, .packer_config = 0x0, // 0xb for YUV .mode_config = 0x0, // 0x9ef for YUV .tile_config = 0x0, .h_init = 0x0, .v_init = 0x0, }; io_cfg[0].format = ci->mipi_format; // CAM_FORMAT_UBWC_TP10 for YUV io_cfg[0].color_space = CAM_COLOR_SPACE_BASE; // CAM_COLOR_SPACE_BT601_FULL for YUV io_cfg[0].color_pattern = 0x5; // 0x0 for YUV io_cfg[0].bpp = (ci->mipi_format == CAM_FORMAT_MIPI_RAW_10 ? 
0xa : 0xc); // bits per pixel io_cfg[0].resource_type = CAM_ISP_IFE_OUT_RES_RDI_0; // CAM_ISP_IFE_OUT_RES_FULL for YUV io_cfg[0].fence = fence; io_cfg[0].direction = CAM_BUF_OUTPUT; io_cfg[0].subsample_pattern = 0x1; io_cfg[0].framedrop_pattern = 0x1; } int ret = device_config(multi_cam_state->isp_fd, session_handle, isp_dev_handle, cam_packet_handle); assert(ret == 0); if (ret != 0) { LOGE("isp config failed"); } } void CameraState::enqueue_buffer(int i, bool dp) { int ret; int request_id = request_ids[i]; if (buf_handle[i] && sync_objs[i]) { // wait struct cam_sync_wait sync_wait = {0}; sync_wait.sync_obj = sync_objs[i]; sync_wait.timeout_ms = 50; // max dt tolerance, typical should be 23 ret = do_cam_control(multi_cam_state->cam_sync_fd, CAM_SYNC_WAIT, &sync_wait, sizeof(sync_wait)); if (ret != 0) { LOGE("failed to wait for sync: %d %d", ret, sync_wait.sync_obj); // TODO: handle frame drop cleanly } buf.camera_bufs_metadata[i].timestamp_eof = (uint64_t)nanos_since_boot(); // set true eof if (dp) buf.queue(i); // destroy old output fence struct cam_sync_info sync_destroy = {0}; sync_destroy.sync_obj = sync_objs[i]; ret = do_cam_control(multi_cam_state->cam_sync_fd, CAM_SYNC_DESTROY, &sync_destroy, sizeof(sync_destroy)); if (ret != 0) { LOGE("failed to destroy sync object: %d %d", ret, sync_destroy.sync_obj); } } // create output fence struct cam_sync_info sync_create = {0}; strcpy(sync_create.name, "NodeOutputPortFence"); ret = do_cam_control(multi_cam_state->cam_sync_fd, CAM_SYNC_CREATE, &sync_create, sizeof(sync_create)); if (ret != 0) { LOGE("failed to create fence: %d %d", ret, sync_create.sync_obj); } sync_objs[i] = sync_create.sync_obj; // schedule request with camera request manager struct cam_req_mgr_sched_request req_mgr_sched_request = {0}; req_mgr_sched_request.session_hdl = session_handle; req_mgr_sched_request.link_hdl = link_handle; req_mgr_sched_request.req_id = request_id; ret = do_cam_control(multi_cam_state->video0_fd, CAM_REQ_MGR_SCHED_REQ, &req_mgr_sched_request, sizeof(req_mgr_sched_request)); if (ret != 0) { LOGE("failed to schedule cam mgr request: %d %d", ret, request_id); } // poke sensor, must happen after schedule sensors_poke(request_id); // submit request to the ife config_isp(buf_handle[i], sync_objs[i], request_id, buf0_handle, 65632*(i+1)); } void CameraState::enqueue_req_multi(int start, int n, bool dp) { for (int i=start; i<start+n; ++i) { request_ids[(i - 1) % FRAME_BUF_COUNT] = i; enqueue_buffer((i - 1) % FRAME_BUF_COUNT, dp); } } // ******************* camera ******************* void CameraState::set_exposure_rect() { // set areas for each camera, shouldn't be changed std::vector<std::pair<Rect, float>> ae_targets = { // (Rect, F) std::make_pair((Rect){96, 250, 1734, 524}, 567.0), // wide std::make_pair((Rect){96, 160, 1734, 986}, 2648.0), // road std::make_pair((Rect){96, 242, 1736, 906}, 567.0) // driver }; int h_ref = 1208; /* exposure target intrinics is [ [F, 0, 0.5*ae_xywh[2]] [0, F, 0.5*H-ae_xywh[1]] [0, 0, 1] ] */ auto ae_target = ae_targets[camera_num]; Rect xywh_ref = ae_target.first; float fl_ref = ae_target.second; ae_xywh = (Rect){ std::max(0, buf.rgb_width / 2 - (int)(fl_pix / fl_ref * xywh_ref.w / 2)), std::max(0, buf.rgb_height / 2 - (int)(fl_pix / fl_ref * (h_ref / 2 - xywh_ref.y))), std::min((int)(fl_pix / fl_ref * xywh_ref.w), buf.rgb_width / 2 + (int)(fl_pix / fl_ref * xywh_ref.w / 2)), std::min((int)(fl_pix / fl_ref * xywh_ref.h), buf.rgb_height / 2 + (int)(fl_pix / fl_ref * (h_ref / 2 - xywh_ref.y))) }; } void 
CameraState::sensor_set_parameters() { target_grey_fraction = 0.3; dc_gain_enabled = false; dc_gain_weight = ci->dc_gain_min_weight; gain_idx = ci->analog_gain_rec_idx; exposure_time = 5; cur_ev[0] = cur_ev[1] = cur_ev[2] = (1 + dc_gain_weight * (ci->dc_gain_factor-1) / ci->dc_gain_max_weight) * ci->sensor_analog_gains[gain_idx] * exposure_time; } void CameraState::camera_map_bufs(MultiCameraState *s) { for (int i = 0; i < FRAME_BUF_COUNT; i++) { // configure ISP to put the image in place struct cam_mem_mgr_map_cmd mem_mgr_map_cmd = {0}; mem_mgr_map_cmd.mmu_hdls[0] = s->device_iommu; mem_mgr_map_cmd.num_hdl = 1; mem_mgr_map_cmd.flags = CAM_MEM_FLAG_HW_READ_WRITE; mem_mgr_map_cmd.fd = buf.camera_bufs[i].fd; int ret = do_cam_control(s->video0_fd, CAM_REQ_MGR_MAP_BUF, &mem_mgr_map_cmd, sizeof(mem_mgr_map_cmd)); LOGD("map buf req: (fd: %d) 0x%x %d", buf.camera_bufs[i].fd, mem_mgr_map_cmd.out.buf_handle, ret); buf_handle[i] = mem_mgr_map_cmd.out.buf_handle; } enqueue_req_multi(1, FRAME_BUF_COUNT, 0); } void CameraState::camera_init(MultiCameraState *s, VisionIpcServer * v, cl_device_id device_id, cl_context ctx, VisionStreamType yuv_type, float focal_len) { if (!enabled) return; LOGD("camera init %d", camera_num); request_id_last = 0; skipped = true; buf.init(device_id, ctx, this, v, FRAME_BUF_COUNT, yuv_type); camera_map_bufs(s); fl_pix = focal_len / ci->pixel_size_mm; set_exposure_rect(); } void CameraState::camera_open(MultiCameraState *multi_cam_state_, int camera_num_, bool enabled_) { multi_cam_state = multi_cam_state_; camera_num = camera_num_; enabled = enabled_; if (!enabled) return; sensor_fd = open_v4l_by_name_and_index("cam-sensor-driver", camera_num); assert(sensor_fd >= 0); LOGD("opened sensor for %d", camera_num); // init memorymanager for this camera mm.init(multi_cam_state->video0_fd); LOGD("-- Probing sensor %d", camera_num); auto init_sensor_lambda = [this](SensorInfo *sensor) { ci.reset(sensor); int ret = sensors_init(); if (ret == 0) { sensor_set_parameters(); } return ret == 0; }; // Try different sensors one by one until it success. if (!init_sensor_lambda(new AR0231) && !init_sensor_lambda(new OX03C10) && !init_sensor_lambda(new OS04C10)) { LOGE("** sensor %d FAILED bringup, disabling", camera_num); enabled = false; return; } LOGD("-- Probing sensor %d success", camera_num); // create session struct cam_req_mgr_session_info session_info = {}; int ret = do_cam_control(multi_cam_state->video0_fd, CAM_REQ_MGR_CREATE_SESSION, &session_info, sizeof(session_info)); LOGD("get session: %d 0x%X", ret, session_info.session_hdl); session_handle = session_info.session_hdl; // access the sensor LOGD("-- Accessing sensor"); auto sensor_dev_handle_ = device_acquire(sensor_fd, session_handle, nullptr); assert(sensor_dev_handle_); sensor_dev_handle = *sensor_dev_handle_; LOGD("acquire sensor dev"); LOG("-- Configuring sensor"); sensors_i2c(ci->init_reg_array.data(), ci->init_reg_array.size(), CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG, ci->data_word); // NOTE: to be able to disable road and wide road, we still have to configure the sensor over i2c // If you don't do this, the strobe GPIO is an output (even in reset it seems!) if (!enabled) return; struct cam_isp_in_port_info in_port_info = { .res_type = (uint32_t[]){CAM_ISP_IFE_IN_RES_PHY_0, CAM_ISP_IFE_IN_RES_PHY_1, CAM_ISP_IFE_IN_RES_PHY_2}[camera_num], .lane_type = CAM_ISP_LANE_TYPE_DPHY, .lane_num = 4, .lane_cfg = 0x3210, .vc = 0x0, .dt = ci->frame_data_type, .format = ci->mipi_format, .test_pattern = 0x2, // 0x3? 
.usage_type = 0x0, .left_start = 0, .left_stop = ci->frame_width - 1, .left_width = ci->frame_width, .right_start = 0, .right_stop = ci->frame_width - 1, .right_width = ci->frame_width, .line_start = 0, .line_stop = ci->frame_height + ci->extra_height - 1, .height = ci->frame_height + ci->extra_height, .pixel_clk = 0x0, .batch_size = 0x0, .dsp_mode = CAM_ISP_DSP_MODE_NONE, .hbi_cnt = 0x0, .custom_csid = 0x0, .num_out_res = 0x1, .data[0] = (struct cam_isp_out_port_info){ .res_type = CAM_ISP_IFE_OUT_RES_RDI_0, .format = ci->mipi_format, .width = ci->frame_width, .height = ci->frame_height + ci->extra_height, .comp_grp_id = 0x0, .split_point = 0x0, .secure_mode = 0x0, }, }; struct cam_isp_resource isp_resource = { .resource_id = CAM_ISP_RES_ID_PORT, .handle_type = CAM_HANDLE_USER_POINTER, .res_hdl = (uint64_t)&in_port_info, .length = sizeof(in_port_info), }; auto isp_dev_handle_ = device_acquire(multi_cam_state->isp_fd, session_handle, &isp_resource); assert(isp_dev_handle_); isp_dev_handle = *isp_dev_handle_; LOGD("acquire isp dev"); csiphy_fd = open_v4l_by_name_and_index("cam-csiphy-driver", camera_num); assert(csiphy_fd >= 0); LOGD("opened csiphy for %d", camera_num); struct cam_csiphy_acquire_dev_info csiphy_acquire_dev_info = {.combo_mode = 0}; auto csiphy_dev_handle_ = device_acquire(csiphy_fd, session_handle, &csiphy_acquire_dev_info); assert(csiphy_dev_handle_); csiphy_dev_handle = *csiphy_dev_handle_; LOGD("acquire csiphy dev"); // config ISP alloc_w_mmu_hdl(multi_cam_state->video0_fd, 984480, (uint32_t*)&buf0_handle, 0x20, CAM_MEM_FLAG_HW_READ_WRITE | CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, multi_cam_state->device_iommu, multi_cam_state->cdm_iommu); config_isp(0, 0, 1, buf0_handle, 0); // config csiphy LOG("-- Config CSI PHY"); { uint32_t cam_packet_handle = 0; int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*1; auto pkt = mm.alloc<struct cam_packet>(size, &cam_packet_handle); pkt->num_cmd_buf = 1; pkt->kmd_cmd_buf_index = -1; pkt->header.size = size; struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload; buf_desc[0].size = buf_desc[0].length = sizeof(struct cam_csiphy_info); buf_desc[0].type = CAM_CMD_BUF_GENERIC; auto csiphy_info = mm.alloc<struct cam_csiphy_info>(buf_desc[0].size, (uint32_t*)&buf_desc[0].mem_handle); csiphy_info->lane_mask = 0x1f; csiphy_info->lane_assign = 0x3210;// skip clk. How is this 16 bit for 5 channels?? 
csiphy_info->csiphy_3phase = 0x0; // no 3 phase, only 2 conductors per lane csiphy_info->combo_mode = 0x0; csiphy_info->lane_cnt = 0x4; csiphy_info->secure_mode = 0x0; csiphy_info->settle_time = MIPI_SETTLE_CNT * 200000000ULL; csiphy_info->data_rate = 48000000; // Calculated by camera_freqs.py int ret_ = device_config(csiphy_fd, session_handle, csiphy_dev_handle, cam_packet_handle); assert(ret_ == 0); } // link devices LOG("-- Link devices"); struct cam_req_mgr_link_info req_mgr_link_info = {0}; req_mgr_link_info.session_hdl = session_handle; req_mgr_link_info.num_devices = 2; req_mgr_link_info.dev_hdls[0] = isp_dev_handle; req_mgr_link_info.dev_hdls[1] = sensor_dev_handle; ret = do_cam_control(multi_cam_state->video0_fd, CAM_REQ_MGR_LINK, &req_mgr_link_info, sizeof(req_mgr_link_info)); link_handle = req_mgr_link_info.link_hdl; LOGD("link: %d session: 0x%X isp: 0x%X sensors: 0x%X link: 0x%X", ret, session_handle, isp_dev_handle, sensor_dev_handle, link_handle); struct cam_req_mgr_link_control req_mgr_link_control = {0}; req_mgr_link_control.ops = CAM_REQ_MGR_LINK_ACTIVATE; req_mgr_link_control.session_hdl = session_handle; req_mgr_link_control.num_links = 1; req_mgr_link_control.link_hdls[0] = link_handle; ret = do_cam_control(multi_cam_state->video0_fd, CAM_REQ_MGR_LINK_CONTROL, &req_mgr_link_control, sizeof(req_mgr_link_control)); LOGD("link control: %d", ret); ret = device_control(csiphy_fd, CAM_START_DEV, session_handle, csiphy_dev_handle); LOGD("start csiphy: %d", ret); ret = device_control(multi_cam_state->isp_fd, CAM_START_DEV, session_handle, isp_dev_handle); LOGD("start isp: %d", ret); assert(ret == 0); // TODO: this is unneeded, should we be doing the start i2c in a different way? //ret = device_control(sensor_fd, CAM_START_DEV, session_handle, sensor_dev_handle); //LOGD("start sensor: %d", ret); } void cameras_init(VisionIpcServer *v, MultiCameraState *s, cl_device_id device_id, cl_context ctx) { s->driver_cam.camera_init(s, v, device_id, ctx, VISION_STREAM_DRIVER, DRIVER_FL_MM); s->road_cam.camera_init(s, v, device_id, ctx, VISION_STREAM_ROAD, ROAD_FL_MM); s->wide_road_cam.camera_init(s, v, device_id, ctx, VISION_STREAM_WIDE_ROAD, WIDE_FL_MM); s->pm = new PubMaster({"roadCameraState", "driverCameraState", "wideRoadCameraState", "thumbnail"}); } void cameras_open(MultiCameraState *s) { LOG("-- Opening devices"); // video0 is req_mgr, the target of many ioctls s->video0_fd = HANDLE_EINTR(open("/dev/v4l/by-path/platform-soc:qcom_cam-req-mgr-video-index0", O_RDWR | O_NONBLOCK)); assert(s->video0_fd >= 0); LOGD("opened video0"); // video1 is cam_sync, the target of some ioctls s->cam_sync_fd = HANDLE_EINTR(open("/dev/v4l/by-path/platform-cam_sync-video-index0", O_RDWR | O_NONBLOCK)); assert(s->cam_sync_fd >= 0); LOGD("opened video1 (cam_sync)"); // looks like there's only one of these s->isp_fd = open_v4l_by_name_and_index("cam-isp"); assert(s->isp_fd >= 0); LOGD("opened isp"); // query icp for MMU handles LOG("-- Query ICP for MMU handles"); struct cam_isp_query_cap_cmd isp_query_cap_cmd = {0}; struct cam_query_cap_cmd query_cap_cmd = {0}; query_cap_cmd.handle_type = 1; query_cap_cmd.caps_handle = (uint64_t)&isp_query_cap_cmd; query_cap_cmd.size = sizeof(isp_query_cap_cmd); int ret = do_cam_control(s->isp_fd, CAM_QUERY_CAP, &query_cap_cmd, sizeof(query_cap_cmd)); assert(ret == 0); LOGD("using MMU handle: %x", isp_query_cap_cmd.device_iommu.non_secure); LOGD("using MMU handle: %x", isp_query_cap_cmd.cdm_iommu.non_secure); s->device_iommu = isp_query_cap_cmd.device_iommu.non_secure; 
s->cdm_iommu = isp_query_cap_cmd.cdm_iommu.non_secure; // subscribe LOG("-- Subscribing"); struct v4l2_event_subscription sub = {0}; sub.type = V4L_EVENT_CAM_REQ_MGR_EVENT; sub.id = V4L_EVENT_CAM_REQ_MGR_SOF_BOOT_TS; ret = HANDLE_EINTR(ioctl(s->video0_fd, VIDIOC_SUBSCRIBE_EVENT, &sub)); LOGD("req mgr subscribe: %d", ret); s->driver_cam.camera_open(s, 2, !env_disable_driver); LOGD("driver camera opened"); s->road_cam.camera_open(s, 1, !env_disable_road); LOGD("road camera opened"); s->wide_road_cam.camera_open(s, 0, !env_disable_wide_road); LOGD("wide road camera opened"); } void CameraState::camera_close() { // stop devices LOG("-- Stop devices %d", camera_num); if (enabled) { // ret = device_control(sensor_fd, CAM_STOP_DEV, session_handle, sensor_dev_handle); // LOGD("stop sensor: %d", ret); int ret = device_control(multi_cam_state->isp_fd, CAM_STOP_DEV, session_handle, isp_dev_handle); LOGD("stop isp: %d", ret); ret = device_control(csiphy_fd, CAM_STOP_DEV, session_handle, csiphy_dev_handle); LOGD("stop csiphy: %d", ret); // link control stop LOG("-- Stop link control"); struct cam_req_mgr_link_control req_mgr_link_control = {0}; req_mgr_link_control.ops = CAM_REQ_MGR_LINK_DEACTIVATE; req_mgr_link_control.session_hdl = session_handle; req_mgr_link_control.num_links = 1; req_mgr_link_control.link_hdls[0] = link_handle; ret = do_cam_control(multi_cam_state->video0_fd, CAM_REQ_MGR_LINK_CONTROL, &req_mgr_link_control, sizeof(req_mgr_link_control)); LOGD("link control stop: %d", ret); // unlink LOG("-- Unlink"); struct cam_req_mgr_unlink_info req_mgr_unlink_info = {0}; req_mgr_unlink_info.session_hdl = session_handle; req_mgr_unlink_info.link_hdl = link_handle; ret = do_cam_control(multi_cam_state->video0_fd, CAM_REQ_MGR_UNLINK, &req_mgr_unlink_info, sizeof(req_mgr_unlink_info)); LOGD("unlink: %d", ret); // release devices LOGD("-- Release devices"); ret = device_control(multi_cam_state->isp_fd, CAM_RELEASE_DEV, session_handle, isp_dev_handle); LOGD("release isp: %d", ret); ret = device_control(csiphy_fd, CAM_RELEASE_DEV, session_handle, csiphy_dev_handle); LOGD("release csiphy: %d", ret); for (int i = 0; i < FRAME_BUF_COUNT; i++) { release(multi_cam_state->video0_fd, buf_handle[i]); } LOGD("released buffers"); } int ret = device_control(sensor_fd, CAM_RELEASE_DEV, session_handle, sensor_dev_handle); LOGD("release sensor: %d", ret); // destroyed session struct cam_req_mgr_session_info session_info = {.session_hdl = session_handle}; ret = do_cam_control(multi_cam_state->video0_fd, CAM_REQ_MGR_DESTROY_SESSION, &session_info, sizeof(session_info)); LOGD("destroyed session %d: %d", camera_num, ret); } void cameras_close(MultiCameraState *s) { s->driver_cam.camera_close(); s->road_cam.camera_close(); s->wide_road_cam.camera_close(); delete s->pm; } void CameraState::handle_camera_event(void *evdat) { if (!enabled) return; struct cam_req_mgr_message *event_data = (struct cam_req_mgr_message *)evdat; assert(event_data->session_hdl == session_handle); assert(event_data->u.frame_msg.link_hdl == link_handle); uint64_t timestamp = event_data->u.frame_msg.timestamp; int main_id = event_data->u.frame_msg.frame_id; int real_id = event_data->u.frame_msg.request_id; if (real_id != 0) { // next ready if (real_id == 1) {idx_offset = main_id;} int buf_idx = (real_id - 1) % FRAME_BUF_COUNT; // check for skipped frames if (main_id > frame_id_last + 1 && !skipped) { LOGE("camera %d realign", camera_num); clear_req_queue(); enqueue_req_multi(real_id + 1, FRAME_BUF_COUNT - 1, 0); skipped = true; } else if (main_id 
== frame_id_last + 1) { skipped = false; } // check for dropped requests if (real_id > request_id_last + 1) { LOGE("camera %d dropped requests %d %d", camera_num, real_id, request_id_last); enqueue_req_multi(request_id_last + 1 + FRAME_BUF_COUNT, real_id - (request_id_last + 1), 0); } // metas frame_id_last = main_id; request_id_last = real_id; auto &meta_data = buf.camera_bufs_metadata[buf_idx]; meta_data.frame_id = main_id - idx_offset; meta_data.request_id = real_id; meta_data.timestamp_sof = timestamp; exp_lock.lock(); meta_data.gain = analog_gain_frac * (1 + dc_gain_weight * (ci->dc_gain_factor-1) / ci->dc_gain_max_weight); meta_data.high_conversion_gain = dc_gain_enabled; meta_data.integ_lines = exposure_time; meta_data.measured_grey_fraction = measured_grey_fraction; meta_data.target_grey_fraction = target_grey_fraction; exp_lock.unlock(); // dispatch enqueue_req_multi(real_id + FRAME_BUF_COUNT, 1, 1); } else { // not ready if (main_id > frame_id_last + 10) { LOGE("camera %d reset after half second of no response", camera_num); clear_req_queue(); enqueue_req_multi(request_id_last + 1, FRAME_BUF_COUNT, 0); frame_id_last = main_id; skipped = true; } } } void CameraState::update_exposure_score(float desired_ev, int exp_t, int exp_g_idx, float exp_gain) { float score = ci->getExposureScore(desired_ev, exp_t, exp_g_idx, exp_gain, gain_idx); if (score < best_ev_score) { new_exp_t = exp_t; new_exp_g = exp_g_idx; best_ev_score = score; } } void CameraState::set_camera_exposure(float grey_frac) { if (!enabled) return; const float dt = 0.05; const float ts_grey = 10.0; const float ts_ev = 0.05; const float k_grey = (dt / ts_grey) / (1.0 + dt / ts_grey); const float k_ev = (dt / ts_ev) / (1.0 + dt / ts_ev); // It takes 3 frames for the commanded exposure settings to take effect. The first frame is already started by the time // we reach this function, the other 2 are due to the register buffering in the sensor. // Therefore we use the target EV from 3 frames ago, the grey fraction that was just measured was the result of that control action. 
// TODO: Lower latency to 2 frames, by using the histogram outputted by the sensor we can do AE before the debayering is complete const float cur_ev_ = cur_ev[buf.cur_frame_data.frame_id % 3]; // Scale target grey between 0.1 and 0.4 depending on lighting conditions float new_target_grey = std::clamp(0.4 - 0.3 * log2(1.0 + ci->target_grey_factor*cur_ev_) / log2(6000.0), 0.1, 0.4); float target_grey = (1.0 - k_grey) * target_grey_fraction + k_grey * new_target_grey; float desired_ev = std::clamp(cur_ev_ * target_grey / grey_frac, ci->min_ev, ci->max_ev); float k = (1.0 - k_ev) / 3.0; desired_ev = (k * cur_ev[0]) + (k * cur_ev[1]) + (k * cur_ev[2]) + (k_ev * desired_ev); best_ev_score = 1e6; new_exp_g = 0; new_exp_t = 0; // Hysteresis around high conversion gain // We usually want this on since it results in lower noise, but turn off in very bright day scenes bool enable_dc_gain = dc_gain_enabled; if (!enable_dc_gain && target_grey < ci->dc_gain_on_grey) { enable_dc_gain = true; dc_gain_weight = ci->dc_gain_min_weight; } else if (enable_dc_gain && target_grey > ci->dc_gain_off_grey) { enable_dc_gain = false; dc_gain_weight = ci->dc_gain_max_weight; } if (enable_dc_gain && dc_gain_weight < ci->dc_gain_max_weight) {dc_gain_weight += 1;} if (!enable_dc_gain && dc_gain_weight > ci->dc_gain_min_weight) {dc_gain_weight -= 1;} std::string gain_bytes, time_bytes; if (env_ctrl_exp_from_params) { gain_bytes = params.get("CameraDebugExpGain"); time_bytes = params.get("CameraDebugExpTime"); } if (gain_bytes.size() > 0 && time_bytes.size() > 0) { // Override gain and exposure time gain_idx = std::stoi(gain_bytes); exposure_time = std::stoi(time_bytes); new_exp_g = gain_idx; new_exp_t = exposure_time; enable_dc_gain = false; } else { // Simple brute force optimizer to choose sensor parameters // to reach desired EV for (int g = std::max((int)ci->analog_gain_min_idx, gain_idx - 1); g <= std::min((int)ci->analog_gain_max_idx, gain_idx + 1); g++) { float gain = ci->sensor_analog_gains[g] * (1 + dc_gain_weight * (ci->dc_gain_factor-1) / ci->dc_gain_max_weight); // Compute optimal time for given gain int t = std::clamp(int(std::round(desired_ev / gain)), ci->exposure_time_min, ci->exposure_time_max); // Only go below recommended gain when absolutely necessary to not overexpose if (g < ci->analog_gain_rec_idx && t > 20 && g < gain_idx) { continue; } update_exposure_score(desired_ev, t, g, gain); } } exp_lock.lock(); measured_grey_fraction = grey_frac; target_grey_fraction = target_grey; analog_gain_frac = ci->sensor_analog_gains[new_exp_g]; gain_idx = new_exp_g; exposure_time = new_exp_t; dc_gain_enabled = enable_dc_gain; float gain = analog_gain_frac * (1 + dc_gain_weight * (ci->dc_gain_factor-1) / ci->dc_gain_max_weight); cur_ev[buf.cur_frame_data.frame_id % 3] = exposure_time * gain; exp_lock.unlock(); // Processing a frame takes right about 50ms, so we need to wait a few ms // so we don't send i2c commands around the frame start. 
int ms = (nanos_since_boot() - buf.cur_frame_data.timestamp_sof) / 1000000; if (ms < 60) { util::sleep_for(60 - ms); } // LOGE("ae - camera %d, cur_t %.5f, sof %.5f, dt %.5f", camera_num, 1e-9 * nanos_since_boot(), 1e-9 * buf.cur_frame_data.timestamp_sof, 1e-9 * (nanos_since_boot() - buf.cur_frame_data.timestamp_sof)); auto exp_reg_array = ci->getExposureRegisters(exposure_time, new_exp_g, dc_gain_enabled); sensors_i2c(exp_reg_array.data(), exp_reg_array.size(), CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG, ci->data_word); } static void process_driver_camera(MultiCameraState *s, CameraState *c, int cnt) { c->set_camera_exposure(set_exposure_target(&c->buf, c->ae_xywh, 2, 4)); MessageBuilder msg; auto framed = msg.initEvent().initDriverCameraState(); framed.setFrameType(cereal::FrameData::FrameType::FRONT); fill_frame_data(framed, c->buf.cur_frame_data, c); c->ci->processRegisters(c, framed); s->pm->send("driverCameraState", msg); } void process_road_camera(MultiCameraState *s, CameraState *c, int cnt) { const CameraBuf *b = &c->buf; MessageBuilder msg; auto framed = c == &s->road_cam ? msg.initEvent().initRoadCameraState() : msg.initEvent().initWideRoadCameraState(); fill_frame_data(framed, b->cur_frame_data, c); if (env_log_raw_frames && c == &s->road_cam && cnt % 100 == 5) { // no overlap with qlog decimation framed.setImage(get_raw_frame_image(b)); } LOGT(c->buf.cur_frame_data.frame_id, "%s: Image set", c == &s->road_cam ? "RoadCamera" : "WideRoadCamera"); c->ci->processRegisters(c, framed); s->pm->send(c == &s->road_cam ? "roadCameraState" : "wideRoadCameraState", msg); const int skip = 2; c->set_camera_exposure(set_exposure_target(b, c->ae_xywh, skip, skip)); } void cameras_run(MultiCameraState *s) { LOG("-- Starting threads"); std::vector<std::thread> threads; if (s->driver_cam.enabled) threads.push_back(start_process_thread(s, &s->driver_cam, process_driver_camera)); if (s->road_cam.enabled) threads.push_back(start_process_thread(s, &s->road_cam, process_road_camera)); if (s->wide_road_cam.enabled) threads.push_back(start_process_thread(s, &s->wide_road_cam, process_road_camera)); // start devices LOG("-- Starting devices"); s->driver_cam.sensors_start(); s->road_cam.sensors_start(); s->wide_road_cam.sensors_start(); // poll events LOG("-- Dequeueing Video events"); while (!do_exit) { struct pollfd fds[1] = {{0}}; fds[0].fd = s->video0_fd; fds[0].events = POLLPRI; int ret = poll(fds, std::size(fds), 1000); if (ret < 0) { if (errno == EINTR || errno == EAGAIN) continue; LOGE("poll failed (%d - %d)", ret, errno); break; } if (!fds[0].revents) continue; struct v4l2_event ev = {0}; ret = HANDLE_EINTR(ioctl(fds[0].fd, VIDIOC_DQEVENT, &ev)); if (ret == 0) { if (ev.type == V4L_EVENT_CAM_REQ_MGR_EVENT) { struct cam_req_mgr_message *event_data = (struct cam_req_mgr_message *)ev.u.data; if (env_debug_frames) { printf("sess_hdl 0x%6X, link_hdl 0x%6X, frame_id %lu, req_id %lu, timestamp %.2f ms, sof_status %d\n", event_data->session_hdl, event_data->u.frame_msg.link_hdl, event_data->u.frame_msg.frame_id, event_data->u.frame_msg.request_id, event_data->u.frame_msg.timestamp/1e6, event_data->u.frame_msg.sof_status); } // for debugging //do_exit = do_exit || event_data->u.frame_msg.frame_id > (30*20); if (event_data->session_hdl == s->road_cam.session_handle) { s->road_cam.handle_camera_event(event_data); } else if (event_data->session_hdl == s->wide_road_cam.session_handle) { s->wide_road_cam.handle_camera_event(event_data); } else if (event_data->session_hdl == s->driver_cam.session_handle) { 
s->driver_cam.handle_camera_event(event_data); } else { LOGE("Unknown vidioc event source"); assert(false); } } else { LOGE("unhandled event %d\n", ev.type); } } else { LOGE("VIDIOC_DQEVENT failed, errno=%d", errno); } } LOG(" ************** STOPPING **************"); for (auto &t : threads) t.join(); cameras_close(s); }
2301_81045437/openpilot
system/camerad/cameras/camera_qcom2.cc
C++
mit
39,392
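CameraState::set_camera_exposure() in camera_qcom2.cc above filters the measured grey fraction into a new desired EV (exposure_time * gain) using two first-order low-passes and a three-deep cur_ev ring, because a commanded exposure only takes effect three frames later. The sketch below reproduces just that filtering arithmetic; the EV limits are made up, and the lighting-dependent target-grey formula is replaced by a fixed 0.3 target.

#include <algorithm>
#include <cstdio>

// First-order low-pass coefficient for sample time dt and time constant ts,
// matching the k_grey / k_ev construction in set_camera_exposure().
static double lp_k(double dt, double ts) { return (dt / ts) / (1.0 + dt / ts); }

int main() {
  const double dt = 0.05;                 // 20 Hz frame rate
  const double k_grey = lp_k(dt, 10.0);   // slow target-grey adaptation
  const double k_ev = lp_k(dt, 0.05);     // fast EV tracking
  const double min_ev = 0.1, max_ev = 1000.0;  // illustrative sensor limits

  double cur_ev[3] = {50.0, 50.0, 50.0};  // EV ring, indexed by frame_id % 3
  double target_grey = 0.3;

  for (unsigned frame_id = 0; frame_id < 40; frame_id++) {
    // Pretend the scene got brighter: the measured grey overshoots the target.
    double measured_grey = 0.45;

    // The grey we see now is the result of the EV commanded 3 frames ago.
    double ev_used = cur_ev[frame_id % 3];

    // Slowly adapt the target, then compute the EV that would have hit it.
    target_grey = (1.0 - k_grey) * target_grey + k_grey * 0.3;
    double desired_ev = std::clamp(ev_used * target_grey / measured_grey, min_ev, max_ev);

    // Blend the history of commanded EVs with the new desired EV,
    // the same mixing used before the brute-force gain/time search.
    double k = (1.0 - k_ev) / 3.0;
    desired_ev = k * cur_ev[0] + k * cur_ev[1] + k * cur_ev[2] + k_ev * desired_ev;

    cur_ev[frame_id % 3] = desired_ev;
    if (frame_id % 10 == 0) printf("frame %2u: ev %.2f\n", frame_id, desired_ev);
  }
  return 0;
}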
#pragma once #include <memory> #include <utility> #include "system/camerad/cameras/camera_common.h" #include "system/camerad/cameras/camera_util.h" #include "system/camerad/sensors/sensor.h" #include "common/params.h" #include "common/util.h" #define FRAME_BUF_COUNT 4 #define ROAD_FL_MM 8.0f #define WIDE_FL_MM 1.71f #define DRIVER_FL_MM 1.71f class CameraState { public: MultiCameraState *multi_cam_state; std::unique_ptr<const SensorInfo> ci; bool enabled; std::mutex exp_lock; int exposure_time; bool dc_gain_enabled; int dc_gain_weight; int gain_idx; float analog_gain_frac; float cur_ev[3]; float best_ev_score; int new_exp_g; int new_exp_t; Rect ae_xywh; float measured_grey_fraction; float target_grey_fraction; unique_fd sensor_fd; unique_fd csiphy_fd; int camera_num; float fl_pix; void handle_camera_event(void *evdat); void update_exposure_score(float desired_ev, int exp_t, int exp_g_idx, float exp_gain); void set_camera_exposure(float grey_frac); void sensors_start(); void camera_open(MultiCameraState *multi_cam_state, int camera_num, bool enabled); void set_exposure_rect(); void sensor_set_parameters(); void camera_map_bufs(MultiCameraState *s); void camera_init(MultiCameraState *s, VisionIpcServer *v, cl_device_id device_id, cl_context ctx, VisionStreamType yuv_type, float focal_len); void camera_close(); int32_t session_handle; int32_t sensor_dev_handle; int32_t isp_dev_handle; int32_t csiphy_dev_handle; int32_t link_handle; int buf0_handle; int buf_handle[FRAME_BUF_COUNT]; int sync_objs[FRAME_BUF_COUNT]; int request_ids[FRAME_BUF_COUNT]; int request_id_last; int frame_id_last; int idx_offset; bool skipped; CameraBuf buf; MemoryManager mm; void config_isp(int io_mem_handle, int fence, int request_id, int buf0_mem_handle, int buf0_offset); void enqueue_req_multi(int start, int n, bool dp); void enqueue_buffer(int i, bool dp); int clear_req_queue(); int sensors_init(); void sensors_poke(int request_id); void sensors_i2c(const struct i2c_random_wr_payload* dat, int len, int op_code, bool data_word); private: // for debugging Params params; }; typedef struct MultiCameraState { unique_fd video0_fd; unique_fd cam_sync_fd; unique_fd isp_fd; int device_iommu; int cdm_iommu; CameraState road_cam; CameraState wide_road_cam; CameraState driver_cam; PubMaster *pm; } MultiCameraState;
2301_81045437/openpilot
system/camerad/cameras/camera_qcom2.h
C++
mit
2,467
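The request bookkeeping declared here (request_ids[FRAME_BUF_COUNT], enqueue_req_multi, enqueue_buffer) maps monotonically increasing request ids onto a four-slot buffer ring via (request_id - 1) % FRAME_BUF_COUNT. A tiny sketch of that slot arithmetic, with the ISP and sync ioctls replaced by a printf:

#include <cstdio>

constexpr int FRAME_BUF_COUNT = 4;  // matches camera_qcom2.h

struct RequestRing {
  int request_ids[FRAME_BUF_COUNT] = {0};

  // Stand-in for CameraState::enqueue_buffer(): here we only report which
  // request id now owns the slot.
  void enqueue_buffer(int slot) {
    printf("slot %d -> request %d\n", slot, request_ids[slot]);
  }

  // Same slot arithmetic as CameraState::enqueue_req_multi().
  void enqueue_req_multi(int start, int n) {
    for (int i = start; i < start + n; ++i) {
      int slot = (i - 1) % FRAME_BUF_COUNT;
      request_ids[slot] = i;
      enqueue_buffer(slot);
    }
  }
};

int main() {
  RequestRing ring;
  ring.enqueue_req_multi(1, FRAME_BUF_COUNT);  // initial fill: requests 1..4

  // After request 4 completes, its slot is re-armed FRAME_BUF_COUNT requests
  // ahead, the same way handle_camera_event() enqueues real_id + FRAME_BUF_COUNT.
  ring.enqueue_req_multi(4 + FRAME_BUF_COUNT, 1);  // request 8 lands on slot 3 again
  return 0;
}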
#include "system/camerad/cameras/camera_util.h" #include <string.h> #include <cassert> #include <sys/ioctl.h> #include <sys/mman.h> #include "common/swaglog.h" #include "common/util.h" // ************** low level camera helpers **************** int do_cam_control(int fd, int op_code, void *handle, int size) { struct cam_control camcontrol = {0}; camcontrol.op_code = op_code; camcontrol.handle = (uint64_t)handle; if (size == 0) { camcontrol.size = 8; camcontrol.handle_type = CAM_HANDLE_MEM_HANDLE; } else { camcontrol.size = size; camcontrol.handle_type = CAM_HANDLE_USER_POINTER; } int ret = HANDLE_EINTR(ioctl(fd, VIDIOC_CAM_CONTROL, &camcontrol)); if (ret == -1) { LOGE("VIDIOC_CAM_CONTROL error: op_code %d - errno %d", op_code, errno); } return ret; } std::optional<int32_t> device_acquire(int fd, int32_t session_handle, void *data, uint32_t num_resources) { struct cam_acquire_dev_cmd cmd = { .session_handle = session_handle, .handle_type = CAM_HANDLE_USER_POINTER, .num_resources = (uint32_t)(data ? num_resources : 0), .resource_hdl = (uint64_t)data, }; int err = do_cam_control(fd, CAM_ACQUIRE_DEV, &cmd, sizeof(cmd)); return err == 0 ? std::make_optional(cmd.dev_handle) : std::nullopt; } int device_config(int fd, int32_t session_handle, int32_t dev_handle, uint64_t packet_handle) { struct cam_config_dev_cmd cmd = { .session_handle = session_handle, .dev_handle = dev_handle, .packet_handle = packet_handle, }; return do_cam_control(fd, CAM_CONFIG_DEV, &cmd, sizeof(cmd)); } int device_control(int fd, int op_code, int session_handle, int dev_handle) { // start stop and release are all the same struct cam_start_stop_dev_cmd cmd { .session_handle = session_handle, .dev_handle = dev_handle }; return do_cam_control(fd, op_code, &cmd, sizeof(cmd)); } void *alloc_w_mmu_hdl(int video0_fd, int len, uint32_t *handle, int align, int flags, int mmu_hdl, int mmu_hdl2) { struct cam_mem_mgr_alloc_cmd mem_mgr_alloc_cmd = {0}; mem_mgr_alloc_cmd.len = len; mem_mgr_alloc_cmd.align = align; mem_mgr_alloc_cmd.flags = flags; mem_mgr_alloc_cmd.num_hdl = 0; if (mmu_hdl != 0) { mem_mgr_alloc_cmd.mmu_hdls[0] = mmu_hdl; mem_mgr_alloc_cmd.num_hdl++; } if (mmu_hdl2 != 0) { mem_mgr_alloc_cmd.mmu_hdls[1] = mmu_hdl2; mem_mgr_alloc_cmd.num_hdl++; } do_cam_control(video0_fd, CAM_REQ_MGR_ALLOC_BUF, &mem_mgr_alloc_cmd, sizeof(mem_mgr_alloc_cmd)); *handle = mem_mgr_alloc_cmd.out.buf_handle; void *ptr = NULL; if (mem_mgr_alloc_cmd.out.fd > 0) { ptr = mmap(NULL, len, PROT_READ | PROT_WRITE, MAP_SHARED, mem_mgr_alloc_cmd.out.fd, 0); assert(ptr != MAP_FAILED); } // LOGD("allocated: %x %d %llx mapped %p", mem_mgr_alloc_cmd.out.buf_handle, mem_mgr_alloc_cmd.out.fd, mem_mgr_alloc_cmd.out.vaddr, ptr); return ptr; } void release(int video0_fd, uint32_t handle) { struct cam_mem_mgr_release_cmd mem_mgr_release_cmd = {0}; mem_mgr_release_cmd.buf_handle = handle; int ret = do_cam_control(video0_fd, CAM_REQ_MGR_RELEASE_BUF, &mem_mgr_release_cmd, sizeof(mem_mgr_release_cmd)); assert(ret == 0); } void release_fd(int video0_fd, uint32_t handle) { // handle to fd close(handle>>16); release(video0_fd, handle); } void *MemoryManager::alloc_buf(int size, uint32_t *handle) { lock.lock(); void *ptr; if (!cached_allocations[size].empty()) { ptr = cached_allocations[size].front(); cached_allocations[size].pop(); *handle = handle_lookup[ptr]; } else { ptr = alloc_w_mmu_hdl(video0_fd, size, handle); handle_lookup[ptr] = *handle; size_lookup[ptr] = size; } lock.unlock(); memset(ptr, 0, size); return ptr; } void MemoryManager::free(void *ptr) { lock.lock(); 
cached_allocations[size_lookup[ptr]].push(ptr); lock.unlock(); } MemoryManager::~MemoryManager() { for (auto& x : cached_allocations) { while (!x.second.empty()) { void *ptr = x.second.front(); x.second.pop(); LOGD("freeing cached allocation %p with size %d", ptr, size_lookup[ptr]); munmap(ptr, size_lookup[ptr]); release_fd(video0_fd, handle_lookup[ptr]); handle_lookup.erase(ptr); size_lookup.erase(ptr); } } }
2301_81045437/openpilot
system/camerad/cameras/camera_util.cc
C++
mit
4,245
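MemoryManager in camera_util.cc above avoids repeated CAM_REQ_MGR_ALLOC_BUF/mmap round trips by keeping freed buffers in per-size queues and handing them back out on the next alloc of the same size. A simplified sketch of that caching pattern, with plain malloc/free standing in for the CAM memory manager calls:

#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <map>
#include <mutex>
#include <queue>

// Size-bucketed buffer recycler, shaped like camerad's MemoryManager but with
// malloc/free standing in for alloc_w_mmu_hdl()/munmap()/release_fd().
class BufferCache {
 public:
  void *alloc(size_t size) {
    std::lock_guard<std::mutex> lk(lock_);
    auto &bucket = cached_[size];
    void *ptr;
    if (!bucket.empty()) {
      ptr = bucket.front();      // reuse a previously freed buffer of this size
      bucket.pop();
    } else {
      ptr = std::malloc(size);   // real code: CAM alloc + mmap
      sizes_[ptr] = size;
    }
    std::memset(ptr, 0, size);   // callers expect zeroed command buffers
    return ptr;
  }

  void free(void *ptr) {
    std::lock_guard<std::mutex> lk(lock_);
    cached_[sizes_[ptr]].push(ptr);  // don't release it, keep it for the next alloc
  }

  ~BufferCache() {
    for (auto &kv : cached_) {
      while (!kv.second.empty()) {
        std::free(kv.second.front());  // real code: munmap + release_fd
        kv.second.pop();
      }
    }
  }

 private:
  std::mutex lock_;
  std::map<void *, size_t> sizes_;
  std::map<size_t, std::queue<void *>> cached_;
};

int main() {
  BufferCache mm;
  void *a = mm.alloc(4096);
  mm.free(a);
  void *b = mm.alloc(4096);  // comes back from the cache, same pointer
  printf("recycled: %s\n", a == b ? "yes" : "no");
  mm.free(b);
  return 0;
}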
#pragma once #include <functional> #include <map> #include <memory> #include <mutex> #include <optional> #include <queue> #include <media/cam_req_mgr.h> std::optional<int32_t> device_acquire(int fd, int32_t session_handle, void *data, uint32_t num_resources=1); int device_config(int fd, int32_t session_handle, int32_t dev_handle, uint64_t packet_handle); int device_control(int fd, int op_code, int session_handle, int dev_handle); int do_cam_control(int fd, int op_code, void *handle, int size); void *alloc_w_mmu_hdl(int video0_fd, int len, uint32_t *handle, int align = 8, int flags = CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, int mmu_hdl = 0, int mmu_hdl2 = 0); void release(int video0_fd, uint32_t handle); class MemoryManager { public: void init(int _video0_fd) { video0_fd = _video0_fd; } ~MemoryManager(); template <class T> auto alloc(int len, uint32_t *handle) { return std::unique_ptr<T, std::function<void(void *)>>((T*)alloc_buf(len, handle), [this](void *ptr) { this->free(ptr); }); } private: void *alloc_buf(int len, uint32_t *handle); void free(void *ptr); std::mutex lock; std::map<void *, uint32_t> handle_lookup; std::map<void *, int> size_lookup; std::map<int, std::queue<void *> > cached_allocations; int video0_fd; };
2301_81045437/openpilot
system/camerad/cameras/camera_util.h
C++
mit
1,367
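MemoryManager::alloc<T>() above returns a std::unique_ptr whose custom deleter puts the buffer back into the cache, so packet-building code cannot leak command buffers on an early return. A self-contained sketch of that deleter pattern; the Pool type below is a toy stand-in, not the real MemoryManager.

#include <cstdio>
#include <cstdlib>
#include <functional>
#include <memory>

// Toy pool: hands out raw storage and takes it back. Stands in for the
// MemoryManager / CAM buffer manager pair in camera_util.h.
struct Pool {
  void *take(size_t n) { printf("take %zu bytes\n", n); return std::malloc(n); }
  void give_back(void *p) { printf("give back %p\n", p); std::free(p); }

  // Typed handle whose deleter returns memory to the pool, mirroring the
  // unique_ptr-with-std::function deleter shape used by MemoryManager::alloc<T>().
  template <class T>
  auto alloc(size_t len) {
    return std::unique_ptr<T, std::function<void(void *)>>(
        static_cast<T *>(take(len)), [this](void *p) { give_back(p); });
  }
};

struct Packet { int op_code; int size; };

int main() {
  Pool pool;
  {
    auto pkt = pool.alloc<Packet>(sizeof(Packet));
    pkt->op_code = 0x1000;
    pkt->size = sizeof(Packet);
    // pkt goes out of scope here and is returned to the pool automatically,
    // even if an early return had skipped the rest of the block.
  }
  return 0;
}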
#include "ar0231_cl.h" #include "ox03c10_cl.h" #include "os04c10_cl.h" #define UV_WIDTH RGB_WIDTH / 2 #define UV_HEIGHT RGB_HEIGHT / 2 #define RGB_TO_Y(r, g, b) ((((mul24(b, 13) + mul24(g, 65) + mul24(r, 33)) + 64) >> 7) + 16) #define RGB_TO_U(r, g, b) ((mul24(b, 56) - mul24(g, 37) - mul24(r, 19) + 0x8080) >> 8) #define RGB_TO_V(r, g, b) ((mul24(r, 56) - mul24(g, 47) - mul24(b, 9) + 0x8080) >> 8) #define AVERAGE(x, y, z, w) ((convert_ushort(x) + convert_ushort(y) + convert_ushort(z) + convert_ushort(w) + 1) >> 1) #if defined(BGGR) #define ROW_READ_ORDER (int[]){3, 2, 1, 0} #define RGB_WRITE_ORDER (int[]){2, 3, 0, 1} #else #define ROW_READ_ORDER (int[]){0, 1, 2, 3} #define RGB_WRITE_ORDER (int[]){0, 1, 2, 3} #endif float get_vignetting_s(float r) { if (r < 62500) { return (1.0f + 0.0000008f*r); } else if (r < 490000) { return (0.9625f + 0.0000014f*r); } else if (r < 1102500) { return (1.26434f + 0.0000000000016f*r*r); } else { return (0.53503625f + 0.0000000000022f*r*r); } } int4 parse_12bit(uchar8 pvs) { // lower bits scambled? return (int4)(((int)pvs.s0<<4) + (pvs.s1>>4), ((int)pvs.s2<<4) + (pvs.s4&0xF), ((int)pvs.s3<<4) + (pvs.s4>>4), ((int)pvs.s5<<4) + (pvs.s7&0xF)); } int4 parse_10bit(uchar8 pvs, uchar ext, bool aligned) { if (aligned) { return (int4)(((int)pvs.s0 << 2) + (pvs.s1 & 0b00000011), ((int)pvs.s2 << 2) + ((pvs.s6 & 0b11000000) / 64), ((int)pvs.s3 << 2) + ((pvs.s6 & 0b00110000) / 16), ((int)pvs.s4 << 2) + ((pvs.s6 & 0b00001100) / 4)); } else { return (int4)(((int)pvs.s0 << 2) + ((pvs.s3 & 0b00110000) / 16), ((int)pvs.s1 << 2) + ((pvs.s3 & 0b00001100) / 4), ((int)pvs.s2 << 2) + ((pvs.s3 & 0b00000011)), ((int)pvs.s4 << 2) + ((ext & 0b11000000) / 64)); } } float get_k(float a, float b, float c, float d) { return 2.0 - (fabs(a - b) + fabs(c - d)); } __kernel void process_raw(const __global uchar * in, __global uchar * out, int expo_time) { const int gid_x = get_global_id(0); const int gid_y = get_global_id(1); // estimate vignetting #if VIGNETTING int gx = (gid_x*2 - RGB_WIDTH/2); int gy = (gid_y*2 - RGB_HEIGHT/2); const float vignette_factor = get_vignetting_s((gx*gx + gy*gy) / VIGNETTE_RSZ); #else const float vignette_factor = 1.0; #endif const int row_before_offset = (gid_y == 0) ? 2 : 0; const int row_after_offset = (gid_y == (RGB_HEIGHT/2 - 1)) ? 
1 : 3; float3 rgb_tmp; uchar3 rgb_out[4]; // output is 2x2 window // read offset int start_idx; #if BIT_DEPTH == 10 bool aligned10; if (gid_x % 2 == 0) { aligned10 = true; start_idx = (2 * gid_y - 1) * FRAME_STRIDE + (5 * gid_x / 2 - 2) + (FRAME_STRIDE * FRAME_OFFSET); } else { aligned10 = false; start_idx = (2 * gid_y - 1) * FRAME_STRIDE + (5 * (gid_x - 1) / 2 + 1) + (FRAME_STRIDE * FRAME_OFFSET); } #else start_idx = (2 * gid_y - 1) * FRAME_STRIDE + (3 * gid_x - 2) + (FRAME_STRIDE * FRAME_OFFSET); #endif // read in 4 rows, 8 uchars each uchar8 dat[4]; // row_before dat[0] = vload8(0, in + start_idx + FRAME_STRIDE*row_before_offset); // row_0 if (gid_x == 0 && gid_y == 0) { // this wasn't a problem due to extra rows dat[1] = vload8(0, in + start_idx + FRAME_STRIDE*1 + 2); dat[1] = (uchar8)(0, 0, dat[1].s0, dat[1].s1, dat[1].s2, dat[1].s3, dat[1].s4, dat[1].s5); } else { dat[1] = vload8(0, in + start_idx + FRAME_STRIDE*1); } // row_1 dat[2] = vload8(0, in + start_idx + FRAME_STRIDE*2); // row_after dat[3] = vload8(0, in + start_idx + FRAME_STRIDE*row_after_offset); // need extra bit for 10-bit, 4 rows, 1 uchar each #if BIT_DEPTH == 10 uchar extra_dat[4]; if (!aligned10) { extra_dat[0] = in[start_idx + FRAME_STRIDE*row_before_offset + 8]; extra_dat[1] = in[start_idx + FRAME_STRIDE*1 + 8]; extra_dat[2] = in[start_idx + FRAME_STRIDE*2 + 8]; extra_dat[3] = in[start_idx + FRAME_STRIDE*row_after_offset + 8]; } #endif // read odd rows for staggered second exposure #if HDR_OFFSET > 0 uchar8 short_dat[4]; short_dat[0] = vload8(0, in + start_idx + FRAME_STRIDE*(row_before_offset+HDR_OFFSET/2) + FRAME_STRIDE/2); short_dat[1] = vload8(0, in + start_idx + FRAME_STRIDE*(1+HDR_OFFSET/2) + FRAME_STRIDE/2); short_dat[2] = vload8(0, in + start_idx + FRAME_STRIDE*(2+HDR_OFFSET/2) + FRAME_STRIDE/2); short_dat[3] = vload8(0, in + start_idx + FRAME_STRIDE*(row_after_offset+HDR_OFFSET/2) + FRAME_STRIDE/2); #if BIT_DEPTH == 10 uchar short_extra_dat[4]; if (!aligned10) { short_extra_dat[0] = in[start_idx + FRAME_STRIDE*(row_before_offset+HDR_OFFSET/2) + FRAME_STRIDE/2 + 8]; short_extra_dat[1] = in[start_idx + FRAME_STRIDE*(1+HDR_OFFSET/2) + FRAME_STRIDE/2 + 8]; short_extra_dat[2] = in[start_idx + FRAME_STRIDE*(2+HDR_OFFSET/2) + FRAME_STRIDE/2 + 8]; short_extra_dat[3] = in[start_idx + FRAME_STRIDE*(row_after_offset+HDR_OFFSET/2) + FRAME_STRIDE/2 + 8]; } #endif #endif // parse into floats 0.0-1.0 float4 v_rows[4]; #if BIT_DEPTH == 10 // for now it's always HDR int4 parsed = parse_10bit(dat[0], extra_dat[0], aligned10); int4 short_parsed = parse_10bit(short_dat[0], short_extra_dat[0], aligned10); v_rows[ROW_READ_ORDER[0]] = normalize_pv_hdr(parsed, short_parsed, vignette_factor, expo_time); parsed = parse_10bit(dat[1], extra_dat[1], aligned10); short_parsed = parse_10bit(short_dat[1], short_extra_dat[1], aligned10); v_rows[ROW_READ_ORDER[1]] = normalize_pv_hdr(parsed, short_parsed, vignette_factor, expo_time); parsed = parse_10bit(dat[2], extra_dat[2], aligned10); short_parsed = parse_10bit(short_dat[2], short_extra_dat[2], aligned10); v_rows[ROW_READ_ORDER[2]] = normalize_pv_hdr(parsed, short_parsed, vignette_factor, expo_time); parsed = parse_10bit(dat[3], extra_dat[3], aligned10); short_parsed = parse_10bit(short_dat[3], short_extra_dat[3], aligned10); v_rows[ROW_READ_ORDER[3]] = normalize_pv_hdr(parsed, short_parsed, vignette_factor, expo_time); #else // no HDR here int4 parsed = parse_12bit(dat[0]); v_rows[ROW_READ_ORDER[0]] = normalize_pv(parsed, vignette_factor); parsed = parse_12bit(dat[1]); 
v_rows[ROW_READ_ORDER[1]] = normalize_pv(parsed, vignette_factor); parsed = parse_12bit(dat[2]); v_rows[ROW_READ_ORDER[2]] = normalize_pv(parsed, vignette_factor); parsed = parse_12bit(dat[3]); v_rows[ROW_READ_ORDER[3]] = normalize_pv(parsed, vignette_factor); #endif // mirror padding if (gid_x == 0) { v_rows[0].s0 = v_rows[0].s2; v_rows[1].s0 = v_rows[1].s2; v_rows[2].s0 = v_rows[2].s2; v_rows[3].s0 = v_rows[3].s2; } else if (gid_x == RGB_WIDTH/2 - 1) { v_rows[0].s3 = v_rows[0].s1; v_rows[1].s3 = v_rows[1].s1; v_rows[2].s3 = v_rows[2].s1; v_rows[3].s3 = v_rows[3].s1; } // debayering // a simplified version of https://opensignalprocessingjournal.com/contents/volumes/V6/TOSIGPJ-6-1/TOSIGPJ-6-1.pdf const float k01 = get_k(v_rows[0].s0, v_rows[1].s1, v_rows[0].s2, v_rows[1].s1); const float k02 = get_k(v_rows[0].s2, v_rows[1].s1, v_rows[2].s2, v_rows[1].s1); const float k03 = get_k(v_rows[2].s0, v_rows[1].s1, v_rows[2].s2, v_rows[1].s1); const float k04 = get_k(v_rows[0].s0, v_rows[1].s1, v_rows[2].s0, v_rows[1].s1); rgb_tmp.x = (k02*v_rows[1].s2+k04*v_rows[1].s0)/(k02+k04); // R_G1 rgb_tmp.y = v_rows[1].s1; // G1(R) rgb_tmp.z = (k01*v_rows[0].s1+k03*v_rows[2].s1)/(k01+k03); // B_G1 rgb_out[RGB_WRITE_ORDER[0]] = convert_uchar3_sat(apply_gamma(color_correct(clamp(rgb_tmp, 0.0, 1.0)), expo_time) * 255.0); const float k11 = get_k(v_rows[0].s1, v_rows[2].s1, v_rows[0].s3, v_rows[2].s3); const float k12 = get_k(v_rows[0].s2, v_rows[1].s1, v_rows[1].s3, v_rows[2].s2); const float k13 = get_k(v_rows[0].s1, v_rows[0].s3, v_rows[2].s1, v_rows[2].s3); const float k14 = get_k(v_rows[0].s2, v_rows[1].s3, v_rows[2].s2, v_rows[1].s1); rgb_tmp.x = v_rows[1].s2; // R rgb_tmp.y = (k11*(v_rows[0].s2+v_rows[2].s2)*0.5+k13*(v_rows[1].s3+v_rows[1].s1)*0.5)/(k11+k13); // G_R rgb_tmp.z = (k12*(v_rows[0].s3+v_rows[2].s1)*0.5+k14*(v_rows[0].s1+v_rows[2].s3)*0.5)/(k12+k14); // B_R rgb_out[RGB_WRITE_ORDER[1]] = convert_uchar3_sat(apply_gamma(color_correct(clamp(rgb_tmp, 0.0, 1.0)), expo_time) * 255.0); const float k21 = get_k(v_rows[1].s0, v_rows[3].s0, v_rows[1].s2, v_rows[3].s2); const float k22 = get_k(v_rows[1].s1, v_rows[2].s0, v_rows[2].s2, v_rows[3].s1); const float k23 = get_k(v_rows[1].s0, v_rows[1].s2, v_rows[3].s0, v_rows[3].s2); const float k24 = get_k(v_rows[1].s1, v_rows[2].s2, v_rows[3].s1, v_rows[2].s0); rgb_tmp.x = (k22*(v_rows[1].s2+v_rows[3].s0)*0.5+k24*(v_rows[1].s0+v_rows[3].s2)*0.5)/(k22+k24); // R_B rgb_tmp.y = (k21*(v_rows[1].s1+v_rows[3].s1)*0.5+k23*(v_rows[2].s2+v_rows[2].s0)*0.5)/(k21+k23); // G_B rgb_tmp.z = v_rows[2].s1; // B rgb_out[RGB_WRITE_ORDER[2]] = convert_uchar3_sat(apply_gamma(color_correct(clamp(rgb_tmp, 0.0, 1.0)), expo_time) * 255.0); const float k31 = get_k(v_rows[1].s1, v_rows[2].s2, v_rows[1].s3, v_rows[2].s2); const float k32 = get_k(v_rows[1].s3, v_rows[2].s2, v_rows[3].s3, v_rows[2].s2); const float k33 = get_k(v_rows[3].s1, v_rows[2].s2, v_rows[3].s3, v_rows[2].s2); const float k34 = get_k(v_rows[1].s1, v_rows[2].s2, v_rows[3].s1, v_rows[2].s2); rgb_tmp.x = (k31*v_rows[1].s2+k33*v_rows[3].s2)/(k31+k33); // R_G2 rgb_tmp.y = v_rows[2].s2; // G2(B) rgb_tmp.z = (k32*v_rows[2].s3+k34*v_rows[2].s1)/(k32+k34); // B_G2 rgb_out[RGB_WRITE_ORDER[3]] = convert_uchar3_sat(apply_gamma(color_correct(clamp(rgb_tmp, 0.0, 1.0)), expo_time) * 255.0); // rgb2yuv(nv12) uchar2 yy = (uchar2)( RGB_TO_Y(rgb_out[0].s0, rgb_out[0].s1, rgb_out[0].s2), RGB_TO_Y(rgb_out[1].s0, rgb_out[1].s1, rgb_out[1].s2) ); vstore2(yy, 0, out + mad24(gid_y * 2, YUV_STRIDE, gid_x * 2)); yy = (uchar2)( 
RGB_TO_Y(rgb_out[2].s0, rgb_out[2].s1, rgb_out[2].s2), RGB_TO_Y(rgb_out[3].s0, rgb_out[3].s1, rgb_out[3].s2) ); vstore2(yy, 0, out + mad24(gid_y * 2 + 1, YUV_STRIDE, gid_x * 2)); const short ar = AVERAGE(rgb_out[0].s0, rgb_out[1].s0, rgb_out[2].s0, rgb_out[3].s0); const short ag = AVERAGE(rgb_out[0].s1, rgb_out[1].s1, rgb_out[2].s1, rgb_out[3].s1); const short ab = AVERAGE(rgb_out[0].s2, rgb_out[1].s2, rgb_out[2].s2, rgb_out[3].s2); uchar2 uv = (uchar2)( RGB_TO_U(ar, ag, ab), RGB_TO_V(ar, ag, ab) ); vstore2(uv, 0, out + UV_OFFSET + mad24(gid_y, YUV_STRIDE, gid_x * 2)); }
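A host-side arithmetic check of the fixed-point YUV macros defined at the top of this kernel; it is an illustrative sketch, not part of the kernel. It also notes why AVERAGE() is (sum + 1) >> 1: that is roughly twice the 2x2 mean, which matches the halved U/V coefficients.

// Illustrative check, not repository code: RGB_TO_Y/U/V are fixed-point BT.601 conversions.
#include <cassert>
#include <cmath>

int main() {
  int r = 200, g = 120, b = 40;
  int y_fixed = (((b * 13 + g * 65 + r * 33) + 64) >> 7) + 16;   // RGB_TO_Y
  double y_float = 16 + 0.257 * r + 0.504 * g + 0.098 * b;       // BT.601 limited-range luma
  assert(std::abs(y_fixed - y_float) < 2.0);

  // AVERAGE() returns about twice the mean of the 2x2 block, so the U/V weights are half-scale
  int ar = 2 * r, ag = 2 * g, ab = 2 * b;
  int u_fixed = (ab * 56 - ag * 37 - ar * 19 + 0x8080) >> 8;     // RGB_TO_U
  double u_float = 128 + 0.439 * b - 0.291 * g - 0.148 * r;
  assert(std::abs(u_fixed - u_float) < 2.0);
  return 0;
}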
2301_81045437/openpilot
system/camerad/cameras/process_raw.cl
OpenCL
mit
10,753
#include "system/camerad/cameras/camera_common.h" #include <cassert> #include "common/params.h" #include "common/util.h" #include "system/hardware/hw.h" int main(int argc, char *argv[]) { if (Hardware::PC()) { printf("exiting, camerad is not meant to run on PC\n"); return 0; } int ret = util::set_realtime_priority(53); assert(ret == 0); ret = util::set_core_affinity({6}); assert(ret == 0 || Params().getBool("IsOffroad")); // failure ok while offroad due to offlining cores camerad_thread(); return 0; }
2301_81045437/openpilot
system/camerad/main.cc
C++
mit
536
#include <cassert> #include "common/swaglog.h" #include "system/camerad/cameras/camera_common.h" #include "system/camerad/cameras/camera_qcom2.h" #include "system/camerad/sensors/sensor.h" namespace { const size_t AR0231_REGISTERS_HEIGHT = 2; // TODO: this extra height is universal and doesn't apply per camera const size_t AR0231_STATS_HEIGHT = 2 + 8; const float sensor_analog_gains_AR0231[] = { 1.0 / 8.0, 2.0 / 8.0, 2.0 / 7.0, 3.0 / 7.0, // 0, 1, 2, 3 3.0 / 6.0, 4.0 / 6.0, 4.0 / 5.0, 5.0 / 5.0, // 4, 5, 6, 7 5.0 / 4.0, 6.0 / 4.0, 6.0 / 3.0, 7.0 / 3.0, // 8, 9, 10, 11 7.0 / 2.0, 8.0 / 2.0, 8.0 / 1.0}; // 12, 13, 14, 15 = bypass std::map<uint16_t, std::pair<int, int>> ar0231_build_register_lut(CameraState *c, uint8_t *data) { // This function builds a lookup table from register address, to a pair of indices in the // buffer where to read this address. The buffer contains padding bytes, // as well as markers to indicate the type of the next byte. // // 0xAA is used to indicate the MSB of the address, 0xA5 for the LSB of the address. // Every byte of data (MSB and LSB) is preceded by 0x5A. Specifying an address is optional // for contiguous ranges. See page 27-29 of the AR0231 Developer guide for more information. int max_i[] = {1828 / 2 * 3, 1500 / 2 * 3}; auto get_next_idx = [](int cur_idx) { return (cur_idx % 3 == 1) ? cur_idx + 2 : cur_idx + 1; // Every third byte is padding }; std::map<uint16_t, std::pair<int, int>> registers; for (int register_row = 0; register_row < 2; register_row++) { uint8_t *registers_raw = data + c->ci->frame_stride * register_row; assert(registers_raw[0] == 0x0a); // Start of line int value_tag_count = 0; int first_val_idx = 0; uint16_t cur_addr = 0; for (int i = 1; i <= max_i[register_row]; i = get_next_idx(get_next_idx(i))) { int val_idx = get_next_idx(i); uint8_t tag = registers_raw[i]; uint16_t val = registers_raw[val_idx]; if (tag == 0xAA) { // Register MSB tag cur_addr = val << 8; } else if (tag == 0xA5) { // Register LSB tag cur_addr |= val; cur_addr -= 2; // Next value tag will increment address again } else if (tag == 0x5A) { // Value tag // First tag if (value_tag_count % 2 == 0) { cur_addr += 2; first_val_idx = val_idx; } else { registers[cur_addr] = std::make_pair(first_val_idx + c->ci->frame_stride * register_row, val_idx + c->ci->frame_stride * register_row); } value_tag_count++; } } } return registers; } float ar0231_parse_temp_sensor(uint16_t calib1, uint16_t calib2, uint16_t data_reg) { // See AR0231 Developer Guide - page 36 float slope = (125.0 - 55.0) / ((float)calib1 - (float)calib2); float t0 = 55.0 - slope * (float)calib2; return t0 + slope * (float)data_reg; } } // namespace AR0231::AR0231() { image_sensor = cereal::FrameData::ImageSensor::AR0231; pixel_size_mm = 0.003; data_word = true; frame_width = 1928; frame_height = 1208; frame_stride = (frame_width * 12 / 8) + 4; extra_height = AR0231_REGISTERS_HEIGHT + AR0231_STATS_HEIGHT; registers_offset = 0; frame_offset = AR0231_REGISTERS_HEIGHT; stats_offset = AR0231_REGISTERS_HEIGHT + frame_height; start_reg_array.assign(std::begin(start_reg_array_ar0231), std::end(start_reg_array_ar0231)); init_reg_array.assign(std::begin(init_array_ar0231), std::end(init_array_ar0231)); probe_reg_addr = 0x3000; probe_expected_data = 0x354; mipi_format = CAM_FORMAT_MIPI_RAW_12; frame_data_type = 0x12; // Changing stats to 0x2C doesn't work, so change pixels to 0x12 instead mclk_frequency = 19200000; //Hz dc_gain_factor = 2.5; dc_gain_min_weight = 0; dc_gain_max_weight = 1; dc_gain_on_grey = 0.2; 
dc_gain_off_grey = 0.3; exposure_time_min = 2; // with HDR, fastest ss exposure_time_max = 0x0855; // with HDR, slowest ss, 40ms analog_gain_min_idx = 0x1; // 0.25x analog_gain_rec_idx = 0x6; // 0.8x analog_gain_max_idx = 0xD; // 4.0x analog_gain_cost_delta = 0; analog_gain_cost_low = 0.1; analog_gain_cost_high = 5.0; for (int i = 0; i <= analog_gain_max_idx; i++) { sensor_analog_gains[i] = sensor_analog_gains_AR0231[i]; } min_ev = exposure_time_min * sensor_analog_gains[analog_gain_min_idx]; max_ev = exposure_time_max * dc_gain_factor * sensor_analog_gains[analog_gain_max_idx]; target_grey_factor = 1.0; } void AR0231::processRegisters(CameraState *c, cereal::FrameData::Builder &framed) const { const uint8_t expected_preamble[] = {0x0a, 0xaa, 0x55, 0x20, 0xa5, 0x55}; uint8_t *data = (uint8_t *)c->buf.cur_camera_buf->addr + c->ci->registers_offset; if (memcmp(data, expected_preamble, std::size(expected_preamble)) != 0) { LOGE("unexpected register data found"); return; } if (ar0231_register_lut.empty()) { ar0231_register_lut = ar0231_build_register_lut(c, data); } std::map<uint16_t, uint16_t> registers; for (uint16_t addr : {0x2000, 0x2002, 0x20b0, 0x20b2, 0x30c6, 0x30c8, 0x30ca, 0x30cc}) { auto offset = ar0231_register_lut[addr]; registers[addr] = ((uint16_t)data[offset.first] << 8) | data[offset.second]; } uint32_t frame_id = ((uint32_t)registers[0x2000] << 16) | registers[0x2002]; framed.setFrameIdSensor(frame_id); float temp_0 = ar0231_parse_temp_sensor(registers[0x30c6], registers[0x30c8], registers[0x20b0]); float temp_1 = ar0231_parse_temp_sensor(registers[0x30ca], registers[0x30cc], registers[0x20b2]); framed.setTemperaturesC({temp_0, temp_1}); } std::vector<i2c_random_wr_payload> AR0231::getExposureRegisters(int exposure_time, int new_exp_g, bool dc_gain_enabled) const { uint16_t analog_gain_reg = 0xFF00 | (new_exp_g << 4) | new_exp_g; return { {0x3366, analog_gain_reg}, {0x3362, (uint16_t)(dc_gain_enabled ? 0x1 : 0x0)}, {0x3012, (uint16_t)exposure_time}, }; } int AR0231::getSlaveAddress(int port) const { assert(port >= 0 && port <= 2); return (int[]){0x20, 0x30, 0x20}[port]; } float AR0231::getExposureScore(float desired_ev, int exp_t, int exp_g_idx, float exp_gain, int gain_idx) const { // Cost of ev diff float score = std::abs(desired_ev - (exp_t * exp_gain)) * 10; // Cost of absolute gain float m = exp_g_idx > analog_gain_rec_idx ? analog_gain_cost_high : analog_gain_cost_low; score += std::abs(exp_g_idx - (int)analog_gain_rec_idx) * m; // Cost of changing gain score += std::abs(exp_g_idx - gain_idx) * (score + 1.0) / 10.0; return score; }
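A worked example of the two-point linear calibration used by ar0231_parse_temp_sensor() above: calib1 and calib2 are the raw readings at 125 C and 55 C per the AR0231 developer guide, and any raw reading is mapped onto that line. The calibration counts below are made-up placeholders, not real sensor data.

// Illustrative example, not part of ar0231.cc.
#include <cstdio>

static float parse_temp(float calib1, float calib2, float data_reg) {
  float slope = (125.0f - 55.0f) / (calib1 - calib2);
  float t0 = 55.0f - slope * calib2;
  return t0 + slope * data_reg;
}

int main() {
  // hypothetical calibration: 1200 counts at 125C, 800 counts at 55C
  printf("%.1f C\n", parse_temp(1200, 800, 800));   // -> 55.0, the low calibration point
  printf("%.1f C\n", parse_temp(1200, 800, 1000));  // -> 90.0, halfway between the two points
  return 0;
}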
2301_81045437/openpilot
system/camerad/sensors/ar0231.cc
C++
mit
6,591
#if SENSOR_ID == 1

#define BIT_DEPTH 12
#define PV_MAX 4096
#define BLACK_LVL 168
#define VIGNETTE_RSZ 1.0f

float4 normalize_pv(int4 parsed, float vignette_factor) {
  float4 pv = (convert_float4(parsed) - BLACK_LVL) / (PV_MAX - BLACK_LVL);
  return clamp(pv*vignette_factor, 0.0, 1.0);
}

float3 color_correct(float3 rgb) {
  float3 corrected = rgb.x * (float3)(1.82717181, -0.31231438, 0.07307673);
  corrected += rgb.y * (float3)(-0.5743977, 1.36858544, -0.53183455);
  corrected += rgb.z * (float3)(-0.25277411, -0.05627105, 1.45875782);
  return corrected;
}

float3 apply_gamma(float3 rgb, int expo_time) {
  // tone mapping params
  const float gamma_k = 0.75;
  const float gamma_b = 0.125;
  const float mp = 0.01; // ideally midpoint should be adaptive
  const float rk = 9 - 100*mp;
  // poly approximation for s curve
  return (rgb > mp) ?
    ((rk * (rgb-mp) * (1-(gamma_k*mp+gamma_b)) * (1+1/(rk*(1-mp))) / (1+rk*(rgb-mp))) + gamma_k*mp + gamma_b) :
    ((rk * (rgb-mp) * (gamma_k*mp+gamma_b) * (1+1/(rk*mp)) / (1-rk*(rgb-mp))) + gamma_k*mp + gamma_b);
}

#endif
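A small illustrative check, not part of this header: each float3 literal in color_correct() is one column of a 3x3 color correction matrix, and each output channel's coefficients sum to about 1.0, so a neutral grey input stays neutral after correction.

// Illustrative check, not repository code.
#include <cassert>
#include <cmath>

int main() {
  const double ccm[3][3] = {
    { 1.82717181, -0.5743977,  -0.25277411},  // red row:   r, g, b coefficients
    {-0.31231438,  1.36858544, -0.05627105},  // green row
    { 0.07307673, -0.53183455,  1.45875782},  // blue row
  };
  for (int i = 0; i < 3; i++) {
    double row_sum = ccm[i][0] + ccm[i][1] + ccm[i][2];
    assert(std::fabs(row_sum - 1.0) < 1e-6);  // grey in -> grey out
  }
  return 0;
}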
2301_81045437/openpilot
system/camerad/sensors/ar0231_cl.h
C
mit
1,079
#pragma once const struct i2c_random_wr_payload start_reg_array_ar0231[] = {{0x301A, 0x91C}}; const struct i2c_random_wr_payload stop_reg_array_ar0231[] = {{0x301A, 0x918}}; const struct i2c_random_wr_payload init_array_ar0231[] = { {0x301A, 0x0018}, // RESET_REGISTER // CLOCK Settings // input clock is 19.2 / 2 * 0x37 = 528 MHz // pixclk is 528 / 6 = 88 MHz // full roll time is 1000/(PIXCLK/(LINE_LENGTH_PCK*FRAME_LENGTH_LINES)) = 39.99 ms // img roll time is 1000/(PIXCLK/(LINE_LENGTH_PCK*Y_OUTPUT_CONTROL)) = 22.85 ms {0x302A, 0x0006}, // VT_PIX_CLK_DIV {0x302C, 0x0001}, // VT_SYS_CLK_DIV {0x302E, 0x0002}, // PRE_PLL_CLK_DIV {0x3030, 0x0037}, // PLL_MULTIPLIER {0x3036, 0x000C}, // OP_PIX_CLK_DIV {0x3038, 0x0001}, // OP_SYS_CLK_DIV // FORMAT {0x3040, 0xC000}, // READ_MODE {0x3004, 0x0000}, // X_ADDR_START_ {0x3008, 0x0787}, // X_ADDR_END_ {0x3002, 0x0000}, // Y_ADDR_START_ {0x3006, 0x04B7}, // Y_ADDR_END_ {0x3032, 0x0000}, // SCALING_MODE {0x30A2, 0x0001}, // X_ODD_INC_ {0x30A6, 0x0001}, // Y_ODD_INC_ {0x3402, 0x0788}, // X_OUTPUT_CONTROL {0x3404, 0x04B8}, // Y_OUTPUT_CONTROL {0x3064, 0x1982}, // SMIA_TEST {0x30BA, 0x11F2}, // DIGITAL_CTRL // Enable external trigger and disable GPIO outputs {0x30CE, 0x0120}, // SLAVE_SH_SYNC_MODE | FRAME_START_MODE {0x340A, 0xE0}, // GPIO3_INPUT_DISABLE | GPIO2_INPUT_DISABLE | GPIO1_INPUT_DISABLE {0x340C, 0x802}, // GPIO_HIDRV_EN | GPIO0_ISEL=2 // Readout timing {0x300C, 0x0672}, // LINE_LENGTH_PCK (valid for 3-exposure HDR) {0x300A, 0x0855}, // FRAME_LENGTH_LINES {0x3042, 0x0000}, // EXTRA_DELAY // Readout Settings {0x31AE, 0x0204}, // SERIAL_FORMAT, 4-lane MIPI {0x31AC, 0x0C0C}, // DATA_FORMAT_BITS, 12 -> 12 {0x3342, 0x1212}, // MIPI_F1_PDT_EDT {0x3346, 0x1212}, // MIPI_F2_PDT_EDT {0x334A, 0x1212}, // MIPI_F3_PDT_EDT {0x334E, 0x1212}, // MIPI_F4_PDT_EDT {0x3344, 0x0011}, // MIPI_F1_VDT_VC {0x3348, 0x0111}, // MIPI_F2_VDT_VC {0x334C, 0x0211}, // MIPI_F3_VDT_VC {0x3350, 0x0311}, // MIPI_F4_VDT_VC {0x31B0, 0x0053}, // FRAME_PREAMBLE {0x31B2, 0x003B}, // LINE_PREAMBLE {0x301A, 0x001C}, // RESET_REGISTER // Noise Corrections {0x3092, 0x0C24}, // ROW_NOISE_CONTROL {0x337A, 0x0C80}, // DBLC_SCALE0 {0x3370, 0x03B1}, // DBLC {0x3044, 0x0400}, // DARK_CONTROL // Enable temperature sensor {0x30B4, 0x0007}, // TEMPSENS0_CTRL_REG {0x30B8, 0x0007}, // TEMPSENS1_CTRL_REG // Enable dead pixel correction using // the 1D line correction scheme {0x31E0, 0x0003}, // HDR Settings {0x3082, 0x0004}, // OPERATION_MODE_CTRL {0x3238, 0x0444}, // EXPOSURE_RATIO {0x1008, 0x0361}, // FINE_INTEGRATION_TIME_MIN {0x100C, 0x0589}, // FINE_INTEGRATION_TIME2_MIN {0x100E, 0x07B1}, // FINE_INTEGRATION_TIME3_MIN {0x1010, 0x0139}, // FINE_INTEGRATION_TIME4_MIN // TODO: do these have to be lower than LINE_LENGTH_PCK? {0x3014, 0x08CB}, // FINE_INTEGRATION_TIME_ {0x321E, 0x0894}, // FINE_INTEGRATION_TIME2 {0x31D0, 0x0000}, // COMPANDING, no good in 10 bit? 
{0x33DA, 0x0000}, // COMPANDING {0x318E, 0x0200}, // PRE_HDR_GAIN_EN // DLO Settings {0x3100, 0x4000}, // DLO_CONTROL0 {0x3280, 0x0CCC}, // T1 G1 {0x3282, 0x0CCC}, // T1 R {0x3284, 0x0CCC}, // T1 B {0x3286, 0x0CCC}, // T1 G2 {0x3288, 0x0FA0}, // T2 G1 {0x328A, 0x0FA0}, // T2 R {0x328C, 0x0FA0}, // T2 B {0x328E, 0x0FA0}, // T2 G2 // Initial Gains {0x3022, 0x0001}, // GROUPED_PARAMETER_HOLD_ {0x3366, 0xFF77}, // ANALOG_GAIN (1x) {0x3060, 0x3333}, // ANALOG_COLOR_GAIN {0x3362, 0x0000}, // DC GAIN {0x305A, 0x00F8}, // red gain {0x3058, 0x0122}, // blue gain {0x3056, 0x009A}, // g1 gain {0x305C, 0x009A}, // g2 gain {0x3022, 0x0000}, // GROUPED_PARAMETER_HOLD_ // Initial Integration Time {0x3012, 0x0005}, };
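An illustrative arithmetic check, not part of this header, reproducing the timing figures quoted in the clock comments above from the register values themselves (PLL_MULTIPLIER=0x37, PRE_PLL_CLK_DIV=2, VT_PIX_CLK_DIV=6, LINE_LENGTH_PCK=0x672, FRAME_LENGTH_LINES=0x855).

// Illustrative check, not repository code.
#include <cstdio>

int main() {
  double extclk = 19.2e6;
  double vco = extclk / 2 * 0x37;       // 528 MHz
  double pixclk = vco / 6;              // 88 MHz
  double line_length_pck = 0x672;       // 1650 pixel clocks per line
  double frame_length_lines = 0x855;    // 2133 lines per frame
  double frame_time_ms = 1e3 * line_length_pck * frame_length_lines / pixclk;
  printf("pixclk %.0f MHz, frame time %.2f ms\n", pixclk / 1e6, frame_time_ms);  // ~88 MHz, ~40 ms
  return 0;
}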
2301_81045437/openpilot
system/camerad/sensors/ar0231_registers.h
C
mit
3,828
#include "system/camerad/sensors/sensor.h" namespace { const float sensor_analog_gains_OS04C10[] = { 1.0, 1.0625, 1.125, 1.1875, 1.25, 1.3125, 1.375, 1.4375, 1.5, 1.5625, 1.6875, 1.8125, 1.9375, 2.0, 2.125, 2.25, 2.375, 2.5, 2.625, 2.75, 2.875, 3.0, 3.125, 3.375, 3.625, 3.875, 4.0, 4.25, 4.5, 4.75, 5.0, 5.25, 5.5, 5.75, 6.0, 6.25, 6.5, 7.0, 7.5, 8.0, 8.5, 9.0, 9.5, 10.0, 10.5, 11.0, 11.5, 12.0, 12.5, 13.0, 13.5, 14.0, 14.5, 15.0, 15.5}; const uint32_t os04c10_analog_gains_reg[] = { 0x080, 0x088, 0x090, 0x098, 0x0A0, 0x0A8, 0x0B0, 0x0B8, 0x0C0, 0x0C8, 0x0D8, 0x0E8, 0x0F8, 0x100, 0x110, 0x120, 0x130, 0x140, 0x150, 0x160, 0x170, 0x180, 0x190, 0x1B0, 0x1D0, 0x1F0, 0x200, 0x220, 0x240, 0x260, 0x280, 0x2A0, 0x2C0, 0x2E0, 0x300, 0x320, 0x340, 0x380, 0x3C0, 0x400, 0x440, 0x480, 0x4C0, 0x500, 0x540, 0x580, 0x5C0, 0x600, 0x640, 0x680, 0x6C0, 0x700, 0x740, 0x780, 0x7C0}; } // namespace OS04C10::OS04C10() { image_sensor = cereal::FrameData::ImageSensor::OS04C10; pixel_size_mm = 0.002; data_word = false; hdr_offset = 64 * 2 + 8; // stagger frame_width = 2688; frame_height = 1520 * 2 + hdr_offset; frame_stride = (frame_width * 10 / 8); // no alignment extra_height = 0; frame_offset = 0; start_reg_array.assign(std::begin(start_reg_array_os04c10), std::end(start_reg_array_os04c10)); init_reg_array.assign(std::begin(init_array_os04c10), std::end(init_array_os04c10)); probe_reg_addr = 0x300a; probe_expected_data = 0x5304; mipi_format = CAM_FORMAT_MIPI_RAW_10; frame_data_type = 0x2b; mclk_frequency = 24000000; // Hz dc_gain_factor = 1; dc_gain_min_weight = 1; // always on is fine dc_gain_max_weight = 1; dc_gain_on_grey = 0.9; dc_gain_off_grey = 1.0; exposure_time_min = 2; exposure_time_max = 2400; analog_gain_min_idx = 0x0; analog_gain_rec_idx = 0x0; // 1x analog_gain_max_idx = 0x36; analog_gain_cost_delta = -1; analog_gain_cost_low = 0.4; analog_gain_cost_high = 6.4; for (int i = 0; i <= analog_gain_max_idx; i++) { sensor_analog_gains[i] = sensor_analog_gains_OS04C10[i]; } min_ev = (exposure_time_min) * sensor_analog_gains[analog_gain_min_idx]; max_ev = exposure_time_max * dc_gain_factor * sensor_analog_gains[analog_gain_max_idx]; target_grey_factor = 0.01; } std::vector<i2c_random_wr_payload> OS04C10::getExposureRegisters(int exposure_time, int new_exp_g, bool dc_gain_enabled) const { uint32_t long_time = exposure_time; uint32_t real_gain = os04c10_analog_gains_reg[new_exp_g]; return { {0x3501, long_time>>8}, {0x3502, long_time&0xFF}, {0x3508, real_gain>>8}, {0x3509, real_gain&0xFF}, {0x350c, real_gain>>8}, {0x350d, real_gain&0xFF}, }; } int OS04C10::getSlaveAddress(int port) const { assert(port >= 0 && port <= 2); return (int[]){0x6C, 0x20, 0x6C}[port]; } float OS04C10::getExposureScore(float desired_ev, int exp_t, int exp_g_idx, float exp_gain, int gain_idx) const { float score = std::abs(desired_ev - (exp_t * exp_gain)); float m = exp_g_idx > analog_gain_rec_idx ? analog_gain_cost_high : analog_gain_cost_low; score += std::abs(exp_g_idx - (int)analog_gain_rec_idx) * m; score += ((1 - analog_gain_cost_delta) + analog_gain_cost_delta * (exp_g_idx - analog_gain_min_idx) / (analog_gain_max_idx - analog_gain_min_idx)) * std::abs(exp_g_idx - gain_idx) * 3.0; return score; }
2301_81045437/openpilot
system/camerad/sensors/os04c10.cc
C++
mit
3,398
#if SENSOR_ID == 3

#define BGGR

#define BIT_DEPTH 10
#define PV_MAX10 1023
#define PV_MAX16 65536 // gamma curve is calibrated to 16bit
#define BLACK_LVL 64
#define VIGNETTE_RSZ 2.2545f

float combine_dual_pvs(float lv, float sv, int expo_time) {
  float svc = fmax(sv * expo_time, (float)(64 * (PV_MAX10 - BLACK_LVL)));
  float svd = sv * fmin(expo_time, 8.0) / 8;

  if (expo_time > 64) {
    if (lv < PV_MAX10 - BLACK_LVL) {
      return lv / (PV_MAX16 - BLACK_LVL);
    } else {
      return (svc / 64) / (PV_MAX16 - BLACK_LVL);
    }
  } else {
    if (lv > 32) {
      return (lv * 64 / fmax(expo_time, 8.0)) / (PV_MAX16 - BLACK_LVL);
    } else {
      return svd / (PV_MAX16 - BLACK_LVL);
    }
  }
}

float4 normalize_pv_hdr(int4 parsed, int4 short_parsed, float vignette_factor, int expo_time) {
  float4 pl = convert_float4(parsed - BLACK_LVL);
  float4 ps = convert_float4(short_parsed - BLACK_LVL);

  float4 pv;
  pv.s0 = combine_dual_pvs(pl.s0, ps.s0, expo_time);
  pv.s1 = combine_dual_pvs(pl.s1, ps.s1, expo_time);
  pv.s2 = combine_dual_pvs(pl.s2, ps.s2, expo_time);
  pv.s3 = combine_dual_pvs(pl.s3, ps.s3, expo_time);
  return clamp(pv*vignette_factor, 0.0, 1.0);
}

float3 color_correct(float3 rgb) {
  float3 corrected = rgb.x * (float3)(1.55361989, -0.268894615, -0.000593219);
  corrected += rgb.y * (float3)(-0.421217301, 1.51883144, -0.69760146);
  corrected += rgb.z * (float3)(-0.132402589, -0.249936825, 1.69819468);
  return corrected;
}

float3 apply_gamma(float3 rgb, int expo_time) {
  float s = log2((float)expo_time);
  if (s < 6) {s = fmin(12.0 - s, 9.0);}  // log function adaptive to number of bits
  return clamp(log(1 + rgb*(PV_MAX16 - BLACK_LVL)) * (0.48*s*s - 12.92*s + 115.0) - (1.08*s*s - 29.2*s + 260.0), 0.0, 255.0) / 255.0;
}

#endif
2301_81045437/openpilot
system/camerad/sensors/os04c10_cl.h
C
mit
1,783
#pragma once const struct i2c_random_wr_payload start_reg_array_os04c10[] = {{0x100, 1}}; const struct i2c_random_wr_payload stop_reg_array_os04c10[] = {{0x100, 0}}; const struct i2c_random_wr_payload init_array_os04c10[] = { // DP_2688X1520_NEWSTG_MIPI0776Mbps_30FPS_10BIT_FOURLANE {0x0103, 0x01}, // PLL {0x0301, 0x84}, {0x0303, 0x01}, {0x0305, 0x61}, {0x0306, 0x01}, {0x0307, 0x17}, {0x0323, 0x04}, {0x0324, 0x01}, {0x0325, 0x7a}, {0x3012, 0x06}, {0x3013, 0x02}, {0x3016, 0x72}, {0x3021, 0x03}, {0x3106, 0x21}, {0x3107, 0xa1}, // ? {0x3624, 0x00}, {0x3625, 0x4c}, {0x3660, 0x04}, {0x3666, 0xa5}, {0x3667, 0xa5}, {0x366a, 0x54}, {0x3673, 0x0d}, {0x3672, 0x0d}, {0x3671, 0x0d}, {0x3670, 0x0d}, {0x3685, 0x0a}, {0x3694, 0x0d}, {0x3693, 0x0d}, {0x3692, 0x0d}, {0x3691, 0x0d}, {0x3696, 0x4c}, {0x3697, 0x4c}, {0x3698, 0x00}, {0x3699, 0x80}, {0x369a, 0x80}, {0x369b, 0x1f}, {0x369c, 0x1f}, {0x369d, 0x80}, {0x369e, 0x40}, {0x369f, 0x21}, {0x36a0, 0x12}, {0x36a1, 0xdd}, {0x36a2, 0x66}, {0x370a, 0x00}, {0x370e, 0x00}, {0x3710, 0x00}, {0x3713, 0x04}, {0x3725, 0x02}, {0x372a, 0x03}, {0x3738, 0xce}, {0x3748, 0x00}, {0x374a, 0x00}, {0x374c, 0x00}, {0x374e, 0x00}, {0x3756, 0x00}, {0x3757, 0x00}, {0x3767, 0x00}, {0x3771, 0x00}, {0x377b, 0x28}, {0x377c, 0x00}, {0x377d, 0x0c}, {0x3781, 0x03}, {0x3782, 0x00}, {0x3789, 0x14}, {0x3795, 0x02}, {0x379c, 0x00}, {0x379d, 0x00}, {0x37b8, 0x04}, {0x37ba, 0x03}, {0x37bb, 0x00}, {0x37bc, 0x04}, {0x37be, 0x26}, {0x37c4, 0x11}, {0x37c5, 0x80}, {0x37c6, 0x14}, {0x37c7, 0xa8}, {0x37da, 0x11}, {0x381f, 0x08}, // {0x3829, 0x03}, // {0x3832, 0x00}, {0x3881, 0x00}, {0x3888, 0x04}, {0x388b, 0x00}, {0x3c80, 0x10}, {0x3c86, 0x00}, // {0x3c8c, 0x20}, {0x3c9f, 0x01}, {0x3d85, 0x1b}, {0x3d8c, 0x71}, {0x3d8d, 0xe2}, {0x3f00, 0x0b}, {0x3f06, 0x04}, // BLC {0x400a, 0x01}, {0x400b, 0x50}, {0x400e, 0x08}, {0x4043, 0x7e}, {0x4045, 0x7e}, {0x4047, 0x7e}, {0x4049, 0x7e}, {0x4090, 0x14}, {0x40b0, 0x00}, {0x40b1, 0x00}, {0x40b2, 0x00}, {0x40b3, 0x00}, {0x40b4, 0x00}, {0x40b5, 0x00}, {0x40b7, 0x00}, {0x40b8, 0x00}, {0x40b9, 0x00}, {0x40ba, 0x01}, {0x4301, 0x00}, {0x4303, 0x00}, {0x4502, 0x04}, {0x4503, 0x00}, {0x4504, 0x06}, {0x4506, 0x00}, {0x4507, 0x57}, {0x4803, 0x00}, {0x480c, 0x32}, {0x480e, 0x04}, {0x4813, 0xe4}, {0x4819, 0x70}, {0x481f, 0x30}, {0x4823, 0x3f}, {0x4825, 0x30}, {0x4833, 0x10}, {0x484b, 0x07}, {0x488b, 0x00}, {0x4d00, 0x04}, {0x4d01, 0xad}, {0x4d02, 0xbc}, {0x4d03, 0xa1}, {0x4d04, 0x1f}, {0x4d05, 0x4c}, {0x4d0b, 0x01}, {0x4e00, 0x2a}, {0x4e0d, 0x00}, // ISP {0x5001, 0x00}, {0x5004, 0x00}, {0x5080, 0x04}, {0x5036, 0x80}, {0x5180, 0x70}, {0x5181, 0x10}, // DPC {0x520a, 0x03}, {0x520b, 0x06}, {0x520c, 0x0c}, {0x580b, 0x0f}, {0x580d, 0x00}, {0x580f, 0x00}, {0x5820, 0x00}, {0x5821, 0x00}, {0x301c, 0xf8}, {0x301e, 0xb4}, {0x301f, 0xf0}, {0x3022, 0x01}, {0x3109, 0xe7}, {0x3600, 0x00}, {0x3610, 0x75}, {0x3611, 0x85}, {0x3613, 0x3a}, {0x3615, 0x60}, {0x3621, 0x90}, {0x3620, 0x0c}, {0x3629, 0x00}, {0x3661, 0x04}, {0x3664, 0x70}, {0x3665, 0x00}, {0x3681, 0x80}, {0x3682, 0x40}, {0x3683, 0x21}, {0x3684, 0x12}, {0x3700, 0x2a}, {0x3701, 0x12}, {0x3703, 0x28}, {0x3704, 0x0e}, {0x3706, 0x4a}, {0x3709, 0x4a}, {0x370b, 0xa2}, {0x370c, 0x01}, {0x370f, 0x00}, {0x3714, 0x24}, {0x3716, 0x04}, {0x3719, 0x11}, {0x371a, 0x1e}, {0x3720, 0x00}, {0x3724, 0x13}, {0x373f, 0xb0}, {0x3741, 0x4a}, {0x3743, 0x4a}, {0x3745, 0x4a}, {0x3747, 0x4a}, {0x3749, 0xa2}, {0x374b, 0xa2}, {0x374d, 0xa2}, {0x374f, 0xa2}, {0x3755, 0x10}, {0x376c, 0x00}, {0x378d, 0x30}, {0x3790, 0x4a}, {0x3791, 0xa2}, {0x3798, 0x40}, {0x379e, 
0x00}, {0x379f, 0x04}, {0x37a1, 0x10}, {0x37a2, 0x1e}, {0x37a8, 0x10}, {0x37a9, 0x1e}, {0x37ac, 0xa0}, {0x37b9, 0x01}, {0x37bd, 0x01}, {0x37bf, 0x26}, {0x37c0, 0x11}, {0x37c2, 0x04}, {0x37cd, 0x19}, // {0x37e0, 0x08}, // {0x37e6, 0x04}, {0x37e5, 0x02}, // {0x37e1, 0x0c}, // {0x3737, 0x04}, {0x37d8, 0x02}, // {0x37e2, 0x10}, {0x3739, 0x10}, {0x3662, 0x10}, // {0x37e4, 0x20}, // {0x37e3, 0x08}, {0x37d9, 0x08}, {0x4040, 0x00}, {0x4041, 0x07}, {0x4008, 0x02}, {0x4009, 0x0d}, // FSIN {0x3002, 0x22}, {0x3663, 0x22}, {0x368a, 0x04}, {0x3822, 0x44}, {0x3823, 0x00}, {0x3829, 0x03}, {0x3832, 0xf8}, {0x382c, 0x00}, {0x3844, 0x06}, {0x3843, 0x00}, {0x382a, 0x00}, {0x382b, 0x0c}, // 2704x1536 -> 2688x1520 out {0x3800, 0x00}, {0x3801, 0x00}, {0x3802, 0x00}, {0x3803, 0x00}, {0x3804, 0x0a}, {0x3805, 0x8f}, {0x3806, 0x05}, {0x3807, 0xff}, {0x3808, 0x0a}, {0x3809, 0x80}, {0x380a, 0x05}, {0x380b, 0xf0}, {0x3811, 0x08}, {0x3813, 0x08}, {0x3814, 0x01}, {0x3815, 0x01}, {0x3816, 0x01}, {0x3817, 0x01}, {0x380c, 0x04}, {0x380d, 0x2e}, // HTS {0x380e, 0x09}, {0x380f, 0xdb}, // VTS {0x3820, 0xb0}, {0x3821, 0x04}, {0x3880, 0x00}, {0x3882, 0x20}, {0x3c91, 0x0b}, {0x3c94, 0x45}, // {0x3cad, 0x00}, // {0x3cae, 0x00}, {0x4000, 0xf3}, {0x4001, 0x60}, {0x4003, 0x40}, {0x4300, 0xff}, {0x4302, 0x0f}, {0x4305, 0x93}, {0x4505, 0x84}, {0x4809, 0x0e}, {0x480a, 0x04}, {0x4837, 0x14}, {0x4c00, 0x08}, {0x4c01, 0x08}, {0x4c04, 0x00}, {0x4c05, 0x00}, {0x5000, 0xf9}, // {0x0100, 0x01}, // {0x320d, 0x00}, // {0x3208, 0xa0}, // {0x3822, 0x14}, // initialize exposure {0x3503, 0x88}, // long {0x3500, 0x00}, {0x3501, 0x00}, {0x3502, 0x10}, {0x3508, 0x00}, {0x3509, 0x80}, {0x350a, 0x04}, {0x350b, 0x00}, // short {0x3510, 0x00}, {0x3511, 0x00}, {0x3512, 0x40}, {0x350c, 0x00}, {0x350d, 0x80}, {0x350e, 0x04}, {0x350f, 0x00}, // wb // b {0x5100, 0x06}, {0x5101, 0x7e}, {0x5140, 0x06}, {0x5141, 0x7e}, // g {0x5102, 0x04}, {0x5103, 0x00}, {0x5142, 0x04}, {0x5143, 0x00}, // r {0x5104, 0x08}, {0x5105, 0xd6}, {0x5144, 0x08}, {0x5145, 0xd6}, };
2301_81045437/openpilot
system/camerad/sensors/os04c10_registers.h
C
mit
6,209
#include "system/camerad/sensors/sensor.h" namespace { const float sensor_analog_gains_OX03C10[] = { 1.0, 1.0625, 1.125, 1.1875, 1.25, 1.3125, 1.375, 1.4375, 1.5, 1.5625, 1.6875, 1.8125, 1.9375, 2.0, 2.125, 2.25, 2.375, 2.5, 2.625, 2.75, 2.875, 3.0, 3.125, 3.375, 3.625, 3.875, 4.0, 4.25, 4.5, 4.75, 5.0, 5.25, 5.5, 5.75, 6.0, 6.25, 6.5, 7.0, 7.5, 8.0, 8.5, 9.0, 9.5, 10.0, 10.5, 11.0, 11.5, 12.0, 12.5, 13.0, 13.5, 14.0, 14.5, 15.0, 15.5}; const uint32_t ox03c10_analog_gains_reg[] = { 0x100, 0x110, 0x120, 0x130, 0x140, 0x150, 0x160, 0x170, 0x180, 0x190, 0x1B0, 0x1D0, 0x1F0, 0x200, 0x220, 0x240, 0x260, 0x280, 0x2A0, 0x2C0, 0x2E0, 0x300, 0x320, 0x360, 0x3A0, 0x3E0, 0x400, 0x440, 0x480, 0x4C0, 0x500, 0x540, 0x580, 0x5C0, 0x600, 0x640, 0x680, 0x700, 0x780, 0x800, 0x880, 0x900, 0x980, 0xA00, 0xA80, 0xB00, 0xB80, 0xC00, 0xC80, 0xD00, 0xD80, 0xE00, 0xE80, 0xF00, 0xF80}; const uint32_t VS_TIME_MIN_OX03C10 = 1; const uint32_t VS_TIME_MAX_OX03C10 = 34; // vs < 35 } // namespace OX03C10::OX03C10() { image_sensor = cereal::FrameData::ImageSensor::OX03C10; pixel_size_mm = 0.003; data_word = false; frame_width = 1928; frame_height = 1208; frame_stride = (frame_width * 12 / 8) + 4; extra_height = 16; // top 2 + bot 14 frame_offset = 2; start_reg_array.assign(std::begin(start_reg_array_ox03c10), std::end(start_reg_array_ox03c10)); init_reg_array.assign(std::begin(init_array_ox03c10), std::end(init_array_ox03c10)); probe_reg_addr = 0x300a; probe_expected_data = 0x5803; mipi_format = CAM_FORMAT_MIPI_RAW_12; frame_data_type = 0x2c; // one is 0x2a, two are 0x2b mclk_frequency = 24000000; //Hz dc_gain_factor = 7.32; dc_gain_min_weight = 1; // always on is fine dc_gain_max_weight = 1; dc_gain_on_grey = 0.9; dc_gain_off_grey = 1.0; exposure_time_min = 2; // 1x exposure_time_max = 2016; analog_gain_min_idx = 0x0; analog_gain_rec_idx = 0x0; // 1x analog_gain_max_idx = 0x36; analog_gain_cost_delta = -1; analog_gain_cost_low = 0.4; analog_gain_cost_high = 6.4; for (int i = 0; i <= analog_gain_max_idx; i++) { sensor_analog_gains[i] = sensor_analog_gains_OX03C10[i]; } min_ev = (exposure_time_min + VS_TIME_MIN_OX03C10) * sensor_analog_gains[analog_gain_min_idx]; max_ev = exposure_time_max * dc_gain_factor * sensor_analog_gains[analog_gain_max_idx]; target_grey_factor = 0.01; } std::vector<i2c_random_wr_payload> OX03C10::getExposureRegisters(int exposure_time, int new_exp_g, bool dc_gain_enabled) const { // t_HCG&t_LCG + t_VS on LPD, t_SPD on SPD uint32_t hcg_time = exposure_time; uint32_t lcg_time = hcg_time; uint32_t spd_time = std::min(std::max((uint32_t)exposure_time, (exposure_time_max + VS_TIME_MAX_OX03C10) / 3), exposure_time_max + VS_TIME_MAX_OX03C10); uint32_t vs_time = std::min(std::max((uint32_t)exposure_time / 40, VS_TIME_MIN_OX03C10), VS_TIME_MAX_OX03C10); uint32_t real_gain = ox03c10_analog_gains_reg[new_exp_g]; return { {0x3501, hcg_time>>8}, {0x3502, hcg_time&0xFF}, {0x3581, lcg_time>>8}, {0x3582, lcg_time&0xFF}, {0x3541, spd_time>>8}, {0x3542, spd_time&0xFF}, {0x35c2, vs_time&0xFF}, {0x3508, real_gain>>8}, {0x3509, real_gain&0xFF}, }; } int OX03C10::getSlaveAddress(int port) const { assert(port >= 0 && port <= 2); return (int[]){0x6C, 0x20, 0x6C}[port]; } float OX03C10::getExposureScore(float desired_ev, int exp_t, int exp_g_idx, float exp_gain, int gain_idx) const { float score = std::abs(desired_ev - (exp_t * exp_gain)); float m = exp_g_idx > analog_gain_rec_idx ? 
analog_gain_cost_high : analog_gain_cost_low; score += std::abs(exp_g_idx - (int)analog_gain_rec_idx) * m; score += ((1 - analog_gain_cost_delta) + analog_gain_cost_delta * (exp_g_idx - analog_gain_min_idx) / (analog_gain_max_idx - analog_gain_min_idx)) * std::abs(exp_g_idx - gain_idx) * 5.0; return score; }
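A worked example, not part of ox03c10.cc, mirroring the clamping math in getExposureRegisters() above: HCG and LCG share the requested exposure, SPD is held at least at one third of (exposure_time_max + VS_TIME_MAX), and VS is roughly exposure/40 clamped to [1, 34]. The request of 500 rows is an arbitrary example input.

// Illustrative example, not repository code.
#include <algorithm>
#include <cstdint>
#include <cstdio>

int main() {
  const uint32_t exposure_time_max = 2016, vs_min = 1, vs_max = 34;
  uint32_t exposure_time = 500;  // hypothetical requested exposure, in rows

  uint32_t hcg = exposure_time, lcg = hcg;
  uint32_t spd = std::min(std::max(exposure_time, (exposure_time_max + vs_max) / 3), exposure_time_max + vs_max);
  uint32_t vs = std::min(std::max(exposure_time / 40, vs_min), vs_max);

  printf("hcg %u lcg %u spd %u vs %u\n", hcg, lcg, spd, vs);  // -> hcg 500 lcg 500 spd 683 vs 12
  return 0;
}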
2301_81045437/openpilot
system/camerad/sensors/ox03c10.cc
C++
mit
3,934
#pragma once const struct i2c_random_wr_payload start_reg_array_ox03c10[] = {{0x100, 1}}; const struct i2c_random_wr_payload stop_reg_array_ox03c10[] = {{0x100, 0}}; const struct i2c_random_wr_payload init_array_ox03c10[] = { {0x103, 1}, {0x107, 1}, // X3C_1920x1280_60fps_HDR4_LFR_PWL12_mipi1200 // TPM {0x4d5a, 0x1a}, {0x4d09, 0xff}, {0x4d09, 0xdf}, /*) // group 4 {0x3208, 0x04}, {0x4620, 0x04}, {0x3208, 0x14}, // group 5 {0x3208, 0x05}, {0x4620, 0x04}, {0x3208, 0x15}, // group 2 {0x3208, 0x02}, {0x3507, 0x00}, {0x3208, 0x12}, // delay launch group 2 {0x3208, 0xa2},*/ // PLL setup {0x0301, 0xc8}, // pll1_divs, pll1_predivp, pll1_divpix {0x0303, 0x01}, // pll1_prediv {0x0304, 0x01}, {0x0305, 0x2c}, // pll1_loopdiv = 300 {0x0306, 0x04}, // pll1_divmipi = 4 {0x0307, 0x01}, // pll1_divm = 1 {0x0316, 0x00}, {0x0317, 0x00}, {0x0318, 0x00}, {0x0323, 0x05}, // pll2_prediv {0x0324, 0x01}, {0x0325, 0x2c}, // pll2_divp = 300 // SCLK/PCLK {0x0400, 0xe0}, {0x0401, 0x80}, {0x0403, 0xde}, {0x0404, 0x34}, {0x0405, 0x3b}, {0x0406, 0xde}, {0x0407, 0x08}, {0x0408, 0xe0}, {0x0409, 0x7f}, {0x040a, 0xde}, {0x040b, 0x34}, {0x040c, 0x47}, {0x040d, 0xd8}, {0x040e, 0x08}, // xchk {0x2803, 0xfe}, {0x280b, 0x00}, {0x280c, 0x79}, // SC ctrl {0x3001, 0x03}, // io_pad_oen {0x3002, 0xfc}, // io_pad_oen {0x3005, 0x80}, // io_pad_out {0x3007, 0x01}, // io_pad_sel {0x3008, 0x80}, // io_pad_sel // FSIN first frame /* {0x3009, 0x2}, {0x3015, 0x2}, {0x3822, 0x20}, {0x3823, 0x58}, {0x3826, 0x0}, {0x3827, 0x8}, {0x3881, 0x4}, {0x3882, 0x8}, {0x3883, 0x0D}, {0x3836, 0x1F}, {0x3837, 0x40}, */ // FSIN with external pulses {0x3009, 0x2}, {0x3015, 0x2}, {0x383E, 0x80}, {0x3881, 0x4}, {0x3882, 0x8}, {0x3883, 0x0D}, {0x3836, 0x1F}, {0x3837, 0x40}, {0x3892, 0x44}, {0x3823, 0x48}, {0x3012, 0x41}, // SC_PHY_CTRL = 4 lane MIPI {0x3020, 0x05}, // SC_CTRL_20 // this is not in the datasheet, listed as RSVD // but the camera doesn't work without it {0x3700, 0x28}, {0x3701, 0x15}, {0x3702, 0x19}, {0x3703, 0x23}, {0x3704, 0x0a}, {0x3705, 0x00}, {0x3706, 0x3e}, {0x3707, 0x0d}, {0x3708, 0x50}, {0x3709, 0x5a}, {0x370a, 0x00}, {0x370b, 0x96}, {0x3711, 0x11}, {0x3712, 0x13}, {0x3717, 0x02}, {0x3718, 0x73}, {0x372c, 0x40}, {0x3733, 0x01}, {0x3738, 0x36}, {0x3739, 0x36}, {0x373a, 0x25}, {0x373b, 0x25}, {0x373f, 0x21}, {0x3740, 0x21}, {0x3741, 0x21}, {0x3742, 0x21}, {0x3747, 0x28}, {0x3748, 0x28}, {0x3749, 0x19}, {0x3755, 0x1a}, {0x3756, 0x0a}, {0x3757, 0x1c}, {0x3765, 0x19}, {0x3766, 0x05}, {0x3767, 0x05}, {0x3768, 0x13}, {0x376c, 0x07}, {0x3778, 0x20}, {0x377c, 0xc8}, {0x3781, 0x02}, {0x3783, 0x02}, {0x379c, 0x58}, {0x379e, 0x00}, {0x379f, 0x00}, {0x37a0, 0x00}, {0x37bc, 0x22}, {0x37c0, 0x01}, {0x37c4, 0x3e}, {0x37c5, 0x3e}, {0x37c6, 0x2a}, {0x37c7, 0x28}, {0x37c8, 0x02}, {0x37c9, 0x12}, {0x37cb, 0x29}, {0x37cd, 0x29}, {0x37d2, 0x00}, {0x37d3, 0x73}, {0x37d6, 0x00}, {0x37d7, 0x6b}, {0x37dc, 0x00}, {0x37df, 0x54}, {0x37e2, 0x00}, {0x37e3, 0x00}, {0x37f8, 0x00}, {0x37f9, 0x01}, {0x37fa, 0x00}, {0x37fb, 0x19}, // also RSVD {0x3c03, 0x01}, {0x3c04, 0x01}, {0x3c06, 0x21}, {0x3c08, 0x01}, {0x3c09, 0x01}, {0x3c0a, 0x01}, {0x3c0b, 0x21}, {0x3c13, 0x21}, {0x3c14, 0x82}, {0x3c16, 0x13}, {0x3c21, 0x00}, {0x3c22, 0xf3}, {0x3c37, 0x12}, {0x3c38, 0x31}, {0x3c3c, 0x00}, {0x3c3d, 0x03}, {0x3c44, 0x16}, {0x3c5c, 0x8a}, {0x3c5f, 0x03}, {0x3c61, 0x80}, {0x3c6f, 0x2b}, {0x3c70, 0x5f}, {0x3c71, 0x2c}, {0x3c72, 0x2c}, {0x3c73, 0x2c}, {0x3c76, 0x12}, // PEC checks {0x3182, 0x12}, {0x320e, 0x00}, {0x320f, 0x00}, // RSVD {0x3211, 0x61}, {0x3215, 0xcd}, {0x3219, 0x08}, 
{0x3506, 0x20}, {0x3507, 0x00}, // hcg fine exposure {0x350a, 0x01}, {0x350b, 0x00}, {0x350c, 0x00}, // hcg digital gain {0x3586, 0x40}, {0x3587, 0x00}, // lcg fine exposure {0x358a, 0x01}, {0x358b, 0x00}, {0x358c, 0x00}, // lcg digital gain {0x3546, 0x20}, {0x3547, 0x00}, // spd fine exposure {0x354a, 0x01}, {0x354b, 0x00}, {0x354c, 0x00}, // spd digital gain {0x35c6, 0xb0}, {0x35c7, 0x00}, // vs fine exposure {0x35ca, 0x01}, {0x35cb, 0x00}, {0x35cc, 0x00}, // vs digital gain // also RSVD {0x3600, 0x8f}, {0x3605, 0x16}, {0x3609, 0xf0}, {0x360a, 0x01}, {0x360e, 0x1d}, {0x360f, 0x10}, {0x3610, 0x70}, {0x3611, 0x3a}, {0x3612, 0x28}, {0x361a, 0x29}, {0x361b, 0x6c}, {0x361c, 0x0b}, {0x361d, 0x00}, {0x361e, 0xfc}, {0x362a, 0x00}, {0x364d, 0x0f}, {0x364e, 0x18}, {0x364f, 0x12}, {0x3653, 0x1c}, {0x3654, 0x00}, {0x3655, 0x1f}, {0x3656, 0x1f}, {0x3657, 0x0c}, {0x3658, 0x0a}, {0x3659, 0x14}, {0x365a, 0x18}, {0x365b, 0x14}, {0x365c, 0x10}, {0x365e, 0x12}, {0x3674, 0x08}, {0x3677, 0x3a}, {0x3678, 0x3a}, {0x3679, 0x19}, // Y_ADDR_START = 4 {0x3802, 0x00}, {0x3803, 0x04}, // Y_ADDR_END = 0x50b {0x3806, 0x05}, {0x3807, 0x0b}, // X_OUTPUT_SIZE = 0x780 = 1920 (changed to 1928) {0x3808, 0x07}, {0x3809, 0x88}, // Y_OUTPUT_SIZE = 0x500 = 1280 (changed to 1208) {0x380a, 0x04}, {0x380b, 0xb8}, // horizontal timing 0x447 {0x380c, 0x04}, {0x380d, 0x47}, // rows per frame (was 0x2ae) // 0x8ae = 53.65 ms {0x380e, 0x08}, {0x380f, 0x15}, // this should be triggered by FSIN, not free running {0x3810, 0x00}, {0x3811, 0x08}, // x cutoff {0x3812, 0x00}, {0x3813, 0x04}, // y cutoff {0x3816, 0x01}, {0x3817, 0x01}, {0x381c, 0x18}, {0x381e, 0x01}, {0x381f, 0x01}, // don't mirror, just flip {0x3820, 0x04}, {0x3821, 0x19}, {0x3832, 0xF0}, {0x3834, 0xF0}, {0x384c, 0x02}, {0x384d, 0x0d}, {0x3850, 0x00}, {0x3851, 0x42}, {0x3852, 0x00}, {0x3853, 0x40}, {0x3858, 0x04}, {0x388c, 0x02}, {0x388d, 0x2b}, // APC {0x3b40, 0x05}, {0x3b41, 0x40}, {0x3b42, 0x00}, {0x3b43, 0x90}, {0x3b44, 0x00}, {0x3b45, 0x20}, {0x3b46, 0x00}, {0x3b47, 0x20}, {0x3b48, 0x19}, {0x3b49, 0x12}, {0x3b4a, 0x16}, {0x3b4b, 0x2e}, {0x3b4c, 0x00}, {0x3b4d, 0x00}, {0x3b86, 0x00}, {0x3b87, 0x34}, {0x3b88, 0x00}, {0x3b89, 0x08}, {0x3b8a, 0x05}, {0x3b8b, 0x00}, {0x3b8c, 0x07}, {0x3b8d, 0x80}, {0x3b8e, 0x00}, {0x3b8f, 0x00}, {0x3b92, 0x05}, {0x3b93, 0x00}, {0x3b94, 0x07}, {0x3b95, 0x80}, {0x3b9e, 0x09}, // OTP {0x3d82, 0x73}, {0x3d85, 0x05}, {0x3d8a, 0x03}, {0x3d8b, 0xff}, {0x3d99, 0x00}, {0x3d9a, 0x9f}, {0x3d9b, 0x00}, {0x3d9c, 0xa0}, {0x3da4, 0x00}, {0x3da7, 0x50}, // DTR {0x420e, 0x6b}, {0x420f, 0x6e}, {0x4210, 0x06}, {0x4211, 0xc1}, {0x421e, 0x02}, {0x421f, 0x45}, {0x4220, 0xe1}, {0x4221, 0x01}, {0x4301, 0xff}, {0x4307, 0x03}, {0x4308, 0x13}, {0x430a, 0x13}, {0x430d, 0x93}, {0x430f, 0x57}, {0x4310, 0x95}, {0x4311, 0x16}, {0x4316, 0x00}, {0x4317, 0x38}, // both embedded rows are enabled {0x4319, 0x03}, // spd dcg {0x431a, 0x00}, // 8 bit mipi {0x431b, 0x00}, {0x431d, 0x2a}, {0x431e, 0x11}, {0x431f, 0x20}, // enable PWL (pwl0_en), 12 bits //{0x431f, 0x00}, // disable PWL {0x4320, 0x19}, {0x4323, 0x80}, {0x4324, 0x00}, {0x4503, 0x4e}, {0x4505, 0x00}, {0x4509, 0x00}, {0x450a, 0x00}, {0x4580, 0xf8}, {0x4583, 0x07}, {0x4584, 0x6a}, {0x4585, 0x08}, {0x4586, 0x05}, {0x4587, 0x04}, {0x4588, 0x73}, {0x4589, 0x05}, {0x458a, 0x1f}, {0x458b, 0x02}, {0x458c, 0xdc}, {0x458d, 0x03}, {0x458e, 0x02}, {0x4597, 0x07}, {0x4598, 0x40}, {0x4599, 0x0e}, {0x459a, 0x0e}, {0x459b, 0xfb}, {0x459c, 0xf3}, {0x4602, 0x00}, {0x4603, 0x13}, {0x4604, 0x00}, {0x4609, 0x0a}, {0x460a, 0x30}, {0x4610, 
0x00}, {0x4611, 0x70}, {0x4612, 0x01}, {0x4613, 0x00}, {0x4614, 0x00}, {0x4615, 0x70}, {0x4616, 0x01}, {0x4617, 0x00}, {0x4800, 0x04}, // invert output PCLK {0x480a, 0x22}, {0x4813, 0xe4}, // mipi {0x4814, 0x2a}, {0x4837, 0x0d}, {0x484b, 0x47}, {0x484f, 0x00}, {0x4887, 0x51}, {0x4d00, 0x4a}, {0x4d01, 0x18}, {0x4d05, 0xff}, {0x4d06, 0x88}, {0x4d08, 0x63}, {0x4d09, 0xdf}, {0x4d15, 0x7d}, {0x4d1a, 0x20}, {0x4d30, 0x0a}, {0x4d31, 0x00}, {0x4d34, 0x7d}, {0x4d3c, 0x7d}, {0x4f00, 0x00}, {0x4f01, 0x00}, {0x4f02, 0x00}, {0x4f03, 0x20}, {0x4f04, 0xe0}, {0x6a00, 0x00}, {0x6a01, 0x20}, {0x6a02, 0x00}, {0x6a03, 0x20}, {0x6a04, 0x02}, {0x6a05, 0x80}, {0x6a06, 0x01}, {0x6a07, 0xe0}, {0x6a08, 0xcf}, {0x6a09, 0x01}, {0x6a0a, 0x40}, {0x6a20, 0x00}, {0x6a21, 0x02}, {0x6a22, 0x00}, {0x6a23, 0x00}, {0x6a24, 0x00}, {0x6a25, 0x00}, {0x6a26, 0x00}, {0x6a27, 0x00}, {0x6a28, 0x00}, // isp {0x5000, 0x8f}, {0x5001, 0x75}, {0x5002, 0x7f}, // PWL0 //{0x5002, 0x3f}, // PWL disable {0x5003, 0x7a}, {0x5004, 0x3e}, {0x5005, 0x1e}, {0x5006, 0x1e}, {0x5007, 0x1e}, {0x5008, 0x00}, {0x500c, 0x00}, {0x502c, 0x00}, {0x502e, 0x00}, {0x502f, 0x00}, {0x504b, 0x00}, {0x5053, 0x00}, {0x505b, 0x00}, {0x5063, 0x00}, {0x5070, 0x00}, {0x5074, 0x04}, {0x507a, 0x04}, {0x507b, 0x09}, {0x5500, 0x02}, {0x5700, 0x02}, {0x5900, 0x02}, {0x6007, 0x04}, {0x6008, 0x05}, {0x6009, 0x02}, {0x600b, 0x08}, {0x600c, 0x07}, {0x600d, 0x88}, {0x6016, 0x00}, {0x6027, 0x04}, {0x6028, 0x05}, {0x6029, 0x02}, {0x602b, 0x08}, {0x602c, 0x07}, {0x602d, 0x88}, {0x6047, 0x04}, {0x6048, 0x05}, {0x6049, 0x02}, {0x604b, 0x08}, {0x604c, 0x07}, {0x604d, 0x88}, {0x6067, 0x04}, {0x6068, 0x05}, {0x6069, 0x02}, {0x606b, 0x08}, {0x606c, 0x07}, {0x606d, 0x88}, {0x6087, 0x04}, {0x6088, 0x05}, {0x6089, 0x02}, {0x608b, 0x08}, {0x608c, 0x07}, {0x608d, 0x88}, // 12-bit PWL0 {0x5e00, 0x00}, // m_ndX_exp[0:32] // 9*2+0xa*3+0xb*2+0xc*2+0xd*2+0xe*2+0xf*2+0x10*2+0x11*2+0x12*4+0x13*3+0x14*3+0x15*3+0x16 = 518 {0x5e01, 0x09}, {0x5e02, 0x09}, {0x5e03, 0x0a}, {0x5e04, 0x0a}, {0x5e05, 0x0a}, {0x5e06, 0x0b}, {0x5e07, 0x0b}, {0x5e08, 0x0c}, {0x5e09, 0x0c}, {0x5e0a, 0x0d}, {0x5e0b, 0x0d}, {0x5e0c, 0x0e}, {0x5e0d, 0x0e}, {0x5e0e, 0x0f}, {0x5e0f, 0x0f}, {0x5e10, 0x10}, {0x5e11, 0x10}, {0x5e12, 0x11}, {0x5e13, 0x11}, {0x5e14, 0x12}, {0x5e15, 0x12}, {0x5e16, 0x12}, {0x5e17, 0x12}, {0x5e18, 0x13}, {0x5e19, 0x13}, {0x5e1a, 0x13}, {0x5e1b, 0x14}, {0x5e1c, 0x14}, {0x5e1d, 0x14}, {0x5e1e, 0x15}, {0x5e1f, 0x15}, {0x5e20, 0x15}, {0x5e21, 0x16}, // m_ndY_val[0:32] // 0x200+0xff+0x100*3+0x80*12+0x40*16 = 4095 {0x5e22, 0x00}, {0x5e23, 0x02}, {0x5e24, 0x00}, {0x5e25, 0x00}, {0x5e26, 0x00}, {0x5e27, 0xff}, {0x5e28, 0x00}, {0x5e29, 0x01}, {0x5e2a, 0x00}, {0x5e2b, 0x00}, {0x5e2c, 0x01}, {0x5e2d, 0x00}, {0x5e2e, 0x00}, {0x5e2f, 0x01}, {0x5e30, 0x00}, {0x5e31, 0x00}, {0x5e32, 0x00}, {0x5e33, 0x80}, {0x5e34, 0x00}, {0x5e35, 0x00}, {0x5e36, 0x80}, {0x5e37, 0x00}, {0x5e38, 0x00}, {0x5e39, 0x80}, {0x5e3a, 0x00}, {0x5e3b, 0x00}, {0x5e3c, 0x80}, {0x5e3d, 0x00}, {0x5e3e, 0x00}, {0x5e3f, 0x80}, {0x5e40, 0x00}, {0x5e41, 0x00}, {0x5e42, 0x80}, {0x5e43, 0x00}, {0x5e44, 0x00}, {0x5e45, 0x80}, {0x5e46, 0x00}, {0x5e47, 0x00}, {0x5e48, 0x80}, {0x5e49, 0x00}, {0x5e4a, 0x00}, {0x5e4b, 0x80}, {0x5e4c, 0x00}, {0x5e4d, 0x00}, {0x5e4e, 0x80}, {0x5e4f, 0x00}, {0x5e50, 0x00}, {0x5e51, 0x80}, {0x5e52, 0x00}, {0x5e53, 0x00}, {0x5e54, 0x80}, {0x5e55, 0x00}, {0x5e56, 0x00}, {0x5e57, 0x40}, {0x5e58, 0x00}, {0x5e59, 0x00}, {0x5e5a, 0x40}, {0x5e5b, 0x00}, {0x5e5c, 0x00}, {0x5e5d, 0x40}, {0x5e5e, 0x00}, {0x5e5f, 0x00}, {0x5e60, 0x40}, {0x5e61, 
0x00}, {0x5e62, 0x00}, {0x5e63, 0x40}, {0x5e64, 0x00}, {0x5e65, 0x00}, {0x5e66, 0x40}, {0x5e67, 0x00}, {0x5e68, 0x00}, {0x5e69, 0x40}, {0x5e6a, 0x00}, {0x5e6b, 0x00}, {0x5e6c, 0x40}, {0x5e6d, 0x00}, {0x5e6e, 0x00}, {0x5e6f, 0x40}, {0x5e70, 0x00}, {0x5e71, 0x00}, {0x5e72, 0x40}, {0x5e73, 0x00}, {0x5e74, 0x00}, {0x5e75, 0x40}, {0x5e76, 0x00}, {0x5e77, 0x00}, {0x5e78, 0x40}, {0x5e79, 0x00}, {0x5e7a, 0x00}, {0x5e7b, 0x40}, {0x5e7c, 0x00}, {0x5e7d, 0x00}, {0x5e7e, 0x40}, {0x5e7f, 0x00}, {0x5e80, 0x00}, {0x5e81, 0x40}, {0x5e82, 0x00}, {0x5e83, 0x00}, {0x5e84, 0x40}, // disable PWL /*{0x5e01, 0x18}, {0x5e02, 0x00}, {0x5e03, 0x00}, {0x5e04, 0x00}, {0x5e05, 0x00}, {0x5e06, 0x00}, {0x5e07, 0x00}, {0x5e08, 0x00}, {0x5e09, 0x00}, {0x5e0a, 0x00}, {0x5e0b, 0x00}, {0x5e0c, 0x00}, {0x5e0d, 0x00}, {0x5e0e, 0x00}, {0x5e0f, 0x00}, {0x5e10, 0x00}, {0x5e11, 0x00}, {0x5e12, 0x00}, {0x5e13, 0x00}, {0x5e14, 0x00}, {0x5e15, 0x00}, {0x5e16, 0x00}, {0x5e17, 0x00}, {0x5e18, 0x00}, {0x5e19, 0x00}, {0x5e1a, 0x00}, {0x5e1b, 0x00}, {0x5e1c, 0x00}, {0x5e1d, 0x00}, {0x5e1e, 0x00}, {0x5e1f, 0x00}, {0x5e20, 0x00}, {0x5e21, 0x00}, {0x5e22, 0x00}, {0x5e23, 0x0f}, {0x5e24, 0xFF},*/ {0x4001, 0x2b}, // BLC_CTRL_1 {0x4008, 0x02}, {0x4009, 0x03}, {0x4018, 0x12}, {0x4022, 0x40}, {0x4023, 0x20}, // all black level targets are 0x40 {0x4026, 0x00}, {0x4027, 0x40}, {0x4028, 0x00}, {0x4029, 0x40}, {0x402a, 0x00}, {0x402b, 0x40}, {0x402c, 0x00}, {0x402d, 0x40}, {0x407e, 0xcc}, {0x407f, 0x18}, {0x4080, 0xff}, {0x4081, 0xff}, {0x4082, 0x01}, {0x4083, 0x53}, {0x4084, 0x01}, {0x4085, 0x2b}, {0x4086, 0x00}, {0x4087, 0xb3}, {0x4640, 0x40}, {0x4641, 0x11}, {0x4642, 0x0e}, {0x4643, 0xee}, {0x4646, 0x0f}, {0x4648, 0x00}, {0x4649, 0x03}, {0x4f00, 0x00}, {0x4f01, 0x00}, {0x4f02, 0x80}, {0x4f03, 0x2c}, {0x4f04, 0xf8}, {0x4d09, 0xff}, {0x4d09, 0xdf}, {0x5003, 0x7a}, {0x5b80, 0x08}, {0x5c00, 0x08}, {0x5c80, 0x00}, {0x5bbe, 0x12}, {0x5c3e, 0x12}, {0x5cbe, 0x12}, {0x5b8a, 0x80}, {0x5b8b, 0x80}, {0x5b8c, 0x80}, {0x5b8d, 0x80}, {0x5b8e, 0x60}, {0x5b8f, 0x80}, {0x5b90, 0x80}, {0x5b91, 0x80}, {0x5b92, 0x80}, {0x5b93, 0x20}, {0x5b94, 0x80}, {0x5b95, 0x80}, {0x5b96, 0x80}, {0x5b97, 0x20}, {0x5b98, 0x00}, {0x5b99, 0x80}, {0x5b9a, 0x40}, {0x5b9b, 0x20}, {0x5b9c, 0x00}, {0x5b9d, 0x00}, {0x5b9e, 0x80}, {0x5b9f, 0x00}, {0x5ba0, 0x00}, {0x5ba1, 0x00}, {0x5ba2, 0x00}, {0x5ba3, 0x00}, {0x5ba4, 0x00}, {0x5ba5, 0x00}, {0x5ba6, 0x00}, {0x5ba7, 0x00}, {0x5ba8, 0x02}, {0x5ba9, 0x00}, {0x5baa, 0x02}, {0x5bab, 0x76}, {0x5bac, 0x03}, {0x5bad, 0x08}, {0x5bae, 0x00}, {0x5baf, 0x80}, {0x5bb0, 0x00}, {0x5bb1, 0xc0}, {0x5bb2, 0x01}, {0x5bb3, 0x00}, // m_nNormCombineWeight {0x5c0a, 0x80}, {0x5c0b, 0x80}, {0x5c0c, 0x80}, {0x5c0d, 0x80}, {0x5c0e, 0x60}, {0x5c0f, 0x80}, {0x5c10, 0x80}, {0x5c11, 0x80}, {0x5c12, 0x60}, {0x5c13, 0x20}, {0x5c14, 0x80}, {0x5c15, 0x80}, {0x5c16, 0x80}, {0x5c17, 0x20}, {0x5c18, 0x00}, {0x5c19, 0x80}, {0x5c1a, 0x40}, {0x5c1b, 0x20}, {0x5c1c, 0x00}, {0x5c1d, 0x00}, {0x5c1e, 0x80}, {0x5c1f, 0x00}, {0x5c20, 0x00}, {0x5c21, 0x00}, {0x5c22, 0x00}, {0x5c23, 0x00}, {0x5c24, 0x00}, {0x5c25, 0x00}, {0x5c26, 0x00}, {0x5c27, 0x00}, // m_nCombinThreL {0x5c28, 0x02}, {0x5c29, 0x00}, {0x5c2a, 0x02}, {0x5c2b, 0x76}, {0x5c2c, 0x03}, {0x5c2d, 0x08}, // m_nCombinThreS {0x5c2e, 0x00}, {0x5c2f, 0x80}, {0x5c30, 0x00}, {0x5c31, 0xc0}, {0x5c32, 0x01}, {0x5c33, 0x00}, // m_nNormCombineWeight {0x5c8a, 0x80}, {0x5c8b, 0x80}, {0x5c8c, 0x80}, {0x5c8d, 0x80}, {0x5c8e, 0x80}, {0x5c8f, 0x80}, {0x5c90, 0x80}, {0x5c91, 0x80}, {0x5c92, 0x80}, {0x5c93, 0x60}, {0x5c94, 0x80}, {0x5c95, 0x80}, 
{0x5c96, 0x80}, {0x5c97, 0x60}, {0x5c98, 0x40}, {0x5c99, 0x80}, {0x5c9a, 0x80}, {0x5c9b, 0x80}, {0x5c9c, 0x40}, {0x5c9d, 0x00}, {0x5c9e, 0x80}, {0x5c9f, 0x80}, {0x5ca0, 0x80}, {0x5ca1, 0x20}, {0x5ca2, 0x00}, {0x5ca3, 0x80}, {0x5ca4, 0x80}, {0x5ca5, 0x00}, {0x5ca6, 0x00}, {0x5ca7, 0x00}, {0x5ca8, 0x01}, {0x5ca9, 0x00}, {0x5caa, 0x02}, {0x5cab, 0x00}, {0x5cac, 0x03}, {0x5cad, 0x08}, {0x5cae, 0x01}, {0x5caf, 0x00}, {0x5cb0, 0x02}, {0x5cb1, 0x00}, {0x5cb2, 0x03}, {0x5cb3, 0x08}, // combine ISP {0x5be7, 0x80}, {0x5bc9, 0x80}, {0x5bca, 0x80}, {0x5bcb, 0x80}, {0x5bcc, 0x80}, {0x5bcd, 0x80}, {0x5bce, 0x80}, {0x5bcf, 0x80}, {0x5bd0, 0x80}, {0x5bd1, 0x80}, {0x5bd2, 0x20}, {0x5bd3, 0x80}, {0x5bd4, 0x40}, {0x5bd5, 0x20}, {0x5bd6, 0x00}, {0x5bd7, 0x00}, {0x5bd8, 0x00}, {0x5bd9, 0x00}, {0x5bda, 0x00}, {0x5bdb, 0x00}, {0x5bdc, 0x00}, {0x5bdd, 0x00}, {0x5bde, 0x00}, {0x5bdf, 0x00}, {0x5be0, 0x00}, {0x5be1, 0x00}, {0x5be2, 0x00}, {0x5be3, 0x00}, {0x5be4, 0x00}, {0x5be5, 0x00}, {0x5be6, 0x00}, // m_nSPDCombineWeight {0x5c49, 0x80}, {0x5c4a, 0x80}, {0x5c4b, 0x80}, {0x5c4c, 0x80}, {0x5c4d, 0x40}, {0x5c4e, 0x80}, {0x5c4f, 0x80}, {0x5c50, 0x80}, {0x5c51, 0x60}, {0x5c52, 0x20}, {0x5c53, 0x80}, {0x5c54, 0x80}, {0x5c55, 0x80}, {0x5c56, 0x20}, {0x5c57, 0x00}, {0x5c58, 0x80}, {0x5c59, 0x40}, {0x5c5a, 0x20}, {0x5c5b, 0x00}, {0x5c5c, 0x00}, {0x5c5d, 0x80}, {0x5c5e, 0x00}, {0x5c5f, 0x00}, {0x5c60, 0x00}, {0x5c61, 0x00}, {0x5c62, 0x00}, {0x5c63, 0x00}, {0x5c64, 0x00}, {0x5c65, 0x00}, {0x5c66, 0x00}, // m_nSPDCombineWeight {0x5cc9, 0x80}, {0x5cca, 0x80}, {0x5ccb, 0x80}, {0x5ccc, 0x80}, {0x5ccd, 0x80}, {0x5cce, 0x80}, {0x5ccf, 0x80}, {0x5cd0, 0x80}, {0x5cd1, 0x80}, {0x5cd2, 0x60}, {0x5cd3, 0x80}, {0x5cd4, 0x80}, {0x5cd5, 0x80}, {0x5cd6, 0x60}, {0x5cd7, 0x40}, {0x5cd8, 0x80}, {0x5cd9, 0x80}, {0x5cda, 0x80}, {0x5cdb, 0x40}, {0x5cdc, 0x20}, {0x5cdd, 0x80}, {0x5cde, 0x80}, {0x5cdf, 0x80}, {0x5ce0, 0x20}, {0x5ce1, 0x00}, {0x5ce2, 0x80}, {0x5ce3, 0x80}, {0x5ce4, 0x80}, {0x5ce5, 0x00}, {0x5ce6, 0x00}, {0x5d74, 0x01}, {0x5d75, 0x00}, {0x5d1f, 0x81}, {0x5d11, 0x00}, {0x5d12, 0x10}, {0x5d13, 0x10}, {0x5d15, 0x05}, {0x5d16, 0x05}, {0x5d17, 0x05}, {0x5d08, 0x03}, {0x5d09, 0xb6}, {0x5d0a, 0x03}, {0x5d0b, 0xb6}, {0x5d18, 0x03}, {0x5d19, 0xb6}, {0x5d62, 0x01}, {0x5d40, 0x02}, {0x5d41, 0x01}, {0x5d63, 0x1f}, {0x5d64, 0x00}, {0x5d65, 0x80}, {0x5d56, 0x00}, {0x5d57, 0x20}, {0x5d58, 0x00}, {0x5d59, 0x20}, {0x5d5a, 0x00}, {0x5d5b, 0x0c}, {0x5d5c, 0x02}, {0x5d5d, 0x40}, {0x5d5e, 0x02}, {0x5d5f, 0x40}, {0x5d60, 0x03}, {0x5d61, 0x40}, {0x5d4a, 0x02}, {0x5d4b, 0x40}, {0x5d4c, 0x02}, {0x5d4d, 0x40}, {0x5d4e, 0x02}, {0x5d4f, 0x40}, {0x5d50, 0x18}, {0x5d51, 0x80}, {0x5d52, 0x18}, {0x5d53, 0x80}, {0x5d54, 0x18}, {0x5d55, 0x80}, {0x5d46, 0x20}, {0x5d47, 0x00}, {0x5d48, 0x22}, {0x5d49, 0x00}, {0x5d42, 0x20}, {0x5d43, 0x00}, {0x5d44, 0x22}, {0x5d45, 0x00}, {0x5004, 0x1e}, {0x4221, 0x03}, // this is changed from 1 -> 3 // DCG exposure coarse // {0x3501, 0x01}, {0x3502, 0xc8}, // SPD exposure coarse // {0x3541, 0x01}, {0x3542, 0xc8}, // VS exposure coarse // {0x35c1, 0x00}, {0x35c2, 0x01}, // crc reference {0x420e, 0x66}, {0x420f, 0x5d}, {0x4210, 0xa8}, {0x4211, 0x55}, // crc stat check {0x507a, 0x5f}, {0x507b, 0x46}, // watchdog control {0x4f00, 0x00}, {0x4f01, 0x01}, {0x4f02, 0x80}, {0x4f04, 0x2c}, // color balance gains // blue {0x5280, 0x06}, {0x5281, 0xCB}, // hcg {0x5480, 0x06}, {0x5481, 0xCB}, // lcg {0x5680, 0x06}, {0x5681, 0xCB}, // spd {0x5880, 0x06}, {0x5881, 0xCB}, // vs // green(blue) {0x5282, 0x04}, {0x5283, 0x00}, {0x5482, 0x04}, {0x5483, 
0x00}, {0x5682, 0x04}, {0x5683, 0x00}, {0x5882, 0x04}, {0x5883, 0x00}, // green(red) {0x5284, 0x04}, {0x5285, 0x00}, {0x5484, 0x04}, {0x5485, 0x00}, {0x5684, 0x04}, {0x5685, 0x00}, {0x5884, 0x04}, {0x5885, 0x00}, // red {0x5286, 0x08}, {0x5287, 0xDE}, {0x5486, 0x08}, {0x5487, 0xDE}, {0x5686, 0x08}, {0x5687, 0xDE}, {0x5886, 0x08}, {0x5887, 0xDE}, // fixed gains {0x3588, 0x01}, {0x3589, 0x00}, {0x35c8, 0x01}, {0x35c9, 0x00}, {0x3548, 0x0F}, {0x3549, 0x00}, {0x35c1, 0x00}, };
2301_81045437/openpilot
system/camerad/sensors/ox03c10_registers.h
C
mit
19,668
#pragma once

#include <cassert>
#include <cstdint>
#include <map>
#include <utility>
#include <vector>

#include "media/cam_sensor.h"
#include "system/camerad/cameras/camera_common.h"
#include "system/camerad/sensors/ar0231_registers.h"
#include "system/camerad/sensors/ox03c10_registers.h"
#include "system/camerad/sensors/os04c10_registers.h"

#define ANALOG_GAIN_MAX_CNT 55

class SensorInfo {
public:
  SensorInfo() = default;
  virtual std::vector<i2c_random_wr_payload> getExposureRegisters(int exposure_time, int new_exp_g, bool dc_gain_enabled) const { return {}; }
  virtual float getExposureScore(float desired_ev, int exp_t, int exp_g_idx, float exp_gain, int gain_idx) const { return 0; }
  virtual int getSlaveAddress(int port) const { assert(0); }
  virtual void processRegisters(CameraState *c, cereal::FrameData::Builder &framed) const {}

  cereal::FrameData::ImageSensor image_sensor = cereal::FrameData::ImageSensor::UNKNOWN;
  float pixel_size_mm;
  uint32_t frame_width, frame_height;
  uint32_t frame_stride;
  uint32_t frame_offset = 0;
  uint32_t extra_height = 0;
  int registers_offset = -1;
  int stats_offset = -1;
  int hdr_offset = -1;

  int exposure_time_min;
  int exposure_time_max;

  float dc_gain_factor;
  int dc_gain_min_weight;
  int dc_gain_max_weight;
  float dc_gain_on_grey;
  float dc_gain_off_grey;

  float sensor_analog_gains[ANALOG_GAIN_MAX_CNT];
  int analog_gain_min_idx;
  int analog_gain_max_idx;
  int analog_gain_rec_idx;
  int analog_gain_cost_delta;
  float analog_gain_cost_low;
  float analog_gain_cost_high;
  float target_grey_factor;
  float min_ev;
  float max_ev;

  bool data_word;
  uint32_t probe_reg_addr;
  uint32_t probe_expected_data;
  std::vector<i2c_random_wr_payload> start_reg_array;
  std::vector<i2c_random_wr_payload> init_reg_array;
  uint32_t mipi_format;
  uint32_t mclk_frequency;
  uint32_t frame_data_type;
};

class AR0231 : public SensorInfo {
public:
  AR0231();
  std::vector<i2c_random_wr_payload> getExposureRegisters(int exposure_time, int new_exp_g, bool dc_gain_enabled) const override;
  float getExposureScore(float desired_ev, int exp_t, int exp_g_idx, float exp_gain, int gain_idx) const override;
  int getSlaveAddress(int port) const override;
  void processRegisters(CameraState *c, cereal::FrameData::Builder &framed) const override;

private:
  mutable std::map<uint16_t, std::pair<int, int>> ar0231_register_lut;
};

class OX03C10 : public SensorInfo {
public:
  OX03C10();
  std::vector<i2c_random_wr_payload> getExposureRegisters(int exposure_time, int new_exp_g, bool dc_gain_enabled) const override;
  float getExposureScore(float desired_ev, int exp_t, int exp_g_idx, float exp_gain, int gain_idx) const override;
  int getSlaveAddress(int port) const override;
};

class OS04C10 : public SensorInfo {
public:
  OS04C10();
  std::vector<i2c_random_wr_payload> getExposureRegisters(int exposure_time, int new_exp_g, bool dc_gain_enabled) const override;
  float getExposureScore(float desired_ev, int exp_t, int exp_g_idx, float exp_gain, int gain_idx) const override;
  int getSlaveAddress(int port) const override;
};
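A hedged sketch of how the probe_reg_addr / probe_expected_data fields above could plausibly be used to detect which sensor is attached; this is not the actual camerad probing code, and read_sensor_register is a hypothetical stand-in for the real CCI/I2C read.

// Hedged sketch, not repository code.
#include <cstdint>
#include <memory>
#include <vector>

uint32_t read_sensor_register(int port, int slave_addr, uint32_t addr);  // assumed to exist elsewhere

std::unique_ptr<SensorInfo> probe_sensor(int port) {
  std::vector<std::unique_ptr<SensorInfo>> candidates;
  candidates.push_back(std::make_unique<AR0231>());
  candidates.push_back(std::make_unique<OX03C10>());
  candidates.push_back(std::make_unique<OS04C10>());

  for (auto &s : candidates) {
    uint32_t val = read_sensor_register(port, s->getSlaveAddress(port), s->probe_reg_addr);
    if (val == s->probe_expected_data) {
      return std::move(s);  // chip ID matched; use this sensor's init/start register arrays
    }
  }
  return nullptr;  // no known sensor responded
}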
2301_81045437/openpilot
system/camerad/sensors/sensor.h
C++
mit
3,128
#!/usr/bin/env python3
import subprocess
import time

import numpy as np
from PIL import Image

import cereal.messaging as messaging
from cereal.visionipc import VisionIpcClient, VisionStreamType
from openpilot.common.params import Params
from openpilot.common.realtime import DT_MDL
from openpilot.system.hardware import PC
from openpilot.selfdrive.controls.lib.alertmanager import set_offroad_alert
from openpilot.system.manager.process_config import managed_processes

VISION_STREAMS = {
  "roadCameraState": VisionStreamType.VISION_STREAM_ROAD,
  "driverCameraState": VisionStreamType.VISION_STREAM_DRIVER,
  "wideRoadCameraState": VisionStreamType.VISION_STREAM_WIDE_ROAD,
}


def jpeg_write(fn, dat):
  img = Image.fromarray(dat)
  img.save(fn, "JPEG")


def yuv_to_rgb(y, u, v):
  ul = np.repeat(np.repeat(u, 2).reshape(u.shape[0], y.shape[1]), 2, axis=0).reshape(y.shape)
  vl = np.repeat(np.repeat(v, 2).reshape(v.shape[0], y.shape[1]), 2, axis=0).reshape(y.shape)

  yuv = np.dstack((y, ul, vl)).astype(np.int16)
  yuv[:, :, 1:] -= 128

  m = np.array([
    [1.00000, 1.00000, 1.00000],
    [0.00000, -0.39465, 2.03211],
    [1.13983, -0.58060, 0.00000],
  ])
  rgb = np.dot(yuv, m).clip(0, 255)
  return rgb.astype(np.uint8)


def extract_image(buf):
  y = np.array(buf.data[:buf.uv_offset], dtype=np.uint8).reshape((-1, buf.stride))[:buf.height, :buf.width]
  u = np.array(buf.data[buf.uv_offset::2], dtype=np.uint8).reshape((-1, buf.stride//2))[:buf.height//2, :buf.width//2]
  v = np.array(buf.data[buf.uv_offset+1::2], dtype=np.uint8).reshape((-1, buf.stride//2))[:buf.height//2, :buf.width//2]

  return yuv_to_rgb(y, u, v)


def get_snapshots(frame="roadCameraState", front_frame="driverCameraState"):
  sockets = [s for s in (frame, front_frame) if s is not None]
  sm = messaging.SubMaster(sockets)
  vipc_clients = {s: VisionIpcClient("camerad", VISION_STREAMS[s], True) for s in sockets}

  # wait 4 sec from camerad startup for focus and exposure
  while sm[sockets[0]].frameId < int(4. / DT_MDL):
    sm.update()

  for client in vipc_clients.values():
    client.connect(True)

  # grab images
  rear, front = None, None
  if frame is not None:
    c = vipc_clients[frame]
    rear = extract_image(c.recv())
  if front_frame is not None:
    c = vipc_clients[front_frame]
    front = extract_image(c.recv())
  return rear, front


def snapshot():
  params = Params()
  if (not params.get_bool("IsOffroad")) or params.get_bool("IsTakingSnapshot"):
    print("Already taking snapshot")
    return None, None

  front_camera_allowed = params.get_bool("RecordFront")
  params.put_bool("IsTakingSnapshot", True)
  set_offroad_alert("Offroad_IsTakingSnapshot", True)
  time.sleep(2.0)  # Give thermald time to read the param, or if just started give camerad time to start

  # Check if camerad is already started
  try:
    subprocess.check_call(["pgrep", "camerad"])
    print("Camerad already running")
    params.put_bool("IsTakingSnapshot", False)
    params.remove("Offroad_IsTakingSnapshot")
    return None, None
  except subprocess.CalledProcessError:
    pass

  try:
    # Allow testing on replay on PC
    if not PC:
      managed_processes['camerad'].start()

    frame = "wideRoadCameraState"
    front_frame = "driverCameraState" if front_camera_allowed else None
    rear, front = get_snapshots(frame, front_frame)
  finally:
    managed_processes['camerad'].stop()
    params.put_bool("IsTakingSnapshot", False)
    set_offroad_alert("Offroad_IsTakingSnapshot", False)

  if not front_camera_allowed:
    front = None

  return rear, front


if __name__ == "__main__":
  pic, fpic = snapshot()
  if pic is not None:
    print(pic.shape)
    jpeg_write("/tmp/back.jpg", pic)
    if fpic is not None:
      jpeg_write("/tmp/front.jpg", fpic)
  else:
    print("Error taking snapshot")
2301_81045437/openpilot
system/camerad/snapshot/snapshot.py
Python
mit
3,820
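The yuv_to_rgb helper above is a BT.601-style matrix conversion applied after nearest-neighbor upsampling of the half-resolution chroma planes. The following is a minimal standalone sketch of the same mapping (not part of the repo; the 2x2 toy frame and the simplified helper name are made up for illustration):

# Minimal sketch of the YUV -> RGB mapping used in snapshot.py, applied to a
# tiny synthetic frame (toy data, not a real camera buffer).
import numpy as np

def yuv_to_rgb_small(y, u, v):
  # nearest-neighbor upsample of the half-resolution chroma planes
  ul = np.repeat(np.repeat(u, 2, axis=1), 2, axis=0)
  vl = np.repeat(np.repeat(v, 2, axis=1), 2, axis=0)
  yuv = np.dstack((y, ul, vl)).astype(np.int16)
  yuv[:, :, 1:] -= 128  # center the chroma channels around zero
  m = np.array([
    [1.00000,  1.00000, 1.00000],
    [0.00000, -0.39465, 2.03211],
    [1.13983, -0.58060, 0.00000],
  ])
  return np.dot(yuv, m).clip(0, 255).astype(np.uint8)

y = np.full((2, 2), 128, dtype=np.uint8)  # mid-grey luma
u = np.full((1, 1), 128, dtype=np.uint8)  # neutral chroma
v = np.full((1, 1), 128, dtype=np.uint8)
print(yuv_to_rgb_small(y, u, v))  # every pixel comes out as [128 128 128]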
#!/usr/bin/env python3
# type: ignore
import cereal.messaging as messaging

all_sockets = ['roadCameraState', 'driverCameraState', 'wideRoadCameraState']
prev_id = [None, None, None]
this_id = [None, None, None]
dt = [None, None, None]
num_skipped = [0, 0, 0]

if __name__ == "__main__":
  sm = messaging.SubMaster(all_sockets)

  while True:
    sm.update()
    for i in range(len(all_sockets)):
      if not sm.updated[all_sockets[i]]:
        continue
      this_id[i] = sm[all_sockets[i]].frameId
      if prev_id[i] is None:
        prev_id[i] = this_id[i]
        continue
      dt[i] = this_id[i] - prev_id[i]
      if dt[i] != 1:
        num_skipped[i] += dt[i] - 1
        print(all_sockets[i], dt[i] - 1, num_skipped[i])
      prev_id[i] = this_id[i]
2301_81045437/openpilot
system/camerad/test/check_skips.py
Python
mit
753
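The skip counter in check_skips.py only needs the difference between consecutive frameIds. The same check can be run offline on a recorded list of ids; this is a hypothetical standalone variant (count_skips and the sample ids are made up for illustration):

# Hypothetical offline variant of the check in check_skips.py: given the
# frameIds seen on one camera socket, count how many frames were dropped.
def count_skips(frame_ids):
  skipped = 0
  for prev, cur in zip(frame_ids, frame_ids[1:]):
    dt = cur - prev
    if dt != 1:
      skipped += dt - 1
  return skipped

assert count_skips([10, 11, 12, 15, 16]) == 2  # frames 13 and 14 were missed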
#!/usr/bin/env python3
import argparse
import os

from tqdm import tqdm

from openpilot.tools.lib.logreader import LogReader

if __name__ == "__main__":
  parser = argparse.ArgumentParser()
  parser.add_argument("route", help="The route name")
  args = parser.parse_args()

  out_path = os.path.join("jpegs", f"{args.route.replace('|', '_').replace('/', '_')}")
  os.makedirs(out_path, exist_ok=True)

  lr = LogReader(args.route)

  for msg in tqdm(lr):
    if msg.which() == 'thumbnail':
      with open(os.path.join(out_path, f"{msg.thumbnail.frameId}.jpg"), 'wb') as f:
        f.write(msg.thumbnail.thumbnail)
    elif msg.which() == 'navThumbnail':
      with open(os.path.join(out_path, f"nav_{msg.navThumbnail.frameId}.jpg"), 'wb') as f:
        f.write(msg.navThumbnail.thumbnail)
2301_81045437/openpilot
system/camerad/test/get_thumbnails_for_segment.py
Python
mit
789
#!/bin/sh
cd ..
while :; do
  ./camerad &
  pid="$!"
  sleep 2
  kill -2 $pid
  wait $pid
done
2301_81045437/openpilot
system/camerad/test/stress_restart.sh
Shell
mit
95
#define CATCH_CONFIG_MAIN
#include "catch2/catch.hpp"

#include <cassert>
#include <cmath>
#include <cstring>

#include "common/util.h"
#include "system/camerad/cameras/camera_common.h"

#define W 240
#define H 160

#define TONE_SPLITS 3

float gts[TONE_SPLITS * TONE_SPLITS * TONE_SPLITS * TONE_SPLITS] = {
  0.917969, 0.917969, 0.375000, 0.917969, 0.375000, 0.375000, 0.187500, 0.187500, 0.187500,
  0.917969, 0.375000, 0.375000, 0.187500, 0.187500, 0.187500, 0.187500, 0.187500, 0.187500,
  0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.093750,
  0.917969, 0.375000, 0.375000, 0.187500, 0.187500, 0.187500, 0.187500, 0.187500, 0.187500,
  0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.093750,
  0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.093750,
  0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000,
  0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000,
  0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000};

TEST_CASE("camera.test_set_exposure_target") {
  // set up fake camerabuf
  CameraBuf cb = {};
  VisionBuf vb = {};
  uint8_t *fb_y = new uint8_t[W*H];
  vb.y = fb_y;
  cb.cur_yuv_buf = &vb;
  cb.rgb_width = W;
  cb.rgb_height = H;
  Rect rect = {0, 0, W-1, H-1};

  printf("AE test patterns %dx%d\n", cb.rgb_width, cb.rgb_height);

  // mix of 5 tones
  uint8_t l[5] = {0, 24, 48, 96, 235};  // 235 is yuv max

  bool passed = true;
  float rtol = 0.05;

  // generate pattern and calculate EV
  int cnt = 0;
  for (int i_0=0; i_0<TONE_SPLITS; i_0++) {
    for (int i_1=0; i_1<TONE_SPLITS; i_1++) {
      for (int i_2=0; i_2<TONE_SPLITS; i_2++) {
        for (int i_3=0; i_3<TONE_SPLITS; i_3++) {
          int h_0 = i_0 * H / TONE_SPLITS;
          int h_1 = i_1 * (H - h_0) / TONE_SPLITS;
          int h_2 = i_2 * (H - h_0 - h_1) / TONE_SPLITS;
          int h_3 = i_3 * (H - h_0 - h_1 - h_2) / TONE_SPLITS;
          int h_4 = H - h_0 - h_1 - h_2 - h_3;
          memset(&fb_y[0], l[0], h_0*W);
          memset(&fb_y[h_0*W], l[1], h_1*W);
          memset(&fb_y[h_0*W+h_1*W], l[2], h_2*W);
          memset(&fb_y[h_0*W+h_1*W+h_2*W], l[3], h_3*W);
          memset(&fb_y[h_0*W+h_1*W+h_2*W+h_3*W], l[4], h_4*W);
          float ev = set_exposure_target((const CameraBuf*) &cb, rect, 1, 1);
          // printf("%d/%d/%d/%d/%d ev is %f\n", h_0, h_1, h_2, h_3, h_4, ev);
          // printf("%f\n", ev);

          // compare to gt
          float evgt = gts[cnt];
          if (fabs(ev - evgt) > rtol*evgt) {
            passed = false;
          }

          // report
          printf("%d/%d/%d/%d/%d: ev %f, gt %f, err %f\n", h_0, h_1, h_2, h_3, h_4, ev, evgt, fabs(ev - evgt) / (evgt != 0 ? evgt : 0.00001f));
          cnt++;
        }
      }
    }
  }
  assert(passed);

  delete[] fb_y;
}
2301_81045437/openpilot
system/camerad/test/test_ae_gray.cc
C++
mit
2,940
import os
from typing import cast

from openpilot.system.hardware.base import HardwareBase
from openpilot.system.hardware.tici.hardware import Tici
from openpilot.system.hardware.pc.hardware import Pc

TICI = os.path.isfile('/TICI')
AGNOS = os.path.isfile('/AGNOS')
PC = not TICI

if TICI:
  HARDWARE = cast(HardwareBase, Tici())
else:
  HARDWARE = cast(HardwareBase, Pc())
2301_81045437/openpilot
system/hardware/__init__.py
Python
mit
375
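Callers never construct Tici or Pc themselves; they import the HARDWARE singleton selected above. A hedged usage sketch (assuming an openpilot checkout on the PYTHONPATH; the calls used are methods declared on HardwareBase):

# Hypothetical caller: the platform check already happened at import time,
# so code just uses the HARDWARE object and the PC/TICI flags.
from openpilot.system.hardware import HARDWARE, PC, TICI

print("device type:", HARDWARE.get_device_type())
if TICI and not PC:
  HARDWARE.set_screen_brightness(50)  # only meaningful on a comma device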
#pragma once

#include <cstdlib>
#include <fstream>
#include <map>
#include <string>

#include "cereal/messaging/messaging.h"

// no-op base hw class
class HardwareNone {
public:
  static constexpr float MAX_VOLUME = 0.7;
  static constexpr float MIN_VOLUME = 0.2;

  static std::string get_os_version() { return ""; }
  static std::string get_name() { return ""; }
  static cereal::InitData::DeviceType get_device_type() { return cereal::InitData::DeviceType::UNKNOWN; }
  static int get_voltage() { return 0; }
  static int get_current() { return 0; }

  static std::string get_serial() { return "cccccc"; }
  static std::map<std::string, std::string> get_init_logs() { return {}; }

  static void reboot() {}
  static void poweroff() {}
  static void set_brightness(int percent) {}
  static void set_display_power(bool on) {}

  static bool get_ssh_enabled() { return false; }
  static void set_ssh_enabled(bool enabled) {}

  static void config_cpu_rendering(bool offscreen);

  static bool PC() { return false; }
  static bool TICI() { return false; }
  static bool AGNOS() { return false; }
};
2301_81045437/openpilot
system/hardware/base.h
C++
mit
1,107
from abc import abstractmethod, ABC
from collections import namedtuple

from cereal import log

ThermalConfig = namedtuple('ThermalConfig', ['cpu', 'gpu', 'mem', 'bat', 'pmic'])

NetworkType = log.DeviceState.NetworkType


class HardwareBase(ABC):
  @staticmethod
  def get_cmdline() -> dict[str, str]:
    with open('/proc/cmdline') as f:
      cmdline = f.read()
    return {kv[0]: kv[1] for kv in [s.split('=') for s in cmdline.split(' ')] if len(kv) == 2}

  @staticmethod
  def read_param_file(path, parser, default=0):
    try:
      with open(path) as f:
        return parser(f.read())
    except Exception:
      return default

  def booted(self) -> bool:
    return True

  @abstractmethod
  def reboot(self, reason=None):
    pass

  @abstractmethod
  def uninstall(self):
    pass

  @abstractmethod
  def get_os_version(self):
    pass

  @abstractmethod
  def get_device_type(self):
    pass

  @abstractmethod
  def get_sound_card_online(self):
    pass

  @abstractmethod
  def get_imei(self, slot) -> str:
    pass

  @abstractmethod
  def get_serial(self):
    pass

  @abstractmethod
  def get_network_info(self):
    pass

  @abstractmethod
  def get_network_type(self):
    pass

  @abstractmethod
  def get_sim_info(self):
    pass

  @abstractmethod
  def get_network_strength(self, network_type):
    pass

  def get_network_metered(self, network_type) -> bool:
    return network_type not in (NetworkType.none, NetworkType.wifi, NetworkType.ethernet)

  @staticmethod
  def set_bandwidth_limit(upload_speed_kbps: int, download_speed_kbps: int) -> None:
    pass

  @abstractmethod
  def get_current_power_draw(self):
    pass

  @abstractmethod
  def get_som_power_draw(self):
    pass

  @abstractmethod
  def shutdown(self):
    pass

  @abstractmethod
  def get_thermal_config(self):
    pass

  @abstractmethod
  def set_screen_brightness(self, percentage):
    pass

  @abstractmethod
  def get_screen_brightness(self):
    pass

  @abstractmethod
  def set_power_save(self, powersave_enabled):
    pass

  @abstractmethod
  def get_gpu_usage_percent(self):
    pass

  def get_modem_version(self):
    return None

  def get_modem_nv(self):
    return None

  @abstractmethod
  def get_modem_temperatures(self):
    pass

  @abstractmethod
  def get_nvme_temperatures(self):
    pass

  @abstractmethod
  def initialize_hardware(self):
    pass

  def configure_modem(self):
    pass

  @abstractmethod
  def get_networks(self):
    pass

  def has_internal_panda(self) -> bool:
    return False

  def reset_internal_panda(self):
    pass

  def recover_internal_panda(self):
    pass

  def get_modem_data_usage(self):
    return -1, -1
2301_81045437/openpilot
system/hardware/base.py
Python
mit
2,670
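The get_cmdline helper above is a one-line dict comprehension over /proc/cmdline: split on spaces, keep only key=value tokens, and build a dict. A small self-contained sketch of the same parsing, run on a made-up cmdline string instead of the real file:

# Sketch of the parsing done by HardwareBase.get_cmdline(), applied to a
# hypothetical cmdline string; bare flags like "quiet" are dropped.
cmdline = "console=ttyMSM0,115200n8 androidboot.serialno=abc123 quiet"
parsed = {kv[0]: kv[1]
          for kv in (tok.split('=') for tok in cmdline.split(' '))
          if len(kv) == 2}
print(parsed)  # {'console': 'ttyMSM0,115200n8', 'androidboot.serialno': 'abc123'}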